/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*     * Redistributions of source code must retain the above copyright
*       notice, this list of conditions and the following disclaimer.
*     * Redistributions in binary form must reproduce the above
*       copyright notice, this list of conditions and the following
*       disclaimer in the documentation and/or other materials provided
*       with the distribution.
*     * Neither the name of The Linux Foundation nor the names of its
*       contributors may be used to endorse or promote products derived
*       from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
*/

#define LOG_TAG "QCamera3HWI"
//#define LOG_NDEBUG 0

#define __STDC_LIMIT_MACROS

// To remove
#include <cutils/properties.h>

// System dependencies
#include <dlfcn.h>
#include <fcntl.h>
#include <stdio.h>
#include <stdlib.h>
#include "utils/Timers.h"
#include "sys/ioctl.h"
#include <time.h>
#include <sync/sync.h>
#include "gralloc_priv.h"
#include <map>

// Display dependencies
#include "qdMetaData.h"

// Camera dependencies
#include "android/QCamera3External.h"
#include "util/QCameraFlash.h"
#include "QCamera3HWI.h"
#include "QCamera3VendorTags.h"
#include "QCameraTrace.h"

#include "HdrPlusClientUtils.h"

extern "C" {
#include "mm_camera_dbg.h"
}
#include "cam_cond.h"

using ::android::hardware::camera::common::V1_0::helper::CameraMetadata;
using namespace android;

namespace qcamera {

#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )

#define EMPTY_PIPELINE_DELAY 2
// mm_camera has 2 partial results: 3A, and final result.
// HDR+ requests have 3 partial results: postview, next request ready, and final result.
#define PARTIAL_RESULT_COUNT 3
#define FRAME_SKIP_DELAY 0

#define MAX_VALUE_8BIT ((1<<8)-1)
#define MAX_VALUE_10BIT ((1<<10)-1)
#define MAX_VALUE_12BIT ((1<<12)-1)

#define VIDEO_4K_WIDTH 3840
#define VIDEO_4K_HEIGHT 2160

#define MAX_EIS_WIDTH 3840
#define MAX_EIS_HEIGHT 2160

#define MAX_RAW_STREAMS 1
#define MAX_STALLING_STREAMS 1
#define MAX_PROCESSED_STREAMS 3
/* Batch mode is enabled only if FPS set is equal to or greater than this */
#define MIN_FPS_FOR_BATCH_MODE (120)
#define PREVIEW_FPS_FOR_HFR (30)
#define DEFAULT_VIDEO_FPS (30.0)
#define TEMPLATE_MAX_PREVIEW_FPS (30.0)
#define MAX_HFR_BATCH_SIZE (8)
#define REGIONS_TUPLE_COUNT 5
// Set a threshold for detection of missing buffers //seconds
#define MISSING_REQUEST_BUF_TIMEOUT 5
#define MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT 30
#define FLUSH_TIMEOUT 3
#define METADATA_MAP_SIZE(MAP) (sizeof(MAP)/sizeof(MAP[0]))

#define CAM_QCOM_FEATURE_PP_SUPERSET_HAL3 ( CAM_QCOM_FEATURE_DENOISE2D |\
                                            CAM_QCOM_FEATURE_CROP |\
                                            CAM_QCOM_FEATURE_ROTATION |\
                                            CAM_QCOM_FEATURE_SHARPNESS |\
                                            CAM_QCOM_FEATURE_SCALE |\
                                            CAM_QCOM_FEATURE_CAC |\
                                            CAM_QCOM_FEATURE_CDS )
/* Per configuration size for static metadata length*/
#define PER_CONFIGURATION_SIZE_3 (3)

#define TIMEOUT_NEVER -1

/* Face rect indices */
#define FACE_LEFT 0
#define FACE_TOP 1
#define FACE_RIGHT 2
#define FACE_BOTTOM 3
#define FACE_WEIGHT 4

/* Face landmarks indices */
#define LEFT_EYE_X 0
#define LEFT_EYE_Y 1
#define RIGHT_EYE_X 2
#define RIGHT_EYE_Y 3
#define MOUTH_X 4
#define MOUTH_Y 5
#define TOTAL_LANDMARK_INDICES 6

// Max preferred zoom
#define MAX_PREFERRED_ZOOM_RATIO 7.0

// Whether to check for the GPU stride padding, or use the default
//#define CHECK_GPU_PIXEL_ALIGNMENT

cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS];
extern pthread_mutex_t gCamLock;
volatile uint32_t gCamHal3LogLevel = 1;
extern uint8_t gNumCameraSessions;

// Note that this doesn't support concurrent front and back camera b/35960155.
// The following Easel related variables must be protected by gHdrPlusClientLock.
std::unique_ptr<EaselManagerClient> gEaselManagerClient;
bool EaselManagerClientOpened = false; // If gEaselManagerClient is opened.
std::unique_ptr<HdrPlusClient> gHdrPlusClient = nullptr;
bool gHdrPlusClientOpening = false; // If HDR+ client is being opened.
std::condition_variable gHdrPlusClientOpenCond; // Used to synchronize HDR+ client opening.
bool gEaselProfilingEnabled = false; // If Easel profiling is enabled.
bool gExposeEnableZslKey = false; // If HAL makes android.control.enableZsl available.
bool gEnableMultipleHdrplusOutputs = false; // Whether to enable multiple output from Easel HDR+.

// If Easel is in bypass only mode. If true, Easel HDR+ won't be enabled.
bool gEaselBypassOnly;

std::mutex gHdrPlusClientLock; // Protect above Easel related variables.


const QCamera3HardwareInterface::QCameraPropMap QCamera3HardwareInterface::CDS_MAP [] = {
    {"On", CAM_CDS_MODE_ON},
    {"Off", CAM_CDS_MODE_OFF},
    {"Auto",CAM_CDS_MODE_AUTO}
};
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_video_hdr_mode_t,
        cam_video_hdr_mode_t> QCamera3HardwareInterface::VIDEO_HDR_MODES_MAP[] = {
    { QCAMERA3_VIDEO_HDR_MODE_OFF, CAM_VIDEO_HDR_MODE_OFF },
    { QCAMERA3_VIDEO_HDR_MODE_ON, CAM_VIDEO_HDR_MODE_ON }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_binning_correction_mode_t,
        cam_binning_correction_mode_t> QCamera3HardwareInterface::BINNING_CORRECTION_MODES_MAP[] = {
    { QCAMERA3_BINNING_CORRECTION_MODE_OFF, CAM_BINNING_CORRECTION_MODE_OFF },
    { QCAMERA3_BINNING_CORRECTION_MODE_ON, CAM_BINNING_CORRECTION_MODE_ON }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_ir_mode_t,
        cam_ir_mode_type_t> QCamera3HardwareInterface::IR_MODES_MAP [] = {
    {QCAMERA3_IR_MODE_OFF, CAM_IR_MODE_OFF},
    {QCAMERA3_IR_MODE_ON, CAM_IR_MODE_ON},
    {QCAMERA3_IR_MODE_AUTO, CAM_IR_MODE_AUTO}
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_effect_mode_t,
        cam_effect_mode_type> QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
    { ANDROID_CONTROL_EFFECT_MODE_OFF, CAM_EFFECT_MODE_OFF },
    { ANDROID_CONTROL_EFFECT_MODE_MONO, CAM_EFFECT_MODE_MONO },
    { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE, CAM_EFFECT_MODE_NEGATIVE },
    { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE, CAM_EFFECT_MODE_SOLARIZE },
    { ANDROID_CONTROL_EFFECT_MODE_SEPIA, CAM_EFFECT_MODE_SEPIA },
    { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE, CAM_EFFECT_MODE_POSTERIZE },
    { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_AQUA, CAM_EFFECT_MODE_AQUA }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_awb_mode_t,
        cam_wb_mode_type> QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
    { ANDROID_CONTROL_AWB_MODE_OFF, CAM_WB_MODE_OFF },
    { ANDROID_CONTROL_AWB_MODE_AUTO, CAM_WB_MODE_AUTO },
    { ANDROID_CONTROL_AWB_MODE_INCANDESCENT, CAM_WB_MODE_INCANDESCENT },
    { ANDROID_CONTROL_AWB_MODE_FLUORESCENT, CAM_WB_MODE_FLUORESCENT },
    { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT,CAM_WB_MODE_WARM_FLUORESCENT},
    { ANDROID_CONTROL_AWB_MODE_DAYLIGHT, CAM_WB_MODE_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_TWILIGHT, CAM_WB_MODE_TWILIGHT },
    { ANDROID_CONTROL_AWB_MODE_SHADE, CAM_WB_MODE_SHADE }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_scene_mode_t,
        cam_scene_mode_type> QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
    { ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY, CAM_SCENE_MODE_FACE_PRIORITY },
    { ANDROID_CONTROL_SCENE_MODE_ACTION, CAM_SCENE_MODE_ACTION },
    { ANDROID_CONTROL_SCENE_MODE_PORTRAIT, CAM_SCENE_MODE_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE, CAM_SCENE_MODE_LANDSCAPE },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT, CAM_SCENE_MODE_NIGHT },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_THEATRE, CAM_SCENE_MODE_THEATRE },
    { ANDROID_CONTROL_SCENE_MODE_BEACH, CAM_SCENE_MODE_BEACH },
    { ANDROID_CONTROL_SCENE_MODE_SNOW, CAM_SCENE_MODE_SNOW },
    { ANDROID_CONTROL_SCENE_MODE_SUNSET, CAM_SCENE_MODE_SUNSET },
    { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO, CAM_SCENE_MODE_ANTISHAKE },
    { ANDROID_CONTROL_SCENE_MODE_FIREWORKS , CAM_SCENE_MODE_FIREWORKS },
    { ANDROID_CONTROL_SCENE_MODE_SPORTS , CAM_SCENE_MODE_SPORTS },
    { ANDROID_CONTROL_SCENE_MODE_PARTY, CAM_SCENE_MODE_PARTY },
    { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT, CAM_SCENE_MODE_CANDLELIGHT },
    { ANDROID_CONTROL_SCENE_MODE_BARCODE, CAM_SCENE_MODE_BARCODE},
    { ANDROID_CONTROL_SCENE_MODE_HDR, CAM_SCENE_MODE_HDR}
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_af_mode_t,
        cam_focus_mode_type> QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
    { ANDROID_CONTROL_AF_MODE_OFF, CAM_FOCUS_MODE_OFF },
    { ANDROID_CONTROL_AF_MODE_OFF, CAM_FOCUS_MODE_FIXED },
    { ANDROID_CONTROL_AF_MODE_AUTO, CAM_FOCUS_MODE_AUTO },
    { ANDROID_CONTROL_AF_MODE_MACRO, CAM_FOCUS_MODE_MACRO },
    { ANDROID_CONTROL_AF_MODE_EDOF, CAM_FOCUS_MODE_EDOF },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO, CAM_FOCUS_MODE_CONTINOUS_VIDEO }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_color_correction_aberration_mode_t,
        cam_aberration_mode_t> QCamera3HardwareInterface::COLOR_ABERRATION_MAP[] = {
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
            CAM_COLOR_CORRECTION_ABERRATION_OFF },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
            CAM_COLOR_CORRECTION_ABERRATION_FAST },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY,
            CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY },
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_antibanding_mode_t,
        cam_antibanding_mode_type> QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF, CAM_ANTIBANDING_MODE_OFF },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = {
    { ANDROID_CONTROL_AE_MODE_OFF, CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON, CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH, CAM_FLASH_MODE_AUTO},
    { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH, CAM_FLASH_MODE_ON },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO},
    { (camera_metadata_enum_android_control_ae_mode_t)
            NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH, CAM_FLASH_MODE_OFF }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_flash_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
    { ANDROID_FLASH_MODE_OFF, CAM_FLASH_MODE_OFF },
    { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_SINGLE },
    { ANDROID_FLASH_MODE_TORCH, CAM_FLASH_MODE_TORCH }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_statistics_face_detect_mode_t,
        cam_face_detect_mode_t> QCamera3HardwareInterface::FACEDETECT_MODES_MAP[] = {
    { ANDROID_STATISTICS_FACE_DETECT_MODE_OFF, CAM_FACE_DETECT_MODE_OFF },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE, CAM_FACE_DETECT_MODE_SIMPLE },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_FULL, CAM_FACE_DETECT_MODE_FULL }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_info_focus_distance_calibration_t,
        cam_focus_calibration_t> QCamera3HardwareInterface::FOCUS_CALIBRATION_MAP[] = {
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED,
            CAM_FOCUS_UNCALIBRATED },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_APPROXIMATE,
            CAM_FOCUS_APPROXIMATE },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED,
            CAM_FOCUS_CALIBRATED }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_state_t,
        cam_af_lens_state_t> QCamera3HardwareInterface::LENS_STATE_MAP[] = {
    { ANDROID_LENS_STATE_STATIONARY, CAM_AF_LENS_STATE_STATIONARY},
    { ANDROID_LENS_STATE_MOVING, CAM_AF_LENS_STATE_MOVING}
};

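// Supported JPEG thumbnail sizes, listed as flattened (width, height) pairs;
// the leading (0, 0) entry indicates that thumbnail generation can be disabled.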
const int32_t available_thumbnail_sizes[] = {0, 0,
                                             176, 144,
                                             240, 144,
                                             256, 144,
                                             240, 160,
                                             256, 154,
                                             240, 240,
                                             320, 240};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_test_pattern_mode_t,
        cam_test_pattern_mode_t> QCamera3HardwareInterface::TEST_PATTERN_MAP[] = {
    { ANDROID_SENSOR_TEST_PATTERN_MODE_OFF, CAM_TEST_PATTERN_OFF },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR, CAM_TEST_PATTERN_SOLID_COLOR },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS, CAM_TEST_PATTERN_COLOR_BARS },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS_FADE_TO_GRAY, CAM_TEST_PATTERN_COLOR_BARS_FADE_TO_GRAY },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_PN9, CAM_TEST_PATTERN_PN9 },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_CUSTOM1, CAM_TEST_PATTERN_CUSTOM1},
};

/* Since there is no mapping for all the options, some Android enums are not listed.
 * Also, the order in this list is important because while mapping from HAL to Android it will
 * traverse from lower to higher index, which means that for HAL values that map to different
 * Android values, the traversal logic will select the first one found.
 */
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_reference_illuminant1_t,
        cam_illuminat_t> QCamera3HardwareInterface::REFERENCE_ILLUMINANT_MAP[] = {
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FLUORESCENT, CAM_AWB_WARM_FLO},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_COOL_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_A, CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D55, CAM_AWB_NOON },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D65, CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D75, CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D50, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_ISO_STUDIO_TUNGSTEN, CAM_AWB_CUSTOM_A},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_TUNGSTEN, CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FINE_WEATHER, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_CLOUDY_WEATHER, CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_SHADE, CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAY_WHITE_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO},
};

const QCamera3HardwareInterface::QCameraMap<
        int32_t, cam_hfr_mode_t> QCamera3HardwareInterface::HFR_MODE_MAP[] = {
    { 60, CAM_HFR_MODE_60FPS},
    { 90, CAM_HFR_MODE_90FPS},
    { 120, CAM_HFR_MODE_120FPS},
    { 150, CAM_HFR_MODE_150FPS},
    { 180, CAM_HFR_MODE_180FPS},
    { 210, CAM_HFR_MODE_210FPS},
    { 240, CAM_HFR_MODE_240FPS},
    { 480, CAM_HFR_MODE_480FPS},
};

const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_instant_aec_mode_t,
        cam_aec_convergence_type> QCamera3HardwareInterface::INSTANT_AEC_MODES_MAP[] = {
    { QCAMERA3_INSTANT_AEC_NORMAL_CONVERGENCE, CAM_AEC_NORMAL_CONVERGENCE},
    { QCAMERA3_INSTANT_AEC_AGGRESSIVE_CONVERGENCE, CAM_AEC_AGGRESSIVE_CONVERGENCE},
    { QCAMERA3_INSTANT_AEC_FAST_CONVERGENCE, CAM_AEC_FAST_CONVERGENCE},
};

const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_exposure_meter_mode_t,
        cam_auto_exposure_mode_type> QCamera3HardwareInterface::AEC_MODES_MAP[] = {
    { QCAMERA3_EXP_METER_MODE_FRAME_AVERAGE, CAM_AEC_MODE_FRAME_AVERAGE },
    { QCAMERA3_EXP_METER_MODE_CENTER_WEIGHTED, CAM_AEC_MODE_CENTER_WEIGHTED },
    { QCAMERA3_EXP_METER_MODE_SPOT_METERING, CAM_AEC_MODE_SPOT_METERING },
    { QCAMERA3_EXP_METER_MODE_SMART_METERING, CAM_AEC_MODE_SMART_METERING },
    { QCAMERA3_EXP_METER_MODE_USER_METERING, CAM_AEC_MODE_USER_METERING },
    { QCAMERA3_EXP_METER_MODE_SPOT_METERING_ADV, CAM_AEC_MODE_SPOT_METERING_ADV },
    { QCAMERA3_EXP_METER_MODE_CENTER_WEIGHTED_ADV, CAM_AEC_MODE_CENTER_WEIGHTED_ADV },
};

const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_iso_mode_t,
        cam_iso_mode_type> QCamera3HardwareInterface::ISO_MODES_MAP[] = {
    { QCAMERA3_ISO_MODE_AUTO, CAM_ISO_MODE_AUTO },
    { QCAMERA3_ISO_MODE_DEBLUR, CAM_ISO_MODE_DEBLUR },
    { QCAMERA3_ISO_MODE_100, CAM_ISO_MODE_100 },
    { QCAMERA3_ISO_MODE_200, CAM_ISO_MODE_200 },
    { QCAMERA3_ISO_MODE_400, CAM_ISO_MODE_400 },
    { QCAMERA3_ISO_MODE_800, CAM_ISO_MODE_800 },
    { QCAMERA3_ISO_MODE_1600, CAM_ISO_MODE_1600 },
    { QCAMERA3_ISO_MODE_3200, CAM_ISO_MODE_3200 },
};

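// camera3_device_ops vtable handed to the camera framework; entry points that
// this HAL does not implement are left NULL.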
camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
    .initialize = QCamera3HardwareInterface::initialize,
    .configure_streams = QCamera3HardwareInterface::configure_streams,
    .register_stream_buffers = NULL,
    .construct_default_request_settings = QCamera3HardwareInterface::construct_default_request_settings,
    .process_capture_request = QCamera3HardwareInterface::process_capture_request,
    .get_metadata_vendor_tag_ops = NULL,
    .dump = QCamera3HardwareInterface::dump,
    .flush = QCamera3HardwareInterface::flush,
    .reserved = {0},
};

// Initialize per-camera session IDs to an invalid default value.
uint32_t QCamera3HardwareInterface::sessionId[] = {0xDEADBEEF, 0xDEADBEEF, 0xDEADBEEF};

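/*===========================================================================
 * FUNCTION   : logEaselEvent
 *
 * DESCRIPTION: Log an Easel profiling event with a CLOCK_BOOTTIME timestamp
 *              in milliseconds. Only logs when gEaselProfilingEnabled is set.
 *
 * PARAMETERS :
 *   @tag   : log tag prefix
 *   @event : event description string
 *
 * RETURN     : none
 *==========================================================================*/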
static inline void logEaselEvent(const char *tag, const char *event) {
    if (CC_UNLIKELY(gEaselProfilingEnabled)) {
        struct timespec ts = {};
        static int64_t kMsPerSec = 1000;
        static int64_t kNsPerMs = 1000000;
        status_t res = clock_gettime(CLOCK_BOOTTIME, &ts);
        if (res != OK) {
            ALOGE("[%s] Failed to get boot time for <%s>.", tag, event);
        } else {
            int64_t now = static_cast<int64_t>(ts.tv_sec) * kMsPerSec + ts.tv_nsec / kNsPerMs;
            ALOGI("[%s] %s at %" PRId64 " ms", tag, event, now);
        }
    }
}

/*===========================================================================
 * FUNCTION   : QCamera3HardwareInterface
 *
 * DESCRIPTION: constructor of QCamera3HardwareInterface
 *
 * PARAMETERS :
 *   @cameraId  : camera ID
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::QCamera3HardwareInterface(uint32_t cameraId,
        const camera_module_callbacks_t *callbacks)
    : mCameraId(cameraId),
      mCameraHandle(NULL),
      mCameraInitialized(false),
      mCallbackOps(NULL),
      mMetadataChannel(NULL),
      mPictureChannel(NULL),
      mRawChannel(NULL),
      mSupportChannel(NULL),
      mAnalysisChannel(NULL),
      mRawDumpChannel(NULL),
      mHdrPlusRawSrcChannel(NULL),
      mDummyBatchChannel(NULL),
      mDepthChannel(NULL),
      mDepthCloudMode(CAM_PD_DATA_SKIP),
      mPerfLockMgr(),
      mChannelHandle(0),
      mFirstConfiguration(true),
      mFlush(false),
      mFlushPerf(false),
      mParamHeap(NULL),
      mParameters(NULL),
      mPrevParameters(NULL),
      m_bIsVideo(false),
      m_bIs4KVideo(false),
      m_bEisSupportedSize(false),
      m_bEisEnable(false),
      m_bEis3PropertyEnabled(false),
      m_bAVTimerEnabled(false),
      m_MobicatMask(0),
      mShutterDispatcher(this),
      mOutputBufferDispatcher(this),
      mMinProcessedFrameDuration(0),
      mMinJpegFrameDuration(0),
      mMinRawFrameDuration(0),
      mExpectedFrameDuration(0),
      mExpectedInflightDuration(0),
      mMetaFrameCount(0U),
      mUpdateDebugLevel(false),
      mCallbacks(callbacks),
      mCaptureIntent(0),
      mCacMode(0),
      /* DevCamDebug metadata internal m control*/
      mDevCamDebugMetaEnable(0),
      /* DevCamDebug metadata end */
      mBatchSize(0),
      mToBeQueuedVidBufs(0),
      mHFRVideoFps(DEFAULT_VIDEO_FPS),
      mOpMode(CAMERA3_STREAM_CONFIGURATION_NORMAL_MODE),
      mStreamConfig(false),
      mCommon(),
      mFirstFrameNumberInBatch(0),
      mNeedSensorRestart(false),
      mPreviewStarted(false),
      mMinInFlightRequests(MIN_INFLIGHT_REQUESTS),
      mMaxInFlightRequests(MAX_INFLIGHT_REQUESTS),
      mPDSupported(false),
      mPDIndex(0),
      mInstantAEC(false),
      mResetInstantAEC(false),
      mInstantAECSettledFrameNumber(0),
      mAecSkipDisplayFrameBound(0),
      mInstantAecFrameIdxCount(0),
      mLastRequestedLensShadingMapMode(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF),
      mCurrFeatureState(0),
      mLdafCalibExist(false),
      mLastCustIntentFrmNum(-1),
      mFirstMetadataCallback(true),
      mState(CLOSED),
      mIsDeviceLinked(false),
      mIsMainCamera(true),
      mLinkedCameraId(0),
      m_pDualCamCmdHeap(NULL),
      m_pDualCamCmdPtr(NULL),
      mHdrPlusModeEnabled(false),
      mZslEnabled(false),
      mEaselMipiStarted(false),
      mIsApInputUsedForHdrPlus(false),
      mFirstPreviewIntentSeen(false),
      m_bSensorHDREnabled(false),
      mAfTrigger(),
      mSceneDistance(-1)
{
    getLogLevel();
    mCommon.init(gCamCapability[cameraId]);
    mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
#ifndef USE_HAL_3_3
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_4;
#else
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_3;
#endif
    mCameraDevice.common.close = close_camera_device;
    mCameraDevice.ops = &mCameraOps;
    mCameraDevice.priv = this;
    gCamCapability[cameraId]->version = CAM_HAL_V3;
    // TODO: hardcode for now until mctl add support for min_num_pp_bufs
    //TBD - To see if this hardcoding is needed. Check by printing if this is filled by mctl to 3
    gCamCapability[cameraId]->min_num_pp_bufs = 3;

    PTHREAD_COND_INIT(&mBuffersCond);

    PTHREAD_COND_INIT(&mRequestCond);
    mPendingLiveRequest = 0;
    mCurrentRequestId = -1;
    pthread_mutex_init(&mMutex, NULL);

    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        mDefaultMetadata[i] = NULL;

    // Getting system props of different kinds
    char prop[PROPERTY_VALUE_MAX];
    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.raw.dump", prop, "0");
    mEnableRawDump = atoi(prop);
    property_get("persist.camera.hal3.force.hdr", prop, "0");
    mForceHdrSnapshot = atoi(prop);

    if (mEnableRawDump)
        LOGD("Raw dump from Camera HAL enabled");

    memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
    memset(mLdafCalib, 0, sizeof(mLdafCalib));

    memset(mEaselFwVersion, 0, sizeof(mEaselFwVersion));
    mEaselFwUpdated = false;

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.preview", prop, "0");
    m_bTnrPreview = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.swtnr.preview", prop, "1");
    m_bSwTnrPreview = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.video", prop, "1");
    m_bTnrVideo = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.avtimer.debug", prop, "0");
    m_debug_avtimer = (uint8_t)atoi(prop);
    LOGI("AV timer enabled: %d", m_debug_avtimer);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.cacmode.disable", prop, "0");
    m_cacModeDisabled = (uint8_t)atoi(prop);

    m_bForceInfinityAf = property_get_bool("persist.camera.af.infinity", 0);
    m_MobicatMask = (uint8_t)property_get_int32("persist.camera.mobicat", 0);

    // Load and read GPU library.
    lib_surface_utils = NULL;
    LINK_get_surface_pixel_alignment = NULL;
    mSurfaceStridePadding = CAM_PAD_TO_64;
#ifdef CHECK_GPU_PIXEL_ALIGNMENT
    lib_surface_utils = dlopen("libadreno_utils.so", RTLD_NOW);
    if (lib_surface_utils) {
        *(void **)&LINK_get_surface_pixel_alignment =
                dlsym(lib_surface_utils, "get_gpu_pixel_alignment");
        if (LINK_get_surface_pixel_alignment) {
            mSurfaceStridePadding = LINK_get_surface_pixel_alignment();
        }
        dlclose(lib_surface_utils);
    }
#endif
    mPDIndex = getPDStatIndex(gCamCapability[cameraId]);
    mPDSupported = (0 <= mPDIndex) ? true : false;

    m60HzZone = is60HzZone();
}

/*===========================================================================
 * FUNCTION   : ~QCamera3HardwareInterface
 *
 * DESCRIPTION: destructor of QCamera3HardwareInterface
 *
 * PARAMETERS : none
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::~QCamera3HardwareInterface()
{
    LOGD("E");

    int32_t rc = 0;

    // Disable power hint and enable the perf lock for close camera
    mPerfLockMgr.releasePerfLock(PERF_LOCK_POWERHINT_ENCODE);
    mPerfLockMgr.acquirePerfLock(PERF_LOCK_CLOSE_CAMERA);

    // Close HDR+ client first before destroying HAL.
    {
        std::unique_lock<std::mutex> l(gHdrPlusClientLock);
        finishHdrPlusClientOpeningLocked(l);
        if (gHdrPlusClient != nullptr) {
            // Disable HDR+ mode.
            disableHdrPlusModeLocked();
            // Disconnect Easel if it's connected.
            gEaselManagerClient->closeHdrPlusClient(std::move(gHdrPlusClient));
            gHdrPlusClient = nullptr;
        }
    }

    // unlink of dualcam during close camera
    if (mIsDeviceLinked) {
        cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
                &m_pDualCamCmdPtr->bundle_info;
        m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
        m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
        pthread_mutex_lock(&gCamLock);

        if (mIsMainCamera == 1) {
            m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
            m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
            m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
            // related session id should be session id of linked session
            m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
        } else {
            m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
            m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
            m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
            m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
        }
        m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
        pthread_mutex_unlock(&gCamLock);

        rc = mCameraHandle->ops->set_dual_cam_cmd(
                mCameraHandle->camera_handle);
        if (rc < 0) {
            LOGE("Dualcam: Unlink failed, but still proceed to close");
        }
    }

    /* We need to stop all streams before deleting any stream */
    if (mRawDumpChannel) {
        mRawDumpChannel->stop();
    }

    if (mHdrPlusRawSrcChannel) {
        mHdrPlusRawSrcChannel->stop();
    }

    // NOTE: 'camera3_stream_t *' objects are already freed at
    //       this stage by the framework
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel) {
            channel->stop();
        }
    }
    if (mSupportChannel)
        mSupportChannel->stop();

    if (mAnalysisChannel) {
        mAnalysisChannel->stop();
    }
    if (mMetadataChannel) {
        mMetadataChannel->stop();
    }
    if (mChannelHandle) {
        stopChannelLocked(/*stop_immediately*/false);
    }

    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel)
            delete channel;
        free (*it);
    }
    if (mSupportChannel) {
        delete mSupportChannel;
        mSupportChannel = NULL;
    }

    if (mAnalysisChannel) {
        delete mAnalysisChannel;
        mAnalysisChannel = NULL;
    }
    if (mRawDumpChannel) {
        delete mRawDumpChannel;
        mRawDumpChannel = NULL;
    }
    if (mHdrPlusRawSrcChannel) {
        delete mHdrPlusRawSrcChannel;
        mHdrPlusRawSrcChannel = NULL;
    }
    if (mDummyBatchChannel) {
        delete mDummyBatchChannel;
        mDummyBatchChannel = NULL;
    }

    mPictureChannel = NULL;
    mDepthChannel = NULL;

    if (mMetadataChannel) {
        delete mMetadataChannel;
        mMetadataChannel = NULL;
    }

    /* Clean up all channels */
    if (mCameraInitialized) {
        if(!mFirstConfiguration){
            //send the last unconfigure
            cam_stream_size_info_t stream_config_info;
            memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
            stream_config_info.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
            stream_config_info.buffer_info.max_buffers =
                    m_bIs4KVideo ? 0 :
                    m_bEis3PropertyEnabled && m_bIsVideo ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
            clear_metadata_buffer(mParameters);
            ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_INFO,
                    stream_config_info);
            int rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
            if (rc < 0) {
                LOGE("set_parms failed for unconfigure");
            }
        }
        deinitParameters();
    }

    if (mChannelHandle) {
        mCameraHandle->ops->delete_channel(mCameraHandle->camera_handle,
                mChannelHandle);
        LOGH("deleting channel %d", mChannelHandle);
        mChannelHandle = 0;
    }

    if (mState != CLOSED)
        closeCamera();

    for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
        req.mPendingBufferList.clear();
    }
    mPendingBuffersMap.mPendingBuffersInRequest.clear();
    for (pendingRequestIterator i = mPendingRequestsList.begin();
            i != mPendingRequestsList.end();) {
        i = erasePendingRequest(i);
    }
    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        if (mDefaultMetadata[i])
            free_camera_metadata(mDefaultMetadata[i]);

    mPerfLockMgr.releasePerfLock(PERF_LOCK_CLOSE_CAMERA);

    pthread_cond_destroy(&mRequestCond);

    pthread_cond_destroy(&mBuffersCond);

    pthread_mutex_destroy(&mMutex);
    LOGD("X");
}

/*===========================================================================
 * FUNCTION   : erasePendingRequest
 *
 * DESCRIPTION: function to erase a desired pending request after freeing any
 *              allocated memory
 *
 * PARAMETERS :
 *   @i : iterator pointing to pending request to be erased
 *
 * RETURN     : iterator pointing to the next request
 *==========================================================================*/
QCamera3HardwareInterface::pendingRequestIterator
        QCamera3HardwareInterface::erasePendingRequest (pendingRequestIterator i)
{
    if (i->input_buffer != NULL) {
        free(i->input_buffer);
        i->input_buffer = NULL;
    }
    if (i->settings != NULL)
        free_camera_metadata((camera_metadata_t*)i->settings);

    mExpectedInflightDuration -= i->expectedFrameDuration;
    if (mExpectedInflightDuration < 0) {
        LOGE("Negative expected in-flight duration!");
        mExpectedInflightDuration = 0;
    }

    return mPendingRequestsList.erase(i);
}

/*===========================================================================
 * FUNCTION   : camEvtHandle
 *
 * DESCRIPTION: Function registered to mm-camera-interface to handle events
 *
 * PARAMETERS :
 *   @camera_handle : interface layer camera handle
 *   @evt           : ptr to event
 *   @user_data     : user data ptr
 *
 * RETURN     : none
 *==========================================================================*/
void QCamera3HardwareInterface::camEvtHandle(uint32_t /*camera_handle*/,
        mm_camera_event_t *evt,
        void *user_data)
{
    QCamera3HardwareInterface *obj = (QCamera3HardwareInterface *)user_data;
    if (obj && evt) {
        switch(evt->server_event_type) {
            case CAM_EVENT_TYPE_DAEMON_DIED:
                pthread_mutex_lock(&obj->mMutex);
                obj->mState = ERROR;
                pthread_mutex_unlock(&obj->mMutex);
                LOGE("Fatal, camera daemon died");
                break;

            case CAM_EVENT_TYPE_DAEMON_PULL_REQ:
                LOGD("HAL got request pull from Daemon");
                pthread_mutex_lock(&obj->mMutex);
                obj->mWokenUpByDaemon = true;
                obj->unblockRequestIfNecessary();
                pthread_mutex_unlock(&obj->mMutex);
                break;

            default:
                LOGW("Warning: Unhandled event %d",
                        evt->server_event_type);
                break;
        }
    } else {
        LOGE("NULL user_data/evt");
    }
}

/*===========================================================================
 * FUNCTION   : openCamera
 *
 * DESCRIPTION: open camera
 *
 * PARAMETERS :
 *   @hw_device  : double ptr for camera device struct
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device)
{
    int rc = 0;
    if (mState != CLOSED) {
        *hw_device = NULL;
        return PERMISSION_DENIED;
    }

    logEaselEvent("EASEL_STARTUP_LATENCY", "Camera Open");
    mPerfLockMgr.acquirePerfLock(PERF_LOCK_OPEN_CAMERA);
    LOGI("[KPI Perf]: E PROFILE_OPEN_CAMERA camera id %d",
            mCameraId);

    if (mCameraHandle) {
        LOGE("Failure: Camera already opened");
        return ALREADY_EXISTS;
    }

    {
        std::unique_lock<std::mutex> l(gHdrPlusClientLock);
        if (gEaselManagerClient != nullptr && gEaselManagerClient->isEaselPresentOnDevice()) {
            logEaselEvent("EASEL_STARTUP_LATENCY", "Resume");
            rc = gEaselManagerClient->resume(this);
            if (rc != 0) {
                ALOGE("%s: Resuming Easel failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
                return rc;
            }
            mEaselFwUpdated = false;
        }
    }

    rc = openCamera();
    if (rc == 0) {
        *hw_device = &mCameraDevice.common;
    } else {
        *hw_device = NULL;

        // Suspend Easel because opening camera failed.
        {
            std::unique_lock<std::mutex> l(gHdrPlusClientLock);
            if (gEaselManagerClient != nullptr && gEaselManagerClient->isEaselPresentOnDevice()) {
                status_t suspendErr = gEaselManagerClient->suspend();
                if (suspendErr != 0) {
                    ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__,
                            strerror(-suspendErr), suspendErr);
                }
            }
        }
    }

    LOGI("[KPI Perf]: X PROFILE_OPEN_CAMERA camera id %d, rc: %d",
            mCameraId, rc);

    if (rc == NO_ERROR) {
        mState = OPENED;
    }

    return rc;
}

/*===========================================================================
 * FUNCTION   : openCamera
 *
 * DESCRIPTION: open camera
 *
 * PARAMETERS : none
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::openCamera()
{
    int rc = 0;
    char value[PROPERTY_VALUE_MAX];

    KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_OPENCAMERA);

    rc = QCameraFlash::getInstance().reserveFlashForCamera(mCameraId);
    if (rc < 0) {
        LOGE("Failed to reserve flash for camera id: %d",
                mCameraId);
        return UNKNOWN_ERROR;
    }

    rc = camera_open((uint8_t)mCameraId, &mCameraHandle);
    if (rc) {
        LOGE("camera_open failed. rc = %d, mCameraHandle = %p", rc, mCameraHandle);
        return rc;
    }

    if (!mCameraHandle) {
        LOGE("camera_open failed. mCameraHandle = %p", mCameraHandle);
        return -ENODEV;
    }

    rc = mCameraHandle->ops->register_event_notify(mCameraHandle->camera_handle,
            camEvtHandle, (void *)this);

    if (rc < 0) {
        LOGE("Error, failed to register event callback");
        /* Not closing camera here since it is already handled in destructor */
        return FAILED_TRANSACTION;
    }

    mExifParams.debug_params =
            (mm_jpeg_debug_exif_params_t *) malloc (sizeof(mm_jpeg_debug_exif_params_t));
    if (mExifParams.debug_params) {
        memset(mExifParams.debug_params, 0, sizeof(mm_jpeg_debug_exif_params_t));
    } else {
        LOGE("Out of Memory. Allocation failed for 3A debug exif params");
        return NO_MEMORY;
    }
    mFirstConfiguration = true;

    // Notify display HAL that a camera session is active.
    // But avoid calling the same during bootup because camera service might open/close
    // cameras at boot time during its initialization and display service will also internally
    // wait for camera service to initialize first while calling this display API, resulting in a
    // deadlock situation. Since boot time camera open/close calls are made only to fetch
    // capabilities, no need of this display bw optimization.
    // Use "service.bootanim.exit" property to know boot status.
    property_get("service.bootanim.exit", value, "0");
    if (atoi(value) == 1) {
        pthread_mutex_lock(&gCamLock);
        if (gNumCameraSessions++ == 0) {
            setCameraLaunchStatus(true);
        }
        pthread_mutex_unlock(&gCamLock);
    }

    // fill the session id needed while linking dual cam
    pthread_mutex_lock(&gCamLock);
    rc = mCameraHandle->ops->get_session_id(mCameraHandle->camera_handle,
            &sessionId[mCameraId]);
    pthread_mutex_unlock(&gCamLock);

    if (rc < 0) {
        LOGE("Error, failed to get session id");
        return UNKNOWN_ERROR;
    } else {
        // Allocate related cam sync buffer.
        // This is needed for the payload that goes along with the bundling cmd for related
        // camera use cases.
        m_pDualCamCmdHeap = new QCamera3HeapMemory(1);
        rc = m_pDualCamCmdHeap->allocate(sizeof(cam_dual_camera_cmd_info_t));
        if(rc != OK) {
            rc = NO_MEMORY;
            LOGE("Dualcam: Failed to allocate Related cam sync Heap memory");
            return NO_MEMORY;
        }

        // Map memory for related cam sync buffer
        rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
                CAM_MAPPING_BUF_TYPE_DUAL_CAM_CMD_BUF,
                m_pDualCamCmdHeap->getFd(0),
                sizeof(cam_dual_camera_cmd_info_t),
                m_pDualCamCmdHeap->getPtr(0));
        if(rc < 0) {
            LOGE("Dualcam: failed to map Related cam sync buffer");
            rc = FAILED_TRANSACTION;
            return NO_MEMORY;
        }
        m_pDualCamCmdPtr =
                (cam_dual_camera_cmd_info_t*) DATA_PTR(m_pDualCamCmdHeap,0);
    }

    LOGH("mCameraId=%d",mCameraId);

    return NO_ERROR;
}

/*===========================================================================
 * FUNCTION   : closeCamera
 *
 * DESCRIPTION: close camera
 *
 * PARAMETERS : none
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::closeCamera()
{
    KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CLOSECAMERA);
    int rc = NO_ERROR;
    char value[PROPERTY_VALUE_MAX];

    LOGI("[KPI Perf]: E PROFILE_CLOSE_CAMERA camera id %d",
            mCameraId);

    // unmap memory for related cam sync buffer
    mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
            CAM_MAPPING_BUF_TYPE_DUAL_CAM_CMD_BUF);
    if (NULL != m_pDualCamCmdHeap) {
        m_pDualCamCmdHeap->deallocate();
        delete m_pDualCamCmdHeap;
        m_pDualCamCmdHeap = NULL;
        m_pDualCamCmdPtr = NULL;
    }

    rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
    mCameraHandle = NULL;

    // reset session id to some invalid id
    pthread_mutex_lock(&gCamLock);
    sessionId[mCameraId] = 0xDEADBEEF;
    pthread_mutex_unlock(&gCamLock);

    // Notify display HAL that there is no active camera session
    // but avoid calling the same during bootup. Refer to openCamera
    // for more details.
    property_get("service.bootanim.exit", value, "0");
    if (atoi(value) == 1) {
        pthread_mutex_lock(&gCamLock);
        if (--gNumCameraSessions == 0) {
            setCameraLaunchStatus(false);
        }
        pthread_mutex_unlock(&gCamLock);
    }

    if (mExifParams.debug_params) {
        free(mExifParams.debug_params);
        mExifParams.debug_params = NULL;
    }
    if (QCameraFlash::getInstance().releaseFlashFromCamera(mCameraId) != 0) {
        LOGW("Failed to release flash for camera id: %d",
                mCameraId);
    }
    mState = CLOSED;
    LOGI("[KPI Perf]: X PROFILE_CLOSE_CAMERA camera id %d, rc: %d",
            mCameraId, rc);

    {
        std::unique_lock<std::mutex> l(gHdrPlusClientLock);
        if (EaselManagerClientOpened) {
            rc = gEaselManagerClient->suspend();
            if (rc != 0) {
                ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
            }
        }
    }

    return rc;
}

/*===========================================================================
 * FUNCTION   : initialize
 *
 * DESCRIPTION: Initialize framework callback functions
 *
 * PARAMETERS :
 *   @callback_ops : callback functions to the framework
 *
 * RETURN     :
 *
 *==========================================================================*/
int QCamera3HardwareInterface::initialize(
        const struct camera3_callback_ops *callback_ops)
{
    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_INIT);
    int rc;

    LOGI("E :mCameraId = %d mState = %d", mCameraId, mState);
    pthread_mutex_lock(&mMutex);

    // Validate current state
    switch (mState) {
        case OPENED:
            /* valid state */
            break;
        default:
            LOGE("Invalid state %d", mState);
            rc = -ENODEV;
            goto err1;
    }

    rc = initParameters();
    if (rc < 0) {
        LOGE("initParameters failed %d", rc);
        goto err1;
    }
    mCallbackOps = callback_ops;

    mChannelHandle = mCameraHandle->ops->add_channel(
            mCameraHandle->camera_handle, NULL, NULL, this);
    if (mChannelHandle == 0) {
        LOGE("add_channel failed");
        rc = -ENOMEM;
        pthread_mutex_unlock(&mMutex);
        return rc;
    }

    pthread_mutex_unlock(&mMutex);
    mCameraInitialized = true;
    mState = INITIALIZED;
    LOGI("X");
    return 0;

err1:
    pthread_mutex_unlock(&mMutex);
    return rc;
}

1195/*===========================================================================
1196 * FUNCTION : validateStreamDimensions
1197 *
1198 * DESCRIPTION: Check if the configuration requested are those advertised
1199 *
1200 * PARAMETERS :
1201 * @stream_list : streams to be configured
1202 *
1203 * RETURN :
1204 *
1205 *==========================================================================*/
1206int QCamera3HardwareInterface::validateStreamDimensions(
1207 camera3_stream_configuration_t *streamList)
1208{
1209 int rc = NO_ERROR;
1210 size_t count = 0;
Emilian Peev0f3c3162017-03-15 12:57:46 +00001211 uint32_t depthWidth = 0;
1212 uint32_t depthHeight = 0;
1213 if (mPDSupported) {
1214 depthWidth = gCamCapability[mCameraId]->raw_meta_dim[mPDIndex].width;
1215 depthHeight = gCamCapability[mCameraId]->raw_meta_dim[mPDIndex].height;
1216 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001217
1218 camera3_stream_t *inputStream = NULL;
1219 /*
1220 * Loop through all streams to find input stream if it exists*
1221 */
1222 for (size_t i = 0; i< streamList->num_streams; i++) {
1223 if (streamList->streams[i]->stream_type == CAMERA3_STREAM_INPUT) {
1224 if (inputStream != NULL) {
1225 LOGE("Error, Multiple input streams requested");
1226 return -EINVAL;
1227 }
1228 inputStream = streamList->streams[i];
1229 }
1230 }
1231 /*
1232 * Loop through all streams requested in configuration
1233 * Check if unsupported sizes have been requested on any of them
1234 */
1235 for (size_t j = 0; j < streamList->num_streams; j++) {
1236 bool sizeFound = false;
1237 camera3_stream_t *newStream = streamList->streams[j];
1238
1239 uint32_t rotatedHeight = newStream->height;
1240 uint32_t rotatedWidth = newStream->width;
1241 if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
1242 (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
1243 rotatedHeight = newStream->width;
1244 rotatedWidth = newStream->height;
1245 }
1246
1247 /*
1248 * Sizes are different for each type of stream format check against
1249 * appropriate table.
1250 */
1251 switch (newStream->format) {
1252 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
1253 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
1254 case HAL_PIXEL_FORMAT_RAW10:
Emilian Peev0f3c3162017-03-15 12:57:46 +00001255 if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
1256 (ANDROID_SCALER_AVAILABLE_FORMATS_RAW16 == newStream->format) &&
1257 mPDSupported) {
1258 if ((depthWidth == newStream->width) &&
1259 (depthHeight == newStream->height)) {
1260 sizeFound = true;
1261 }
1262 break;
1263 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001264 count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
1265 for (size_t i = 0; i < count; i++) {
1266 if ((gCamCapability[mCameraId]->raw_dim[i].width == (int32_t)rotatedWidth) &&
1267 (gCamCapability[mCameraId]->raw_dim[i].height == (int32_t)rotatedHeight)) {
1268 sizeFound = true;
1269 break;
1270 }
1271 }
1272 break;
1273 case HAL_PIXEL_FORMAT_BLOB:
Emilian Peev0f3c3162017-03-15 12:57:46 +00001274 if ((newStream->data_space == HAL_DATASPACE_DEPTH) &&
1275 mPDSupported) {
Emilian Peev7650c122017-01-19 08:24:33 -08001276 //As per spec. depth cloud should be sample count / 16
Emilian Peev0f3c3162017-03-15 12:57:46 +00001277 uint32_t depthSamplesCount = (depthWidth * depthHeight * 2) / 16;
Emilian Peev7650c122017-01-19 08:24:33 -08001278 if ((depthSamplesCount == newStream->width) &&
1279 (1 == newStream->height)) {
1280 sizeFound = true;
1281 }
1282 break;
1283 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001284 count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
1285 /* Verify set size against generated sizes table */
1286 for (size_t i = 0; i < count; i++) {
1287 if (((int32_t)rotatedWidth ==
1288 gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
1289 ((int32_t)rotatedHeight ==
1290 gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
1291 sizeFound = true;
1292 break;
1293 }
1294 }
1295 break;
1296 case HAL_PIXEL_FORMAT_YCbCr_420_888:
1297 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
1298 default:
1299 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
1300 || newStream->stream_type == CAMERA3_STREAM_INPUT
1301 || IS_USAGE_ZSL(newStream->usage)) {
1302 if (((int32_t)rotatedWidth ==
1303 gCamCapability[mCameraId]->active_array_size.width) &&
1304 ((int32_t)rotatedHeight ==
1305 gCamCapability[mCameraId]->active_array_size.height)) {
1306 sizeFound = true;
1307 break;
1308 }
1309                    /* We could potentially break here to enforce that a ZSL stream
1310                     * set by the framework is always full active array size,
1311                     * but it is not clear from the spec whether the framework will
1312                     * always follow that. We also have logic to override to the full
1313                     * array size, so keep the check lenient for now.
1314 */
1315 }
1316 count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt,
1317 MAX_SIZES_CNT);
1318 for (size_t i = 0; i < count; i++) {
1319 if (((int32_t)rotatedWidth ==
1320 gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
1321 ((int32_t)rotatedHeight ==
1322 gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
1323 sizeFound = true;
1324 break;
1325 }
1326 }
1327 break;
1328 } /* End of switch(newStream->format) */
1329
1330 /* We error out even if a single stream has unsupported size set */
1331 if (!sizeFound) {
1332 LOGE("Error: Unsupported size: %d x %d type: %d array size: %d x %d",
1333 rotatedWidth, rotatedHeight, newStream->format,
1334 gCamCapability[mCameraId]->active_array_size.width,
1335 gCamCapability[mCameraId]->active_array_size.height);
1336 rc = -EINVAL;
1337 break;
1338 }
1339 } /* End of for each stream */
1340 return rc;
1341}
1342
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001343/*===========================================================================
1344 * FUNCTION : validateUsageFlags
1345 *
1346 * DESCRIPTION: Check if the configuration usage flags map to the same internal format.
1347 *
1348 * PARAMETERS :
1349 * @stream_list : streams to be configured
1350 *
1351 * RETURN :
1352 * NO_ERROR if the usage flags are supported
1353 * error code if usage flags are not supported
1354 *
1355 *==========================================================================*/
1356int QCamera3HardwareInterface::validateUsageFlags(
1357 const camera3_stream_configuration_t* streamList)
1358{
1359 for (size_t j = 0; j < streamList->num_streams; j++) {
1360 const camera3_stream_t *newStream = streamList->streams[j];
1361
1362 if (newStream->format != HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
1363 (newStream->stream_type != CAMERA3_STREAM_OUTPUT &&
1364 newStream->stream_type != CAMERA3_STREAM_BIDIRECTIONAL)) {
1365 continue;
1366 }
1367
Jason Leec4cf5032017-05-24 18:31:41 -07001368 // Here we only care whether it's EIS3 or not
1369 char is_type_value[PROPERTY_VALUE_MAX];
1370 property_get("persist.camera.is_type", is_type_value, "4");
1371 cam_is_type_t isType = atoi(is_type_value) == IS_TYPE_EIS_3_0 ? IS_TYPE_EIS_3_0 : IS_TYPE_NONE;
1372 if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
1373 mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
1374 isType = IS_TYPE_NONE;
1375
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001376 bool isVideo = IS_USAGE_VIDEO(newStream->usage);
1377 bool isPreview = IS_USAGE_PREVIEW(newStream->usage);
1378 bool isZSL = IS_USAGE_ZSL(newStream->usage);
1379 bool forcePreviewUBWC = true;
1380 if (isVideo && !QCameraCommon::isVideoUBWCEnabled()) {
1381 forcePreviewUBWC = false;
1382 }
1383 cam_format_t videoFormat = QCamera3Channel::getStreamDefaultFormat(
Jason Leec4cf5032017-05-24 18:31:41 -07001384 CAM_STREAM_TYPE_VIDEO, newStream->width, newStream->height, forcePreviewUBWC, isType);
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001385 cam_format_t previewFormat = QCamera3Channel::getStreamDefaultFormat(
Jason Leec4cf5032017-05-24 18:31:41 -07001386 CAM_STREAM_TYPE_PREVIEW, newStream->width, newStream->height, forcePreviewUBWC, isType);
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001387 cam_format_t zslFormat = QCamera3Channel::getStreamDefaultFormat(
Jason Leec4cf5032017-05-24 18:31:41 -07001388 CAM_STREAM_TYPE_SNAPSHOT, newStream->width, newStream->height, forcePreviewUBWC, isType);
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001389
1390 // Color space for this camera device is guaranteed to be ITU_R_601_FR.
1391 // So color spaces will always match.
1392
1393 // Check whether underlying formats of shared streams match.
1394 if (isVideo && isPreview && videoFormat != previewFormat) {
1395 LOGE("Combined video and preview usage flag is not supported");
1396 return -EINVAL;
1397 }
1398 if (isPreview && isZSL && previewFormat != zslFormat) {
1399 LOGE("Combined preview and zsl usage flag is not supported");
1400 return -EINVAL;
1401 }
1402 if (isVideo && isZSL && videoFormat != zslFormat) {
1403 LOGE("Combined video and zsl usage flag is not supported");
1404 return -EINVAL;
1405 }
1406 }
1407 return NO_ERROR;
1408}
1409
1410/*===========================================================================
1411 * FUNCTION : validateUsageFlagsForEis
1412 *
1413 * DESCRIPTION: Check if the configuration usage flags conflict with Eis
1414 *
1415 * PARAMETERS :
1416 * @stream_list : streams to be configured
1417 *
1418 * RETURN :
1419 * NO_ERROR if the usage flags are supported
1420 * error code if usage flags are not supported
1421 *
1422 *==========================================================================*/
1423int QCamera3HardwareInterface::validateUsageFlagsForEis(
1424 const camera3_stream_configuration_t* streamList)
1425{
1426 for (size_t j = 0; j < streamList->num_streams; j++) {
1427 const camera3_stream_t *newStream = streamList->streams[j];
1428
1429 bool isVideo = IS_USAGE_VIDEO(newStream->usage);
1430 bool isPreview = IS_USAGE_PREVIEW(newStream->usage);
1431
1432        // Because EIS is "hard-coded" for certain use cases, and the current
1433        // implementation doesn't support shared preview and video on the same
1434        // stream, return failure if EIS is forced on.
1435 if (isPreview && isVideo && m_bEisEnable && m_bEisSupportedSize) {
1436 LOGE("Combined video and preview usage flag is not supported due to EIS");
1437 return -EINVAL;
1438 }
1439 }
1440 return NO_ERROR;
1441}
1442
Thierry Strudel3d639192016-09-09 11:52:26 -07001443/*==============================================================================
1444 * FUNCTION : isSupportChannelNeeded
1445 *
1446 * DESCRIPTION: Simple heuristic to determine whether the support channel is needed
1447 *
1448 * PARAMETERS :
1449 * @stream_list : streams to be configured
1450 * @stream_config_info : the config info for streams to be configured
1451 *
1452 * RETURN     : Boolean true/false decision
1453 *
1454 *==========================================================================*/
1455bool QCamera3HardwareInterface::isSupportChannelNeeded(
1456 camera3_stream_configuration_t *streamList,
1457 cam_stream_size_info_t stream_config_info)
1458{
1459 uint32_t i;
1460 bool pprocRequested = false;
1461 /* Check for conditions where PProc pipeline does not have any streams*/
1462 for (i = 0; i < stream_config_info.num_streams; i++) {
1463 if (stream_config_info.type[i] != CAM_STREAM_TYPE_ANALYSIS &&
1464 stream_config_info.postprocess_mask[i] != CAM_QCOM_FEATURE_NONE) {
1465 pprocRequested = true;
1466 break;
1467 }
1468 }
1469
1470 if (pprocRequested == false )
1471 return true;
1472
1473    /* Dummy stream needed if only raw or jpeg streams are present */
1474 for (i = 0; i < streamList->num_streams; i++) {
1475 switch(streamList->streams[i]->format) {
1476 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1477 case HAL_PIXEL_FORMAT_RAW10:
1478 case HAL_PIXEL_FORMAT_RAW16:
1479 case HAL_PIXEL_FORMAT_BLOB:
1480 break;
1481 default:
1482 return false;
1483 }
1484 }
1485 return true;
1486}
1487
1488/*==============================================================================
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001489 * FUNCTION   : getSensorModeInfo
Thierry Strudel3d639192016-09-09 11:52:26 -07001490 *
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001491 * DESCRIPTION: Get sensor mode information based on the current stream configuration
Thierry Strudel3d639192016-09-09 11:52:26 -07001492 *
1493 * PARAMETERS :
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001494 *   @sensorModeInfo : sensor mode information (output)
Thierry Strudel3d639192016-09-09 11:52:26 -07001495 *
1496 * RETURN : int32_t type of status
1497 * NO_ERROR -- success
1498 *              non-zero failure code
1499 *
1500 *==========================================================================*/
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001501int32_t QCamera3HardwareInterface::getSensorModeInfo(cam_sensor_mode_info_t &sensorModeInfo)
Thierry Strudel3d639192016-09-09 11:52:26 -07001502{
1503 int32_t rc = NO_ERROR;
1504
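    // Find the largest width and height across all configured streams; this is
    // sent down as CAM_INTF_PARM_MAX_DIMENSION so that a sensor mode covering
    // every stream can be selected before querying the mode info.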
1505 cam_dimension_t max_dim = {0, 0};
1506 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
1507 if (mStreamConfigInfo.stream_sizes[i].width > max_dim.width)
1508 max_dim.width = mStreamConfigInfo.stream_sizes[i].width;
1509 if (mStreamConfigInfo.stream_sizes[i].height > max_dim.height)
1510 max_dim.height = mStreamConfigInfo.stream_sizes[i].height;
1511 }
1512
1513 clear_metadata_buffer(mParameters);
1514
1515 rc = ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_MAX_DIMENSION,
1516 max_dim);
1517 if (rc != NO_ERROR) {
1518 LOGE("Failed to update table for CAM_INTF_PARM_MAX_DIMENSION");
1519 return rc;
1520 }
1521
1522 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
1523 if (rc != NO_ERROR) {
1524 LOGE("Failed to set CAM_INTF_PARM_MAX_DIMENSION");
1525 return rc;
1526 }
1527
1528 clear_metadata_buffer(mParameters);
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001529 ADD_GET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_SENSOR_MODE_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -07001530
1531 rc = mCameraHandle->ops->get_parms(mCameraHandle->camera_handle,
1532 mParameters);
1533 if (rc != NO_ERROR) {
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001534 LOGE("Failed to get CAM_INTF_PARM_SENSOR_MODE_INFO");
Thierry Strudel3d639192016-09-09 11:52:26 -07001535 return rc;
1536 }
1537
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001538 READ_PARAM_ENTRY(mParameters, CAM_INTF_PARM_SENSOR_MODE_INFO, sensorModeInfo);
Chien-Yu Chenee335912017-02-09 17:53:20 -08001539 LOGH("%s: active array size %dx%d, pixel array size %dx%d, output pixel clock %u, "
1540 "raw bits: %d", __FUNCTION__, sensorModeInfo.active_array_size.width,
1541 sensorModeInfo.active_array_size.height, sensorModeInfo.pixel_array_size.width,
1542 sensorModeInfo.pixel_array_size.height, sensorModeInfo.op_pixel_clk,
1543 sensorModeInfo.num_raw_bits);
Thierry Strudel3d639192016-09-09 11:52:26 -07001544
1545 return rc;
1546}
1547
1548/*==============================================================================
Chien-Yu Chen605c3872017-06-14 11:09:23 -07001549 * FUNCTION : getCurrentSensorModeInfo
1550 *
1551 * DESCRIPTION: Get sensor mode information that is currently selected.
1552 *
1553 * PARAMETERS :
1554 * @sensorModeInfo : sensor mode information (output)
1555 *
1556 * RETURN : int32_t type of status
1557 * NO_ERROR -- success
1558 *              non-zero failure code
1559 *
1560 *==========================================================================*/
1561int32_t QCamera3HardwareInterface::getCurrentSensorModeInfo(cam_sensor_mode_info_t &sensorModeInfo)
1562{
1563 int32_t rc = NO_ERROR;
1564
1565 clear_metadata_buffer(mParameters);
1566 ADD_GET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_CURRENT_SENSOR_MODE_INFO);
1567
1568 rc = mCameraHandle->ops->get_parms(mCameraHandle->camera_handle,
1569 mParameters);
1570 if (rc != NO_ERROR) {
1571        LOGE("Failed to get CAM_INTF_PARM_CURRENT_SENSOR_MODE_INFO");
1572 return rc;
1573 }
1574
1575 READ_PARAM_ENTRY(mParameters, CAM_INTF_PARM_CURRENT_SENSOR_MODE_INFO, sensorModeInfo);
1576 LOGH("%s: active array size %dx%d, pixel array size %dx%d, output pixel clock %u, "
1577 "raw bits: %d", __FUNCTION__, sensorModeInfo.active_array_size.width,
1578 sensorModeInfo.active_array_size.height, sensorModeInfo.pixel_array_size.width,
1579 sensorModeInfo.pixel_array_size.height, sensorModeInfo.op_pixel_clk,
1580 sensorModeInfo.num_raw_bits);
1581
1582 return rc;
1583}
1584
1585/*==============================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07001586 * FUNCTION : addToPPFeatureMask
1587 *
1588 * DESCRIPTION: add additional features to pp feature mask based on
1589 * stream type and usecase
1590 *
1591 * PARAMETERS :
1592 * @stream_format : stream type for feature mask
1593 * @stream_idx : stream idx within postprocess_mask list to change
1594 *
1595 * RETURN : NULL
1596 *
1597 *==========================================================================*/
1598void QCamera3HardwareInterface::addToPPFeatureMask(int stream_format,
1599 uint32_t stream_idx)
1600{
1601 char feature_mask_value[PROPERTY_VALUE_MAX];
1602 cam_feature_mask_t feature_mask;
1603 int args_converted;
1604 int property_len;
1605
1606 /* Get feature mask from property */
Thierry Strudel269c81a2016-10-12 12:13:59 -07001607#ifdef _LE_CAMERA_
1608 char swtnr_feature_mask_value[PROPERTY_VALUE_MAX];
1609 snprintf(swtnr_feature_mask_value, PROPERTY_VALUE_MAX, "%lld", CAM_QTI_FEATURE_SW_TNR);
1610 property_len = property_get("persist.camera.hal3.feature",
1611 feature_mask_value, swtnr_feature_mask_value);
1612#else
Thierry Strudel3d639192016-09-09 11:52:26 -07001613 property_len = property_get("persist.camera.hal3.feature",
1614 feature_mask_value, "0");
Thierry Strudel269c81a2016-10-12 12:13:59 -07001615#endif
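    // The feature mask property may be given either as a hex value (0x-prefixed)
    // or as a decimal value; parse it accordingly.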
Thierry Strudel3d639192016-09-09 11:52:26 -07001616 if ((property_len > 2) && (feature_mask_value[0] == '0') &&
1617 (feature_mask_value[1] == 'x')) {
1618 args_converted = sscanf(feature_mask_value, "0x%llx", &feature_mask);
1619 } else {
1620 args_converted = sscanf(feature_mask_value, "%lld", &feature_mask);
1621 }
1622 if (1 != args_converted) {
1623 feature_mask = 0;
1624 LOGE("Wrong feature mask %s", feature_mask_value);
1625 return;
1626 }
1627
1628 switch (stream_format) {
1629 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED: {
1630 /* Add LLVD to pp feature mask only if video hint is enabled */
1631 if ((m_bIsVideo) && (feature_mask & CAM_QTI_FEATURE_SW_TNR)) {
1632 mStreamConfigInfo.postprocess_mask[stream_idx]
1633 |= CAM_QTI_FEATURE_SW_TNR;
1634 LOGH("Added SW TNR to pp feature mask");
1635 } else if ((m_bIsVideo) && (feature_mask & CAM_QCOM_FEATURE_LLVD)) {
1636 mStreamConfigInfo.postprocess_mask[stream_idx]
1637 |= CAM_QCOM_FEATURE_LLVD;
1638 LOGH("Added LLVD SeeMore to pp feature mask");
1639 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001640 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
1641 CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) {
1642 mStreamConfigInfo.postprocess_mask[stream_idx] |= CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
1643 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08001644 if ((m_bIsVideo) && (gCamCapability[mCameraId]->qcom_supported_feature_mask &
1645 CAM_QTI_FEATURE_BINNING_CORRECTION)) {
1646 mStreamConfigInfo.postprocess_mask[stream_idx] |=
1647 CAM_QTI_FEATURE_BINNING_CORRECTION;
1648 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001649 break;
1650 }
1651 default:
1652 break;
1653 }
1654 LOGD("PP feature mask %llx",
1655 mStreamConfigInfo.postprocess_mask[stream_idx]);
1656}
1657
1658/*==============================================================================
1659 * FUNCTION : updateFpsInPreviewBuffer
1660 *
1661 * DESCRIPTION: update FPS information in preview buffer.
1662 *
1663 * PARAMETERS :
1664 * @metadata : pointer to metadata buffer
1665 * @frame_number: frame_number to look for in pending buffer list
1666 *
1667 * RETURN : None
1668 *
1669 *==========================================================================*/
1670void QCamera3HardwareInterface::updateFpsInPreviewBuffer(metadata_buffer_t *metadata,
1671 uint32_t frame_number)
1672{
1673 // Mark all pending buffers for this particular request
1674 // with corresponding framerate information
1675 for (List<PendingBuffersInRequest>::iterator req =
1676 mPendingBuffersMap.mPendingBuffersInRequest.begin();
1677 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
1678 for(List<PendingBufferInfo>::iterator j =
1679 req->mPendingBufferList.begin();
1680 j != req->mPendingBufferList.end(); j++) {
1681 QCamera3Channel *channel = (QCamera3Channel *)j->stream->priv;
1682 if ((req->frame_number == frame_number) &&
1683 (channel->getStreamTypeMask() &
1684 (1U << CAM_STREAM_TYPE_PREVIEW))) {
1685 IF_META_AVAILABLE(cam_fps_range_t, float_range,
1686 CAM_INTF_PARM_FPS_RANGE, metadata) {
1687 typeof (MetaData_t::refreshrate) cameraFps = float_range->max_fps;
1688 struct private_handle_t *priv_handle =
1689 (struct private_handle_t *)(*(j->buffer));
1690 setMetaData(priv_handle, UPDATE_REFRESH_RATE, &cameraFps);
1691 }
1692 }
1693 }
1694 }
1695}
1696
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07001697/*==============================================================================
1698 * FUNCTION : updateTimeStampInPendingBuffers
1699 *
1700 * DESCRIPTION: update timestamp in display metadata for all pending buffers
1701 * of a frame number
1702 *
1703 * PARAMETERS :
1704 * @frame_number: frame_number. Timestamp will be set on pending buffers of this frame number
1705 * @timestamp : timestamp to be set
1706 *
1707 * RETURN : None
1708 *
1709 *==========================================================================*/
1710void QCamera3HardwareInterface::updateTimeStampInPendingBuffers(
1711 uint32_t frameNumber, nsecs_t timestamp)
1712{
1713 for (auto req = mPendingBuffersMap.mPendingBuffersInRequest.begin();
1714 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
Binhao Lin09245482017-08-31 18:25:29 -07001715 // WAR: save the av_timestamp to the next frame
1716 if(req->frame_number == frameNumber + 1) {
1717 req->av_timestamp = timestamp;
1718 }
1719
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07001720 if (req->frame_number != frameNumber)
1721 continue;
1722
1723 for (auto k = req->mPendingBufferList.begin();
1724 k != req->mPendingBufferList.end(); k++ ) {
Binhao Lin09245482017-08-31 18:25:29 -07001725 // WAR: update timestamp when it's not VT usecase
1726 QCamera3Channel *channel = (QCamera3Channel *)k->stream->priv;
1727 if (!((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask() &&
1728 m_bAVTimerEnabled)) {
1729 struct private_handle_t *priv_handle =
1730 (struct private_handle_t *) (*(k->buffer));
1731 setMetaData(priv_handle, SET_VT_TIMESTAMP, &timestamp);
1732 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07001733 }
1734 }
1735 return;
1736}
1737
Thierry Strudel3d639192016-09-09 11:52:26 -07001738/*===========================================================================
1739 * FUNCTION : configureStreams
1740 *
1741 * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input
1742 * and output streams.
1743 *
1744 * PARAMETERS :
1745 * @stream_list : streams to be configured
1746 *
1747 * RETURN :
1748 *
1749 *==========================================================================*/
1750int QCamera3HardwareInterface::configureStreams(
1751 camera3_stream_configuration_t *streamList)
1752{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001753 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CFG_STRMS);
Thierry Strudel3d639192016-09-09 11:52:26 -07001754 int rc = 0;
1755
1756 // Acquire perfLock before configure streams
Thierry Strudelc2ee3302016-11-17 12:33:12 -08001757 mPerfLockMgr.acquirePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07001758 rc = configureStreamsPerfLocked(streamList);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08001759 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07001760
1761 return rc;
1762}
1763
1764/*===========================================================================
1765 * FUNCTION : configureStreamsPerfLocked
1766 *
1767 * DESCRIPTION: configureStreams while perfLock is held.
1768 *
1769 * PARAMETERS :
1770 * @stream_list : streams to be configured
1771 *
1772 * RETURN : int32_t type of status
1773 * NO_ERROR -- success
1774 *              non-zero failure code
1775 *==========================================================================*/
1776int QCamera3HardwareInterface::configureStreamsPerfLocked(
1777 camera3_stream_configuration_t *streamList)
1778{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001779 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CFG_STRMS_PERF_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07001780 int rc = 0;
1781
1782 // Sanity check stream_list
1783 if (streamList == NULL) {
1784 LOGE("NULL stream configuration");
1785 return BAD_VALUE;
1786 }
1787 if (streamList->streams == NULL) {
1788 LOGE("NULL stream list");
1789 return BAD_VALUE;
1790 }
1791
1792 if (streamList->num_streams < 1) {
1793 LOGE("Bad number of streams requested: %d",
1794 streamList->num_streams);
1795 return BAD_VALUE;
1796 }
1797
1798 if (streamList->num_streams >= MAX_NUM_STREAMS) {
1799 LOGE("Maximum number of streams %d exceeded: %d",
1800 MAX_NUM_STREAMS, streamList->num_streams);
1801 return BAD_VALUE;
1802 }
1803
Jason Leec4cf5032017-05-24 18:31:41 -07001804 mOpMode = streamList->operation_mode;
1805 LOGD("mOpMode: %d", mOpMode);
1806
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001807 rc = validateUsageFlags(streamList);
1808 if (rc != NO_ERROR) {
1809 return rc;
1810 }
1811
Chien-Yu Chen11c8edc2017-09-11 20:54:24 -07001812    // Disable HDR+ if it's enabled.
Chien-Yu Chen153c5172017-09-08 11:33:19 -07001813 {
1814 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
1815 finishHdrPlusClientOpeningLocked(l);
1816 disableHdrPlusModeLocked();
1817 }
1818
Thierry Strudel3d639192016-09-09 11:52:26 -07001819    /* First invalidate all the streams in mStreamInfo;
1820     * if they appear again, they will be re-validated */
1821 for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
1822 it != mStreamInfo.end(); it++) {
1823 QCamera3ProcessingChannel *channel = (QCamera3ProcessingChannel*)(*it)->stream->priv;
1824 if (channel) {
1825 channel->stop();
1826 }
1827 (*it)->status = INVALID;
1828 }
1829
1830 if (mRawDumpChannel) {
1831 mRawDumpChannel->stop();
1832 delete mRawDumpChannel;
1833 mRawDumpChannel = NULL;
1834 }
1835
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001836 if (mHdrPlusRawSrcChannel) {
1837 mHdrPlusRawSrcChannel->stop();
1838 delete mHdrPlusRawSrcChannel;
1839 mHdrPlusRawSrcChannel = NULL;
1840 }
1841
Thierry Strudel3d639192016-09-09 11:52:26 -07001842 if (mSupportChannel)
1843 mSupportChannel->stop();
1844
1845 if (mAnalysisChannel) {
1846 mAnalysisChannel->stop();
1847 }
1848 if (mMetadataChannel) {
1849        /* If mStreamInfo is not empty, the metadata stream exists */
1850 mMetadataChannel->stop();
1851 }
1852 if (mChannelHandle) {
Chien-Yu Chen153c5172017-09-08 11:33:19 -07001853 stopChannelLocked(/*stop_immediately*/false);
Thierry Strudel3d639192016-09-09 11:52:26 -07001854 }
1855
1856 pthread_mutex_lock(&mMutex);
1857
Chien-Yu Chendeaebad2017-06-30 11:46:34 -07001858 mPictureChannel = NULL;
1859
Thierry Strudel3d639192016-09-09 11:52:26 -07001860 // Check state
1861 switch (mState) {
1862 case INITIALIZED:
1863 case CONFIGURED:
1864 case STARTED:
1865 /* valid state */
1866 break;
1867 default:
1868 LOGE("Invalid state %d", mState);
1869 pthread_mutex_unlock(&mMutex);
1870 return -ENODEV;
1871 }
1872
1873 /* Check whether we have video stream */
1874 m_bIs4KVideo = false;
1875 m_bIsVideo = false;
1876 m_bEisSupportedSize = false;
1877 m_bTnrEnabled = false;
Mansoor Aftab93a66e52017-01-26 14:58:25 -08001878 m_bVideoHdrEnabled = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07001879 bool isZsl = false;
Emilian Peev7650c122017-01-19 08:24:33 -08001880 bool depthPresent = false;
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001881 bool isPreview = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07001882 uint32_t videoWidth = 0U;
1883 uint32_t videoHeight = 0U;
1884 size_t rawStreamCnt = 0;
1885 size_t stallStreamCnt = 0;
1886 size_t processedStreamCnt = 0;
1887 // Number of streams on ISP encoder path
1888 size_t numStreamsOnEncoder = 0;
1889 size_t numYuv888OnEncoder = 0;
1890 bool bYuv888OverrideJpeg = false;
1891 cam_dimension_t largeYuv888Size = {0, 0};
1892 cam_dimension_t maxViewfinderSize = {0, 0};
1893 bool bJpegExceeds4K = false;
1894 bool bJpegOnEncoder = false;
1895 bool bUseCommonFeatureMask = false;
1896 cam_feature_mask_t commonFeatureMask = 0;
1897 bool bSmallJpegSize = false;
1898 uint32_t width_ratio;
1899 uint32_t height_ratio;
1900 maxViewfinderSize = gCamCapability[mCameraId]->max_viewfinder_size;
1901 camera3_stream_t *inputStream = NULL;
1902 bool isJpeg = false;
1903 cam_dimension_t jpegSize = {0, 0};
Thierry Strudel9ec39c62016-12-28 11:30:05 -08001904 cam_dimension_t previewSize = {0, 0};
Emilian Peev0f3c3162017-03-15 12:57:46 +00001905 size_t pdStatCount = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07001906
1907 cam_padding_info_t padding_info = gCamCapability[mCameraId]->padding_info;
1908
1909 /*EIS configuration*/
Thierry Strudel3d639192016-09-09 11:52:26 -07001910 uint8_t eis_prop_set;
1911 uint32_t maxEisWidth = 0;
1912 uint32_t maxEisHeight = 0;
1913
Thierry Strudel295a0ca2016-11-03 18:38:47 -07001914 // Initialize all instant AEC related variables
1915 mInstantAEC = false;
1916 mResetInstantAEC = false;
1917 mInstantAECSettledFrameNumber = 0;
1918 mAecSkipDisplayFrameBound = 0;
1919 mInstantAecFrameIdxCount = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -08001920 mCurrFeatureState = 0;
1921 mStreamConfig = true;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07001922
Binhao Lin09245482017-08-31 18:25:29 -07001923 m_bAVTimerEnabled = false;
1924
Thierry Strudel3d639192016-09-09 11:52:26 -07001925 memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
1926
1927 size_t count = IS_TYPE_MAX;
1928 count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
1929 for (size_t i = 0; i < count; i++) {
1930 if ((gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001931 (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0)) {
1932 m_bEisSupported = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07001933 break;
1934 }
1935 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001936
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001937 if (m_bEisSupported) {
Thierry Strudel3d639192016-09-09 11:52:26 -07001938 maxEisWidth = MAX_EIS_WIDTH;
1939 maxEisHeight = MAX_EIS_HEIGHT;
1940 }
1941
1942 /* EIS setprop control */
1943 char eis_prop[PROPERTY_VALUE_MAX];
1944 memset(eis_prop, 0, sizeof(eis_prop));
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001945 property_get("persist.camera.eis.enable", eis_prop, "1");
Thierry Strudel3d639192016-09-09 11:52:26 -07001946 eis_prop_set = (uint8_t)atoi(eis_prop);
1947
Mansoor Aftabb9370df2017-03-15 17:09:34 -07001948 m_bEisEnable = eis_prop_set && m_bEisSupported &&
Thierry Strudel3d639192016-09-09 11:52:26 -07001949 (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE);
1950
Mansoor Aftabb9370df2017-03-15 17:09:34 -07001951 LOGD("m_bEisEnable: %d, eis_prop_set: %d, m_bEisSupported: %d",
1952 m_bEisEnable, eis_prop_set, m_bEisSupported);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001953
Thierry Strudel3d639192016-09-09 11:52:26 -07001954 /* stream configurations */
1955 for (size_t i = 0; i < streamList->num_streams; i++) {
1956 camera3_stream_t *newStream = streamList->streams[i];
1957 LOGI("stream[%d] type = %d, format = %d, width = %d, "
1958 "height = %d, rotation = %d, usage = 0x%x",
1959 i, newStream->stream_type, newStream->format,
1960 newStream->width, newStream->height, newStream->rotation,
1961 newStream->usage);
1962 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1963 newStream->stream_type == CAMERA3_STREAM_INPUT){
1964 isZsl = true;
1965 }
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001966 if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
1967 IS_USAGE_PREVIEW(newStream->usage)) {
1968 isPreview = true;
1969 }
1970
Thierry Strudel3d639192016-09-09 11:52:26 -07001971 if (newStream->stream_type == CAMERA3_STREAM_INPUT){
1972 inputStream = newStream;
1973 }
1974
Emilian Peev7650c122017-01-19 08:24:33 -08001975 if ((newStream->format == HAL_PIXEL_FORMAT_BLOB) &&
1976 (newStream->data_space != HAL_DATASPACE_DEPTH)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07001977 isJpeg = true;
1978 jpegSize.width = newStream->width;
1979 jpegSize.height = newStream->height;
1980 if (newStream->width > VIDEO_4K_WIDTH ||
1981 newStream->height > VIDEO_4K_HEIGHT)
1982 bJpegExceeds4K = true;
1983 }
1984
1985 if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
1986 (newStream->usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER)) {
1987 m_bIsVideo = true;
Thierry Strudel2896d122017-02-23 19:18:03 -08001988 // In HAL3 we can have multiple different video streams.
1989 // The variables video width and height are used below as
1990 // dimensions of the biggest of them
1991 if (videoWidth < newStream->width ||
1992 videoHeight < newStream->height) {
1993 videoWidth = newStream->width;
1994 videoHeight = newStream->height;
1995 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001996 if ((VIDEO_4K_WIDTH <= newStream->width) &&
1997 (VIDEO_4K_HEIGHT <= newStream->height)) {
1998 m_bIs4KVideo = true;
1999 }
2000 m_bEisSupportedSize = (newStream->width <= maxEisWidth) &&
2001 (newStream->height <= maxEisHeight);
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002002
Thierry Strudel3d639192016-09-09 11:52:26 -07002003 }
2004 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
2005 newStream->stream_type == CAMERA3_STREAM_OUTPUT) {
2006 switch (newStream->format) {
2007 case HAL_PIXEL_FORMAT_BLOB:
Emilian Peev7650c122017-01-19 08:24:33 -08002008 if (newStream->data_space == HAL_DATASPACE_DEPTH) {
2009 depthPresent = true;
2010 break;
2011 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002012 stallStreamCnt++;
2013 if (isOnEncoder(maxViewfinderSize, newStream->width,
2014 newStream->height)) {
2015 numStreamsOnEncoder++;
2016 bJpegOnEncoder = true;
2017 }
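                // Flag JPEG sizes that need more downscaling from the active
                // array than max_downscale_factor allows; such sizes are handled
                // via extra post-processing (see the BLOB case further below).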
2018 width_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.width,
2019 newStream->width);
2020 height_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.height,
2021                    newStream->height);
2022 FATAL_IF(gCamCapability[mCameraId]->max_downscale_factor == 0,
2023 "FATAL: max_downscale_factor cannot be zero and so assert");
2024 if ( (width_ratio > gCamCapability[mCameraId]->max_downscale_factor) ||
2025 (height_ratio > gCamCapability[mCameraId]->max_downscale_factor)) {
2026 LOGH("Setting small jpeg size flag to true");
2027 bSmallJpegSize = true;
2028 }
2029 break;
2030 case HAL_PIXEL_FORMAT_RAW10:
2031 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2032 case HAL_PIXEL_FORMAT_RAW16:
2033 rawStreamCnt++;
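                // Count PD stat streams (RAW16 with the DEPTH dataspace); only a
                // single PD stream is supported, which is enforced further below.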
Emilian Peev0f3c3162017-03-15 12:57:46 +00002034 if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
2035 (HAL_PIXEL_FORMAT_RAW16 == newStream->format)) {
2036 pdStatCount++;
2037 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002038 break;
2039 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
2040 processedStreamCnt++;
2041 if (isOnEncoder(maxViewfinderSize, newStream->width,
2042 newStream->height)) {
2043 if (newStream->stream_type != CAMERA3_STREAM_BIDIRECTIONAL &&
2044 !IS_USAGE_ZSL(newStream->usage)) {
2045 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2046 }
2047 numStreamsOnEncoder++;
2048 }
2049 break;
2050 case HAL_PIXEL_FORMAT_YCbCr_420_888:
2051 processedStreamCnt++;
2052 if (isOnEncoder(maxViewfinderSize, newStream->width,
2053 newStream->height)) {
2054 // If Yuv888 size is not greater than 4K, set feature mask
2055 // to SUPERSET so that it support concurrent request on
2056 // YUV and JPEG.
2057 if (newStream->width <= VIDEO_4K_WIDTH &&
2058 newStream->height <= VIDEO_4K_HEIGHT) {
2059 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2060 }
2061 numStreamsOnEncoder++;
2062 numYuv888OnEncoder++;
2063 largeYuv888Size.width = newStream->width;
2064 largeYuv888Size.height = newStream->height;
2065 }
2066 break;
2067 default:
2068 processedStreamCnt++;
2069 if (isOnEncoder(maxViewfinderSize, newStream->width,
2070 newStream->height)) {
2071 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2072 numStreamsOnEncoder++;
2073 }
2074 break;
2075 }
2076
2077 }
2078 }
2079
2080 if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
2081 gCamCapability[mCameraId]->position == CAM_POSITION_FRONT_AUX ||
2082 !m_bIsVideo) {
2083 m_bEisEnable = false;
2084 }
2085
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002086 if (validateUsageFlagsForEis(streamList) != NO_ERROR) {
2087 pthread_mutex_unlock(&mMutex);
2088 return -EINVAL;
2089 }
2090
Thierry Strudel54dc9782017-02-15 12:12:10 -08002091 uint8_t forceEnableTnr = 0;
2092 char tnr_prop[PROPERTY_VALUE_MAX];
2093 memset(tnr_prop, 0, sizeof(tnr_prop));
2094 property_get("debug.camera.tnr.forceenable", tnr_prop, "0");
2095 forceEnableTnr = (uint8_t)atoi(tnr_prop);
2096
Thierry Strudel3d639192016-09-09 11:52:26 -07002097 /* Logic to enable/disable TNR based on specific config size/etc.*/
2098 if ((m_bTnrPreview || m_bTnrVideo) && m_bIsVideo &&
Thierry Strudel3d639192016-09-09 11:52:26 -07002099 (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE))
2100 m_bTnrEnabled = true;
Thierry Strudel54dc9782017-02-15 12:12:10 -08002101 else if (forceEnableTnr)
2102 m_bTnrEnabled = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07002103
Mansoor Aftab93a66e52017-01-26 14:58:25 -08002104 char videoHdrProp[PROPERTY_VALUE_MAX];
2105 memset(videoHdrProp, 0, sizeof(videoHdrProp));
2106 property_get("persist.camera.hdr.video", videoHdrProp, "0");
2107 uint8_t hdr_mode_prop = (uint8_t)atoi(videoHdrProp);
2108
2109 if (hdr_mode_prop == 1 && m_bIsVideo &&
2110 mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2111 m_bVideoHdrEnabled = true;
2112 else
2113 m_bVideoHdrEnabled = false;
2114
2115
Thierry Strudel3d639192016-09-09 11:52:26 -07002116 /* Check if num_streams is sane */
2117 if (stallStreamCnt > MAX_STALLING_STREAMS ||
2118 rawStreamCnt > MAX_RAW_STREAMS ||
2119 processedStreamCnt > MAX_PROCESSED_STREAMS) {
2120        LOGE("Invalid stream config: stall: %d, raw: %d, processed: %d",
2121 stallStreamCnt, rawStreamCnt, processedStreamCnt);
2122 pthread_mutex_unlock(&mMutex);
2123 return -EINVAL;
2124 }
2125 /* Check whether we have zsl stream or 4k video case */
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002126 if (isZsl && m_bIs4KVideo) {
2127 LOGE("Currently invalid configuration ZSL & 4K Video!");
Thierry Strudel3d639192016-09-09 11:52:26 -07002128 pthread_mutex_unlock(&mMutex);
2129 return -EINVAL;
2130 }
2131 /* Check if stream sizes are sane */
2132 if (numStreamsOnEncoder > 2) {
2133 LOGE("Number of streams on ISP encoder path exceeds limits of 2");
2134 pthread_mutex_unlock(&mMutex);
2135 return -EINVAL;
2136 } else if (1 < numStreamsOnEncoder){
2137 bUseCommonFeatureMask = true;
2138 LOGH("Multiple streams above max viewfinder size, common mask needed");
2139 }
2140
2141 /* Check if BLOB size is greater than 4k in 4k recording case */
2142 if (m_bIs4KVideo && bJpegExceeds4K) {
2143 LOGE("HAL doesn't support Blob size greater than 4k in 4k recording");
2144 pthread_mutex_unlock(&mMutex);
2145 return -EINVAL;
2146 }
2147
Emilian Peev7650c122017-01-19 08:24:33 -08002148 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2149 depthPresent) {
2150 LOGE("HAL doesn't support depth streams in HFR mode!");
2151 pthread_mutex_unlock(&mMutex);
2152 return -EINVAL;
2153 }
2154
Thierry Strudel3d639192016-09-09 11:52:26 -07002155 // When JPEG and preview streams share VFE output, CPP will not apply CAC2
2156 // on JPEG stream. So disable such configurations to ensure CAC2 is applied.
2157 // Don't fail for reprocess configurations. Also don't fail if bJpegExceeds4K
2158 // is not true. Otherwise testMandatoryOutputCombinations will fail with following
2159 // configurations:
2160 // {[PRIV, PREVIEW] [PRIV, RECORD] [JPEG, RECORD]}
2161 // {[PRIV, PREVIEW] [YUV, RECORD] [JPEG, RECORD]}
2162 // (These two configurations will not have CAC2 enabled even in HQ modes.)
2163 if (!isZsl && bJpegOnEncoder && bJpegExceeds4K && bUseCommonFeatureMask) {
2164 ALOGE("%s: Blob size greater than 4k and multiple streams are on encoder output",
2165 __func__);
2166 pthread_mutex_unlock(&mMutex);
2167 return -EINVAL;
2168 }
2169
2170 // If jpeg stream is available, and a YUV 888 stream is on Encoder path, and
2171 // the YUV stream's size is greater or equal to the JPEG size, set common
2172 // postprocess mask to NONE, so that we can take advantage of postproc bypass.
2173 if (numYuv888OnEncoder && isOnEncoder(maxViewfinderSize,
2174 jpegSize.width, jpegSize.height) &&
2175 largeYuv888Size.width > jpegSize.width &&
2176 largeYuv888Size.height > jpegSize.height) {
2177 bYuv888OverrideJpeg = true;
2178 } else if (!isJpeg && numStreamsOnEncoder > 1) {
2179 commonFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2180 }
2181
2182 LOGH("max viewfinder width %d height %d isZsl %d bUseCommonFeature %x commonFeatureMask %llx",
2183 maxViewfinderSize.width, maxViewfinderSize.height, isZsl, bUseCommonFeatureMask,
2184 commonFeatureMask);
2185 LOGH("numStreamsOnEncoder %d, processedStreamCnt %d, stallcnt %d bSmallJpegSize %d",
2186 numStreamsOnEncoder, processedStreamCnt, stallStreamCnt, bSmallJpegSize);
2187
2188 rc = validateStreamDimensions(streamList);
2189 if (rc == NO_ERROR) {
2190 rc = validateStreamRotations(streamList);
2191 }
2192 if (rc != NO_ERROR) {
2193 LOGE("Invalid stream configuration requested!");
2194 pthread_mutex_unlock(&mMutex);
2195 return rc;
2196 }
2197
Emilian Peev0f3c3162017-03-15 12:57:46 +00002198 if (1 < pdStatCount) {
2199 LOGE("HAL doesn't support multiple PD streams");
2200 pthread_mutex_unlock(&mMutex);
2201 return -EINVAL;
2202 }
2203
2204 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2205 (1 == pdStatCount)) {
2206 LOGE("HAL doesn't support PD streams in HFR mode!");
2207 pthread_mutex_unlock(&mMutex);
2208 return -EINVAL;
2209 }
2210
Thierry Strudel3d639192016-09-09 11:52:26 -07002211 camera3_stream_t *zslStream = NULL; //Only use this for size and not actual handle!
2212 for (size_t i = 0; i < streamList->num_streams; i++) {
2213 camera3_stream_t *newStream = streamList->streams[i];
2214 LOGH("newStream type = %d, stream format = %d "
2215 "stream size : %d x %d, stream rotation = %d",
2216 newStream->stream_type, newStream->format,
2217 newStream->width, newStream->height, newStream->rotation);
2218 //if the stream is in the mStreamList validate it
2219 bool stream_exists = false;
2220 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
2221 it != mStreamInfo.end(); it++) {
2222 if ((*it)->stream == newStream) {
2223 QCamera3ProcessingChannel *channel =
2224 (QCamera3ProcessingChannel*)(*it)->stream->priv;
2225 stream_exists = true;
2226 if (channel)
2227 delete channel;
2228 (*it)->status = VALID;
2229 (*it)->stream->priv = NULL;
2230 (*it)->channel = NULL;
2231 }
2232 }
2233 if (!stream_exists && newStream->stream_type != CAMERA3_STREAM_INPUT) {
2234 //new stream
2235 stream_info_t* stream_info;
2236 stream_info = (stream_info_t* )malloc(sizeof(stream_info_t));
2237 if (!stream_info) {
2238 LOGE("Could not allocate stream info");
2239 rc = -ENOMEM;
2240 pthread_mutex_unlock(&mMutex);
2241 return rc;
2242 }
2243 stream_info->stream = newStream;
2244 stream_info->status = VALID;
2245 stream_info->channel = NULL;
Chien-Yu Chen14d3e392017-07-10 18:27:05 -07002246 stream_info->id = i;
Thierry Strudel3d639192016-09-09 11:52:26 -07002247 mStreamInfo.push_back(stream_info);
2248 }
2249 /* Covers Opaque ZSL and API1 F/W ZSL */
2250 if (IS_USAGE_ZSL(newStream->usage)
2251 || newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ) {
2252 if (zslStream != NULL) {
2253 LOGE("Multiple input/reprocess streams requested!");
2254 pthread_mutex_unlock(&mMutex);
2255 return BAD_VALUE;
2256 }
2257 zslStream = newStream;
2258 }
2259 /* Covers YUV reprocess */
2260 if (inputStream != NULL) {
2261 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT
2262 && newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
2263 && inputStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
2264 && inputStream->width == newStream->width
2265 && inputStream->height == newStream->height) {
2266 if (zslStream != NULL) {
2267                    /* This scenario indicates that multiple YUV streams with the same
2268                     * size as the input stream have been requested. Since the zsl stream
2269                     * handle is solely used for overriding the size of streams that share
2270                     * h/w streams, we just make a guess here as to which stream is the
2271                     * ZSL stream. This will be refactored once we have generic logic for
2272                     * streams sharing encoder output.
2273 */
2274 LOGH("Warning, Multiple ip/reprocess streams requested!");
2275 }
2276 zslStream = newStream;
2277 }
2278 }
2279 }
2280
2281 /* If a zsl stream is set, we know that we have configured at least one input or
2282 bidirectional stream */
2283 if (NULL != zslStream) {
2284 mInputStreamInfo.dim.width = (int32_t)zslStream->width;
2285 mInputStreamInfo.dim.height = (int32_t)zslStream->height;
2286 mInputStreamInfo.format = zslStream->format;
2287 mInputStreamInfo.usage = zslStream->usage;
2288 LOGD("Input stream configured! %d x %d, format %d, usage %d",
2289 mInputStreamInfo.dim.width,
2290 mInputStreamInfo.dim.height,
2291 mInputStreamInfo.format, mInputStreamInfo.usage);
2292 }
2293
2294 cleanAndSortStreamInfo();
2295 if (mMetadataChannel) {
2296 delete mMetadataChannel;
2297 mMetadataChannel = NULL;
2298 }
2299 if (mSupportChannel) {
2300 delete mSupportChannel;
2301 mSupportChannel = NULL;
2302 }
2303
2304 if (mAnalysisChannel) {
2305 delete mAnalysisChannel;
2306 mAnalysisChannel = NULL;
2307 }
2308
2309 if (mDummyBatchChannel) {
2310 delete mDummyBatchChannel;
2311 mDummyBatchChannel = NULL;
2312 }
2313
Emilian Peev7650c122017-01-19 08:24:33 -08002314 if (mDepthChannel) {
2315 mDepthChannel = NULL;
2316 }
Emilian Peev656e4fa2017-06-02 16:47:04 +01002317 mDepthCloudMode = CAM_PD_DATA_SKIP;
Emilian Peev7650c122017-01-19 08:24:33 -08002318
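    // Clear any shutter/result-buffer dispatch state carried over from the
    // previous stream configuration.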
Chien-Yu Chen3f303522017-05-19 15:21:45 -07002319 mShutterDispatcher.clear();
2320 mOutputBufferDispatcher.clear();
2321
Thierry Strudel2896d122017-02-23 19:18:03 -08002322 char is_type_value[PROPERTY_VALUE_MAX];
2323 property_get("persist.camera.is_type", is_type_value, "4");
2324 m_bEis3PropertyEnabled = (atoi(is_type_value) == IS_TYPE_EIS_3_0);
2325
Binhao Line406f062017-05-03 14:39:44 -07002326 char property_value[PROPERTY_VALUE_MAX];
2327 property_get("persist.camera.gzoom.at", property_value, "0");
2328 int goog_zoom_at = atoi(property_value);
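    // persist.camera.gzoom.at is treated as a bitmask: bit 0 enables Google zoom
    // on the video stream and bit 1 on the preview stream (back camera only).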
Jason Leec4cf5032017-05-24 18:31:41 -07002329 bool is_goog_zoom_video_enabled = ((goog_zoom_at & 1) > 0) &&
2330 gCamCapability[mCameraId]->position == CAM_POSITION_BACK;
2331 bool is_goog_zoom_preview_enabled = ((goog_zoom_at & 2) > 0) &&
2332 gCamCapability[mCameraId]->position == CAM_POSITION_BACK;
Binhao Line406f062017-05-03 14:39:44 -07002333
2334 property_get("persist.camera.gzoom.4k", property_value, "0");
2335 bool is_goog_zoom_4k_enabled = (atoi(property_value) > 0);
2336
Thierry Strudel3d639192016-09-09 11:52:26 -07002337 //Create metadata channel and initialize it
2338 cam_feature_mask_t metadataFeatureMask = CAM_QCOM_FEATURE_NONE;
2339 setPAAFSupport(metadataFeatureMask, CAM_STREAM_TYPE_METADATA,
2340 gCamCapability[mCameraId]->color_arrangement);
2341 mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
2342 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002343 setBufferErrorStatus, &padding_info, metadataFeatureMask, this);
Thierry Strudel3d639192016-09-09 11:52:26 -07002344 if (mMetadataChannel == NULL) {
2345 LOGE("failed to allocate metadata channel");
2346 rc = -ENOMEM;
2347 pthread_mutex_unlock(&mMutex);
2348 return rc;
2349 }
Emilian Peev662c05e2017-05-16 10:00:04 +01002350 mMetadataChannel->enableDepthData(depthPresent);
Thierry Strudel3d639192016-09-09 11:52:26 -07002351 rc = mMetadataChannel->initialize(IS_TYPE_NONE);
2352 if (rc < 0) {
2353 LOGE("metadata channel initialization failed");
2354 delete mMetadataChannel;
2355 mMetadataChannel = NULL;
2356 pthread_mutex_unlock(&mMutex);
2357 return rc;
2358 }
2359
Thierry Strudel2896d122017-02-23 19:18:03 -08002360 cam_feature_mask_t zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002361 bool isRawStreamRequested = false;
Thierry Strudel2896d122017-02-23 19:18:03 -08002362 bool onlyRaw = true;
Binhao Lincdb362a2017-04-20 13:31:54 -07002363 // Keep track of preview/video streams indices.
2364 // There could be more than one preview streams, but only one video stream.
2365 int32_t video_stream_idx = -1;
2366 int32_t preview_stream_idx[streamList->num_streams];
2367 size_t preview_stream_cnt = 0;
Jason Leea52b77e2017-06-27 16:16:17 -07002368 bool previewTnr[streamList->num_streams];
2369 memset(previewTnr, 0, sizeof(bool) * streamList->num_streams);
2370 bool isFront = gCamCapability[mCameraId]->position == CAM_POSITION_FRONT;
2371 // Loop through once to determine preview TNR conditions before creating channels.
2372 for (size_t i = 0; i < streamList->num_streams; i++) {
2373 camera3_stream_t *newStream = streamList->streams[i];
2374 uint32_t stream_usage = newStream->usage;
2375 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT &&
2376 newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {
2377 if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER)
2378 video_stream_idx = (int32_t)i;
2379 else
2380 preview_stream_idx[preview_stream_cnt++] = (int32_t)i;
2381 }
2382 }
2383 // By default, preview stream TNR is disabled.
2384 // Enable TNR to the preview stream if all conditions below are satisfied:
2385 // 1. preview resolution == video resolution.
2386 // 2. video stream TNR is enabled.
2387 // 3. EIS2.0 OR is front camera (which wouldn't use EIS3 even if it's set)
2388 for (size_t i = 0; i < preview_stream_cnt && video_stream_idx != -1; i++) {
2389 camera3_stream_t *video_stream = streamList->streams[video_stream_idx];
2390 camera3_stream_t *preview_stream = streamList->streams[preview_stream_idx[i]];
2391 if (m_bTnrEnabled && m_bTnrVideo &&
2392 (isFront || (atoi(is_type_value) == IS_TYPE_EIS_2_0)) &&
2393 video_stream->width == preview_stream->width &&
2394 video_stream->height == preview_stream->height) {
2395 previewTnr[preview_stream_idx[i]] = true;
2396 }
2397 }
2398
Thierry Strudel3d639192016-09-09 11:52:26 -07002399 memset(&mStreamConfigInfo, 0, sizeof(cam_stream_size_info_t));
2400 /* Allocate channel objects for the requested streams */
2401 for (size_t i = 0; i < streamList->num_streams; i++) {
Binhao Line406f062017-05-03 14:39:44 -07002402
Thierry Strudel3d639192016-09-09 11:52:26 -07002403 camera3_stream_t *newStream = streamList->streams[i];
2404 uint32_t stream_usage = newStream->usage;
2405 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)newStream->width;
2406 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)newStream->height;
2407 struct camera_info *p_info = NULL;
2408 pthread_mutex_lock(&gCamLock);
2409 p_info = get_cam_info(mCameraId, &mStreamConfigInfo.sync_type);
2410 pthread_mutex_unlock(&gCamLock);
2411 if ((newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
2412 || IS_USAGE_ZSL(newStream->usage)) &&
2413 newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED){
Thierry Strudel2896d122017-02-23 19:18:03 -08002414 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002415 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
Thierry Strudel2896d122017-02-23 19:18:03 -08002416 if (isOnEncoder(maxViewfinderSize, newStream->width, newStream->height)) {
2417 if (bUseCommonFeatureMask)
2418 zsl_ppmask = commonFeatureMask;
2419 else
2420 zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002421 } else {
Thierry Strudel2896d122017-02-23 19:18:03 -08002422 if (numStreamsOnEncoder > 0)
2423 zsl_ppmask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2424 else
2425 zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002426 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002427 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = zsl_ppmask;
Thierry Strudel3d639192016-09-09 11:52:26 -07002428 } else if(newStream->stream_type == CAMERA3_STREAM_INPUT) {
Thierry Strudel2896d122017-02-23 19:18:03 -08002429 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002430 LOGH("Input stream configured, reprocess config");
2431 } else {
2432 //for non zsl streams find out the format
2433 switch (newStream->format) {
2434 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED :
2435 {
Thierry Strudel2896d122017-02-23 19:18:03 -08002436 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002437 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2438 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2439 /* add additional features to pp feature mask */
2440 addToPPFeatureMask(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
2441 mStreamConfigInfo.num_streams);
2442
2443 if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) {
2444 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2445 CAM_STREAM_TYPE_VIDEO;
2446 if (m_bTnrEnabled && m_bTnrVideo) {
2447 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2448 CAM_QCOM_FEATURE_CPP_TNR;
2449 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
2450 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2451 ~CAM_QCOM_FEATURE_CDS;
2452 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002453 if (m_bEis3PropertyEnabled /* hint for EIS 3 needed here */) {
2454 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2455 CAM_QTI_FEATURE_PPEISCORE;
2456 }
Binhao Line406f062017-05-03 14:39:44 -07002457 if (is_goog_zoom_video_enabled && (is_goog_zoom_4k_enabled || !m_bIs4KVideo)) {
2458 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2459 CAM_QCOM_FEATURE_GOOG_ZOOM;
2460 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002461 } else {
2462 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2463 CAM_STREAM_TYPE_PREVIEW;
Jason Leea52b77e2017-06-27 16:16:17 -07002464 if (m_bTnrEnabled && (previewTnr[i] || m_bTnrPreview)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002465 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2466 CAM_QCOM_FEATURE_CPP_TNR;
2467 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
2468 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2469 ~CAM_QCOM_FEATURE_CDS;
2470 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002471 if(!m_bSwTnrPreview) {
2472 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2473 ~CAM_QTI_FEATURE_SW_TNR;
2474 }
Binhao Line406f062017-05-03 14:39:44 -07002475 if (is_goog_zoom_preview_enabled) {
2476 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2477 CAM_QCOM_FEATURE_GOOG_ZOOM;
2478 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002479 padding_info.width_padding = mSurfaceStridePadding;
2480 padding_info.height_padding = CAM_PAD_TO_2;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002481 previewSize.width = (int32_t)newStream->width;
2482 previewSize.height = (int32_t)newStream->height;
Thierry Strudel3d639192016-09-09 11:52:26 -07002483 }
2484 if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
2485 (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
2486 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2487 newStream->height;
2488 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2489 newStream->width;
2490 }
2491 }
2492 break;
2493 case HAL_PIXEL_FORMAT_YCbCr_420_888:
Thierry Strudel2896d122017-02-23 19:18:03 -08002494 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002495 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_CALLBACK;
2496 if (isOnEncoder(maxViewfinderSize, newStream->width, newStream->height)) {
2497 if (bUseCommonFeatureMask)
2498 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2499 commonFeatureMask;
2500 else
2501 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2502 CAM_QCOM_FEATURE_NONE;
2503 } else {
2504 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2505 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2506 }
2507 break;
2508 case HAL_PIXEL_FORMAT_BLOB:
Thierry Strudel2896d122017-02-23 19:18:03 -08002509 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002510 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
2511 // No need to check bSmallJpegSize if ZSL is present since JPEG uses ZSL stream
2512 if ((m_bIs4KVideo && !isZsl) || (bSmallJpegSize && !isZsl)) {
2513 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2514 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel2896d122017-02-23 19:18:03 -08002515 /* Remove rotation if it is not supported
2516 for 4K LiveVideo snapshot case (online processing) */
2517 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask &
2518 CAM_QCOM_FEATURE_ROTATION)) {
2519 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams]
2520 &= ~CAM_QCOM_FEATURE_ROTATION;
2521 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002522 } else {
2523 if (bUseCommonFeatureMask &&
2524 isOnEncoder(maxViewfinderSize, newStream->width,
2525 newStream->height)) {
2526 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = commonFeatureMask;
2527 } else {
2528 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2529 }
2530 }
2531 if (isZsl) {
2532 if (zslStream) {
2533 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2534 (int32_t)zslStream->width;
2535 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2536 (int32_t)zslStream->height;
Thierry Strudel2896d122017-02-23 19:18:03 -08002537 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2538 zsl_ppmask;
Thierry Strudel3d639192016-09-09 11:52:26 -07002539 } else {
2540 LOGE("Error, No ZSL stream identified");
2541 pthread_mutex_unlock(&mMutex);
2542 return -EINVAL;
2543 }
2544 } else if (m_bIs4KVideo) {
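                    // For 4K recording the snapshot is sourced from the video
                    // stream, so the BLOB stream is configured with the video
                    // dimensions here.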
2545 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)videoWidth;
2546 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)videoHeight;
2547 } else if (bYuv888OverrideJpeg) {
2548 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2549 (int32_t)largeYuv888Size.width;
2550 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2551 (int32_t)largeYuv888Size.height;
2552 }
2553 break;
2554 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2555 case HAL_PIXEL_FORMAT_RAW16:
2556 case HAL_PIXEL_FORMAT_RAW10:
2557 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
2558 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2559 isRawStreamRequested = true;
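            // For a PD stat stream (RAW16 + DEPTH dataspace), program the
            // sub-format, meta RAW format, data type (dt) and virtual channel (vc)
            // from the capability table entries at mPDIndex.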
Emilian Peev0f3c3162017-03-15 12:57:46 +00002560 if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
2561 (HAL_PIXEL_FORMAT_RAW16 == newStream->format)) {
2562 mStreamConfigInfo.sub_format_type[mStreamConfigInfo.num_streams] =
2563 gCamCapability[mCameraId]->sub_fmt[mPDIndex];
2564 mStreamConfigInfo.format[mStreamConfigInfo.num_streams] =
2565 gCamCapability[mCameraId]->supported_meta_raw_fmts[mPDIndex];
2566 mStreamConfigInfo.dt[mStreamConfigInfo.num_streams] =
2567 gCamCapability[mCameraId]->dt[mPDIndex];
2568 mStreamConfigInfo.vc[mStreamConfigInfo.num_streams] =
2569 gCamCapability[mCameraId]->vc[mPDIndex];
2570 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002571 break;
2572 default:
Thierry Strudel2896d122017-02-23 19:18:03 -08002573 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002574 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_DEFAULT;
2575 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2576 break;
2577 }
2578 }
2579
2580 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2581 (cam_stream_type_t) mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2582 gCamCapability[mCameraId]->color_arrangement);
2583
2584 if (newStream->priv == NULL) {
2585 //New stream, construct channel
2586 switch (newStream->stream_type) {
2587 case CAMERA3_STREAM_INPUT:
2588 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ;
2589 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE; //WR for in-place algorithms
2590 break;
2591 case CAMERA3_STREAM_BIDIRECTIONAL:
2592 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ |
2593 GRALLOC_USAGE_HW_CAMERA_WRITE;
2594 break;
2595 case CAMERA3_STREAM_OUTPUT:
2596 /* For video encoding streams, set the read/write rarely
2597 * flags so that the buffers may be allocated un-cached */
2598 if (newStream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER)
2599 newStream->usage |=
2600 (GRALLOC_USAGE_SW_READ_RARELY |
2601 GRALLOC_USAGE_SW_WRITE_RARELY |
2602 GRALLOC_USAGE_HW_CAMERA_WRITE);
2603 else if (IS_USAGE_ZSL(newStream->usage))
2604 {
2605 LOGD("ZSL usage flag skipping");
2606 }
2607 else if (newStream == zslStream
2608 || newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
2609 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_ZSL;
2610 } else
2611 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;
2612 break;
2613 default:
2614 LOGE("Invalid stream_type %d", newStream->stream_type);
2615 break;
2616 }
2617
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002618 bool forcePreviewUBWC = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07002619 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT ||
2620 newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
2621 QCamera3ProcessingChannel *channel = NULL;
2622 switch (newStream->format) {
2623 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
2624 if ((newStream->usage &
2625 private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) &&
2626 (streamList->operation_mode ==
2627 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2628 ) {
2629 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2630 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002631 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002632 this,
2633 newStream,
2634 (cam_stream_type_t)
2635 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2636 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2637 mMetadataChannel,
2638 0); //heap buffers are not required for HFR video channel
2639 if (channel == NULL) {
2640 LOGE("allocation of channel failed");
2641 pthread_mutex_unlock(&mMutex);
2642 return -ENOMEM;
2643 }
2644 //channel->getNumBuffers() will return 0 here so use
2645 //MAX_INFLIGHT_HFR_REQUESTS
2646 newStream->max_buffers = MAX_INFLIGHT_HFR_REQUESTS;
2647 newStream->priv = channel;
2648 LOGI("num video buffers in HFR mode: %d",
2649 MAX_INFLIGHT_HFR_REQUESTS);
2650 } else {
2651 /* Copy stream contents in HFR preview only case to create
2652 * dummy batch channel so that sensor streaming is in
2653 * HFR mode */
2654 if (!m_bIsVideo && (streamList->operation_mode ==
2655 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)) {
2656 mDummyBatchStream = *newStream;
2657 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002658 int bufferCount = MAX_INFLIGHT_REQUESTS;
2659 if (mStreamConfigInfo.type[mStreamConfigInfo.num_streams] ==
2660 CAM_STREAM_TYPE_VIDEO) {
Zhijun He6cdf6372017-07-15 14:59:58 -07002661 if (m_bEis3PropertyEnabled /* hint for EIS 3 needed here */) {
2662 // WAR: 4K video can only run <=30fps, reduce the buffer count.
2663 bufferCount = m_bIs4KVideo ?
2664 MAX_30FPS_VIDEO_BUFFERS : MAX_VIDEO_BUFFERS;
2665 }
2666
Thierry Strudel2896d122017-02-23 19:18:03 -08002667 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002668 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2669 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002670 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002671 this,
2672 newStream,
2673 (cam_stream_type_t)
2674 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2675 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2676 mMetadataChannel,
Thierry Strudel2896d122017-02-23 19:18:03 -08002677 bufferCount);
Thierry Strudel3d639192016-09-09 11:52:26 -07002678 if (channel == NULL) {
2679 LOGE("allocation of channel failed");
2680 pthread_mutex_unlock(&mMutex);
2681 return -ENOMEM;
2682 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002683 /* disable UBWC for preview, though supported,
2684 * to take advantage of CPP duplication */
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002685 if (m_bIsVideo && (!QCameraCommon::isVideoUBWCEnabled()) &&
Thierry Strudel2896d122017-02-23 19:18:03 -08002686 (previewSize.width == (int32_t)videoWidth)&&
2687 (previewSize.height == (int32_t)videoHeight)){
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002688 forcePreviewUBWC = false;
Thierry Strudel2896d122017-02-23 19:18:03 -08002689 }
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002690 channel->setUBWCEnabled(forcePreviewUBWC);
Binhao Line406f062017-05-03 14:39:44 -07002691 /* When goog_zoom is linked to the preview or video stream,
2692 * disable ubwc to the linked stream */
2693 if ((mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &
2694 CAM_QCOM_FEATURE_GOOG_ZOOM) != 0) {
2695 channel->setUBWCEnabled(false);
2696 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002697 newStream->max_buffers = channel->getNumBuffers();
2698 newStream->priv = channel;
2699 }
2700 break;
2701 case HAL_PIXEL_FORMAT_YCbCr_420_888: {
2702 channel = new QCamera3YUVChannel(mCameraHandle->camera_handle,
2703 mChannelHandle,
2704 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002705 setBufferErrorStatus, &padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002706 this,
2707 newStream,
2708 (cam_stream_type_t)
2709 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2710 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2711 mMetadataChannel);
2712 if (channel == NULL) {
2713 LOGE("allocation of YUV channel failed");
2714 pthread_mutex_unlock(&mMutex);
2715 return -ENOMEM;
2716 }
2717 newStream->max_buffers = channel->getNumBuffers();
2718 newStream->priv = channel;
2719 break;
2720 }
2721 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2722 case HAL_PIXEL_FORMAT_RAW16:
Emilian Peev0f3c3162017-03-15 12:57:46 +00002723 case HAL_PIXEL_FORMAT_RAW10: {
2724 bool isRAW16 = ((newStream->format == HAL_PIXEL_FORMAT_RAW16) &&
2725 (HAL_DATASPACE_DEPTH != newStream->data_space))
2726 ? true : false;
Thierry Strudel3d639192016-09-09 11:52:26 -07002727 mRawChannel = new QCamera3RawChannel(
2728 mCameraHandle->camera_handle, mChannelHandle,
2729 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002730 setBufferErrorStatus, &padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002731 this, newStream,
2732 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
Emilian Peev0f3c3162017-03-15 12:57:46 +00002733 mMetadataChannel, isRAW16);
Thierry Strudel3d639192016-09-09 11:52:26 -07002734 if (mRawChannel == NULL) {
2735 LOGE("allocation of raw channel failed");
2736 pthread_mutex_unlock(&mMutex);
2737 return -ENOMEM;
2738 }
2739 newStream->max_buffers = mRawChannel->getNumBuffers();
2740 newStream->priv = (QCamera3ProcessingChannel*)mRawChannel;
2741 break;
Emilian Peev0f3c3162017-03-15 12:57:46 +00002742 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002743 case HAL_PIXEL_FORMAT_BLOB:
Emilian Peev7650c122017-01-19 08:24:33 -08002744 if (newStream->data_space == HAL_DATASPACE_DEPTH) {
2745 mDepthChannel = new QCamera3DepthChannel(
2746 mCameraHandle->camera_handle, mChannelHandle,
2747 mCameraHandle->ops, NULL, NULL, &padding_info,
2748 0, this, MAX_INFLIGHT_REQUESTS, newStream,
2749 mMetadataChannel);
2750 if (NULL == mDepthChannel) {
2751 LOGE("Allocation of depth channel failed");
2752 pthread_mutex_unlock(&mMutex);
2753 return NO_MEMORY;
2754 }
2755 newStream->priv = mDepthChannel;
2756 newStream->max_buffers = MAX_INFLIGHT_REQUESTS;
2757 } else {
2758 // Max live snapshot inflight buffer is 1. This is to mitigate
2759 // frame drop issues for video snapshot. The more buffers being
2760 // allocated, the more frame drops there are.
2761 mPictureChannel = new QCamera3PicChannel(
2762 mCameraHandle->camera_handle, mChannelHandle,
2763 mCameraHandle->ops, captureResultCb,
2764 setBufferErrorStatus, &padding_info, this, newStream,
2765 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2766 m_bIs4KVideo, isZsl, mMetadataChannel,
2767 (m_bIsVideo ? 1 : MAX_INFLIGHT_BLOB));
2768 if (mPictureChannel == NULL) {
2769 LOGE("allocation of channel failed");
2770 pthread_mutex_unlock(&mMutex);
2771 return -ENOMEM;
2772 }
2773 newStream->priv = (QCamera3ProcessingChannel*)mPictureChannel;
2774 newStream->max_buffers = mPictureChannel->getNumBuffers();
2775 mPictureChannel->overrideYuvSize(
2776 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width,
2777 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height);
Thierry Strudel3d639192016-09-09 11:52:26 -07002778 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002779 break;
2780
2781 default:
2782 LOGE("not a supported format 0x%x", newStream->format);
Thierry Strudel73e91562017-05-15 09:16:18 -07002783 pthread_mutex_unlock(&mMutex);
2784 return -EINVAL;
Thierry Strudel3d639192016-09-09 11:52:26 -07002785 }
2786 } else if (newStream->stream_type == CAMERA3_STREAM_INPUT) {
2787 newStream->max_buffers = MAX_INFLIGHT_REPROCESS_REQUESTS;
2788 } else {
2789 LOGE("Error, Unknown stream type");
2790 pthread_mutex_unlock(&mMutex);
2791 return -EINVAL;
2792 }
2793
2794 QCamera3Channel *channel = (QCamera3Channel*) newStream->priv;
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002795 if (channel != NULL && QCamera3Channel::isUBWCEnabled()) {
Jason Leec4cf5032017-05-24 18:31:41 -07002796 // Here we only care whether it's EIS3 or not
2797 cam_is_type_t isType = m_bEis3PropertyEnabled ? IS_TYPE_EIS_3_0 : IS_TYPE_NONE;
2798 if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
2799 mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2800 isType = IS_TYPE_NONE;
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002801 cam_format_t fmt = QCamera3Channel::getStreamDefaultFormat(
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07002802 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
Jason Leec4cf5032017-05-24 18:31:41 -07002803 newStream->width, newStream->height, forcePreviewUBWC, isType);
Thierry Strudel3d639192016-09-09 11:52:26 -07002804 if(fmt == CAM_FORMAT_YUV_420_NV12_UBWC) {
2805 newStream->usage |= GRALLOC_USAGE_PRIVATE_ALLOC_UBWC;
2806 }
2807 }
2808
2809 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
2810 it != mStreamInfo.end(); it++) {
2811 if ((*it)->stream == newStream) {
2812 (*it)->channel = (QCamera3ProcessingChannel*) newStream->priv;
2813 break;
2814 }
2815 }
2816 } else {
2817 // Channel already exists for this stream
2818 // Do nothing for now
2819 }
2820 padding_info = gCamCapability[mCameraId]->padding_info;
2821
Emilian Peev7650c122017-01-19 08:24:33 -08002822 /* Do not add entries for the input & depth streams in metastream info
Thierry Strudel3d639192016-09-09 11:52:26 -07002823 * since there is no real stream associated with them
2824 */
Emilian Peev7650c122017-01-19 08:24:33 -08002825 if ((newStream->stream_type != CAMERA3_STREAM_INPUT) &&
Emilian Peev0f3c3162017-03-15 12:57:46 +00002826 !((newStream->data_space == HAL_DATASPACE_DEPTH) &&
2827 (newStream->format == HAL_PIXEL_FORMAT_BLOB))) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002828 mStreamConfigInfo.num_streams++;
Emilian Peev7650c122017-01-19 08:24:33 -08002829 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002830 }
2831
Chien-Yu Chen3f303522017-05-19 15:21:45 -07002832 // Let buffer dispatcher know the configured streams.
2833 mOutputBufferDispatcher.configureStreams(streamList);
2834
Thierry Strudel2896d122017-02-23 19:18:03 -08002835 if (mOpMode != QCAMERA3_VENDOR_STREAM_CONFIGURATION_RAW_ONLY_MODE) {
2836 onlyRaw = false;
2837 }
2838
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002839 // Create analysis stream all the time, even when h/w support is not available
Thierry Strudel2896d122017-02-23 19:18:03 -08002840 if (!onlyRaw) {
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002841 cam_feature_mask_t analysisFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002842 cam_analysis_info_t analysisInfo;
2843 int32_t ret = NO_ERROR;
2844 ret = mCommon.getAnalysisInfo(
2845 FALSE,
2846 analysisFeatureMask,
2847 &analysisInfo);
2848 if (ret == NO_ERROR) {
Shuzhen Wang3b457d92016-08-03 08:46:59 -07002849 cam_color_filter_arrangement_t analysis_color_arrangement =
2850 (analysisInfo.analysis_format == CAM_FORMAT_Y_ONLY ?
2851 CAM_FILTER_ARRANGEMENT_Y :
2852 gCamCapability[mCameraId]->color_arrangement);
2853 setPAAFSupport(analysisFeatureMask, CAM_STREAM_TYPE_ANALYSIS,
2854 analysis_color_arrangement);
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002855 cam_dimension_t analysisDim;
2856 analysisDim = mCommon.getMatchingDimension(previewSize,
2857 analysisInfo.analysis_recommended_res);
2858
2859 mAnalysisChannel = new QCamera3SupportChannel(
2860 mCameraHandle->camera_handle,
2861 mChannelHandle,
2862 mCameraHandle->ops,
2863 &analysisInfo.analysis_padding_info,
2864 analysisFeatureMask,
2865 CAM_STREAM_TYPE_ANALYSIS,
2866 &analysisDim,
2867 (analysisInfo.analysis_format
2868 == CAM_FORMAT_Y_ONLY ? CAM_FORMAT_Y_ONLY
2869 : CAM_FORMAT_YUV_420_NV21),
2870 analysisInfo.hw_analysis_supported,
2871 gCamCapability[mCameraId]->color_arrangement,
2872 this,
2873 0); // force buffer count to 0
2874 } else {
2875 LOGW("getAnalysisInfo failed, ret = %d", ret);
2876 }
2877 if (!mAnalysisChannel) {
2878 LOGW("Analysis channel cannot be created");
2879 }
2880 }
2881
Thierry Strudel3d639192016-09-09 11:52:26 -07002882 //RAW DUMP channel
2883 if (mEnableRawDump && isRawStreamRequested == false){
2884 cam_dimension_t rawDumpSize;
2885 rawDumpSize = getMaxRawSize(mCameraId);
2886 cam_feature_mask_t rawDumpFeatureMask = CAM_QCOM_FEATURE_NONE;
2887 setPAAFSupport(rawDumpFeatureMask,
2888 CAM_STREAM_TYPE_RAW,
2889 gCamCapability[mCameraId]->color_arrangement);
2890 mRawDumpChannel = new QCamera3RawDumpChannel(mCameraHandle->camera_handle,
2891 mChannelHandle,
2892 mCameraHandle->ops,
2893 rawDumpSize,
2894 &padding_info,
2895 this, rawDumpFeatureMask);
2896 if (!mRawDumpChannel) {
2897 LOGE("Raw Dump channel cannot be created");
2898 pthread_mutex_unlock(&mMutex);
2899 return -ENOMEM;
2900 }
2901 }
2902
Thierry Strudel3d639192016-09-09 11:52:26 -07002903 if (mAnalysisChannel) {
2904 cam_analysis_info_t analysisInfo;
2905 memset(&analysisInfo, 0, sizeof(cam_analysis_info_t));
2906 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2907 CAM_STREAM_TYPE_ANALYSIS;
2908 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2909 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002910 rc = mCommon.getAnalysisInfo(FALSE,
Thierry Strudel3d639192016-09-09 11:52:26 -07002911 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2912 &analysisInfo);
2913 if (rc != NO_ERROR) {
2914 LOGE("getAnalysisInfo failed, ret = %d", rc);
2915 pthread_mutex_unlock(&mMutex);
2916 return rc;
2917 }
Shuzhen Wang3b457d92016-08-03 08:46:59 -07002918 cam_color_filter_arrangement_t analysis_color_arrangement =
2919 (analysisInfo.analysis_format == CAM_FORMAT_Y_ONLY ?
2920 CAM_FILTER_ARRANGEMENT_Y :
2921 gCamCapability[mCameraId]->color_arrangement);
2922 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2923 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2924 analysis_color_arrangement);
2925
Thierry Strudel3d639192016-09-09 11:52:26 -07002926 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002927 mCommon.getMatchingDimension(previewSize,
2928 analysisInfo.analysis_recommended_res);
Thierry Strudel3d639192016-09-09 11:52:26 -07002929 mStreamConfigInfo.num_streams++;
2930 }
2931
Thierry Strudel2896d122017-02-23 19:18:03 -08002932 if (!onlyRaw && isSupportChannelNeeded(streamList, mStreamConfigInfo)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002933 cam_analysis_info_t supportInfo;
2934 memset(&supportInfo, 0, sizeof(cam_analysis_info_t));
2935 cam_feature_mask_t callbackFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2936 setPAAFSupport(callbackFeatureMask,
2937 CAM_STREAM_TYPE_CALLBACK,
2938 gCamCapability[mCameraId]->color_arrangement);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002939 int32_t ret = NO_ERROR;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002940 ret = mCommon.getAnalysisInfo(FALSE, callbackFeatureMask, &supportInfo);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002941 if (ret != NO_ERROR) {
2942 /* Ignore the error for Mono camera
2943 * because the PAAF bit mask is only set
2944 * for CAM_STREAM_TYPE_ANALYSIS stream type
2945 */
2946 if (gCamCapability[mCameraId]->color_arrangement != CAM_FILTER_ARRANGEMENT_Y) {
2947 LOGW("getAnalysisInfo failed, ret = %d", ret);
2948 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002949 }
2950 mSupportChannel = new QCamera3SupportChannel(
2951 mCameraHandle->camera_handle,
2952 mChannelHandle,
2953 mCameraHandle->ops,
2954 &gCamCapability[mCameraId]->padding_info,
2955 callbackFeatureMask,
2956 CAM_STREAM_TYPE_CALLBACK,
2957 &QCamera3SupportChannel::kDim,
2958 CAM_FORMAT_YUV_420_NV21,
2959 supportInfo.hw_analysis_supported,
2960 gCamCapability[mCameraId]->color_arrangement,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002961 this, 0);
Thierry Strudel3d639192016-09-09 11:52:26 -07002962 if (!mSupportChannel) {
2963 LOGE("dummy channel cannot be created");
2964 pthread_mutex_unlock(&mMutex);
2965 return -ENOMEM;
2966 }
2967 }
2968
2969 if (mSupportChannel) {
2970 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2971 QCamera3SupportChannel::kDim;
2972 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2973 CAM_STREAM_TYPE_CALLBACK;
2974 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2975 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2976 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2977 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2978 gCamCapability[mCameraId]->color_arrangement);
2979 mStreamConfigInfo.num_streams++;
2980 }
2981
2982 if (mRawDumpChannel) {
2983 cam_dimension_t rawSize;
2984 rawSize = getMaxRawSize(mCameraId);
2985 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2986 rawSize;
2987 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2988 CAM_STREAM_TYPE_RAW;
2989 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2990 CAM_QCOM_FEATURE_NONE;
2991 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2992 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2993 gCamCapability[mCameraId]->color_arrangement);
2994 mStreamConfigInfo.num_streams++;
2995 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07002996
2997 if (mHdrPlusRawSrcChannel) {
2998 cam_dimension_t rawSize;
2999 rawSize = getMaxRawSize(mCameraId);
3000 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] = rawSize;
3001 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
3002 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
3003 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
3004 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
3005 gCamCapability[mCameraId]->color_arrangement);
3006 mStreamConfigInfo.num_streams++;
3007 }
3008
Thierry Strudel3d639192016-09-09 11:52:26 -07003009 /* In HFR mode, if no video stream is added, create a dummy channel so that
3010 * the ISP can still set up batch mode even for the preview-only case. This
3011 * channel is never 'start'ed (no stream-on); it is only 'initialized' */
3012 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
3013 !m_bIsVideo) {
3014 cam_feature_mask_t dummyFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
3015 setPAAFSupport(dummyFeatureMask,
3016 CAM_STREAM_TYPE_VIDEO,
3017 gCamCapability[mCameraId]->color_arrangement);
3018 mDummyBatchChannel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
3019 mChannelHandle,
3020 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003021 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07003022 this,
3023 &mDummyBatchStream,
3024 CAM_STREAM_TYPE_VIDEO,
3025 dummyFeatureMask,
3026 mMetadataChannel);
3027 if (NULL == mDummyBatchChannel) {
3028 LOGE("creation of mDummyBatchChannel failed."
3029 " Preview will use non-hfr sensor mode");
3030 }
3031 }
3032 if (mDummyBatchChannel) {
3033 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
3034 mDummyBatchStream.width;
3035 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
3036 mDummyBatchStream.height;
3037 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
3038 CAM_STREAM_TYPE_VIDEO;
3039 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
3040 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
3041 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
3042 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
3043 gCamCapability[mCameraId]->color_arrangement);
3044 mStreamConfigInfo.num_streams++;
3045 }
3046
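    // Advertise the overall buffer budget to the backend. Reading the nested ternary
    // below: 4K video reports 0 (the backend then appears to rely on the per-stream
    // buffer counts), EIS 3.0 video asks for MAX_VIDEO_BUFFERS, and every other
    // configuration asks for MAX_INFLIGHT_REQUESTS.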
3047 mStreamConfigInfo.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
3048 mStreamConfigInfo.buffer_info.max_buffers =
Thierry Strudel2896d122017-02-23 19:18:03 -08003049 m_bIs4KVideo ? 0 :
Jason Leea46ad5e2017-07-07 15:20:56 -07003050 m_bEis3PropertyEnabled && m_bIsVideo ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07003051
3052 /* Initialize mPendingRequestInfo and mPendingBuffersMap */
3053 for (pendingRequestIterator i = mPendingRequestsList.begin();
3054 i != mPendingRequestsList.end();) {
3055 i = erasePendingRequest(i);
3056 }
3057 mPendingFrameDropList.clear();
3058 // Initialize/Reset the pending buffers list
3059 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
3060 req.mPendingBufferList.clear();
3061 }
3062 mPendingBuffersMap.mPendingBuffersInRequest.clear();
Emilian Peev30522a12017-08-03 14:36:33 +01003063 mExpectedInflightDuration = 0;
3064 mExpectedFrameDuration = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07003065
Thierry Strudel3d639192016-09-09 11:52:26 -07003066 mCurJpegMeta.clear();
3067 //Get min frame duration for this stream configuration
3068 deriveMinFrameDuration();
3069
Chien-Yu Chenee335912017-02-09 17:53:20 -08003070 mFirstPreviewIntentSeen = false;
3071
Thierry Strudel3d639192016-09-09 11:52:26 -07003072 // Update state
3073 mState = CONFIGURED;
3074
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003075 mFirstMetadataCallback = true;
3076
Thierry Strudel3d639192016-09-09 11:52:26 -07003077 pthread_mutex_unlock(&mMutex);
3078
3079 return rc;
3080}
3081
3082/*===========================================================================
3083 * FUNCTION : validateCaptureRequest
3084 *
3085 * DESCRIPTION: validate a capture request from camera service
3086 *
3087 * PARAMETERS :
3088 * @request : request from framework to process
3089 *
3090 * RETURN :
3091 *
3092 *==========================================================================*/
3093int QCamera3HardwareInterface::validateCaptureRequest(
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003094 camera3_capture_request_t *request,
3095 List<InternalRequest> &internallyRequestedStreams)
Thierry Strudel3d639192016-09-09 11:52:26 -07003096{
3097 ssize_t idx = 0;
3098 const camera3_stream_buffer_t *b;
3099 CameraMetadata meta;
3100
3101 /* Sanity check the request */
3102 if (request == NULL) {
3103 LOGE("NULL capture request");
3104 return BAD_VALUE;
3105 }
3106
3107 if ((request->settings == NULL) && (mState == CONFIGURED)) {
3108 /*settings cannot be null for the first request*/
3109 return BAD_VALUE;
3110 }
3111
3112 uint32_t frameNumber = request->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003113 if ((request->num_output_buffers < 1 || request->output_buffers == NULL)
3114 && (internallyRequestedStreams.size() == 0)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003115 LOGE("Request %d: No output buffers provided!",
3116 frameNumber);
3117 return BAD_VALUE;
3118 }
3119 if (request->num_output_buffers >= MAX_NUM_STREAMS) {
3120 LOGE("Number of buffers %d equals or is greater than maximum number of streams %d!",
3121 request->num_output_buffers, MAX_NUM_STREAMS);
3122 return BAD_VALUE;
3123 }
3124 if (request->input_buffer != NULL) {
3125 b = request->input_buffer;
3126 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
3127 LOGE("Request %d: Buffer %ld: Status not OK!",
3128 frameNumber, (long)idx);
3129 return BAD_VALUE;
3130 }
3131 if (b->release_fence != -1) {
3132 LOGE("Request %d: Buffer %ld: Has a release fence!",
3133 frameNumber, (long)idx);
3134 return BAD_VALUE;
3135 }
3136 if (b->buffer == NULL) {
3137 LOGE("Request %d: Buffer %ld: NULL buffer handle!",
3138 frameNumber, (long)idx);
3139 return BAD_VALUE;
3140 }
3141 }
3142
3143 // Validate all buffers
3144 b = request->output_buffers;
Thierry Strudel54dc9782017-02-15 12:12:10 -08003145 if (b == NULL) {
3146 return BAD_VALUE;
3147 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003148 while (idx < (ssize_t)request->num_output_buffers) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003149 QCamera3ProcessingChannel *channel =
3150 static_cast<QCamera3ProcessingChannel*>(b->stream->priv);
3151 if (channel == NULL) {
3152 LOGE("Request %d: Buffer %ld: Unconfigured stream!",
3153 frameNumber, (long)idx);
3154 return BAD_VALUE;
3155 }
3156 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
3157 LOGE("Request %d: Buffer %ld: Status not OK!",
3158 frameNumber, (long)idx);
3159 return BAD_VALUE;
3160 }
3161 if (b->release_fence != -1) {
3162 LOGE("Request %d: Buffer %ld: Has a release fence!",
3163 frameNumber, (long)idx);
3164 return BAD_VALUE;
3165 }
3166 if (b->buffer == NULL) {
3167 LOGE("Request %d: Buffer %ld: NULL buffer handle!",
3168 frameNumber, (long)idx);
3169 return BAD_VALUE;
3170 }
3171 if (*(b->buffer) == NULL) {
3172 LOGE("Request %d: Buffer %ld: NULL private handle!",
3173 frameNumber, (long)idx);
3174 return BAD_VALUE;
3175 }
3176 idx++;
3177 b = request->output_buffers + idx;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003178 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003179 return NO_ERROR;
3180}
3181
3182/*===========================================================================
3183 * FUNCTION : deriveMinFrameDuration
3184 *
3185 * DESCRIPTION: derive minimum processed, jpeg, and raw frame durations based
3186 * on currently configured streams.
3187 *
3188 * PARAMETERS : NONE
3189 *
3190 * RETURN : NONE
3191 *
3192 *==========================================================================*/
3193void QCamera3HardwareInterface::deriveMinFrameDuration()
3194{
3195 int32_t maxJpegDim, maxProcessedDim, maxRawDim;
Jason Lee2d0ab112017-06-21 18:03:05 -07003196 bool hasRaw = false;
3197
3198 mMinRawFrameDuration = 0;
3199 mMinJpegFrameDuration = 0;
3200 mMinProcessedFrameDuration = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07003201
3202 maxJpegDim = 0;
3203 maxProcessedDim = 0;
3204 maxRawDim = 0;
3205
3206 // Figure out maximum jpeg, processed, and raw dimensions
3207 for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
3208 it != mStreamInfo.end(); it++) {
3209
3210 // Input stream doesn't have valid stream_type
3211 if ((*it)->stream->stream_type == CAMERA3_STREAM_INPUT)
3212 continue;
3213
3214 int32_t dimension = (int32_t)((*it)->stream->width * (*it)->stream->height);
3215 if ((*it)->stream->format == HAL_PIXEL_FORMAT_BLOB) {
3216 if (dimension > maxJpegDim)
3217 maxJpegDim = dimension;
3218 } else if ((*it)->stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
3219 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW10 ||
3220 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW16) {
Jason Lee2d0ab112017-06-21 18:03:05 -07003221 hasRaw = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07003222 if (dimension > maxRawDim)
3223 maxRawDim = dimension;
3224 } else {
3225 if (dimension > maxProcessedDim)
3226 maxProcessedDim = dimension;
3227 }
3228 }
3229
3230 size_t count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt,
3231 MAX_SIZES_CNT);
3232
3233 //Assume all jpeg dimensions are in processed dimensions.
3234 if (maxJpegDim > maxProcessedDim)
3235 maxProcessedDim = maxJpegDim;
3236 //Find the smallest raw dimension that is greater or equal to jpeg dimension
Jason Lee2d0ab112017-06-21 18:03:05 -07003237 if (hasRaw && maxProcessedDim > maxRawDim) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003238 maxRawDim = INT32_MAX;
3239
3240 for (size_t i = 0; i < count; i++) {
3241 int32_t dimension = gCamCapability[mCameraId]->raw_dim[i].width *
3242 gCamCapability[mCameraId]->raw_dim[i].height;
3243 if (dimension >= maxProcessedDim && dimension < maxRawDim)
3244 maxRawDim = dimension;
3245 }
3246 }
3247
3248 //Find minimum durations for processed, jpeg, and raw
3249 for (size_t i = 0; i < count; i++) {
3250 if (maxRawDim == gCamCapability[mCameraId]->raw_dim[i].width *
3251 gCamCapability[mCameraId]->raw_dim[i].height) {
3252 mMinRawFrameDuration = gCamCapability[mCameraId]->raw_min_duration[i];
3253 break;
3254 }
3255 }
3256 count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
3257 for (size_t i = 0; i < count; i++) {
3258 if (maxProcessedDim ==
3259 gCamCapability[mCameraId]->picture_sizes_tbl[i].width *
3260 gCamCapability[mCameraId]->picture_sizes_tbl[i].height) {
3261 mMinProcessedFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
3262 mMinJpegFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
3263 break;
3264 }
3265 }
3266}
3267
3268/*===========================================================================
3269 * FUNCTION : getMinFrameDuration
3270 *
3271 * DESCRIPTION: get minimum frame duration based on the minimum frame durations
3272 * derived for the current stream configuration and the current request configuration.
3273 *
3274 * PARAMETERS : @request: request sent by the frameworks
3275 *
3276 * RETURN : min frame duration for a particular request
3277 *
3278 *==========================================================================*/
3279int64_t QCamera3HardwareInterface::getMinFrameDuration(const camera3_capture_request_t *request)
3280{
3281 bool hasJpegStream = false;
3282 bool hasRawStream = false;
3283 for (uint32_t i = 0; i < request->num_output_buffers; i ++) {
3284 const camera3_stream_t *stream = request->output_buffers[i].stream;
3285 if (stream->format == HAL_PIXEL_FORMAT_BLOB)
3286 hasJpegStream = true;
3287 else if (stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
3288 stream->format == HAL_PIXEL_FORMAT_RAW10 ||
3289 stream->format == HAL_PIXEL_FORMAT_RAW16)
3290 hasRawStream = true;
3291 }
3292
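    // Illustrative example (hypothetical durations, not taken from the capability
    // tables): with mMinProcessedFrameDuration = mMinRawFrameDuration = 33333333 ns
    // (30 fps) and mMinJpegFrameDuration = 50000000 ns (20 fps), a request that
    // includes a BLOB buffer is limited to 50000000 ns, while a request without one
    // is limited to 33333333 ns.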
3293 if (!hasJpegStream)
3294 return MAX(mMinRawFrameDuration, mMinProcessedFrameDuration);
3295 else
3296 return MAX(MAX(mMinRawFrameDuration, mMinProcessedFrameDuration), mMinJpegFrameDuration);
3297}
3298
3299/*===========================================================================
3300 * FUNCTION : handleBuffersDuringFlushLock
3301 *
3302 * DESCRIPTION: Account for buffers returned from back-end during flush
3303 * This function is executed while mMutex is held by the caller.
3304 *
3305 * PARAMETERS :
3306 * @buffer: image buffer for the callback
3307 *
3308 * RETURN :
3309 *==========================================================================*/
3310void QCamera3HardwareInterface::handleBuffersDuringFlushLock(camera3_stream_buffer_t *buffer)
3311{
3312 bool buffer_found = false;
3313 for (List<PendingBuffersInRequest>::iterator req =
3314 mPendingBuffersMap.mPendingBuffersInRequest.begin();
3315 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
3316 for (List<PendingBufferInfo>::iterator i =
3317 req->mPendingBufferList.begin();
3318 i != req->mPendingBufferList.end(); i++) {
3319 if (i->buffer == buffer->buffer) {
3320 mPendingBuffersMap.numPendingBufsAtFlush--;
3321 LOGD("Found buffer %p for Frame %d, numPendingBufsAtFlush = %d",
3322 buffer->buffer, req->frame_number,
3323 mPendingBuffersMap.numPendingBufsAtFlush);
3324 buffer_found = true;
3325 break;
3326 }
3327 }
3328 if (buffer_found) {
3329 break;
3330 }
3331 }
3332 if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
3333 //signal the flush()
3334 LOGD("All buffers returned to HAL. Continue flush");
3335 pthread_cond_signal(&mBuffersCond);
3336 }
3337}
3338
Thierry Strudel3d639192016-09-09 11:52:26 -07003339/*===========================================================================
3340 * FUNCTION : handleBatchMetadata
3341 *
3342 * DESCRIPTION: Handles metadata buffer callback in batch mode
3343 *
3344 * PARAMETERS : @metadata_buf: metadata buffer
3345 * @free_and_bufdone_meta_buf: Buf done on the meta buf and free
3346 * @free_and_bufdone_meta_buf: whether to do buf-done on the meta buf and
3347 * free it in this method
3348 * RETURN :
3349 *
3350 *==========================================================================*/
3351void QCamera3HardwareInterface::handleBatchMetadata(
3352 mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf)
3353{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003354 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_BATCH_METADATA);
Thierry Strudel3d639192016-09-09 11:52:26 -07003355
3356 if (NULL == metadata_buf) {
3357 LOGE("metadata_buf is NULL");
3358 return;
3359 }
3360 /* In batch mode, the metadata will contain the frame number and timestamp of
3361 * the last frame in the batch. Eg: a batch containing buffers from request
3362 * 5,6,7 and 8 will have frame number and timestamp corresponding to 8.
3363 * multiple process_capture_requests => 1 set_param => 1 handleBatchMetadata =>
3364 * multiple process_capture_results */
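    /* Continuing the example above: frameNumDiff would be 8 + 1 - 5 = 4, so the loop
     * further below re-emits the metadata four times with inferred frame numbers 5..8,
     * and timestamps are back-computed from the last capture time in steps of
     * NSEC_PER_SEC / mHFRVideoFps (about 8.3 ms per frame at an illustrative 120 fps). */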
3365 metadata_buffer_t *metadata =
3366 (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3367 int32_t frame_number_valid = 0, urgent_frame_number_valid = 0;
3368 uint32_t last_frame_number = 0, last_urgent_frame_number = 0;
3369 uint32_t first_frame_number = 0, first_urgent_frame_number = 0;
3370 uint32_t frame_number = 0, urgent_frame_number = 0;
3371 int64_t last_frame_capture_time = 0, first_frame_capture_time, capture_time;
3372 bool invalid_metadata = false;
3373 size_t urgentFrameNumDiff = 0, frameNumDiff = 0;
3374 size_t loopCount = 1;
Thierry Strudel54dc9782017-02-15 12:12:10 -08003375 bool is_metabuf_queued = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07003376
3377 int32_t *p_frame_number_valid =
3378 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3379 uint32_t *p_frame_number =
3380 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3381 int64_t *p_capture_time =
3382 POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
3383 int32_t *p_urgent_frame_number_valid =
3384 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
3385 uint32_t *p_urgent_frame_number =
3386 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
3387
3388 if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) ||
3389 (NULL == p_capture_time) || (NULL == p_urgent_frame_number_valid) ||
3390 (NULL == p_urgent_frame_number)) {
3391 LOGE("Invalid metadata");
3392 invalid_metadata = true;
3393 } else {
3394 frame_number_valid = *p_frame_number_valid;
3395 last_frame_number = *p_frame_number;
3396 last_frame_capture_time = *p_capture_time;
3397 urgent_frame_number_valid = *p_urgent_frame_number_valid;
3398 last_urgent_frame_number = *p_urgent_frame_number;
3399 }
3400
3401 /* In batchmode, when no video buffers are requested, set_parms are sent
3402 * for every capture_request. The difference between consecutive urgent
3403 * frame numbers and frame numbers should be used to interpolate the
3404 * corresponding frame numbers and time stamps */
3405 pthread_mutex_lock(&mMutex);
3406 if (urgent_frame_number_valid) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003407 ssize_t idx = mPendingBatchMap.indexOfKey(last_urgent_frame_number);
3408 if(idx < 0) {
3409 LOGE("Invalid urgent frame number received: %d. Irrecoverable error",
3410 last_urgent_frame_number);
3411 mState = ERROR;
3412 pthread_mutex_unlock(&mMutex);
3413 return;
3414 }
3415 first_urgent_frame_number = mPendingBatchMap.valueAt(idx);
Thierry Strudel3d639192016-09-09 11:52:26 -07003416 urgentFrameNumDiff = last_urgent_frame_number + 1 -
3417 first_urgent_frame_number;
3418
3419 LOGD("urgent_frm: valid: %d frm_num: %d - %d",
3420 urgent_frame_number_valid,
3421 first_urgent_frame_number, last_urgent_frame_number);
3422 }
3423
3424 if (frame_number_valid) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003425 ssize_t idx = mPendingBatchMap.indexOfKey(last_frame_number);
3426 if(idx < 0) {
3427 LOGE("Invalid frame number received: %d. Irrecoverable error",
3428 last_frame_number);
3429 mState = ERROR;
3430 pthread_mutex_unlock(&mMutex);
3431 return;
3432 }
3433 first_frame_number = mPendingBatchMap.valueAt(idx);
Thierry Strudel3d639192016-09-09 11:52:26 -07003434 frameNumDiff = last_frame_number + 1 -
3435 first_frame_number;
3436 mPendingBatchMap.removeItem(last_frame_number);
3437
3438 LOGD("frm: valid: %d frm_num: %d - %d",
3439 frame_number_valid,
3440 first_frame_number, last_frame_number);
3441
3442 }
3443 pthread_mutex_unlock(&mMutex);
3444
3445 if (urgent_frame_number_valid || frame_number_valid) {
3446 loopCount = MAX(urgentFrameNumDiff, frameNumDiff);
3447 if (urgentFrameNumDiff > MAX_HFR_BATCH_SIZE)
3448 LOGE("urgentFrameNumDiff: %d urgentFrameNum: %d",
3449 urgentFrameNumDiff, last_urgent_frame_number);
3450 if (frameNumDiff > MAX_HFR_BATCH_SIZE)
3451 LOGE("frameNumDiff: %d frameNum: %d",
3452 frameNumDiff, last_frame_number);
3453 }
3454
3455 for (size_t i = 0; i < loopCount; i++) {
3456 /* handleMetadataWithLock is called even for invalid_metadata for
3457 * pipeline depth calculation */
3458 if (!invalid_metadata) {
3459 /* Infer frame number. Batch metadata contains frame number of the
3460 * last frame */
3461 if (urgent_frame_number_valid) {
3462 if (i < urgentFrameNumDiff) {
3463 urgent_frame_number =
3464 first_urgent_frame_number + i;
3465 LOGD("inferred urgent frame_number: %d",
3466 urgent_frame_number);
3467 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3468 CAM_INTF_META_URGENT_FRAME_NUMBER, urgent_frame_number);
3469 } else {
3470 /* This is to handle when urgentFrameNumDiff < frameNumDiff */
3471 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3472 CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, 0);
3473 }
3474 }
3475
3476 /* Infer frame number. Batch metadata contains frame number of the
3477 * last frame */
3478 if (frame_number_valid) {
3479 if (i < frameNumDiff) {
3480 frame_number = first_frame_number + i;
3481 LOGD("inferred frame_number: %d", frame_number);
3482 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3483 CAM_INTF_META_FRAME_NUMBER, frame_number);
3484 } else {
3485 /* This is to handle when urgentFrameNumDiff > frameNumDiff */
3486 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3487 CAM_INTF_META_FRAME_NUMBER_VALID, 0);
3488 }
3489 }
3490
3491 if (last_frame_capture_time) {
3492 //Infer timestamp
3493 first_frame_capture_time = last_frame_capture_time -
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003494 (((loopCount - 1) * NSEC_PER_SEC) / (double) mHFRVideoFps);
Thierry Strudel3d639192016-09-09 11:52:26 -07003495 capture_time =
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003496 first_frame_capture_time + (i * NSEC_PER_SEC / (double) mHFRVideoFps);
Thierry Strudel3d639192016-09-09 11:52:26 -07003497 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3498 CAM_INTF_META_SENSOR_TIMESTAMP, capture_time);
3499 LOGD("batch capture_time: %lld, capture_time: %lld",
3500 last_frame_capture_time, capture_time);
3501 }
3502 }
3503 pthread_mutex_lock(&mMutex);
3504 handleMetadataWithLock(metadata_buf,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003505 false /* free_and_bufdone_meta_buf */,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003506 (i == urgentFrameNumDiff-1), /* last urgent metadata in the batch */
3507 (i == frameNumDiff-1), /* last metadata in the batch metadata */
Thierry Strudel54dc9782017-02-15 12:12:10 -08003508 &is_metabuf_queued /* whether the metabuf is queued or not */);
Thierry Strudel3d639192016-09-09 11:52:26 -07003509 pthread_mutex_unlock(&mMutex);
3510 }
3511
3512 /* BufDone metadata buffer */
Thierry Strudel54dc9782017-02-15 12:12:10 -08003513 if (free_and_bufdone_meta_buf && !is_metabuf_queued) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003514 mMetadataChannel->bufDone(metadata_buf);
3515 free(metadata_buf);
Thierry Strudel54dc9782017-02-15 12:12:10 -08003516 metadata_buf = NULL;
Thierry Strudel3d639192016-09-09 11:52:26 -07003517 }
3518}
3519
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003520void QCamera3HardwareInterface::notifyError(uint32_t frameNumber,
3521 camera3_error_msg_code_t errorCode)
3522{
3523 camera3_notify_msg_t notify_msg;
3524 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
3525 notify_msg.type = CAMERA3_MSG_ERROR;
3526 notify_msg.message.error.error_code = errorCode;
3527 notify_msg.message.error.error_stream = NULL;
3528 notify_msg.message.error.frame_number = frameNumber;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003529 orchestrateNotify(&notify_msg);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003530
3531 return;
3532}
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003533
3534/*===========================================================================
3535 * FUNCTION : sendPartialMetadataWithLock
3536 *
3537 * DESCRIPTION: Send partial capture result callback with mMutex lock held.
3538 *
3539 * PARAMETERS : @metadata: metadata buffer
3540 * @requestIter: The iterator for the pending capture request for
3541 * which the partial result is being sen
3542 * which the partial result is being sent
3543 * last urgent metadata in a batch. Always true for non-batch mode
Shuzhen Wang485e2442017-08-02 12:21:08 -07003544 * @isJumpstartMetadata: Whether this is a partial metadata for
3545 * jumpstart, i.e. even though it doesn't map to a valid partial
3546 * frame number, its metadata entries should be kept.
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003547 *
3548 * RETURN :
3549 *
3550 *==========================================================================*/
3551
3552void QCamera3HardwareInterface::sendPartialMetadataWithLock(
3553 metadata_buffer_t *metadata,
3554 const pendingRequestIterator requestIter,
Shuzhen Wang485e2442017-08-02 12:21:08 -07003555 bool lastUrgentMetadataInBatch,
3556 bool isJumpstartMetadata)
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003557{
3558 camera3_capture_result_t result;
3559 memset(&result, 0, sizeof(camera3_capture_result_t));
3560
3561 requestIter->partial_result_cnt++;
3562
3563 // Extract 3A metadata
3564 result.result = translateCbUrgentMetadataToResultMetadata(
Shuzhen Wang485e2442017-08-02 12:21:08 -07003565 metadata, lastUrgentMetadataInBatch, requestIter->frame_number,
3566 isJumpstartMetadata);
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003567 // Populate metadata result
3568 result.frame_number = requestIter->frame_number;
3569 result.num_output_buffers = 0;
3570 result.output_buffers = NULL;
3571 result.partial_result = requestIter->partial_result_cnt;
3572
3573 {
Chien-Yu Chen77ccd022017-06-23 12:00:36 -07003574 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003575 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
3576 // Notify HDR+ client about the partial metadata.
3577 gHdrPlusClient->notifyFrameMetadata(result.frame_number, *result.result,
3578 result.partial_result == PARTIAL_RESULT_COUNT);
3579 }
3580 }
3581
3582 orchestrateResult(&result);
3583 LOGD("urgent frame_number = %u", result.frame_number);
3584 free_camera_metadata((camera_metadata_t *)result.result);
3585}
3586
Thierry Strudel3d639192016-09-09 11:52:26 -07003587/*===========================================================================
3588 * FUNCTION : handleMetadataWithLock
3589 *
3590 * DESCRIPTION: Handles metadata buffer callback with mMutex lock held.
3591 *
3592 * PARAMETERS : @metadata_buf: metadata buffer
3593 * @free_and_bufdone_meta_buf: whether to do buf-done on the meta buf and
3594 * free it in this method
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003595 * @lastUrgentMetadataInBatch: Boolean to indicate whether this is the
3596 * last urgent metadata in a batch. Always true for non-batch mode
3597 * @lastMetadataInBatch: Boolean to indicate whether this is the
3598 * last metadata in a batch. Always true for non-batch mode
Thierry Strudel54dc9782017-02-15 12:12:10 -08003599 * @p_is_metabuf_queued: Pointer to Boolean to check if metadata
3600 * buffer is enqueued or not.
Thierry Strudel3d639192016-09-09 11:52:26 -07003601 *
3602 * RETURN :
3603 *
3604 *==========================================================================*/
3605void QCamera3HardwareInterface::handleMetadataWithLock(
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003606 mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003607 bool lastUrgentMetadataInBatch, bool lastMetadataInBatch,
3608 bool *p_is_metabuf_queued)
Thierry Strudel3d639192016-09-09 11:52:26 -07003609{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003610 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_METADATA_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07003611 if ((mFlushPerf) || (ERROR == mState) || (DEINIT == mState)) {
3612 //during flush do not send metadata from this thread
3613 LOGD("not sending metadata during flush or when mState is error");
3614 if (free_and_bufdone_meta_buf) {
3615 mMetadataChannel->bufDone(metadata_buf);
3616 free(metadata_buf);
3617 }
3618 return;
3619 }
3620
3621 //not in flush
3622 metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3623 int32_t frame_number_valid, urgent_frame_number_valid;
3624 uint32_t frame_number, urgent_frame_number;
Jason Lee603176d2017-05-31 11:43:27 -07003625 int64_t capture_time, capture_time_av;
Thierry Strudel3d639192016-09-09 11:52:26 -07003626 nsecs_t currentSysTime;
3627
3628 int32_t *p_frame_number_valid =
3629 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3630 uint32_t *p_frame_number = POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3631 int64_t *p_capture_time = POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
Jason Lee603176d2017-05-31 11:43:27 -07003632 int64_t *p_capture_time_av = POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP_AV, metadata);
Thierry Strudel3d639192016-09-09 11:52:26 -07003633 int32_t *p_urgent_frame_number_valid =
3634 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
3635 uint32_t *p_urgent_frame_number =
3636 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
3637 IF_META_AVAILABLE(cam_stream_ID_t, p_cam_frame_drop, CAM_INTF_META_FRAME_DROPPED,
3638 metadata) {
3639 LOGD("Dropped frame info for frame_number_valid %d, frame_number %d",
3640 *p_frame_number_valid, *p_frame_number);
3641 }
3642
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003643 camera_metadata_t *resultMetadata = nullptr;
3644
Thierry Strudel3d639192016-09-09 11:52:26 -07003645 if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) || (NULL == p_capture_time) ||
3646 (NULL == p_urgent_frame_number_valid) || (NULL == p_urgent_frame_number)) {
3647 LOGE("Invalid metadata");
3648 if (free_and_bufdone_meta_buf) {
3649 mMetadataChannel->bufDone(metadata_buf);
3650 free(metadata_buf);
3651 }
3652 goto done_metadata;
3653 }
3654 frame_number_valid = *p_frame_number_valid;
3655 frame_number = *p_frame_number;
3656 capture_time = *p_capture_time;
Jason Lee603176d2017-05-31 11:43:27 -07003657 capture_time_av = *p_capture_time_av;
Thierry Strudel3d639192016-09-09 11:52:26 -07003658 urgent_frame_number_valid = *p_urgent_frame_number_valid;
3659 urgent_frame_number = *p_urgent_frame_number;
3660 currentSysTime = systemTime(CLOCK_MONOTONIC);
3661
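    // If the sensor timestamps are not calibrated, estimate the offset between
    // CLOCK_BOOTTIME and CLOCK_MONOTONIC by bracketing one BOOTTIME read between two
    // MONOTONIC reads (best of three attempts, the smallest bracket wins) and subtract
    // it from capture_time, presumably to move the sensor timestamp onto the monotonic
    // timebase.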
Jason Lee603176d2017-05-31 11:43:27 -07003662 if (!gCamCapability[mCameraId]->timestamp_calibrated) {
3663 const int tries = 3;
3664 nsecs_t bestGap, measured;
3665 for (int i = 0; i < tries; ++i) {
3666 const nsecs_t tmono = systemTime(SYSTEM_TIME_MONOTONIC);
3667 const nsecs_t tbase = systemTime(SYSTEM_TIME_BOOTTIME);
3668 const nsecs_t tmono2 = systemTime(SYSTEM_TIME_MONOTONIC);
3669 const nsecs_t gap = tmono2 - tmono;
3670 if (i == 0 || gap < bestGap) {
3671 bestGap = gap;
3672 measured = tbase - ((tmono + tmono2) >> 1);
3673 }
3674 }
3675 capture_time -= measured;
3676 }
3677
Thierry Strudel3d639192016-09-09 11:52:26 -07003678 // Detect if buffers from any requests are overdue
3679 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003680 int64_t timeout;
3681 {
3682 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
3683 // If there is a pending HDR+ request, the following requests may be blocked until the
3684 // HDR+ request is done. So allow a longer timeout.
3685 timeout = (mHdrPlusPendingRequests.size() > 0) ?
3686 MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT : MISSING_REQUEST_BUF_TIMEOUT;
Emilian Peev30522a12017-08-03 14:36:33 +01003687 if (timeout < mExpectedInflightDuration) {
3688 timeout = mExpectedInflightDuration;
3689 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003690 }
3691
3692 if ( (currentSysTime - req.timestamp) > s2ns(timeout) ) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003693 for (auto &missed : req.mPendingBufferList) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003694 assert(missed.stream->priv);
3695 if (missed.stream->priv) {
3696 QCamera3Channel *ch = (QCamera3Channel *)(missed.stream->priv);
3697 assert(ch->mStreams[0]);
3698 if (ch->mStreams[0]) {
3699 LOGE("Cancel missing frame = %d, buffer = %p,"
3700 "stream type = %d, stream format = %d",
3701 req.frame_number, missed.buffer,
3702 ch->mStreams[0]->getMyType(), missed.stream->format);
3703 ch->timeoutFrame(req.frame_number);
3704 }
3705 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003706 }
3707 }
3708 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003709 //For the very first metadata callback, regardless of whether it contains a valid
3710 //frame number, send the partial metadata for the jumpstarting requests.
3711 //Note that this has to be done even if the metadata doesn't contain valid
3712 //urgent frame number, because in the case only 1 request is ever submitted
3713 //to HAL, there won't be subsequent valid urgent frame number.
3714 if (mFirstMetadataCallback) {
3715 for (pendingRequestIterator i =
3716 mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
3717 if (i->bUseFirstPartial) {
Shuzhen Wang485e2442017-08-02 12:21:08 -07003718 sendPartialMetadataWithLock(metadata, i, lastUrgentMetadataInBatch,
3719 true /*isJumpstartMetadata*/);
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003720 }
3721 }
3722 mFirstMetadataCallback = false;
3723 }
3724
Thierry Strudel3d639192016-09-09 11:52:26 -07003725 //Partial result on process_capture_result for timestamp
3726 if (urgent_frame_number_valid) {
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003727 LOGD("valid urgent frame_number = %u", urgent_frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003728
3729 //Received an urgent frame number, handle it
3730 //using partial results
3731 for (pendingRequestIterator i =
3732 mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
3733 LOGD("Iterator Frame = %d urgent frame = %d",
3734 i->frame_number, urgent_frame_number);
3735
Chien-Yu Chen29fd1d72017-04-27 18:42:09 -07003736 if ((!i->input_buffer) && (!i->hdrplus) && (i->frame_number < urgent_frame_number) &&
Thierry Strudel3d639192016-09-09 11:52:26 -07003737 (i->partial_result_cnt == 0)) {
3738 LOGE("Error: HAL missed urgent metadata for frame number %d",
3739 i->frame_number);
Shuzhen Wang1ee712a2017-03-22 17:51:26 -07003740 i->partial_result_cnt++;
Thierry Strudel3d639192016-09-09 11:52:26 -07003741 }
3742
3743 if (i->frame_number == urgent_frame_number &&
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003744 i->partial_result_cnt == 0) {
Shuzhen Wang485e2442017-08-02 12:21:08 -07003745 sendPartialMetadataWithLock(metadata, i, lastUrgentMetadataInBatch,
3746 false /*isJumpstartMetadata*/);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003747 if (mResetInstantAEC && mInstantAECSettledFrameNumber == 0) {
3748 // Instant AEC settled for this frame.
3749 LOGH("instant AEC settled for frame number %d", urgent_frame_number);
3750 mInstantAECSettledFrameNumber = urgent_frame_number;
3751 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003752 break;
3753 }
3754 }
3755 }
3756
3757 if (!frame_number_valid) {
3758 LOGD("Not a valid normal frame number, used as SOF only");
3759 if (free_and_bufdone_meta_buf) {
3760 mMetadataChannel->bufDone(metadata_buf);
3761 free(metadata_buf);
3762 }
3763 goto done_metadata;
3764 }
3765 LOGH("valid frame_number = %u, capture_time = %lld",
3766 frame_number, capture_time);
3767
Emilian Peev4e0fe952017-06-30 12:40:09 -07003768 handleDepthDataLocked(metadata->depth_data, frame_number,
3769 metadata->is_depth_data_valid);
Emilian Peev7650c122017-01-19 08:24:33 -08003770
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003771 // Check whether any stream buffer corresponding to this frame is dropped or not.
3772 // If dropped, send the ERROR_BUFFER for the corresponding stream.
3773 // OR, if instant AEC is enabled, drop frames until AEC has settled.
3774 for (auto & pendingRequest : mPendingRequestsList) {
3775 if (p_cam_frame_drop || (mInstantAEC || pendingRequest.frame_number <
3776 mInstantAECSettledFrameNumber)) {
3777 camera3_notify_msg_t notify_msg = {};
3778 for (auto & buffer : pendingRequest.buffers) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003779 bool dropFrame = false;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003780 QCamera3ProcessingChannel *channel =
3781 (QCamera3ProcessingChannel *)buffer.stream->priv;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003782 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003783 if (p_cam_frame_drop) {
3784 for (uint32_t k = 0; k < p_cam_frame_drop->num_streams; k++) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003785 if (streamID == p_cam_frame_drop->stream_request[k].streamID) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003786 // Got the stream ID for drop frame.
3787 dropFrame = true;
3788 break;
3789 }
3790 }
3791 } else {
3792 // This is instant AEC case.
3793 // For instant AEC, drop the stream until AEC is settled.
3794 dropFrame = true;
3795 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003796
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003797 if (dropFrame) {
3798 // Send Error notify to frameworks with CAMERA3_MSG_ERROR_BUFFER
3799 if (p_cam_frame_drop) {
3800 // Treat msg as error for system buffer drops
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003801 LOGE("Start of reporting error frame#=%u, streamID=%u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003802 pendingRequest.frame_number, streamID);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003803 } else {
3804 // For instant AEC, inform frame drop and frame number
3805 LOGH("Start of reporting error frame#=%u for instant AEC, streamID=%u, "
3806 "AEC settled frame number = %u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003807 pendingRequest.frame_number, streamID,
3808 mInstantAECSettledFrameNumber);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003809 }
3810 notify_msg.type = CAMERA3_MSG_ERROR;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003811 notify_msg.message.error.frame_number = pendingRequest.frame_number;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003812 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER ;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003813 notify_msg.message.error.error_stream = buffer.stream;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003814 orchestrateNotify(&notify_msg);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003815 if (p_cam_frame_drop) {
3816 // Treat msg as error for system buffer drops
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003817 LOGE("End of reporting error frame#=%u, streamID=%u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003818 pendingRequest.frame_number, streamID);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003819 } else {
3820 // For instant AEC, inform frame drop and frame number
3821 LOGH("End of reporting error frame#=%u for instant AEC, streamID=%u, "
3822 "AEC settled frame number = %u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003823 pendingRequest.frame_number, streamID,
3824 mInstantAECSettledFrameNumber);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003825 }
3826 PendingFrameDropInfo PendingFrameDrop;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003827 PendingFrameDrop.frame_number = pendingRequest.frame_number;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003828 PendingFrameDrop.stream_ID = streamID;
3829 // Add the Frame drop info to mPendingFrameDropList
3830 mPendingFrameDropList.push_back(PendingFrameDrop);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003831 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003832 }
3833 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003834 }
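    // Note: entries pushed onto mPendingFrameDropList above are consumed later in
    // handleBufferWithLock(), where a returned buffer whose (frame_number, streamID) matches a
    // recorded drop is marked CAMERA3_BUFFER_STATUS_ERROR before being dispatched.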
Thierry Strudel3d639192016-09-09 11:52:26 -07003835
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003836 for (auto & pendingRequest : mPendingRequestsList) {
3837 // Find the pending request with the frame number.
3838 if (pendingRequest.frame_number == frame_number) {
3839 // Update the sensor timestamp.
3840 pendingRequest.timestamp = capture_time;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003841
Thierry Strudel3d639192016-09-09 11:52:26 -07003842
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07003843 /* Set the timestamp in display metadata so that clients that are aware of
 3844 private_handle, such as VT, can use this unmodified timestamp.
 3845 The camera framework is unaware of this timestamp and cannot change it. */
Jason Lee603176d2017-05-31 11:43:27 -07003846 updateTimeStampInPendingBuffers(pendingRequest.frame_number, capture_time_av);
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07003847
Thierry Strudel3d639192016-09-09 11:52:26 -07003848 // Find channel requiring metadata, meaning internal offline postprocess
3849 // is needed.
3850 //TODO: for now, we don't support two streams requiring metadata at the same time.
 3851 // (because we are not making copies, and the metadata buffer is not reference counted.)
3852 bool internalPproc = false;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003853 for (pendingBufferIterator iter = pendingRequest.buffers.begin();
3854 iter != pendingRequest.buffers.end(); iter++) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003855 if (iter->need_metadata) {
3856 internalPproc = true;
3857 QCamera3ProcessingChannel *channel =
3858 (QCamera3ProcessingChannel *)iter->stream->priv;
3859 channel->queueReprocMetadata(metadata_buf);
Thierry Strudel54dc9782017-02-15 12:12:10 -08003860 if(p_is_metabuf_queued != NULL) {
3861 *p_is_metabuf_queued = true;
3862 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003863 break;
3864 }
3865 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003866 for (auto itr = pendingRequest.internalRequestList.begin();
3867 itr != pendingRequest.internalRequestList.end(); itr++) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003868 if (itr->need_metadata) {
3869 internalPproc = true;
3870 QCamera3ProcessingChannel *channel =
3871 (QCamera3ProcessingChannel *)itr->stream->priv;
3872 channel->queueReprocMetadata(metadata_buf);
3873 break;
3874 }
3875 }
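            // Note: internalPproc is left true when some output of this request needs
            // HAL-internal offline reprocessing; the metadata buffer was handed to that channel
            // via queueReprocMetadata() above and therefore must not be returned to
            // mMetadataChannel further below.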
3876
Thierry Strudel54dc9782017-02-15 12:12:10 -08003877 saveExifParams(metadata);
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07003878
3879 bool *enableZsl = nullptr;
3880 if (gExposeEnableZslKey) {
3881 enableZsl = &pendingRequest.enableZsl;
3882 }
3883
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003884 resultMetadata = translateFromHalMetadata(metadata,
Shuzhen Wang181c57b2017-07-21 11:39:44 -07003885 pendingRequest, internalPproc,
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07003886 lastMetadataInBatch, enableZsl);
Thierry Strudel3d639192016-09-09 11:52:26 -07003887
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003888 updateFpsInPreviewBuffer(metadata, pendingRequest.frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003889
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003890 if (pendingRequest.blob_request) {
3891 //Dump tuning metadata if enabled and available
3892 char prop[PROPERTY_VALUE_MAX];
3893 memset(prop, 0, sizeof(prop));
3894 property_get("persist.camera.dumpmetadata", prop, "0");
3895 int32_t enabled = atoi(prop);
3896 if (enabled && metadata->is_tuning_params_valid) {
3897 dumpMetadataToFile(metadata->tuning_params,
3898 mMetaFrameCount,
3899 enabled,
3900 "Snapshot",
3901 frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003902 }
3903 }
3904
3905 if (!internalPproc) {
3906 LOGD("couldn't find need_metadata for this metadata");
3907 // Return metadata buffer
3908 if (free_and_bufdone_meta_buf) {
3909 mMetadataChannel->bufDone(metadata_buf);
3910 free(metadata_buf);
3911 }
3912 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003913
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003914 break;
Thierry Strudel3d639192016-09-09 11:52:26 -07003915 }
3916 }
3917
Chien-Yu Chen3f303522017-05-19 15:21:45 -07003918 mShutterDispatcher.markShutterReady(frame_number, capture_time);
3919
3920 // Try to send out capture result metadata.
3921 handlePendingResultMetadataWithLock(frame_number, resultMetadata);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003922 return;
3923
Thierry Strudel3d639192016-09-09 11:52:26 -07003924done_metadata:
3925 for (pendingRequestIterator i = mPendingRequestsList.begin();
3926 i != mPendingRequestsList.end() ;i++) {
3927 i->pipeline_depth++;
3928 }
3929 LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
3930 unblockRequestIfNecessary();
3931}
3932
3933/*===========================================================================
Emilian Peev7650c122017-01-19 08:24:33 -08003934 * FUNCTION   : handleDepthDataLocked
3935 *
3936 * DESCRIPTION: Handles incoming depth data
3937 *
3938 * PARAMETERS : @depthData : Depth data
3939 * @frameNumber: Frame number of the incoming depth data
Emilian Peev4e0fe952017-06-30 12:40:09 -07003940 * @valid : Valid flag for the incoming data
Emilian Peev7650c122017-01-19 08:24:33 -08003941 *
3942 * RETURN :
3943 *
3944 *==========================================================================*/
3945void QCamera3HardwareInterface::handleDepthDataLocked(
Emilian Peev4e0fe952017-06-30 12:40:09 -07003946 const cam_depth_data_t &depthData, uint32_t frameNumber, uint8_t valid) {
Emilian Peev7650c122017-01-19 08:24:33 -08003947 uint32_t currentFrameNumber;
3948 buffer_handle_t *depthBuffer;
3949
3950 if (nullptr == mDepthChannel) {
Emilian Peev7650c122017-01-19 08:24:33 -08003951 return;
3952 }
3953
3954 camera3_stream_buffer_t resultBuffer =
3955 {.acquire_fence = -1,
3956 .release_fence = -1,
3957 .status = CAMERA3_BUFFER_STATUS_OK,
3958 .buffer = nullptr,
3959 .stream = mDepthChannel->getStream()};
Emilian Peev7650c122017-01-19 08:24:33 -08003960 do {
3961 depthBuffer = mDepthChannel->getOldestFrame(currentFrameNumber);
3962 if (nullptr == depthBuffer) {
3963 break;
3964 }
3965
Emilian Peev7650c122017-01-19 08:24:33 -08003966 resultBuffer.buffer = depthBuffer;
3967 if (currentFrameNumber == frameNumber) {
Emilian Peev4e0fe952017-06-30 12:40:09 -07003968 if (valid) {
3969 int32_t rc = mDepthChannel->populateDepthData(depthData,
3970 frameNumber);
3971 if (NO_ERROR != rc) {
3972 resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
3973 } else {
3974 resultBuffer.status = CAMERA3_BUFFER_STATUS_OK;
3975 }
Emilian Peev7650c122017-01-19 08:24:33 -08003976 } else {
Emilian Peev4e0fe952017-06-30 12:40:09 -07003977 resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
Emilian Peev7650c122017-01-19 08:24:33 -08003978 }
3979 } else if (currentFrameNumber > frameNumber) {
3980 break;
3981 } else {
3982 camera3_notify_msg_t notify_msg = {.type = CAMERA3_MSG_ERROR,
3983 {{currentFrameNumber, mDepthChannel->getStream(),
3984 CAMERA3_MSG_ERROR_BUFFER}}};
3985 orchestrateNotify(&notify_msg);
3986
3987 LOGE("Depth buffer for frame number: %d is missing "
3988 "returning back!", currentFrameNumber);
3989 resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
3990 }
3991 mDepthChannel->unmapBuffer(currentFrameNumber);
Chien-Yu Chen3f303522017-05-19 15:21:45 -07003992 mOutputBufferDispatcher.markBufferReady(currentFrameNumber, resultBuffer);
Emilian Peev7650c122017-01-19 08:24:33 -08003993 } while (currentFrameNumber < frameNumber);
3994}
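// Note: a small worked example of the draining loop above, assuming the depth channel still
// holds buffers for frames 10, 11 and 12 and handleDepthDataLocked() is called for frame 12:
//
//   frame 10 -> no depth data ever arrived: CAMERA3_MSG_ERROR_BUFFER is notified and the buffer
//               is returned with CAMERA3_BUFFER_STATUS_ERROR
//   frame 11 -> handled the same way as frame 10
//   frame 12 -> populateDepthData() fills the buffer; status OK on success, ERROR otherwise
//
// Buffers belonging to frames newer than the incoming frame number stay queued for later calls.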
3995
3996/*===========================================================================
3997 * FUNCTION : notifyErrorFoPendingDepthData
3998 *
3999 * DESCRIPTION: Returns error for any pending depth buffers
4000 *
4001 * PARAMETERS : depthCh - depth channel that needs to get flushed
4002 *
4003 * RETURN :
4004 *
4005 *==========================================================================*/
4006void QCamera3HardwareInterface::notifyErrorFoPendingDepthData(
4007 QCamera3DepthChannel *depthCh) {
4008 uint32_t currentFrameNumber;
4009 buffer_handle_t *depthBuffer;
4010
4011 if (nullptr == depthCh) {
4012 return;
4013 }
4014
4015 camera3_notify_msg_t notify_msg =
4016 {.type = CAMERA3_MSG_ERROR,
4017 {{0, depthCh->getStream(), CAMERA3_MSG_ERROR_BUFFER}}};
4018 camera3_stream_buffer_t resultBuffer =
4019 {.acquire_fence = -1,
4020 .release_fence = -1,
4021 .buffer = nullptr,
4022 .stream = depthCh->getStream(),
4023 .status = CAMERA3_BUFFER_STATUS_ERROR};
Emilian Peev7650c122017-01-19 08:24:33 -08004024
4025 while (nullptr !=
4026 (depthBuffer = depthCh->getOldestFrame(currentFrameNumber))) {
4027 depthCh->unmapBuffer(currentFrameNumber);
4028
4029 notify_msg.message.error.frame_number = currentFrameNumber;
4030 orchestrateNotify(&notify_msg);
4031
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004032 mOutputBufferDispatcher.markBufferReady(currentFrameNumber, resultBuffer);
Emilian Peev7650c122017-01-19 08:24:33 -08004033 }
4034}
4035
4036/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07004037 * FUNCTION : hdrPlusPerfLock
4038 *
4039 * DESCRIPTION: perf lock for HDR+ using custom intent
4040 *
4041 * PARAMETERS : @metadata_buf: Metadata super_buf pointer
4042 *
4043 * RETURN : None
4044 *
4045 *==========================================================================*/
4046void QCamera3HardwareInterface::hdrPlusPerfLock(
4047 mm_camera_super_buf_t *metadata_buf)
4048{
4049 if (NULL == metadata_buf) {
4050 LOGE("metadata_buf is NULL");
4051 return;
4052 }
4053 metadata_buffer_t *metadata =
4054 (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
4055 int32_t *p_frame_number_valid =
4056 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
4057 uint32_t *p_frame_number =
4058 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
4059
4060 if (p_frame_number_valid == NULL || p_frame_number == NULL) {
4061 LOGE("%s: Invalid metadata", __func__);
4062 return;
4063 }
4064
Wei Wang01385482017-08-03 10:49:34 -07004065 //acquire perf lock for 2 secs after the last HDR frame is captured
4066 constexpr uint32_t HDR_PLUS_PERF_TIME_OUT = 2000;
Thierry Strudel3d639192016-09-09 11:52:26 -07004067 if ((p_frame_number_valid != NULL) && *p_frame_number_valid) {
4068 if ((p_frame_number != NULL) &&
4069 (mLastCustIntentFrmNum == (int32_t)*p_frame_number)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004070 mPerfLockMgr.acquirePerfLock(PERF_LOCK_TAKE_SNAPSHOT, HDR_PLUS_PERF_TIME_OUT);
Thierry Strudel3d639192016-09-09 11:52:26 -07004071 }
4072 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004073}
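// Note: the PERF_LOCK_TAKE_SNAPSHOT lock acquired above with a 2000 ms timeout is also released
// explicitly in handleBufferWithLock() once a BLOB (JPEG) buffer comes back, so the timeout
// mainly acts as a safety net if the snapshot buffer never returns.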
4074
4075/*===========================================================================
4076 * FUNCTION : handleInputBufferWithLock
4077 *
4078 * DESCRIPTION: Handles input buffer and shutter callback with mMutex lock held.
4079 *
4080 * PARAMETERS : @frame_number: frame number of the input buffer
4081 *
4082 * RETURN :
4083 *
4084 *==========================================================================*/
4085void QCamera3HardwareInterface::handleInputBufferWithLock(uint32_t frame_number)
4086{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004087 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_IN_BUF_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07004088 pendingRequestIterator i = mPendingRequestsList.begin();
4089 while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
4090 i++;
4091 }
4092 if (i != mPendingRequestsList.end() && i->input_buffer) {
4093 //found the right request
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004094 CameraMetadata settings;
4095 nsecs_t capture_time = systemTime(CLOCK_MONOTONIC);
4096 if(i->settings) {
4097 settings = i->settings;
4098 if (settings.exists(ANDROID_SENSOR_TIMESTAMP)) {
4099 capture_time = settings.find(ANDROID_SENSOR_TIMESTAMP).data.i64[0];
Thierry Strudel3d639192016-09-09 11:52:26 -07004100 } else {
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004101 LOGE("No timestamp in input settings! Using current one.");
Thierry Strudel3d639192016-09-09 11:52:26 -07004102 }
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004103 } else {
4104 LOGE("Input settings missing!");
Thierry Strudel3d639192016-09-09 11:52:26 -07004105 }
4106
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004107 mShutterDispatcher.markShutterReady(frame_number, capture_time);
4108 LOGD("Input request metadata notify frame_number = %u, capture_time = %llu",
4109 i->frame_number, capture_time);
Thierry Strudel3d639192016-09-09 11:52:26 -07004110
4111 camera3_capture_result result;
4112 memset(&result, 0, sizeof(camera3_capture_result));
4113 result.frame_number = frame_number;
4114 result.result = i->settings;
4115 result.input_buffer = i->input_buffer;
4116 result.partial_result = PARTIAL_RESULT_COUNT;
4117
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004118 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -07004119 LOGD("Input request metadata and input buffer frame_number = %u",
4120 i->frame_number);
4121 i = erasePendingRequest(i);
Chien-Yu Chen588cc852017-06-23 18:39:51 -07004122
4123 // Dispatch result metadata that may be just unblocked by this reprocess result.
4124 dispatchResultMetadataWithLock(frame_number, /*isLiveRequest*/false);
Thierry Strudel3d639192016-09-09 11:52:26 -07004125 } else {
4126 LOGE("Could not find input request for frame number %d", frame_number);
4127 }
4128}
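// Note: for a reprocess (input-buffer) request the settings double as the result metadata and
// partial_result is reported as PARTIAL_RESULT_COUNT in one shot; dispatchResultMetadataWithLock()
// is then called because erasing this entry may unblock in-order delivery of later live results.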
4129
4130/*===========================================================================
4131 * FUNCTION : handleBufferWithLock
4132 *
4133 * DESCRIPTION: Handles image buffer callback with mMutex lock held.
4134 *
4135 * PARAMETERS : @buffer: image buffer for the callback
4136 * @frame_number: frame number of the image buffer
4137 *
4138 * RETURN :
4139 *
4140 *==========================================================================*/
4141void QCamera3HardwareInterface::handleBufferWithLock(
4142 camera3_stream_buffer_t *buffer, uint32_t frame_number)
4143{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004144 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_BUF_LKD);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004145
4146 if (buffer->stream->format == HAL_PIXEL_FORMAT_BLOB) {
4147 mPerfLockMgr.releasePerfLock(PERF_LOCK_TAKE_SNAPSHOT);
4148 }
4149
Thierry Strudel3d639192016-09-09 11:52:26 -07004150 /* Nothing to be done during error state */
4151 if ((ERROR == mState) || (DEINIT == mState)) {
4152 return;
4153 }
4154 if (mFlushPerf) {
4155 handleBuffersDuringFlushLock(buffer);
4156 return;
4157 }
4158 //not in flush
4159 // If the frame number doesn't exist in the pending request list,
4160 // directly send the buffer to the frameworks, and update pending buffers map
4161 // Otherwise, book-keep the buffer.
4162 pendingRequestIterator i = mPendingRequestsList.begin();
4163 while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
4164 i++;
4165 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004166
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004167 if (i != mPendingRequestsList.end()) {
Thierry Strudel3d639192016-09-09 11:52:26 -07004168 if (i->input_buffer) {
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004169 // For a reprocessing request, try to send out result metadata.
4170 handlePendingResultMetadataWithLock(frame_number, nullptr);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004171 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004172 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004173
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004174 // Check if this frame was dropped.
4175 for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
4176 m != mPendingFrameDropList.end(); m++) {
4177 QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
4178 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
4179 if((m->stream_ID == streamID) && (m->frame_number==frame_number) ) {
4180 buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
4181 LOGD("Stream STATUS_ERROR frame_number=%d, streamID=%d",
4182 frame_number, streamID);
4183 m = mPendingFrameDropList.erase(m);
4184 break;
4185 }
4186 }
4187
Binhao Lin09245482017-08-31 18:25:29 -07004188 // WAR for encoder avtimer timestamp issue
4189 QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
4190 if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask() &&
4191 m_bAVTimerEnabled) {
4192 for (auto req = mPendingBuffersMap.mPendingBuffersInRequest.begin();
4193 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
4194 if (req->frame_number != frame_number)
4195 continue;
4196 if(req->av_timestamp == 0) {
4197 buffer->status |= CAMERA3_BUFFER_STATUS_ERROR;
4198 }
4199 else {
4200 struct private_handle_t *priv_handle =
4201 (struct private_handle_t *) (*(buffer->buffer));
4202 setMetaData(priv_handle, SET_VT_TIMESTAMP, &(req->av_timestamp));
4203 }
4204 }
4205 }
4206
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004207 buffer->status |= mPendingBuffersMap.getBufErrStatus(buffer->buffer);
4208 LOGH("result frame_number = %d, buffer = %p",
4209 frame_number, buffer->buffer);
4210
4211 mPendingBuffersMap.removeBuf(buffer->buffer);
4212 mOutputBufferDispatcher.markBufferReady(frame_number, *buffer);
4213
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004214 if (mPreviewStarted == false) {
4215 QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
4216 if ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask()) {
Chien-Yu Chen509314b2017-04-07 15:27:55 -07004217 logEaselEvent("EASEL_STARTUP_LATENCY", "Preview Started");
4218
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004219 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
4220 mPerfLockMgr.releasePerfLock(PERF_LOCK_OPEN_CAMERA);
4221 mPreviewStarted = true;
4222
4223 // Set power hint for preview
4224 mPerfLockMgr.acquirePerfLock(PERF_LOCK_POWERHINT_ENCODE, 0);
4225 }
4226 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004227}
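// Note: mShutterDispatcher and mOutputBufferDispatcher, used above via markShutterReady() and
// markBufferReady(), queue per-frame shutter callbacks and output buffers so they can be
// delivered to the framework in frame-number order even when channels return buffers out of order.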
4228
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004229void QCamera3HardwareInterface::handlePendingResultMetadataWithLock(uint32_t frameNumber,
Chien-Yu Chenbc730232017-07-12 14:49:55 -07004230 camera_metadata_t *resultMetadata)
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004231{
4232 // Find the pending request for this result metadata.
4233 auto requestIter = mPendingRequestsList.begin();
4234 while (requestIter != mPendingRequestsList.end() && requestIter->frame_number != frameNumber) {
4235 requestIter++;
4236 }
4237
4238 if (requestIter == mPendingRequestsList.end()) {
4239 ALOGE("%s: Cannot find a pending request for frame number %u.", __FUNCTION__, frameNumber);
4240 return;
4241 }
4242
4243 // Update the result metadata
4244 requestIter->resultMetadata = resultMetadata;
4245
4246 // Check what type of request this is.
4247 bool liveRequest = false;
4248 if (requestIter->hdrplus) {
Chien-Yu Chen9264fe92017-04-29 03:28:46 +00004249 // HDR+ request doesn't have partial results.
4250 requestIter->partial_result_cnt = PARTIAL_RESULT_COUNT;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004251 } else if (requestIter->input_buffer != nullptr) {
4252 // Reprocessing request result is the same as settings.
4253 requestIter->resultMetadata = requestIter->settings;
4254 // Reprocessing request doesn't have partial results.
4255 requestIter->partial_result_cnt = PARTIAL_RESULT_COUNT;
4256 } else {
4257 liveRequest = true;
Chien-Yu Chen0a921f92017-08-27 17:25:33 -07004258 requestIter->partial_result_cnt = PARTIAL_RESULT_COUNT;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004259 mPendingLiveRequest--;
4260
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07004261 {
Chien-Yu Chen77ccd022017-06-23 12:00:36 -07004262 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07004263 // For a live request, send the metadata to HDR+ client.
4264 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
4265 gHdrPlusClient->notifyFrameMetadata(frameNumber, *resultMetadata,
4266 requestIter->partial_result_cnt == PARTIAL_RESULT_COUNT);
4267 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004268 }
4269 }
4270
Chien-Yu Chenbc730232017-07-12 14:49:55 -07004271 // Remove lens shading map if it's not requested.
4272 if (requestIter->requestedLensShadingMapMode == ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF) {
4273 CameraMetadata metadata;
4274 metadata.acquire(resultMetadata);
4275 metadata.erase(ANDROID_STATISTICS_LENS_SHADING_MAP);
4276 metadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE,
4277 &requestIter->requestedLensShadingMapMode, 1);
4278
4279 requestIter->resultMetadata = metadata.release();
4280 }
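    // Note: CameraMetadata::acquire() takes ownership of the raw resultMetadata buffer and
    // release() hands back ownership of the edited buffer, so the erase/update above is done
    // without leaking or double-freeing the original camera_metadata_t.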
4281
Chien-Yu Chen588cc852017-06-23 18:39:51 -07004282 dispatchResultMetadataWithLock(frameNumber, liveRequest);
4283}
4284
4285void QCamera3HardwareInterface::dispatchResultMetadataWithLock(uint32_t frameNumber,
4286 bool isLiveRequest) {
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004287 // The pending requests are ordered by increasing frame numbers. The result metadata are ready
4288 // to be sent if all previous pending requests are ready to be sent.
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004289 bool readyToSend = true;
4290
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004291 // Iterate through the pending requests to send out result metadata that are ready. Also if
4292 // this result metadata belongs to a live request, notify errors for previous live requests
4293 // that don't have result metadata yet.
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004294 auto iter = mPendingRequestsList.begin();
4295 while (iter != mPendingRequestsList.end()) {
4296 // Check if current pending request is ready. If it's not ready, the following pending
4297 // requests are also not ready.
4298 if (readyToSend && iter->resultMetadata == nullptr) {
4299 readyToSend = false;
4300 }
4301
4302 bool thisLiveRequest = iter->hdrplus == false && iter->input_buffer == nullptr;
4303
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004304 camera3_capture_result_t result = {};
4305 result.frame_number = iter->frame_number;
4306 result.result = iter->resultMetadata;
4307 result.partial_result = iter->partial_result_cnt;
4308
4309 // If this pending buffer has result metadata, we may be able to send out shutter callback
4310 // and result metadata.
4311 if (iter->resultMetadata != nullptr) {
4312 if (!readyToSend) {
 4313 // If any previous pending request is not ready, this pending request is
 4314 // also held back so that shutter callbacks and result metadata are
 4315 // delivered in order.
4316 iter++;
4317 continue;
4318 }
Chien-Yu Chen588cc852017-06-23 18:39:51 -07004319 } else if (iter->frame_number < frameNumber && isLiveRequest && thisLiveRequest) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004320 // If the result metadata belongs to a live request, notify errors for previous pending
4321 // live requests.
4322 mPendingLiveRequest--;
4323
4324 CameraMetadata dummyMetadata;
4325 dummyMetadata.update(ANDROID_REQUEST_ID, &(iter->request_id), 1);
4326 result.result = dummyMetadata.release();
4327
4328 notifyError(iter->frame_number, CAMERA3_MSG_ERROR_RESULT);
Shuzhen Wang1ee712a2017-03-22 17:51:26 -07004329
 4330 // partial_result should be PARTIAL_RESULT_COUNT in case of
4331 // ERROR_RESULT.
4332 iter->partial_result_cnt = PARTIAL_RESULT_COUNT;
4333 result.partial_result = PARTIAL_RESULT_COUNT;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004334 } else {
4335 iter++;
4336 continue;
4337 }
4338
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004339 result.output_buffers = nullptr;
4340 result.num_output_buffers = 0;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004341 orchestrateResult(&result);
4342
4343 // For reprocessing, result metadata is the same as settings so do not free it here to
4344 // avoid double free.
4345 if (result.result != iter->settings) {
4346 free_camera_metadata((camera_metadata_t *)result.result);
4347 }
4348 iter->resultMetadata = nullptr;
4349 iter = erasePendingRequest(iter);
4350 }
4351
Chien-Yu Chen588cc852017-06-23 18:39:51 -07004352 if (isLiveRequest) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004353 for (auto &iter : mPendingRequestsList) {
4354 // Increment pipeline depth for the following pending requests.
4355 if (iter.frame_number > frameNumber) {
4356 iter.pipeline_depth++;
4357 }
4358 }
4359 }
4360
4361 unblockRequestIfNecessary();
4362}
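// Note: a small worked example of the in-order dispatch above, assuming pending frames 10, 11
// and 12 where only 10 and 12 currently have result metadata:
//
//   frame 10 -> first in the list and ready: its result is sent and the entry erased
//   frame 11 -> no metadata yet: readyToSend becomes false
//   frame 12 -> has metadata but is held back so shutters and results stay in frame order
//
// When the call is triggered by a live result, an older live entry that is still missing metadata
// is instead completed with CAMERA3_MSG_ERROR_RESULT and a dummy result containing only the
// request id.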
4363
Thierry Strudel3d639192016-09-09 11:52:26 -07004364/*===========================================================================
4365 * FUNCTION : unblockRequestIfNecessary
4366 *
4367 * DESCRIPTION: Unblock capture_request if max_buffer hasn't been reached. Note
4368 * that mMutex is held when this function is called.
4369 *
4370 * PARAMETERS :
4371 *
4372 * RETURN :
4373 *
4374 *==========================================================================*/
4375void QCamera3HardwareInterface::unblockRequestIfNecessary()
4376{
4377 // Unblock process_capture_request
4378 pthread_cond_signal(&mRequestCond);
4379}
4380
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004381/*===========================================================================
4382 * FUNCTION : isHdrSnapshotRequest
4383 *
4384 * DESCRIPTION: Function to determine if the request is for a HDR snapshot
4385 *
4386 * PARAMETERS : camera3 request structure
4387 *
4388 * RETURN : boolean decision variable
4389 *
4390 *==========================================================================*/
4391bool QCamera3HardwareInterface::isHdrSnapshotRequest(camera3_capture_request *request)
4392{
4393 if (request == NULL) {
4394 LOGE("Invalid request handle");
4395 assert(0);
4396 return false;
4397 }
4398
4399 if (!mForceHdrSnapshot) {
4400 CameraMetadata frame_settings;
4401 frame_settings = request->settings;
4402
4403 if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
4404 uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
4405 if (metaMode != ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
4406 return false;
4407 }
4408 } else {
4409 return false;
4410 }
4411
4412 if (frame_settings.exists(ANDROID_CONTROL_SCENE_MODE)) {
4413 uint8_t fwk_sceneMode = frame_settings.find(ANDROID_CONTROL_SCENE_MODE).data.u8[0];
4414 if (fwk_sceneMode != ANDROID_CONTROL_SCENE_MODE_HDR) {
4415 return false;
4416 }
4417 } else {
4418 return false;
4419 }
4420 }
4421
4422 for (uint32_t i = 0; i < request->num_output_buffers; i++) {
4423 if (request->output_buffers[i].stream->format
4424 == HAL_PIXEL_FORMAT_BLOB) {
4425 return true;
4426 }
4427 }
4428
4429 return false;
4430}
4431/*===========================================================================
4432 * FUNCTION : orchestrateRequest
4433 *
4434 * DESCRIPTION: Orchestrates a capture request from camera service
4435 *
4436 * PARAMETERS :
4437 * @request : request from framework to process
4438 *
4439 * RETURN : Error status codes
4440 *
4441 *==========================================================================*/
4442int32_t QCamera3HardwareInterface::orchestrateRequest(
4443 camera3_capture_request_t *request)
4444{
4445
4446 uint32_t originalFrameNumber = request->frame_number;
4447 uint32_t originalOutputCount = request->num_output_buffers;
4448 const camera_metadata_t *original_settings = request->settings;
4449 List<InternalRequest> internallyRequestedStreams;
4450 List<InternalRequest> emptyInternalList;
4451
4452 if (isHdrSnapshotRequest(request) && request->input_buffer == NULL) {
4453 LOGD("Framework requested:%d buffers in HDR snapshot", request->num_output_buffers);
4454 uint32_t internalFrameNumber;
4455 CameraMetadata modified_meta;
4456
4457
4458 /* Add Blob channel to list of internally requested streams */
4459 for (uint32_t i = 0; i < request->num_output_buffers; i++) {
4460 if (request->output_buffers[i].stream->format
4461 == HAL_PIXEL_FORMAT_BLOB) {
4462 InternalRequest streamRequested;
4463 streamRequested.meteringOnly = 1;
4464 streamRequested.need_metadata = 0;
4465 streamRequested.stream = request->output_buffers[i].stream;
4466 internallyRequestedStreams.push_back(streamRequested);
4467 }
4468 }
4469 request->num_output_buffers = 0;
4470 auto itr = internallyRequestedStreams.begin();
4471
4472 /* Modify setting to set compensation */
4473 modified_meta = request->settings;
4474 int32_t expCompensation = GB_HDR_HALF_STEP_EV;
4475 uint8_t aeLock = 1;
4476 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4477 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4478 camera_metadata_t *modified_settings = modified_meta.release();
4479 request->settings = modified_settings;
4480
4481 /* Capture Settling & -2x frame */
4482 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4483 request->frame_number = internalFrameNumber;
4484 processCaptureRequest(request, internallyRequestedStreams);
4485
4486 request->num_output_buffers = originalOutputCount;
4487 _orchestrationDb.allocStoreInternalFrameNumber(originalFrameNumber, internalFrameNumber);
4488 request->frame_number = internalFrameNumber;
4489 processCaptureRequest(request, emptyInternalList);
4490 request->num_output_buffers = 0;
4491
4492 modified_meta = modified_settings;
4493 expCompensation = 0;
4494 aeLock = 1;
4495 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4496 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4497 modified_settings = modified_meta.release();
4498 request->settings = modified_settings;
4499
4500 /* Capture Settling & 0X frame */
4501
4502 itr = internallyRequestedStreams.begin();
4503 if (itr == internallyRequestedStreams.end()) {
4504 LOGE("Error Internally Requested Stream list is empty");
4505 assert(0);
4506 } else {
4507 itr->need_metadata = 0;
4508 itr->meteringOnly = 1;
4509 }
4510
4511 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4512 request->frame_number = internalFrameNumber;
4513 processCaptureRequest(request, internallyRequestedStreams);
4514
4515 itr = internallyRequestedStreams.begin();
4516 if (itr == internallyRequestedStreams.end()) {
4517 ALOGE("Error Internally Requested Stream list is empty");
4518 assert(0);
4519 } else {
4520 itr->need_metadata = 1;
4521 itr->meteringOnly = 0;
4522 }
4523
4524 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4525 request->frame_number = internalFrameNumber;
4526 processCaptureRequest(request, internallyRequestedStreams);
4527
4528 /* Capture 2X frame*/
4529 modified_meta = modified_settings;
4530 expCompensation = GB_HDR_2X_STEP_EV;
4531 aeLock = 1;
4532 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4533 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4534 modified_settings = modified_meta.release();
4535 request->settings = modified_settings;
4536
4537 itr = internallyRequestedStreams.begin();
4538 if (itr == internallyRequestedStreams.end()) {
4539 ALOGE("Error Internally Requested Stream list is empty");
4540 assert(0);
4541 } else {
4542 itr->need_metadata = 0;
4543 itr->meteringOnly = 1;
4544 }
4545 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4546 request->frame_number = internalFrameNumber;
4547 processCaptureRequest(request, internallyRequestedStreams);
4548
4549 itr = internallyRequestedStreams.begin();
4550 if (itr == internallyRequestedStreams.end()) {
4551 ALOGE("Error Internally Requested Stream list is empty");
4552 assert(0);
4553 } else {
4554 itr->need_metadata = 1;
4555 itr->meteringOnly = 0;
4556 }
4557
4558 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4559 request->frame_number = internalFrameNumber;
4560 processCaptureRequest(request, internallyRequestedStreams);
4561
4562
4563 /* Capture 2X on original streaming config*/
4564 internallyRequestedStreams.clear();
4565
4566 /* Restore original settings pointer */
4567 request->settings = original_settings;
4568 } else {
4569 uint32_t internalFrameNumber;
4570 _orchestrationDb.allocStoreInternalFrameNumber(request->frame_number, internalFrameNumber);
4571 request->frame_number = internalFrameNumber;
4572 return processCaptureRequest(request, internallyRequestedStreams);
4573 }
4574
4575 return NO_ERROR;
4576}
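// Note: for the HDR snapshot path above, one framework request fans out into a bracketed
// sequence of internal requests, roughly (following the in-line comments):
//
//   1. metering-only settling frames at the negative EV step (GB_HDR_HALF_STEP_EV), AE locked
//   2. the original output streams at that EV -- the only frame mapped back to the framework
//      frame number via allocStoreInternalFrameNumber()
//   3. settling plus an internally captured BLOB frame at EV 0
//   4. settling plus an internally captured BLOB frame at the positive EV step (GB_HDR_2X_STEP_EV)
//
// Results for the generateStoreInternalFrameNumber() requests map to EMPTY_FRAMEWORK_FRAME_NUMBER
// and are dropped in orchestrateResult() / orchestrateNotify().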
4577
4578/*===========================================================================
4579 * FUNCTION : orchestrateResult
4580 *
4581 * DESCRIPTION: Orchestrates a capture result to camera service
4582 *
4583 * PARAMETERS :
 4584 * @result : capture result to send to camera service
4585 *
4586 * RETURN :
4587 *
4588 *==========================================================================*/
4589void QCamera3HardwareInterface::orchestrateResult(
4590 camera3_capture_result_t *result)
4591{
4592 uint32_t frameworkFrameNumber;
4593 int32_t rc = _orchestrationDb.getFrameworkFrameNumber(result->frame_number,
4594 frameworkFrameNumber);
4595 if (rc != NO_ERROR) {
4596 LOGE("Cannot find translated frameworkFrameNumber");
4597 assert(0);
4598 } else {
4599 if (frameworkFrameNumber == EMPTY_FRAMEWORK_FRAME_NUMBER) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08004600 LOGD("Internal Request drop the result");
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004601 } else {
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004602 if (result->result != NULL) {
Binhao Lin299ffc92017-04-27 11:22:47 -07004603 camera_metadata_t *metadata = const_cast<camera_metadata_t*>(result->result);
4604 camera_metadata_entry_t entry;
4605 int ret = find_camera_metadata_entry(metadata, ANDROID_SYNC_FRAME_NUMBER, &entry);
4606 if (ret == OK) {
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004607 int64_t sync_frame_number = frameworkFrameNumber;
Binhao Lin299ffc92017-04-27 11:22:47 -07004608 ret = update_camera_metadata_entry(metadata, entry.index, &sync_frame_number, 1, &entry);
4609 if (ret != OK)
4610 LOGE("Update ANDROID_SYNC_FRAME_NUMBER Error!");
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004611 }
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004612 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004613 result->frame_number = frameworkFrameNumber;
4614 mCallbackOps->process_capture_result(mCallbackOps, result);
4615 }
4616 }
4617}
4618
4619/*===========================================================================
4620 * FUNCTION : orchestrateNotify
4621 *
4622 * DESCRIPTION: Orchestrates a notify to camera service
4623 *
4624 * PARAMETERS :
 4625 * @notify_msg : notify message to send to camera service
4626 *
4627 * RETURN :
4628 *
4629 *==========================================================================*/
4630void QCamera3HardwareInterface::orchestrateNotify(camera3_notify_msg_t *notify_msg)
4631{
4632 uint32_t frameworkFrameNumber;
4633 uint32_t internalFrameNumber = notify_msg->message.shutter.frame_number;
Thierry Strudel2896d122017-02-23 19:18:03 -08004634 int32_t rc = NO_ERROR;
4635
4636 rc = _orchestrationDb.getFrameworkFrameNumber(internalFrameNumber,
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004637 frameworkFrameNumber);
Thierry Strudel2896d122017-02-23 19:18:03 -08004638
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004639 if (rc != NO_ERROR) {
Thierry Strudel2896d122017-02-23 19:18:03 -08004640 if (notify_msg->message.error.error_code == CAMERA3_MSG_ERROR_DEVICE) {
4641 LOGD("Sending CAMERA3_MSG_ERROR_DEVICE to framework");
4642 frameworkFrameNumber = 0;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004643 } else {
Thierry Strudel2896d122017-02-23 19:18:03 -08004644 LOGE("Cannot find translated frameworkFrameNumber");
4645 assert(0);
4646 return;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004647 }
4648 }
Thierry Strudel2896d122017-02-23 19:18:03 -08004649
4650 if (frameworkFrameNumber == EMPTY_FRAMEWORK_FRAME_NUMBER) {
4651 LOGD("Internal Request drop the notifyCb");
4652 } else {
4653 notify_msg->message.shutter.frame_number = frameworkFrameNumber;
4654 mCallbackOps->notify(mCallbackOps, notify_msg);
4655 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004656}
4657
4658/*===========================================================================
4659 * FUNCTION : FrameNumberRegistry
4660 *
4661 * DESCRIPTION: Constructor
4662 *
4663 * PARAMETERS :
4664 *
4665 * RETURN :
4666 *
4667 *==========================================================================*/
4668FrameNumberRegistry::FrameNumberRegistry()
4669{
4670 _nextFreeInternalNumber = INTERNAL_FRAME_STARTING_NUMBER;
4671}
4672
4673/*===========================================================================
4674 * FUNCTION : ~FrameNumberRegistry
4675 *
4676 * DESCRIPTION: Destructor
4677 *
4678 * PARAMETERS :
4679 *
4680 * RETURN :
4681 *
4682 *==========================================================================*/
4683FrameNumberRegistry::~FrameNumberRegistry()
4684{
4685}
4686
4687/*===========================================================================
4688 * FUNCTION : PurgeOldEntriesLocked
4689 *
 4690 * DESCRIPTION: Maintenance function to trigger LRU cleanup mechanism
4691 *
4692 * PARAMETERS :
4693 *
4694 * RETURN : NONE
4695 *
4696 *==========================================================================*/
4697void FrameNumberRegistry::purgeOldEntriesLocked()
4698{
4699 while (_register.begin() != _register.end()) {
4700 auto itr = _register.begin();
4701 if (itr->first < (_nextFreeInternalNumber - FRAME_REGISTER_LRU_SIZE)) {
4702 _register.erase(itr);
4703 } else {
4704 return;
4705 }
4706 }
4707}
4708
4709/*===========================================================================
4710 * FUNCTION : allocStoreInternalFrameNumber
4711 *
4712 * DESCRIPTION: Method to note down a framework request and associate a new
 4713 * internal frame number with it
4714 *
4715 * PARAMETERS :
4716 * @fFrameNumber: Identifier given by framework
4717 * @internalFN : Output parameter which will have the newly generated internal
4718 * entry
4719 *
4720 * RETURN : Error code
4721 *
4722 *==========================================================================*/
4723int32_t FrameNumberRegistry::allocStoreInternalFrameNumber(uint32_t frameworkFrameNumber,
4724 uint32_t &internalFrameNumber)
4725{
4726 Mutex::Autolock lock(mRegistryLock);
4727 internalFrameNumber = _nextFreeInternalNumber++;
4728 LOGD("Storing ff#:%d, with internal:%d", frameworkFrameNumber, internalFrameNumber);
4729 _register.insert(std::pair<uint32_t,uint32_t>(internalFrameNumber, frameworkFrameNumber));
4730 purgeOldEntriesLocked();
4731 return NO_ERROR;
4732}
4733
4734/*===========================================================================
4735 * FUNCTION : generateStoreInternalFrameNumber
4736 *
4737 * DESCRIPTION: Method to associate a new internal request number independent
 4738 * of any association with a framework request
4739 *
4740 * PARAMETERS :
 4741 * @internalFrame#: Output parameter which will have the newly generated internal frame number
4742 *
4743 *
4744 * RETURN : Error code
4745 *
4746 *==========================================================================*/
4747int32_t FrameNumberRegistry::generateStoreInternalFrameNumber(uint32_t &internalFrameNumber)
4748{
4749 Mutex::Autolock lock(mRegistryLock);
4750 internalFrameNumber = _nextFreeInternalNumber++;
4751 LOGD("Generated internal framenumber:%d", internalFrameNumber);
4752 _register.insert(std::pair<uint32_t,uint32_t>(internalFrameNumber, EMPTY_FRAMEWORK_FRAME_NUMBER));
4753 purgeOldEntriesLocked();
4754 return NO_ERROR;
4755}
4756
4757/*===========================================================================
4758 * FUNCTION : getFrameworkFrameNumber
4759 *
 4760 * DESCRIPTION: Method to query the framework frame number given an internal frame number
4761 *
4762 * PARAMETERS :
4763 * @internalFrame#: Internal reference
4764 * @frameworkframenumber: Output parameter holding framework frame entry
4765 *
4766 * RETURN : Error code
4767 *
4768 *==========================================================================*/
4769int32_t FrameNumberRegistry::getFrameworkFrameNumber(uint32_t internalFrameNumber,
4770 uint32_t &frameworkFrameNumber)
4771{
4772 Mutex::Autolock lock(mRegistryLock);
4773 auto itr = _register.find(internalFrameNumber);
4774 if (itr == _register.end()) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08004775 LOGE("Cannot find internal#: %d", internalFrameNumber);
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004776 return -ENOENT;
4777 }
4778
4779 frameworkFrameNumber = itr->second;
4780 purgeOldEntriesLocked();
4781 return NO_ERROR;
4782}
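// Note: a minimal usage sketch of FrameNumberRegistry (hypothetical frame numbers):
//
//   FrameNumberRegistry registry;
//   uint32_t internalFN;
//   registry.allocStoreInternalFrameNumber(/* frameworkFrameNumber */ 42, internalFN);
//   ...
//   uint32_t frameworkFN;
//   if (registry.getFrameworkFrameNumber(internalFN, frameworkFN) == NO_ERROR) {
//       // frameworkFN == 42; EMPTY_FRAMEWORK_FRAME_NUMBER instead marks HAL-internal requests
//   }
//
// Entries that fall more than FRAME_REGISTER_LRU_SIZE behind the next free internal number are
// purged on every alloc/generate/get call.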
Thierry Strudel3d639192016-09-09 11:52:26 -07004783
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004784status_t QCamera3HardwareInterface::fillPbStreamConfig(
Chien-Yu Chen14d3e392017-07-10 18:27:05 -07004785 pbcamera::StreamConfiguration *config, uint32_t pbStreamId, QCamera3Channel *channel,
4786 uint32_t streamIndex) {
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004787 if (config == nullptr) {
4788 LOGE("%s: config is null", __FUNCTION__);
4789 return BAD_VALUE;
4790 }
4791
4792 if (channel == nullptr) {
4793 LOGE("%s: channel is null", __FUNCTION__);
4794 return BAD_VALUE;
4795 }
4796
4797 QCamera3Stream *stream = channel->getStreamByIndex(streamIndex);
4798 if (stream == nullptr) {
4799 LOGE("%s: Failed to get stream %d in channel.", __FUNCTION__, streamIndex);
4800 return NAME_NOT_FOUND;
4801 }
4802
4803 const cam_stream_info_t* streamInfo = stream->getStreamInfo();
4804 if (streamInfo == nullptr) {
4805 LOGE("%s: Failed to get stream info for stream %d in channel.", __FUNCTION__, streamIndex);
4806 return NAME_NOT_FOUND;
4807 }
4808
4809 config->id = pbStreamId;
4810 config->image.width = streamInfo->dim.width;
4811 config->image.height = streamInfo->dim.height;
4812 config->image.padding = 0;
Chien-Yu Chen14d3e392017-07-10 18:27:05 -07004813
4814 int bytesPerPixel = 0;
4815
4816 switch (streamInfo->fmt) {
4817 case CAM_FORMAT_YUV_420_NV21:
4818 config->image.format = HAL_PIXEL_FORMAT_YCrCb_420_SP;
4819 bytesPerPixel = 1;
4820 break;
4821 case CAM_FORMAT_YUV_420_NV12:
4822 case CAM_FORMAT_YUV_420_NV12_VENUS:
4823 config->image.format = HAL_PIXEL_FORMAT_YCbCr_420_SP;
4824 bytesPerPixel = 1;
4825 break;
4826 default:
4827 ALOGE("%s: Stream format %d not supported.", __FUNCTION__, streamInfo->fmt);
4828 return BAD_VALUE;
4829 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004830
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004831 uint32_t totalPlaneSize = 0;
4832
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004833 // Fill plane information.
4834 for (uint32_t i = 0; i < streamInfo->buf_planes.plane_info.num_planes; i++) {
4835 pbcamera::PlaneConfiguration plane;
Chien-Yu Chen14d3e392017-07-10 18:27:05 -07004836 plane.stride = streamInfo->buf_planes.plane_info.mp[i].stride * bytesPerPixel;
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004837 plane.scanline = streamInfo->buf_planes.plane_info.mp[i].scanline;
4838 config->image.planes.push_back(plane);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004839
4840 totalPlaneSize += (plane.stride * plane.scanline);
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004841 }
4842
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004843 config->image.padding = streamInfo->buf_planes.plane_info.frame_len - totalPlaneSize;
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004844 return OK;
4845}
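// Note: the padding computed above is simply the extra bytes the backend allocates beyond the
// visible planes, e.g. with hypothetical NV21 numbers:
//
//   plane 0: stride 1920 * scanline 1088 = 2088960
//   plane 1: stride 1920 * scanline  544 = 1044480
//   frame_len 3137536  ->  padding = 3137536 - (2088960 + 1044480) = 4096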
4846
Thierry Strudel3d639192016-09-09 11:52:26 -07004847/*===========================================================================
4848 * FUNCTION : processCaptureRequest
4849 *
4850 * DESCRIPTION: process a capture request from camera service
4851 *
4852 * PARAMETERS :
4853 * @request : request from framework to process
4854 *
4855 * RETURN :
4856 *
4857 *==========================================================================*/
4858int QCamera3HardwareInterface::processCaptureRequest(
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004859 camera3_capture_request_t *request,
4860 List<InternalRequest> &internallyRequestedStreams)
Thierry Strudel3d639192016-09-09 11:52:26 -07004861{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004862 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_PROC_CAP_REQ);
Thierry Strudel3d639192016-09-09 11:52:26 -07004863 int rc = NO_ERROR;
4864 int32_t request_id;
4865 CameraMetadata meta;
Thierry Strudel3d639192016-09-09 11:52:26 -07004866 bool isVidBufRequested = false;
4867 camera3_stream_buffer_t *pInputBuffer = NULL;
Thierry Strudel54dc9782017-02-15 12:12:10 -08004868 char prop[PROPERTY_VALUE_MAX];
Thierry Strudel3d639192016-09-09 11:52:26 -07004869
4870 pthread_mutex_lock(&mMutex);
4871
4872 // Validate current state
4873 switch (mState) {
4874 case CONFIGURED:
4875 case STARTED:
4876 /* valid state */
4877 break;
4878
4879 case ERROR:
4880 pthread_mutex_unlock(&mMutex);
4881 handleCameraDeviceError();
4882 return -ENODEV;
4883
4884 default:
4885 LOGE("Invalid state %d", mState);
4886 pthread_mutex_unlock(&mMutex);
4887 return -ENODEV;
4888 }
4889
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004890 rc = validateCaptureRequest(request, internallyRequestedStreams);
Thierry Strudel3d639192016-09-09 11:52:26 -07004891 if (rc != NO_ERROR) {
4892 LOGE("incoming request is not valid");
4893 pthread_mutex_unlock(&mMutex);
4894 return rc;
4895 }
4896
4897 meta = request->settings;
4898
4899 // For first capture request, send capture intent, and
4900 // stream on all streams
4901 if (mState == CONFIGURED) {
Chien-Yu Chene96475e2017-04-11 11:53:26 -07004902 logEaselEvent("EASEL_STARTUP_LATENCY", "First request");
Thierry Strudel3d639192016-09-09 11:52:26 -07004903 // send an unconfigure to the backend so that the isp
4904 // resources are deallocated
4905 if (!mFirstConfiguration) {
4906 cam_stream_size_info_t stream_config_info;
4907 int32_t hal_version = CAM_HAL_V3;
4908 memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
4909 stream_config_info.buffer_info.min_buffers =
4910 MIN_INFLIGHT_REQUESTS;
4911 stream_config_info.buffer_info.max_buffers =
Thierry Strudel2896d122017-02-23 19:18:03 -08004912 m_bIs4KVideo ? 0 :
Jason Leea46ad5e2017-07-07 15:20:56 -07004913 m_bEis3PropertyEnabled && m_bIsVideo ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07004914 clear_metadata_buffer(mParameters);
4915 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4916 CAM_INTF_PARM_HAL_VERSION, hal_version);
4917 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4918 CAM_INTF_META_STREAM_INFO, stream_config_info);
4919 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
4920 mParameters);
4921 if (rc < 0) {
4922 LOGE("set_parms for unconfigure failed");
4923 pthread_mutex_unlock(&mMutex);
4924 return rc;
4925 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07004926
Thierry Strudel3d639192016-09-09 11:52:26 -07004927 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004928 mPerfLockMgr.acquirePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07004929 /* get eis information for stream configuration */
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004930 cam_is_type_t isTypeVideo, isTypePreview, is_type=IS_TYPE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07004931 char is_type_value[PROPERTY_VALUE_MAX];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004932 property_get("persist.camera.is_type", is_type_value, "4");
4933 isTypeVideo = static_cast<cam_is_type_t>(atoi(is_type_value));
4934 // Make default value for preview IS_TYPE as IS_TYPE_EIS_2_0
4935 property_get("persist.camera.is_type_preview", is_type_value, "4");
4936 isTypePreview = static_cast<cam_is_type_t>(atoi(is_type_value));
4937 LOGD("isTypeVideo: %d isTypePreview: %d", isTypeVideo, isTypePreview);
Thierry Strudel3d639192016-09-09 11:52:26 -07004938
4939 if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
4940 int32_t hal_version = CAM_HAL_V3;
4941 uint8_t captureIntent =
4942 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
4943 mCaptureIntent = captureIntent;
4944 clear_metadata_buffer(mParameters);
4945 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version);
4946 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_CAPTURE_INTENT, captureIntent);
4947 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07004948 if (mFirstConfiguration) {
4949 // configure instant AEC
4950 // Instant AEC is a session based parameter and it is needed only
4951 // once per complete session after open camera.
4952 // i.e. This is set only once for the first capture request, after open camera.
4953 setInstantAEC(meta);
4954 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004955 uint8_t fwkVideoStabMode=0;
4956 if (meta.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
4957 fwkVideoStabMode = meta.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
4958 }
4959
Xue Tuecac74e2017-04-17 13:58:15 -07004960 // If EIS setprop is enabled then only turn it on for video/preview
4961 bool setEis = m_bEisEnable && m_bEisSupportedSize &&
Jason Lee603176d2017-05-31 11:43:27 -07004962 (isTypeVideo >= IS_TYPE_EIS_2_0) && !meta.exists(QCAMERA3_USE_AV_TIMER);
Thierry Strudel3d639192016-09-09 11:52:26 -07004963 int32_t vsMode;
4964 vsMode = (setEis)? DIS_ENABLE: DIS_DISABLE;
4965 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_DIS_ENABLE, vsMode)) {
4966 rc = BAD_VALUE;
4967 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004968 LOGD("setEis %d", setEis);
4969 bool eis3Supported = false;
4970 size_t count = IS_TYPE_MAX;
4971 count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
4972 for (size_t i = 0; i < count; i++) {
4973 if (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0) {
4974 eis3Supported = true;
4975 break;
4976 }
4977 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004978
4979 //IS type will be 0 unless EIS is supported. If EIS is supported
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004980 //it could either be 4 or 5 depending on the stream and video size
Thierry Strudel3d639192016-09-09 11:52:26 -07004981 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
4982 if (setEis) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004983 if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_PREVIEW) {
4984 is_type = isTypePreview;
4985 } else if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_VIDEO ) {
4986 if ( (isTypeVideo == IS_TYPE_EIS_3_0) && (eis3Supported == FALSE) ) {
4987 LOGW(" EIS_3.0 is not supported and so setting EIS_2.0");
Thierry Strudel3d639192016-09-09 11:52:26 -07004988 is_type = IS_TYPE_EIS_2_0;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004989 } else {
4990 is_type = isTypeVideo;
Thierry Strudel3d639192016-09-09 11:52:26 -07004991 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004992 } else {
4993 is_type = IS_TYPE_NONE;
4994 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004995 mStreamConfigInfo.is_type[i] = is_type;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004996 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07004997 mStreamConfigInfo.is_type[i] = IS_TYPE_NONE;
4998 }
4999 }
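        // Note: summary of the per-stream IS type chosen above when EIS is enabled:
        //   preview stream -> isTypePreview (persist.camera.is_type_preview)
        //   video stream   -> isTypeVideo (persist.camera.is_type), downgraded to IS_TYPE_EIS_2_0
        //                     when EIS 3.0 is requested but not present in supported_is_types
        //   other streams  -> IS_TYPE_NONE
        // With setEis false, every stream gets IS_TYPE_NONE.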
5000
5001 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5002 CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
5003
Thierry Strudel54dc9782017-02-15 12:12:10 -08005004 //Disable tintless only if the property is set to 0
5005 memset(prop, 0, sizeof(prop));
5006 property_get("persist.camera.tintless.enable", prop, "1");
5007 int32_t tintless_value = atoi(prop);
5008
Thierry Strudel3d639192016-09-09 11:52:26 -07005009 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5010 CAM_INTF_PARM_TINTLESS, tintless_value);
Thierry Strudel54dc9782017-02-15 12:12:10 -08005011
Thierry Strudel3d639192016-09-09 11:52:26 -07005012 //Disable CDS for HFR mode or if DIS/EIS is on.
5013 //CDS is a session parameter in the backend/ISP, so need to be set/reset
5014 //after every configure_stream
5015 if ((CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) ||
5016 (m_bIsVideo)) {
5017 int32_t cds = CAM_CDS_MODE_OFF;
5018 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5019 CAM_INTF_PARM_CDS_MODE, cds))
5020 LOGE("Failed to disable CDS for HFR mode");
5021
5022 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005023
5024 if (m_debug_avtimer || meta.exists(QCAMERA3_USE_AV_TIMER)) {
5025 uint8_t* use_av_timer = NULL;
5026
5027 if (m_debug_avtimer){
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005028 LOGI(" Enabling AV timer through setprop");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005029 use_av_timer = &m_debug_avtimer;
Binhao Lin09245482017-08-31 18:25:29 -07005030 m_bAVTimerEnabled = true;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005031 }
5032 else{
5033 use_av_timer =
5034 meta.find(QCAMERA3_USE_AV_TIMER).data.u8;
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005035 if (use_av_timer) {
Binhao Lin09245482017-08-31 18:25:29 -07005036 m_bAVTimerEnabled = true;
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005037 LOGI("Enabling AV timer through Metadata: use_av_timer: %d", *use_av_timer);
5038 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005039 }
5040
5041 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_USE_AV_TIMER, *use_av_timer)) {
5042 rc = BAD_VALUE;
5043 }
5044 }
5045
Thierry Strudel3d639192016-09-09 11:52:26 -07005046 setMobicat();
5047
Emilian Peev49c4c6b2017-04-24 10:21:34 +01005048 uint8_t nrMode = 0;
5049 if (meta.exists(ANDROID_NOISE_REDUCTION_MODE)) {
5050 nrMode = meta.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
5051 }
5052
Thierry Strudel3d639192016-09-09 11:52:26 -07005053 /* Set fps and hfr mode while sending meta stream info so that sensor
5054 * can configure appropriate streaming mode */
5055 mHFRVideoFps = DEFAULT_VIDEO_FPS;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005056 mMinInFlightRequests = MIN_INFLIGHT_REQUESTS;
5057 mMaxInFlightRequests = MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07005058 if (meta.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
5059 rc = setHalFpsRange(meta, mParameters);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005060 if (rc == NO_ERROR) {
5061 int32_t max_fps =
5062 (int32_t) meta.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
Zhijun He21b864a2016-06-24 13:41:19 -07005063 if (max_fps == 60 || mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005064 mMinInFlightRequests = MIN_INFLIGHT_60FPS_REQUESTS;
5065 }
5066 /* For HFR, more buffers are dequeued upfront to improve the performance */
5067 if (mBatchSize) {
5068 mMinInFlightRequests = MIN_INFLIGHT_HFR_REQUESTS;
5069 mMaxInFlightRequests = MAX_INFLIGHT_HFR_REQUESTS;
5070 }
5071 }
5072 else {
Thierry Strudel3d639192016-09-09 11:52:26 -07005073 LOGE("setHalFpsRange failed");
5074 }
5075 }
5076 if (meta.exists(ANDROID_CONTROL_MODE)) {
5077 uint8_t metaMode = meta.find(ANDROID_CONTROL_MODE).data.u8[0];
5078 rc = extractSceneMode(meta, metaMode, mParameters);
5079 if (rc != NO_ERROR) {
5080 LOGE("extractSceneMode failed");
5081 }
5082 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005083 memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07005084
Thierry Strudel04e026f2016-10-10 11:27:36 -07005085 if (meta.exists(QCAMERA3_VIDEO_HDR_MODE)) {
5086 cam_video_hdr_mode_t vhdr = (cam_video_hdr_mode_t)
5087 meta.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
5088 rc = setVideoHdrMode(mParameters, vhdr);
5089 if (rc != NO_ERROR) {
5090 LOGE("setVideoHDR is failed");
5091 }
5092 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005093
Ranjith Kagathi Anandae5df3752017-04-28 11:22:51 -07005094 if (meta.exists(TANGO_MODE_DATA_SENSOR_FULLFOV)) {
Ranjith Kagathi Ananda0533b682017-03-24 17:52:46 -07005095 uint8_t sensorModeFullFov =
Ranjith Kagathi Anandae5df3752017-04-28 11:22:51 -07005096 meta.find(TANGO_MODE_DATA_SENSOR_FULLFOV).data.u8[0];
Ranjith Kagathi Ananda0533b682017-03-24 17:52:46 -07005097 LOGD("SENSOR_MODE_FULLFOV %d" , sensorModeFullFov);
5098 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_SENSOR_MODE_FULLFOV,
5099 sensorModeFullFov)) {
5100 rc = BAD_VALUE;
5101 }
5102 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005103 //TODO: validate the arguments, HSV scenemode should have only the
5104 //advertised fps ranges
5105
5106 /*set the capture intent, hal version, tintless, stream info,
 5107 * and DIS enable parameters to the backend */
5108 LOGD("set_parms META_STREAM_INFO " );
5109 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
Thierry Strudel2896d122017-02-23 19:18:03 -08005110 LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%" PRIx64
5111 ", Format:%d is_type: %d",
Thierry Strudel3d639192016-09-09 11:52:26 -07005112 mStreamConfigInfo.type[i],
5113 mStreamConfigInfo.stream_sizes[i].width,
5114 mStreamConfigInfo.stream_sizes[i].height,
5115 mStreamConfigInfo.postprocess_mask[i],
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005116 mStreamConfigInfo.format[i],
5117 mStreamConfigInfo.is_type[i]);
Thierry Strudel3d639192016-09-09 11:52:26 -07005118 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005119
Thierry Strudel3d639192016-09-09 11:52:26 -07005120 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
5121 mParameters);
5122 if (rc < 0) {
5123 LOGE("set_parms failed for hal version, stream info");
5124 }
5125
Chien-Yu Chen605c3872017-06-14 11:09:23 -07005126 cam_sensor_mode_info_t sensorModeInfo = {};
5127 rc = getSensorModeInfo(sensorModeInfo);
Thierry Strudel3d639192016-09-09 11:52:26 -07005128 if (rc != NO_ERROR) {
5129 LOGE("Failed to get sensor output size");
5130 pthread_mutex_unlock(&mMutex);
5131 goto error_exit;
5132 }
5133
5134 mCropRegionMapper.update(gCamCapability[mCameraId]->active_array_size.width,
5135 gCamCapability[mCameraId]->active_array_size.height,
Chien-Yu Chen605c3872017-06-14 11:09:23 -07005136 sensorModeInfo.active_array_size.width,
5137 sensorModeInfo.active_array_size.height);
Thierry Strudel3d639192016-09-09 11:52:26 -07005138
5139 /* Set batchmode before initializing channel. Since registerBuffer
5140 * internally initializes some of the channels, better set batchmode
5141 * even before first register buffer */
5142 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5143 it != mStreamInfo.end(); it++) {
5144 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
5145 if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
5146 && mBatchSize) {
5147 rc = channel->setBatchSize(mBatchSize);
5148 //Disable per frame map unmap for HFR/batchmode case
5149 rc |= channel->setPerFrameMapUnmap(false);
5150 if (NO_ERROR != rc) {
5151 LOGE("Channel init failed %d", rc);
5152 pthread_mutex_unlock(&mMutex);
5153 goto error_exit;
5154 }
5155 }
5156 }
5157
5158 //First initialize all streams
5159 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5160 it != mStreamInfo.end(); it++) {
5161 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
Emilian Peev49c4c6b2017-04-24 10:21:34 +01005162
5163 /* Initial value of NR mode is needed before stream on */
5164 channel->setNRMode(nrMode);
Thierry Strudel3d639192016-09-09 11:52:26 -07005165 if ((((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) ||
5166 ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask())) &&
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005167 setEis) {
5168 for (size_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
5169 if ( (1U << mStreamConfigInfo.type[i]) == channel->getStreamTypeMask() ) {
5170 is_type = mStreamConfigInfo.is_type[i];
5171 break;
5172 }
5173 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005174 rc = channel->initialize(is_type);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005175 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07005176 rc = channel->initialize(IS_TYPE_NONE);
5177 }
5178 if (NO_ERROR != rc) {
5179 LOGE("Channel initialization failed %d", rc);
5180 pthread_mutex_unlock(&mMutex);
5181 goto error_exit;
5182 }
5183 }
5184
5185 if (mRawDumpChannel) {
5186 rc = mRawDumpChannel->initialize(IS_TYPE_NONE);
5187 if (rc != NO_ERROR) {
5188 LOGE("Error: Raw Dump Channel init failed");
5189 pthread_mutex_unlock(&mMutex);
5190 goto error_exit;
5191 }
5192 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07005193 if (mHdrPlusRawSrcChannel) {
5194 rc = mHdrPlusRawSrcChannel->initialize(IS_TYPE_NONE);
5195 if (rc != NO_ERROR) {
5196 LOGE("Error: HDR+ RAW Source Channel init failed");
5197 pthread_mutex_unlock(&mMutex);
5198 goto error_exit;
5199 }
5200 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005201 if (mSupportChannel) {
5202 rc = mSupportChannel->initialize(IS_TYPE_NONE);
5203 if (rc < 0) {
5204 LOGE("Support channel initialization failed");
5205 pthread_mutex_unlock(&mMutex);
5206 goto error_exit;
5207 }
5208 }
5209 if (mAnalysisChannel) {
5210 rc = mAnalysisChannel->initialize(IS_TYPE_NONE);
5211 if (rc < 0) {
5212 LOGE("Analysis channel initialization failed");
5213 pthread_mutex_unlock(&mMutex);
5214 goto error_exit;
5215 }
5216 }
5217 if (mDummyBatchChannel) {
5218 rc = mDummyBatchChannel->setBatchSize(mBatchSize);
5219 if (rc < 0) {
5220 LOGE("mDummyBatchChannel setBatchSize failed");
5221 pthread_mutex_unlock(&mMutex);
5222 goto error_exit;
5223 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005224 rc = mDummyBatchChannel->initialize(IS_TYPE_NONE);
Thierry Strudel3d639192016-09-09 11:52:26 -07005225 if (rc < 0) {
5226 LOGE("mDummyBatchChannel initialization failed");
5227 pthread_mutex_unlock(&mMutex);
5228 goto error_exit;
5229 }
5230 }
5231
5232 // Set bundle info
5233 rc = setBundleInfo();
5234 if (rc < 0) {
5235 LOGE("setBundleInfo failed %d", rc);
5236 pthread_mutex_unlock(&mMutex);
5237 goto error_exit;
5238 }
5239
5240 //update settings from app here
5241 if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
5242 mIsDeviceLinked = meta.find(QCAMERA3_DUALCAM_LINK_ENABLE).data.u8[0];
5243 LOGH("Dualcam: setting On=%d id =%d", mIsDeviceLinked, mCameraId);
5244 }
5245 if (meta.exists(QCAMERA3_DUALCAM_LINK_IS_MAIN)) {
5246 mIsMainCamera = meta.find(QCAMERA3_DUALCAM_LINK_IS_MAIN).data.u8[0];
5247 LOGH("Dualcam: Is this main camera = %d id =%d", mIsMainCamera, mCameraId);
5248 }
5249 if (meta.exists(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID)) {
5250 mLinkedCameraId = meta.find(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID).data.u8[0];
5251 LOGH("Dualcam: Linked camera Id %d id =%d", mLinkedCameraId, mCameraId);
5252
5253 if ( (mLinkedCameraId >= MM_CAMERA_MAX_NUM_SENSORS) &&
5254 (mLinkedCameraId != mCameraId) ) {
5255 LOGE("Dualcam: mLinkedCameraId %d is invalid, current cam id = %d",
5256 mLinkedCameraId, mCameraId);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005257 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005258 goto error_exit;
5259 }
5260 }
5261
5262 // add bundle related cameras
5263 LOGH("%s: Dualcam: id =%d, mIsDeviceLinked=%d", __func__,mCameraId, mIsDeviceLinked);
5264 if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005265 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
5266 &m_pDualCamCmdPtr->bundle_info;
5267 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
Thierry Strudel3d639192016-09-09 11:52:26 -07005268 if (mIsDeviceLinked)
5269 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_ON;
5270 else
5271 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
5272
5273 pthread_mutex_lock(&gCamLock);
5274
5275 if (sessionId[mLinkedCameraId] == 0xDEADBEEF) {
5276 LOGE("Dualcam: Invalid Session Id ");
5277 pthread_mutex_unlock(&gCamLock);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005278 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005279 goto error_exit;
5280 }
5281
5282 if (mIsMainCamera == 1) {
5283 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
5284 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
Thierry Strudel269c81a2016-10-12 12:13:59 -07005285 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005286 m_pRelCamSyncBuf->cam_role = CAM_ROLE_BAYER;
Thierry Strudel3d639192016-09-09 11:52:26 -07005287 // related session id should be session id of linked session
5288 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5289 } else {
5290 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
5291 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
Thierry Strudel269c81a2016-10-12 12:13:59 -07005292 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005293 m_pRelCamSyncBuf->cam_role = CAM_ROLE_MONO;
Thierry Strudel3d639192016-09-09 11:52:26 -07005294 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5295 }
Thierry Strudel2896d122017-02-23 19:18:03 -08005296 m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
Thierry Strudel3d639192016-09-09 11:52:26 -07005297 pthread_mutex_unlock(&gCamLock);
5298
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005299 rc = mCameraHandle->ops->set_dual_cam_cmd(
5300 mCameraHandle->camera_handle);
Thierry Strudel3d639192016-09-09 11:52:26 -07005301 if (rc < 0) {
5302 LOGE("Dualcam: link failed");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005303 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005304 goto error_exit;
5305 }
5306 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005307 goto no_error;
5308error_exit:
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005309 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07005310 return rc;
5311no_error:
Thierry Strudel3d639192016-09-09 11:52:26 -07005312 mWokenUpByDaemon = false;
5313 mPendingLiveRequest = 0;
5314 mFirstConfiguration = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07005315 }
5316
5317 uint32_t frameNumber = request->frame_number;
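    // streamsArray collects the stream IDs (and buffer indices) requested in this
    // capture request; it is later set into mParameters as CAM_INTF_META_STREAM_ID
    // so the backend knows which streams need buffers for this frame.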
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005318 cam_stream_ID_t streamsArray;
Thierry Strudel3d639192016-09-09 11:52:26 -07005319
5320 if (mFlushPerf) {
5321 //we cannot accept any requests during flush
5322 LOGE("process_capture_request cannot proceed during flush");
5323 pthread_mutex_unlock(&mMutex);
5324 return NO_ERROR; //should return an error
5325 }
5326
5327 if (meta.exists(ANDROID_REQUEST_ID)) {
5328 request_id = meta.find(ANDROID_REQUEST_ID).data.i32[0];
5329 mCurrentRequestId = request_id;
5330 LOGD("Received request with id: %d", request_id);
5331 } else if (mState == CONFIGURED || mCurrentRequestId == -1){
5332 LOGE("Unable to find request id field, \
5333 & no previous id available");
5334 pthread_mutex_unlock(&mMutex);
5335 return NAME_NOT_FOUND;
5336 } else {
5337 LOGD("Re-using old request id");
5338 request_id = mCurrentRequestId;
5339 }
5340
5341 LOGH("num_output_buffers = %d input_buffer = %p frame_number = %d",
5342 request->num_output_buffers,
5343 request->input_buffer,
5344 frameNumber);
5345 // Acquire all request buffers first
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005346 streamsArray.num_streams = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07005347 int blob_request = 0;
Emilian Peev7650c122017-01-19 08:24:33 -08005348 bool depthRequestPresent = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07005349 uint32_t snapshotStreamId = 0;
5350 for (size_t i = 0; i < request->num_output_buffers; i++) {
5351 const camera3_stream_buffer_t& output = request->output_buffers[i];
5352 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
5353
Emilian Peev7650c122017-01-19 08:24:33 -08005354 if ((output.stream->format == HAL_PIXEL_FORMAT_BLOB) &&
5355 (output.stream->data_space != HAL_DATASPACE_DEPTH)) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005356 //FIXME??:Call function to store local copy of jpeg data for encode params.
Thierry Strudel3d639192016-09-09 11:52:26 -07005357 blob_request = 1;
5358 snapshotStreamId = channel->getStreamID(channel->getStreamTypeMask());
5359 }
5360
5361 if (output.acquire_fence != -1) {
5362 rc = sync_wait(output.acquire_fence, TIMEOUT_NEVER);
5363 close(output.acquire_fence);
5364 if (rc != OK) {
5365 LOGE("sync wait failed %d", rc);
5366 pthread_mutex_unlock(&mMutex);
5367 return rc;
5368 }
5369 }
5370
Emilian Peev0f3c3162017-03-15 12:57:46 +00005371 if ((output.stream->format == HAL_PIXEL_FORMAT_BLOB) &&
5372 (output.stream->data_space == HAL_DATASPACE_DEPTH)) {
Emilian Peev7650c122017-01-19 08:24:33 -08005373 depthRequestPresent = true;
5374 continue;
5375 }
5376
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005377 streamsArray.stream_request[streamsArray.num_streams++].streamID =
Thierry Strudel3d639192016-09-09 11:52:26 -07005378 channel->getStreamID(channel->getStreamTypeMask());
Thierry Strudel3d639192016-09-09 11:52:26 -07005379
5380 if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
5381 isVidBufRequested = true;
5382 }
5383 }
5384
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005385    //FIXME: Add checks to ensure no dups in validateCaptureRequest
5386 for (auto itr = internallyRequestedStreams.begin(); itr != internallyRequestedStreams.end();
5387 itr++) {
5388 QCamera3Channel *channel = (QCamera3Channel *)(*itr).stream->priv;
5389 streamsArray.stream_request[streamsArray.num_streams++].streamID =
5390 channel->getStreamID(channel->getStreamTypeMask());
5391
5392 if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
5393 isVidBufRequested = true;
5394 }
5395 }
5396
Thierry Strudel3d639192016-09-09 11:52:26 -07005397 if (blob_request) {
Shuzhen Wang850a7c22017-05-02 14:48:23 -07005398 ATRACE_ASYNC_BEGIN("SNAPSHOT", frameNumber);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005399 mPerfLockMgr.acquirePerfLock(PERF_LOCK_TAKE_SNAPSHOT);
Thierry Strudel3d639192016-09-09 11:52:26 -07005400 }
5401 if (blob_request && mRawDumpChannel) {
5402 LOGD("Trigger Raw based on blob request if Raw dump is enabled");
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005403 streamsArray.stream_request[streamsArray.num_streams].streamID =
Thierry Strudel3d639192016-09-09 11:52:26 -07005404 mRawDumpChannel->getStreamID(mRawDumpChannel->getStreamTypeMask());
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005405 streamsArray.stream_request[streamsArray.num_streams++].buf_index = CAM_FREERUN_IDX;
Thierry Strudel3d639192016-09-09 11:52:26 -07005406 }
5407
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005408 {
5409 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5410 // Request a RAW buffer if
5411 // 1. mHdrPlusRawSrcChannel is valid.
5412        // 2. frameNumber is a multiple of kHdrPlusRawPeriod (in order to limit RAW capture rate).
5413 // 3. There is no pending HDR+ request.
5414 if (mHdrPlusRawSrcChannel && frameNumber % kHdrPlusRawPeriod == 0 &&
5415 mHdrPlusPendingRequests.size() == 0) {
5416 streamsArray.stream_request[streamsArray.num_streams].streamID =
5417 mHdrPlusRawSrcChannel->getStreamID(mHdrPlusRawSrcChannel->getStreamTypeMask());
5418 streamsArray.stream_request[streamsArray.num_streams++].buf_index = CAM_FREERUN_IDX;
5419 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07005420 }
5421
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005422 //extract capture intent
5423 if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
5424 mCaptureIntent =
5425 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
5426 }
5427
5428 if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
5429 mCacMode =
5430 meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
5431 }
5432
Chien-Yu Chenbc730232017-07-12 14:49:55 -07005433 uint8_t requestedLensShadingMapMode;
5434 // Get the shading map mode.
5435 if (meta.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
5436 mLastRequestedLensShadingMapMode = requestedLensShadingMapMode =
5437 meta.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
5438 } else {
5439 requestedLensShadingMapMode = mLastRequestedLensShadingMapMode;
5440 }
5441
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005442 bool hdrPlusRequest = false;
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005443 HdrPlusPendingRequest pendingHdrPlusRequest = {};
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005444
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07005445 {
Chien-Yu Chen77ccd022017-06-23 12:00:36 -07005446 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07005447 // If this request has a still capture intent, try to submit an HDR+ request.
5448 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled &&
5449 mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE) {
5450 hdrPlusRequest = trySubmittingHdrPlusRequestLocked(&pendingHdrPlusRequest, *request, meta);
5451 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005452 }
5453
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005454 if (hdrPlusRequest) {
5455 // For a HDR+ request, just set the frame parameters.
5456 rc = setFrameParameters(request, streamsArray, blob_request, snapshotStreamId);
5457 if (rc < 0) {
5458 LOGE("fail to set frame parameters");
5459 pthread_mutex_unlock(&mMutex);
5460 return rc;
5461 }
5462 } else if(request->input_buffer == NULL) {
Thierry Strudel3d639192016-09-09 11:52:26 -07005463 /* Parse the settings:
5464 * - For every request in NORMAL MODE
5465 * - For every request in HFR mode during preview only case
5466 * - For first request of every batch in HFR mode during video
5467         *   recording. In batch mode the same settings, except the frame number,
5468         *   are repeated in each request of the batch.
5469 */
5470 if (!mBatchSize ||
5471 (mBatchSize && !isVidBufRequested) ||
5472 (mBatchSize && isVidBufRequested && !mToBeQueuedVidBufs)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005473 rc = setFrameParameters(request, streamsArray, blob_request, snapshotStreamId);
Thierry Strudel3d639192016-09-09 11:52:26 -07005474 if (rc < 0) {
5475 LOGE("fail to set frame parameters");
5476 pthread_mutex_unlock(&mMutex);
5477 return rc;
5478 }
Chien-Yu Chenbc730232017-07-12 14:49:55 -07005479
5480 {
5481 // If HDR+ mode is enabled, override lens shading mode to ON so lens shading map
5482 // will be reported in result metadata.
5483 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
5484 if (mHdrPlusModeEnabled) {
5485 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_LENS_SHADING_MAP_MODE,
5486 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON);
5487 }
5488 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005489 }
5490 /* For batchMode HFR, setFrameParameters is not called for every
5491         * request; only the frame number of the latest request is parsed.
5492         * Keep track of the first and last frame numbers in a batch so that
5493         * metadata for the frame numbers of the batch can be duplicated in
5494         * handleBatchMetadata */
5495 if (mBatchSize) {
5496 if (!mToBeQueuedVidBufs) {
5497 //start of the batch
5498 mFirstFrameNumberInBatch = request->frame_number;
5499 }
5500 if(ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5501 CAM_INTF_META_FRAME_NUMBER, request->frame_number)) {
5502 LOGE("Failed to set the frame number in the parameters");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005503 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005504 return BAD_VALUE;
5505 }
5506 }
5507 if (mNeedSensorRestart) {
5508 /* Unlock the mutex as restartSensor waits on the channels to be
5509 * stopped, which in turn calls stream callback functions -
5510 * handleBufferWithLock and handleMetadataWithLock */
5511 pthread_mutex_unlock(&mMutex);
5512 rc = dynamicUpdateMetaStreamInfo();
5513 if (rc != NO_ERROR) {
5514 LOGE("Restarting the sensor failed");
5515 return BAD_VALUE;
5516 }
5517 mNeedSensorRestart = false;
5518 pthread_mutex_lock(&mMutex);
5519 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005520 if(mResetInstantAEC) {
5521 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5522 CAM_INTF_PARM_INSTANT_AEC, (uint8_t)CAM_AEC_NORMAL_CONVERGENCE);
5523 mResetInstantAEC = false;
5524 }
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005525 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07005526 if (request->input_buffer->acquire_fence != -1) {
5527 rc = sync_wait(request->input_buffer->acquire_fence, TIMEOUT_NEVER);
5528 close(request->input_buffer->acquire_fence);
5529 if (rc != OK) {
5530 LOGE("input buffer sync wait failed %d", rc);
5531 pthread_mutex_unlock(&mMutex);
5532 return rc;
5533 }
5534 }
5535 }
5536
5537 if (mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM) {
5538 mLastCustIntentFrmNum = frameNumber;
5539 }
5540 /* Update pending request list and pending buffers map */
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005541 PendingRequestInfo pendingRequest = {};
Thierry Strudel3d639192016-09-09 11:52:26 -07005542 pendingRequestIterator latestRequest;
5543 pendingRequest.frame_number = frameNumber;
Emilian Peev7650c122017-01-19 08:24:33 -08005544 pendingRequest.num_buffers = depthRequestPresent ?
5545 (request->num_output_buffers - 1 ) : request->num_output_buffers;
Thierry Strudel3d639192016-09-09 11:52:26 -07005546 pendingRequest.request_id = request_id;
5547 pendingRequest.blob_request = blob_request;
5548 pendingRequest.timestamp = 0;
Chien-Yu Chenbc730232017-07-12 14:49:55 -07005549 pendingRequest.requestedLensShadingMapMode = requestedLensShadingMapMode;
Thierry Strudel3d639192016-09-09 11:52:26 -07005550 if (request->input_buffer) {
5551 pendingRequest.input_buffer =
5552 (camera3_stream_buffer_t*)malloc(sizeof(camera3_stream_buffer_t));
5553 *(pendingRequest.input_buffer) = *(request->input_buffer);
5554 pInputBuffer = pendingRequest.input_buffer;
5555 } else {
5556 pendingRequest.input_buffer = NULL;
5557 pInputBuffer = NULL;
5558 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005559 pendingRequest.bUseFirstPartial = (mState == CONFIGURED && !request->input_buffer);
Thierry Strudel3d639192016-09-09 11:52:26 -07005560
5561 pendingRequest.pipeline_depth = 0;
5562 pendingRequest.partial_result_cnt = 0;
5563 extractJpegMetadata(mCurJpegMeta, request);
5564 pendingRequest.jpegMetadata = mCurJpegMeta;
5565 pendingRequest.settings = saveRequestSettings(mCurJpegMeta, request);
Thierry Strudel3d639192016-09-09 11:52:26 -07005566 pendingRequest.capture_intent = mCaptureIntent;
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07005567 if (meta.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
Shuzhen Wang77b049a2017-08-30 12:24:36 -07005568 pendingRequest.hybrid_ae_enable =
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07005569 meta.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8[0];
5570 }
Chien-Yu Chen98b126c2017-03-14 14:55:32 -07005571
Samuel Ha68ba5172016-12-15 18:41:12 -08005572 /* DevCamDebug metadata processCaptureRequest */
5573 if (meta.exists(DEVCAMDEBUG_META_ENABLE)) {
5574 mDevCamDebugMetaEnable =
5575 meta.find(DEVCAMDEBUG_META_ENABLE).data.u8[0];
5576 }
5577 pendingRequest.DevCamDebug_meta_enable = mDevCamDebugMetaEnable;
5578 /* DevCamDebug metadata end */
Thierry Strudel3d639192016-09-09 11:52:26 -07005579
5580 //extract CAC info
5581 if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
5582 mCacMode =
5583 meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
5584 }
5585 pendingRequest.fwkCacMode = mCacMode;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005586 pendingRequest.hdrplus = hdrPlusRequest;
Emilian Peev30522a12017-08-03 14:36:33 +01005587 pendingRequest.expectedFrameDuration = mExpectedFrameDuration;
5588 mExpectedInflightDuration += mExpectedFrameDuration;
Thierry Strudel3d639192016-09-09 11:52:26 -07005589
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07005590 // extract enableZsl info
5591 if (gExposeEnableZslKey) {
5592 if (meta.exists(ANDROID_CONTROL_ENABLE_ZSL)) {
5593 pendingRequest.enableZsl = meta.find(ANDROID_CONTROL_ENABLE_ZSL).data.u8[0];
5594 mZslEnabled = pendingRequest.enableZsl;
5595 } else {
5596 pendingRequest.enableZsl = mZslEnabled;
5597 }
5598 }
5599
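    // bufsForCurRequest tracks every output buffer of this request inside
    // mPendingBuffersMap until the buffer is returned to the framework.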
Thierry Strudel3d639192016-09-09 11:52:26 -07005600 PendingBuffersInRequest bufsForCurRequest;
5601 bufsForCurRequest.frame_number = frameNumber;
5602 // Mark current timestamp for the new request
5603 bufsForCurRequest.timestamp = systemTime(CLOCK_MONOTONIC);
Binhao Lin09245482017-08-31 18:25:29 -07005604 bufsForCurRequest.av_timestamp = 0;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005605 bufsForCurRequest.hdrplus = hdrPlusRequest;
Thierry Strudel3d639192016-09-09 11:52:26 -07005606
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005607 if (hdrPlusRequest) {
5608 // Save settings for this request.
5609 pendingHdrPlusRequest.settings = std::make_shared<metadata_buffer_t>();
5610 memcpy(pendingHdrPlusRequest.settings.get(), mParameters, sizeof(metadata_buffer_t));
5611
5612 // Add to pending HDR+ request queue.
5613 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5614 mHdrPlusPendingRequests.emplace(frameNumber, pendingHdrPlusRequest);
5615
5616 ALOGD("%s: frame number %u is an HDR+ request.", __FUNCTION__, frameNumber);
5617 }
5618
Thierry Strudel3d639192016-09-09 11:52:26 -07005619 for (size_t i = 0; i < request->num_output_buffers; i++) {
Emilian Peev0f3c3162017-03-15 12:57:46 +00005620 if ((request->output_buffers[i].stream->data_space ==
5621 HAL_DATASPACE_DEPTH) &&
5622 (HAL_PIXEL_FORMAT_BLOB ==
5623 request->output_buffers[i].stream->format)) {
Emilian Peev7650c122017-01-19 08:24:33 -08005624 continue;
5625 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005626 RequestedBufferInfo requestedBuf;
5627 memset(&requestedBuf, 0, sizeof(requestedBuf));
5628 requestedBuf.stream = request->output_buffers[i].stream;
5629 requestedBuf.buffer = NULL;
5630 pendingRequest.buffers.push_back(requestedBuf);
5631
5632 // Add to buffer handle the pending buffers list
5633 PendingBufferInfo bufferInfo;
5634 bufferInfo.buffer = request->output_buffers[i].buffer;
5635 bufferInfo.stream = request->output_buffers[i].stream;
5636 bufsForCurRequest.mPendingBufferList.push_back(bufferInfo);
5637 QCamera3Channel *channel = (QCamera3Channel *)bufferInfo.stream->priv;
5638 LOGD("frame = %d, buffer = %p, streamTypeMask = %d, stream format = %d",
5639 frameNumber, bufferInfo.buffer,
5640 channel->getStreamTypeMask(), bufferInfo.stream->format);
5641 }
5642 // Add this request packet into mPendingBuffersMap
5643 mPendingBuffersMap.mPendingBuffersInRequest.push_back(bufsForCurRequest);
5644 LOGD("mPendingBuffersMap.num_overall_buffers = %d",
5645 mPendingBuffersMap.get_num_overall_buffers());
5646
5647 latestRequest = mPendingRequestsList.insert(
5648 mPendingRequestsList.end(), pendingRequest);
Chien-Yu Chen3f303522017-05-19 15:21:45 -07005649
5650 // Let shutter dispatcher and buffer dispatcher know shutter and output buffers are expected
5651 // for the frame number.
Chien-Yu Chena7f98612017-06-20 16:54:10 -07005652 mShutterDispatcher.expectShutter(frameNumber, request->input_buffer != nullptr);
Chien-Yu Chen3f303522017-05-19 15:21:45 -07005653 for (size_t i = 0; i < request->num_output_buffers; i++) {
5654 mOutputBufferDispatcher.expectBuffer(frameNumber, request->output_buffers[i].stream);
5655 }
5656
Thierry Strudel3d639192016-09-09 11:52:26 -07005657 if(mFlush) {
5658 LOGI("mFlush is true");
5659 pthread_mutex_unlock(&mMutex);
5660 return NO_ERROR;
5661 }
5662
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005663 // If this is not an HDR+ request, send the request to metadata and each output buffer's
5664 // channel.
5665 if (!hdrPlusRequest) {
5666 int indexUsed;
5667 // Notify metadata channel we receive a request
5668 mMetadataChannel->request(NULL, frameNumber, indexUsed);
Thierry Strudel3d639192016-09-09 11:52:26 -07005669
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005670 if(request->input_buffer != NULL){
5671 LOGD("Input request, frame_number %d", frameNumber);
5672 rc = setReprocParameters(request, &mReprocMeta, snapshotStreamId);
5673 if (NO_ERROR != rc) {
5674 LOGE("fail to set reproc parameters");
5675 pthread_mutex_unlock(&mMutex);
5676 return rc;
5677 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005678 }
5679
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005680 // Call request on other streams
5681 uint32_t streams_need_metadata = 0;
5682 pendingBufferIterator pendingBufferIter = latestRequest->buffers.begin();
5683 for (size_t i = 0; i < request->num_output_buffers; i++) {
5684 const camera3_stream_buffer_t& output = request->output_buffers[i];
5685 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
5686
5687 if (channel == NULL) {
5688 LOGW("invalid channel pointer for stream");
5689 continue;
5690 }
5691
5692 if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
5693 LOGD("snapshot request with output buffer %p, input buffer %p, frame_number %d",
5694 output.buffer, request->input_buffer, frameNumber);
5695 if(request->input_buffer != NULL){
Thierry Strudel3d639192016-09-09 11:52:26 -07005696 rc = channel->request(output.buffer, frameNumber,
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005697 pInputBuffer, &mReprocMeta, indexUsed, false, false);
5698 if (rc < 0) {
5699 LOGE("Fail to request on picture channel");
5700 pthread_mutex_unlock(&mMutex);
5701 return rc;
5702 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005703 } else {
Emilian Peev7650c122017-01-19 08:24:33 -08005704 if (HAL_DATASPACE_DEPTH == output.stream->data_space) {
5705 assert(NULL != mDepthChannel);
5706 assert(mDepthChannel == output.stream->priv);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005707
Emilian Peev7650c122017-01-19 08:24:33 -08005708 rc = mDepthChannel->mapBuffer(output.buffer, request->frame_number);
5709 if (rc < 0) {
5710 LOGE("Fail to map on depth buffer");
5711 pthread_mutex_unlock(&mMutex);
5712 return rc;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005713 }
Emilian Peev4e0fe952017-06-30 12:40:09 -07005714 continue;
Emilian Peev7650c122017-01-19 08:24:33 -08005715 } else {
5716 LOGD("snapshot request with buffer %p, frame_number %d",
5717 output.buffer, frameNumber);
5718 if (!request->settings) {
5719 rc = channel->request(output.buffer, frameNumber,
5720 NULL, mPrevParameters, indexUsed);
5721 } else {
5722 rc = channel->request(output.buffer, frameNumber,
5723 NULL, mParameters, indexUsed);
5724 }
5725 if (rc < 0) {
5726 LOGE("Fail to request on picture channel");
5727 pthread_mutex_unlock(&mMutex);
5728 return rc;
5729 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005730
Emilian Peev7650c122017-01-19 08:24:33 -08005731 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5732 uint32_t j = 0;
5733 for (j = 0; j < streamsArray.num_streams; j++) {
5734 if (streamsArray.stream_request[j].streamID == streamId) {
5735 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5736 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5737 else
5738 streamsArray.stream_request[j].buf_index = indexUsed;
5739 break;
5740 }
5741 }
5742 if (j == streamsArray.num_streams) {
5743 LOGE("Did not find matching stream to update index");
5744 assert(0);
5745 }
5746
5747 pendingBufferIter->need_metadata = true;
5748 streams_need_metadata++;
5749 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005750 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005751 } else if (output.stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
5752 bool needMetadata = false;
5753 QCamera3YUVChannel *yuvChannel = (QCamera3YUVChannel *)channel;
5754 rc = yuvChannel->request(output.buffer, frameNumber,
5755 pInputBuffer, (pInputBuffer ? &mReprocMeta : mParameters),
5756 needMetadata, indexUsed, false, false);
Thierry Strudel3d639192016-09-09 11:52:26 -07005757 if (rc < 0) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005758 LOGE("Fail to request on YUV channel");
Thierry Strudel3d639192016-09-09 11:52:26 -07005759 pthread_mutex_unlock(&mMutex);
5760 return rc;
5761 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005762
5763 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5764 uint32_t j = 0;
5765 for (j = 0; j < streamsArray.num_streams; j++) {
5766 if (streamsArray.stream_request[j].streamID == streamId) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005767 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5768 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5769 else
5770 streamsArray.stream_request[j].buf_index = indexUsed;
5771 break;
5772 }
5773 }
5774 if (j == streamsArray.num_streams) {
5775 LOGE("Did not find matching stream to update index");
5776 assert(0);
5777 }
5778
5779 pendingBufferIter->need_metadata = needMetadata;
5780 if (needMetadata)
5781 streams_need_metadata += 1;
5782 LOGD("calling YUV channel request, need_metadata is %d",
5783 needMetadata);
5784 } else {
5785 LOGD("request with buffer %p, frame_number %d",
5786 output.buffer, frameNumber);
5787
5788 rc = channel->request(output.buffer, frameNumber, indexUsed);
5789
5790 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5791 uint32_t j = 0;
5792 for (j = 0; j < streamsArray.num_streams; j++) {
5793 if (streamsArray.stream_request[j].streamID == streamId) {
5794 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5795 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5796 else
5797 streamsArray.stream_request[j].buf_index = indexUsed;
5798 break;
5799 }
5800 }
5801 if (j == streamsArray.num_streams) {
5802 LOGE("Did not find matching stream to update index");
5803 assert(0);
5804 }
5805
5806 if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
5807 && mBatchSize) {
5808 mToBeQueuedVidBufs++;
5809 if (mToBeQueuedVidBufs == mBatchSize) {
5810 channel->queueBatchBuf();
5811 }
5812 }
5813 if (rc < 0) {
5814 LOGE("request failed");
5815 pthread_mutex_unlock(&mMutex);
5816 return rc;
5817 }
5818 }
5819 pendingBufferIter++;
5820 }
5821
5822 for (auto itr = internallyRequestedStreams.begin(); itr != internallyRequestedStreams.end();
5823 itr++) {
5824 QCamera3Channel *channel = (QCamera3Channel *)(*itr).stream->priv;
5825
5826 if (channel == NULL) {
5827 LOGE("invalid channel pointer for stream");
5828 assert(0);
Shuzhen Wang3a1b92d2017-08-09 13:39:47 -07005829 pthread_mutex_unlock(&mMutex);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005830 return BAD_VALUE;
5831 }
5832
5833 InternalRequest requestedStream;
5834 requestedStream = (*itr);
5835
5836
5837 if ((*itr).stream->format == HAL_PIXEL_FORMAT_BLOB) {
5838 LOGD("snapshot request internally input buffer %p, frame_number %d",
5839 request->input_buffer, frameNumber);
5840 if(request->input_buffer != NULL){
5841 rc = channel->request(NULL, frameNumber,
5842 pInputBuffer, &mReprocMeta, indexUsed, true,
5843 requestedStream.meteringOnly);
5844 if (rc < 0) {
5845 LOGE("Fail to request on picture channel");
5846 pthread_mutex_unlock(&mMutex);
5847 return rc;
5848 }
5849 } else {
5850 LOGD("snapshot request with frame_number %d", frameNumber);
5851 if (!request->settings) {
5852 rc = channel->request(NULL, frameNumber,
5853 NULL, mPrevParameters, indexUsed, true,
5854 requestedStream.meteringOnly);
5855 } else {
5856 rc = channel->request(NULL, frameNumber,
5857 NULL, mParameters, indexUsed, true, requestedStream.meteringOnly);
5858 }
5859 if (rc < 0) {
5860 LOGE("Fail to request on picture channel");
5861 pthread_mutex_unlock(&mMutex);
5862 return rc;
5863 }
5864
5865 if ((*itr).meteringOnly != 1) {
5866 requestedStream.need_metadata = 1;
5867 streams_need_metadata++;
5868 }
5869 }
5870
5871 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5872 uint32_t j = 0;
5873 for (j = 0; j < streamsArray.num_streams; j++) {
5874 if (streamsArray.stream_request[j].streamID == streamId) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005875 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5876 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5877 else
5878 streamsArray.stream_request[j].buf_index = indexUsed;
5879 break;
5880 }
5881 }
5882 if (j == streamsArray.num_streams) {
5883 LOGE("Did not find matching stream to update index");
5884 assert(0);
5885 }
5886
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005887 } else {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005888 LOGE("Internal requests not supported on this stream type");
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005889 assert(0);
Shuzhen Wang3a1b92d2017-08-09 13:39:47 -07005890 pthread_mutex_unlock(&mMutex);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005891 return INVALID_OPERATION;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005892 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005893 latestRequest->internalRequestList.push_back(requestedStream);
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005894 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005895
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005896 //If 2 streams have need_metadata set to true, fail the request, unless
5897 //we copy/reference count the metadata buffer
5898 if (streams_need_metadata > 1) {
5899 LOGE("not supporting request in which two streams requires"
5900 " 2 HAL metadata for reprocessing");
5901 pthread_mutex_unlock(&mMutex);
5902 return -EINVAL;
5903 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005904
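    // PDAF (phase-detection auto-focus) data handling: when a depth channel
    // exists, PD data defaults to CAM_PD_DATA_SKIP (otherwise DISABLED). A depth
    // request may switch it to ENABLED or SKIP via
    // NEXUS_EXPERIMENTAL_2017_PD_DATA_ENABLE; the last choice is cached in
    // mDepthCloudMode and reused when the setting is absent.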
Emilian Peev656e4fa2017-06-02 16:47:04 +01005905 cam_sensor_pd_data_t pdafEnable = (nullptr != mDepthChannel) ?
5906 CAM_PD_DATA_SKIP : CAM_PD_DATA_DISABLED;
5907 if (depthRequestPresent && mDepthChannel) {
5908 if (request->settings) {
5909 camera_metadata_ro_entry entry;
5910 if (find_camera_metadata_ro_entry(request->settings,
5911 NEXUS_EXPERIMENTAL_2017_PD_DATA_ENABLE, &entry) == 0) {
5912 if (entry.data.u8[0]) {
5913 pdafEnable = CAM_PD_DATA_ENABLED;
5914 } else {
5915 pdafEnable = CAM_PD_DATA_SKIP;
5916 }
5917 mDepthCloudMode = pdafEnable;
5918 } else {
5919 pdafEnable = mDepthCloudMode;
5920 }
5921 } else {
5922 pdafEnable = mDepthCloudMode;
5923 }
5924 }
5925
Emilian Peev7650c122017-01-19 08:24:33 -08005926 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5927 CAM_INTF_META_PDAF_DATA_ENABLE, pdafEnable)) {
5928 LOGE("%s: Failed to enable PDAF data in parameters!", __func__);
5929 pthread_mutex_unlock(&mMutex);
5930 return BAD_VALUE;
5931 }
Emilian Peev656e4fa2017-06-02 16:47:04 +01005932
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005933 if (request->input_buffer == NULL) {
5934 /* Set the parameters to backend:
5935 * - For every request in NORMAL MODE
5936 * - For every request in HFR mode during preview only case
5937 * - Once every batch in HFR mode during video recording
5938 */
5939 if (!mBatchSize ||
5940 (mBatchSize && !isVidBufRequested) ||
5941 (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize))) {
5942 LOGD("set_parms batchSz: %d IsVidBufReq: %d vidBufTobeQd: %d ",
5943 mBatchSize, isVidBufRequested,
5944 mToBeQueuedVidBufs);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005945
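            // In HFR batch mode the stream IDs requested across the batch are
            // accumulated (de-duplicated) into mBatchedStreamsArray; the merged
            // array replaces streamsArray below so that a single set_parms call
            // covers the whole batch.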
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005946 if(mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize)) {
5947 for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
5948 uint32_t m = 0;
5949 for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
5950 if (streamsArray.stream_request[k].streamID ==
5951 mBatchedStreamsArray.stream_request[m].streamID)
5952 break;
5953 }
5954 if (m == mBatchedStreamsArray.num_streams) {
5955 mBatchedStreamsArray.stream_request\
5956 [mBatchedStreamsArray.num_streams].streamID =
5957 streamsArray.stream_request[k].streamID;
5958 mBatchedStreamsArray.stream_request\
5959 [mBatchedStreamsArray.num_streams].buf_index =
5960 streamsArray.stream_request[k].buf_index;
5961 mBatchedStreamsArray.num_streams =
5962 mBatchedStreamsArray.num_streams + 1;
5963 }
5964 }
5965 streamsArray = mBatchedStreamsArray;
5966 }
5967 /* Update stream id of all the requested buffers */
5968 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID,
5969 streamsArray)) {
5970 LOGE("Failed to set stream type mask in the parameters");
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005971 pthread_mutex_unlock(&mMutex);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005972 return BAD_VALUE;
5973 }
5974
5975 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
5976 mParameters);
5977 if (rc < 0) {
5978 LOGE("set_parms failed");
5979 }
5980            /* Reset to zero because the batch is queued */
5981 mToBeQueuedVidBufs = 0;
5982 mPendingBatchMap.add(frameNumber, mFirstFrameNumberInBatch);
5983 memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
5984 } else if (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs != mBatchSize)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005985 for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
5986 uint32_t m = 0;
5987 for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
5988 if (streamsArray.stream_request[k].streamID ==
5989 mBatchedStreamsArray.stream_request[m].streamID)
5990 break;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005991 }
5992 if (m == mBatchedStreamsArray.num_streams) {
5993 mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].
5994 streamID = streamsArray.stream_request[k].streamID;
5995 mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].
5996 buf_index = streamsArray.stream_request[k].buf_index;
5997 mBatchedStreamsArray.num_streams = mBatchedStreamsArray.num_streams + 1;
5998 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005999 }
6000 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08006001 mPendingLiveRequest++;
Shuzhen Wang3c077d72017-04-20 22:48:59 -07006002
6003 // Start all streams after the first setting is sent, so that the
6004 // setting can be applied sooner: (0 + apply_delay)th frame.
6005 if (mState == CONFIGURED && mChannelHandle) {
6006 //Then start them.
6007 LOGH("Start META Channel");
6008 rc = mMetadataChannel->start();
6009 if (rc < 0) {
6010 LOGE("META channel start failed");
6011 pthread_mutex_unlock(&mMutex);
6012 return rc;
6013 }
6014
6015 if (mAnalysisChannel) {
6016 rc = mAnalysisChannel->start();
6017 if (rc < 0) {
6018 LOGE("Analysis channel start failed");
6019 mMetadataChannel->stop();
6020 pthread_mutex_unlock(&mMutex);
6021 return rc;
6022 }
6023 }
6024
6025 if (mSupportChannel) {
6026 rc = mSupportChannel->start();
6027 if (rc < 0) {
6028 LOGE("Support channel start failed");
6029 mMetadataChannel->stop();
6030 /* Although support and analysis are mutually exclusive today
6031                     adding it in any case for future-proofing */
6032 if (mAnalysisChannel) {
6033 mAnalysisChannel->stop();
6034 }
6035 pthread_mutex_unlock(&mMutex);
6036 return rc;
6037 }
6038 }
6039 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6040 it != mStreamInfo.end(); it++) {
6041 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
6042 LOGH("Start Processing Channel mask=%d",
6043 channel->getStreamTypeMask());
6044 rc = channel->start();
6045 if (rc < 0) {
6046 LOGE("channel start failed");
6047 pthread_mutex_unlock(&mMutex);
6048 return rc;
6049 }
6050 }
6051
6052 if (mRawDumpChannel) {
6053 LOGD("Starting raw dump stream");
6054 rc = mRawDumpChannel->start();
6055 if (rc != NO_ERROR) {
6056 LOGE("Error Starting Raw Dump Channel");
6057 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6058 it != mStreamInfo.end(); it++) {
6059 QCamera3Channel *channel =
6060 (QCamera3Channel *)(*it)->stream->priv;
6061 LOGH("Stopping Processing Channel mask=%d",
6062 channel->getStreamTypeMask());
6063 channel->stop();
6064 }
6065 if (mSupportChannel)
6066 mSupportChannel->stop();
6067 if (mAnalysisChannel) {
6068 mAnalysisChannel->stop();
6069 }
6070 mMetadataChannel->stop();
6071 pthread_mutex_unlock(&mMutex);
6072 return rc;
6073 }
6074 }
6075
Chien-Yu Chence5b8662017-05-09 17:17:17 -07006076 // Configure modules for stream on.
Chien-Yu Chen153c5172017-09-08 11:33:19 -07006077 rc = startChannelLocked();
Shuzhen Wang3c077d72017-04-20 22:48:59 -07006078 if (rc != NO_ERROR) {
Chien-Yu Chen153c5172017-09-08 11:33:19 -07006079 LOGE("startChannelLocked failed %d", rc);
Chien-Yu Chence5b8662017-05-09 17:17:17 -07006080 pthread_mutex_unlock(&mMutex);
6081 return rc;
6082 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07006083 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006084 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006085 }
6086
Chien-Yu Chenfadf40e2017-09-15 14:33:57 -07006087 // Enable HDR+ mode for the first PREVIEW_INTENT request that doesn't disable HDR+.
Chenjie Luo4a761802017-06-13 17:35:54 +00006088 {
Chien-Yu Chen77ccd022017-06-23 12:00:36 -07006089 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
Chien-Yu Chend77a5462017-06-02 18:00:38 -07006090 if (gEaselManagerClient != nullptr && gEaselManagerClient->isEaselPresentOnDevice() &&
Chien-Yu Chenfe023bc2017-05-12 17:19:26 -07006091 !gEaselBypassOnly && !mFirstPreviewIntentSeen &&
6092 meta.exists(ANDROID_CONTROL_CAPTURE_INTENT) &&
6093 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0] ==
Chien-Yu Chenfadf40e2017-09-15 14:33:57 -07006094 ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW &&
6095 meta.exists(NEXUS_EXPERIMENTAL_2017_DISABLE_HDRPLUS) &&
6096 meta.find(NEXUS_EXPERIMENTAL_2017_DISABLE_HDRPLUS).data.i32[0] == 0) {
Chien-Yu Chendeaebad2017-06-30 11:46:34 -07006097
6098 if (isSessionHdrPlusModeCompatible()) {
6099 rc = enableHdrPlusModeLocked();
6100 if (rc != OK) {
6101 LOGE("%s: Failed to open HDR+ asynchronously", __FUNCTION__);
6102 pthread_mutex_unlock(&mMutex);
6103 return rc;
6104 }
Chien-Yu Chenfe023bc2017-05-12 17:19:26 -07006105 }
6106
6107 mFirstPreviewIntentSeen = true;
6108 }
6109 }
6110
Thierry Strudel3d639192016-09-09 11:52:26 -07006111 LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
6112
6113 mState = STARTED;
6114 // Added a timed condition wait
6115 struct timespec ts;
6116 uint8_t isValidTimeout = 1;
Shuzhen Wangfb961e52016-11-28 11:48:02 -08006117 rc = clock_gettime(CLOCK_MONOTONIC, &ts);
Thierry Strudel3d639192016-09-09 11:52:26 -07006118 if (rc < 0) {
6119 isValidTimeout = 0;
6120 LOGE("Error reading the real time clock!!");
6121 }
6122 else {
6123        // Use a 5 second timeout (default) for the request to be honored
Chien-Yu Chene687bd02016-12-07 18:30:26 -08006124 int64_t timeout = 5;
6125 {
6126 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
6127 // If there is a pending HDR+ request, the following requests may be blocked until the
6128 // HDR+ request is done. So allow a longer timeout.
6129 if (mHdrPlusPendingRequests.size() > 0) {
6130 timeout = MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT;
6131 }
6132 }
6133 ts.tv_sec += timeout;
Thierry Strudel3d639192016-09-09 11:52:26 -07006134 }
6135    // Block until in-flight requests drop below mMinInFlightRequests (reprocess requests with an input buffer are not throttled)
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006136 while ((mPendingLiveRequest >= mMinInFlightRequests) && !pInputBuffer &&
Thierry Strudel3d639192016-09-09 11:52:26 -07006137 (mState != ERROR) && (mState != DEINIT)) {
6138 if (!isValidTimeout) {
6139 LOGD("Blocking on conditional wait");
6140 pthread_cond_wait(&mRequestCond, &mMutex);
6141 }
6142 else {
6143 LOGD("Blocking on timed conditional wait");
6144 rc = pthread_cond_timedwait(&mRequestCond, &mMutex, &ts);
6145 if (rc == ETIMEDOUT) {
6146 rc = -ENODEV;
6147 LOGE("Unblocked on timeout!!!!");
6148 break;
6149 }
6150 }
6151 LOGD("Unblocked");
6152 if (mWokenUpByDaemon) {
6153 mWokenUpByDaemon = false;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006154 if (mPendingLiveRequest < mMaxInFlightRequests)
Thierry Strudel3d639192016-09-09 11:52:26 -07006155 break;
6156 }
6157 }
6158 pthread_mutex_unlock(&mMutex);
6159
6160 return rc;
6161}
6162
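/*===========================================================================
 * FUNCTION   : startChannelLocked
 *
 * DESCRIPTION: Configures the backend channel for stream on (without sensor
 *              streaming), starts Easel MIPI when an Easel manager client is
 *              open, and then starts sensor streaming.
 *
 * PARAMETERS : None
 *
 * RETURN     : 0 on success
 *              Error code on failure
 *==========================================================================*/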
Chien-Yu Chen153c5172017-09-08 11:33:19 -07006163int32_t QCamera3HardwareInterface::startChannelLocked()
6164{
6165 // Configure modules for stream on.
6166 int32_t rc = mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
6167 mChannelHandle, /*start_sensor_streaming*/false);
6168 if (rc != NO_ERROR) {
6169 LOGE("start_channel failed %d", rc);
6170 return rc;
6171 }
6172
6173 {
6174 // Configure Easel for stream on.
6175 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
6176
6177 // Now that sensor mode should have been selected, get the selected sensor mode
6178 // info.
6179 memset(&mSensorModeInfo, 0, sizeof(mSensorModeInfo));
6180 getCurrentSensorModeInfo(mSensorModeInfo);
6181
6182 if (EaselManagerClientOpened) {
6183 logEaselEvent("EASEL_STARTUP_LATENCY", "Starting MIPI");
6184 rc = gEaselManagerClient->startMipi(mCameraId, mSensorModeInfo.op_pixel_clk,
6185 /*enableCapture*/true);
6186 if (rc != OK) {
6187 ALOGE("%s: Failed to start MIPI rate for camera %u to %u", __FUNCTION__,
6188 mCameraId, mSensorModeInfo.op_pixel_clk);
6189 return rc;
6190 }
6191 logEaselEvent("EASEL_STARTUP_LATENCY", "Starting MIPI done");
6192 mEaselMipiStarted = true;
6193 }
6194 }
6195
6196 // Start sensor streaming.
6197 rc = mCameraHandle->ops->start_sensor_streaming(mCameraHandle->camera_handle,
6198 mChannelHandle);
6199 if (rc != NO_ERROR) {
6200 LOGE("start_sensor_stream_on failed %d", rc);
6201 return rc;
6202 }
6203
6204 return 0;
6205}
6206
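/*===========================================================================
 * FUNCTION   : stopChannelLocked
 *
 * DESCRIPTION: Stops the backend channel and, if Easel MIPI was started,
 *              stops MIPI output for this camera.
 *
 * PARAMETERS :
 *   @stopChannelImmediately : stop the channel without waiting for a frame
 *                             boundary
 *
 * RETURN     : None
 *==========================================================================*/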
6207void QCamera3HardwareInterface::stopChannelLocked(bool stopChannelImmediately)
6208{
6209 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
6210 mChannelHandle, stopChannelImmediately);
6211
6212 {
6213 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
6214 if (EaselManagerClientOpened && mEaselMipiStarted) {
6215 int32_t rc = gEaselManagerClient->stopMipi(mCameraId);
6216 if (rc != 0) {
6217 ALOGE("%s: Stopping MIPI failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
6218 }
6219 mEaselMipiStarted = false;
6220 }
6221 }
6222}
6223
Thierry Strudel3d639192016-09-09 11:52:26 -07006224/*===========================================================================
6225 * FUNCTION : dump
6226 *
6227 * DESCRIPTION: Dump HAL3 state (pending requests, pending buffers and
6228 *              pending frame drops) to the given file descriptor
6229 * PARAMETERS :
6230 *   @fd : file descriptor to write the dump output to
6231 *
6232 * RETURN     : None
6233 *==========================================================================*/
6234void QCamera3HardwareInterface::dump(int fd)
6235{
6236 pthread_mutex_lock(&mMutex);
6237 dprintf(fd, "\n Camera HAL3 information Begin \n");
6238
6239 dprintf(fd, "\nNumber of pending requests: %zu \n",
6240 mPendingRequestsList.size());
6241 dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
6242 dprintf(fd, " Frame | Number of Buffers | Req Id: | Blob Req | Input buffer present\n");
6243 dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
6244 for(pendingRequestIterator i = mPendingRequestsList.begin();
6245 i != mPendingRequestsList.end(); i++) {
6246 dprintf(fd, " %5d | %17d | %11d | %8d | %p \n",
6247 i->frame_number, i->num_buffers, i->request_id, i->blob_request,
6248 i->input_buffer);
6249 }
6250 dprintf(fd, "\nPending buffer map: Number of buffers: %u\n",
6251 mPendingBuffersMap.get_num_overall_buffers());
6252 dprintf(fd, "-------+------------------\n");
6253 dprintf(fd, " Frame | Stream type mask \n");
6254 dprintf(fd, "-------+------------------\n");
6255 for(auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
6256 for(auto &j : req.mPendingBufferList) {
6257 QCamera3Channel *channel = (QCamera3Channel *)(j.stream->priv);
6258 dprintf(fd, " %5d | %11d \n",
6259 req.frame_number, channel->getStreamTypeMask());
6260 }
6261 }
6262 dprintf(fd, "-------+------------------\n");
6263
6264 dprintf(fd, "\nPending frame drop list: %zu\n",
6265 mPendingFrameDropList.size());
6266 dprintf(fd, "-------+-----------\n");
6267 dprintf(fd, " Frame | Stream ID \n");
6268 dprintf(fd, "-------+-----------\n");
6269 for(List<PendingFrameDropInfo>::iterator i = mPendingFrameDropList.begin();
6270 i != mPendingFrameDropList.end(); i++) {
6271 dprintf(fd, " %5d | %9d \n",
6272 i->frame_number, i->stream_ID);
6273 }
6274 dprintf(fd, "-------+-----------\n");
6275
6276 dprintf(fd, "\n Camera HAL3 information End \n");
6277
6278 /* use dumpsys media.camera as trigger to send update debug level event */
6279 mUpdateDebugLevel = true;
6280 pthread_mutex_unlock(&mMutex);
6281 return;
6282}
6283
6284/*===========================================================================
6285 * FUNCTION : flush
6286 *
6287 * DESCRIPTION: Calls stopAllChannels, notifyErrorForPendingRequests and
6288 * conditionally restarts channels
6289 *
6290 * PARAMETERS :
6291 * @ restartChannels: re-start all channels
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -07006292 * @ stopChannelImmediately: stop the channel immediately. This should be used
6293 * when device encountered an error and MIPI may has
6294 * been stopped.
Thierry Strudel3d639192016-09-09 11:52:26 -07006295 *
6296 * RETURN :
6297 * 0 on success
6298 * Error code on failure
6299 *==========================================================================*/
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -07006300int QCamera3HardwareInterface::flush(bool restartChannels, bool stopChannelImmediately)
Thierry Strudel3d639192016-09-09 11:52:26 -07006301{
Thierry Strudel9ec39c62016-12-28 11:30:05 -08006302 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_STOP_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07006303 int32_t rc = NO_ERROR;
6304
6305 LOGD("Unblocking Process Capture Request");
6306 pthread_mutex_lock(&mMutex);
6307 mFlush = true;
6308 pthread_mutex_unlock(&mMutex);
6309
Chien-Yu Chen11c8edc2017-09-11 20:54:24 -07006310    // Disable HDR+ if it's enabled.
6311 {
6312 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
6313 finishHdrPlusClientOpeningLocked(l);
6314 disableHdrPlusModeLocked();
6315 }
6316
Thierry Strudel3d639192016-09-09 11:52:26 -07006317 rc = stopAllChannels();
6318 // unlink of dualcam
6319 if (mIsDeviceLinked) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006320 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
6321 &m_pDualCamCmdPtr->bundle_info;
6322 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
Thierry Strudel3d639192016-09-09 11:52:26 -07006323 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
6324 pthread_mutex_lock(&gCamLock);
6325
6326 if (mIsMainCamera == 1) {
6327 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
6328 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
Thierry Strudel269c81a2016-10-12 12:13:59 -07006329 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel3d639192016-09-09 11:52:26 -07006330 // related session id should be session id of linked session
6331 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
6332 } else {
6333 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
6334 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
Thierry Strudel269c81a2016-10-12 12:13:59 -07006335 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel3d639192016-09-09 11:52:26 -07006336 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
6337 }
Thierry Strudel2896d122017-02-23 19:18:03 -08006338 m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
Thierry Strudel3d639192016-09-09 11:52:26 -07006339 pthread_mutex_unlock(&gCamLock);
6340
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006341 rc = mCameraHandle->ops->set_dual_cam_cmd(
6342 mCameraHandle->camera_handle);
Thierry Strudel3d639192016-09-09 11:52:26 -07006343 if (rc < 0) {
6344 LOGE("Dualcam: Unlink failed, but still proceed to close");
6345 }
6346 }
6347
6348 if (rc < 0) {
6349 LOGE("stopAllChannels failed");
6350 return rc;
6351 }
6352 if (mChannelHandle) {
Chien-Yu Chen153c5172017-09-08 11:33:19 -07006353 stopChannelLocked(stopChannelImmediately);
Thierry Strudel3d639192016-09-09 11:52:26 -07006354 }
6355
6356 // Reset bundle info
6357 rc = setBundleInfo();
6358 if (rc < 0) {
6359 LOGE("setBundleInfo failed %d", rc);
6360 return rc;
6361 }
6362
6363 // Mutex Lock
6364 pthread_mutex_lock(&mMutex);
6365
6366 // Unblock process_capture_request
6367 mPendingLiveRequest = 0;
6368 pthread_cond_signal(&mRequestCond);
6369
6370 rc = notifyErrorForPendingRequests();
6371 if (rc < 0) {
6372 LOGE("notifyErrorForPendingRequests failed");
6373 pthread_mutex_unlock(&mMutex);
6374 return rc;
6375 }
6376
6377 mFlush = false;
6378
6379 // Start the Streams/Channels
6380 if (restartChannels) {
6381 rc = startAllChannels();
6382 if (rc < 0) {
6383 LOGE("startAllChannels failed");
6384 pthread_mutex_unlock(&mMutex);
6385 return rc;
6386 }
Thierry Strudel2896d122017-02-23 19:18:03 -08006387 if (mChannelHandle) {
Chien-Yu Chen153c5172017-09-08 11:33:19 -07006388 // Configure modules for stream on.
6389 rc = startChannelLocked();
Thierry Strudel2896d122017-02-23 19:18:03 -08006390 if (rc < 0) {
Chien-Yu Chen153c5172017-09-08 11:33:19 -07006391 LOGE("startChannelLocked failed");
Thierry Strudel2896d122017-02-23 19:18:03 -08006392 pthread_mutex_unlock(&mMutex);
6393 return rc;
6394 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006395 }
6396 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006397 pthread_mutex_unlock(&mMutex);
6398
6399 return 0;
6400}
6401
6402/*===========================================================================
6403 * FUNCTION : flushPerf
6404 *
6405 * DESCRIPTION: This is the performance optimization version of flush that does
6406 * not use stream off, rather flushes the system
6407 *
6408 * PARAMETERS :
6409 *
6410 *
6411 * RETURN : 0 : success
6412 * -EINVAL: input is malformed (device is not valid)
6413 * -ENODEV: if the device has encountered a serious error
6414 *==========================================================================*/
6415int QCamera3HardwareInterface::flushPerf()
6416{
Thierry Strudel9ec39c62016-12-28 11:30:05 -08006417 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_STOP_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07006418 int32_t rc = 0;
6419 struct timespec timeout;
6420 bool timed_wait = false;
6421
6422 pthread_mutex_lock(&mMutex);
6423 mFlushPerf = true;
6424 mPendingBuffersMap.numPendingBufsAtFlush =
6425 mPendingBuffersMap.get_num_overall_buffers();
6426 LOGD("Calling flush. Wait for %d buffers to return",
6427 mPendingBuffersMap.numPendingBufsAtFlush);
6428
6429 /* send the flush event to the backend */
6430 rc = mCameraHandle->ops->flush(mCameraHandle->camera_handle);
6431 if (rc < 0) {
6432 LOGE("Error in flush: IOCTL failure");
6433 mFlushPerf = false;
6434 pthread_mutex_unlock(&mMutex);
6435 return -ENODEV;
6436 }
6437
6438 if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
6439 LOGD("No pending buffers in HAL, return flush");
6440 mFlushPerf = false;
6441 pthread_mutex_unlock(&mMutex);
6442 return rc;
6443 }
6444
6445 /* wait on a signal that buffers were received */
Shuzhen Wangfb961e52016-11-28 11:48:02 -08006446 rc = clock_gettime(CLOCK_MONOTONIC, &timeout);
Thierry Strudel3d639192016-09-09 11:52:26 -07006447 if (rc < 0) {
 6448        LOGE("Error reading the monotonic clock, cannot use timed wait");
6449 } else {
6450 timeout.tv_sec += FLUSH_TIMEOUT;
6451 timed_wait = true;
6452 }
6453
6454 //Block on conditional variable
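    // pthread_cond_timedwait() takes an absolute deadline; the one computed above is
    // based on CLOCK_MONOTONIC, which assumes mBuffersCond was initialized with a
    // monotonic clock attribute (presumably via the cam_cond.h helpers), so the
    // FLUSH_TIMEOUT expiry is not skewed by wall-clock adjustments.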
6455 while (mPendingBuffersMap.numPendingBufsAtFlush != 0) {
6456 LOGD("Waiting on mBuffersCond");
6457 if (!timed_wait) {
6458 rc = pthread_cond_wait(&mBuffersCond, &mMutex);
6459 if (rc != 0) {
6460 LOGE("pthread_cond_wait failed due to rc = %s",
6461 strerror(rc));
6462 break;
6463 }
6464 } else {
6465 rc = pthread_cond_timedwait(&mBuffersCond, &mMutex, &timeout);
6466 if (rc != 0) {
6467 LOGE("pthread_cond_timedwait failed due to rc = %s",
6468 strerror(rc));
6469 break;
6470 }
6471 }
6472 }
6473 if (rc != 0) {
6474 mFlushPerf = false;
6475 pthread_mutex_unlock(&mMutex);
6476 return -ENODEV;
6477 }
6478
6479 LOGD("Received buffers, now safe to return them");
6480
6481 //make sure the channels handle flush
6482 //currently only required for the picture channel to release snapshot resources
6483 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6484 it != mStreamInfo.end(); it++) {
6485 QCamera3Channel *channel = (*it)->channel;
6486 if (channel) {
6487 rc = channel->flush();
6488 if (rc) {
6489 LOGE("Flushing the channels failed with error %d", rc);
6490 // even though the channel flush failed we need to continue and
6491 // return the buffers we have to the framework, however the return
6492 // value will be an error
6493 rc = -ENODEV;
6494 }
6495 }
6496 }
6497
6498 /* notify the frameworks and send errored results */
6499 rc = notifyErrorForPendingRequests();
6500 if (rc < 0) {
6501 LOGE("notifyErrorForPendingRequests failed");
6502 pthread_mutex_unlock(&mMutex);
6503 return rc;
6504 }
6505
6506 //unblock process_capture_request
6507 mPendingLiveRequest = 0;
6508 unblockRequestIfNecessary();
6509
6510 mFlushPerf = false;
6511 pthread_mutex_unlock(&mMutex);
6512 LOGD ("Flush Operation complete. rc = %d", rc);
6513 return rc;
6514}
6515
6516/*===========================================================================
6517 * FUNCTION : handleCameraDeviceError
6518 *
 6519 * DESCRIPTION: This function performs an internal flush, notifies the error
 6520 *              to the framework, and updates the state variable.
6521 *
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -07006522 * PARAMETERS :
6523 * @stopChannelImmediately : stop channels immediately without waiting for
6524 * frame boundary.
Thierry Strudel3d639192016-09-09 11:52:26 -07006525 *
6526 * RETURN : NO_ERROR on Success
6527 * Error code on failure
6528 *==========================================================================*/
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -07006529int32_t QCamera3HardwareInterface::handleCameraDeviceError(bool stopChannelImmediately)
Thierry Strudel3d639192016-09-09 11:52:26 -07006530{
6531 int32_t rc = NO_ERROR;
6532
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006533 {
6534 Mutex::Autolock lock(mFlushLock);
6535 pthread_mutex_lock(&mMutex);
6536 if (mState != ERROR) {
6537 //if mState != ERROR, nothing to be done
6538 pthread_mutex_unlock(&mMutex);
6539 return NO_ERROR;
6540 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006541 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07006542
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -07006543 rc = flush(false /* restart channels */, stopChannelImmediately);
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006544 if (NO_ERROR != rc) {
6545 LOGE("internal flush to handle mState = ERROR failed");
6546 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006547
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006548 pthread_mutex_lock(&mMutex);
6549 mState = DEINIT;
6550 pthread_mutex_unlock(&mMutex);
6551 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006552
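    // Notify the framework of a fatal, device-wide error. For CAMERA3_MSG_ERROR_DEVICE
    // the stream and frame number fields are not meaningful, so they are left as NULL/0.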
6553 camera3_notify_msg_t notify_msg;
6554 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
6555 notify_msg.type = CAMERA3_MSG_ERROR;
6556 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_DEVICE;
6557 notify_msg.message.error.error_stream = NULL;
6558 notify_msg.message.error.frame_number = 0;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006559 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -07006560
6561 return rc;
6562}
6563
6564/*===========================================================================
6565 * FUNCTION : captureResultCb
6566 *
 6567 * DESCRIPTION: Callback handler for all capture results
 6568 *              (streams, as well as metadata)
6569 *
6570 * PARAMETERS :
 6571 * @metadata : metadata information
 6572 * @buffer   : actual gralloc buffer to be returned to frameworks.
 6573 *             NULL if metadata.
 * @frame_number : frame number of the capture request this buffer/metadata belongs to
 * @isInputBuffer : true if this callback is for an input buffer
6574 *
6575 * RETURN : NONE
6576 *==========================================================================*/
6577void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf,
6578 camera3_stream_buffer_t *buffer, uint32_t frame_number, bool isInputBuffer)
6579{
6580 if (metadata_buf) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006581 pthread_mutex_lock(&mMutex);
6582 uint8_t batchSize = mBatchSize;
6583 pthread_mutex_unlock(&mMutex);
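        // In HFR batch mode (mBatchSize > 0) the backend delivers one metadata buffer
        // covering a whole batch; handleBatchMetadata() is expected to split it into
        // per-frame results. In non-batch mode the buffer is handled directly under mMutex.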
6584 if (batchSize) {
Thierry Strudel3d639192016-09-09 11:52:26 -07006585 handleBatchMetadata(metadata_buf,
6586 true /* free_and_bufdone_meta_buf */);
6587 } else { /* mBatchSize = 0 */
6588 hdrPlusPerfLock(metadata_buf);
6589 pthread_mutex_lock(&mMutex);
6590 handleMetadataWithLock(metadata_buf,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006591 true /* free_and_bufdone_meta_buf */,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006592 true /* last urgent frame of batch metadata */,
6593 true /* last frame of batch metadata */,
Thierry Strudel54dc9782017-02-15 12:12:10 -08006594 NULL);
Thierry Strudel3d639192016-09-09 11:52:26 -07006595 pthread_mutex_unlock(&mMutex);
6596 }
6597 } else if (isInputBuffer) {
6598 pthread_mutex_lock(&mMutex);
6599 handleInputBufferWithLock(frame_number);
6600 pthread_mutex_unlock(&mMutex);
6601 } else {
6602 pthread_mutex_lock(&mMutex);
6603 handleBufferWithLock(buffer, frame_number);
6604 pthread_mutex_unlock(&mMutex);
6605 }
6606 return;
6607}
6608
6609/*===========================================================================
6610 * FUNCTION : getReprocessibleOutputStreamId
6611 *
 6612 * DESCRIPTION: Get the source output stream id for the input reprocess stream,
 6613 *              i.e. the output or bidirectional stream whose size and format
 6614 *              match the input stream, if an input stream exists.
6615 *
6616 * PARAMETERS :
6617 * @id : return the stream id if found
6618 *
6619 * RETURN : int32_t type of status
6620 * NO_ERROR -- success
 6621 *              non-zero failure code
6622 *==========================================================================*/
6623int32_t QCamera3HardwareInterface::getReprocessibleOutputStreamId(uint32_t &id)
6624{
6625 /* check if any output or bidirectional stream with the same size and format
6626 and return that stream */
6627 if ((mInputStreamInfo.dim.width > 0) &&
6628 (mInputStreamInfo.dim.height > 0)) {
6629 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6630 it != mStreamInfo.end(); it++) {
6631
6632 camera3_stream_t *stream = (*it)->stream;
6633 if ((stream->width == (uint32_t)mInputStreamInfo.dim.width) &&
6634 (stream->height == (uint32_t)mInputStreamInfo.dim.height) &&
6635 (stream->format == mInputStreamInfo.format)) {
6636 // Usage flag for an input stream and the source output stream
6637 // may be different.
6638 LOGD("Found reprocessible output stream! %p", *it);
6639 LOGD("input stream usage 0x%x, current stream usage 0x%x",
6640 stream->usage, mInputStreamInfo.usage);
6641
6642 QCamera3Channel *channel = (QCamera3Channel *)stream->priv;
6643 if (channel != NULL && channel->mStreams[0]) {
6644 id = channel->mStreams[0]->getMyServerID();
6645 return NO_ERROR;
6646 }
6647 }
6648 }
6649 } else {
6650 LOGD("No input stream, so no reprocessible output stream");
6651 }
6652 return NAME_NOT_FOUND;
6653}
6654
6655/*===========================================================================
6656 * FUNCTION : lookupFwkName
6657 *
 6658 * DESCRIPTION: In case the enum is not the same in the framework and backend,
 6659 *              make sure the parameter is correctly propagated
6660 *
6661 * PARAMETERS :
6662 * @arr : map between the two enums
6663 * @len : len of the map
6664 * @hal_name : name of the hal_parm to map
6665 *
6666 * RETURN : int type of status
6667 * fwk_name -- success
 6668 *              NAME_NOT_FOUND -- no matching framework type found
6669 *==========================================================================*/
6670template <typename halType, class mapType> int lookupFwkName(const mapType *arr,
6671 size_t len, halType hal_name)
6672{
6673
6674 for (size_t i = 0; i < len; i++) {
6675 if (arr[i].hal_name == hal_name) {
6676 return arr[i].fwk_name;
6677 }
6678 }
6679
 6680    /* Not being able to find a matching framework type is not necessarily
 6681     * an error. This happens when mm-camera supports more attributes
 6682     * than the framework does */
6683 LOGH("Cannot find matching framework type");
6684 return NAME_NOT_FOUND;
6685}
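// Illustrative usage (sketch, mirroring how the metadata translation code later in
// this file calls the helper): map a backend flash mode to the framework enum and
// skip the tag when no mapping exists.
//
//     int val = lookupFwkName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
//             *flashMode);
//     if (NAME_NOT_FOUND != val) {
//         uint8_t fwk_flashMode = (uint8_t)val;
//         camMetadata.update(ANDROID_FLASH_MODE, &fwk_flashMode, 1);
//     }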
6686
6687/*===========================================================================
6688 * FUNCTION : lookupHalName
6689 *
 6690 * DESCRIPTION: In case the enum is not the same in the framework and backend,
 6691 *              make sure the parameter is correctly propagated
6692 *
6693 * PARAMETERS :
6694 * @arr : map between the two enums
6695 * @len : len of the map
 6696 * @fwk_name  : name of the framework parameter to map
6697 *
6698 * RETURN : int32_t type of status
6699 * hal_name -- success
 6700 *              NAME_NOT_FOUND -- no matching HAL type found
6701 *==========================================================================*/
6702template <typename fwkType, class mapType> int lookupHalName(const mapType *arr,
6703 size_t len, fwkType fwk_name)
6704{
6705 for (size_t i = 0; i < len; i++) {
6706 if (arr[i].fwk_name == fwk_name) {
6707 return arr[i].hal_name;
6708 }
6709 }
6710
6711 LOGE("Cannot find matching hal type fwk_name=%d", fwk_name);
6712 return NAME_NOT_FOUND;
6713}
6714
6715/*===========================================================================
6716 * FUNCTION : lookupProp
6717 *
6718 * DESCRIPTION: lookup a value by its name
6719 *
6720 * PARAMETERS :
6721 * @arr : map between the two enums
6722 * @len : size of the map
6723 * @name : name to be looked up
6724 *
6725 * RETURN : Value if found
6726 * CAM_CDS_MODE_MAX if not found
6727 *==========================================================================*/
6728template <class mapType> cam_cds_mode_type_t lookupProp(const mapType *arr,
6729 size_t len, const char *name)
6730{
6731 if (name) {
6732 for (size_t i = 0; i < len; i++) {
6733 if (!strcmp(arr[i].desc, name)) {
6734 return arr[i].val;
6735 }
6736 }
6737 }
6738 return CAM_CDS_MODE_MAX;
6739}
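// Illustrative usage (sketch; CDS_MAP and the property key are assumptions, the real
// table and key may differ): resolve a property string to a CDS mode, falling back
// when the string is unknown.
//
//     char prop[PROPERTY_VALUE_MAX];
//     memset(prop, 0, sizeof(prop));
//     property_get("persist.camera.CDS", prop, "Auto");
//     cam_cds_mode_type_t cds_mode = lookupProp(CDS_MAP,
//             METADATA_MAP_SIZE(CDS_MAP), prop);
//     if (CAM_CDS_MODE_MAX == cds_mode) {
//         // Unknown descriptor string: keep the current/default CDS mode.
//     }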
6740
6741/*===========================================================================
 6742 * FUNCTION   : translateFromHalMetadata
 6743 * DESCRIPTION: Translate metadata reported by the HAL/backend into the
 *              camera_metadata_t result format expected by the framework
 6744 *
 6745 * PARAMETERS :
 6746 * @metadata : metadata information from callback
Shuzhen Wang181c57b2017-07-21 11:39:44 -07006747 * @pendingRequest: pending request for this metadata
Thierry Strudel3d639192016-09-09 11:52:26 -07006748 * @pprocDone: whether internal offline postprocessing is done
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006749 * @lastMetadataInBatch: Boolean to indicate whether this is the last metadata
 6750 *                       in a batch. Always true for non-batch mode.
 * @enableZsl: pointer to the ZSL enable flag for this request (assumed; may be
 *             NULL when not applicable)
Thierry Strudel3d639192016-09-09 11:52:26 -07006751 *
6752 * RETURN : camera_metadata_t*
6753 * metadata in a format specified by fwk
6754 *==========================================================================*/
6755camera_metadata_t*
6756QCamera3HardwareInterface::translateFromHalMetadata(
6757 metadata_buffer_t *metadata,
Shuzhen Wang181c57b2017-07-21 11:39:44 -07006758 const PendingRequestInfo& pendingRequest,
Thierry Strudel3d639192016-09-09 11:52:26 -07006759 bool pprocDone,
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07006760 bool lastMetadataInBatch,
6761 const bool *enableZsl)
Thierry Strudel3d639192016-09-09 11:52:26 -07006762{
6763 CameraMetadata camMetadata;
6764 camera_metadata_t *resultMetadata;
6765
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006766 if (!lastMetadataInBatch) {
Shuzhen Wang8fe62792017-03-20 16:10:25 -07006767 /* In batch mode, only populate SENSOR_TIMESTAMP if this is not the last in batch.
6768 * Timestamp is needed because it's used for shutter notify calculation.
6769 * */
Shuzhen Wang181c57b2017-07-21 11:39:44 -07006770 camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &pendingRequest.timestamp, 1);
Shuzhen Wang8fe62792017-03-20 16:10:25 -07006771 resultMetadata = camMetadata.release();
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006772 return resultMetadata;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006773 }
6774
Shuzhen Wang181c57b2017-07-21 11:39:44 -07006775 if (pendingRequest.jpegMetadata.entryCount())
6776 camMetadata.append(pendingRequest.jpegMetadata);
Thierry Strudel3d639192016-09-09 11:52:26 -07006777
Shuzhen Wang181c57b2017-07-21 11:39:44 -07006778 camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &pendingRequest.timestamp, 1);
6779 camMetadata.update(ANDROID_REQUEST_ID, &pendingRequest.request_id, 1);
6780 camMetadata.update(ANDROID_REQUEST_PIPELINE_DEPTH, &pendingRequest.pipeline_depth, 1);
6781 camMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &pendingRequest.capture_intent, 1);
6782 camMetadata.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &pendingRequest.hybrid_ae_enable, 1);
Samuel Ha68ba5172016-12-15 18:41:12 -08006783 if (mBatchSize == 0) {
6784 // DevCamDebug metadata translateFromHalMetadata. Only update this one for non-HFR mode
Shuzhen Wang181c57b2017-07-21 11:39:44 -07006785 camMetadata.update(DEVCAMDEBUG_META_ENABLE, &pendingRequest.DevCamDebug_meta_enable, 1);
Samuel Ha68ba5172016-12-15 18:41:12 -08006786 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006787
Samuel Ha68ba5172016-12-15 18:41:12 -08006788 // atrace_begin(ATRACE_TAG_ALWAYS, "DevCamDebugInfo");
 6789    // Only update DevCamDebug metadata conditionally: non-HFR mode and it is enabled.
Shuzhen Wang181c57b2017-07-21 11:39:44 -07006790 if (mBatchSize == 0 && pendingRequest.DevCamDebug_meta_enable != 0) {
Samuel Ha68ba5172016-12-15 18:41:12 -08006791 // DevCamDebug metadata translateFromHalMetadata AF
6792 IF_META_AVAILABLE(int32_t, DevCamDebug_af_lens_position,
6793 CAM_INTF_META_DEV_CAM_AF_LENS_POSITION, metadata) {
6794 int32_t fwk_DevCamDebug_af_lens_position = *DevCamDebug_af_lens_position;
6795 camMetadata.update(DEVCAMDEBUG_AF_LENS_POSITION, &fwk_DevCamDebug_af_lens_position, 1);
6796 }
6797 IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_confidence,
Shuzhen Wang3569d4a2017-09-04 19:10:28 -07006798 CAM_INTF_META_AF_TOF_CONFIDENCE, metadata) {
Samuel Ha68ba5172016-12-15 18:41:12 -08006799 int32_t fwk_DevCamDebug_af_tof_confidence = *DevCamDebug_af_tof_confidence;
6800 camMetadata.update(DEVCAMDEBUG_AF_TOF_CONFIDENCE, &fwk_DevCamDebug_af_tof_confidence, 1);
6801 }
6802 IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_distance,
Shuzhen Wang3569d4a2017-09-04 19:10:28 -07006803 CAM_INTF_META_AF_TOF_DISTANCE, metadata) {
Samuel Ha68ba5172016-12-15 18:41:12 -08006804 int32_t fwk_DevCamDebug_af_tof_distance = *DevCamDebug_af_tof_distance;
6805 camMetadata.update(DEVCAMDEBUG_AF_TOF_DISTANCE, &fwk_DevCamDebug_af_tof_distance, 1);
6806 }
6807 IF_META_AVAILABLE(int32_t, DevCamDebug_af_luma,
6808 CAM_INTF_META_DEV_CAM_AF_LUMA, metadata) {
6809 int32_t fwk_DevCamDebug_af_luma = *DevCamDebug_af_luma;
6810 camMetadata.update(DEVCAMDEBUG_AF_LUMA, &fwk_DevCamDebug_af_luma, 1);
6811 }
6812 IF_META_AVAILABLE(int32_t, DevCamDebug_af_haf_state,
6813 CAM_INTF_META_DEV_CAM_AF_HAF_STATE, metadata) {
6814 int32_t fwk_DevCamDebug_af_haf_state = *DevCamDebug_af_haf_state;
6815 camMetadata.update(DEVCAMDEBUG_AF_HAF_STATE, &fwk_DevCamDebug_af_haf_state, 1);
6816 }
6817 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_target_pos,
6818 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_TARGET_POS, metadata) {
6819 int32_t fwk_DevCamDebug_af_monitor_pdaf_target_pos =
6820 *DevCamDebug_af_monitor_pdaf_target_pos;
6821 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
6822 &fwk_DevCamDebug_af_monitor_pdaf_target_pos, 1);
6823 }
6824 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_confidence,
6825 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_CONFIDENCE, metadata) {
6826 int32_t fwk_DevCamDebug_af_monitor_pdaf_confidence =
6827 *DevCamDebug_af_monitor_pdaf_confidence;
6828 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
6829 &fwk_DevCamDebug_af_monitor_pdaf_confidence, 1);
6830 }
6831 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_refocus,
6832 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_REFOCUS, metadata) {
6833 int32_t fwk_DevCamDebug_af_monitor_pdaf_refocus = *DevCamDebug_af_monitor_pdaf_refocus;
6834 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
6835 &fwk_DevCamDebug_af_monitor_pdaf_refocus, 1);
6836 }
6837 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_target_pos,
6838 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_TARGET_POS, metadata) {
6839 int32_t fwk_DevCamDebug_af_monitor_tof_target_pos =
6840 *DevCamDebug_af_monitor_tof_target_pos;
6841 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
6842 &fwk_DevCamDebug_af_monitor_tof_target_pos, 1);
6843 }
6844 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_confidence,
6845 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_CONFIDENCE, metadata) {
6846 int32_t fwk_DevCamDebug_af_monitor_tof_confidence =
6847 *DevCamDebug_af_monitor_tof_confidence;
6848 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
6849 &fwk_DevCamDebug_af_monitor_tof_confidence, 1);
6850 }
6851 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_refocus,
6852 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_REFOCUS, metadata) {
6853 int32_t fwk_DevCamDebug_af_monitor_tof_refocus = *DevCamDebug_af_monitor_tof_refocus;
6854 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
6855 &fwk_DevCamDebug_af_monitor_tof_refocus, 1);
6856 }
6857 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_type_select,
6858 CAM_INTF_META_DEV_CAM_AF_MONITOR_TYPE_SELECT, metadata) {
6859 int32_t fwk_DevCamDebug_af_monitor_type_select = *DevCamDebug_af_monitor_type_select;
6860 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
6861 &fwk_DevCamDebug_af_monitor_type_select, 1);
6862 }
6863 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_refocus,
6864 CAM_INTF_META_DEV_CAM_AF_MONITOR_REFOCUS, metadata) {
6865 int32_t fwk_DevCamDebug_af_monitor_refocus = *DevCamDebug_af_monitor_refocus;
6866 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_REFOCUS,
6867 &fwk_DevCamDebug_af_monitor_refocus, 1);
6868 }
6869 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_target_pos,
6870 CAM_INTF_META_DEV_CAM_AF_MONITOR_TARGET_POS, metadata) {
6871 int32_t fwk_DevCamDebug_af_monitor_target_pos = *DevCamDebug_af_monitor_target_pos;
6872 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
6873 &fwk_DevCamDebug_af_monitor_target_pos, 1);
6874 }
6875 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_target_pos,
6876 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_TARGET_POS, metadata) {
6877 int32_t fwk_DevCamDebug_af_search_pdaf_target_pos =
6878 *DevCamDebug_af_search_pdaf_target_pos;
6879 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
6880 &fwk_DevCamDebug_af_search_pdaf_target_pos, 1);
6881 }
6882 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_next_pos,
6883 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEXT_POS, metadata) {
6884 int32_t fwk_DevCamDebug_af_search_pdaf_next_pos = *DevCamDebug_af_search_pdaf_next_pos;
6885 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
6886 &fwk_DevCamDebug_af_search_pdaf_next_pos, 1);
6887 }
6888 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_near_pos,
6889 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEAR_POS, metadata) {
6890 int32_t fwk_DevCamDebug_af_search_pdaf_near_pos = *DevCamDebug_af_search_pdaf_near_pos;
6891 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
6892 &fwk_DevCamDebug_af_search_pdaf_near_pos, 1);
6893 }
6894 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_far_pos,
6895 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_FAR_POS, metadata) {
6896 int32_t fwk_DevCamDebug_af_search_pdaf_far_pos = *DevCamDebug_af_search_pdaf_far_pos;
6897 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
6898 &fwk_DevCamDebug_af_search_pdaf_far_pos, 1);
6899 }
6900 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_confidence,
6901 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_CONFIDENCE, metadata) {
6902 int32_t fwk_DevCamDebug_af_search_pdaf_confidence = *DevCamDebug_af_search_pdaf_confidence;
6903 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
6904 &fwk_DevCamDebug_af_search_pdaf_confidence, 1);
6905 }
6906 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_target_pos,
6907 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_TARGET_POS, metadata) {
6908 int32_t fwk_DevCamDebug_af_search_tof_target_pos =
6909 *DevCamDebug_af_search_tof_target_pos;
6910 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
6911 &fwk_DevCamDebug_af_search_tof_target_pos, 1);
6912 }
6913 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_next_pos,
6914 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEXT_POS, metadata) {
6915 int32_t fwk_DevCamDebug_af_search_tof_next_pos = *DevCamDebug_af_search_tof_next_pos;
6916 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
6917 &fwk_DevCamDebug_af_search_tof_next_pos, 1);
6918 }
6919 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_near_pos,
6920 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEAR_POS, metadata) {
6921 int32_t fwk_DevCamDebug_af_search_tof_near_pos = *DevCamDebug_af_search_tof_near_pos;
6922 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
6923 &fwk_DevCamDebug_af_search_tof_near_pos, 1);
6924 }
6925 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_far_pos,
6926 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_FAR_POS, metadata) {
6927 int32_t fwk_DevCamDebug_af_search_tof_far_pos = *DevCamDebug_af_search_tof_far_pos;
6928 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
6929 &fwk_DevCamDebug_af_search_tof_far_pos, 1);
6930 }
6931 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_confidence,
6932 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_CONFIDENCE, metadata) {
6933 int32_t fwk_DevCamDebug_af_search_tof_confidence = *DevCamDebug_af_search_tof_confidence;
6934 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
6935 &fwk_DevCamDebug_af_search_tof_confidence, 1);
6936 }
6937 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_type_select,
6938 CAM_INTF_META_DEV_CAM_AF_SEARCH_TYPE_SELECT, metadata) {
6939 int32_t fwk_DevCamDebug_af_search_type_select = *DevCamDebug_af_search_type_select;
6940 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
6941 &fwk_DevCamDebug_af_search_type_select, 1);
6942 }
6943 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_next_pos,
6944 CAM_INTF_META_DEV_CAM_AF_SEARCH_NEXT_POS, metadata) {
6945 int32_t fwk_DevCamDebug_af_search_next_pos = *DevCamDebug_af_search_next_pos;
6946 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
6947 &fwk_DevCamDebug_af_search_next_pos, 1);
6948 }
6949 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_target_pos,
6950 CAM_INTF_META_DEV_CAM_AF_SEARCH_TARGET_POS, metadata) {
6951 int32_t fwk_DevCamDebug_af_search_target_pos = *DevCamDebug_af_search_target_pos;
6952 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
6953 &fwk_DevCamDebug_af_search_target_pos, 1);
6954 }
6955 // DevCamDebug metadata translateFromHalMetadata AEC
6956 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_target_luma,
6957 CAM_INTF_META_DEV_CAM_AEC_TARGET_LUMA, metadata) {
6958 int32_t fwk_DevCamDebug_aec_target_luma = *DevCamDebug_aec_target_luma;
6959 camMetadata.update(DEVCAMDEBUG_AEC_TARGET_LUMA, &fwk_DevCamDebug_aec_target_luma, 1);
6960 }
6961 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_comp_luma,
6962 CAM_INTF_META_DEV_CAM_AEC_COMP_LUMA, metadata) {
6963 int32_t fwk_DevCamDebug_aec_comp_luma = *DevCamDebug_aec_comp_luma;
6964 camMetadata.update(DEVCAMDEBUG_AEC_COMP_LUMA, &fwk_DevCamDebug_aec_comp_luma, 1);
6965 }
6966 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_avg_luma,
6967 CAM_INTF_META_DEV_CAM_AEC_AVG_LUMA, metadata) {
6968 int32_t fwk_DevCamDebug_aec_avg_luma = *DevCamDebug_aec_avg_luma;
6969 camMetadata.update(DEVCAMDEBUG_AEC_AVG_LUMA, &fwk_DevCamDebug_aec_avg_luma, 1);
6970 }
6971 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_cur_luma,
6972 CAM_INTF_META_DEV_CAM_AEC_CUR_LUMA, metadata) {
6973 int32_t fwk_DevCamDebug_aec_cur_luma = *DevCamDebug_aec_cur_luma;
6974 camMetadata.update(DEVCAMDEBUG_AEC_CUR_LUMA, &fwk_DevCamDebug_aec_cur_luma, 1);
6975 }
6976 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_linecount,
6977 CAM_INTF_META_DEV_CAM_AEC_LINECOUNT, metadata) {
6978 int32_t fwk_DevCamDebug_aec_linecount = *DevCamDebug_aec_linecount;
6979 camMetadata.update(DEVCAMDEBUG_AEC_LINECOUNT, &fwk_DevCamDebug_aec_linecount, 1);
6980 }
6981 IF_META_AVAILABLE(float, DevCamDebug_aec_real_gain,
6982 CAM_INTF_META_DEV_CAM_AEC_REAL_GAIN, metadata) {
6983 float fwk_DevCamDebug_aec_real_gain = *DevCamDebug_aec_real_gain;
6984 camMetadata.update(DEVCAMDEBUG_AEC_REAL_GAIN, &fwk_DevCamDebug_aec_real_gain, 1);
6985 }
6986 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_exp_index,
6987 CAM_INTF_META_DEV_CAM_AEC_EXP_INDEX, metadata) {
6988 int32_t fwk_DevCamDebug_aec_exp_index = *DevCamDebug_aec_exp_index;
6989 camMetadata.update(DEVCAMDEBUG_AEC_EXP_INDEX, &fwk_DevCamDebug_aec_exp_index, 1);
6990 }
6991 IF_META_AVAILABLE(float, DevCamDebug_aec_lux_idx,
6992 CAM_INTF_META_DEV_CAM_AEC_LUX_IDX, metadata) {
6993 float fwk_DevCamDebug_aec_lux_idx = *DevCamDebug_aec_lux_idx;
6994 camMetadata.update(DEVCAMDEBUG_AEC_LUX_IDX, &fwk_DevCamDebug_aec_lux_idx, 1);
6995 }
Samuel Ha34229982017-02-17 13:51:11 -08006996 // DevCamDebug metadata translateFromHalMetadata zzHDR
6997 IF_META_AVAILABLE(float, DevCamDebug_aec_l_real_gain,
6998 CAM_INTF_META_DEV_CAM_AEC_L_REAL_GAIN, metadata) {
6999 float fwk_DevCamDebug_aec_l_real_gain = *DevCamDebug_aec_l_real_gain;
7000 camMetadata.update(DEVCAMDEBUG_AEC_L_REAL_GAIN, &fwk_DevCamDebug_aec_l_real_gain, 1);
7001 }
7002 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_l_linecount,
7003 CAM_INTF_META_DEV_CAM_AEC_L_LINECOUNT, metadata) {
Samuel Hae80fbd52017-03-29 16:14:46 -07007004 int32_t fwk_DevCamDebug_aec_l_linecount = *DevCamDebug_aec_l_linecount;
Samuel Ha34229982017-02-17 13:51:11 -08007005 camMetadata.update(DEVCAMDEBUG_AEC_L_LINECOUNT, &fwk_DevCamDebug_aec_l_linecount, 1);
7006 }
7007 IF_META_AVAILABLE(float, DevCamDebug_aec_s_real_gain,
7008 CAM_INTF_META_DEV_CAM_AEC_S_REAL_GAIN, metadata) {
7009 float fwk_DevCamDebug_aec_s_real_gain = *DevCamDebug_aec_s_real_gain;
7010 camMetadata.update(DEVCAMDEBUG_AEC_S_REAL_GAIN, &fwk_DevCamDebug_aec_s_real_gain, 1);
7011 }
7012 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_s_linecount,
7013 CAM_INTF_META_DEV_CAM_AEC_S_LINECOUNT, metadata) {
Samuel Hae80fbd52017-03-29 16:14:46 -07007014 int32_t fwk_DevCamDebug_aec_s_linecount = *DevCamDebug_aec_s_linecount;
Samuel Ha34229982017-02-17 13:51:11 -08007015 camMetadata.update(DEVCAMDEBUG_AEC_S_LINECOUNT, &fwk_DevCamDebug_aec_s_linecount, 1);
7016 }
7017 IF_META_AVAILABLE(float, DevCamDebug_aec_hdr_sensitivity_ratio,
7018 CAM_INTF_META_DEV_CAM_AEC_HDR_SENSITIVITY_RATIO, metadata) {
7019 float fwk_DevCamDebug_aec_hdr_sensitivity_ratio =
7020 *DevCamDebug_aec_hdr_sensitivity_ratio;
7021 camMetadata.update(DEVCAMDEBUG_AEC_HDR_SENSITIVITY_RATIO,
7022 &fwk_DevCamDebug_aec_hdr_sensitivity_ratio, 1);
7023 }
7024 IF_META_AVAILABLE(float, DevCamDebug_aec_hdr_exp_time_ratio,
7025 CAM_INTF_META_DEV_CAM_AEC_HDR_EXP_TIME_RATIO, metadata) {
7026 float fwk_DevCamDebug_aec_hdr_exp_time_ratio = *DevCamDebug_aec_hdr_exp_time_ratio;
7027 camMetadata.update(DEVCAMDEBUG_AEC_HDR_EXP_TIME_RATIO,
7028 &fwk_DevCamDebug_aec_hdr_exp_time_ratio, 1);
7029 }
7030 // DevCamDebug metadata translateFromHalMetadata ADRC
7031 IF_META_AVAILABLE(float, DevCamDebug_aec_total_drc_gain,
7032 CAM_INTF_META_DEV_CAM_AEC_TOTAL_DRC_GAIN, metadata) {
7033 float fwk_DevCamDebug_aec_total_drc_gain = *DevCamDebug_aec_total_drc_gain;
7034 camMetadata.update(DEVCAMDEBUG_AEC_TOTAL_DRC_GAIN,
7035 &fwk_DevCamDebug_aec_total_drc_gain, 1);
7036 }
7037 IF_META_AVAILABLE(float, DevCamDebug_aec_color_drc_gain,
7038 CAM_INTF_META_DEV_CAM_AEC_COLOR_DRC_GAIN, metadata) {
7039 float fwk_DevCamDebug_aec_color_drc_gain = *DevCamDebug_aec_color_drc_gain;
7040 camMetadata.update(DEVCAMDEBUG_AEC_COLOR_DRC_GAIN,
7041 &fwk_DevCamDebug_aec_color_drc_gain, 1);
7042 }
7043 IF_META_AVAILABLE(float, DevCamDebug_aec_gtm_ratio,
7044 CAM_INTF_META_DEV_CAM_AEC_GTM_RATIO, metadata) {
7045 float fwk_DevCamDebug_aec_gtm_ratio = *DevCamDebug_aec_gtm_ratio;
7046 camMetadata.update(DEVCAMDEBUG_AEC_GTM_RATIO, &fwk_DevCamDebug_aec_gtm_ratio, 1);
7047 }
7048 IF_META_AVAILABLE(float, DevCamDebug_aec_ltm_ratio,
7049 CAM_INTF_META_DEV_CAM_AEC_LTM_RATIO, metadata) {
7050 float fwk_DevCamDebug_aec_ltm_ratio = *DevCamDebug_aec_ltm_ratio;
7051 camMetadata.update(DEVCAMDEBUG_AEC_LTM_RATIO, &fwk_DevCamDebug_aec_ltm_ratio, 1);
7052 }
7053 IF_META_AVAILABLE(float, DevCamDebug_aec_la_ratio,
7054 CAM_INTF_META_DEV_CAM_AEC_LA_RATIO, metadata) {
7055 float fwk_DevCamDebug_aec_la_ratio = *DevCamDebug_aec_la_ratio;
7056 camMetadata.update(DEVCAMDEBUG_AEC_LA_RATIO, &fwk_DevCamDebug_aec_la_ratio, 1);
7057 }
7058 IF_META_AVAILABLE(float, DevCamDebug_aec_gamma_ratio,
7059 CAM_INTF_META_DEV_CAM_AEC_GAMMA_RATIO, metadata) {
7060 float fwk_DevCamDebug_aec_gamma_ratio = *DevCamDebug_aec_gamma_ratio;
7061 camMetadata.update(DEVCAMDEBUG_AEC_GAMMA_RATIO, &fwk_DevCamDebug_aec_gamma_ratio, 1);
7062 }
Samuel Habdf4fac2017-07-28 17:21:18 -07007063 // DevCamDebug metadata translateFromHalMetadata AEC MOTION
7064 IF_META_AVAILABLE(float, DevCamDebug_aec_camera_motion_dx,
7065 CAM_INTF_META_DEV_CAM_AEC_CAMERA_MOTION_DX, metadata) {
7066 float fwk_DevCamDebug_aec_camera_motion_dx = *DevCamDebug_aec_camera_motion_dx;
7067 camMetadata.update(DEVCAMDEBUG_AEC_CAMERA_MOTION_DX,
7068 &fwk_DevCamDebug_aec_camera_motion_dx, 1);
7069 }
7070 IF_META_AVAILABLE(float, DevCamDebug_aec_camera_motion_dy,
7071 CAM_INTF_META_DEV_CAM_AEC_CAMERA_MOTION_DY, metadata) {
7072 float fwk_DevCamDebug_aec_camera_motion_dy = *DevCamDebug_aec_camera_motion_dy;
7073 camMetadata.update(DEVCAMDEBUG_AEC_CAMERA_MOTION_DY,
7074 &fwk_DevCamDebug_aec_camera_motion_dy, 1);
7075 }
7076 IF_META_AVAILABLE(float, DevCamDebug_aec_subject_motion,
7077 CAM_INTF_META_DEV_CAM_AEC_SUBJECT_MOTION, metadata) {
7078 float fwk_DevCamDebug_aec_subject_motion = *DevCamDebug_aec_subject_motion;
7079 camMetadata.update(DEVCAMDEBUG_AEC_SUBJECT_MOTION,
7080 &fwk_DevCamDebug_aec_subject_motion, 1);
7081 }
Samuel Ha68ba5172016-12-15 18:41:12 -08007082 // DevCamDebug metadata translateFromHalMetadata AWB
7083 IF_META_AVAILABLE(float, DevCamDebug_awb_r_gain,
7084 CAM_INTF_META_DEV_CAM_AWB_R_GAIN, metadata) {
7085 float fwk_DevCamDebug_awb_r_gain = *DevCamDebug_awb_r_gain;
7086 camMetadata.update(DEVCAMDEBUG_AWB_R_GAIN, &fwk_DevCamDebug_awb_r_gain, 1);
7087 }
7088 IF_META_AVAILABLE(float, DevCamDebug_awb_g_gain,
7089 CAM_INTF_META_DEV_CAM_AWB_G_GAIN, metadata) {
7090 float fwk_DevCamDebug_awb_g_gain = *DevCamDebug_awb_g_gain;
7091 camMetadata.update(DEVCAMDEBUG_AWB_G_GAIN, &fwk_DevCamDebug_awb_g_gain, 1);
7092 }
7093 IF_META_AVAILABLE(float, DevCamDebug_awb_b_gain,
7094 CAM_INTF_META_DEV_CAM_AWB_B_GAIN, metadata) {
7095 float fwk_DevCamDebug_awb_b_gain = *DevCamDebug_awb_b_gain;
7096 camMetadata.update(DEVCAMDEBUG_AWB_B_GAIN, &fwk_DevCamDebug_awb_b_gain, 1);
7097 }
7098 IF_META_AVAILABLE(int32_t, DevCamDebug_awb_cct,
7099 CAM_INTF_META_DEV_CAM_AWB_CCT, metadata) {
7100 int32_t fwk_DevCamDebug_awb_cct = *DevCamDebug_awb_cct;
7101 camMetadata.update(DEVCAMDEBUG_AWB_CCT, &fwk_DevCamDebug_awb_cct, 1);
7102 }
7103 IF_META_AVAILABLE(int32_t, DevCamDebug_awb_decision,
7104 CAM_INTF_META_DEV_CAM_AWB_DECISION, metadata) {
7105 int32_t fwk_DevCamDebug_awb_decision = *DevCamDebug_awb_decision;
7106 camMetadata.update(DEVCAMDEBUG_AWB_DECISION, &fwk_DevCamDebug_awb_decision, 1);
7107 }
7108 }
7109 // atrace_end(ATRACE_TAG_ALWAYS);
7110
Thierry Strudel3d639192016-09-09 11:52:26 -07007111 IF_META_AVAILABLE(uint32_t, frame_number, CAM_INTF_META_FRAME_NUMBER, metadata) {
7112 int64_t fwk_frame_number = *frame_number;
7113 camMetadata.update(ANDROID_SYNC_FRAME_NUMBER, &fwk_frame_number, 1);
7114 }
7115
7116 IF_META_AVAILABLE(cam_fps_range_t, float_range, CAM_INTF_PARM_FPS_RANGE, metadata) {
7117 int32_t fps_range[2];
7118 fps_range[0] = (int32_t)float_range->min_fps;
7119 fps_range[1] = (int32_t)float_range->max_fps;
7120 camMetadata.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
7121 fps_range, 2);
7122 LOGD("urgent Metadata : ANDROID_CONTROL_AE_TARGET_FPS_RANGE [%d, %d]",
7123 fps_range[0], fps_range[1]);
7124 }
7125
7126 IF_META_AVAILABLE(int32_t, expCompensation, CAM_INTF_PARM_EXPOSURE_COMPENSATION, metadata) {
7127 camMetadata.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, expCompensation, 1);
7128 }
7129
7130 IF_META_AVAILABLE(uint32_t, sceneMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
7131 int val = (uint8_t)lookupFwkName(SCENE_MODES_MAP,
7132 METADATA_MAP_SIZE(SCENE_MODES_MAP),
7133 *sceneMode);
7134 if (NAME_NOT_FOUND != val) {
7135 uint8_t fwkSceneMode = (uint8_t)val;
7136 camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkSceneMode, 1);
7137 LOGD("urgent Metadata : ANDROID_CONTROL_SCENE_MODE: %d",
7138 fwkSceneMode);
7139 }
7140 }
7141
7142 IF_META_AVAILABLE(uint32_t, ae_lock, CAM_INTF_PARM_AEC_LOCK, metadata) {
7143 uint8_t fwk_ae_lock = (uint8_t) *ae_lock;
7144 camMetadata.update(ANDROID_CONTROL_AE_LOCK, &fwk_ae_lock, 1);
7145 }
7146
7147 IF_META_AVAILABLE(uint32_t, awb_lock, CAM_INTF_PARM_AWB_LOCK, metadata) {
7148 uint8_t fwk_awb_lock = (uint8_t) *awb_lock;
7149 camMetadata.update(ANDROID_CONTROL_AWB_LOCK, &fwk_awb_lock, 1);
7150 }
7151
7152 IF_META_AVAILABLE(uint32_t, color_correct_mode, CAM_INTF_META_COLOR_CORRECT_MODE, metadata) {
7153 uint8_t fwk_color_correct_mode = (uint8_t) *color_correct_mode;
7154 camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, &fwk_color_correct_mode, 1);
7155 }
7156
7157 IF_META_AVAILABLE(cam_edge_application_t, edgeApplication,
7158 CAM_INTF_META_EDGE_MODE, metadata) {
7159 camMetadata.update(ANDROID_EDGE_MODE, &(edgeApplication->edge_mode), 1);
7160 }
7161
7162 IF_META_AVAILABLE(uint32_t, flashPower, CAM_INTF_META_FLASH_POWER, metadata) {
7163 uint8_t fwk_flashPower = (uint8_t) *flashPower;
7164 camMetadata.update(ANDROID_FLASH_FIRING_POWER, &fwk_flashPower, 1);
7165 }
7166
7167 IF_META_AVAILABLE(int64_t, flashFiringTime, CAM_INTF_META_FLASH_FIRING_TIME, metadata) {
7168 camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1);
7169 }
7170
7171 IF_META_AVAILABLE(int32_t, flashState, CAM_INTF_META_FLASH_STATE, metadata) {
7172 if (0 <= *flashState) {
7173 uint8_t fwk_flashState = (uint8_t) *flashState;
7174 if (!gCamCapability[mCameraId]->flash_available) {
7175 fwk_flashState = ANDROID_FLASH_STATE_UNAVAILABLE;
7176 }
7177 camMetadata.update(ANDROID_FLASH_STATE, &fwk_flashState, 1);
7178 }
7179 }
7180
7181 IF_META_AVAILABLE(uint32_t, flashMode, CAM_INTF_META_FLASH_MODE, metadata) {
7182 int val = lookupFwkName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP), *flashMode);
7183 if (NAME_NOT_FOUND != val) {
7184 uint8_t fwk_flashMode = (uint8_t)val;
7185 camMetadata.update(ANDROID_FLASH_MODE, &fwk_flashMode, 1);
7186 }
7187 }
7188
7189 IF_META_AVAILABLE(uint32_t, hotPixelMode, CAM_INTF_META_HOTPIXEL_MODE, metadata) {
7190 uint8_t fwk_hotPixelMode = (uint8_t) *hotPixelMode;
7191 camMetadata.update(ANDROID_HOT_PIXEL_MODE, &fwk_hotPixelMode, 1);
7192 }
7193
7194 IF_META_AVAILABLE(float, lensAperture, CAM_INTF_META_LENS_APERTURE, metadata) {
7195 camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1);
7196 }
7197
7198 IF_META_AVAILABLE(float, filterDensity, CAM_INTF_META_LENS_FILTERDENSITY, metadata) {
7199 camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1);
7200 }
7201
7202 IF_META_AVAILABLE(float, focalLength, CAM_INTF_META_LENS_FOCAL_LENGTH, metadata) {
7203 camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1);
7204 }
7205
7206 IF_META_AVAILABLE(uint32_t, opticalStab, CAM_INTF_META_LENS_OPT_STAB_MODE, metadata) {
7207 uint8_t fwk_opticalStab = (uint8_t) *opticalStab;
7208 camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &fwk_opticalStab, 1);
7209 }
7210
7211 IF_META_AVAILABLE(uint32_t, videoStab, CAM_INTF_META_VIDEO_STAB_MODE, metadata) {
7212 uint8_t fwk_videoStab = (uint8_t) *videoStab;
7213 LOGD("fwk_videoStab = %d", fwk_videoStab);
7214 camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwk_videoStab, 1);
7215 } else {
 7216        // Regardless of whether video stabilization is supported, CTS expects the EIS
 7217        // result to be non-NULL, so hardcode the video stabilization result to OFF mode.
7218 uint8_t fwkVideoStabMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
7219 camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwkVideoStabMode, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007220 LOGD("EIS result default to OFF mode");
Thierry Strudel3d639192016-09-09 11:52:26 -07007221 }
7222
7223 IF_META_AVAILABLE(uint32_t, noiseRedMode, CAM_INTF_META_NOISE_REDUCTION_MODE, metadata) {
7224 uint8_t fwk_noiseRedMode = (uint8_t) *noiseRedMode;
7225 camMetadata.update(ANDROID_NOISE_REDUCTION_MODE, &fwk_noiseRedMode, 1);
7226 }
7227
7228 IF_META_AVAILABLE(float, effectiveExposureFactor, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR, metadata) {
7229 camMetadata.update(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR, effectiveExposureFactor, 1);
7230 }
7231
Thierry Strudel3d639192016-09-09 11:52:26 -07007232 IF_META_AVAILABLE(cam_black_level_metadata_t, blackLevelAppliedPattern,
7233 CAM_INTF_META_BLACK_LEVEL_APPLIED_PATTERN, metadata) {
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007234 float fwk_blackLevelInd[BLACK_LEVEL_PATTERN_CNT];
Thierry Strudel3d639192016-09-09 11:52:26 -07007235
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007236 adjustBlackLevelForCFA(blackLevelAppliedPattern->cam_black_level, fwk_blackLevelInd,
7237 gCamCapability[mCameraId]->color_arrangement);
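        // adjustBlackLevelForCFA() is assumed to reorder the four per-channel black
        // level values to match the sensor's color filter arrangement before they are
        // reported through the framework keys below.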
Thierry Strudel3d639192016-09-09 11:52:26 -07007238
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007239 LOGD("applied dynamicblackLevel in RGGB order = %f %f %f %f",
Thierry Strudel3d639192016-09-09 11:52:26 -07007240 blackLevelAppliedPattern->cam_black_level[0],
7241 blackLevelAppliedPattern->cam_black_level[1],
7242 blackLevelAppliedPattern->cam_black_level[2],
7243 blackLevelAppliedPattern->cam_black_level[3]);
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007244 camMetadata.update(QCAMERA3_SENSOR_DYNAMIC_BLACK_LEVEL_PATTERN, fwk_blackLevelInd,
7245 BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007246
7247#ifndef USE_HAL_3_3
7248 // Update the ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL
Jason Lee4f3d96e2017-02-28 19:24:14 +05307249        // Convert the internal 14-bit black level values to the sensor's 10-bit
Zhijun Heb753c672016-06-15 14:50:48 -07007250        // raw depth space (hence the divide by 16 below).
Jason Lee4f3d96e2017-02-28 19:24:14 +05307251 fwk_blackLevelInd[0] /= 16.0;
7252 fwk_blackLevelInd[1] /= 16.0;
7253 fwk_blackLevelInd[2] /= 16.0;
7254 fwk_blackLevelInd[3] /= 16.0;
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007255 camMetadata.update(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL, fwk_blackLevelInd,
7256 BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007257#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07007258 }
7259
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007260#ifndef USE_HAL_3_3
7261 // Fixed whitelevel is used by ISP/Sensor
7262 camMetadata.update(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL,
7263 &gCamCapability[mCameraId]->white_level, 1);
7264#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07007265
7266 IF_META_AVAILABLE(cam_crop_region_t, hScalerCropRegion,
7267 CAM_INTF_META_SCALER_CROP_REGION, metadata) {
7268 int32_t scalerCropRegion[4];
7269 scalerCropRegion[0] = hScalerCropRegion->left;
7270 scalerCropRegion[1] = hScalerCropRegion->top;
7271 scalerCropRegion[2] = hScalerCropRegion->width;
7272 scalerCropRegion[3] = hScalerCropRegion->height;
7273
7274 // Adjust crop region from sensor output coordinate system to active
7275 // array coordinate system.
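        // mCropRegionMapper is assumed to apply the scale/offset between the current
        // sensor output mode (which may be binned or cropped) and the full active
        // pixel array, since ANDROID_SCALER_CROP_REGION is defined in active array coordinates.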
7276 mCropRegionMapper.toActiveArray(scalerCropRegion[0], scalerCropRegion[1],
7277 scalerCropRegion[2], scalerCropRegion[3]);
7278
7279 camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 4);
7280 }
7281
7282 IF_META_AVAILABLE(int64_t, sensorExpTime, CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata) {
7283 LOGD("sensorExpTime = %lld", *sensorExpTime);
7284 camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1);
7285 }
7286
Shuzhen Wang6a1dd612017-08-05 15:03:53 -07007287 IF_META_AVAILABLE(float, expTimeBoost, CAM_INTF_META_EXP_TIME_BOOST, metadata) {
7288 LOGD("expTimeBoost = %f", *expTimeBoost);
7289 camMetadata.update(NEXUS_EXPERIMENTAL_2017_EXP_TIME_BOOST, expTimeBoost, 1);
7290 }
7291
Thierry Strudel3d639192016-09-09 11:52:26 -07007292 IF_META_AVAILABLE(int64_t, sensorFameDuration,
7293 CAM_INTF_META_SENSOR_FRAME_DURATION, metadata) {
7294 LOGD("sensorFameDuration = %lld", *sensorFameDuration);
7295 camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFameDuration, 1);
7296 }
7297
7298 IF_META_AVAILABLE(int64_t, sensorRollingShutterSkew,
7299 CAM_INTF_META_SENSOR_ROLLING_SHUTTER_SKEW, metadata) {
7300 LOGD("sensorRollingShutterSkew = %lld", *sensorRollingShutterSkew);
7301 camMetadata.update(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW,
7302 sensorRollingShutterSkew, 1);
7303 }
7304
7305 IF_META_AVAILABLE(int32_t, sensorSensitivity, CAM_INTF_META_SENSOR_SENSITIVITY, metadata) {
7306 LOGD("sensorSensitivity = %d", *sensorSensitivity);
7307 camMetadata.update(ANDROID_SENSOR_SENSITIVITY, sensorSensitivity, 1);
7308
7309 //calculate the noise profile based on sensitivity
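        // ANDROID_SENSOR_NOISE_PROFILE models per-channel noise variance as a linear
        // function of the normalized pixel value x:
        //     variance(x) = S * x + O
        // so one (S, O) pair is emitted per color channel below, with S and O derived
        // from the current sensitivity by computeNoiseModelEntryS()/O().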
7310 double noise_profile_S = computeNoiseModelEntryS(*sensorSensitivity);
7311 double noise_profile_O = computeNoiseModelEntryO(*sensorSensitivity);
7312 double noise_profile[2 * gCamCapability[mCameraId]->num_color_channels];
7313 for (int i = 0; i < 2 * gCamCapability[mCameraId]->num_color_channels; i += 2) {
7314 noise_profile[i] = noise_profile_S;
7315 noise_profile[i+1] = noise_profile_O;
7316 }
7317 LOGD("noise model entry (S, O) is (%f, %f)",
7318 noise_profile_S, noise_profile_O);
7319 camMetadata.update(ANDROID_SENSOR_NOISE_PROFILE, noise_profile,
7320 (size_t) (2 * gCamCapability[mCameraId]->num_color_channels));
7321 }
7322
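    // ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST is expressed in the same units as
    // sensitivity, with 100 meaning no boost; the value reported below is assumed to
    // combine the ISP digital gain with any post-stats sensitivity scaling.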
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007323#ifndef USE_HAL_3_3
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07007324 int32_t fwk_ispSensitivity = 100;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007325 IF_META_AVAILABLE(int32_t, ispSensitivity, CAM_INTF_META_ISP_SENSITIVITY, metadata) {
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07007326 fwk_ispSensitivity = (int32_t) *ispSensitivity;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007327 }
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07007328 IF_META_AVAILABLE(float, postStatsSensitivity, CAM_INTF_META_ISP_POST_STATS_SENSITIVITY, metadata) {
7329 fwk_ispSensitivity = (int32_t) (*postStatsSensitivity * fwk_ispSensitivity);
7330 }
7331 camMetadata.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &fwk_ispSensitivity, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007332#endif
7333
Thierry Strudel3d639192016-09-09 11:52:26 -07007334 IF_META_AVAILABLE(uint32_t, shadingMode, CAM_INTF_META_SHADING_MODE, metadata) {
7335 uint8_t fwk_shadingMode = (uint8_t) *shadingMode;
7336 camMetadata.update(ANDROID_SHADING_MODE, &fwk_shadingMode, 1);
7337 }
7338
7339 IF_META_AVAILABLE(uint32_t, faceDetectMode, CAM_INTF_META_STATS_FACEDETECT_MODE, metadata) {
7340 int val = lookupFwkName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
7341 *faceDetectMode);
7342 if (NAME_NOT_FOUND != val) {
7343 uint8_t fwk_faceDetectMode = (uint8_t)val;
7344 camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &fwk_faceDetectMode, 1);
7345
7346 if (fwk_faceDetectMode != ANDROID_STATISTICS_FACE_DETECT_MODE_OFF) {
7347 IF_META_AVAILABLE(cam_face_detection_data_t, faceDetectionInfo,
7348 CAM_INTF_META_FACE_DETECTION, metadata) {
7349 uint8_t numFaces = MIN(
7350 faceDetectionInfo->num_faces_detected, MAX_ROI);
7351 int32_t faceIds[MAX_ROI];
7352 uint8_t faceScores[MAX_ROI];
7353 int32_t faceRectangles[MAX_ROI * 4];
7354 int32_t faceLandmarks[MAX_ROI * 6];
7355 size_t j = 0, k = 0;
7356
7357 for (size_t i = 0; i < numFaces; i++) {
7358 faceScores[i] = (uint8_t)faceDetectionInfo->faces[i].score;
 7359                    // Adjust the face boundary from the sensor output coordinate
 7360                    // system to the active array coordinate system.
Shuzhen Wang91c14e82017-08-21 17:56:57 -07007361 cam_rect_t rect = faceDetectionInfo->faces[i].face_boundary;
Thierry Strudel3d639192016-09-09 11:52:26 -07007362 mCropRegionMapper.toActiveArray(rect.left, rect.top,
7363 rect.width, rect.height);
7364
Shuzhen Wang91c14e82017-08-21 17:56:57 -07007365 convertToRegions(rect, faceRectangles+j, -1);
Thierry Strudel3d639192016-09-09 11:52:26 -07007366
Jason Lee8ce36fa2017-04-19 19:40:37 -07007367 LOGL("FD_DEBUG : Frame[%d] Face[%d] : top-left (%d, %d), "
7368 "bottom-right (%d, %d)",
7369 faceDetectionInfo->frame_id, i,
7370 faceRectangles[j + FACE_LEFT], faceRectangles[j + FACE_TOP],
7371 faceRectangles[j + FACE_RIGHT], faceRectangles[j + FACE_BOTTOM]);
7372
Thierry Strudel3d639192016-09-09 11:52:26 -07007373 j+= 4;
7374 }
7375 if (numFaces <= 0) {
7376 memset(faceIds, 0, sizeof(int32_t) * MAX_ROI);
7377 memset(faceScores, 0, sizeof(uint8_t) * MAX_ROI);
7378 memset(faceRectangles, 0, sizeof(int32_t) * MAX_ROI * 4);
7379 memset(faceLandmarks, 0, sizeof(int32_t) * MAX_ROI * 6);
7380 }
7381
7382 camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores,
7383 numFaces);
7384 camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES,
7385 faceRectangles, numFaces * 4U);
7386 if (fwk_faceDetectMode ==
7387 ANDROID_STATISTICS_FACE_DETECT_MODE_FULL) {
7388 IF_META_AVAILABLE(cam_face_landmarks_data_t, landmarks,
7389 CAM_INTF_META_FACE_LANDMARK, metadata) {
7390
7391 for (size_t i = 0; i < numFaces; i++) {
Shuzhen Wang91c14e82017-08-21 17:56:57 -07007392 cam_face_landmarks_info_t face_landmarks = landmarks->face_landmarks[i];
Thierry Strudel3d639192016-09-09 11:52:26 -07007393                            // Map the landmark coordinates from the sensor output
 7394                            // coordinate system to the active array coordinate system.
7395 mCropRegionMapper.toActiveArray(
Shuzhen Wang91c14e82017-08-21 17:56:57 -07007396 face_landmarks.left_eye_center.x,
7397 face_landmarks.left_eye_center.y);
Thierry Strudel3d639192016-09-09 11:52:26 -07007398 mCropRegionMapper.toActiveArray(
Shuzhen Wang91c14e82017-08-21 17:56:57 -07007399 face_landmarks.right_eye_center.x,
7400 face_landmarks.right_eye_center.y);
Thierry Strudel3d639192016-09-09 11:52:26 -07007401 mCropRegionMapper.toActiveArray(
Shuzhen Wang91c14e82017-08-21 17:56:57 -07007402 face_landmarks.mouth_center.x,
7403 face_landmarks.mouth_center.y);
Thierry Strudel3d639192016-09-09 11:52:26 -07007404
Shuzhen Wang91c14e82017-08-21 17:56:57 -07007405 convertLandmarks(face_landmarks, faceLandmarks+k);
Jason Lee8ce36fa2017-04-19 19:40:37 -07007406
7407 LOGL("FD_DEBUG LANDMARK : Frame[%d] Face[%d] : "
7408 "left-eye (%d, %d), right-eye (%d, %d), mouth (%d, %d)",
7409 faceDetectionInfo->frame_id, i,
7410 faceLandmarks[k + LEFT_EYE_X],
7411 faceLandmarks[k + LEFT_EYE_Y],
7412 faceLandmarks[k + RIGHT_EYE_X],
7413 faceLandmarks[k + RIGHT_EYE_Y],
7414 faceLandmarks[k + MOUTH_X],
7415 faceLandmarks[k + MOUTH_Y]);
7416
Thierry Strudel04e026f2016-10-10 11:27:36 -07007417 k+= TOTAL_LANDMARK_INDICES;
7418 }
7419 } else {
7420 for (size_t i = 0; i < numFaces; i++) {
7421 setInvalidLandmarks(faceLandmarks+k);
7422 k+= TOTAL_LANDMARK_INDICES;
Thierry Strudel3d639192016-09-09 11:52:26 -07007423 }
7424 }
7425
Jason Lee49619db2017-04-13 12:07:22 -07007426 for (size_t i = 0; i < numFaces; i++) {
7427 faceIds[i] = faceDetectionInfo->faces[i].face_id;
7428
7429 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : faceIds=%d",
7430 faceDetectionInfo->frame_id, i, faceIds[i]);
7431 }
7432
Thierry Strudel3d639192016-09-09 11:52:26 -07007433 camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces);
7434 camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS,
7435 faceLandmarks, numFaces * 6U);
Jason Lee49619db2017-04-13 12:07:22 -07007436 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08007437 IF_META_AVAILABLE(cam_face_blink_data_t, blinks,
7438 CAM_INTF_META_FACE_BLINK, metadata) {
7439 uint8_t detected[MAX_ROI];
7440 uint8_t degree[MAX_ROI * 2];
7441 for (size_t i = 0; i < numFaces; i++) {
7442 detected[i] = blinks->blink[i].blink_detected;
7443 degree[2 * i] = blinks->blink[i].left_blink;
7444 degree[2 * i + 1] = blinks->blink[i].right_blink;
Jason Lee8ce36fa2017-04-19 19:40:37 -07007445
Jason Lee49619db2017-04-13 12:07:22 -07007446 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : "
7447 "blink_detected=%d, leye_blink=%d, reye_blink=%d",
7448 faceDetectionInfo->frame_id, i, detected[i], degree[2 * i],
7449 degree[2 * i + 1]);
Thierry Strudel54dc9782017-02-15 12:12:10 -08007450 }
7451 camMetadata.update(QCAMERA3_STATS_BLINK_DETECTED,
7452 detected, numFaces);
7453 camMetadata.update(QCAMERA3_STATS_BLINK_DEGREE,
7454 degree, numFaces * 2);
7455 }
7456 IF_META_AVAILABLE(cam_face_smile_data_t, smiles,
7457 CAM_INTF_META_FACE_SMILE, metadata) {
7458 uint8_t degree[MAX_ROI];
7459 uint8_t confidence[MAX_ROI];
7460 for (size_t i = 0; i < numFaces; i++) {
7461 degree[i] = smiles->smile[i].smile_degree;
7462 confidence[i] = smiles->smile[i].smile_confidence;
Jason Lee8ce36fa2017-04-19 19:40:37 -07007463
Jason Lee49619db2017-04-13 12:07:22 -07007464 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : "
7465 "smile_degree=%d, smile_score=%d",
7466 faceDetectionInfo->frame_id, i, degree[i], confidence[i]);
Thierry Strudel54dc9782017-02-15 12:12:10 -08007467 }
7468 camMetadata.update(QCAMERA3_STATS_SMILE_DEGREE,
7469 degree, numFaces);
7470 camMetadata.update(QCAMERA3_STATS_SMILE_CONFIDENCE,
7471 confidence, numFaces);
7472 }
7473 IF_META_AVAILABLE(cam_face_gaze_data_t, gazes,
7474 CAM_INTF_META_FACE_GAZE, metadata) {
7475 int8_t angle[MAX_ROI];
7476 int32_t direction[MAX_ROI * 3];
7477 int8_t degree[MAX_ROI * 2];
7478 for (size_t i = 0; i < numFaces; i++) {
7479 angle[i] = gazes->gaze[i].gaze_angle;
7480 direction[3 * i] = gazes->gaze[i].updown_dir;
7481 direction[3 * i + 1] = gazes->gaze[i].leftright_dir;
7482 direction[3 * i + 2] = gazes->gaze[i].roll_dir;
7483 degree[2 * i] = gazes->gaze[i].left_right_gaze;
7484 degree[2 * i + 1] = gazes->gaze[i].top_bottom_gaze;
Jason Lee8ce36fa2017-04-19 19:40:37 -07007485
7486 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : gaze_angle=%d, "
7487 "updown_dir=%d, leftright_dir=%d,, roll_dir=%d, "
7488 "left_right_gaze=%d, top_bottom_gaze=%d",
7489 faceDetectionInfo->frame_id, i, angle[i],
7490 direction[3 * i], direction[3 * i + 1],
7491 direction[3 * i + 2],
7492 degree[2 * i], degree[2 * i + 1]);
Thierry Strudel54dc9782017-02-15 12:12:10 -08007493 }
7494 camMetadata.update(QCAMERA3_STATS_GAZE_ANGLE,
7495 (uint8_t *)angle, numFaces);
7496 camMetadata.update(QCAMERA3_STATS_GAZE_DIRECTION,
7497 direction, numFaces * 3);
7498 camMetadata.update(QCAMERA3_STATS_GAZE_DEGREE,
7499 (uint8_t *)degree, numFaces * 2);
7500 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007501 }
7502 }
7503 }
7504 }
7505
7506 IF_META_AVAILABLE(uint32_t, histogramMode, CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata) {
7507 uint8_t fwk_histogramMode = (uint8_t) *histogramMode;
Shuzhen Wang14415f52016-11-16 18:26:18 -08007508 int32_t histogramBins = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007509 camMetadata.update(QCAMERA3_HISTOGRAM_MODE, &fwk_histogramMode, 1);
Shuzhen Wang14415f52016-11-16 18:26:18 -08007510 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE, &fwk_histogramMode, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007511
Shuzhen Wang14415f52016-11-16 18:26:18 -08007512 IF_META_AVAILABLE(int32_t, histBins, CAM_INTF_META_STATS_HISTOGRAM_BINS, metadata) {
7513 histogramBins = *histBins;
7514 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS, &histogramBins, 1);
7515 }
7516
7517 if (fwk_histogramMode == QCAMERA3_HISTOGRAM_MODE_ON && histogramBins > 0) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007518 IF_META_AVAILABLE(cam_hist_stats_t, stats_data, CAM_INTF_META_HISTOGRAM, metadata) {
7519 // process histogram statistics info
Shuzhen Wang14415f52016-11-16 18:26:18 -08007520 int32_t* histogramData = NULL;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007521
7522 switch (stats_data->type) {
7523 case CAM_HISTOGRAM_TYPE_BAYER:
7524 switch (stats_data->bayer_stats.data_type) {
7525 case CAM_STATS_CHANNEL_GR:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007526 histogramData = (int32_t *)stats_data->bayer_stats.gr_stats.hist_buf;
7527 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007528 case CAM_STATS_CHANNEL_GB:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007529 histogramData = (int32_t *)stats_data->bayer_stats.gb_stats.hist_buf;
7530 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007531 case CAM_STATS_CHANNEL_B:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007532 histogramData = (int32_t *)stats_data->bayer_stats.b_stats.hist_buf;
7533 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007534 case CAM_STATS_CHANNEL_Y:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007535 case CAM_STATS_CHANNEL_ALL:
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007536 case CAM_STATS_CHANNEL_R:
7537 default:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007538 histogramData = (int32_t *)stats_data->bayer_stats.r_stats.hist_buf;
7539 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007540 }
7541 break;
7542 case CAM_HISTOGRAM_TYPE_YUV:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007543 histogramData = (int32_t *)stats_data->yuv_stats.hist_buf;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007544 break;
7545 }
7546
Shuzhen Wang14415f52016-11-16 18:26:18 -08007547 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM, histogramData, histogramBins);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007548 }
7549 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007550 }
7551
7552 IF_META_AVAILABLE(uint32_t, sharpnessMapMode,
7553 CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata) {
7554 uint8_t fwk_sharpnessMapMode = (uint8_t) *sharpnessMapMode;
7555 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &fwk_sharpnessMapMode, 1);
7556 }
7557
7558 IF_META_AVAILABLE(cam_sharpness_map_t, sharpnessMap,
7559 CAM_INTF_META_STATS_SHARPNESS_MAP, metadata) {
7560 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP, (int32_t *)sharpnessMap->sharpness,
7561 CAM_MAX_MAP_WIDTH * CAM_MAX_MAP_HEIGHT * 3);
7562 }
7563
7564 IF_META_AVAILABLE(cam_lens_shading_map_t, lensShadingMap,
7565 CAM_INTF_META_LENS_SHADING_MAP, metadata) {
7566 size_t map_height = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.height,
7567 CAM_MAX_SHADING_MAP_HEIGHT);
7568 size_t map_width = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.width,
7569 CAM_MAX_SHADING_MAP_WIDTH);
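        // The lens shading map carries four gain samples (one per Bayer channel) for
        // each grid cell, hence the 4U multiplier in the element count below.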
7570 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP,
7571 lensShadingMap->lens_shading, 4U * map_width * map_height);
7572 }
7573
7574 IF_META_AVAILABLE(uint32_t, toneMapMode, CAM_INTF_META_TONEMAP_MODE, metadata) {
7575 uint8_t fwk_toneMapMode = (uint8_t) *toneMapMode;
7576 camMetadata.update(ANDROID_TONEMAP_MODE, &fwk_toneMapMode, 1);
7577 }
7578
7579 IF_META_AVAILABLE(cam_rgb_tonemap_curves, tonemap, CAM_INTF_META_TONEMAP_CURVES, metadata) {
7580 //Populate CAM_INTF_META_TONEMAP_CURVES
7581 /* ch0 = G, ch 1 = B, ch 2 = R*/
7582 if (tonemap->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
7583 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
7584 tonemap->tonemap_points_cnt,
7585 CAM_MAX_TONEMAP_CURVE_SIZE);
7586 tonemap->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
7587 }
7588
7589 camMetadata.update(ANDROID_TONEMAP_CURVE_GREEN,
7590 &tonemap->curves[0].tonemap_points[0][0],
7591 tonemap->tonemap_points_cnt * 2);
7592
7593 camMetadata.update(ANDROID_TONEMAP_CURVE_BLUE,
7594 &tonemap->curves[1].tonemap_points[0][0],
7595 tonemap->tonemap_points_cnt * 2);
7596
7597 camMetadata.update(ANDROID_TONEMAP_CURVE_RED,
7598 &tonemap->curves[2].tonemap_points[0][0],
7599 tonemap->tonemap_points_cnt * 2);
7600 }
7601
7602 IF_META_AVAILABLE(cam_color_correct_gains_t, colorCorrectionGains,
7603 CAM_INTF_META_COLOR_CORRECT_GAINS, metadata) {
7604 camMetadata.update(ANDROID_COLOR_CORRECTION_GAINS, colorCorrectionGains->gains,
7605 CC_GAIN_MAX);
7606 }
7607
7608 IF_META_AVAILABLE(cam_color_correct_matrix_t, colorCorrectionMatrix,
7609 CAM_INTF_META_COLOR_CORRECT_TRANSFORM, metadata) {
7610 camMetadata.update(ANDROID_COLOR_CORRECTION_TRANSFORM,
7611 (camera_metadata_rational_t *)(void *)colorCorrectionMatrix->transform_matrix,
7612 CC_MATRIX_COLS * CC_MATRIX_ROWS);
7613 }
7614
7615 IF_META_AVAILABLE(cam_profile_tone_curve, toneCurve,
7616 CAM_INTF_META_PROFILE_TONE_CURVE, metadata) {
7617 if (toneCurve->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
7618 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
7619 toneCurve->tonemap_points_cnt,
7620 CAM_MAX_TONEMAP_CURVE_SIZE);
7621 toneCurve->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
7622 }
7623 camMetadata.update(ANDROID_SENSOR_PROFILE_TONE_CURVE,
7624 (float*)toneCurve->curve.tonemap_points,
7625 toneCurve->tonemap_points_cnt * 2);
7626 }
7627
7628 IF_META_AVAILABLE(cam_color_correct_gains_t, predColorCorrectionGains,
7629 CAM_INTF_META_PRED_COLOR_CORRECT_GAINS, metadata) {
7630 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_GAINS,
7631 predColorCorrectionGains->gains, 4);
7632 }
7633
7634 IF_META_AVAILABLE(cam_color_correct_matrix_t, predColorCorrectionMatrix,
7635 CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM, metadata) {
7636 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
7637 (camera_metadata_rational_t *)(void *)predColorCorrectionMatrix->transform_matrix,
7638 CC_MATRIX_ROWS * CC_MATRIX_COLS);
7639 }
7640
7641 IF_META_AVAILABLE(float, otpWbGrGb, CAM_INTF_META_OTP_WB_GRGB, metadata) {
7642 camMetadata.update(ANDROID_SENSOR_GREEN_SPLIT, otpWbGrGb, 1);
7643 }
7644
7645 IF_META_AVAILABLE(uint32_t, blackLevelLock, CAM_INTF_META_BLACK_LEVEL_LOCK, metadata) {
7646 uint8_t fwk_blackLevelLock = (uint8_t) *blackLevelLock;
7647 camMetadata.update(ANDROID_BLACK_LEVEL_LOCK, &fwk_blackLevelLock, 1);
7648 }
7649
7650 IF_META_AVAILABLE(uint32_t, sceneFlicker, CAM_INTF_META_SCENE_FLICKER, metadata) {
7651 uint8_t fwk_sceneFlicker = (uint8_t) *sceneFlicker;
7652 camMetadata.update(ANDROID_STATISTICS_SCENE_FLICKER, &fwk_sceneFlicker, 1);
7653 }
7654
7655 IF_META_AVAILABLE(uint32_t, effectMode, CAM_INTF_PARM_EFFECT, metadata) {
7656 int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
7657 *effectMode);
7658 if (NAME_NOT_FOUND != val) {
7659 uint8_t fwk_effectMode = (uint8_t)val;
7660 camMetadata.update(ANDROID_CONTROL_EFFECT_MODE, &fwk_effectMode, 1);
7661 }
7662 }
7663
7664 IF_META_AVAILABLE(cam_test_pattern_data_t, testPatternData,
7665 CAM_INTF_META_TEST_PATTERN_DATA, metadata) {
7666 int32_t fwk_testPatternMode = lookupFwkName(TEST_PATTERN_MAP,
7667 METADATA_MAP_SIZE(TEST_PATTERN_MAP), testPatternData->mode);
7668 if (NAME_NOT_FOUND != fwk_testPatternMode) {
7669 camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &fwk_testPatternMode, 1);
7670 }
7671 int32_t fwk_testPatternData[4];
7672 fwk_testPatternData[0] = testPatternData->r;
7673 fwk_testPatternData[3] = testPatternData->b;
7674 switch (gCamCapability[mCameraId]->color_arrangement) {
7675 case CAM_FILTER_ARRANGEMENT_RGGB:
7676 case CAM_FILTER_ARRANGEMENT_GRBG:
7677 fwk_testPatternData[1] = testPatternData->gr;
7678 fwk_testPatternData[2] = testPatternData->gb;
7679 break;
7680 case CAM_FILTER_ARRANGEMENT_GBRG:
7681 case CAM_FILTER_ARRANGEMENT_BGGR:
7682 fwk_testPatternData[2] = testPatternData->gr;
7683 fwk_testPatternData[1] = testPatternData->gb;
7684 break;
7685 default:
7686 LOGE("color arrangement %d is not supported",
7687 gCamCapability[mCameraId]->color_arrangement);
7688 break;
7689 }
7690 camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_DATA, fwk_testPatternData, 4);
7691 }
7692
7693 IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, metadata) {
7694 camMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
7695 }
7696
7697 IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, metadata) {
7698 String8 str((const char *)gps_methods);
7699 camMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
7700 }
7701
7702 IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, metadata) {
7703 camMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
7704 }
7705
7706 IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, metadata) {
7707 camMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
7708 }
7709
7710 IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, metadata) {
7711 uint8_t fwk_jpeg_quality = (uint8_t) *jpeg_quality;
7712 camMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
7713 }
7714
7715 IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, metadata) {
7716 uint8_t fwk_thumb_quality = (uint8_t) *thumb_quality;
7717 camMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
7718 }
7719
7720 IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, metadata) {
7721 int32_t fwk_thumb_size[2];
7722 fwk_thumb_size[0] = thumb_size->width;
7723 fwk_thumb_size[1] = thumb_size->height;
7724 camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
7725 }
7726
Shuzhen Wang2fea89e2017-05-08 17:02:15 -07007727 // Skip reprocess metadata if there is no input stream.
7728 if (mInputStreamInfo.dim.width > 0 && mInputStreamInfo.dim.height > 0) {
7729 IF_META_AVAILABLE(int32_t, privateData, CAM_INTF_META_PRIVATE_DATA, metadata) {
7730 camMetadata.update(QCAMERA3_PRIVATEDATA_REPROCESS,
7731 privateData,
7732 MAX_METADATA_PRIVATE_PAYLOAD_SIZE_IN_BYTES / sizeof(int32_t));
7733 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007734 }
7735
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007736 IF_META_AVAILABLE(int32_t, meteringMode, CAM_INTF_PARM_AEC_ALGO_TYPE, metadata) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08007737 camMetadata.update(QCAMERA3_EXPOSURE_METER,
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007738 meteringMode, 1);
7739 }
7740
Thierry Strudel54dc9782017-02-15 12:12:10 -08007741 IF_META_AVAILABLE(cam_asd_hdr_scene_data_t, hdr_scene_data,
7742 CAM_INTF_META_ASD_HDR_SCENE_DATA, metadata) {
7743 LOGD("hdr_scene_data: %d %f\n",
7744 hdr_scene_data->is_hdr_scene, hdr_scene_data->hdr_confidence);
7745 uint8_t isHdr = hdr_scene_data->is_hdr_scene;
7746 float isHdrConfidence = hdr_scene_data->hdr_confidence;
7747 camMetadata.update(QCAMERA3_STATS_IS_HDR_SCENE,
7748 &isHdr, 1);
7749 camMetadata.update(QCAMERA3_STATS_IS_HDR_SCENE_CONFIDENCE,
7750 &isHdrConfidence, 1);
7751 }
7752
7753
7754
Thierry Strudel3d639192016-09-09 11:52:26 -07007755 if (metadata->is_tuning_params_valid) {
7756 uint8_t tuning_meta_data_blob[sizeof(tuning_params_t)];
7757 uint8_t *data = (uint8_t *)&tuning_meta_data_blob[0];
7758 metadata->tuning_params.tuning_data_version = TUNING_DATA_VERSION;
7759
7760
7761 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_data_version),
7762 sizeof(uint32_t));
7763 data += sizeof(uint32_t);
7764
7765 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_sensor_data_size),
7766 sizeof(uint32_t));
7767 LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
7768 data += sizeof(uint32_t);
7769
7770 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_vfe_data_size),
7771 sizeof(uint32_t));
7772 LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
7773 data += sizeof(uint32_t);
7774
7775 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cpp_data_size),
7776 sizeof(uint32_t));
7777 LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
7778 data += sizeof(uint32_t);
7779
7780 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cac_data_size),
7781 sizeof(uint32_t));
7782 LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
7783 data += sizeof(uint32_t);
7784
7785 metadata->tuning_params.tuning_mod3_data_size = 0;
7786 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_mod3_data_size),
7787 sizeof(uint32_t));
7788 LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
7789 data += sizeof(uint32_t);
7790
7791 size_t count = MIN(metadata->tuning_params.tuning_sensor_data_size,
7792 TUNING_SENSOR_DATA_MAX);
7793 memcpy(data, ((uint8_t *)&metadata->tuning_params.data),
7794 count);
7795 data += count;
7796
7797 count = MIN(metadata->tuning_params.tuning_vfe_data_size,
7798 TUNING_VFE_DATA_MAX);
7799 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_VFE_DATA_OFFSET]),
7800 count);
7801 data += count;
7802
7803 count = MIN(metadata->tuning_params.tuning_cpp_data_size,
7804 TUNING_CPP_DATA_MAX);
7805 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CPP_DATA_OFFSET]),
7806 count);
7807 data += count;
7808
7809 count = MIN(metadata->tuning_params.tuning_cac_data_size,
7810 TUNING_CAC_DATA_MAX);
7811 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CAC_DATA_OFFSET]),
7812 count);
7813 data += count;
7814
7815 camMetadata.update(QCAMERA3_TUNING_META_DATA_BLOB,
7816 (int32_t *)(void *)tuning_meta_data_blob,
7817 (size_t)(data-tuning_meta_data_blob) / sizeof(uint32_t));
7818 }
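    // Hedged sketch (illustrative, not part of the HAL flow): a consumer of the
    // QCAMERA3_TUNING_META_DATA_BLOB vendor tag could walk the packed layout built
    // above -- six uint32_t fields (version and the sensor/VFE/CPP/CAC/mod3 sizes)
    // followed by the variable-length data sections -- assuming the stored sizes do
    // not exceed the TUNING_*_DATA_MAX caps applied during packing. "blob" below is
    // a hypothetical pointer to the tag payload.
    //
    //   const uint8_t *p = blob;
    //   uint32_t hdr[6];
    //   memcpy(hdr, p, sizeof(hdr));           // version, sensor, vfe, cpp, cac, mod3 sizes
    //   p += sizeof(hdr);
    //   const uint8_t *sensorData = p;                     // hdr[1] bytes
    //   const uint8_t *vfeData    = sensorData + hdr[1];   // hdr[2] bytes
    //   const uint8_t *cppData    = vfeData    + hdr[2];   // hdr[3] bytes
    //   const uint8_t *cacData    = cppData    + hdr[3];   // hdr[4] bytes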
7819
7820 IF_META_AVAILABLE(cam_neutral_col_point_t, neuColPoint,
7821 CAM_INTF_META_NEUTRAL_COL_POINT, metadata) {
7822 camMetadata.update(ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
7823 (camera_metadata_rational_t *)(void *)neuColPoint->neutral_col_point,
7824 NEUTRAL_COL_POINTS);
7825 }
7826
7827 IF_META_AVAILABLE(uint32_t, shadingMapMode, CAM_INTF_META_LENS_SHADING_MAP_MODE, metadata) {
7828 uint8_t fwk_shadingMapMode = (uint8_t) *shadingMapMode;
7829 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &fwk_shadingMapMode, 1);
7830 }
7831
7832 IF_META_AVAILABLE(cam_area_t, hAeRegions, CAM_INTF_META_AEC_ROI, metadata) {
7833 int32_t aeRegions[REGIONS_TUPLE_COUNT];
7834 // Adjust crop region from sensor output coordinate system to active
7835 // array coordinate system.
Shuzhen Wang91c14e82017-08-21 17:56:57 -07007836 cam_rect_t hAeRect = hAeRegions->rect;
7837 mCropRegionMapper.toActiveArray(hAeRect.left, hAeRect.top,
7838 hAeRect.width, hAeRect.height);
Thierry Strudel3d639192016-09-09 11:52:26 -07007839
Shuzhen Wang91c14e82017-08-21 17:56:57 -07007840 convertToRegions(hAeRect, aeRegions, hAeRegions->weight);
Thierry Strudel3d639192016-09-09 11:52:26 -07007841 camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions,
7842 REGIONS_TUPLE_COUNT);
7843 LOGD("Metadata : ANDROID_CONTROL_AE_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
7844 aeRegions[0], aeRegions[1], aeRegions[2], aeRegions[3],
Shuzhen Wang91c14e82017-08-21 17:56:57 -07007845 hAeRect.left, hAeRect.top, hAeRect.width,
7846 hAeRect.height);
Thierry Strudel3d639192016-09-09 11:52:26 -07007847 }
7848
Shuzhen Wang181c57b2017-07-21 11:39:44 -07007849 if (!pendingRequest.focusStateSent) {
7850 if (pendingRequest.focusStateValid) {
7851 camMetadata.update(ANDROID_CONTROL_AF_STATE, &pendingRequest.focusState, 1);
7852 LOGD("Metadata : ANDROID_CONTROL_AF_STATE %u", pendingRequest.focusState);
Shuzhen Wang0cb8cdf2016-07-14 11:56:49 -07007853 } else {
Shuzhen Wang181c57b2017-07-21 11:39:44 -07007854 IF_META_AVAILABLE(uint32_t, afState, CAM_INTF_META_AF_STATE, metadata) {
7855 uint8_t fwk_afState = (uint8_t) *afState;
7856 camMetadata.update(ANDROID_CONTROL_AF_STATE, &fwk_afState, 1);
7857 LOGD("Metadata : ANDROID_CONTROL_AF_STATE %u", *afState);
7858 }
Shuzhen Wang0cb8cdf2016-07-14 11:56:49 -07007859 }
7860 }
7861
Thierry Strudel3d639192016-09-09 11:52:26 -07007862 IF_META_AVAILABLE(float, focusDistance, CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata) {
7863 camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1);
7864 }
7865
7866 IF_META_AVAILABLE(float, focusRange, CAM_INTF_META_LENS_FOCUS_RANGE, metadata) {
7867 camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 2);
7868 }
7869
7870 IF_META_AVAILABLE(cam_af_lens_state_t, lensState, CAM_INTF_META_LENS_STATE, metadata) {
7871 uint8_t fwk_lensState = *lensState;
7872 camMetadata.update(ANDROID_LENS_STATE , &fwk_lensState, 1);
7873 }
7874
Thierry Strudel3d639192016-09-09 11:52:26 -07007875 IF_META_AVAILABLE(uint32_t, hal_ab_mode, CAM_INTF_PARM_ANTIBANDING, metadata) {
Shuzhen Wangf6890e02016-08-12 14:28:54 -07007876 uint32_t ab_mode = *hal_ab_mode;
7877 if (ab_mode == CAM_ANTIBANDING_MODE_AUTO_60HZ ||
7878 ab_mode == CAM_ANTIBANDING_MODE_AUTO_50HZ) {
7879 ab_mode = CAM_ANTIBANDING_MODE_AUTO;
7880 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007881 int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
Shuzhen Wangf6890e02016-08-12 14:28:54 -07007882 ab_mode);
Thierry Strudel3d639192016-09-09 11:52:26 -07007883 if (NAME_NOT_FOUND != val) {
7884 uint8_t fwk_ab_mode = (uint8_t)val;
7885 camMetadata.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &fwk_ab_mode, 1);
7886 }
7887 }
7888
7889 IF_META_AVAILABLE(uint32_t, bestshotMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
7890 int val = lookupFwkName(SCENE_MODES_MAP,
7891 METADATA_MAP_SIZE(SCENE_MODES_MAP), *bestshotMode);
7892 if (NAME_NOT_FOUND != val) {
7893 uint8_t fwkBestshotMode = (uint8_t)val;
7894 camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkBestshotMode, 1);
7895 LOGD("Metadata : ANDROID_CONTROL_SCENE_MODE");
7896 } else {
7897 LOGH("Metadata not found : ANDROID_CONTROL_SCENE_MODE");
7898 }
7899 }
7900
7901 IF_META_AVAILABLE(uint32_t, mode, CAM_INTF_META_MODE, metadata) {
7902 uint8_t fwk_mode = (uint8_t) *mode;
7903 camMetadata.update(ANDROID_CONTROL_MODE, &fwk_mode, 1);
7904 }
7905
7906 /* Constant metadata values to be updated */
7907 uint8_t hotPixelModeFast = ANDROID_HOT_PIXEL_MODE_FAST;
7908 camMetadata.update(ANDROID_HOT_PIXEL_MODE, &hotPixelModeFast, 1);
7909
7910 uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
7911 camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
7912
7913 int32_t hotPixelMap[2];
7914 camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP, &hotPixelMap[0], 0);
7915
7916 // CDS
7917 IF_META_AVAILABLE(int32_t, cds, CAM_INTF_PARM_CDS_MODE, metadata) {
7918 camMetadata.update(QCAMERA3_CDS_MODE, cds, 1);
7919 }
7920
Thierry Strudel04e026f2016-10-10 11:27:36 -07007921 IF_META_AVAILABLE(cam_sensor_hdr_type_t, vhdr, CAM_INTF_PARM_SENSOR_HDR, metadata) {
7922 int32_t fwk_hdr;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007923 int8_t curr_hdr_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) != 0);
Thierry Strudel04e026f2016-10-10 11:27:36 -07007924 if(*vhdr == CAM_SENSOR_HDR_OFF) {
7925 fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_OFF;
7926 } else {
7927 fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_ON;
7928 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08007929
7930 if(fwk_hdr != curr_hdr_state) {
7931 LOGH("PROFILE_META_HDR_TOGGLED value=%d", fwk_hdr);
7932 if(fwk_hdr)
7933 mCurrFeatureState |= CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
7934 else
7935 mCurrFeatureState &= ~CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
7936 }
Thierry Strudel04e026f2016-10-10 11:27:36 -07007937 camMetadata.update(QCAMERA3_VIDEO_HDR_MODE, &fwk_hdr, 1);
7938 }
7939
Thierry Strudel54dc9782017-02-15 12:12:10 -08007940 //binning correction
7941 IF_META_AVAILABLE(cam_binning_correction_mode_t, bin_correction,
7942 CAM_INTF_META_BINNING_CORRECTION_MODE, metadata) {
7943 int32_t fwk_bin_mode = (int32_t) *bin_correction;
7944 camMetadata.update(QCAMERA3_BINNING_CORRECTION_MODE, &fwk_bin_mode, 1);
7945 }
7946
Thierry Strudel04e026f2016-10-10 11:27:36 -07007947 IF_META_AVAILABLE(cam_ir_mode_type_t, ir, CAM_INTF_META_IR_MODE, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007948 int32_t fwk_ir = (int32_t) *ir;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007949 int8_t curr_ir_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_IR ) != 0);
7950 int8_t is_ir_on = 0;
7951
7952 is_ir_on = (fwk_ir > 0) ? 1 : 0;
7953 if(is_ir_on != curr_ir_state) {
7954 LOGH("PROFILE_META_IR_TOGGLED value=%d", fwk_ir);
7955 if(is_ir_on)
7956 mCurrFeatureState |= CAM_QCOM_FEATURE_IR;
7957 else
7958 mCurrFeatureState &= ~CAM_QCOM_FEATURE_IR;
7959 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007960 camMetadata.update(QCAMERA3_IR_MODE, &fwk_ir, 1);
Thierry Strudel04e026f2016-10-10 11:27:36 -07007961 }
7962
Thierry Strudel269c81a2016-10-12 12:13:59 -07007963 // AEC SPEED
7964 IF_META_AVAILABLE(float, aec, CAM_INTF_META_AEC_CONVERGENCE_SPEED, metadata) {
7965 camMetadata.update(QCAMERA3_AEC_CONVERGENCE_SPEED, aec, 1);
7966 }
7967
7968 // AWB SPEED
7969 IF_META_AVAILABLE(float, awb, CAM_INTF_META_AWB_CONVERGENCE_SPEED, metadata) {
7970 camMetadata.update(QCAMERA3_AWB_CONVERGENCE_SPEED, awb, 1);
7971 }
7972
Thierry Strudel3d639192016-09-09 11:52:26 -07007973 // TNR
7974 IF_META_AVAILABLE(cam_denoise_param_t, tnr, CAM_INTF_PARM_TEMPORAL_DENOISE, metadata) {
7975 uint8_t tnr_enable = tnr->denoise_enable;
7976 int32_t tnr_process_type = (int32_t)tnr->process_plates;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007977 int8_t curr_tnr_state = ((mCurrFeatureState & CAM_QTI_FEATURE_SW_TNR) != 0) ;
7978 int8_t is_tnr_on = 0;
7979
7980 is_tnr_on = (tnr_enable > 0) ? 1 : 0;
7981 if(is_tnr_on != curr_tnr_state) {
7982 LOGH("PROFILE_META_TNR_TOGGLED value=%d", tnr_enable);
7983 if(is_tnr_on)
7984 mCurrFeatureState |= CAM_QTI_FEATURE_SW_TNR;
7985 else
7986 mCurrFeatureState &= ~CAM_QTI_FEATURE_SW_TNR;
7987 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007988
7989 camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
7990 camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
7991 }
7992
7993 // Reprocess crop data
7994 IF_META_AVAILABLE(cam_crop_data_t, crop_data, CAM_INTF_META_CROP_DATA, metadata) {
7995 uint8_t cnt = crop_data->num_of_streams;
7996 if ( (0 >= cnt) || (cnt > MAX_NUM_STREAMS)) {
7997 // mm-qcamera-daemon only posts crop_data for streams
7998 // not linked to pproc, so the absence of valid crop metadata
7999 // is not necessarily an error case.
8000 LOGD("No valid crop metadata entries");
8001 } else {
8002 uint32_t reproc_stream_id;
8003 if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
8004 LOGD("No reprocessible stream found, ignore crop data");
8005 } else {
8006 int rc = NO_ERROR;
8007 Vector<int32_t> roi_map;
8008 int32_t *crop = new int32_t[cnt*4];
8009 if (NULL == crop) {
8010 rc = NO_MEMORY;
8011 }
8012 if (NO_ERROR == rc) {
8013 int32_t streams_found = 0;
8014 for (size_t i = 0; i < cnt; i++) {
8015 if (crop_data->crop_info[i].stream_id == reproc_stream_id) {
8016 if (pprocDone) {
8017 // HAL already does internal reprocessing,
8018 // either via reprocessing before JPEG encoding,
8019 // or offline postprocessing for pproc bypass case.
8020 crop[0] = 0;
8021 crop[1] = 0;
8022 crop[2] = mInputStreamInfo.dim.width;
8023 crop[3] = mInputStreamInfo.dim.height;
8024 } else {
8025 crop[0] = crop_data->crop_info[i].crop.left;
8026 crop[1] = crop_data->crop_info[i].crop.top;
8027 crop[2] = crop_data->crop_info[i].crop.width;
8028 crop[3] = crop_data->crop_info[i].crop.height;
8029 }
8030 roi_map.add(crop_data->crop_info[i].roi_map.left);
8031 roi_map.add(crop_data->crop_info[i].roi_map.top);
8032 roi_map.add(crop_data->crop_info[i].roi_map.width);
8033 roi_map.add(crop_data->crop_info[i].roi_map.height);
8034 streams_found++;
8035 LOGD("Adding reprocess crop data for stream %dx%d, %dx%d",
8036 crop[0], crop[1], crop[2], crop[3]);
8037 LOGD("Adding reprocess crop roi map for stream %dx%d, %dx%d",
8038 crop_data->crop_info[i].roi_map.left,
8039 crop_data->crop_info[i].roi_map.top,
8040 crop_data->crop_info[i].roi_map.width,
8041 crop_data->crop_info[i].roi_map.height);
8042 break;
8043
8044 }
8045 }
8046 camMetadata.update(QCAMERA3_CROP_COUNT_REPROCESS,
8047 &streams_found, 1);
8048 camMetadata.update(QCAMERA3_CROP_REPROCESS,
8049 crop, (size_t)(streams_found * 4));
8050 if (roi_map.array()) {
8051 camMetadata.update(QCAMERA3_CROP_ROI_MAP_REPROCESS,
8052 roi_map.array(), roi_map.size());
8053 }
8054 }
8055 if (crop) {
8056 delete [] crop;
8057 }
8058 }
8059 }
8060 }
8061
8062 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
8063 // Regardless of whether CAC is supported, CTS expects the CAC result to be non-NULL,
8064 // so hardcode the CAC result to OFF mode.
8065 uint8_t fwkCacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
8066 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &fwkCacMode, 1);
8067 } else {
8068 IF_META_AVAILABLE(cam_aberration_mode_t, cacMode, CAM_INTF_PARM_CAC, metadata) {
8069 int val = lookupFwkName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
8070 *cacMode);
8071 if (NAME_NOT_FOUND != val) {
8072 uint8_t resultCacMode = (uint8_t)val;
8073 // check whether CAC result from CB is equal to Framework set CAC mode
8074 // If not equal, report the CAC mode that came in the corresponding request
Shuzhen Wang181c57b2017-07-21 11:39:44 -07008075 if (pendingRequest.fwkCacMode != resultCacMode) {
8076 resultCacMode = pendingRequest.fwkCacMode;
Thierry Strudel3d639192016-09-09 11:52:26 -07008077 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08008078 //Check if CAC is disabled by property
8079 if (m_cacModeDisabled) {
8080 resultCacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
8081 }
8082
Shuzhen Wang181c57b2017-07-21 11:39:44 -07008083 LOGD("fwk_cacMode=%d resultCacMode=%d", pendingRequest.fwkCacMode, resultCacMode);
Thierry Strudel3d639192016-09-09 11:52:26 -07008084 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &resultCacMode, 1);
8085 } else {
8086 LOGE("Invalid CAC camera parameter: %d", *cacMode);
8087 }
8088 }
8089 }
8090
8091 // Post blob of cam_cds_data through vendor tag.
8092 IF_META_AVAILABLE(cam_cds_data_t, cdsInfo, CAM_INTF_META_CDS_DATA, metadata) {
8093 uint8_t cnt = cdsInfo->num_of_streams;
8094 cam_cds_data_t cdsDataOverride;
8095 memset(&cdsDataOverride, 0, sizeof(cdsDataOverride));
8096 cdsDataOverride.session_cds_enable = cdsInfo->session_cds_enable;
8097 cdsDataOverride.num_of_streams = 1;
8098 if ((0 < cnt) && (cnt <= MAX_NUM_STREAMS)) {
8099 uint32_t reproc_stream_id;
8100 if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
8101 LOGD("No reprocessible stream found, ignore cds data");
8102 } else {
8103 for (size_t i = 0; i < cnt; i++) {
8104 if (cdsInfo->cds_info[i].stream_id ==
8105 reproc_stream_id) {
8106 cdsDataOverride.cds_info[0].cds_enable =
8107 cdsInfo->cds_info[i].cds_enable;
8108 break;
8109 }
8110 }
8111 }
8112 } else {
8113 LOGD("Invalid stream count %d in CDS_DATA", cnt);
8114 }
8115 camMetadata.update(QCAMERA3_CDS_INFO,
8116 (uint8_t *)&cdsDataOverride,
8117 sizeof(cam_cds_data_t));
8118 }
8119
8120 // Ldaf calibration data
8121 if (!mLdafCalibExist) {
8122 IF_META_AVAILABLE(uint32_t, ldafCalib,
8123 CAM_INTF_META_LDAF_EXIF, metadata) {
8124 mLdafCalibExist = true;
8125 mLdafCalib[0] = ldafCalib[0];
8126 mLdafCalib[1] = ldafCalib[1];
8127 LOGD("ldafCalib[0] is %d, ldafCalib[1] is %d",
8128 ldafCalib[0], ldafCalib[1]);
8129 }
8130 }
8131
Thierry Strudel54dc9782017-02-15 12:12:10 -08008132 // EXIF debug data through vendor tag
8133 /*
8134 * Mobicat Mask can assume 3 values:
8135 * 1 refers to Mobicat data,
8136 * 2 refers to Stats Debug and Exif Debug Data
8137 * 3 refers to Mobicat and Stats Debug Data
8138 * We want to make sure that we are sending Exif debug data
8139 * only when Mobicat Mask is 2.
8140 */
8141 if ((mExifParams.debug_params != NULL) && (getMobicatMask() == 2)) {
8142 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB,
8143 (uint8_t *)(void *)mExifParams.debug_params,
8144 sizeof(mm_jpeg_debug_exif_params_t));
8145 }
8146
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008147 // Reprocess and DDM debug data through vendor tag
8148 cam_reprocess_info_t repro_info;
8149 memset(&repro_info, 0, sizeof(cam_reprocess_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008150 IF_META_AVAILABLE(cam_stream_crop_info_t, sensorCropInfo,
8151 CAM_INTF_META_SNAP_CROP_INFO_SENSOR, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008152 memcpy(&(repro_info.sensor_crop_info), sensorCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008153 }
8154 IF_META_AVAILABLE(cam_stream_crop_info_t, camifCropInfo,
8155 CAM_INTF_META_SNAP_CROP_INFO_CAMIF, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008156 memcpy(&(repro_info.camif_crop_info), camifCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008157 }
8158 IF_META_AVAILABLE(cam_stream_crop_info_t, ispCropInfo,
8159 CAM_INTF_META_SNAP_CROP_INFO_ISP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008160 memcpy(&(repro_info.isp_crop_info), ispCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008161 }
8162 IF_META_AVAILABLE(cam_stream_crop_info_t, cppCropInfo,
8163 CAM_INTF_META_SNAP_CROP_INFO_CPP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008164 memcpy(&(repro_info.cpp_crop_info), cppCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008165 }
8166 IF_META_AVAILABLE(cam_focal_length_ratio_t, ratio,
8167 CAM_INTF_META_AF_FOCAL_LENGTH_RATIO, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008168 memcpy(&(repro_info.af_focal_length_ratio), ratio, sizeof(cam_focal_length_ratio_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008169 }
8170 IF_META_AVAILABLE(int32_t, flip, CAM_INTF_PARM_FLIP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008171 memcpy(&(repro_info.pipeline_flip), flip, sizeof(int32_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008172 }
8173 IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
8174 CAM_INTF_PARM_ROTATION, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008175 memcpy(&(repro_info.rotation_info), rotationInfo, sizeof(cam_rotation_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008176 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008177 IF_META_AVAILABLE(cam_area_t, afRoi, CAM_INTF_META_AF_ROI, metadata) {
8178 memcpy(&(repro_info.af_roi), afRoi, sizeof(cam_area_t));
8179 }
8180 IF_META_AVAILABLE(cam_dyn_img_data_t, dynMask, CAM_INTF_META_IMG_DYN_FEAT, metadata) {
8181 memcpy(&(repro_info.dyn_mask), dynMask, sizeof(cam_dyn_img_data_t));
8182 }
8183 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB,
8184 (uint8_t *)&repro_info, sizeof(cam_reprocess_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008185
Thierry Strudel295a0ca2016-11-03 18:38:47 -07008186 // INSTANT AEC MODE
8187 IF_META_AVAILABLE(uint8_t, instant_aec_mode,
8188 CAM_INTF_PARM_INSTANT_AEC, metadata) {
8189 camMetadata.update(QCAMERA3_INSTANT_AEC_MODE, instant_aec_mode, 1);
8190 }
8191
Shuzhen Wange763e802016-03-31 10:24:29 -07008192 // AF scene change
8193 IF_META_AVAILABLE(uint8_t, afSceneChange, CAM_INTF_META_AF_SCENE_CHANGE, metadata) {
8194 camMetadata.update(NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE, afSceneChange, 1);
8195 }
8196
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07008197 // Enable ZSL
8198 if (enableZsl != nullptr) {
8199 uint8_t value = *enableZsl ?
8200 ANDROID_CONTROL_ENABLE_ZSL_TRUE : ANDROID_CONTROL_ENABLE_ZSL_FALSE;
8201 camMetadata.update(ANDROID_CONTROL_ENABLE_ZSL, &value, 1);
8202 }
8203
Xu Han821ea9c2017-05-23 09:00:40 -07008204 // OIS Data
8205 IF_META_AVAILABLE(cam_frame_ois_info_t, frame_ois_data, CAM_INTF_META_FRAME_OIS_DATA, metadata) {
8206 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_FRAME_TIMESTAMP_VSYNC,
8207 &(frame_ois_data->frame_sof_timestamp_vsync), 1);
8208 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_FRAME_TIMESTAMP_BOOTTIME,
8209 &(frame_ois_data->frame_sof_timestamp_boottime), 1);
8210 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_TIMESTAMPS_BOOTTIME,
8211 frame_ois_data->ois_sample_timestamp_boottime, frame_ois_data->num_ois_sample);
8212 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_SHIFT_X,
8213 frame_ois_data->ois_sample_shift_x, frame_ois_data->num_ois_sample);
8214 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_SHIFT_Y,
8215 frame_ois_data->ois_sample_shift_y, frame_ois_data->num_ois_sample);
Xue Tu2c3e9142017-08-18 16:23:52 -07008216 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_SHIFT_PIXEL_X,
8217 frame_ois_data->ois_sample_shift_pixel_x, frame_ois_data->num_ois_sample);
8218 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_SHIFT_PIXEL_Y,
8219 frame_ois_data->ois_sample_shift_pixel_y, frame_ois_data->num_ois_sample);
Xu Han821ea9c2017-05-23 09:00:40 -07008220 }
8221
Thierry Strudel3d639192016-09-09 11:52:26 -07008222 resultMetadata = camMetadata.release();
8223 return resultMetadata;
8224}
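/*
 * Hedged usage sketch (illustrative only; names other than the camera3 structs
 * and this HAL's members are hypothetical): the camera_metadata_t returned above
 * is typically attached to a capture result before being delivered to the
 * framework callback, and ownership of the buffer passes to whoever eventually
 * calls free_camera_metadata() on it.
 *
 *   camera3_capture_result_t result = {};
 *   result.frame_number   = frameNumber;          // hypothetical frame number
 *   result.result         = resultMetadata;       // metadata built above
 *   result.partial_result = PARTIAL_RESULT_COUNT; // marks the final partial result
 *   mCallbackOps->process_capture_result(mCallbackOps, &result);
 */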
8225
8226/*===========================================================================
8227 * FUNCTION : saveExifParams
8228 *
8229 * DESCRIPTION: cache 3A/stats EXIF debug parameters from the metadata callback
8230 *
8231 * PARAMETERS :
8232 * @metadata : metadata information from callback
8233 *
8234 * RETURN : none
8235 *
8236 *==========================================================================*/
8237void QCamera3HardwareInterface::saveExifParams(metadata_buffer_t *metadata)
8238{
8239 IF_META_AVAILABLE(cam_ae_exif_debug_t, ae_exif_debug_params,
8240 CAM_INTF_META_EXIF_DEBUG_AE, metadata) {
8241 if (mExifParams.debug_params) {
8242 mExifParams.debug_params->ae_debug_params = *ae_exif_debug_params;
8243 mExifParams.debug_params->ae_debug_params_valid = TRUE;
8244 }
8245 }
8246 IF_META_AVAILABLE(cam_awb_exif_debug_t,awb_exif_debug_params,
8247 CAM_INTF_META_EXIF_DEBUG_AWB, metadata) {
8248 if (mExifParams.debug_params) {
8249 mExifParams.debug_params->awb_debug_params = *awb_exif_debug_params;
8250 mExifParams.debug_params->awb_debug_params_valid = TRUE;
8251 }
8252 }
8253 IF_META_AVAILABLE(cam_af_exif_debug_t,af_exif_debug_params,
8254 CAM_INTF_META_EXIF_DEBUG_AF, metadata) {
8255 if (mExifParams.debug_params) {
8256 mExifParams.debug_params->af_debug_params = *af_exif_debug_params;
8257 mExifParams.debug_params->af_debug_params_valid = TRUE;
8258 }
8259 }
8260 IF_META_AVAILABLE(cam_asd_exif_debug_t, asd_exif_debug_params,
8261 CAM_INTF_META_EXIF_DEBUG_ASD, metadata) {
8262 if (mExifParams.debug_params) {
8263 mExifParams.debug_params->asd_debug_params = *asd_exif_debug_params;
8264 mExifParams.debug_params->asd_debug_params_valid = TRUE;
8265 }
8266 }
8267 IF_META_AVAILABLE(cam_stats_buffer_exif_debug_t,stats_exif_debug_params,
8268 CAM_INTF_META_EXIF_DEBUG_STATS, metadata) {
8269 if (mExifParams.debug_params) {
8270 mExifParams.debug_params->stats_debug_params = *stats_exif_debug_params;
8271 mExifParams.debug_params->stats_debug_params_valid = TRUE;
8272 }
8273 }
8274 IF_META_AVAILABLE(cam_bestats_buffer_exif_debug_t,bestats_exif_debug_params,
8275 CAM_INTF_META_EXIF_DEBUG_BESTATS, metadata) {
8276 if (mExifParams.debug_params) {
8277 mExifParams.debug_params->bestats_debug_params = *bestats_exif_debug_params;
8278 mExifParams.debug_params->bestats_debug_params_valid = TRUE;
8279 }
8280 }
8281 IF_META_AVAILABLE(cam_bhist_buffer_exif_debug_t, bhist_exif_debug_params,
8282 CAM_INTF_META_EXIF_DEBUG_BHIST, metadata) {
8283 if (mExifParams.debug_params) {
8284 mExifParams.debug_params->bhist_debug_params = *bhist_exif_debug_params;
8285 mExifParams.debug_params->bhist_debug_params_valid = TRUE;
8286 }
8287 }
8288 IF_META_AVAILABLE(cam_q3a_tuning_info_t, q3a_tuning_exif_debug_params,
8289 CAM_INTF_META_EXIF_DEBUG_3A_TUNING, metadata) {
8290 if (mExifParams.debug_params) {
8291 mExifParams.debug_params->q3a_tuning_debug_params = *q3a_tuning_exif_debug_params;
8292 mExifParams.debug_params->q3a_tuning_debug_params_valid = TRUE;
8293 }
8294 }
8295}
8296
8297/*===========================================================================
8298 * FUNCTION : get3AExifParams
8299 *
8300 * DESCRIPTION: return the cached EXIF parameters (3A/stats debug data)
8301 *
8302 * PARAMETERS : none
8303 *
8304 *
8305 * RETURN : mm_jpeg_exif_params_t
8306 *
8307 *==========================================================================*/
8308mm_jpeg_exif_params_t QCamera3HardwareInterface::get3AExifParams()
8309{
8310 return mExifParams;
8311}
8312
8313/*===========================================================================
8314 * FUNCTION : translateCbUrgentMetadataToResultMetadata
8315 *
8316 * DESCRIPTION: translate urgent (partial result) metadata from the HAL callback into framework result metadata
8317 *
8318 * PARAMETERS :
8319 * @metadata : metadata information from callback
Shuzhen Wang94ddf072017-03-12 19:47:23 -07008320 * @lastUrgentMetadataInBatch: Boolean to indicate whether this is the last
8321 * urgent metadata in a batch. Always true for
8322 * non-batch mode.
Shuzhen Wang181c57b2017-07-21 11:39:44 -07008323 * @frame_number : frame number for this urgent metadata
Shuzhen Wang485e2442017-08-02 12:21:08 -07008324 * @isJumpstartMetadata: Whether this is a partial metadata for jumpstart,
8325 * i.e. even though it doesn't map to a valid partial
8326 * frame number, its metadata entries should be kept.
Thierry Strudel3d639192016-09-09 11:52:26 -07008327 * RETURN : camera_metadata_t*
8328 * metadata in a format specified by fwk
8329 *==========================================================================*/
8330camera_metadata_t*
8331QCamera3HardwareInterface::translateCbUrgentMetadataToResultMetadata
Shuzhen Wang181c57b2017-07-21 11:39:44 -07008332 (metadata_buffer_t *metadata, bool lastUrgentMetadataInBatch,
Shuzhen Wang485e2442017-08-02 12:21:08 -07008333 uint32_t frame_number, bool isJumpstartMetadata)
Thierry Strudel3d639192016-09-09 11:52:26 -07008334{
8335 CameraMetadata camMetadata;
8336 camera_metadata_t *resultMetadata;
8337
Shuzhen Wang485e2442017-08-02 12:21:08 -07008338 if (!lastUrgentMetadataInBatch && !isJumpstartMetadata) {
Shuzhen Wang94ddf072017-03-12 19:47:23 -07008339 /* In batch mode, use empty metadata if this is not the last in batch
8340 */
8341 resultMetadata = allocate_camera_metadata(0, 0);
8342 return resultMetadata;
8343 }
Thierry Strudel3d639192016-09-09 11:52:26 -07008344
8345 IF_META_AVAILABLE(uint32_t, whiteBalanceState, CAM_INTF_META_AWB_STATE, metadata) {
8346 uint8_t fwk_whiteBalanceState = (uint8_t) *whiteBalanceState;
8347 camMetadata.update(ANDROID_CONTROL_AWB_STATE, &fwk_whiteBalanceState, 1);
8348 LOGD("urgent Metadata : ANDROID_CONTROL_AWB_STATE %u", *whiteBalanceState);
8349 }
8350
8351 IF_META_AVAILABLE(cam_trigger_t, aecTrigger, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER, metadata) {
8352 camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
8353 &aecTrigger->trigger, 1);
8354 camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID,
8355 &aecTrigger->trigger_id, 1);
8356 LOGD("urgent Metadata : CAM_INTF_META_AEC_PRECAPTURE_TRIGGER: %d",
8357 aecTrigger->trigger);
8358 LOGD("urgent Metadata : ANDROID_CONTROL_AE_PRECAPTURE_ID: %d",
8359 aecTrigger->trigger_id);
8360 }
8361
8362 IF_META_AVAILABLE(uint32_t, ae_state, CAM_INTF_META_AEC_STATE, metadata) {
8363 uint8_t fwk_ae_state = (uint8_t) *ae_state;
8364 camMetadata.update(ANDROID_CONTROL_AE_STATE, &fwk_ae_state, 1);
8365 LOGD("urgent Metadata : ANDROID_CONTROL_AE_STATE %u", *ae_state);
8366 }
8367
Shuzhen Wang181c57b2017-07-21 11:39:44 -07008368 IF_META_AVAILABLE(uint32_t, focusMode, CAM_INTF_PARM_FOCUS_MODE, metadata) {
8369 int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP), *focusMode);
8370 if (NAME_NOT_FOUND != val) {
8371 uint8_t fwkAfMode = (uint8_t)val;
8372 camMetadata.update(ANDROID_CONTROL_AF_MODE, &fwkAfMode, 1);
8373 LOGD("urgent Metadata : ANDROID_CONTROL_AF_MODE %d", val);
8374 } else {
8375 LOGH("urgent Metadata not found : ANDROID_CONTROL_AF_MODE %d",
8376 val);
8377 }
Thierry Strudel3d639192016-09-09 11:52:26 -07008378 }
8379
Shuzhen Wang181c57b2017-07-21 11:39:44 -07008380 IF_META_AVAILABLE(cam_trigger_t, af_trigger, CAM_INTF_META_AF_TRIGGER, metadata) {
8381 LOGD("urgent Metadata : CAM_INTF_META_AF_TRIGGER = %d",
8382 af_trigger->trigger);
8383 LOGD("urgent Metadata : ANDROID_CONTROL_AF_TRIGGER_ID = %d",
8384 af_trigger->trigger_id);
8385
8386 IF_META_AVAILABLE(uint32_t, afState, CAM_INTF_META_AF_STATE, metadata) {
8387 mAfTrigger = *af_trigger;
8388 uint32_t fwk_AfState = (uint32_t) *afState;
8389
8390 // If this is the result for a new trigger, check if there is new early
8391 // af state. If there is, use the last af state for all results
8392 // preceding current partial frame number.
8393 for (auto & pendingRequest : mPendingRequestsList) {
8394 if (pendingRequest.frame_number < frame_number) {
8395 pendingRequest.focusStateValid = true;
8396 pendingRequest.focusState = fwk_AfState;
8397 } else if (pendingRequest.frame_number == frame_number) {
8398 IF_META_AVAILABLE(uint32_t, earlyAfState, CAM_INTF_META_EARLY_AF_STATE, metadata) {
8399 // Check if early AF state for trigger exists. If yes, send AF state as
8400 // partial result for better latency.
8401 uint8_t fwkEarlyAfState = (uint8_t) *earlyAfState;
8402 pendingRequest.focusStateSent = true;
8403 camMetadata.update(ANDROID_CONTROL_AF_STATE, &fwkEarlyAfState, 1);
8404 LOGD("urgent Metadata(%d) : ANDROID_CONTROL_AF_STATE %u",
8405 frame_number, fwkEarlyAfState);
8406 }
8407 }
8408 }
8409 }
8410 }
8411 camMetadata.update(ANDROID_CONTROL_AF_TRIGGER,
8412 &mAfTrigger.trigger, 1);
8413 camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, &mAfTrigger.trigger_id, 1);
8414
Shuzhen Wang6ce35e62017-03-27 18:00:41 -07008415 IF_META_AVAILABLE(cam_area_t, hAfRegions, CAM_INTF_META_AF_ROI, metadata) {
8416 /*af regions*/
Shuzhen Wang91c14e82017-08-21 17:56:57 -07008417 cam_rect_t hAfRect = hAfRegions->rect;
Shuzhen Wang6ce35e62017-03-27 18:00:41 -07008418 int32_t afRegions[REGIONS_TUPLE_COUNT];
8419 // Adjust crop region from sensor output coordinate system to active
8420 // array coordinate system.
Shuzhen Wang91c14e82017-08-21 17:56:57 -07008421 mCropRegionMapper.toActiveArray(hAfRect.left, hAfRect.top,
8422 hAfRect.width, hAfRect.height);
Shuzhen Wang6ce35e62017-03-27 18:00:41 -07008423
Shuzhen Wang91c14e82017-08-21 17:56:57 -07008424 convertToRegions(hAfRect, afRegions, hAfRegions->weight);
Shuzhen Wang6ce35e62017-03-27 18:00:41 -07008425 camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions,
8426 REGIONS_TUPLE_COUNT);
8427 LOGD("Metadata : ANDROID_CONTROL_AF_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
8428 afRegions[0], afRegions[1], afRegions[2], afRegions[3],
Shuzhen Wang91c14e82017-08-21 17:56:57 -07008429 hAfRect.left, hAfRect.top, hAfRect.width,
8430 hAfRect.height);
Shuzhen Wang6ce35e62017-03-27 18:00:41 -07008431 }
8432
Shuzhen Wangcc386c52017-03-29 09:28:08 -07008433 // AF region confidence
8434 IF_META_AVAILABLE(int32_t, afRegionConfidence, CAM_INTF_META_AF_REGIONS_CONFIDENCE, metadata) {
8435 camMetadata.update(NEXUS_EXPERIMENTAL_2017_AF_REGIONS_CONFIDENCE, afRegionConfidence, 1);
8436 }
8437
Thierry Strudel3d639192016-09-09 11:52:26 -07008438 IF_META_AVAILABLE(int32_t, whiteBalance, CAM_INTF_PARM_WHITE_BALANCE, metadata) {
8439 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
8440 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP), *whiteBalance);
8441 if (NAME_NOT_FOUND != val) {
8442 uint8_t fwkWhiteBalanceMode = (uint8_t)val;
8443 camMetadata.update(ANDROID_CONTROL_AWB_MODE, &fwkWhiteBalanceMode, 1);
8444 LOGD("urgent Metadata : ANDROID_CONTROL_AWB_MODE %d", val);
8445 } else {
8446 LOGH("urgent Metadata not found : ANDROID_CONTROL_AWB_MODE");
8447 }
8448 }
8449
8450 uint8_t fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
8451 uint32_t aeMode = CAM_AE_MODE_MAX;
8452 int32_t flashMode = CAM_FLASH_MODE_MAX;
8453 int32_t redeye = -1;
8454 IF_META_AVAILABLE(uint32_t, pAeMode, CAM_INTF_META_AEC_MODE, metadata) {
8455 aeMode = *pAeMode;
8456 }
8457 IF_META_AVAILABLE(int32_t, pFlashMode, CAM_INTF_PARM_LED_MODE, metadata) {
8458 flashMode = *pFlashMode;
8459 }
8460 IF_META_AVAILABLE(int32_t, pRedeye, CAM_INTF_PARM_REDEYE_REDUCTION, metadata) {
8461 redeye = *pRedeye;
8462 }
8463
8464 if (1 == redeye) {
8465 fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
8466 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8467 } else if ((CAM_FLASH_MODE_AUTO == flashMode) || (CAM_FLASH_MODE_ON == flashMode)) {
8468 int val = lookupFwkName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
8469 flashMode);
8470 if (NAME_NOT_FOUND != val) {
8471 fwk_aeMode = (uint8_t)val;
8472 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8473 } else {
8474 LOGE("Unsupported flash mode %d", flashMode);
8475 }
8476 } else if (aeMode == CAM_AE_MODE_ON) {
8477 fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON;
8478 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8479 } else if (aeMode == CAM_AE_MODE_OFF) {
8480 fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
8481 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -08008482 } else if (aeMode == CAM_AE_MODE_ON_EXTERNAL_FLASH) {
8483 fwk_aeMode = NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH;
8484 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -07008485 } else {
8486 LOGE("Not enough info to deduce ANDROID_CONTROL_AE_MODE redeye:%d, "
8487 "flashMode:%d, aeMode:%u!!!",
8488 redeye, flashMode, aeMode);
8489 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07008490 if (mInstantAEC) {
8491 // Increment frame Idx count untill a bound reached for instant AEC.
8492 mInstantAecFrameIdxCount++;
8493 IF_META_AVAILABLE(cam_3a_params_t, ae_params,
8494 CAM_INTF_META_AEC_INFO, metadata) {
8495 LOGH("ae_params->settled = %d",ae_params->settled);
8496 // If AEC settled, or if number of frames reached bound value,
8497 // should reset instant AEC.
8498 if (ae_params->settled ||
8499 (mInstantAecFrameIdxCount > mAecSkipDisplayFrameBound)) {
8500 LOGH("AEC settled or Frames reached instantAEC bound, resetting instantAEC");
8501 mInstantAEC = false;
8502 mResetInstantAEC = true;
8503 mInstantAecFrameIdxCount = 0;
8504 }
8505 }
8506 }
Shuzhen Wang3569d4a2017-09-04 19:10:28 -07008507
8508 IF_META_AVAILABLE(int32_t, af_tof_confidence,
8509 CAM_INTF_META_AF_TOF_CONFIDENCE, metadata) {
8510 IF_META_AVAILABLE(int32_t, af_tof_distance,
8511 CAM_INTF_META_AF_TOF_DISTANCE, metadata) {
8512 int32_t fwk_af_tof_confidence = *af_tof_confidence;
8513 int32_t fwk_af_tof_distance = *af_tof_distance;
8514 if (fwk_af_tof_confidence == 1) {
8515 mSceneDistance = fwk_af_tof_distance;
8516 } else {
8517 mSceneDistance = -1;
8518 }
8519 LOGD("tof_distance %d, tof_confidence %d, mSceneDistance %d",
8520 fwk_af_tof_distance, fwk_af_tof_confidence, mSceneDistance);
8521 }
8522 }
8523 camMetadata.update(NEXUS_EXPERIMENTAL_2017_SCENE_DISTANCE, &mSceneDistance, 1);
8524
Thierry Strudel3d639192016-09-09 11:52:26 -07008525 resultMetadata = camMetadata.release();
8526 return resultMetadata;
8527}
8528
8529/*===========================================================================
8530 * FUNCTION : dumpMetadataToFile
8531 *
8532 * DESCRIPTION: Dumps tuning metadata to file system
8533 *
8534 * PARAMETERS :
8535 * @meta : tuning metadata
8536 * @dumpFrameCount : current dump frame count
8537 * @enabled : Enable mask
8538 *
8539 *==========================================================================*/
8540void QCamera3HardwareInterface::dumpMetadataToFile(tuning_params_t &meta,
8541 uint32_t &dumpFrameCount,
8542 bool enabled,
8543 const char *type,
8544 uint32_t frameNumber)
8545{
8546 //Some sanity checks
8547 if (meta.tuning_sensor_data_size > TUNING_SENSOR_DATA_MAX) {
8548 LOGE("Tuning sensor data size bigger than expected %d: %d",
8549 meta.tuning_sensor_data_size,
8550 TUNING_SENSOR_DATA_MAX);
8551 return;
8552 }
8553
8554 if (meta.tuning_vfe_data_size > TUNING_VFE_DATA_MAX) {
8555 LOGE("Tuning VFE data size bigger than expected %d: %d",
8556 meta.tuning_vfe_data_size,
8557 TUNING_VFE_DATA_MAX);
8558 return;
8559 }
8560
8561 if (meta.tuning_cpp_data_size > TUNING_CPP_DATA_MAX) {
8562 LOGE("Tuning CPP data size bigger than expected %d: %d",
8563 meta.tuning_cpp_data_size,
8564 TUNING_CPP_DATA_MAX);
8565 return;
8566 }
8567
8568 if (meta.tuning_cac_data_size > TUNING_CAC_DATA_MAX) {
8569 LOGE("Tuning CAC data size bigger than expected %d: %d",
8570 meta.tuning_cac_data_size,
8571 TUNING_CAC_DATA_MAX);
8572 return;
8573 }
8574 //
8575
8576 if(enabled){
8577 char timeBuf[FILENAME_MAX];
8578 char buf[FILENAME_MAX];
8579 memset(buf, 0, sizeof(buf));
8580 memset(timeBuf, 0, sizeof(timeBuf));
8581 time_t current_time;
8582 struct tm * timeinfo;
8583 time (&current_time);
8584 timeinfo = localtime (&current_time);
8585 if (timeinfo != NULL) {
8586 strftime (timeBuf, sizeof(timeBuf),
8587 QCAMERA_DUMP_FRM_LOCATION"%Y%m%d%H%M%S", timeinfo);
8588 }
8589 String8 filePath(timeBuf);
8590 snprintf(buf,
8591 sizeof(buf),
8592 "%dm_%s_%d.bin",
8593 dumpFrameCount,
8594 type,
8595 frameNumber);
8596 filePath.append(buf);
8597 int file_fd = open(filePath.string(), O_RDWR | O_CREAT, 0777);
8598 if (file_fd >= 0) {
8599 ssize_t written_len = 0;
8600 meta.tuning_data_version = TUNING_DATA_VERSION;
8601 void *data = (void *)((uint8_t *)&meta.tuning_data_version);
8602 written_len += write(file_fd, data, sizeof(uint32_t));
8603 data = (void *)((uint8_t *)&meta.tuning_sensor_data_size);
8604 LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
8605 written_len += write(file_fd, data, sizeof(uint32_t));
8606 data = (void *)((uint8_t *)&meta.tuning_vfe_data_size);
8607 LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
8608 written_len += write(file_fd, data, sizeof(uint32_t));
8609 data = (void *)((uint8_t *)&meta.tuning_cpp_data_size);
8610 LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
8611 written_len += write(file_fd, data, sizeof(uint32_t));
8612 data = (void *)((uint8_t *)&meta.tuning_cac_data_size);
8613 LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
8614 written_len += write(file_fd, data, sizeof(uint32_t));
8615 meta.tuning_mod3_data_size = 0;
8616 data = (void *)((uint8_t *)&meta.tuning_mod3_data_size);
8617 LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
8618 written_len += write(file_fd, data, sizeof(uint32_t));
8619 size_t total_size = meta.tuning_sensor_data_size;
8620 data = (void *)((uint8_t *)&meta.data);
8621 written_len += write(file_fd, data, total_size);
8622 total_size = meta.tuning_vfe_data_size;
8623 data = (void *)((uint8_t *)&meta.data[TUNING_VFE_DATA_OFFSET]);
8624 written_len += write(file_fd, data, total_size);
8625 total_size = meta.tuning_cpp_data_size;
8626 data = (void *)((uint8_t *)&meta.data[TUNING_CPP_DATA_OFFSET]);
8627 written_len += write(file_fd, data, total_size);
8628 total_size = meta.tuning_cac_data_size;
8629 data = (void *)((uint8_t *)&meta.data[TUNING_CAC_DATA_OFFSET]);
8630 written_len += write(file_fd, data, total_size);
8631 close(file_fd);
8632 }else {
8633 LOGE("fail to open file for metadata dumping");
8634 }
8635 }
8636}
8637
8638/*===========================================================================
8639 * FUNCTION : cleanAndSortStreamInfo
8640 *
8641 * DESCRIPTION: helper method to clean up invalid streams in stream_info,
8642 * and sort them such that raw streams are at the end of the list.
8643 * This is a workaround for a camera daemon constraint.
8644 *
8645 * PARAMETERS : None
8646 *
8647 *==========================================================================*/
8648void QCamera3HardwareInterface::cleanAndSortStreamInfo()
8649{
8650 List<stream_info_t *> newStreamInfo;
8651
8652 /*clean up invalid streams*/
8653 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
8654 it != mStreamInfo.end();) {
8655 if(((*it)->status) == INVALID){
8656 QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
8657 delete channel;
8658 free(*it);
8659 it = mStreamInfo.erase(it);
8660 } else {
8661 it++;
8662 }
8663 }
8664
8665 // Move preview/video/callback/snapshot streams into newList
8666 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
8667 it != mStreamInfo.end();) {
8668 if ((*it)->stream->format != HAL_PIXEL_FORMAT_RAW_OPAQUE &&
8669 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW10 &&
8670 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW16) {
8671 newStreamInfo.push_back(*it);
8672 it = mStreamInfo.erase(it);
8673 } else
8674 it++;
8675 }
8676 // Move raw streams into newList
8677 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
8678 it != mStreamInfo.end();) {
8679 newStreamInfo.push_back(*it);
8680 it = mStreamInfo.erase(it);
8681 }
8682
8683 mStreamInfo = newStreamInfo;
8684}
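/*
 * Illustrative example (not from the source): given a configured stream list of
 * { RAW16, preview YUV, JPEG snapshot }, the pass above yields
 * { preview YUV, JPEG snapshot, RAW16 } -- invalid entries are deleted and all
 * RAW_OPAQUE/RAW10/RAW16 streams are moved to the tail, the ordering the camera
 * daemon expects.
 */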
8685
8686/*===========================================================================
8687 * FUNCTION : extractJpegMetadata
8688 *
8689 * DESCRIPTION: helper method to extract Jpeg metadata from capture request.
8690 * JPEG metadata is cached in HAL, and return as part of capture
8691 * result when metadata is returned from camera daemon.
8692 *
8693 * PARAMETERS : @jpegMetadata: jpeg metadata to be extracted
8694 * @request: capture request
8695 *
8696 *==========================================================================*/
8697void QCamera3HardwareInterface::extractJpegMetadata(
8698 CameraMetadata& jpegMetadata,
8699 const camera3_capture_request_t *request)
8700{
8701 CameraMetadata frame_settings;
8702 frame_settings = request->settings;
8703
8704 if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES))
8705 jpegMetadata.update(ANDROID_JPEG_GPS_COORDINATES,
8706 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d,
8707 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).count);
8708
8709 if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD))
8710 jpegMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD,
8711 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8,
8712 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).count);
8713
8714 if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP))
8715 jpegMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP,
8716 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64,
8717 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).count);
8718
8719 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION))
8720 jpegMetadata.update(ANDROID_JPEG_ORIENTATION,
8721 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32,
8722 frame_settings.find(ANDROID_JPEG_ORIENTATION).count);
8723
8724 if (frame_settings.exists(ANDROID_JPEG_QUALITY))
8725 jpegMetadata.update(ANDROID_JPEG_QUALITY,
8726 frame_settings.find(ANDROID_JPEG_QUALITY).data.u8,
8727 frame_settings.find(ANDROID_JPEG_QUALITY).count);
8728
8729 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY))
8730 jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY,
8731 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8,
8732 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).count);
8733
8734 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
8735 int32_t thumbnail_size[2];
8736 thumbnail_size[0] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
8737 thumbnail_size[1] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
8738 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
8739 int32_t orientation =
8740 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008741 if ((!needJpegExifRotation()) && ((orientation == 90) || (orientation == 270))) {
Thierry Strudel3d639192016-09-09 11:52:26 -07008742 //swap thumbnail dimensions for rotations 90 and 270 in jpeg metadata.
8743 int32_t temp;
8744 temp = thumbnail_size[0];
8745 thumbnail_size[0] = thumbnail_size[1];
8746 thumbnail_size[1] = temp;
8747 }
8748 }
8749 jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE,
8750 thumbnail_size,
8751 frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
8752 }
8753
8754}
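/*
 * Worked example (illustrative): with ANDROID_JPEG_THUMBNAIL_SIZE = {320, 240},
 * ANDROID_JPEG_ORIENTATION = 90, and needJpegExifRotation() returning false,
 * the cached thumbnail size becomes {240, 320} so the thumbnail dimensions match
 * the rotated main image.
 */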
8755
8756/*===========================================================================
8757 * FUNCTION : convertToRegions
8758 *
8759 * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array
8760 *
8761 * PARAMETERS :
8762 * @rect : cam_rect_t struct to convert
8763 * @region : int32_t destination array
8764 * @weight : if we are converting from cam_area_t, weight is valid
8765 * else weight = -1
8766 *
8767 *==========================================================================*/
8768void QCamera3HardwareInterface::convertToRegions(cam_rect_t rect,
8769 int32_t *region, int weight)
8770{
Jason Lee8ce36fa2017-04-19 19:40:37 -07008771 region[FACE_LEFT] = rect.left;
8772 region[FACE_TOP] = rect.top;
8773 region[FACE_RIGHT] = rect.left + rect.width;
8774 region[FACE_BOTTOM] = rect.top + rect.height;
Thierry Strudel3d639192016-09-09 11:52:26 -07008775 if (weight > -1) {
Jason Lee8ce36fa2017-04-19 19:40:37 -07008776 region[FACE_WEIGHT] = weight;
Thierry Strudel3d639192016-09-09 11:52:26 -07008777 }
8778}
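/*
 * Hedged usage sketch: convertToRegions() packs a HAL rectangle into the
 * framework's (xmin, ymin, xmax, ymax, weight) region layout used by tags such
 * as ANDROID_CONTROL_AE_REGIONS. Example values below are hypothetical:
 *
 *   cam_rect_t r;
 *   r.left = 100; r.top = 50; r.width = 200; r.height = 150;
 *   int32_t region[REGIONS_TUPLE_COUNT];
 *   convertToRegions(r, region, 10);
 *   // region now holds {100, 50, 300, 200, 10}, i.e. right = left + width,
 *   // bottom = top + height, with the weight appended.
 */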
8779
8780/*===========================================================================
8781 * FUNCTION : convertFromRegions
8782 *
8783 * DESCRIPTION: helper method to convert a framework region array into cam_area_t
8784 *
8785 * PARAMETERS :
8786 * @roi : cam_area_t destination struct
8787 * @frame_settings : capture request settings to read the region from
8788 * @tag : metadata tag whose data is the 5-tuple
8789 * [xmin, ymin, xmax, ymax, weight]
8790 *
8791 *==========================================================================*/
8792void QCamera3HardwareInterface::convertFromRegions(cam_area_t &roi,
Chien-Yu Chen92724a82017-01-06 11:50:30 -08008793 const CameraMetadata &frame_settings, uint32_t tag)
Thierry Strudel3d639192016-09-09 11:52:26 -07008794{
Thierry Strudel3d639192016-09-09 11:52:26 -07008795 int32_t x_min = frame_settings.find(tag).data.i32[0];
8796 int32_t y_min = frame_settings.find(tag).data.i32[1];
8797 int32_t x_max = frame_settings.find(tag).data.i32[2];
8798 int32_t y_max = frame_settings.find(tag).data.i32[3];
8799 roi.weight = frame_settings.find(tag).data.i32[4];
8800 roi.rect.left = x_min;
8801 roi.rect.top = y_min;
8802 roi.rect.width = x_max - x_min;
8803 roi.rect.height = y_max - y_min;
8804}
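/*
 * Hedged sketch: convertFromRegions() goes the other direction -- it reads a
 * framework 5-tuple (xmin, ymin, xmax, ymax, weight) from the request settings
 * and fills a cam_area_t as (left, top, width, height, weight). For example, a
 * request with ANDROID_CONTROL_AF_REGIONS = {100, 50, 300, 200, 10} yields
 * roi.rect = {left 100, top 50, width 200, height 150} and roi.weight = 10.
 */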
8805
8806/*===========================================================================
8807 * FUNCTION : resetIfNeededROI
8808 *
8809 * DESCRIPTION: helper method to reset the roi if it is greater than scaler
8810 * crop region
8811 *
8812 * PARAMETERS :
8813 * @roi : cam_area_t struct to resize
8814 * @scalerCropRegion : cam_crop_region_t region to compare against
8815 *
8816 *
8817 *==========================================================================*/
8818bool QCamera3HardwareInterface::resetIfNeededROI(cam_area_t* roi,
8819 const cam_crop_region_t* scalerCropRegion)
8820{
8821 int32_t roi_x_max = roi->rect.width + roi->rect.left;
8822 int32_t roi_y_max = roi->rect.height + roi->rect.top;
8823 int32_t crop_x_max = scalerCropRegion->width + scalerCropRegion->left;
8824 int32_t crop_y_max = scalerCropRegion->height + scalerCropRegion->top;
8825
8826 /* According to the spec, weight = 0 indicates the roi should be disabled.
8827 * Without this check, the validation below (whether the roi lies inside the
8828 * scaler crop region) would fail, leaving the roi un-reset and causing the
8829 * algorithm to keep using a stale roi window.
8830 */
8831 if (roi->weight == 0) {
8832 return true;
8833 }
8834
8835 if ((roi_x_max < scalerCropRegion->left) ||
8836 // right edge of roi window is left of scalar crop's left edge
8837 (roi_y_max < scalerCropRegion->top) ||
8838 // bottom edge of roi window is above scalar crop's top edge
8839 (roi->rect.left > crop_x_max) ||
8840 // left edge of roi window is beyond(right) of scalar crop's right edge
8841 (roi->rect.top > crop_y_max)){
8842 // top edge of roi window is below scalar crop's bottom edge
8843 return false;
8844 }
8845 if (roi->rect.left < scalerCropRegion->left) {
8846 roi->rect.left = scalerCropRegion->left;
8847 }
8848 if (roi->rect.top < scalerCropRegion->top) {
8849 roi->rect.top = scalerCropRegion->top;
8850 }
8851 if (roi_x_max > crop_x_max) {
8852 roi_x_max = crop_x_max;
8853 }
8854 if (roi_y_max > crop_y_max) {
8855 roi_y_max = crop_y_max;
8856 }
8857 roi->rect.width = roi_x_max - roi->rect.left;
8858 roi->rect.height = roi_y_max - roi->rect.top;
8859 return true;
8860}
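/*
 * Worked example (illustrative): with a scaler crop region of
 * {left 0, top 0, width 2000, height 1500} and an ROI of
 * {left 1900, top 1400, width 300, height 300, weight 5}, the ROI overlaps the
 * crop edge and is clamped in place to {1900, 1400, 100, 100}, returning true.
 * A weight of 0 returns true immediately (ROI disabled), and an ROI entirely
 * outside the crop region returns false without modification.
 */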
8861
8862/*===========================================================================
8863 * FUNCTION : convertLandmarks
8864 *
8865 * DESCRIPTION: helper method to extract the landmarks from face detection info
8866 *
8867 * PARAMETERS :
8868 * @landmark_data : input landmark data to be converted
8869 * @landmarks : int32_t destination array
8870 *
8871 *
8872 *==========================================================================*/
8873void QCamera3HardwareInterface::convertLandmarks(
8874 cam_face_landmarks_info_t landmark_data,
8875 int32_t *landmarks)
8876{
Thierry Strudel04e026f2016-10-10 11:27:36 -07008877 if (landmark_data.is_left_eye_valid) {
8878 landmarks[LEFT_EYE_X] = (int32_t)landmark_data.left_eye_center.x;
8879 landmarks[LEFT_EYE_Y] = (int32_t)landmark_data.left_eye_center.y;
8880 } else {
8881 landmarks[LEFT_EYE_X] = FACE_INVALID_POINT;
8882 landmarks[LEFT_EYE_Y] = FACE_INVALID_POINT;
8883 }
8884
8885 if (landmark_data.is_right_eye_valid) {
8886 landmarks[RIGHT_EYE_X] = (int32_t)landmark_data.right_eye_center.x;
8887 landmarks[RIGHT_EYE_Y] = (int32_t)landmark_data.right_eye_center.y;
8888 } else {
8889 landmarks[RIGHT_EYE_X] = FACE_INVALID_POINT;
8890 landmarks[RIGHT_EYE_Y] = FACE_INVALID_POINT;
8891 }
8892
8893 if (landmark_data.is_mouth_valid) {
8894 landmarks[MOUTH_X] = (int32_t)landmark_data.mouth_center.x;
8895 landmarks[MOUTH_Y] = (int32_t)landmark_data.mouth_center.y;
8896 } else {
8897 landmarks[MOUTH_X] = FACE_INVALID_POINT;
8898 landmarks[MOUTH_Y] = FACE_INVALID_POINT;
8899 }
8900}
8901
8902/*===========================================================================
8903 * FUNCTION : setInvalidLandmarks
8904 *
8905 * DESCRIPTION: helper method to set invalid landmarks
8906 *
8907 * PARAMETERS :
8908 * @landmarks : int32_t destination array
8909 *
8910 *
8911 *==========================================================================*/
8912void QCamera3HardwareInterface::setInvalidLandmarks(
8913 int32_t *landmarks)
8914{
8915 landmarks[LEFT_EYE_X] = FACE_INVALID_POINT;
8916 landmarks[LEFT_EYE_Y] = FACE_INVALID_POINT;
8917 landmarks[RIGHT_EYE_X] = FACE_INVALID_POINT;
8918 landmarks[RIGHT_EYE_Y] = FACE_INVALID_POINT;
8919 landmarks[MOUTH_X] = FACE_INVALID_POINT;
8920 landmarks[MOUTH_Y] = FACE_INVALID_POINT;
Thierry Strudel3d639192016-09-09 11:52:26 -07008921}
8922
8923#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008924
8925/*===========================================================================
8926 * FUNCTION : getCapabilities
8927 *
8928 * DESCRIPTION: query camera capability from back-end
8929 *
8930 * PARAMETERS :
8931 * @ops : mm-interface ops structure
8932 * @cam_handle : camera handle for which we need capability
8933 *
8934 * RETURN : ptr type of capability structure
8935 * capability for success
8936 * NULL for failure
8937 *==========================================================================*/
8938cam_capability_t *QCamera3HardwareInterface::getCapabilities(mm_camera_ops_t *ops,
8939 uint32_t cam_handle)
8940{
8941 int rc = NO_ERROR;
8942 QCamera3HeapMemory *capabilityHeap = NULL;
8943 cam_capability_t *cap_ptr = NULL;
8944
8945 if (ops == NULL) {
8946 LOGE("Invalid arguments");
8947 return NULL;
8948 }
8949
8950 capabilityHeap = new QCamera3HeapMemory(1);
8951 if (capabilityHeap == NULL) {
8952 LOGE("creation of capabilityHeap failed");
8953 return NULL;
8954 }
8955
8956 /* Allocate memory for capability buffer */
8957 rc = capabilityHeap->allocate(sizeof(cam_capability_t));
8958 if(rc != OK) {
 8959        LOGE("No memory for capability");
8960 goto allocate_failed;
8961 }
8962
8963 /* Map memory for capability buffer */
8964 memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t));
8965
8966 rc = ops->map_buf(cam_handle,
8967 CAM_MAPPING_BUF_TYPE_CAPABILITY, capabilityHeap->getFd(0),
8968 sizeof(cam_capability_t), capabilityHeap->getPtr(0));
8969 if(rc < 0) {
8970 LOGE("failed to map capability buffer");
8971 rc = FAILED_TRANSACTION;
8972 goto map_failed;
8973 }
8974
8975 /* Query Capability */
8976 rc = ops->query_capability(cam_handle);
8977 if(rc < 0) {
8978 LOGE("failed to query capability");
8979 rc = FAILED_TRANSACTION;
8980 goto query_failed;
8981 }
8982
8983 cap_ptr = (cam_capability_t *)malloc(sizeof(cam_capability_t));
8984 if (cap_ptr == NULL) {
8985 LOGE("out of memory");
8986 rc = NO_MEMORY;
8987 goto query_failed;
8988 }
8989
8990 memset(cap_ptr, 0, sizeof(cam_capability_t));
8991 memcpy(cap_ptr, DATA_PTR(capabilityHeap, 0), sizeof(cam_capability_t));
8992
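    // Clear the per-stream analysis padding offsets in the returned copy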
8993 int index;
8994 for (index = 0; index < CAM_ANALYSIS_INFO_MAX; index++) {
8995 cam_analysis_info_t *p_analysis_info = &cap_ptr->analysis_info[index];
8996 p_analysis_info->analysis_padding_info.offset_info.offset_x = 0;
8997 p_analysis_info->analysis_padding_info.offset_info.offset_y = 0;
8998 }
8999
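/* Intentional fall-through: the labels below unmap and free the temporary
 * capability heap in reverse order of acquisition, on success as well as on error */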
9000query_failed:
9001 ops->unmap_buf(cam_handle, CAM_MAPPING_BUF_TYPE_CAPABILITY);
9002map_failed:
9003 capabilityHeap->deallocate();
9004allocate_failed:
9005 delete capabilityHeap;
9006
9007 if (rc != NO_ERROR) {
9008 return NULL;
9009 } else {
9010 return cap_ptr;
9011 }
9012}
9013
Thierry Strudel3d639192016-09-09 11:52:26 -07009014/*===========================================================================
9015 * FUNCTION : initCapabilities
9016 *
9017 * DESCRIPTION: initialize camera capabilities in static data struct
9018 *
9019 * PARAMETERS :
9020 * @cameraId : camera Id
9021 *
9022 * RETURN : int32_t type of status
9023 * NO_ERROR -- success
 9024 *              non-zero failure code
9025 *==========================================================================*/
9026int QCamera3HardwareInterface::initCapabilities(uint32_t cameraId)
9027{
9028 int rc = 0;
9029 mm_camera_vtbl_t *cameraHandle = NULL;
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07009030 uint32_t handle = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07009031
9032 rc = camera_open((uint8_t)cameraId, &cameraHandle);
9033 if (rc) {
9034 LOGE("camera_open failed. rc = %d", rc);
9035 goto open_failed;
9036 }
9037 if (!cameraHandle) {
9038 LOGE("camera_open failed. cameraHandle = %p", cameraHandle);
9039 goto open_failed;
9040 }
9041
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07009042 handle = get_main_camera_handle(cameraHandle->camera_handle);
9043 gCamCapability[cameraId] = getCapabilities(cameraHandle->ops, handle);
9044 if (gCamCapability[cameraId] == NULL) {
9045 rc = FAILED_TRANSACTION;
9046 goto failed_op;
Thierry Strudel3d639192016-09-09 11:52:26 -07009047 }
9048
Thierry Strudel295a0ca2016-11-03 18:38:47 -07009049 gCamCapability[cameraId]->camera_index = cameraId;
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07009050 if (is_dual_camera_by_idx(cameraId)) {
9051 handle = get_aux_camera_handle(cameraHandle->camera_handle);
9052 gCamCapability[cameraId]->aux_cam_cap =
9053 getCapabilities(cameraHandle->ops, handle);
9054 if (gCamCapability[cameraId]->aux_cam_cap == NULL) {
9055 rc = FAILED_TRANSACTION;
9056 free(gCamCapability[cameraId]);
9057 goto failed_op;
9058 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08009059
9060 // Copy the main camera capability to main_cam_cap struct
9061 gCamCapability[cameraId]->main_cam_cap =
9062 (cam_capability_t *)malloc(sizeof(cam_capability_t));
9063 if (gCamCapability[cameraId]->main_cam_cap == NULL) {
9064 LOGE("out of memory");
9065 rc = NO_MEMORY;
9066 goto failed_op;
9067 }
9068 memcpy(gCamCapability[cameraId]->main_cam_cap, gCamCapability[cameraId],
9069 sizeof(cam_capability_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07009070 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07009071failed_op:
Thierry Strudel3d639192016-09-09 11:52:26 -07009072 cameraHandle->ops->close_camera(cameraHandle->camera_handle);
9073 cameraHandle = NULL;
9074open_failed:
9075 return rc;
9076}
9077
9078/*==========================================================================
 9079 * FUNCTION   : get3AVersion
9080 *
9081 * DESCRIPTION: get the Q3A S/W version
9082 *
9083 * PARAMETERS :
9084 * @sw_version: Reference of Q3A structure which will hold version info upon
9085 * return
9086 *
9087 * RETURN : None
9088 *
9089 *==========================================================================*/
9090void QCamera3HardwareInterface::get3AVersion(cam_q3a_version_t &sw_version)
9091{
9092 if(gCamCapability[mCameraId])
9093 sw_version = gCamCapability[mCameraId]->q3a_version;
9094 else
9095 LOGE("Capability structure NULL!");
9096}
9097
9098
9099/*===========================================================================
9100 * FUNCTION : initParameters
9101 *
9102 * DESCRIPTION: initialize camera parameters
9103 *
9104 * PARAMETERS :
9105 *
9106 * RETURN : int32_t type of status
9107 * NO_ERROR -- success
 9108 *              non-zero failure code
9109 *==========================================================================*/
9110int QCamera3HardwareInterface::initParameters()
9111{
9112 int rc = 0;
9113
9114 //Allocate Set Param Buffer
9115 mParamHeap = new QCamera3HeapMemory(1);
9116 rc = mParamHeap->allocate(sizeof(metadata_buffer_t));
9117 if(rc != OK) {
9118 rc = NO_MEMORY;
9119 LOGE("Failed to allocate SETPARM Heap memory");
9120 delete mParamHeap;
9121 mParamHeap = NULL;
9122 return rc;
9123 }
9124
9125 //Map memory for parameters buffer
9126 rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
9127 CAM_MAPPING_BUF_TYPE_PARM_BUF,
9128 mParamHeap->getFd(0),
9129 sizeof(metadata_buffer_t),
9130 (metadata_buffer_t *) DATA_PTR(mParamHeap,0));
9131 if(rc < 0) {
9132 LOGE("failed to map SETPARM buffer");
9133 rc = FAILED_TRANSACTION;
9134 mParamHeap->deallocate();
9135 delete mParamHeap;
9136 mParamHeap = NULL;
9137 return rc;
9138 }
9139
9140 mParameters = (metadata_buffer_t *) DATA_PTR(mParamHeap,0);
9141
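    // Plain (unmapped) heap buffer holding a copy of the previously applied parameters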
9142 mPrevParameters = (metadata_buffer_t *)malloc(sizeof(metadata_buffer_t));
9143 return rc;
9144}
9145
9146/*===========================================================================
9147 * FUNCTION : deinitParameters
9148 *
9149 * DESCRIPTION: de-initialize camera parameters
9150 *
9151 * PARAMETERS :
9152 *
9153 * RETURN : NONE
9154 *==========================================================================*/
9155void QCamera3HardwareInterface::deinitParameters()
9156{
9157 mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
9158 CAM_MAPPING_BUF_TYPE_PARM_BUF);
9159
9160 mParamHeap->deallocate();
9161 delete mParamHeap;
9162 mParamHeap = NULL;
9163
9164 mParameters = NULL;
9165
9166 free(mPrevParameters);
9167 mPrevParameters = NULL;
9168}
9169
9170/*===========================================================================
9171 * FUNCTION : calcMaxJpegSize
9172 *
9173 * DESCRIPTION: Calculates maximum jpeg size supported by the cameraId
9174 *
9175 * PARAMETERS :
 9176 *   @camera_id : camera Id
9177 * RETURN : max_jpeg_size
9178 *==========================================================================*/
9179size_t QCamera3HardwareInterface::calcMaxJpegSize(uint32_t camera_id)
9180{
9181 size_t max_jpeg_size = 0;
9182 size_t temp_width, temp_height;
9183 size_t count = MIN(gCamCapability[camera_id]->picture_sizes_tbl_cnt,
9184 MAX_SIZES_CNT);
9185 for (size_t i = 0; i < count; i++) {
9186 temp_width = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].width;
9187 temp_height = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].height;
9188 if (temp_width * temp_height > max_jpeg_size ) {
9189 max_jpeg_size = temp_width * temp_height;
9190 }
9191 }
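    // Worst-case JPEG buffer: ~1.5 bytes per pixel (a YUV420-sized upper bound)
    // plus the camera3_jpeg_blob_t transport header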
9192 max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
9193 return max_jpeg_size;
9194}
9195
9196/*===========================================================================
9197 * FUNCTION : getMaxRawSize
9198 *
9199 * DESCRIPTION: Fetches maximum raw size supported by the cameraId
9200 *
9201 * PARAMETERS :
 9202 *   @camera_id : camera Id
9203 * RETURN : Largest supported Raw Dimension
9204 *==========================================================================*/
9205cam_dimension_t QCamera3HardwareInterface::getMaxRawSize(uint32_t camera_id)
9206{
9207 int max_width = 0;
9208 cam_dimension_t maxRawSize;
9209
9210 memset(&maxRawSize, 0, sizeof(cam_dimension_t));
9211 for (size_t i = 0; i < gCamCapability[camera_id]->supported_raw_dim_cnt; i++) {
9212 if (max_width < gCamCapability[camera_id]->raw_dim[i].width) {
9213 max_width = gCamCapability[camera_id]->raw_dim[i].width;
9214 maxRawSize = gCamCapability[camera_id]->raw_dim[i];
9215 }
9216 }
9217 return maxRawSize;
9218}
9219
9220
9221/*===========================================================================
9222 * FUNCTION : calcMaxJpegDim
9223 *
9224 * DESCRIPTION: Calculates maximum jpeg dimension supported by the cameraId
9225 *
9226 * PARAMETERS :
9227 *
9228 * RETURN : max_jpeg_dim
9229 *==========================================================================*/
9230cam_dimension_t QCamera3HardwareInterface::calcMaxJpegDim()
9231{
9232 cam_dimension_t max_jpeg_dim;
9233 cam_dimension_t curr_jpeg_dim;
9234 max_jpeg_dim.width = 0;
9235 max_jpeg_dim.height = 0;
9236 curr_jpeg_dim.width = 0;
9237 curr_jpeg_dim.height = 0;
9238 for (size_t i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
9239 curr_jpeg_dim.width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width;
9240 curr_jpeg_dim.height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height;
9241 if (curr_jpeg_dim.width * curr_jpeg_dim.height >
9242 max_jpeg_dim.width * max_jpeg_dim.height ) {
9243 max_jpeg_dim.width = curr_jpeg_dim.width;
9244 max_jpeg_dim.height = curr_jpeg_dim.height;
9245 }
9246 }
9247 return max_jpeg_dim;
9248}
9249
9250/*===========================================================================
9251 * FUNCTION : addStreamConfig
9252 *
9253 * DESCRIPTION: adds the stream configuration to the array
9254 *
9255 * PARAMETERS :
9256 * @available_stream_configs : pointer to stream configuration array
9257 * @scalar_format : scalar format
9258 * @dim : configuration dimension
9259 * @config_type : input or output configuration type
9260 *
9261 * RETURN : NONE
9262 *==========================================================================*/
9263void QCamera3HardwareInterface::addStreamConfig(Vector<int32_t> &available_stream_configs,
9264 int32_t scalar_format, const cam_dimension_t &dim, int32_t config_type)
9265{
9266 available_stream_configs.add(scalar_format);
9267 available_stream_configs.add(dim.width);
9268 available_stream_configs.add(dim.height);
9269 available_stream_configs.add(config_type);
9270}
9271
9272/*===========================================================================
 9273 * FUNCTION   : supportBurstCapture
9274 *
9275 * DESCRIPTION: Whether a particular camera supports BURST_CAPTURE
9276 *
9277 * PARAMETERS :
9278 * @cameraId : camera Id
9279 *
9280 * RETURN : true if camera supports BURST_CAPTURE
9281 * false otherwise
9282 *==========================================================================*/
9283bool QCamera3HardwareInterface::supportBurstCapture(uint32_t cameraId)
9284{
9285 const int64_t highResDurationBound = 50000000; // 50 ms, 20 fps
9286 const int64_t fullResDurationBound = 100000000; // 100 ms, 10 fps
9287 const int32_t highResWidth = 3264;
9288 const int32_t highResHeight = 2448;
9289
9290 if (gCamCapability[cameraId]->picture_min_duration[0] > fullResDurationBound) {
9291 // Maximum resolution images cannot be captured at >= 10fps
9292 // -> not supporting BURST_CAPTURE
9293 return false;
9294 }
9295
9296 if (gCamCapability[cameraId]->picture_min_duration[0] <= highResDurationBound) {
9297 // Maximum resolution images can be captured at >= 20fps
9298 // --> supporting BURST_CAPTURE
9299 return true;
9300 }
9301
9302 // Find the smallest highRes resolution, or largest resolution if there is none
9303 size_t totalCnt = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt,
9304 MAX_SIZES_CNT);
9305 size_t highRes = 0;
9306 while ((highRes + 1 < totalCnt) &&
9307 (gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].width *
9308 gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].height >=
9309 highResWidth * highResHeight)) {
9310 highRes++;
9311 }
9312 if (gCamCapability[cameraId]->picture_min_duration[highRes] <= highResDurationBound) {
9313 return true;
9314 } else {
9315 return false;
9316 }
9317}
9318
9319/*===========================================================================
Emilian Peev0f3c3162017-03-15 12:57:46 +00009320 * FUNCTION : getPDStatIndex
9321 *
9322 * DESCRIPTION: Return the meta raw phase detection statistics index if present
9323 *
9324 * PARAMETERS :
9325 * @caps : camera capabilities
9326 *
9327 * RETURN : int32_t type
9328 * non-negative - on success
9329 * -1 - on failure
9330 *==========================================================================*/
9331int32_t QCamera3HardwareInterface::getPDStatIndex(cam_capability_t *caps) {
9332 if (nullptr == caps) {
9333 return -1;
9334 }
9335
9336 uint32_t metaRawCount = caps->meta_raw_channel_count;
9337 int32_t ret = -1;
9338 for (size_t i = 0; i < metaRawCount; i++) {
9339 if (CAM_FORMAT_SUBTYPE_PDAF_STATS == caps->sub_fmt[i]) {
9340 ret = i;
9341 break;
9342 }
9343 }
9344
9345 return ret;
9346}
9347
9348/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07009349 * FUNCTION : initStaticMetadata
9350 *
9351 * DESCRIPTION: initialize the static metadata
9352 *
9353 * PARAMETERS :
9354 * @cameraId : camera Id
9355 *
9356 * RETURN : int32_t type of status
9357 * 0 -- success
9358 * non-zero failure code
9359 *==========================================================================*/
9360int QCamera3HardwareInterface::initStaticMetadata(uint32_t cameraId)
9361{
9362 int rc = 0;
9363 CameraMetadata staticInfo;
9364 size_t count = 0;
9365 bool limitedDevice = false;
9366 char prop[PROPERTY_VALUE_MAX];
9367 bool supportBurst = false;
Emilian Peeve91e9ae2017-09-18 14:40:55 +01009368 Vector<int32_t> available_characteristics_keys;
Thierry Strudel3d639192016-09-09 11:52:26 -07009369
9370 supportBurst = supportBurstCapture(cameraId);
9371
9372 /* If sensor is YUV sensor (no raw support) or if per-frame control is not
 9373     * guaranteed or if min fps of max resolution is less than 20 fps, it is
 9374     * advertised as a limited device */
9375 limitedDevice = gCamCapability[cameraId]->no_per_frame_control_support ||
9376 (CAM_SENSOR_YUV == gCamCapability[cameraId]->sensor_type.sens_type) ||
9377 (CAM_SENSOR_MONO == gCamCapability[cameraId]->sensor_type.sens_type) ||
9378 !supportBurst;
9379
9380 uint8_t supportedHwLvl = limitedDevice ?
9381 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED :
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009382#ifndef USE_HAL_3_3
9383 // LEVEL_3 - This device will support level 3.
9384 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_3;
9385#else
Thierry Strudel3d639192016-09-09 11:52:26 -07009386 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009387#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07009388
9389 staticInfo.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
9390 &supportedHwLvl, 1);
9391
9392 bool facingBack = false;
9393 if ((gCamCapability[cameraId]->position == CAM_POSITION_BACK) ||
9394 (gCamCapability[cameraId]->position == CAM_POSITION_BACK_AUX)) {
9395 facingBack = true;
9396 }
9397 /*HAL 3 only*/
9398 staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
9399 &gCamCapability[cameraId]->min_focus_distance, 1);
9400
9401 staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
9402 &gCamCapability[cameraId]->hyper_focal_distance, 1);
9403
9404 /*should be using focal lengths but sensor doesn't provide that info now*/
9405 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
9406 &gCamCapability[cameraId]->focal_length,
9407 1);
9408
9409 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
9410 gCamCapability[cameraId]->apertures,
9411 MIN(CAM_APERTURES_MAX, gCamCapability[cameraId]->apertures_count));
9412
9413 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
9414 gCamCapability[cameraId]->filter_densities,
9415 MIN(CAM_FILTER_DENSITIES_MAX, gCamCapability[cameraId]->filter_densities_count));
9416
9417
Zhijun Hea6ea1d32017-03-10 13:30:00 -08009418 uint8_t available_opt_stab_modes[CAM_OPT_STAB_MAX];
9419 size_t mode_count =
9420 MIN((size_t)CAM_OPT_STAB_MAX, gCamCapability[cameraId]->optical_stab_modes_count);
9421 for (size_t i = 0; i < mode_count; i++) {
9422 available_opt_stab_modes[i] = gCamCapability[cameraId]->optical_stab_modes[i];
9423 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009424 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
Zhijun Hea6ea1d32017-03-10 13:30:00 -08009425 available_opt_stab_modes, mode_count);
Thierry Strudel3d639192016-09-09 11:52:26 -07009426
9427 int32_t lens_shading_map_size[] = {
9428 MIN(CAM_MAX_SHADING_MAP_WIDTH, gCamCapability[cameraId]->lens_shading_map_size.width),
9429 MIN(CAM_MAX_SHADING_MAP_HEIGHT, gCamCapability[cameraId]->lens_shading_map_size.height)};
9430 staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE,
9431 lens_shading_map_size,
9432 sizeof(lens_shading_map_size)/sizeof(int32_t));
9433
9434 staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
9435 gCamCapability[cameraId]->sensor_physical_size, SENSOR_PHYSICAL_SIZE_CNT);
9436
9437 staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
9438 gCamCapability[cameraId]->exposure_time_range, EXPOSURE_TIME_RANGE_CNT);
9439
9440 staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
9441 &gCamCapability[cameraId]->max_frame_duration, 1);
9442
9443 camera_metadata_rational baseGainFactor = {
9444 gCamCapability[cameraId]->base_gain_factor.numerator,
9445 gCamCapability[cameraId]->base_gain_factor.denominator};
9446 staticInfo.update(ANDROID_SENSOR_BASE_GAIN_FACTOR,
9447 &baseGainFactor, 1);
9448
9449 staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
9450 (uint8_t *)&gCamCapability[cameraId]->color_arrangement, 1);
9451
9452 int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width,
9453 gCamCapability[cameraId]->pixel_array_size.height};
9454 staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
9455 pixel_array_size, sizeof(pixel_array_size)/sizeof(pixel_array_size[0]));
9456
9457 int32_t active_array_size[] = {gCamCapability[cameraId]->active_array_size.left,
9458 gCamCapability[cameraId]->active_array_size.top,
9459 gCamCapability[cameraId]->active_array_size.width,
9460 gCamCapability[cameraId]->active_array_size.height};
9461 staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
9462 active_array_size, sizeof(active_array_size)/sizeof(active_array_size[0]));
9463
9464 staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
9465 &gCamCapability[cameraId]->white_level, 1);
9466
Shuzhen Wanga5da1022016-07-13 20:18:42 -07009467 int32_t adjusted_bl_per_cfa[BLACK_LEVEL_PATTERN_CNT];
9468 adjustBlackLevelForCFA(gCamCapability[cameraId]->black_level_pattern, adjusted_bl_per_cfa,
9469 gCamCapability[cameraId]->color_arrangement);
Thierry Strudel3d639192016-09-09 11:52:26 -07009470 staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
Shuzhen Wanga5da1022016-07-13 20:18:42 -07009471 adjusted_bl_per_cfa, BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel3d639192016-09-09 11:52:26 -07009472
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009473#ifndef USE_HAL_3_3
9474 bool hasBlackRegions = false;
9475 if (gCamCapability[cameraId]->optical_black_region_count > MAX_OPTICAL_BLACK_REGIONS) {
9476 LOGW("black_region_count: %d is bounded to %d",
9477 gCamCapability[cameraId]->optical_black_region_count, MAX_OPTICAL_BLACK_REGIONS);
9478 gCamCapability[cameraId]->optical_black_region_count = MAX_OPTICAL_BLACK_REGIONS;
9479 }
9480 if (gCamCapability[cameraId]->optical_black_region_count != 0) {
9481 int32_t opticalBlackRegions[MAX_OPTICAL_BLACK_REGIONS * 4];
9482 for (size_t i = 0; i < gCamCapability[cameraId]->optical_black_region_count * 4; i++) {
9483 opticalBlackRegions[i] = gCamCapability[cameraId]->optical_black_regions[i];
9484 }
9485 staticInfo.update(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS,
9486 opticalBlackRegions, gCamCapability[cameraId]->optical_black_region_count * 4);
9487 hasBlackRegions = true;
9488 }
9489#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07009490 staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION,
9491 &gCamCapability[cameraId]->flash_charge_duration, 1);
9492
9493 staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS,
9494 &gCamCapability[cameraId]->max_tone_map_curve_points, 1);
9495
Shuzhen Wang98d5efb2016-09-07 18:08:22 -07009496 uint8_t timestampSource = (gCamCapability[cameraId]->timestamp_calibrated ?
9497 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME :
9498 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_UNKNOWN);
Thierry Strudel3d639192016-09-09 11:52:26 -07009499 staticInfo.update(ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
9500 &timestampSource, 1);
9501
Thierry Strudel54dc9782017-02-15 12:12:10 -08009502 //update histogram vendor data
9503 staticInfo.update(QCAMERA3_HISTOGRAM_BUCKETS,
Thierry Strudel3d639192016-09-09 11:52:26 -07009504 &gCamCapability[cameraId]->histogram_size, 1);
9505
Thierry Strudel54dc9782017-02-15 12:12:10 -08009506 staticInfo.update(QCAMERA3_HISTOGRAM_MAX_COUNT,
Thierry Strudel3d639192016-09-09 11:52:26 -07009507 &gCamCapability[cameraId]->max_histogram_count, 1);
9508
Shuzhen Wang14415f52016-11-16 18:26:18 -08009509 //Set supported bins to be {max_bins, max_bins/2, max_bins/4, ...}
 9510    //so that the app can request fewer bins than the maximum supported.
9511 std::vector<int32_t> histBins;
9512 int32_t maxHistBins = gCamCapability[cameraId]->max_histogram_count;
9513 histBins.push_back(maxHistBins);
9514 while ((maxHistBins >> 1) >= MIN_CAM_HISTOGRAM_STATS_SIZE &&
9515 (maxHistBins & 0x1) == 0) {
9516 histBins.push_back(maxHistBins >> 1);
9517 maxHistBins >>= 1;
9518 }
9519 staticInfo.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_SUPPORTED_BINS,
9520 histBins.data(), histBins.size());
Emilian Peeve91e9ae2017-09-18 14:40:55 +01009521 if (!histBins.empty()) {
9522 available_characteristics_keys.add(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_SUPPORTED_BINS);
9523 }
Shuzhen Wang14415f52016-11-16 18:26:18 -08009524
Thierry Strudel3d639192016-09-09 11:52:26 -07009525 int32_t sharpness_map_size[] = {
9526 gCamCapability[cameraId]->sharpness_map_size.width,
9527 gCamCapability[cameraId]->sharpness_map_size.height};
9528
9529 staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
9530 sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t));
9531
9532 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
9533 &gCamCapability[cameraId]->max_sharpness_map_value, 1);
9534
Emilian Peev0f3c3162017-03-15 12:57:46 +00009535 int32_t indexPD = getPDStatIndex(gCamCapability[cameraId]);
9536 if (0 <= indexPD) {
9537 // Advertise PD stats data as part of the Depth capabilities
9538 int32_t depthWidth =
9539 gCamCapability[cameraId]->raw_meta_dim[indexPD].width;
9540 int32_t depthHeight =
9541 gCamCapability[cameraId]->raw_meta_dim[indexPD].height;
Emilian Peev656e4fa2017-06-02 16:47:04 +01009542 int32_t depthStride =
9543 gCamCapability[cameraId]->raw_meta_dim[indexPD].width * 2;
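        // PD pixels are 16 bits wide (hence stride = width * 2); the blob depth
        // stream below advertises one sample per 16 bytes of raw PD payload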
Emilian Peev0f3c3162017-03-15 12:57:46 +00009544 int32_t depthSamplesCount = (depthWidth * depthHeight * 2) / 16;
9545 assert(0 < depthSamplesCount);
9546 staticInfo.update(ANDROID_DEPTH_MAX_DEPTH_SAMPLES,
9547 &depthSamplesCount, 1);
9548
9549 int32_t depthConfigs[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9550 depthHeight,
9551 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
9552 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1,
9553 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT};
9554 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
9555 depthConfigs, sizeof(depthConfigs)/sizeof(depthConfigs[0]));
9556
9557 int64_t depthMinDuration[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9558 depthHeight, 33333333,
9559 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1, 33333333};
9560 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS,
9561 depthMinDuration,
9562 sizeof(depthMinDuration) / sizeof(depthMinDuration[0]));
9563
9564 int64_t depthStallDuration[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9565 depthHeight, 0,
9566 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1, 0};
9567 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS,
9568 depthStallDuration,
9569 sizeof(depthStallDuration) / sizeof(depthStallDuration[0]));
9570
9571 uint8_t depthExclusive = ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE_FALSE;
9572 staticInfo.update(ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE, &depthExclusive, 1);
Emilian Peev656e4fa2017-06-02 16:47:04 +01009573
9574 int32_t pd_dimensions [] = {depthWidth, depthHeight, depthStride};
9575 staticInfo.update(NEXUS_EXPERIMENTAL_2017_PD_DATA_DIMENSIONS,
9576 pd_dimensions, sizeof(pd_dimensions) / sizeof(pd_dimensions[0]));
Emilian Peeve91e9ae2017-09-18 14:40:55 +01009577 available_characteristics_keys.add(NEXUS_EXPERIMENTAL_2017_PD_DATA_DIMENSIONS);
Emilian Peev835938b2017-08-31 16:59:54 +01009578
9579 staticInfo.update(NEXUS_EXPERIMENTAL_2017_EEPROM_PDAF_CALIB_RIGHT_GAINS,
9580 reinterpret_cast<uint8_t *>(gCamCapability[cameraId]->pdaf_cal.right_gain_map),
9581 sizeof(gCamCapability[cameraId]->pdaf_cal.right_gain_map));
Emilian Peeve91e9ae2017-09-18 14:40:55 +01009582 available_characteristics_keys.add(NEXUS_EXPERIMENTAL_2017_EEPROM_PDAF_CALIB_RIGHT_GAINS);
Emilian Peev835938b2017-08-31 16:59:54 +01009583
9584 staticInfo.update(NEXUS_EXPERIMENTAL_2017_EEPROM_PDAF_CALIB_LEFT_GAINS,
9585 reinterpret_cast<uint8_t *>(gCamCapability[cameraId]->pdaf_cal.left_gain_map),
9586 sizeof(gCamCapability[cameraId]->pdaf_cal.left_gain_map));
Emilian Peeve91e9ae2017-09-18 14:40:55 +01009587 available_characteristics_keys.add(NEXUS_EXPERIMENTAL_2017_EEPROM_PDAF_CALIB_LEFT_GAINS);
Emilian Peev835938b2017-08-31 16:59:54 +01009588
9589 staticInfo.update(NEXUS_EXPERIMENTAL_2017_EEPROM_PDAF_CALIB_CONV_COEFF,
9590 reinterpret_cast<uint8_t *>(gCamCapability[cameraId]->pdaf_cal.conversion_coeff),
9591 sizeof(gCamCapability[cameraId]->pdaf_cal.conversion_coeff));
Emilian Peeve91e9ae2017-09-18 14:40:55 +01009592 available_characteristics_keys.add(NEXUS_EXPERIMENTAL_2017_EEPROM_PDAF_CALIB_CONV_COEFF);
9593
Emilian Peev0f3c3162017-03-15 12:57:46 +00009594 }
9595
Thierry Strudel3d639192016-09-09 11:52:26 -07009596 int32_t scalar_formats[] = {
9597 ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE,
9598 ANDROID_SCALER_AVAILABLE_FORMATS_RAW16,
9599 ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888,
9600 ANDROID_SCALER_AVAILABLE_FORMATS_BLOB,
9601 HAL_PIXEL_FORMAT_RAW10,
9602 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED};
Emilian Peev0f3c3162017-03-15 12:57:46 +00009603 size_t scalar_formats_count = sizeof(scalar_formats) / sizeof(scalar_formats[0]);
9604 staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS, scalar_formats,
9605 scalar_formats_count);
Thierry Strudel3d639192016-09-09 11:52:26 -07009606
9607 int32_t available_processed_sizes[MAX_SIZES_CNT * 2];
9608 count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
9609 makeTable(gCamCapability[cameraId]->picture_sizes_tbl,
9610 count, MAX_SIZES_CNT, available_processed_sizes);
9611 staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
9612 available_processed_sizes, count * 2);
9613
9614 int32_t available_raw_sizes[MAX_SIZES_CNT * 2];
9615 count = MIN(gCamCapability[cameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
9616 makeTable(gCamCapability[cameraId]->raw_dim,
9617 count, MAX_SIZES_CNT, available_raw_sizes);
9618 staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_SIZES,
9619 available_raw_sizes, count * 2);
9620
9621 int32_t available_fps_ranges[MAX_SIZES_CNT * 2];
9622 count = MIN(gCamCapability[cameraId]->fps_ranges_tbl_cnt, MAX_SIZES_CNT);
9623 makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl,
9624 count, MAX_SIZES_CNT, available_fps_ranges);
9625 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
9626 available_fps_ranges, count * 2);
9627
9628 camera_metadata_rational exposureCompensationStep = {
9629 gCamCapability[cameraId]->exp_compensation_step.numerator,
9630 gCamCapability[cameraId]->exp_compensation_step.denominator};
9631 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
9632 &exposureCompensationStep, 1);
9633
9634 Vector<uint8_t> availableVstabModes;
9635 availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF);
9636 char eis_prop[PROPERTY_VALUE_MAX];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009637 bool eisSupported = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07009638 memset(eis_prop, 0, sizeof(eis_prop));
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009639 property_get("persist.camera.eis.enable", eis_prop, "1");
Thierry Strudel3d639192016-09-09 11:52:26 -07009640 uint8_t eis_prop_set = (uint8_t)atoi(eis_prop);
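    // Advertise VIDEO_STABILIZATION_MODE_ON only for back cameras when the
    // property allows it and the backend reports EIS 2.0/3.0 support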
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009641 count = IS_TYPE_MAX;
9642 count = MIN(gCamCapability[cameraId]->supported_is_types_cnt, count);
9643 for (size_t i = 0; i < count; i++) {
9644 if ((gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
9645 (gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0)) {
9646 eisSupported = true;
9647 break;
9648 }
9649 }
9650 if (facingBack && eis_prop_set && eisSupported) {
Thierry Strudel3d639192016-09-09 11:52:26 -07009651 availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON);
9652 }
9653 staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
9654 availableVstabModes.array(), availableVstabModes.size());
9655
9656 /*HAL 1 and HAL 3 common*/
9657 uint32_t zoomSteps = gCamCapability[cameraId]->zoom_ratio_tbl_cnt;
9658 uint32_t maxZoomStep = gCamCapability[cameraId]->zoom_ratio_tbl[zoomSteps - 1];
9659 uint32_t minZoomStep = 100; //as per HAL1/API1 spec
Zhijun He2a5df222017-04-04 18:20:38 -07009660 // Cap the max zoom to the max preferred value
9661 float maxZoom = MIN(maxZoomStep/minZoomStep, MAX_PREFERRED_ZOOM_RATIO);
Thierry Strudel3d639192016-09-09 11:52:26 -07009662 staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
9663 &maxZoom, 1);
9664
9665 uint8_t croppingType = ANDROID_SCALER_CROPPING_TYPE_CENTER_ONLY;
9666 staticInfo.update(ANDROID_SCALER_CROPPING_TYPE, &croppingType, 1);
9667
9668 int32_t max3aRegions[3] = {/*AE*/1,/*AWB*/ 0,/*AF*/ 1};
9669 if (gCamCapability[cameraId]->supported_focus_modes_cnt == 1)
9670 max3aRegions[2] = 0; /* AF not supported */
9671 staticInfo.update(ANDROID_CONTROL_MAX_REGIONS,
9672 max3aRegions, 3);
9673
9674 /* 0: OFF, 1: OFF+SIMPLE, 2: OFF+FULL, 3: OFF+SIMPLE+FULL */
9675 memset(prop, 0, sizeof(prop));
9676 property_get("persist.camera.facedetect", prop, "1");
9677 uint8_t supportedFaceDetectMode = (uint8_t)atoi(prop);
9678 LOGD("Support face detection mode: %d",
9679 supportedFaceDetectMode);
9680
9681 int32_t maxFaces = gCamCapability[cameraId]->max_num_roi;
Thierry Strudel04e026f2016-10-10 11:27:36 -07009682    /* support mode should be OFF if max number of faces is 0 */
9683 if (maxFaces <= 0) {
9684 supportedFaceDetectMode = 0;
9685 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009686 Vector<uint8_t> availableFaceDetectModes;
9687 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_OFF);
9688 if (supportedFaceDetectMode == 1) {
9689 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
9690 } else if (supportedFaceDetectMode == 2) {
9691 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
9692 } else if (supportedFaceDetectMode == 3) {
9693 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
9694 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
9695 } else {
9696 maxFaces = 0;
9697 }
9698 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
9699 availableFaceDetectModes.array(),
9700 availableFaceDetectModes.size());
9701 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
9702 (int32_t *)&maxFaces, 1);
Thierry Strudel54dc9782017-02-15 12:12:10 -08009703 uint8_t face_bsgc = gCamCapability[cameraId]->face_bsgc;
9704 staticInfo.update(QCAMERA3_STATS_BSGC_AVAILABLE,
9705 &face_bsgc, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -07009706
9707 int32_t exposureCompensationRange[] = {
9708 gCamCapability[cameraId]->exposure_compensation_min,
9709 gCamCapability[cameraId]->exposure_compensation_max};
9710 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
9711 exposureCompensationRange,
9712 sizeof(exposureCompensationRange)/sizeof(int32_t));
9713
9714 uint8_t lensFacing = (facingBack) ?
9715 ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
9716 staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1);
9717
9718 staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
9719 available_thumbnail_sizes,
9720 sizeof(available_thumbnail_sizes)/sizeof(int32_t));
9721
9722 /*all sizes will be clubbed into this tag*/
9723 count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
9724 /*android.scaler.availableStreamConfigurations*/
9725 Vector<int32_t> available_stream_configs;
9726 cam_dimension_t active_array_dim;
9727 active_array_dim.width = gCamCapability[cameraId]->active_array_size.width;
9728 active_array_dim.height = gCamCapability[cameraId]->active_array_size.height;
Thierry Strudel2896d122017-02-23 19:18:03 -08009729
 9730    /*advertise the list of supported input dimensions based on the property below.
 9731      By default all sizes up to 5MP will be advertised.
9732 Note that the setprop resolution format should be WxH.
9733 e.g: adb shell setprop persist.camera.input.minsize 1280x720
9734 To list all supported sizes, setprop needs to be set with "0x0" */
9735 cam_dimension_t minInputSize = {2592,1944}; //5MP
9736 memset(prop, 0, sizeof(prop));
9737 property_get("persist.camera.input.minsize", prop, "2592x1944");
9738 if (strlen(prop) > 0) {
9739 char *saveptr = NULL;
9740 char *token = strtok_r(prop, "x", &saveptr);
9741 if (token != NULL) {
9742 minInputSize.width = atoi(token);
9743 }
9744 token = strtok_r(NULL, "x", &saveptr);
9745 if (token != NULL) {
9746 minInputSize.height = atoi(token);
9747 }
9748 }
9749
Thierry Strudel3d639192016-09-09 11:52:26 -07009750 /* Add input/output stream configurations for each scalar formats*/
9751 for (size_t j = 0; j < scalar_formats_count; j++) {
9752 switch (scalar_formats[j]) {
9753 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
9754 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
9755 case HAL_PIXEL_FORMAT_RAW10:
9756 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9757 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
9758 addStreamConfig(available_stream_configs, scalar_formats[j],
9759 gCamCapability[cameraId]->raw_dim[i],
9760 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
9761 }
9762 break;
9763 case HAL_PIXEL_FORMAT_BLOB:
9764 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9765 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9766 addStreamConfig(available_stream_configs, scalar_formats[j],
9767 gCamCapability[cameraId]->picture_sizes_tbl[i],
9768 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
9769 }
9770 break;
9771 case HAL_PIXEL_FORMAT_YCbCr_420_888:
9772 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
9773 default:
9774 cam_dimension_t largest_picture_size;
9775 memset(&largest_picture_size, 0, sizeof(cam_dimension_t));
9776 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9777 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9778 addStreamConfig(available_stream_configs, scalar_formats[j],
9779 gCamCapability[cameraId]->picture_sizes_tbl[i],
9780 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
Thierry Strudel2896d122017-02-23 19:18:03 -08009781            /* For the 2 formats below we also support input streams for reprocessing; advertise those */
Zhijun Hee0cc0ae2017-05-19 22:19:27 -07009782 if ((scalar_formats[j] == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
9783 scalar_formats[j] == HAL_PIXEL_FORMAT_YCbCr_420_888) && i == 0) {
Thierry Strudel2896d122017-02-23 19:18:03 -08009784 if ((gCamCapability[cameraId]->picture_sizes_tbl[i].width
9785 >= minInputSize.width) || (gCamCapability[cameraId]->
9786 picture_sizes_tbl[i].height >= minInputSize.height)) {
9787 addStreamConfig(available_stream_configs, scalar_formats[j],
9788 gCamCapability[cameraId]->picture_sizes_tbl[i],
9789 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_INPUT);
9790 }
9791 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009792 }
Thierry Strudel2896d122017-02-23 19:18:03 -08009793
Thierry Strudel3d639192016-09-09 11:52:26 -07009794 break;
9795 }
9796 }
9797
9798 staticInfo.update(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
9799 available_stream_configs.array(), available_stream_configs.size());
9800 static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
9801 staticInfo.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
9802
9803 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
9804 staticInfo.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
9805
9806 /* android.scaler.availableMinFrameDurations */
9807 Vector<int64_t> available_min_durations;
9808 for (size_t j = 0; j < scalar_formats_count; j++) {
9809 switch (scalar_formats[j]) {
9810 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
9811 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
9812 case HAL_PIXEL_FORMAT_RAW10:
9813 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9814 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
9815 available_min_durations.add(scalar_formats[j]);
9816 available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
9817 available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
9818 available_min_durations.add(gCamCapability[cameraId]->raw_min_duration[i]);
9819 }
9820 break;
9821 default:
9822 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9823 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9824 available_min_durations.add(scalar_formats[j]);
9825 available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
9826 available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
9827 available_min_durations.add(gCamCapability[cameraId]->picture_min_duration[i]);
9828 }
9829 break;
9830 }
9831 }
9832 staticInfo.update(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
9833 available_min_durations.array(), available_min_durations.size());
9834
9835 Vector<int32_t> available_hfr_configs;
9836 for (size_t i = 0; i < gCamCapability[cameraId]->hfr_tbl_cnt; i++) {
9837 int32_t fps = 0;
9838 switch (gCamCapability[cameraId]->hfr_tbl[i].mode) {
9839 case CAM_HFR_MODE_60FPS:
9840 fps = 60;
9841 break;
9842 case CAM_HFR_MODE_90FPS:
9843 fps = 90;
9844 break;
9845 case CAM_HFR_MODE_120FPS:
9846 fps = 120;
9847 break;
9848 case CAM_HFR_MODE_150FPS:
9849 fps = 150;
9850 break;
9851 case CAM_HFR_MODE_180FPS:
9852 fps = 180;
9853 break;
9854 case CAM_HFR_MODE_210FPS:
9855 fps = 210;
9856 break;
9857 case CAM_HFR_MODE_240FPS:
9858 fps = 240;
9859 break;
9860 case CAM_HFR_MODE_480FPS:
9861 fps = 480;
9862 break;
9863 case CAM_HFR_MODE_OFF:
9864 case CAM_HFR_MODE_MAX:
9865 default:
9866 break;
9867 }
9868
9869 /* Advertise only MIN_FPS_FOR_BATCH_MODE or above as HIGH_SPEED_CONFIGS */
9870 if (fps >= MIN_FPS_FOR_BATCH_MODE) {
9871 /* For each HFR frame rate, need to advertise one variable fps range
9872 * and one fixed fps range per dimension. Eg: for 120 FPS, advertise [30, 120]
9873 * and [120, 120]. While camcorder preview alone is running [30, 120] is
9874 * set by the app. When video recording is started, [120, 120] is
9875 * set. This way sensor configuration does not change when recording
9876 * is started */
9877
9878 /* (width, height, fps_min, fps_max, batch_size_max) */
9879 for (size_t j = 0; j < gCamCapability[cameraId]->hfr_tbl[i].dim_cnt &&
9880 j < MAX_SIZES_CNT; j++) {
9881 available_hfr_configs.add(
9882 gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
9883 available_hfr_configs.add(
9884 gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
9885 available_hfr_configs.add(PREVIEW_FPS_FOR_HFR);
9886 available_hfr_configs.add(fps);
9887 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
9888
9889 /* (width, height, fps_min, fps_max, batch_size_max) */
9890 available_hfr_configs.add(
9891 gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
9892 available_hfr_configs.add(
9893 gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
9894 available_hfr_configs.add(fps);
9895 available_hfr_configs.add(fps);
9896 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
9897 }
9898 }
9899 }
9900 //Advertise HFR capability only if the property is set
9901 memset(prop, 0, sizeof(prop));
9902 property_get("persist.camera.hal3hfr.enable", prop, "1");
9903 uint8_t hfrEnable = (uint8_t)atoi(prop);
9904
9905 if(hfrEnable && available_hfr_configs.array()) {
9906 staticInfo.update(
9907 ANDROID_CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS,
9908 available_hfr_configs.array(), available_hfr_configs.size());
9909 }
9910
9911 int32_t max_jpeg_size = (int32_t)calcMaxJpegSize(cameraId);
9912 staticInfo.update(ANDROID_JPEG_MAX_SIZE,
9913 &max_jpeg_size, 1);
9914
9915 uint8_t avail_effects[CAM_EFFECT_MODE_MAX];
9916 size_t size = 0;
9917 count = CAM_EFFECT_MODE_MAX;
9918 count = MIN(gCamCapability[cameraId]->supported_effects_cnt, count);
9919 for (size_t i = 0; i < count; i++) {
9920 int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
9921 gCamCapability[cameraId]->supported_effects[i]);
9922 if (NAME_NOT_FOUND != val) {
9923 avail_effects[size] = (uint8_t)val;
9924 size++;
9925 }
9926 }
9927 staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
9928 avail_effects,
9929 size);
9930
9931 uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX];
9932 uint8_t supported_indexes[CAM_SCENE_MODE_MAX];
9933 size_t supported_scene_modes_cnt = 0;
9934 count = CAM_SCENE_MODE_MAX;
9935 count = MIN(gCamCapability[cameraId]->supported_scene_modes_cnt, count);
9936 for (size_t i = 0; i < count; i++) {
9937 if (gCamCapability[cameraId]->supported_scene_modes[i] !=
9938 CAM_SCENE_MODE_OFF) {
9939 int val = lookupFwkName(SCENE_MODES_MAP,
9940 METADATA_MAP_SIZE(SCENE_MODES_MAP),
9941 gCamCapability[cameraId]->supported_scene_modes[i]);
Mansoor Aftab58465fa2017-01-26 15:02:44 -08009942
Thierry Strudel3d639192016-09-09 11:52:26 -07009943 if (NAME_NOT_FOUND != val) {
9944 avail_scene_modes[supported_scene_modes_cnt] = (uint8_t)val;
9945 supported_indexes[supported_scene_modes_cnt] = (uint8_t)i;
9946 supported_scene_modes_cnt++;
9947 }
9948 }
9949 }
9950 staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
9951 avail_scene_modes,
9952 supported_scene_modes_cnt);
9953
9954 uint8_t scene_mode_overrides[CAM_SCENE_MODE_MAX * 3];
9955 makeOverridesList(gCamCapability[cameraId]->scene_mode_overrides,
9956 supported_scene_modes_cnt,
9957 CAM_SCENE_MODE_MAX,
9958 scene_mode_overrides,
9959 supported_indexes,
9960 cameraId);
9961
9962 if (supported_scene_modes_cnt == 0) {
9963 supported_scene_modes_cnt = 1;
9964 avail_scene_modes[0] = ANDROID_CONTROL_SCENE_MODE_DISABLED;
9965 }
9966
9967 staticInfo.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
9968 scene_mode_overrides, supported_scene_modes_cnt * 3);
9969
9970 uint8_t available_control_modes[] = {ANDROID_CONTROL_MODE_OFF,
9971 ANDROID_CONTROL_MODE_AUTO,
9972 ANDROID_CONTROL_MODE_USE_SCENE_MODE};
9973 staticInfo.update(ANDROID_CONTROL_AVAILABLE_MODES,
9974 available_control_modes,
9975 3);
9976
9977 uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX];
9978 size = 0;
9979 count = CAM_ANTIBANDING_MODE_MAX;
9980 count = MIN(gCamCapability[cameraId]->supported_antibandings_cnt, count);
9981 for (size_t i = 0; i < count; i++) {
9982 int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
9983 gCamCapability[cameraId]->supported_antibandings[i]);
9984 if (NAME_NOT_FOUND != val) {
9985 avail_antibanding_modes[size] = (uint8_t)val;
9986 size++;
9987 }
9988
9989 }
9990 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
9991 avail_antibanding_modes,
9992 size);
9993
9994 uint8_t avail_abberation_modes[] = {
9995 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
9996 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
9997 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY};
9998 count = CAM_COLOR_CORRECTION_ABERRATION_MAX;
9999 count = MIN(gCamCapability[cameraId]->aberration_modes_count, count);
10000 if (0 == count) {
 10001        // If no aberration correction modes are available for a device, advertise only the OFF mode
10002 size = 1;
10003 } else {
 10004        // If count is not zero then at least one of FAST or HIGH_QUALITY is supported
 10005        // So, advertise all 3 modes if at least one mode is supported, as per the
 10006        // new M requirement
10007 size = 3;
10008 }
10009 staticInfo.update(ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
10010 avail_abberation_modes,
10011 size);
10012
10013 uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX];
10014 size = 0;
10015 count = CAM_FOCUS_MODE_MAX;
10016 count = MIN(gCamCapability[cameraId]->supported_focus_modes_cnt, count);
10017 for (size_t i = 0; i < count; i++) {
10018 int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
10019 gCamCapability[cameraId]->supported_focus_modes[i]);
10020 if (NAME_NOT_FOUND != val) {
10021 avail_af_modes[size] = (uint8_t)val;
10022 size++;
10023 }
10024 }
10025 staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
10026 avail_af_modes,
10027 size);
10028
10029 uint8_t avail_awb_modes[CAM_WB_MODE_MAX];
10030 size = 0;
10031 count = CAM_WB_MODE_MAX;
10032 count = MIN(gCamCapability[cameraId]->supported_white_balances_cnt, count);
10033 for (size_t i = 0; i < count; i++) {
10034 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
10035 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
10036 gCamCapability[cameraId]->supported_white_balances[i]);
10037 if (NAME_NOT_FOUND != val) {
10038 avail_awb_modes[size] = (uint8_t)val;
10039 size++;
10040 }
10041 }
10042 staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
10043 avail_awb_modes,
10044 size);
10045
10046 uint8_t available_flash_levels[CAM_FLASH_FIRING_LEVEL_MAX];
10047 count = CAM_FLASH_FIRING_LEVEL_MAX;
10048 count = MIN(gCamCapability[cameraId]->supported_flash_firing_level_cnt,
10049 count);
10050 for (size_t i = 0; i < count; i++) {
10051 available_flash_levels[i] =
10052 gCamCapability[cameraId]->supported_firing_levels[i];
10053 }
10054 staticInfo.update(ANDROID_FLASH_FIRING_POWER,
10055 available_flash_levels, count);
10056
10057 uint8_t flashAvailable;
10058 if (gCamCapability[cameraId]->flash_available)
10059 flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_TRUE;
10060 else
10061 flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_FALSE;
10062 staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE,
10063 &flashAvailable, 1);
10064
10065 Vector<uint8_t> avail_ae_modes;
10066 count = CAM_AE_MODE_MAX;
10067 count = MIN(gCamCapability[cameraId]->supported_ae_modes_cnt, count);
10068 for (size_t i = 0; i < count; i++) {
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -080010069 uint8_t aeMode = gCamCapability[cameraId]->supported_ae_modes[i];
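        // Remap the vendor external-flash AE mode onto the experimental framework enum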
10070 if (aeMode == CAM_AE_MODE_ON_EXTERNAL_FLASH) {
10071 aeMode = NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH;
10072 }
10073 avail_ae_modes.add(aeMode);
Thierry Strudel3d639192016-09-09 11:52:26 -070010074 }
10075 if (flashAvailable) {
10076 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH);
10077 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH);
10078 }
10079 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
10080 avail_ae_modes.array(),
10081 avail_ae_modes.size());
10082
10083 int32_t sensitivity_range[2];
10084 sensitivity_range[0] = gCamCapability[cameraId]->sensitivity_range.min_sensitivity;
10085 sensitivity_range[1] = gCamCapability[cameraId]->sensitivity_range.max_sensitivity;
10086 staticInfo.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
10087 sensitivity_range,
10088 sizeof(sensitivity_range) / sizeof(int32_t));
10089
10090 staticInfo.update(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
10091 &gCamCapability[cameraId]->max_analog_sensitivity,
10092 1);
10093
10094 int32_t sensor_orientation = (int32_t)gCamCapability[cameraId]->sensor_mount_angle;
10095 staticInfo.update(ANDROID_SENSOR_ORIENTATION,
10096 &sensor_orientation,
10097 1);
10098
10099 int32_t max_output_streams[] = {
10100 MAX_STALLING_STREAMS,
10101 MAX_PROCESSED_STREAMS,
10102 MAX_RAW_STREAMS};
10103 staticInfo.update(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
10104 max_output_streams,
10105 sizeof(max_output_streams)/sizeof(max_output_streams[0]));
10106
10107 uint8_t avail_leds = 0;
10108 staticInfo.update(ANDROID_LED_AVAILABLE_LEDS,
10109 &avail_leds, 0);
10110
10111 uint8_t focus_dist_calibrated;
10112 int val = lookupFwkName(FOCUS_CALIBRATION_MAP, METADATA_MAP_SIZE(FOCUS_CALIBRATION_MAP),
10113 gCamCapability[cameraId]->focus_dist_calibrated);
10114 if (NAME_NOT_FOUND != val) {
10115 focus_dist_calibrated = (uint8_t)val;
10116 staticInfo.update(ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
10117 &focus_dist_calibrated, 1);
10118 }
10119
10120 int32_t avail_testpattern_modes[MAX_TEST_PATTERN_CNT];
10121 size = 0;
10122 count = MIN(gCamCapability[cameraId]->supported_test_pattern_modes_cnt,
10123 MAX_TEST_PATTERN_CNT);
10124 for (size_t i = 0; i < count; i++) {
10125 int testpatternMode = lookupFwkName(TEST_PATTERN_MAP, METADATA_MAP_SIZE(TEST_PATTERN_MAP),
10126 gCamCapability[cameraId]->supported_test_pattern_modes[i]);
10127 if (NAME_NOT_FOUND != testpatternMode) {
10128 avail_testpattern_modes[size] = testpatternMode;
10129 size++;
10130 }
10131 }
10132 staticInfo.update(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
10133 avail_testpattern_modes,
10134 size);
10135
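    // Pipeline depth = in-flight request limit plus empty-pipeline and frame-skip latencies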
10136 uint8_t max_pipeline_depth = (uint8_t)(MAX_INFLIGHT_REQUESTS + EMPTY_PIPELINE_DELAY + FRAME_SKIP_DELAY);
10137 staticInfo.update(ANDROID_REQUEST_PIPELINE_MAX_DEPTH,
10138 &max_pipeline_depth,
10139 1);
10140
10141 int32_t partial_result_count = PARTIAL_RESULT_COUNT;
10142 staticInfo.update(ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
10143 &partial_result_count,
10144 1);
10145
10146 int32_t max_stall_duration = MAX_REPROCESS_STALL;
10147 staticInfo.update(ANDROID_REPROCESS_MAX_CAPTURE_STALL, &max_stall_duration, 1);
10148
10149 Vector<uint8_t> available_capabilities;
10150 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE);
10151 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR);
10152 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_POST_PROCESSING);
10153 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS);
10154 if (supportBurst) {
10155 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE);
10156 }
10157 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING);
10158 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING);
10159 if (hfrEnable && available_hfr_configs.array()) {
10160 available_capabilities.add(
10161 ANDROID_REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO);
10162 }
10163
10164 if (CAM_SENSOR_YUV != gCamCapability[cameraId]->sensor_type.sens_type) {
10165 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_RAW);
10166 }
10167 staticInfo.update(ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
10168 available_capabilities.array(),
10169 available_capabilities.size());
10170
10171 //aeLockAvailable to be set to true if capabilities has MANUAL_SENSOR or BURST_CAPTURE
10172 //Assumption is that all bayer cameras support MANUAL_SENSOR.
10173 uint8_t aeLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
10174 ANDROID_CONTROL_AE_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AE_LOCK_AVAILABLE_FALSE;
10175
10176 staticInfo.update(ANDROID_CONTROL_AE_LOCK_AVAILABLE,
10177 &aeLockAvailable, 1);
10178
10179 //awbLockAvailable to be set to true if capabilities has MANUAL_POST_PROCESSING or
10180 //BURST_CAPTURE. Assumption is that all bayer cameras support MANUAL_POST_PROCESSING.
10181 uint8_t awbLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
10182 ANDROID_CONTROL_AWB_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AWB_LOCK_AVAILABLE_FALSE;
10183
10184 staticInfo.update(ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
10185 &awbLockAvailable, 1);
10186
10187 int32_t max_input_streams = 1;
10188 staticInfo.update(ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
10189 &max_input_streams,
10190 1);
10191
10192 /* format of the map is : input format, num_output_formats, outputFormat1,..,outputFormatN */
10193 int32_t io_format_map[] = {HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 2,
10194 HAL_PIXEL_FORMAT_BLOB, HAL_PIXEL_FORMAT_YCbCr_420_888,
10195 HAL_PIXEL_FORMAT_YCbCr_420_888, 2, HAL_PIXEL_FORMAT_BLOB,
10196 HAL_PIXEL_FORMAT_YCbCr_420_888};
10197 staticInfo.update(ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
10198 io_format_map, sizeof(io_format_map)/sizeof(io_format_map[0]));
10199
10200 int32_t max_latency = ANDROID_SYNC_MAX_LATENCY_PER_FRAME_CONTROL;
10201 staticInfo.update(ANDROID_SYNC_MAX_LATENCY,
10202 &max_latency,
10203 1);
10204
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010205#ifndef USE_HAL_3_3
10206 int32_t isp_sensitivity_range[2];
10207 isp_sensitivity_range[0] =
10208 gCamCapability[cameraId]->isp_sensitivity_range.min_sensitivity;
10209 isp_sensitivity_range[1] =
10210 gCamCapability[cameraId]->isp_sensitivity_range.max_sensitivity;
10211 staticInfo.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
10212 isp_sensitivity_range,
10213 sizeof(isp_sensitivity_range) / sizeof(isp_sensitivity_range[0]));
10214#endif
10215
Thierry Strudel3d639192016-09-09 11:52:26 -070010216 uint8_t available_hot_pixel_modes[] = {ANDROID_HOT_PIXEL_MODE_FAST,
10217 ANDROID_HOT_PIXEL_MODE_HIGH_QUALITY};
10218 staticInfo.update(ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
10219 available_hot_pixel_modes,
10220 sizeof(available_hot_pixel_modes)/sizeof(available_hot_pixel_modes[0]));
10221
10222 uint8_t available_shading_modes[] = {ANDROID_SHADING_MODE_OFF,
10223 ANDROID_SHADING_MODE_FAST,
10224 ANDROID_SHADING_MODE_HIGH_QUALITY};
10225 staticInfo.update(ANDROID_SHADING_AVAILABLE_MODES,
10226 available_shading_modes,
10227 3);
10228
10229 uint8_t available_lens_shading_map_modes[] = {ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF,
10230 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON};
10231 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
10232 available_lens_shading_map_modes,
10233 2);
10234
10235 uint8_t available_edge_modes[] = {ANDROID_EDGE_MODE_OFF,
10236 ANDROID_EDGE_MODE_FAST,
10237 ANDROID_EDGE_MODE_HIGH_QUALITY,
10238 ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG};
10239 staticInfo.update(ANDROID_EDGE_AVAILABLE_EDGE_MODES,
10240 available_edge_modes,
10241 sizeof(available_edge_modes)/sizeof(available_edge_modes[0]));
10242
10243 uint8_t available_noise_red_modes[] = {ANDROID_NOISE_REDUCTION_MODE_OFF,
10244 ANDROID_NOISE_REDUCTION_MODE_FAST,
10245 ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY,
10246 ANDROID_NOISE_REDUCTION_MODE_MINIMAL,
10247 ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG};
10248 staticInfo.update(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
10249 available_noise_red_modes,
10250 sizeof(available_noise_red_modes)/sizeof(available_noise_red_modes[0]));
10251
10252 uint8_t available_tonemap_modes[] = {ANDROID_TONEMAP_MODE_CONTRAST_CURVE,
10253 ANDROID_TONEMAP_MODE_FAST,
10254 ANDROID_TONEMAP_MODE_HIGH_QUALITY};
10255 staticInfo.update(ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
10256 available_tonemap_modes,
10257 sizeof(available_tonemap_modes)/sizeof(available_tonemap_modes[0]));
10258
10259 uint8_t available_hot_pixel_map_modes[] = {ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF};
10260 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
10261 available_hot_pixel_map_modes,
10262 sizeof(available_hot_pixel_map_modes)/sizeof(available_hot_pixel_map_modes[0]));
10263
10264 val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
10265 gCamCapability[cameraId]->reference_illuminant1);
10266 if (NAME_NOT_FOUND != val) {
10267 uint8_t fwkReferenceIlluminant = (uint8_t)val;
10268 staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT1, &fwkReferenceIlluminant, 1);
10269 }
10270
10271 val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
10272 gCamCapability[cameraId]->reference_illuminant2);
10273 if (NAME_NOT_FOUND != val) {
10274 uint8_t fwkReferenceIlluminant = (uint8_t)val;
10275 staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT2, &fwkReferenceIlluminant, 1);
10276 }
10277
10278 staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX1, (camera_metadata_rational_t *)
10279 (void *)gCamCapability[cameraId]->forward_matrix1,
10280 FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
10281
10282 staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX2, (camera_metadata_rational_t *)
10283 (void *)gCamCapability[cameraId]->forward_matrix2,
10284 FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
10285
10286 staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM1, (camera_metadata_rational_t *)
10287 (void *)gCamCapability[cameraId]->color_transform1,
10288 COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
10289
10290 staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM2, (camera_metadata_rational_t *)
10291 (void *)gCamCapability[cameraId]->color_transform2,
10292 COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
10293
10294 staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM1, (camera_metadata_rational_t *)
10295 (void *)gCamCapability[cameraId]->calibration_transform1,
10296 CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
10297
10298 staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM2, (camera_metadata_rational_t *)
10299 (void *)gCamCapability[cameraId]->calibration_transform2,
10300 CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
10301
10302 int32_t request_keys_basic[] = {ANDROID_COLOR_CORRECTION_MODE,
10303 ANDROID_COLOR_CORRECTION_TRANSFORM, ANDROID_COLOR_CORRECTION_GAINS,
10304 ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
10305 ANDROID_CONTROL_AE_ANTIBANDING_MODE, ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
10306 ANDROID_CONTROL_AE_LOCK, ANDROID_CONTROL_AE_MODE,
10307 ANDROID_CONTROL_AE_REGIONS, ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
10308 ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, ANDROID_CONTROL_AF_MODE,
10309 ANDROID_CONTROL_AF_TRIGGER, ANDROID_CONTROL_AWB_LOCK,
10310 ANDROID_CONTROL_AWB_MODE, ANDROID_CONTROL_CAPTURE_INTENT,
10311 ANDROID_CONTROL_EFFECT_MODE, ANDROID_CONTROL_MODE,
10312 ANDROID_CONTROL_SCENE_MODE, ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
10313 ANDROID_DEMOSAIC_MODE, ANDROID_EDGE_MODE,
10314 ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
10315 ANDROID_JPEG_GPS_COORDINATES,
10316 ANDROID_JPEG_GPS_PROCESSING_METHOD, ANDROID_JPEG_GPS_TIMESTAMP,
10317 ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY, ANDROID_JPEG_THUMBNAIL_QUALITY,
10318 ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE, ANDROID_LENS_FILTER_DENSITY,
10319 ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
10320 ANDROID_LENS_OPTICAL_STABILIZATION_MODE, ANDROID_NOISE_REDUCTION_MODE,
10321 ANDROID_REQUEST_ID, ANDROID_REQUEST_TYPE,
10322 ANDROID_SCALER_CROP_REGION, ANDROID_SENSOR_EXPOSURE_TIME,
10323 ANDROID_SENSOR_FRAME_DURATION, ANDROID_HOT_PIXEL_MODE,
10324 ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE,
10325 ANDROID_SENSOR_SENSITIVITY, ANDROID_SHADING_MODE,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010326#ifndef USE_HAL_3_3
10327 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
10328#endif
Thierry Strudel3d639192016-09-09 11:52:26 -070010329 ANDROID_STATISTICS_FACE_DETECT_MODE,
Thierry Strudel54dc9782017-02-15 12:12:10 -080010330 ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
Thierry Strudel3d639192016-09-09 11:52:26 -070010331 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, ANDROID_TONEMAP_CURVE_BLUE,
10332 ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
Shuzhen Wang2abea3d2016-03-31 11:09:27 -070010333 ANDROID_BLACK_LEVEL_LOCK, NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE,
Emilian Peeve91e9ae2017-09-18 14:40:55 +010010334 QCAMERA3_PRIVATEDATA_REPROCESS, QCAMERA3_CDS_MODE, QCAMERA3_CDS_INFO,
10335 QCAMERA3_CROP_COUNT_REPROCESS, QCAMERA3_CROP_REPROCESS,
10336 QCAMERA3_CROP_ROI_MAP_REPROCESS, QCAMERA3_TEMPORAL_DENOISE_ENABLE,
10337 QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, QCAMERA3_USE_ISO_EXP_PRIORITY,
10338 QCAMERA3_SELECT_PRIORITY, QCAMERA3_USE_SATURATION,
10339 QCAMERA3_EXPOSURE_METER, QCAMERA3_USE_AV_TIMER,
10340 QCAMERA3_DUALCAM_LINK_ENABLE, QCAMERA3_DUALCAM_LINK_IS_MAIN,
10341 QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID,
10342 QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS,
10343 QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB,
10344 QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB,
10345 QCAMERA3_JPEG_ENCODE_CROP_ENABLE, QCAMERA3_JPEG_ENCODE_CROP_RECT,
10346 QCAMERA3_JPEG_ENCODE_CROP_ROI, QCAMERA3_VIDEO_HDR_MODE,
10347 QCAMERA3_IR_MODE, QCAMERA3_AEC_CONVERGENCE_SPEED,
10348 QCAMERA3_AWB_CONVERGENCE_SPEED, QCAMERA3_INSTANT_AEC_MODE,
10349 QCAMERA3_SHARPNESS_STRENGTH, QCAMERA3_HISTOGRAM_MODE,
10350 QCAMERA3_BINNING_CORRECTION_MODE,
Samuel Ha68ba5172016-12-15 18:41:12 -080010351 /* DevCamDebug metadata request_keys_basic */
10352 DEVCAMDEBUG_META_ENABLE,
10353 /* DevCamDebug metadata end */
Shuzhen Wang14415f52016-11-16 18:26:18 -080010354 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE,
Ranjith Kagathi Ananda0533b682017-03-24 17:52:46 -070010355 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS,
Ranjith Kagathi Anandae5df3752017-04-28 11:22:51 -070010356 TANGO_MODE_DATA_SENSOR_FULLFOV,
Shuzhen Wangf1b4ddc2017-04-10 18:22:11 -070010357 NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER,
Emilian Peev656e4fa2017-06-02 16:47:04 +010010358 NEXUS_EXPERIMENTAL_2017_PD_DATA_ENABLE,
Emilian Peeve91e9ae2017-09-18 14:40:55 +010010359 NEXUS_EXPERIMENTAL_2017_EXIF_MAKERNOTE
Samuel Ha68ba5172016-12-15 18:41:12 -080010360 };
Thierry Strudel3d639192016-09-09 11:52:26 -070010361
10362 size_t request_keys_cnt =
10363 sizeof(request_keys_basic)/sizeof(request_keys_basic[0]);
10364 Vector<int32_t> available_request_keys;
10365 available_request_keys.appendArray(request_keys_basic, request_keys_cnt);
10366 if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
10367 available_request_keys.add(ANDROID_CONTROL_AF_REGIONS);
10368 }
10369
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010370 if (gExposeEnableZslKey) {
Chenjie Luo4a761802017-06-13 17:35:54 +000010371 available_request_keys.add(ANDROID_CONTROL_ENABLE_ZSL);
Chien-Yu Chen0a921f92017-08-27 17:25:33 -070010372 available_request_keys.add(NEXUS_EXPERIMENTAL_2017_POSTVIEW);
Chien-Yu Chenb0981e32017-08-28 19:27:35 -070010373 available_request_keys.add(NEXUS_EXPERIMENTAL_2017_CONTINUOUS_ZSL_CAPTURE);
Chien-Yu Chenec328c82017-08-30 16:41:08 -070010374 available_request_keys.add(NEXUS_EXPERIMENTAL_2017_DISABLE_HDRPLUS);
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010375 }
10376
Thierry Strudel3d639192016-09-09 11:52:26 -070010377 staticInfo.update(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS,
10378 available_request_keys.array(), available_request_keys.size());
10379
10380 int32_t result_keys_basic[] = {ANDROID_COLOR_CORRECTION_TRANSFORM,
10381 ANDROID_COLOR_CORRECTION_GAINS, ANDROID_CONTROL_AE_MODE, ANDROID_CONTROL_AE_REGIONS,
10382 ANDROID_CONTROL_AE_STATE, ANDROID_CONTROL_AF_MODE,
10383 ANDROID_CONTROL_AF_STATE, ANDROID_CONTROL_AWB_MODE,
10384 ANDROID_CONTROL_AWB_STATE, ANDROID_CONTROL_MODE, ANDROID_EDGE_MODE,
10385 ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
10386 ANDROID_FLASH_STATE, ANDROID_JPEG_GPS_COORDINATES, ANDROID_JPEG_GPS_PROCESSING_METHOD,
10387 ANDROID_JPEG_GPS_TIMESTAMP, ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY,
10388 ANDROID_JPEG_THUMBNAIL_QUALITY, ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE,
10389 ANDROID_LENS_FILTER_DENSITY, ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
10390 ANDROID_LENS_FOCUS_RANGE, ANDROID_LENS_STATE, ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
10391 ANDROID_NOISE_REDUCTION_MODE, ANDROID_REQUEST_ID,
10392 ANDROID_SCALER_CROP_REGION, ANDROID_SHADING_MODE, ANDROID_SENSOR_EXPOSURE_TIME,
10393 ANDROID_SENSOR_FRAME_DURATION, ANDROID_SENSOR_SENSITIVITY,
10394 ANDROID_SENSOR_TIMESTAMP, ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
10395 ANDROID_SENSOR_PROFILE_TONE_CURVE, ANDROID_BLACK_LEVEL_LOCK, ANDROID_TONEMAP_CURVE_BLUE,
10396 ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
Thierry Strudel54dc9782017-02-15 12:12:10 -080010397 ANDROID_STATISTICS_FACE_DETECT_MODE,
Thierry Strudel3d639192016-09-09 11:52:26 -070010398 ANDROID_STATISTICS_SHARPNESS_MAP, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
10399 ANDROID_STATISTICS_PREDICTED_COLOR_GAINS, ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
10400 ANDROID_STATISTICS_SCENE_FLICKER, ANDROID_STATISTICS_FACE_RECTANGLES,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010401 ANDROID_STATISTICS_FACE_SCORES,
10402#ifndef USE_HAL_3_3
10403 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
10404#endif
Shuzhen Wang2abea3d2016-03-31 11:09:27 -070010405 NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE,
Shuzhen Wange763e802016-03-31 10:24:29 -070010406 NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE,
Emilian Peeve91e9ae2017-09-18 14:40:55 +010010407 QCAMERA3_PRIVATEDATA_REPROCESS, QCAMERA3_CDS_MODE, QCAMERA3_CDS_INFO,
10408 QCAMERA3_CROP_COUNT_REPROCESS, QCAMERA3_CROP_REPROCESS,
10409 QCAMERA3_CROP_ROI_MAP_REPROCESS, QCAMERA3_TUNING_META_DATA_BLOB,
10410 QCAMERA3_TEMPORAL_DENOISE_ENABLE, QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE,
10411 QCAMERA3_EXPOSURE_METER, QCAMERA3_SENSOR_DYNAMIC_BLACK_LEVEL_PATTERN,
10412 QCAMERA3_DUALCAM_LINK_ENABLE, QCAMERA3_DUALCAM_LINK_IS_MAIN,
10413 QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID,
10414 QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS,
10415 QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB,
10416 QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB, QCAMERA3_VIDEO_HDR_MODE,
10417 QCAMERA3_IR_MODE, QCAMERA3_AEC_CONVERGENCE_SPEED,
10418 QCAMERA3_AWB_CONVERGENCE_SPEED, QCAMERA3_INSTANT_AEC_MODE,
10419 QCAMERA3_HISTOGRAM_MODE, QCAMERA3_BINNING_CORRECTION_MODE,
10420 QCAMERA3_STATS_IS_HDR_SCENE, QCAMERA3_STATS_IS_HDR_SCENE_CONFIDENCE,
10421 QCAMERA3_STATS_BLINK_DETECTED, QCAMERA3_STATS_BLINK_DEGREE,
10422 QCAMERA3_STATS_SMILE_DEGREE, QCAMERA3_STATS_SMILE_CONFIDENCE,
10423 QCAMERA3_STATS_GAZE_ANGLE, QCAMERA3_STATS_GAZE_DIRECTION,
10424 QCAMERA3_STATS_GAZE_DEGREE,
Samuel Ha68ba5172016-12-15 18:41:12 -080010425 // DevCamDebug metadata result_keys_basic
10426 DEVCAMDEBUG_META_ENABLE,
10427 // DevCamDebug metadata result_keys AF
10428 DEVCAMDEBUG_AF_LENS_POSITION,
10429 DEVCAMDEBUG_AF_TOF_CONFIDENCE,
10430 DEVCAMDEBUG_AF_TOF_DISTANCE,
10431 DEVCAMDEBUG_AF_LUMA,
10432 DEVCAMDEBUG_AF_HAF_STATE,
10433 DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
10434 DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
10435 DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
10436 DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
10437 DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
10438 DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
10439 DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
10440 DEVCAMDEBUG_AF_MONITOR_REFOCUS,
10441 DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
10442 DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
10443 DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
10444 DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
10445 DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
10446 DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
10447 DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
10448 DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
10449 DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
10450 DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
10451 DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
10452 DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
10453 DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
10454 DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
10455 // DevCamDebug metadata result_keys AEC
10456 DEVCAMDEBUG_AEC_TARGET_LUMA,
10457 DEVCAMDEBUG_AEC_COMP_LUMA,
10458 DEVCAMDEBUG_AEC_AVG_LUMA,
10459 DEVCAMDEBUG_AEC_CUR_LUMA,
10460 DEVCAMDEBUG_AEC_LINECOUNT,
10461 DEVCAMDEBUG_AEC_REAL_GAIN,
10462 DEVCAMDEBUG_AEC_EXP_INDEX,
10463 DEVCAMDEBUG_AEC_LUX_IDX,
Samuel Ha34229982017-02-17 13:51:11 -080010464 // DevCamDebug metadata result_keys zzHDR
10465 DEVCAMDEBUG_AEC_L_REAL_GAIN,
10466 DEVCAMDEBUG_AEC_L_LINECOUNT,
10467 DEVCAMDEBUG_AEC_S_REAL_GAIN,
10468 DEVCAMDEBUG_AEC_S_LINECOUNT,
10469 DEVCAMDEBUG_AEC_HDR_SENSITIVITY_RATIO,
10470 DEVCAMDEBUG_AEC_HDR_EXP_TIME_RATIO,
10471 // DevCamDebug metadata result_keys ADRC
10472 DEVCAMDEBUG_AEC_TOTAL_DRC_GAIN,
10473 DEVCAMDEBUG_AEC_COLOR_DRC_GAIN,
10474 DEVCAMDEBUG_AEC_GTM_RATIO,
10475 DEVCAMDEBUG_AEC_LTM_RATIO,
10476 DEVCAMDEBUG_AEC_LA_RATIO,
10477 DEVCAMDEBUG_AEC_GAMMA_RATIO,
Samuel Habdf4fac2017-07-28 17:21:18 -070010478 // DevCamDebug metadata result_keys AEC MOTION
10479 DEVCAMDEBUG_AEC_CAMERA_MOTION_DX,
10480 DEVCAMDEBUG_AEC_CAMERA_MOTION_DY,
10481 DEVCAMDEBUG_AEC_SUBJECT_MOTION,
Samuel Ha68ba5172016-12-15 18:41:12 -080010482 // DevCamDebug metadata result_keys AWB
10483 DEVCAMDEBUG_AWB_R_GAIN,
10484 DEVCAMDEBUG_AWB_G_GAIN,
10485 DEVCAMDEBUG_AWB_B_GAIN,
10486 DEVCAMDEBUG_AWB_CCT,
10487 DEVCAMDEBUG_AWB_DECISION,
10488 /* DevCamDebug metadata end */
Shuzhen Wang14415f52016-11-16 18:26:18 -080010489 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE,
10490 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS,
10491 NEXUS_EXPERIMENTAL_2017_HISTOGRAM,
Shuzhen Wangf1b4ddc2017-04-10 18:22:11 -070010492 NEXUS_EXPERIMENTAL_2017_AF_REGIONS_CONFIDENCE,
Emilian Peeve91e9ae2017-09-18 14:40:55 +010010493 NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER,
Shuzhen Wangc89c77e2017-08-07 15:50:12 -070010494 NEXUS_EXPERIMENTAL_2017_EXP_TIME_BOOST,
Shuzhen Wang3569d4a2017-09-04 19:10:28 -070010495 NEXUS_EXPERIMENTAL_2017_SCENE_DISTANCE,
Emilian Peeve91e9ae2017-09-18 14:40:55 +010010496 NEXUS_EXPERIMENTAL_2017_OIS_FRAME_TIMESTAMP_VSYNC,
10497 NEXUS_EXPERIMENTAL_2017_OIS_FRAME_TIMESTAMP_BOOTTIME,
10498 NEXUS_EXPERIMENTAL_2017_OIS_TIMESTAMPS_BOOTTIME,
10499 NEXUS_EXPERIMENTAL_2017_OIS_SHIFT_X,
10500 NEXUS_EXPERIMENTAL_2017_OIS_SHIFT_Y,
10501 NEXUS_EXPERIMENTAL_2017_OIS_SHIFT_PIXEL_X,
10502 NEXUS_EXPERIMENTAL_2017_OIS_SHIFT_PIXEL_Y
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010503 };
10504
Thierry Strudel3d639192016-09-09 11:52:26 -070010505 size_t result_keys_cnt =
10506 sizeof(result_keys_basic)/sizeof(result_keys_basic[0]);
10507
10508 Vector<int32_t> available_result_keys;
10509 available_result_keys.appendArray(result_keys_basic, result_keys_cnt);
10510 if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
10511 available_result_keys.add(ANDROID_CONTROL_AF_REGIONS);
10512 }
10513 if (CAM_SENSOR_RAW == gCamCapability[cameraId]->sensor_type.sens_type) {
10514 available_result_keys.add(ANDROID_SENSOR_NOISE_PROFILE);
10515 available_result_keys.add(ANDROID_SENSOR_GREEN_SPLIT);
10516 }
10517 if (supportedFaceDetectMode == 1) {
10518 available_result_keys.add(ANDROID_STATISTICS_FACE_RECTANGLES);
10519 available_result_keys.add(ANDROID_STATISTICS_FACE_SCORES);
10520 } else if ((supportedFaceDetectMode == 2) ||
10521 (supportedFaceDetectMode == 3)) {
10522 available_result_keys.add(ANDROID_STATISTICS_FACE_IDS);
10523 available_result_keys.add(ANDROID_STATISTICS_FACE_LANDMARKS);
10524 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010525#ifndef USE_HAL_3_3
Shuzhen Wanga1bc9de2017-09-14 16:54:02 -070010526 {
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010527 available_result_keys.add(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL);
10528 available_result_keys.add(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL);
10529 }
10530#endif
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010531
10532 if (gExposeEnableZslKey) {
10533 available_result_keys.add(ANDROID_CONTROL_ENABLE_ZSL);
Chien-Yu Chendaf68892017-08-25 12:56:40 -070010534 available_result_keys.add(NEXUS_EXPERIMENTAL_2017_NEXT_STILL_INTENT_REQUEST_READY);
Chien-Yu Chen0a921f92017-08-27 17:25:33 -070010535 available_result_keys.add(NEXUS_EXPERIMENTAL_2017_POSTVIEW_CONFIG);
10536 available_result_keys.add(NEXUS_EXPERIMENTAL_2017_POSTVIEW_DATA);
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010537 }
10538
Thierry Strudel3d639192016-09-09 11:52:26 -070010539 staticInfo.update(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
10540 available_result_keys.array(), available_result_keys.size());
10541
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010542 int32_t characteristics_keys_basic[] = {ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
Thierry Strudel3d639192016-09-09 11:52:26 -070010543 ANDROID_CONTROL_AE_AVAILABLE_MODES, ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
10544 ANDROID_CONTROL_AE_COMPENSATION_RANGE, ANDROID_CONTROL_AE_COMPENSATION_STEP,
10545 ANDROID_CONTROL_AF_AVAILABLE_MODES, ANDROID_CONTROL_AVAILABLE_EFFECTS,
10546 ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
10547 ANDROID_SCALER_CROPPING_TYPE,
10548 ANDROID_SYNC_MAX_LATENCY,
10549 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
10550 ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
10551 ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
10552 ANDROID_CONTROL_AWB_AVAILABLE_MODES, ANDROID_CONTROL_MAX_REGIONS,
10553 ANDROID_CONTROL_SCENE_MODE_OVERRIDES,ANDROID_FLASH_INFO_AVAILABLE,
10554 ANDROID_FLASH_INFO_CHARGE_DURATION, ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
10555 ANDROID_JPEG_MAX_SIZE, ANDROID_LENS_INFO_AVAILABLE_APERTURES,
10556 ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
10557 ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
10558 ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
10559 ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE, ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
10560 ANDROID_LENS_INFO_SHADING_MAP_SIZE, ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
10561 ANDROID_LENS_FACING,
10562 ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS, ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
10563 ANDROID_REQUEST_PIPELINE_MAX_DEPTH, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
10564 ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
10565 ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
10566 ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
10567 ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
10568 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
10569 /*ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,*/
10570 ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS, ANDROID_SENSOR_FORWARD_MATRIX1,
10571 ANDROID_SENSOR_REFERENCE_ILLUMINANT1, ANDROID_SENSOR_REFERENCE_ILLUMINANT2,
10572 ANDROID_SENSOR_FORWARD_MATRIX2, ANDROID_SENSOR_COLOR_TRANSFORM1,
10573 ANDROID_SENSOR_COLOR_TRANSFORM2, ANDROID_SENSOR_CALIBRATION_TRANSFORM1,
10574 ANDROID_SENSOR_CALIBRATION_TRANSFORM2, ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
10575 ANDROID_SENSOR_INFO_SENSITIVITY_RANGE, ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
10576 ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE, ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
10577 ANDROID_SENSOR_INFO_PHYSICAL_SIZE, ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
10578 ANDROID_SENSOR_INFO_WHITE_LEVEL, ANDROID_SENSOR_BASE_GAIN_FACTOR,
10579 ANDROID_SENSOR_BLACK_LEVEL_PATTERN, ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
10580 ANDROID_SENSOR_ORIENTATION, ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
10581 ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
Thierry Strudel54dc9782017-02-15 12:12:10 -080010582 ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
Thierry Strudel3d639192016-09-09 11:52:26 -070010583 ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
10584 ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE, ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
10585 ANDROID_EDGE_AVAILABLE_EDGE_MODES,
10586 ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
10587 ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
10588 ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
10589 ANDROID_TONEMAP_MAX_CURVE_POINTS,
10590 ANDROID_CONTROL_AVAILABLE_MODES,
10591 ANDROID_CONTROL_AE_LOCK_AVAILABLE,
10592 ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
10593 ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
10594 ANDROID_SHADING_AVAILABLE_MODES,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010595 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
10596#ifndef USE_HAL_3_3
10597 ANDROID_SENSOR_OPAQUE_RAW_SIZE,
10598 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
10599#endif
Emilian Peeve91e9ae2017-09-18 14:40:55 +010010600 QCAMERA3_OPAQUE_RAW_FORMAT, QCAMERA3_EXP_TIME_RANGE,
10601 QCAMERA3_SATURATION_RANGE, QCAMERA3_SENSOR_IS_MONO_ONLY,
10602 QCAMERA3_DUALCAM_CALIB_META_DATA_BLOB,
10603 QCAMERA3_SHARPNESS_RANGE,
10604 QCAMERA3_HISTOGRAM_BUCKETS, QCAMERA3_HISTOGRAM_MAX_COUNT,
10605 QCAMERA3_STATS_BSGC_AVAILABLE
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010606 };
10607
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010608 available_characteristics_keys.appendArray(characteristics_keys_basic,
10609 sizeof(characteristics_keys_basic)/sizeof(int32_t));
10610#ifndef USE_HAL_3_3
10611 if (hasBlackRegions) {
10612 available_characteristics_keys.add(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS);
10613 }
10614#endif
Emilian Peev0f3c3162017-03-15 12:57:46 +000010615
10616 if (0 <= indexPD) {
10617 int32_t depthKeys[] = {
10618 ANDROID_DEPTH_MAX_DEPTH_SAMPLES,
10619 ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
10620 ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS,
10621 ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS,
10622 ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE
10623 };
10624 available_characteristics_keys.appendArray(depthKeys,
10625 sizeof(depthKeys) / sizeof(depthKeys[0]));
10626 }
10627
Thierry Strudel3d639192016-09-09 11:52:26 -070010628 /*available stall durations depend on the hw + sw and will be different for different devices */
10629 /*have to add for raw after implementation*/
10630 int32_t stall_formats[] = {HAL_PIXEL_FORMAT_BLOB, ANDROID_SCALER_AVAILABLE_FORMATS_RAW16};
10631 size_t stall_formats_count = sizeof(stall_formats)/sizeof(int32_t);
10632
10633 Vector<int64_t> available_stall_durations;
10634 for (uint32_t j = 0; j < stall_formats_count; j++) {
10635 if (stall_formats[j] == HAL_PIXEL_FORMAT_BLOB) {
10636 for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
10637 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
10638 available_stall_durations.add(stall_formats[j]);
10639 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
10640 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
10641 available_stall_durations.add(gCamCapability[cameraId]->jpeg_stall_durations[i]);
10642 }
10643 } else {
10644 for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
10645 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10646 available_stall_durations.add(stall_formats[j]);
10647 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
10648 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
10649 available_stall_durations.add(gCamCapability[cameraId]->raw16_stall_durations[i]);
10650 }
10651 }
10652 }
10653 staticInfo.update(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,
10654 available_stall_durations.array(),
10655 available_stall_durations.size());
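    // Sketch of the resulting metadata layout (derived from the loop above): the
    // vector is packed as (format, width, height, stall_duration) quadruples,
    // e.g. { HAL_PIXEL_FORMAT_BLOB, 4032, 3024, jpeg_stall_durations[0], ... },
    // where 4032x3024 stands in for whatever picture sizes the table advertises.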
10656
10657 //QCAMERA3_OPAQUE_RAW
10658 uint8_t raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
10659 cam_format_t fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
10660 switch (gCamCapability[cameraId]->opaque_raw_fmt) {
10661 case LEGACY_RAW:
10662 if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
10663 fmt = CAM_FORMAT_BAYER_QCOM_RAW_8BPP_GBRG;
10664 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
10665 fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
10666 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
10667 fmt = CAM_FORMAT_BAYER_QCOM_RAW_12BPP_GBRG;
10668 raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
10669 break;
10670 case MIPI_RAW:
10671 if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
10672 fmt = CAM_FORMAT_BAYER_MIPI_RAW_8BPP_GBRG;
10673 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
10674 fmt = CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG;
10675 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
10676 fmt = CAM_FORMAT_BAYER_MIPI_RAW_12BPP_GBRG;
10677 raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_MIPI;
10678 break;
10679 default:
10680 LOGE("unknown opaque_raw_format %d",
10681 gCamCapability[cameraId]->opaque_raw_fmt);
10682 break;
10683 }
10684 staticInfo.update(QCAMERA3_OPAQUE_RAW_FORMAT, &raw_format, 1);
10685
10686 Vector<int32_t> strides;
10687 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
10688 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10689 cam_stream_buf_plane_info_t buf_planes;
10690 strides.add(gCamCapability[cameraId]->raw_dim[i].width);
10691 strides.add(gCamCapability[cameraId]->raw_dim[i].height);
10692 mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
10693 &gCamCapability[cameraId]->padding_info, &buf_planes);
10694 strides.add(buf_planes.plane_info.mp[0].stride);
10695 }
Emilian Peeve91e9ae2017-09-18 14:40:55 +010010696
10697 if (!strides.isEmpty()) {
10698 staticInfo.update(QCAMERA3_OPAQUE_RAW_STRIDES, strides.array(),
10699 strides.size());
10700 available_characteristics_keys.add(QCAMERA3_OPAQUE_RAW_STRIDES);
10701 }
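    // Note on the layout above: strides packs one (width, height, stride) triple
    // per supported RAW dimension, with the stride taken from the calculated
    // plane info of plane 0.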
Thierry Strudel3d639192016-09-09 11:52:26 -070010702
Mansoor Aftab58465fa2017-01-26 15:02:44 -080010703 //TBD: remove the following line once backend advertises zzHDR in feature mask
10704 gCamCapability[cameraId]->qcom_supported_feature_mask |= CAM_QCOM_FEATURE_ZIGZAG_HDR;
Thierry Strudel04e026f2016-10-10 11:27:36 -070010705 //Video HDR default
10706 if ((gCamCapability[cameraId]->qcom_supported_feature_mask) &
10707 (CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR |
Mansoor Aftab58465fa2017-01-26 15:02:44 -080010708 CAM_QCOM_FEATURE_ZIGZAG_HDR | CAM_QCOM_FEATURE_SENSOR_HDR)) {
Thierry Strudel04e026f2016-10-10 11:27:36 -070010709 int32_t vhdr_mode[] = {
10710 QCAMERA3_VIDEO_HDR_MODE_OFF,
10711 QCAMERA3_VIDEO_HDR_MODE_ON};
10712
10713 size_t vhdr_mode_count = sizeof(vhdr_mode) / sizeof(int32_t);
10714 staticInfo.update(QCAMERA3_AVAILABLE_VIDEO_HDR_MODES,
10715 vhdr_mode, vhdr_mode_count);
Emilian Peeve91e9ae2017-09-18 14:40:55 +010010716 available_characteristics_keys.add(QCAMERA3_AVAILABLE_VIDEO_HDR_MODES);
Thierry Strudel04e026f2016-10-10 11:27:36 -070010717 }
10718
Thierry Strudel3d639192016-09-09 11:52:26 -070010719 staticInfo.update(QCAMERA3_DUALCAM_CALIB_META_DATA_BLOB,
10720 (const uint8_t*)&gCamCapability[cameraId]->related_cam_calibration,
10721 sizeof(gCamCapability[cameraId]->related_cam_calibration));
10722
10723 uint8_t isMonoOnly =
10724 (gCamCapability[cameraId]->color_arrangement == CAM_FILTER_ARRANGEMENT_Y);
10725 staticInfo.update(QCAMERA3_SENSOR_IS_MONO_ONLY,
10726 &isMonoOnly, 1);
10727
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010728#ifndef USE_HAL_3_3
10729 Vector<int32_t> opaque_size;
10730 for (size_t j = 0; j < scalar_formats_count; j++) {
10731 if (scalar_formats[j] == ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE) {
10732 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
10733 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10734 cam_stream_buf_plane_info_t buf_planes;
10735
10736 rc = mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
10737 &gCamCapability[cameraId]->padding_info, &buf_planes);
10738
10739 if (rc == 0) {
10740 opaque_size.add(gCamCapability[cameraId]->raw_dim[i].width);
10741 opaque_size.add(gCamCapability[cameraId]->raw_dim[i].height);
10742 opaque_size.add(buf_planes.plane_info.frame_len);
10743                } else {
10744 LOGE("raw frame calculation failed!");
10745 }
10746 }
10747 }
10748 }
10749
10750 if ((opaque_size.size() > 0) &&
10751 (opaque_size.size() % PER_CONFIGURATION_SIZE_3 == 0))
10752 staticInfo.update(ANDROID_SENSOR_OPAQUE_RAW_SIZE, opaque_size.array(), opaque_size.size());
10753 else
10754        LOGW("Warning: ANDROID_SENSOR_OPAQUE_RAW_SIZE is using rough estimation (2 bytes/pixel)");
10755#endif
10756
Thierry Strudel04e026f2016-10-10 11:27:36 -070010757 if (gCamCapability[cameraId]->supported_ir_mode_cnt > 0) {
10758 int32_t avail_ir_modes[CAM_IR_MODE_MAX];
10759 size = 0;
10760 count = CAM_IR_MODE_MAX;
10761 count = MIN(gCamCapability[cameraId]->supported_ir_mode_cnt, count);
10762 for (size_t i = 0; i < count; i++) {
10763 int val = lookupFwkName(IR_MODES_MAP, METADATA_MAP_SIZE(IR_MODES_MAP),
10764 gCamCapability[cameraId]->supported_ir_modes[i]);
10765 if (NAME_NOT_FOUND != val) {
10766 avail_ir_modes[size] = (int32_t)val;
10767 size++;
10768 }
10769 }
10770 staticInfo.update(QCAMERA3_IR_AVAILABLE_MODES,
10771 avail_ir_modes, size);
Emilian Peeve91e9ae2017-09-18 14:40:55 +010010772 available_characteristics_keys.add(QCAMERA3_IR_AVAILABLE_MODES);
Thierry Strudel04e026f2016-10-10 11:27:36 -070010773 }
10774
Thierry Strudel295a0ca2016-11-03 18:38:47 -070010775 if (gCamCapability[cameraId]->supported_instant_aec_modes_cnt > 0) {
10776 int32_t available_instant_aec_modes[CAM_AEC_CONVERGENCE_MAX];
10777 size = 0;
10778 count = CAM_AEC_CONVERGENCE_MAX;
10779 count = MIN(gCamCapability[cameraId]->supported_instant_aec_modes_cnt, count);
10780 for (size_t i = 0; i < count; i++) {
10781 int val = lookupFwkName(INSTANT_AEC_MODES_MAP, METADATA_MAP_SIZE(INSTANT_AEC_MODES_MAP),
10782 gCamCapability[cameraId]->supported_instant_aec_modes[i]);
10783 if (NAME_NOT_FOUND != val) {
10784 available_instant_aec_modes[size] = (int32_t)val;
10785 size++;
10786 }
10787 }
10788 staticInfo.update(QCAMERA3_INSTANT_AEC_AVAILABLE_MODES,
10789 available_instant_aec_modes, size);
Emilian Peeve91e9ae2017-09-18 14:40:55 +010010790 available_characteristics_keys.add(QCAMERA3_INSTANT_AEC_AVAILABLE_MODES);
Thierry Strudel295a0ca2016-11-03 18:38:47 -070010791 }
10792
Thierry Strudel54dc9782017-02-15 12:12:10 -080010793 int32_t sharpness_range[] = {
10794 gCamCapability[cameraId]->sharpness_ctrl.min_value,
10795 gCamCapability[cameraId]->sharpness_ctrl.max_value};
10796 staticInfo.update(QCAMERA3_SHARPNESS_RANGE, sharpness_range, 2);
10797
10798 if (gCamCapability[cameraId]->supported_binning_correction_mode_cnt > 0) {
10799 int32_t avail_binning_modes[CAM_BINNING_CORRECTION_MODE_MAX];
10800 size = 0;
10801 count = CAM_BINNING_CORRECTION_MODE_MAX;
10802 count = MIN(gCamCapability[cameraId]->supported_binning_correction_mode_cnt, count);
10803 for (size_t i = 0; i < count; i++) {
10804 int val = lookupFwkName(BINNING_CORRECTION_MODES_MAP,
10805 METADATA_MAP_SIZE(BINNING_CORRECTION_MODES_MAP),
10806 gCamCapability[cameraId]->supported_binning_modes[i]);
10807 if (NAME_NOT_FOUND != val) {
10808 avail_binning_modes[size] = (int32_t)val;
10809 size++;
10810 }
10811 }
10812 staticInfo.update(QCAMERA3_AVAILABLE_BINNING_CORRECTION_MODES,
10813 avail_binning_modes, size);
Emilian Peeve91e9ae2017-09-18 14:40:55 +010010814 available_characteristics_keys.add(QCAMERA3_AVAILABLE_BINNING_CORRECTION_MODES);
Thierry Strudel54dc9782017-02-15 12:12:10 -080010815 }
10816
10817 if (gCamCapability[cameraId]->supported_aec_modes_cnt > 0) {
10818 int32_t available_aec_modes[CAM_AEC_MODE_MAX];
10819 size = 0;
10820 count = MIN(gCamCapability[cameraId]->supported_aec_modes_cnt, CAM_AEC_MODE_MAX);
10821 for (size_t i = 0; i < count; i++) {
10822 int32_t val = lookupFwkName(AEC_MODES_MAP, METADATA_MAP_SIZE(AEC_MODES_MAP),
10823 gCamCapability[cameraId]->supported_aec_modes[i]);
10824 if (NAME_NOT_FOUND != val)
10825 available_aec_modes[size++] = val;
10826 }
10827 staticInfo.update(QCAMERA3_EXPOSURE_METER_AVAILABLE_MODES,
10828 available_aec_modes, size);
Emilian Peeve91e9ae2017-09-18 14:40:55 +010010829 available_characteristics_keys.add(QCAMERA3_EXPOSURE_METER_AVAILABLE_MODES);
Thierry Strudel54dc9782017-02-15 12:12:10 -080010830 }
10831
10832 if (gCamCapability[cameraId]->supported_iso_modes_cnt > 0) {
10833 int32_t available_iso_modes[CAM_ISO_MODE_MAX];
10834 size = 0;
10835 count = MIN(gCamCapability[cameraId]->supported_iso_modes_cnt, CAM_ISO_MODE_MAX);
10836 for (size_t i = 0; i < count; i++) {
10837 int32_t val = lookupFwkName(ISO_MODES_MAP, METADATA_MAP_SIZE(ISO_MODES_MAP),
10838 gCamCapability[cameraId]->supported_iso_modes[i]);
10839 if (NAME_NOT_FOUND != val)
10840 available_iso_modes[size++] = val;
10841 }
10842 staticInfo.update(QCAMERA3_ISO_AVAILABLE_MODES,
10843 available_iso_modes, size);
Emilian Peeve91e9ae2017-09-18 14:40:55 +010010844 available_characteristics_keys.add(QCAMERA3_ISO_AVAILABLE_MODES);
Thierry Strudel54dc9782017-02-15 12:12:10 -080010845 }
10846
10847 int64_t available_exp_time_range[EXPOSURE_TIME_RANGE_CNT];
Jason Lee805955a2017-05-04 10:29:14 -070010848 for (size_t i = 0; i < EXPOSURE_TIME_RANGE_CNT; i++)
Thierry Strudel54dc9782017-02-15 12:12:10 -080010849 available_exp_time_range[i] = gCamCapability[cameraId]->exposure_time_range[i];
10850 staticInfo.update(QCAMERA3_EXP_TIME_RANGE,
10851 available_exp_time_range, EXPOSURE_TIME_RANGE_CNT);
10852
10853 int32_t available_saturation_range[4];
10854 available_saturation_range[0] = gCamCapability[cameraId]->saturation_ctrl.min_value;
10855 available_saturation_range[1] = gCamCapability[cameraId]->saturation_ctrl.max_value;
10856 available_saturation_range[2] = gCamCapability[cameraId]->saturation_ctrl.def_value;
10857 available_saturation_range[3] = gCamCapability[cameraId]->saturation_ctrl.step;
10858 staticInfo.update(QCAMERA3_SATURATION_RANGE,
10859 available_saturation_range, 4);
10860
10861 uint8_t is_hdr_values[2];
10862 is_hdr_values[0] = 0;
10863 is_hdr_values[1] = 1;
10864 staticInfo.update(QCAMERA3_STATS_IS_HDR_SCENE_VALUES,
10865 is_hdr_values, 2);
10866
10867 float is_hdr_confidence_range[2];
10868 is_hdr_confidence_range[0] = 0.0;
10869 is_hdr_confidence_range[1] = 1.0;
10870 staticInfo.update(QCAMERA3_STATS_IS_HDR_SCENE_CONFIDENCE_RANGE,
10871 is_hdr_confidence_range, 2);
10872
Emilian Peev0a972ef2017-03-16 10:25:53 +000010873 size_t eepromLength = strnlen(
10874 reinterpret_cast<const char *>(
10875 gCamCapability[cameraId]->eeprom_version_info),
10876 sizeof(gCamCapability[cameraId]->eeprom_version_info));
10877 if (0 < eepromLength) {
Zhijun Hea557c4c2017-03-16 18:37:53 -070010878 char easelInfo[] = ",E:N";
10879 char *eepromInfo = reinterpret_cast<char *>(gCamCapability[cameraId]->eeprom_version_info);
10880 if (eepromLength + sizeof(easelInfo) < MAX_EEPROM_VERSION_INFO_LEN) {
10881 eepromLength += sizeof(easelInfo);
Chien-Yu Chend77a5462017-06-02 18:00:38 -070010882 strlcat(eepromInfo, ((gEaselManagerClient != nullptr &&
Arnd Geis082a4d72017-08-24 10:33:07 -070010883 gEaselManagerClient->isEaselPresentOnDevice()) ? ",E-ver" : ",E:N"),
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010884 MAX_EEPROM_VERSION_INFO_LEN);
Zhijun Hea557c4c2017-03-16 18:37:53 -070010885 }
Emilian Peev0a972ef2017-03-16 10:25:53 +000010886 staticInfo.update(NEXUS_EXPERIMENTAL_2017_EEPROM_VERSION_INFO,
10887 gCamCapability[cameraId]->eeprom_version_info, eepromLength);
Emilian Peeve91e9ae2017-09-18 14:40:55 +010010888 available_characteristics_keys.add(NEXUS_EXPERIMENTAL_2017_EEPROM_VERSION_INFO);
Emilian Peev0a972ef2017-03-16 10:25:53 +000010889 }
10890
Emilian Peeve91e9ae2017-09-18 14:40:55 +010010891 staticInfo.update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,
10892 available_characteristics_keys.array(),
10893 available_characteristics_keys.size());
10894
Thierry Strudel3d639192016-09-09 11:52:26 -070010895 gStaticMetadata[cameraId] = staticInfo.release();
10896 return rc;
10897}
10898
10899/*===========================================================================
10900 * FUNCTION : makeTable
10901 *
10902 * DESCRIPTION: make a table of sizes
10903 *
10904 * PARAMETERS :
10905 *   @dimTable / @size       : source dimension table and its valid entry count
10906 *   @max_size / @sizeTable  : capacity limit and the flattened (width, height) output
10907 *==========================================================================*/
10908void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, size_t size,
10909 size_t max_size, int32_t *sizeTable)
10910{
10911 size_t j = 0;
10912 if (size > max_size) {
10913 size = max_size;
10914 }
10915 for (size_t i = 0; i < size; i++) {
10916 sizeTable[j] = dimTable[i].width;
10917 sizeTable[j+1] = dimTable[i].height;
10918 j+=2;
10919 }
10920}
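// Illustrative use of makeTable() (values are hypothetical): a dimTable of
// {{4032, 3024}, {1920, 1080}} with size 2 produces
//     sizeTable = {4032, 3024, 1920, 1080}
// i.e. the dimensions flattened into consecutive width/height pairs.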
10921
10922/*===========================================================================
10923 * FUNCTION : makeFPSTable
10924 *
10925 * DESCRIPTION: make a table of fps ranges
10926 *
10927 * PARAMETERS :
10928 *   @fpsTable/@size : source fps-range table and count; @max_size/@fpsRangesTable : capacity and flattened (min, max) output
10929 *==========================================================================*/
10930void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, size_t size,
10931 size_t max_size, int32_t *fpsRangesTable)
10932{
10933 size_t j = 0;
10934 if (size > max_size) {
10935 size = max_size;
10936 }
10937 for (size_t i = 0; i < size; i++) {
10938 fpsRangesTable[j] = (int32_t)fpsTable[i].min_fps;
10939 fpsRangesTable[j+1] = (int32_t)fpsTable[i].max_fps;
10940 j+=2;
10941 }
10942}
10943
10944/*===========================================================================
10945 * FUNCTION : makeOverridesList
10946 *
10947 * DESCRIPTION: make a list of scene mode overrides
10948 *
10949 * PARAMETERS :
10950 *   @overridesTable / @size / @max_size : daemon override table, its entry count and capacity
10951 *   @overridesList / @supported_indexes / @camera_id : output triplets, framework scene-mode indexes, camera id
10952 *==========================================================================*/
10953void QCamera3HardwareInterface::makeOverridesList(
10954 cam_scene_mode_overrides_t* overridesTable, size_t size, size_t max_size,
10955 uint8_t *overridesList, uint8_t *supported_indexes, uint32_t camera_id)
10956{
10957    /* The daemon gives a list of overrides for all scene modes.
10958       However, we should send the framework only the overrides for the
10959       scene modes that it supports. */
10960 size_t j = 0;
10961 if (size > max_size) {
10962 size = max_size;
10963 }
10964 size_t focus_count = CAM_FOCUS_MODE_MAX;
10965 focus_count = MIN(gCamCapability[camera_id]->supported_focus_modes_cnt,
10966 focus_count);
10967 for (size_t i = 0; i < size; i++) {
10968 bool supt = false;
10969 size_t index = supported_indexes[i];
10970 overridesList[j] = gCamCapability[camera_id]->flash_available ?
10971 ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH : ANDROID_CONTROL_AE_MODE_ON;
10972 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
10973 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
10974 overridesTable[index].awb_mode);
10975 if (NAME_NOT_FOUND != val) {
10976 overridesList[j+1] = (uint8_t)val;
10977 }
10978 uint8_t focus_override = overridesTable[index].af_mode;
10979 for (size_t k = 0; k < focus_count; k++) {
10980 if (gCamCapability[camera_id]->supported_focus_modes[k] == focus_override) {
10981 supt = true;
10982 break;
10983 }
10984 }
10985 if (supt) {
10986 val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
10987 focus_override);
10988 if (NAME_NOT_FOUND != val) {
10989 overridesList[j+2] = (uint8_t)val;
10990 }
10991 } else {
10992 overridesList[j+2] = ANDROID_CONTROL_AF_MODE_OFF;
10993 }
10994 j+=3;
10995 }
10996}
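// Layout note (derived from the loop above): overridesList holds one
// (AE mode, AWB mode, AF mode) triplet per supported scene mode, which is the
// packing the framework expects for ANDROID_CONTROL_SCENE_MODE_OVERRIDES.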
10997
10998/*===========================================================================
10999 * FUNCTION : filterJpegSizes
11000 *
11001 * DESCRIPTION: Returns the supported JPEG sizes, keeping only the processed sizes
11002 * that are at least as large as the active array size divided by the downscale factor
11003 *
11004 * PARAMETERS :
11005 *   @jpegSizes/@processedSizes/@processedSizesCnt : output list, input list and its count; @maxCount/@active_array_size/@downscale_factor : limits used for filtering
11006 * RETURN : length of jpegSizes array
11007 *==========================================================================*/
11008
11009size_t QCamera3HardwareInterface::filterJpegSizes(int32_t *jpegSizes, int32_t *processedSizes,
11010 size_t processedSizesCnt, size_t maxCount, cam_rect_t active_array_size,
11011 uint8_t downscale_factor)
11012{
11013 if (0 == downscale_factor) {
11014 downscale_factor = 1;
11015 }
11016
11017 int32_t min_width = active_array_size.width / downscale_factor;
11018 int32_t min_height = active_array_size.height / downscale_factor;
11019 size_t jpegSizesCnt = 0;
11020 if (processedSizesCnt > maxCount) {
11021 processedSizesCnt = maxCount;
11022 }
11023 for (size_t i = 0; i < processedSizesCnt; i+=2) {
11024 if (processedSizes[i] >= min_width && processedSizes[i+1] >= min_height) {
11025 jpegSizes[jpegSizesCnt] = processedSizes[i];
11026 jpegSizes[jpegSizesCnt+1] = processedSizes[i+1];
11027 jpegSizesCnt += 2;
11028 }
11029 }
11030 return jpegSizesCnt;
11031}
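// Worked example for filterJpegSizes() (numbers are hypothetical): with an
// active array of 4032x3024 and a downscale_factor of 4, min_width/min_height
// become 1008/756, so a 640x480 processed size is filtered out while
// 1920x1080 remains in the advertised JPEG sizes.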
11032
11033/*===========================================================================
11034 * FUNCTION : computeNoiseModelEntryS
11035 *
11036 * DESCRIPTION: function to map a given sensitivity to the S noise
11037 * model parameters in the DNG noise model.
11038 *
11039 * PARAMETERS : sens : the sensor sensitivity
11040 *
11041 * RETURN : S (sensor amplification) noise
11042 *
11043 *==========================================================================*/
11044double QCamera3HardwareInterface::computeNoiseModelEntryS(int32_t sens) {
11045 double s = gCamCapability[mCameraId]->gradient_S * sens +
11046 gCamCapability[mCameraId]->offset_S;
11047 return ((s < 0.0) ? 0.0 : s);
11048}
11049
11050/*===========================================================================
11051 * FUNCTION : computeNoiseModelEntryO
11052 *
11053 * DESCRIPTION: function to map a given sensitivity to the O noise
11054 * model parameters in the DNG noise model.
11055 *
11056 * PARAMETERS : sens : the sensor sensitivity
11057 *
11058 * RETURN : O (sensor readout) noise
11059 *
11060 *==========================================================================*/
11061double QCamera3HardwareInterface::computeNoiseModelEntryO(int32_t sens) {
11062 int32_t max_analog_sens = gCamCapability[mCameraId]->max_analog_sensitivity;
11063 double digital_gain = (1.0 * sens / max_analog_sens) < 1.0 ?
11064 1.0 : (1.0 * sens / max_analog_sens);
11065 double o = gCamCapability[mCameraId]->gradient_O * sens * sens +
11066 gCamCapability[mCameraId]->offset_O * digital_gain * digital_gain;
11067 return ((o < 0.0) ? 0.0 : o);
11068}
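// How S and O are meant to be consumed (a sketch, assuming the standard DNG noise
// model used by ANDROID_SENSOR_NOISE_PROFILE): for a normalized pixel value x the
// noise is approximately N(x) = sqrt(S * x + O), so per capture the HAL can report
// one (S, O) pair per color channel, e.g.
//     double profile[2];
//     profile[0] = computeNoiseModelEntryS(sensitivity);  // S term
//     profile[1] = computeNoiseModelEntryO(sensitivity);  // O term
// The per-channel fan-out and the exact normalization are assumptions here, not
// something this file defines.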
11069
11070/*===========================================================================
11071 * FUNCTION : getSensorSensitivity
11072 *
11073 * DESCRIPTION: convert iso_mode to an integer value
11074 *
11075 * PARAMETERS : iso_mode : the iso_mode supported by sensor
11076 *
11077 * RETURN : sensitivity supported by sensor
11078 *
11079 *==========================================================================*/
11080int32_t QCamera3HardwareInterface::getSensorSensitivity(int32_t iso_mode)
11081{
11082 int32_t sensitivity;
11083
11084 switch (iso_mode) {
11085 case CAM_ISO_MODE_100:
11086 sensitivity = 100;
11087 break;
11088 case CAM_ISO_MODE_200:
11089 sensitivity = 200;
11090 break;
11091 case CAM_ISO_MODE_400:
11092 sensitivity = 400;
11093 break;
11094 case CAM_ISO_MODE_800:
11095 sensitivity = 800;
11096 break;
11097 case CAM_ISO_MODE_1600:
11098 sensitivity = 1600;
11099 break;
11100 default:
11101 sensitivity = -1;
11102 break;
11103 }
11104 return sensitivity;
11105}
11106
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080011107int QCamera3HardwareInterface::initHdrPlusClientLocked() {
Chien-Yu Chend77a5462017-06-02 18:00:38 -070011108 if (gEaselManagerClient == nullptr) {
11109 gEaselManagerClient = EaselManagerClient::create();
11110 if (gEaselManagerClient == nullptr) {
11111 ALOGE("%s: Failed to create Easel manager client.", __FUNCTION__);
11112 return -ENODEV;
11113 }
11114 }
11115
11116 if (!EaselManagerClientOpened && gEaselManagerClient->isEaselPresentOnDevice()) {
Chien-Yu Chen08309b32017-03-13 17:41:32 -070011117 // Check if HAL should not power on Easel even if it's present. This is to allow HDR+ tests
11118 // to connect to Easel.
11119 bool doNotpowerOnEasel =
11120 property_get_bool("camera.hdrplus.donotpoweroneasel", false);
11121
11122 if (doNotpowerOnEasel) {
Chien-Yu Chen08309b32017-03-13 17:41:32 -070011123 ALOGI("%s: Easel is present but not powered on.", __FUNCTION__);
11124 return OK;
11125 }
11126
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080011127 // If Easel is present, power on Easel and suspend it immediately.
Chien-Yu Chend77a5462017-06-02 18:00:38 -070011128 status_t res = gEaselManagerClient->open();
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080011129 if (res != OK) {
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070011130 ALOGE("%s: Opening Easel manager client failed: %s (%d)", __FUNCTION__, strerror(-res),
11131 res);
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080011132 return res;
11133 }
11134
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070011135 EaselManagerClientOpened = true;
11136
Chien-Yu Chend77a5462017-06-02 18:00:38 -070011137 res = gEaselManagerClient->suspend();
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080011138 if (res != OK) {
11139 ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__, strerror(-res), res);
11140 }
11141
Zhijun Hedaacd8a2017-09-14 12:07:42 -070011142 gEaselBypassOnly = !property_get_bool("persist.camera.hdrplus.enable", false);
Chien-Yu Chen509314b2017-04-07 15:27:55 -070011143 gEaselProfilingEnabled = property_get_bool("persist.camera.hdrplus.profiling", false);
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070011144 gEnableMultipleHdrplusOutputs =
11145 property_get_bool("persist.camera.hdrplus.multiple_outputs", false);
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070011146
11147 // Expose enableZsl key only when HDR+ mode is enabled.
11148 gExposeEnableZslKey = !gEaselBypassOnly;
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080011149 }
11150
11151 return OK;
11152}
11153
Thierry Strudel3d639192016-09-09 11:52:26 -070011154/*===========================================================================
11155 * FUNCTION : getCamInfo
11156 *
11157 * DESCRIPTION: query camera capabilities
11158 *
11159 * PARAMETERS :
11160 * @cameraId : camera Id
11161 * @info : camera info struct to be filled in with camera capabilities
11162 *
11163 * RETURN : int type of status
11164 * NO_ERROR -- success
11165 * non-zero failure code
11166 *==========================================================================*/
11167int QCamera3HardwareInterface::getCamInfo(uint32_t cameraId,
11168 struct camera_info *info)
11169{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080011170 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_GET_CAM_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -070011171 int rc = 0;
11172
11173 pthread_mutex_lock(&gCamLock);
Zhijun Hea557c4c2017-03-16 18:37:53 -070011174
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070011175 {
Chien-Yu Chen77ccd022017-06-23 12:00:36 -070011176 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070011177 rc = initHdrPlusClientLocked();
11178 if (rc != OK) {
11179 ALOGE("%s: initHdrPlusClientLocked failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
11180 pthread_mutex_unlock(&gCamLock);
11181 return rc;
11182 }
Zhijun Hea557c4c2017-03-16 18:37:53 -070011183 }
11184
Thierry Strudel3d639192016-09-09 11:52:26 -070011185 if (NULL == gCamCapability[cameraId]) {
11186 rc = initCapabilities(cameraId);
11187 if (rc < 0) {
11188 pthread_mutex_unlock(&gCamLock);
11189 return rc;
11190 }
11191 }
11192
11193 if (NULL == gStaticMetadata[cameraId]) {
11194 rc = initStaticMetadata(cameraId);
11195 if (rc < 0) {
11196 pthread_mutex_unlock(&gCamLock);
11197 return rc;
11198 }
11199 }
11200
11201 switch(gCamCapability[cameraId]->position) {
11202 case CAM_POSITION_BACK:
11203 case CAM_POSITION_BACK_AUX:
11204 info->facing = CAMERA_FACING_BACK;
11205 break;
11206
11207 case CAM_POSITION_FRONT:
11208 case CAM_POSITION_FRONT_AUX:
11209 info->facing = CAMERA_FACING_FRONT;
11210 break;
11211
11212 default:
11213 LOGE("Unknown position type %d for camera id:%d",
11214 gCamCapability[cameraId]->position, cameraId);
11215 rc = -1;
11216 break;
11217 }
11218
11219
11220 info->orientation = (int)gCamCapability[cameraId]->sensor_mount_angle;
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011221#ifndef USE_HAL_3_3
11222 info->device_version = CAMERA_DEVICE_API_VERSION_3_4;
11223#else
Thierry Strudel3d639192016-09-09 11:52:26 -070011224 info->device_version = CAMERA_DEVICE_API_VERSION_3_3;
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011225#endif
Thierry Strudel3d639192016-09-09 11:52:26 -070011226 info->static_camera_characteristics = gStaticMetadata[cameraId];
11227
11228 //For now assume both cameras can operate independently.
11229 info->conflicting_devices = NULL;
11230 info->conflicting_devices_length = 0;
11231
11232 //resource cost is 100 * MIN(1.0, m/M),
11233 //where m is throughput requirement with maximum stream configuration
11234 //and M is CPP maximum throughput.
11235 float max_fps = 0.0;
11236 for (uint32_t i = 0;
11237 i < gCamCapability[cameraId]->fps_ranges_tbl_cnt; i++) {
11238 if (max_fps < gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps)
11239 max_fps = gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps;
11240 }
11241 float ratio = 1.0 * MAX_PROCESSED_STREAMS *
11242 gCamCapability[cameraId]->active_array_size.width *
11243 gCamCapability[cameraId]->active_array_size.height * max_fps /
11244 gCamCapability[cameraId]->max_pixel_bandwidth;
11245 info->resource_cost = 100 * MIN(1.0, ratio);
11246 LOGI("camera %d resource cost is %d", cameraId,
11247 info->resource_cost);
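    // Worked example of the cost formula above (all numbers hypothetical): with two
    // processed streams, a 4000x3000 active array and a 30 fps maximum,
    // m = 2 * 4000 * 3000 * 30 = 720,000,000 pixels/s; if the CPP maximum
    // throughput M were 1,440,000,000 pixels/s, then ratio = 0.5 and
    // resource_cost = 50.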
11248
11249 pthread_mutex_unlock(&gCamLock);
11250 return rc;
11251}
11252
11253/*===========================================================================
11254 * FUNCTION : translateCapabilityToMetadata
11255 *
11256 * DESCRIPTION: translate the capability into camera_metadata_t
11257 *
11258 * PARAMETERS : type of the request
11259 *
11260 *
11261 * RETURN : success: camera_metadata_t*
11262 * failure: NULL
11263 *
11264 *==========================================================================*/
11265camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type)
11266{
11267 if (mDefaultMetadata[type] != NULL) {
11268 return mDefaultMetadata[type];
11269 }
11270 //first time we are handling this request
11271 //fill up the metadata structure using the wrapper class
11272 CameraMetadata settings;
11273 //translate from cam_capability_t to camera_metadata_tag_t
11274 static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
11275 settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
11276 int32_t defaultRequestID = 0;
11277 settings.update(ANDROID_REQUEST_ID, &defaultRequestID, 1);
11278
11279 /* OIS disable */
11280 char ois_prop[PROPERTY_VALUE_MAX];
11281 memset(ois_prop, 0, sizeof(ois_prop));
11282 property_get("persist.camera.ois.disable", ois_prop, "0");
11283 uint8_t ois_disable = (uint8_t)atoi(ois_prop);
11284
11285 /* Force video to use OIS */
11286 char videoOisProp[PROPERTY_VALUE_MAX];
11287 memset(videoOisProp, 0, sizeof(videoOisProp));
11288 property_get("persist.camera.ois.video", videoOisProp, "1");
11289 uint8_t forceVideoOis = (uint8_t)atoi(videoOisProp);
Shuzhen Wang19463d72016-03-08 11:09:52 -080011290
11291 // Hybrid AE enable/disable
11292 char hybrid_ae_prop[PROPERTY_VALUE_MAX];
11293 memset(hybrid_ae_prop, 0, sizeof(hybrid_ae_prop));
11294 property_get("persist.camera.hybrid_ae.enable", hybrid_ae_prop, "0");
Shuzhen Wang77b049a2017-08-30 12:24:36 -070011295 uint8_t hybrid_ae = (uint8_t)atoi(hybrid_ae_prop);
Shuzhen Wang19463d72016-03-08 11:09:52 -080011296
Thierry Strudel3d639192016-09-09 11:52:26 -070011297 uint8_t controlIntent = 0;
11298 uint8_t focusMode;
11299 uint8_t vsMode;
11300 uint8_t optStabMode;
11301 uint8_t cacMode;
11302 uint8_t edge_mode;
11303 uint8_t noise_red_mode;
11304 uint8_t tonemap_mode;
11305 bool highQualityModeEntryAvailable = FALSE;
11306 bool fastModeEntryAvailable = FALSE;
Shuzhen Wang14415f52016-11-16 18:26:18 -080011307 uint8_t histogramEnable = false;
Thierry Strudel3d639192016-09-09 11:52:26 -070011308 vsMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
11309 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Shuzhen Wang8f66c042016-08-17 14:50:26 -070011310 uint8_t shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
Shuzhen Wangcc386c52017-03-29 09:28:08 -070011311 uint8_t trackingAfTrigger = NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER_IDLE;
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070011312 uint8_t enableZsl = ANDROID_CONTROL_ENABLE_ZSL_FALSE;
Mansoor Aftabea39eba2017-01-26 14:58:25 -080011313
Thierry Strudel3d639192016-09-09 11:52:26 -070011314 switch (type) {
11315 case CAMERA3_TEMPLATE_PREVIEW:
11316 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
11317 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
11318 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11319 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11320 edge_mode = ANDROID_EDGE_MODE_FAST;
11321 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11322 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11323 break;
11324 case CAMERA3_TEMPLATE_STILL_CAPTURE:
11325 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
11326 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
11327 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11328 edge_mode = ANDROID_EDGE_MODE_HIGH_QUALITY;
11329 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY;
11330 tonemap_mode = ANDROID_TONEMAP_MODE_HIGH_QUALITY;
11331 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
11332 // Order of priority for default CAC is HIGH Quality -> FAST -> OFF
11333 for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
11334 if (gCamCapability[mCameraId]->aberration_modes[i] ==
11335 CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
11336 highQualityModeEntryAvailable = TRUE;
11337 } else if (gCamCapability[mCameraId]->aberration_modes[i] ==
11338 CAM_COLOR_CORRECTION_ABERRATION_FAST) {
11339 fastModeEntryAvailable = TRUE;
11340 }
11341 }
11342 if (highQualityModeEntryAvailable) {
11343 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY;
11344 } else if (fastModeEntryAvailable) {
11345 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11346 }
Shuzhen Wang8f66c042016-08-17 14:50:26 -070011347 if (CAM_SENSOR_RAW == gCamCapability[mCameraId]->sensor_type.sens_type) {
11348 shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON;
11349 }
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070011350 enableZsl = ANDROID_CONTROL_ENABLE_ZSL_TRUE;
Thierry Strudel3d639192016-09-09 11:52:26 -070011351 break;
11352 case CAMERA3_TEMPLATE_VIDEO_RECORD:
11353 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
11354 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
11355 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Thierry Strudel3d639192016-09-09 11:52:26 -070011356 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11357 edge_mode = ANDROID_EDGE_MODE_FAST;
11358 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11359 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11360 if (forceVideoOis)
11361 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11362 break;
11363 case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
11364 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
11365 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
11366 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Thierry Strudel3d639192016-09-09 11:52:26 -070011367 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11368 edge_mode = ANDROID_EDGE_MODE_FAST;
11369 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11370 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11371 if (forceVideoOis)
11372 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11373 break;
11374 case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
11375 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
11376 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
11377 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11378 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11379 edge_mode = ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG;
11380 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG;
11381 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11382 break;
11383 case CAMERA3_TEMPLATE_MANUAL:
11384 edge_mode = ANDROID_EDGE_MODE_FAST;
11385 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11386 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11387 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11388 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_MANUAL;
11389 focusMode = ANDROID_CONTROL_AF_MODE_OFF;
11390 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
11391 break;
11392 default:
11393 edge_mode = ANDROID_EDGE_MODE_FAST;
11394 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11395 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11396 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11397 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
11398 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
11399 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
11400 break;
11401 }
Thierry Strudel04e026f2016-10-10 11:27:36 -070011402 // Set CAC to OFF if the underlying device doesn't support it
11403 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
11404 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
11405 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011406 settings.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &cacMode, 1);
11407 settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
11408 settings.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vsMode, 1);
11409 if (gCamCapability[mCameraId]->supported_focus_modes_cnt == 1) {
11410 focusMode = ANDROID_CONTROL_AF_MODE_OFF;
11411 }
11412 settings.update(ANDROID_CONTROL_AF_MODE, &focusMode, 1);
Shuzhen Wang14415f52016-11-16 18:26:18 -080011413 settings.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE, &histogramEnable, 1);
Shuzhen Wangcc386c52017-03-29 09:28:08 -070011414 settings.update(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER, &trackingAfTrigger, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011415
11416 if (gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
11417 gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_ON)
11418 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11419 else if ((gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
11420 gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_OFF)
11421 || ois_disable)
11422 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
11423 settings.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &optStabMode, 1);
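// Note: when the sensor capability advertises exactly one OIS mode, that mode overrides
// the template default chosen above; ois_disable (assumed to be parsed from a persist
// property earlier in this file) forces OIS off regardless of the template.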
Shuzhen Wang8f66c042016-08-17 14:50:26 -070011424 settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &shadingmap_mode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011425
11426 settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
11427 &gCamCapability[mCameraId]->exposure_compensation_default, 1);
11428
11429 static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
11430 settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
11431
11432 static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
11433 settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
11434
11435 static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
11436 settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
11437
11438 static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
11439 settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
11440
11441 static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
11442 settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
11443
11444 static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;
11445 settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
11446
11447 static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
11448 settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
11449
11450 /*flash*/
11451 static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
11452 settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
11453
11454 static const uint8_t flashFiringLevel = CAM_FLASH_FIRING_LEVEL_4;
11455 settings.update(ANDROID_FLASH_FIRING_POWER,
11456 &flashFiringLevel, 1);
11457
11458 /* lens */
11459 float default_aperture = gCamCapability[mCameraId]->apertures[0];
11460 settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1);
11461
11462 if (gCamCapability[mCameraId]->filter_densities_count) {
11463 float default_filter_density = gCamCapability[mCameraId]->filter_densities[0];
11464 settings.update(ANDROID_LENS_FILTER_DENSITY, &default_filter_density,
11465 gCamCapability[mCameraId]->filter_densities_count);
11466 }
11467
11468 float default_focal_length = gCamCapability[mCameraId]->focal_length;
11469 settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1);
11470
Thierry Strudel3d639192016-09-09 11:52:26 -070011471 static const uint8_t demosaicMode = ANDROID_DEMOSAIC_MODE_FAST;
11472 settings.update(ANDROID_DEMOSAIC_MODE, &demosaicMode, 1);
11473
11474 static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
11475 settings.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
11476
11477 static const int32_t testpatternMode = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;
11478 settings.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &testpatternMode, 1);
11479
11480 /* face detection (default to OFF) */
11481 static const uint8_t faceDetectMode = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
11482 settings.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &faceDetectMode, 1);
11483
Thierry Strudel54dc9782017-02-15 12:12:10 -080011484 static const uint8_t histogramMode = QCAMERA3_HISTOGRAM_MODE_OFF;
11485 settings.update(QCAMERA3_HISTOGRAM_MODE, &histogramMode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011486
11487 static const uint8_t sharpnessMapMode = ANDROID_STATISTICS_SHARPNESS_MAP_MODE_OFF;
11488 settings.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &sharpnessMapMode, 1);
11489
11490 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
11491 settings.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
11492
Thierry Strudel3d639192016-09-09 11:52:26 -070011493
11494 static const uint8_t blackLevelLock = ANDROID_BLACK_LEVEL_LOCK_OFF;
11495 settings.update(ANDROID_BLACK_LEVEL_LOCK, &blackLevelLock, 1);
11496
11497 /* Exposure time (default to the minimum supported exposure time) */
11498 int64_t default_exposure_time = gCamCapability[mCameraId]->exposure_time_range[0];
11499 settings.update(ANDROID_SENSOR_EXPOSURE_TIME, &default_exposure_time, 1);
11500
11501 /* frame duration */
11502 static const int64_t default_frame_duration = NSEC_PER_33MSEC;
11503 settings.update(ANDROID_SENSOR_FRAME_DURATION, &default_frame_duration, 1);
11504
11505 /* sensitivity */
11506 static const int32_t default_sensitivity = 100;
11507 settings.update(ANDROID_SENSOR_SENSITIVITY, &default_sensitivity, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011508#ifndef USE_HAL_3_3
11509 static const int32_t default_isp_sensitivity =
11510 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
11511 settings.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &default_isp_sensitivity, 1);
11512#endif
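// A side note, assuming the Android metadata definition of POST_RAW_SENSITIVITY_BOOST:
// values are ISO-like with 100 meaning unity gain, so the ISP's minimum sensitivity used
// above is normally 100, i.e. no post-RAW boost by default.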
Thierry Strudel3d639192016-09-09 11:52:26 -070011513
11514 /*edge mode*/
11515 settings.update(ANDROID_EDGE_MODE, &edge_mode, 1);
11516
11517 /*noise reduction mode*/
11518 settings.update(ANDROID_NOISE_REDUCTION_MODE, &noise_red_mode, 1);
11519
11520 /*color correction mode*/
11521 static const uint8_t color_correct_mode = ANDROID_COLOR_CORRECTION_MODE_FAST;
11522 settings.update(ANDROID_COLOR_CORRECTION_MODE, &color_correct_mode, 1);
11523
11524 /*tonemap mode*/
11525 settings.update(ANDROID_TONEMAP_MODE, &tonemap_mode, 1);
11526
11527 int32_t scaler_crop_region[4];
11528 scaler_crop_region[0] = 0;
11529 scaler_crop_region[1] = 0;
11530 scaler_crop_region[2] = gCamCapability[mCameraId]->active_array_size.width;
11531 scaler_crop_region[3] = gCamCapability[mCameraId]->active_array_size.height;
11532 settings.update(ANDROID_SCALER_CROP_REGION, scaler_crop_region, 4);
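// The default crop region covers the full active pixel array, i.e. no digital zoom.
// For a hypothetical 4032x3024 active array this would be {0, 0, 4032, 3024}.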
11533
11534 static const uint8_t antibanding_mode = ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
11535 settings.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &antibanding_mode, 1);
11536
11537 /*focus distance*/
11538 float focus_distance = 0.0;
11539 settings.update(ANDROID_LENS_FOCUS_DISTANCE, &focus_distance, 1);
11540
11541 /*target fps range: use maximum range for picture, and maximum fixed range for video*/
Thierry Strudele80ad7c2016-12-06 10:16:27 -080011542 /* Restrict template max_fps to 30 */
Thierry Strudel3d639192016-09-09 11:52:26 -070011543 float max_range = 0.0;
11544 float max_fixed_fps = 0.0;
11545 int32_t fps_range[2] = {0, 0};
11546 for (uint32_t i = 0; i < gCamCapability[mCameraId]->fps_ranges_tbl_cnt;
11547 i++) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -080011548 if (gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps >
11549 TEMPLATE_MAX_PREVIEW_FPS) {
11550 continue;
11551 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011552 float range = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps -
11553 gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
11554 if (type == CAMERA3_TEMPLATE_PREVIEW ||
11555 type == CAMERA3_TEMPLATE_STILL_CAPTURE ||
11556 type == CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG) {
11557 if (range > max_range) {
11558 fps_range[0] =
11559 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
11560 fps_range[1] =
11561 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
11562 max_range = range;
11563 }
11564 } else {
11565 if (range < 0.01 && max_fixed_fps <
11566 gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps) {
11567 fps_range[0] =
11568 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
11569 fps_range[1] =
11570 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
11571 max_fixed_fps = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
11572 }
11573 }
11574 }
11575 settings.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, fps_range, 2);
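// Illustrative example with a hypothetical capability table of {[15,30], [30,30], [7.5,30]}:
// PREVIEW/STILL_CAPTURE/ZSL pick [7.5,30] (widest span), while the video templates pick
// [30,30] (the fastest fixed range not exceeding TEMPLATE_MAX_PREVIEW_FPS).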
11576
11577 /*precapture trigger*/
11578 uint8_t precapture_trigger = ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE;
11579 settings.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, &precapture_trigger, 1);
11580
11581 /*af trigger*/
11582 uint8_t af_trigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
11583 settings.update(ANDROID_CONTROL_AF_TRIGGER, &af_trigger, 1);
11584
11585 /* ae & af regions */
11586 int32_t active_region[] = {
11587 gCamCapability[mCameraId]->active_array_size.left,
11588 gCamCapability[mCameraId]->active_array_size.top,
11589 gCamCapability[mCameraId]->active_array_size.left +
11590 gCamCapability[mCameraId]->active_array_size.width,
11591 gCamCapability[mCameraId]->active_array_size.top +
11592 gCamCapability[mCameraId]->active_array_size.height,
11593 0};
11594 settings.update(ANDROID_CONTROL_AE_REGIONS, active_region,
11595 sizeof(active_region) / sizeof(active_region[0]));
11596 settings.update(ANDROID_CONTROL_AF_REGIONS, active_region,
11597 sizeof(active_region) / sizeof(active_region[0]));
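// Each region entry is {xmin, ymin, xmax, ymax, weight}; the trailing weight of 0 marks
// the default full-array region as effectively unset, so 3A ignores it until the app
// supplies explicit metering/focus regions.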
11598
11599 /* black level lock */
11600 uint8_t blacklevel_lock = ANDROID_BLACK_LEVEL_LOCK_OFF;
11601 settings.update(ANDROID_BLACK_LEVEL_LOCK, &blacklevel_lock, 1);
11602
Thierry Strudel3d639192016-09-09 11:52:26 -070011603 //special defaults for manual template
11604 if (type == CAMERA3_TEMPLATE_MANUAL) {
11605 static const uint8_t manualControlMode = ANDROID_CONTROL_MODE_OFF;
11606 settings.update(ANDROID_CONTROL_MODE, &manualControlMode, 1);
11607
11608 static const uint8_t manualFocusMode = ANDROID_CONTROL_AF_MODE_OFF;
11609 settings.update(ANDROID_CONTROL_AF_MODE, &manualFocusMode, 1);
11610
11611 static const uint8_t manualAeMode = ANDROID_CONTROL_AE_MODE_OFF;
11612 settings.update(ANDROID_CONTROL_AE_MODE, &manualAeMode, 1);
11613
11614 static const uint8_t manualAwbMode = ANDROID_CONTROL_AWB_MODE_OFF;
11615 settings.update(ANDROID_CONTROL_AWB_MODE, &manualAwbMode, 1);
11616
11617 static const uint8_t manualTonemapMode = ANDROID_TONEMAP_MODE_FAST;
11618 settings.update(ANDROID_TONEMAP_MODE, &manualTonemapMode, 1);
11619
11620 static const uint8_t manualColorCorrectMode = ANDROID_COLOR_CORRECTION_MODE_TRANSFORM_MATRIX;
11621 settings.update(ANDROID_COLOR_CORRECTION_MODE, &manualColorCorrectMode, 1);
11622 }
11623
11624
11625 /* TNR
11626 * This is where we decide for which templates TNR is enabled by default.
11627 * TNR is turned on if either the preview or the video stream requires it.
11628 * This is not to be confused with per-stream linking; that decision is still
11629 * made on a per-session basis and is handled as part of stream configuration.
11630 */
11631 uint8_t tnr_enable = 0;
11632
11633 if (m_bTnrPreview || m_bTnrVideo) {
11634
11635 switch (type) {
11636 case CAMERA3_TEMPLATE_VIDEO_RECORD:
11637 tnr_enable = 1;
11638 break;
11639
11640 default:
11641 tnr_enable = 0;
11642 break;
11643 }
11644
11645 int32_t tnr_process_type = (int32_t)getTemporalDenoiseProcessPlate();
11646 settings.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
11647 settings.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
11648
11649 LOGD("TNR:%d with process plate %d for template:%d",
11650 tnr_enable, tnr_process_type, type);
11651 }
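// Only the VIDEO_RECORD template seeds TNR as enabled here, and only when the per-session
// m_bTnrPreview/m_bTnrVideo flags allow it; all other templates default to TNR off.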
11652
11653 //Update Link tags to default
Shuzhen Wang920ea402017-05-03 08:49:39 -070011654 uint8_t sync_type = CAM_TYPE_STANDALONE;
Thierry Strudel3d639192016-09-09 11:52:26 -070011655 settings.update(QCAMERA3_DUALCAM_LINK_ENABLE, &sync_type, 1);
11656
Chien-Yu Chena3bbdc02017-05-05 11:31:47 -070011657 uint8_t is_main = 1;
Thierry Strudel3d639192016-09-09 11:52:26 -070011658 settings.update(QCAMERA3_DUALCAM_LINK_IS_MAIN, &is_main, 1);
11659
Shuzhen Wang920ea402017-05-03 08:49:39 -070011660 uint8_t related_camera_id = mCameraId;
11661 settings.update(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID, &related_camera_id, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011662
11663 /* CDS default */
11664 char prop[PROPERTY_VALUE_MAX];
11665 memset(prop, 0, sizeof(prop));
11666 property_get("persist.camera.CDS", prop, "Auto");
11667 cam_cds_mode_type_t cds_mode = CAM_CDS_MODE_AUTO;
11668 cds_mode = lookupProp(CDS_MAP, METADATA_MAP_SIZE(CDS_MAP), prop);
11669 if (CAM_CDS_MODE_MAX == cds_mode) {
11670 cds_mode = CAM_CDS_MODE_AUTO;
11671 }
11672
11673 /* Disabling CDS in templates which have TNR enabled*/
11674 if (tnr_enable)
11675 cds_mode = CAM_CDS_MODE_OFF;
11676
11677 int32_t mode = cds_mode;
11678 settings.update(QCAMERA3_CDS_MODE, &mode, 1);
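// For example (assuming CDS_MAP contains the usual "On"/"Off"/"Auto" strings),
// "adb shell setprop persist.camera.CDS Off" makes CDS default to OFF; unrecognized
// values fall back to CAM_CDS_MODE_AUTO.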
Thierry Strudel04e026f2016-10-10 11:27:36 -070011679
Thierry Strudel269c81a2016-10-12 12:13:59 -070011680 /* Manual Convergence AEC Speed is disabled by default*/
11681 float default_aec_speed = 0;
11682 settings.update(QCAMERA3_AEC_CONVERGENCE_SPEED, &default_aec_speed, 1);
11683
11684 /* Manual Convergence AWB Speed is disabled by default*/
11685 float default_awb_speed = 0;
11686 settings.update(QCAMERA3_AWB_CONVERGENCE_SPEED, &default_awb_speed, 1);
11687
Thierry Strudel295a0ca2016-11-03 18:38:47 -070011688 // Set instant AEC to normal convergence by default
11689 int32_t instant_aec_mode = (int32_t)QCAMERA3_INSTANT_AEC_NORMAL_CONVERGENCE;
11690 settings.update(QCAMERA3_INSTANT_AEC_MODE, &instant_aec_mode, 1);
11691
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070011692 if (gExposeEnableZslKey) {
11693 settings.update(ANDROID_CONTROL_ENABLE_ZSL, &enableZsl, 1);
Chien-Yu Chen0a921f92017-08-27 17:25:33 -070011694 int32_t postview = 0;
11695 settings.update(NEXUS_EXPERIMENTAL_2017_POSTVIEW, &postview, 1);
Chien-Yu Chenb0981e32017-08-28 19:27:35 -070011696 int32_t continuousZslCapture = 0;
11697 settings.update(NEXUS_EXPERIMENTAL_2017_CONTINUOUS_ZSL_CAPTURE, &continuousZslCapture, 1);
Chien-Yu Chenfadf40e2017-09-15 14:33:57 -070011698 // Disable HDR+ for templates other than CAMERA3_TEMPLATE_STILL_CAPTURE and
11699 // CAMERA3_TEMPLATE_PREVIEW.
11700 int32_t disableHdrplus = (type == CAMERA3_TEMPLATE_STILL_CAPTURE ||
11701 type == CAMERA3_TEMPLATE_PREVIEW) ? 0 : 1;
Chien-Yu Chenec328c82017-08-30 16:41:08 -070011702 settings.update(NEXUS_EXPERIMENTAL_2017_DISABLE_HDRPLUS, &disableHdrplus, 1);
11703
Shuzhen Wang77b049a2017-08-30 12:24:36 -070011704 // Set hybrid_ae tag in PREVIEW and STILL_CAPTURE templates to 1 so that
11705 // hybrid ae is enabled for 3rd party app HDR+.
11706 if (type == CAMERA3_TEMPLATE_PREVIEW ||
11707 type == CAMERA3_TEMPLATE_STILL_CAPTURE) {
11708 hybrid_ae = 1;
11709 }
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070011710 }
Shuzhen Wang77b049a2017-08-30 12:24:36 -070011711 /* hybrid ae */
11712 settings.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &hybrid_ae, 1);
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070011713
Thierry Strudel3d639192016-09-09 11:52:26 -070011714 mDefaultMetadata[type] = settings.release();
11715
11716 return mDefaultMetadata[type];
11717}
11718
11719/*===========================================================================
Emilian Peev30522a12017-08-03 14:36:33 +010011720 * FUNCTION : getExpectedFrameDuration
11721 *
11722 * DESCRIPTION: Extract the maximum frame duration from either exposure or frame
11723 * duration
11724 *
11725 * PARAMETERS :
11726 * @request : request settings
11727 * @frameDuration : The maximum frame duration in nanoseconds
11728 *
11729 * RETURN : None
11730 *==========================================================================*/
11731void QCamera3HardwareInterface::getExpectedFrameDuration(
11732 const camera_metadata_t *request, nsecs_t *frameDuration /*out*/) {
11733 if (nullptr == frameDuration) {
11734 return;
11735 }
11736
11737 camera_metadata_ro_entry_t e = camera_metadata_ro_entry_t();
11738 find_camera_metadata_ro_entry(request,
11739 ANDROID_SENSOR_EXPOSURE_TIME,
11740 &e);
11741 if (e.count > 0) {
11742 *frameDuration = e.data.i64[0];
11743 }
11744 find_camera_metadata_ro_entry(request,
11745 ANDROID_SENSOR_FRAME_DURATION,
11746 &e);
11747 if (e.count > 0) {
11748 *frameDuration = std::max(e.data.i64[0], *frameDuration);
11749 }
11750}
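/*
 * Worked example (illustrative values): with ANDROID_SENSOR_EXPOSURE_TIME = 50ms and
 * ANDROID_SENSOR_FRAME_DURATION = 33ms, *frameDuration becomes max(50ms, 33ms) = 50ms.
 * If neither tag is present, *frameDuration is left untouched.
 */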
11751
11752/*===========================================================================
11753 * FUNCTION : calculateMaxExpectedDuration
11754 *
11755 * DESCRIPTION: Calculate the expected frame duration in nanoseconds given the
11756 * current camera settings.
11757 *
11758 * PARAMETERS :
11759 * @request : request settings
11760 *
11761 * RETURN : Expected frame duration in nanoseconds.
11762 *==========================================================================*/
11763nsecs_t QCamera3HardwareInterface::calculateMaxExpectedDuration(
11764 const camera_metadata_t *request) {
11765 nsecs_t maxExpectedDuration = kDefaultExpectedDuration;
11766 camera_metadata_ro_entry_t e = camera_metadata_ro_entry_t();
11767 find_camera_metadata_ro_entry(request, ANDROID_CONTROL_MODE, &e);
11768 if (e.count == 0) {
11769 return maxExpectedDuration;
11770 }
11771
11772 if (e.data.u8[0] == ANDROID_CONTROL_MODE_OFF) {
11773 getExpectedFrameDuration(request, &maxExpectedDuration /*out*/);
11774 }
11775
11776 if (e.data.u8[0] != ANDROID_CONTROL_MODE_AUTO) {
11777 return maxExpectedDuration;
11778 }
11779
11780 find_camera_metadata_ro_entry(request, ANDROID_CONTROL_AE_MODE, &e);
11781 if (e.count == 0) {
11782 return maxExpectedDuration;
11783 }
11784
11785 switch (e.data.u8[0]) {
11786 case ANDROID_CONTROL_AE_MODE_OFF:
11787 getExpectedFrameDuration(request, &maxExpectedDuration /*out*/);
11788 break;
11789 default:
11790 find_camera_metadata_ro_entry(request,
11791 ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
11792 &e);
11793 if (e.count > 1) {
11794 maxExpectedDuration = 1e9 / e.data.i32[0];
11795 }
11796 break;
11797 }
11798
11799 return maxExpectedDuration;
11800}
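/*
 * Worked example (illustrative values): with ANDROID_CONTROL_MODE and AE mode both in an
 * auto state and ANDROID_CONTROL_AE_TARGET_FPS_RANGE = [15, 30], the expected upper bound
 * is 1e9 / 15 ~= 66.7ms. With AE_MODE_OFF (or CONTROL_MODE_OFF) the bound instead comes
 * from the explicit exposure time / frame duration via getExpectedFrameDuration().
 */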
11801
11802/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070011803 * FUNCTION : setFrameParameters
11804 *
11805 * DESCRIPTION: set parameters per frame as requested in the metadata from
11806 * framework
11807 *
11808 * PARAMETERS :
11809 * @request : request that needs to be serviced
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011810 * @streamsArray : Stream ID of all the requested streams
Thierry Strudel3d639192016-09-09 11:52:26 -070011811 * @blob_request: Whether this request is a blob request or not
11812 *
11813 * RETURN : success: NO_ERROR
11814 * failure:
11815 *==========================================================================*/
11816int QCamera3HardwareInterface::setFrameParameters(
11817 camera3_capture_request_t *request,
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011818 cam_stream_ID_t streamsArray,
Thierry Strudel3d639192016-09-09 11:52:26 -070011819 int blob_request,
11820 uint32_t snapshotStreamId)
11821{
11822 /*translate from camera_metadata_t type to parm_type_t*/
11823 int rc = 0;
11824 int32_t hal_version = CAM_HAL_V3;
11825
11826 clear_metadata_buffer(mParameters);
11827 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version)) {
11828 LOGE("Failed to set hal version in the parameters");
11829 return BAD_VALUE;
11830 }
11831
11832 /*we need to update the frame number in the parameters*/
11833 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_FRAME_NUMBER,
11834 request->frame_number)) {
11835 LOGE("Failed to set the frame number in the parameters");
11836 return BAD_VALUE;
11837 }
11838
11839 /* Update stream id of all the requested buffers */
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011840 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID, streamsArray)) {
Thierry Strudel3d639192016-09-09 11:52:26 -070011841 LOGE("Failed to set stream type mask in the parameters");
11842 return BAD_VALUE;
11843 }
11844
11845 if (mUpdateDebugLevel) {
11846 uint32_t dummyDebugLevel = 0;
11847 /* The value of dummyDebugLevel is irrelevant. On receiving
11848 * CAM_INTF_PARM_UPDATE_DEBUG_LEVEL, the debug property is re-read */
11849 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_UPDATE_DEBUG_LEVEL,
11850 dummyDebugLevel)) {
11851 LOGE("Failed to set UPDATE_DEBUG_LEVEL");
11852 return BAD_VALUE;
11853 }
11854 mUpdateDebugLevel = false;
11855 }
11856
11857 if(request->settings != NULL){
Emilian Peev30522a12017-08-03 14:36:33 +010011858 mExpectedFrameDuration = calculateMaxExpectedDuration(request->settings);
Thierry Strudel3d639192016-09-09 11:52:26 -070011859 rc = translateToHalMetadata(request, mParameters, snapshotStreamId);
11860 if (blob_request)
11861 memcpy(mPrevParameters, mParameters, sizeof(metadata_buffer_t));
11862 }
11863
11864 return rc;
11865}
11866
11867/*===========================================================================
11868 * FUNCTION : setReprocParameters
11869 *
11870 * DESCRIPTION: Translate frameworks metadata to HAL metadata structure, and
11871 * return it.
11872 *
11873 * PARAMETERS :
11874 * @request : request that needs to be serviced
11875 *
11876 * RETURN : success: NO_ERROR
11877 * failure:
11878 *==========================================================================*/
11879int32_t QCamera3HardwareInterface::setReprocParameters(
11880 camera3_capture_request_t *request, metadata_buffer_t *reprocParam,
11881 uint32_t snapshotStreamId)
11882{
11883 /*translate from camera_metadata_t type to parm_type_t*/
11884 int rc = 0;
11885
11886 if (NULL == request->settings){
11887 LOGE("Reprocess settings cannot be NULL");
11888 return BAD_VALUE;
11889 }
11890
11891 if (NULL == reprocParam) {
11892 LOGE("Invalid reprocessing metadata buffer");
11893 return BAD_VALUE;
11894 }
11895 clear_metadata_buffer(reprocParam);
11896
11897 /*we need to update the frame number in the parameters*/
11898 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FRAME_NUMBER,
11899 request->frame_number)) {
11900 LOGE("Failed to set the frame number in the parameters");
11901 return BAD_VALUE;
11902 }
11903
11904 rc = translateToHalMetadata(request, reprocParam, snapshotStreamId);
11905 if (rc < 0) {
11906 LOGE("Failed to translate reproc request");
11907 return rc;
11908 }
11909
11910 CameraMetadata frame_settings;
11911 frame_settings = request->settings;
11912 if (frame_settings.exists(QCAMERA3_CROP_COUNT_REPROCESS) &&
11913 frame_settings.exists(QCAMERA3_CROP_REPROCESS)) {
11914 int32_t *crop_count =
11915 frame_settings.find(QCAMERA3_CROP_COUNT_REPROCESS).data.i32;
11916 int32_t *crop_data =
11917 frame_settings.find(QCAMERA3_CROP_REPROCESS).data.i32;
11918 int32_t *roi_map =
11919 frame_settings.find(QCAMERA3_CROP_ROI_MAP_REPROCESS).data.i32;
11920 if ((0 < *crop_count) && (*crop_count < MAX_NUM_STREAMS)) {
11921 cam_crop_data_t crop_meta;
11922 memset(&crop_meta, 0, sizeof(cam_crop_data_t));
11923 crop_meta.num_of_streams = 1;
11924 crop_meta.crop_info[0].crop.left = crop_data[0];
11925 crop_meta.crop_info[0].crop.top = crop_data[1];
11926 crop_meta.crop_info[0].crop.width = crop_data[2];
11927 crop_meta.crop_info[0].crop.height = crop_data[3];
11928
11929 crop_meta.crop_info[0].roi_map.left =
11930 roi_map[0];
11931 crop_meta.crop_info[0].roi_map.top =
11932 roi_map[1];
11933 crop_meta.crop_info[0].roi_map.width =
11934 roi_map[2];
11935 crop_meta.crop_info[0].roi_map.height =
11936 roi_map[3];
11937
11938 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_CROP_DATA, crop_meta)) {
11939 rc = BAD_VALUE;
11940 }
11941 LOGD("Found reprocess crop data for stream %p %dx%d, %dx%d",
11942 request->input_buffer->stream,
11943 crop_meta.crop_info[0].crop.left,
11944 crop_meta.crop_info[0].crop.top,
11945 crop_meta.crop_info[0].crop.width,
11946 crop_meta.crop_info[0].crop.height);
11947 LOGD("Found reprocess roi map data for stream %p %dx%d, %dx%d",
11948 request->input_buffer->stream,
11949 crop_meta.crop_info[0].roi_map.left,
11950 crop_meta.crop_info[0].roi_map.top,
11951 crop_meta.crop_info[0].roi_map.width,
11952 crop_meta.crop_info[0].roi_map.height);
11953 } else {
11954 LOGE("Invalid reprocess crop count %d!", *crop_count);
11955 }
11956 } else {
11957 LOGE("No crop data from matching output stream");
11958 }
11959
11960 /* These settings are not needed for regular requests so handle them specially for
11961 reprocess requests; information needed for EXIF tags */
11962 if (frame_settings.exists(ANDROID_FLASH_MODE)) {
11963 int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
11964 (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
11965 if (NAME_NOT_FOUND != val) {
11966 uint32_t flashMode = (uint32_t)val;
11967 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_MODE, flashMode)) {
11968 rc = BAD_VALUE;
11969 }
11970 } else {
11971 LOGE("Could not map fwk flash mode %d to correct hal flash mode",
11972 frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
11973 }
11974 } else {
11975 LOGH("No flash mode in reprocess settings");
11976 }
11977
11978 if (frame_settings.exists(ANDROID_FLASH_STATE)) {
11979 int32_t flashState = (int32_t)frame_settings.find(ANDROID_FLASH_STATE).data.u8[0];
11980 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_STATE, flashState)) {
11981 rc = BAD_VALUE;
11982 }
11983 } else {
11984 LOGH("No flash state in reprocess settings");
11985 }
11986
11987 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS)) {
11988 uint8_t *reprocessFlags =
11989 frame_settings.find(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS).data.u8;
11990 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_REPROCESS_FLAGS,
11991 *reprocessFlags)) {
11992 rc = BAD_VALUE;
11993 }
11994 }
11995
Thierry Strudel54dc9782017-02-15 12:12:10 -080011996 // Add exif debug data to internal metadata
11997 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB)) {
11998 mm_jpeg_debug_exif_params_t *debug_params =
11999 (mm_jpeg_debug_exif_params_t *)frame_settings.find
12000 (QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB).data.u8;
12001 // AE
12002 if (debug_params->ae_debug_params_valid == TRUE) {
12003 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AE,
12004 debug_params->ae_debug_params);
12005 }
12006 // AWB
12007 if (debug_params->awb_debug_params_valid == TRUE) {
12008 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AWB,
12009 debug_params->awb_debug_params);
12010 }
12011 // AF
12012 if (debug_params->af_debug_params_valid == TRUE) {
12013 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AF,
12014 debug_params->af_debug_params);
12015 }
12016 // ASD
12017 if (debug_params->asd_debug_params_valid == TRUE) {
12018 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_ASD,
12019 debug_params->asd_debug_params);
12020 }
12021 // Stats
12022 if (debug_params->stats_debug_params_valid == TRUE) {
12023 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_STATS,
12024 debug_params->stats_debug_params);
12025 }
12026 // BE Stats
12027 if (debug_params->bestats_debug_params_valid == TRUE) {
12028 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_BESTATS,
12029 debug_params->bestats_debug_params);
12030 }
12031 // BHIST
12032 if (debug_params->bhist_debug_params_valid == TRUE) {
12033 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_BHIST,
12034 debug_params->bhist_debug_params);
12035 }
12036 // 3A Tuning
12037 if (debug_params->q3a_tuning_debug_params_valid == TRUE) {
12038 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_3A_TUNING,
12039 debug_params->q3a_tuning_debug_params);
12040 }
12041 }
12042
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070012043 // Add metadata which reprocess needs
12044 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB)) {
12045 cam_reprocess_info_t *repro_info =
12046 (cam_reprocess_info_t *)frame_settings.find
12047 (QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB).data.u8;
Thierry Strudel3d639192016-09-09 11:52:26 -070012048 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_SENSOR,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070012049 repro_info->sensor_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070012050 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CAMIF,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070012051 repro_info->camif_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070012052 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_ISP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070012053 repro_info->isp_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070012054 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CPP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070012055 repro_info->cpp_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070012056 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_AF_FOCAL_LENGTH_RATIO,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070012057 repro_info->af_focal_length_ratio);
Thierry Strudel3d639192016-09-09 11:52:26 -070012058 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_FLIP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070012059 repro_info->pipeline_flip);
12060 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_AF_ROI,
12061 repro_info->af_roi);
12062 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_IMG_DYN_FEAT,
12063 repro_info->dyn_mask);
Thierry Strudel3d639192016-09-09 11:52:26 -070012064 /* If ANDROID_JPEG_ORIENTATION is present in the frame settings,
12065 CAM_INTF_PARM_ROTATION metadata has already been added in
12066 translateToHalMetadata, and the HAL needs to keep this new rotation
12067 metadata. Otherwise, the old rotation info saved in the vendor tag
12068 is used */
12069 IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
12070 CAM_INTF_PARM_ROTATION, reprocParam) {
12071 LOGD("CAM_INTF_PARM_ROTATION metadata is added in translateToHalMetadata");
12072 } else {
12073 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_ROTATION,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070012074 repro_info->rotation_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070012075 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012076 }
12077
12078 /* Add additional JPEG cropping information. The app adds QCAMERA3_JPEG_ENCODE_CROP_RECT
12079 to request cropping and uses the ROI for downscale/upscale during HW JPEG encoding.
12080 roi.width and roi.height are the final JPEG size.
12081 For now, the HAL only checks this for reprocess requests */
12082 if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ENABLE) &&
12083 frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_RECT)) {
12084 uint8_t *enable =
12085 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ENABLE).data.u8;
12086 if (*enable == TRUE) {
12087 int32_t *crop_data =
12088 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_RECT).data.i32;
12089 cam_stream_crop_info_t crop_meta;
12090 memset(&crop_meta, 0, sizeof(cam_stream_crop_info_t));
12091 crop_meta.stream_id = 0;
12092 crop_meta.crop.left = crop_data[0];
12093 crop_meta.crop.top = crop_data[1];
12094 crop_meta.crop.width = crop_data[2];
12095 crop_meta.crop.height = crop_data[3];
Thierry Strudel9e74aae2016-09-22 17:10:18 -070012096 // The JPEG crop roi should match cpp output size
12097 IF_META_AVAILABLE(cam_stream_crop_info_t, cpp_crop,
12098 CAM_INTF_META_SNAP_CROP_INFO_CPP, reprocParam) {
12099 crop_meta.roi_map.left = 0;
12100 crop_meta.roi_map.top = 0;
12101 crop_meta.roi_map.width = cpp_crop->crop.width;
12102 crop_meta.roi_map.height = cpp_crop->crop.height;
Thierry Strudel3d639192016-09-09 11:52:26 -070012103 }
12104 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_JPEG_ENCODE_CROP,
12105 crop_meta);
Thierry Strudel9e74aae2016-09-22 17:10:18 -070012106 LOGH("Add JPEG encode crop left %d, top %d, width %d, height %d, mCameraId %d",
Thierry Strudel3d639192016-09-09 11:52:26 -070012107 crop_meta.crop.left, crop_meta.crop.top,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070012108 crop_meta.crop.width, crop_meta.crop.height, mCameraId);
12109 LOGH("Add JPEG encode crop ROI left %d, top %d, width %d, height %d, mCameraId %d",
Thierry Strudel3d639192016-09-09 11:52:26 -070012110 crop_meta.roi_map.left, crop_meta.roi_map.top,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070012111 crop_meta.roi_map.width, crop_meta.roi_map.height, mCameraId);
12112
12113 // Add JPEG scale information
12114 cam_dimension_t scale_dim;
12115 memset(&scale_dim, 0, sizeof(cam_dimension_t));
12116 if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ROI)) {
12117 int32_t *roi =
12118 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ROI).data.i32;
12119 scale_dim.width = roi[2];
12120 scale_dim.height = roi[3];
12121 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_JPEG_SCALE_DIMENSION,
12122 scale_dim);
12123 LOGH("Add JPEG encode scale width %d, height %d, mCameraId %d",
12124 scale_dim.width, scale_dim.height, mCameraId);
12125 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012126 }
12127 }
12128
12129 return rc;
12130}
12131
12132/*===========================================================================
12133 * FUNCTION : saveRequestSettings
12134 *
12135 * DESCRIPTION: Add any settings that might have changed to the request settings
12136 * and save the settings to be applied on the frame
12137 *
12138 * PARAMETERS :
12139 * @jpegMetadata : the extracted and/or modified jpeg metadata
12140 * @request : request with initial settings
12141 *
12142 * RETURN :
12143 * camera_metadata_t* : pointer to the saved request settings
12144 *==========================================================================*/
12145camera_metadata_t* QCamera3HardwareInterface::saveRequestSettings(
12146 const CameraMetadata &jpegMetadata,
12147 camera3_capture_request_t *request)
12148{
12149 camera_metadata_t *resultMetadata;
12150 CameraMetadata camMetadata;
12151 camMetadata = request->settings;
12152
12153 if (jpegMetadata.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
12154 int32_t thumbnail_size[2];
12155 thumbnail_size[0] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
12156 thumbnail_size[1] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
12157 camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, thumbnail_size,
12158 jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
12159 }
12160
12161 if (request->input_buffer != NULL) {
12162 uint8_t reprocessFlags = 1;
12163 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS,
12164 (uint8_t*)&reprocessFlags,
12165 sizeof(reprocessFlags));
12166 }
12167
12168 resultMetadata = camMetadata.release();
12169 return resultMetadata;
12170}
12171
12172/*===========================================================================
12173 * FUNCTION : setHalFpsRange
12174 *
12175 * DESCRIPTION: set FPS range parameter
12176 *
12177 *
12178 * PARAMETERS :
12179 * @settings : Metadata from framework
12180 * @hal_metadata: Metadata buffer
12181 *
12182 *
12183 * RETURN : success: NO_ERROR
12184 * failure:
12185 *==========================================================================*/
12186int32_t QCamera3HardwareInterface::setHalFpsRange(const CameraMetadata &settings,
12187 metadata_buffer_t *hal_metadata)
12188{
12189 int32_t rc = NO_ERROR;
12190 cam_fps_range_t fps_range;
12191 fps_range.min_fps = (float)
12192 settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0];
12193 fps_range.max_fps = (float)
12194 settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
12195 fps_range.video_min_fps = fps_range.min_fps;
12196 fps_range.video_max_fps = fps_range.max_fps;
12197
12198 LOGD("aeTargetFpsRange fps: [%f %f]",
12199 fps_range.min_fps, fps_range.max_fps);
12200 /* In CONSTRAINED_HFR_MODE, sensor_fps is derived from aeTargetFpsRange as
12201 * follows:
12202 * ---------------------------------------------------------------|
12203 * Video stream is absent in configure_streams |
12204 * (Camcorder preview before the first video record |
12205 * ---------------------------------------------------------------|
12206 * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
12207 * | | | vid_min/max_fps|
12208 * ---------------------------------------------------------------|
12209 * NO | [ 30, 240] | 240 | [240, 240] |
12210 * |-------------|-------------|----------------|
12211 * | [240, 240] | 240 | [240, 240] |
12212 * ---------------------------------------------------------------|
12213 * Video stream is present in configure_streams |
12214 * ---------------------------------------------------------------|
12215 * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
12216 * | | | vid_min/max_fps|
12217 * ---------------------------------------------------------------|
12218 * NO | [ 30, 240] | 240 | [240, 240] |
12219 * (camcorder prev |-------------|-------------|----------------|
12220 * after video rec | [240, 240] | 240 | [240, 240] |
12221 * is stopped) | | | |
12222 * ---------------------------------------------------------------|
12223 * YES | [ 30, 240] | 240 | [240, 240] |
12224 * |-------------|-------------|----------------|
12225 * | [240, 240] | 240 | [240, 240] |
12226 * ---------------------------------------------------------------|
12227 * When Video stream is absent in configure_streams,
12228 * preview fps = sensor_fps / batchsize
12229 * Eg: for 240fps at batchSize 4, preview = 60fps
12230 * for 120fps at batchSize 4, preview = 30fps
12231 *
12232 * When video stream is present in configure_streams, preview fps is as per
12233 * the ratio of preview buffers to video buffers requested in process
12234 * capture request
12235 */
12236 mBatchSize = 0;
12237 if (CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) {
12238 fps_range.min_fps = fps_range.video_max_fps;
12239 fps_range.video_min_fps = fps_range.video_max_fps;
12240 int val = lookupHalName(HFR_MODE_MAP, METADATA_MAP_SIZE(HFR_MODE_MAP),
12241 fps_range.max_fps);
12242 if (NAME_NOT_FOUND != val) {
12243 cam_hfr_mode_t hfrMode = (cam_hfr_mode_t)val;
12244 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
12245 return BAD_VALUE;
12246 }
12247
12248 if (fps_range.max_fps >= MIN_FPS_FOR_BATCH_MODE) {
12249 /* If batchmode is currently in progress and the fps changes,
12250 * set the flag to restart the sensor */
12251 if((mHFRVideoFps >= MIN_FPS_FOR_BATCH_MODE) &&
12252 (mHFRVideoFps != fps_range.max_fps)) {
12253 mNeedSensorRestart = true;
12254 }
12255 mHFRVideoFps = fps_range.max_fps;
12256 mBatchSize = mHFRVideoFps / PREVIEW_FPS_FOR_HFR;
12257 if (mBatchSize > MAX_HFR_BATCH_SIZE) {
12258 mBatchSize = MAX_HFR_BATCH_SIZE;
12259 }
12260 }
12261 LOGD("hfrMode: %d batchSize: %d", hfrMode, mBatchSize);
12262
12263 }
12264 } else {
12265 /* HFR mode is session param in backend/ISP. This should be reset when
12266 * in non-HFR mode */
12267 cam_hfr_mode_t hfrMode = CAM_HFR_MODE_OFF;
12268 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
12269 return BAD_VALUE;
12270 }
12271 }
12272 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FPS_RANGE, fps_range)) {
12273 return BAD_VALUE;
12274 }
12275 LOGD("fps: [%f %f] vid_fps: [%f %f]", fps_range.min_fps,
12276 fps_range.max_fps, fps_range.video_min_fps, fps_range.video_max_fps);
12277 return rc;
12278}
12279
12280/*===========================================================================
12281 * FUNCTION : translateToHalMetadata
12282 *
12283 * DESCRIPTION: read from the camera_metadata_t and change to parm_type_t
12284 *
12285 *
12286 * PARAMETERS :
12287 * @request : request sent from framework
12288 *
12289 *
12290 * RETURN : success: NO_ERROR
12291 * failure:
12292 *==========================================================================*/
12293int QCamera3HardwareInterface::translateToHalMetadata
12294 (const camera3_capture_request_t *request,
12295 metadata_buffer_t *hal_metadata,
Chien-Yu Chen92724a82017-01-06 11:50:30 -080012296 uint32_t snapshotStreamId) {
12297 if (request == nullptr || hal_metadata == nullptr) {
12298 return BAD_VALUE;
12299 }
12300
12301 int64_t minFrameDuration = getMinFrameDuration(request);
12302
12303 return translateFwkMetadataToHalMetadata(request->settings, hal_metadata, snapshotStreamId,
12304 minFrameDuration);
12305}
12306
12307int QCamera3HardwareInterface::translateFwkMetadataToHalMetadata(
12308 const camera_metadata_t *frameworkMetadata, metadata_buffer_t *hal_metadata,
12309 uint32_t snapshotStreamId, int64_t minFrameDuration) {
12310
Thierry Strudel3d639192016-09-09 11:52:26 -070012311 int rc = 0;
12312 CameraMetadata frame_settings;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080012313 frame_settings = frameworkMetadata;
Thierry Strudel3d639192016-09-09 11:52:26 -070012314
12315 /* Do not change the order of the following list unless you know what you are
12316 * doing.
12317 * The order is laid out in such a way that parameters in the front of the table
12318 * may be used to override the parameters later in the table. Examples are:
12319 * 1. META_MODE should precede AEC/AWB/AF MODE
12320 * 2. AEC MODE should precede EXPOSURE_TIME/SENSITIVITY/FRAME_DURATION
12321 * 3. AWB_MODE should precede COLOR_CORRECTION_MODE
12322 * 4. Any mode should precede its corresponding settings
12323 */
12324 if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
12325 uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
12326 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_MODE, metaMode)) {
12327 rc = BAD_VALUE;
12328 }
12329 rc = extractSceneMode(frame_settings, metaMode, hal_metadata);
12330 if (rc != NO_ERROR) {
12331 LOGE("extractSceneMode failed");
12332 }
12333 }
12334
12335 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
12336 uint8_t fwk_aeMode =
12337 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
12338 uint8_t aeMode;
12339 int32_t redeye;
12340
12341 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_OFF ) {
12342 aeMode = CAM_AE_MODE_OFF;
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -080012343 } else if (fwk_aeMode == NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH) {
12344 aeMode = CAM_AE_MODE_ON_EXTERNAL_FLASH;
Thierry Strudel3d639192016-09-09 11:52:26 -070012345 } else {
12346 aeMode = CAM_AE_MODE_ON;
12347 }
12348 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
12349 redeye = 1;
12350 } else {
12351 redeye = 0;
12352 }
12353
12354 int val = lookupHalName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
12355 fwk_aeMode);
12356 if (NAME_NOT_FOUND != val) {
12357 int32_t flashMode = (int32_t)val;
12358 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode);
12359 }
12360
12361 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_MODE, aeMode);
12362 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_REDEYE_REDUCTION, redeye)) {
12363 rc = BAD_VALUE;
12364 }
12365 }
12366
12367 if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) {
12368 uint8_t fwk_whiteLevel = frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0];
12369 int val = lookupHalName(WHITE_BALANCE_MODES_MAP, METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
12370 fwk_whiteLevel);
12371 if (NAME_NOT_FOUND != val) {
12372 uint8_t whiteLevel = (uint8_t)val;
12373 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_WHITE_BALANCE, whiteLevel)) {
12374 rc = BAD_VALUE;
12375 }
12376 }
12377 }
12378
12379 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
12380 uint8_t fwk_cacMode =
12381 frame_settings.find(
12382 ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
12383 int val = lookupHalName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
12384 fwk_cacMode);
12385 if (NAME_NOT_FOUND != val) {
12386 cam_aberration_mode_t cacMode = (cam_aberration_mode_t) val;
12387 bool entryAvailable = FALSE;
12388 // Check whether Frameworks set CAC mode is supported in device or not
12389 for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
12390 if (gCamCapability[mCameraId]->aberration_modes[i] == cacMode) {
12391 entryAvailable = TRUE;
12392 break;
12393 }
12394 }
12395 LOGD("FrameworksCacMode=%d entryAvailable=%d", cacMode, entryAvailable);
12396 // If the entry is not found, set a device-supported mode instead of the framework mode, i.e.:
12397 // Only HW ISP CAC + no SW CAC : advertise all 3, with HIGH_QUALITY doing the same as FAST in the ISP
12398 // No HW ISP CAC + only SW CAC : advertise all 3, with FAST doing the same as OFF
12399 if (entryAvailable == FALSE) {
12400 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
12401 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
12402 } else {
12403 if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
12404 // HIGH_QUALITY is not supported, so fall back to FAST, as the spec says the
12405 // underlying device implementation may be the same for both modes.
12406 cacMode = CAM_COLOR_CORRECTION_ABERRATION_FAST;
12407 } else if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_FAST) {
12408 // Fast is not supported and so we cannot set HIGH or FAST but choose OFF
12409 // in order to avoid the fps drop due to high quality
12410 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
12411 } else {
12412 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
12413 }
12414 }
12415 }
12416 LOGD("Final cacMode is %d", cacMode);
12417 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_CAC, cacMode)) {
12418 rc = BAD_VALUE;
12419 }
12420 } else {
12421 LOGE("Invalid framework CAC mode: %d", fwk_cacMode);
12422 }
12423 }
12424
Jason Lee84ae9972017-02-24 13:24:24 -080012425 uint8_t fwk_focusMode = 0;
Shuzhen Wangb57ec912017-07-31 13:24:27 -070012426 if (m_bForceInfinityAf == 0) {
Thierry Strudel2896d122017-02-23 19:18:03 -080012427 if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) {
Jason Lee84ae9972017-02-24 13:24:24 -080012428 fwk_focusMode = frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0];
Thierry Strudel2896d122017-02-23 19:18:03 -080012429 int val = lookupHalName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
12430 fwk_focusMode);
12431 if (NAME_NOT_FOUND != val) {
12432 uint8_t focusMode = (uint8_t)val;
12433 LOGD("set focus mode %d", focusMode);
12434 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12435 CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
12436 rc = BAD_VALUE;
12437 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012438 }
12439 }
Thierry Strudel2896d122017-02-23 19:18:03 -080012440 } else {
12441 uint8_t focusMode = (uint8_t)CAM_FOCUS_MODE_INFINITY;
12442 LOGE("Focus forced to infinity %d", focusMode);
12443 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
12444 rc = BAD_VALUE;
12445 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012446 }
12447
Jason Lee84ae9972017-02-24 13:24:24 -080012448 if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE) &&
12449 fwk_focusMode == ANDROID_CONTROL_AF_MODE_OFF) {
Thierry Strudel3d639192016-09-09 11:52:26 -070012450 float focalDistance = frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0];
12451 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCUS_DISTANCE,
12452 focalDistance)) {
12453 rc = BAD_VALUE;
12454 }
12455 }
12456
12457 if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) {
12458 uint8_t fwk_antibandingMode =
12459 frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.u8[0];
12460 int val = lookupHalName(ANTIBANDING_MODES_MAP,
12461 METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP), fwk_antibandingMode);
12462 if (NAME_NOT_FOUND != val) {
12463 uint32_t hal_antibandingMode = (uint32_t)val;
Shuzhen Wangf6890e02016-08-12 14:28:54 -070012464 if (hal_antibandingMode == CAM_ANTIBANDING_MODE_AUTO) {
12465 if (m60HzZone) {
12466 hal_antibandingMode = CAM_ANTIBANDING_MODE_AUTO_60HZ;
12467 } else {
12468 hal_antibandingMode = CAM_ANTIBANDING_MODE_AUTO_50HZ;
12469 }
12470 }
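// m60HzZone is presumably derived from the device's region/mains frequency (set elsewhere
// in the HAL; assumption): AUTO antibanding is narrowed to a 60Hz hint in 60Hz zones and
// to a 50Hz hint otherwise.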
Thierry Strudel3d639192016-09-09 11:52:26 -070012471 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ANTIBANDING,
12472 hal_antibandingMode)) {
12473 rc = BAD_VALUE;
12474 }
12475 }
12476 }
12477
12478 if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
12479 int32_t expCompensation = frame_settings.find(
12480 ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
12481 if (expCompensation < gCamCapability[mCameraId]->exposure_compensation_min)
12482 expCompensation = gCamCapability[mCameraId]->exposure_compensation_min;
12483 if (expCompensation > gCamCapability[mCameraId]->exposure_compensation_max)
12484 expCompensation = gCamCapability[mCameraId]->exposure_compensation_max;
Thierry Strudel54dc9782017-02-15 12:12:10 -080012485 LOGD("Setting compensation:%d", expCompensation);
Thierry Strudel3d639192016-09-09 11:52:26 -070012486 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_COMPENSATION,
12487 expCompensation)) {
12488 rc = BAD_VALUE;
12489 }
12490 }
12491
12492 if (frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) {
12493 uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0];
12494 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_LOCK, aeLock)) {
12495 rc = BAD_VALUE;
12496 }
12497 }
12498 if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
12499 rc = setHalFpsRange(frame_settings, hal_metadata);
12500 if (rc != NO_ERROR) {
12501 LOGE("setHalFpsRange failed");
12502 }
12503 }
12504
12505 if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) {
12506 uint8_t awbLock = frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0];
12507 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AWB_LOCK, awbLock)) {
12508 rc = BAD_VALUE;
12509 }
12510 }
12511
12512 if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) {
12513 uint8_t fwk_effectMode = frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0];
12514 int val = lookupHalName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
12515 fwk_effectMode);
12516 if (NAME_NOT_FOUND != val) {
12517 uint8_t effectMode = (uint8_t)val;
12518 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EFFECT, effectMode)) {
12519 rc = BAD_VALUE;
12520 }
12521 }
12522 }
12523
12524 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) {
12525 uint8_t colorCorrectMode = frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0];
12526 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_MODE,
12527 colorCorrectMode)) {
12528 rc = BAD_VALUE;
12529 }
12530 }
12531
12532 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_GAINS)) {
12533 cam_color_correct_gains_t colorCorrectGains;
12534 for (size_t i = 0; i < CC_GAIN_MAX; i++) {
12535 colorCorrectGains.gains[i] =
12536 frame_settings.find(ANDROID_COLOR_CORRECTION_GAINS).data.f[i];
12537 }
12538 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_GAINS,
12539 colorCorrectGains)) {
12540 rc = BAD_VALUE;
12541 }
12542 }
12543
12544 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_TRANSFORM)) {
12545 cam_color_correct_matrix_t colorCorrectTransform;
12546 cam_rational_type_t transform_elem;
12547 size_t num = 0;
12548 for (size_t i = 0; i < CC_MATRIX_ROWS; i++) {
12549 for (size_t j = 0; j < CC_MATRIX_COLS; j++) {
12550 transform_elem.numerator =
12551 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].numerator;
12552 transform_elem.denominator =
12553 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].denominator;
12554 colorCorrectTransform.transform_matrix[i][j] = transform_elem;
12555 num++;
12556 }
12557 }
12558 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_TRANSFORM,
12559 colorCorrectTransform)) {
12560 rc = BAD_VALUE;
12561 }
12562 }
12563
12564 cam_trigger_t aecTrigger;
12565 aecTrigger.trigger = CAM_AEC_TRIGGER_IDLE;
12566 aecTrigger.trigger_id = -1;
12567 if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)&&
12568 frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_ID)) {
12569 aecTrigger.trigger =
12570 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0];
12571 aecTrigger.trigger_id =
12572 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID).data.i32[0];
12573 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER,
12574 aecTrigger)) {
12575 rc = BAD_VALUE;
12576 }
12577 LOGD("precaptureTrigger: %d precaptureTriggerID: %d",
12578 aecTrigger.trigger, aecTrigger.trigger_id);
12579 }
12580
12581 /*af_trigger must come with a trigger id*/
12582 if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER) &&
12583 frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER_ID)) {
12584 cam_trigger_t af_trigger;
12585 af_trigger.trigger =
12586 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0];
12587 af_trigger.trigger_id =
12588 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER_ID).data.i32[0];
12589 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_TRIGGER, af_trigger)) {
12590 rc = BAD_VALUE;
12591 }
12592 LOGD("AfTrigger: %d AfTriggerID: %d",
12593 af_trigger.trigger, af_trigger.trigger_id);
12594 }
12595
12596 if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) {
12597 int32_t demosaic = frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0];
12598 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_DEMOSAIC, demosaic)) {
12599 rc = BAD_VALUE;
12600 }
12601 }
12602 if (frame_settings.exists(ANDROID_EDGE_MODE)) {
12603 cam_edge_application_t edge_application;
12604 edge_application.edge_mode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0];
Thierry Strudel54dc9782017-02-15 12:12:10 -080012605
Thierry Strudel3d639192016-09-09 11:52:26 -070012606 if (edge_application.edge_mode == CAM_EDGE_MODE_OFF) {
12607 edge_application.sharpness = 0;
12608 } else {
Thierry Strudel54dc9782017-02-15 12:12:10 -080012609 edge_application.sharpness =
12610 gCamCapability[mCameraId]->sharpness_ctrl.def_value; //default
12611 if (frame_settings.exists(QCAMERA3_SHARPNESS_STRENGTH)) {
12612 int32_t sharpness =
12613 frame_settings.find(QCAMERA3_SHARPNESS_STRENGTH).data.i32[0];
12614 if (sharpness >= gCamCapability[mCameraId]->sharpness_ctrl.min_value &&
12615 sharpness <= gCamCapability[mCameraId]->sharpness_ctrl.max_value) {
12616 LOGD("Setting edge mode sharpness %d", sharpness);
12617 edge_application.sharpness = sharpness;
12618 }
12619 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012620 }
12621 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EDGE_MODE, edge_application)) {
12622 rc = BAD_VALUE;
12623 }
12624 }
12625
12626 if (frame_settings.exists(ANDROID_FLASH_MODE)) {
12627 int32_t respectFlashMode = 1;
12628 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
12629 uint8_t fwk_aeMode =
12630 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -080012631 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH ||
12632 fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH ||
12633 fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
Thierry Strudel3d639192016-09-09 11:52:26 -070012634 respectFlashMode = 0;
12635 LOGH("AE Mode controls flash, ignore android.flash.mode");
12636 }
12637 }
12638 if (respectFlashMode) {
12639 int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
12640 (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
12641 LOGH("flash mode after mapping %d", val);
12642 // To check: CAM_INTF_META_FLASH_MODE usage
12643 if (NAME_NOT_FOUND != val) {
12644 uint8_t flashMode = (uint8_t)val;
12645 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode)) {
12646 rc = BAD_VALUE;
12647 }
12648 }
12649 }
12650 }
12651
12652 if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) {
12653 uint8_t flashPower = frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0];
12654 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_POWER, flashPower)) {
12655 rc = BAD_VALUE;
12656 }
12657 }
12658
12659 if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) {
12660 int64_t flashFiringTime = frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0];
12661 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_FIRING_TIME,
12662 flashFiringTime)) {
12663 rc = BAD_VALUE;
12664 }
12665 }
12666
12667 if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) {
12668 uint8_t hotPixelMode = frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0];
12669 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_HOTPIXEL_MODE,
12670 hotPixelMode)) {
12671 rc = BAD_VALUE;
12672 }
12673 }
12674
12675 if (frame_settings.exists(ANDROID_LENS_APERTURE)) {
12676 float lensAperture = frame_settings.find( ANDROID_LENS_APERTURE).data.f[0];
12677 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_APERTURE,
12678 lensAperture)) {
12679 rc = BAD_VALUE;
12680 }
12681 }
12682
12683 if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) {
12684 float filterDensity = frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0];
12685 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FILTERDENSITY,
12686 filterDensity)) {
12687 rc = BAD_VALUE;
12688 }
12689 }
12690
12691 if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
12692 float focalLength = frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
12693 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCAL_LENGTH,
12694 focalLength)) {
12695 rc = BAD_VALUE;
12696 }
12697 }
12698
12699 if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) {
12700 uint8_t optStabMode =
12701 frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0];
12702 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_OPT_STAB_MODE,
12703 optStabMode)) {
12704 rc = BAD_VALUE;
12705 }
12706 }
12707
12708 if (frame_settings.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
12709 uint8_t videoStabMode =
12710 frame_settings.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
12711 LOGD("videoStabMode from APP = %d", videoStabMode);
12712 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_VIDEO_STAB_MODE,
12713 videoStabMode)) {
12714 rc = BAD_VALUE;
12715 }
12716 }
12717
12718
12719 if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) {
12720 uint8_t noiseRedMode = frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
12721 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_NOISE_REDUCTION_MODE,
12722 noiseRedMode)) {
12723 rc = BAD_VALUE;
12724 }
12725 }
12726
12727 if (frame_settings.exists(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR)) {
12728 float reprocessEffectiveExposureFactor =
12729 frame_settings.find(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR).data.f[0];
12730 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR,
12731 reprocessEffectiveExposureFactor)) {
12732 rc = BAD_VALUE;
12733 }
12734 }
12735
12736 cam_crop_region_t scalerCropRegion;
12737 bool scalerCropSet = false;
12738 if (frame_settings.exists(ANDROID_SCALER_CROP_REGION)) {
12739 scalerCropRegion.left = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0];
12740 scalerCropRegion.top = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1];
12741 scalerCropRegion.width = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2];
12742 scalerCropRegion.height = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[3];
12743
12744 // Map coordinate system from active array to sensor output.
12745 mCropRegionMapper.toSensor(scalerCropRegion.left, scalerCropRegion.top,
12746 scalerCropRegion.width, scalerCropRegion.height);
12747
12748 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SCALER_CROP_REGION,
12749 scalerCropRegion)) {
12750 rc = BAD_VALUE;
12751 }
12752 scalerCropSet = true;
12753 }
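// Illustrative sketch (not part of the original HAL): ANDROID_SCALER_CROP_REGION
// is four int32 values laid out as [left, top, width, height] in active-array
// coordinates, which the code above then remaps via mCropRegionMapper.toSensor().
// "settings" stands in for the request CameraMetadata; the numbers are hypothetical.
//
//   int32_t cropRegion[4] = {0, 0, 4032, 3024};   // example full active array
//   settings.update(ANDROID_SCALER_CROP_REGION, cropRegion, 4);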
12754
12755 if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
12756 int64_t sensorExpTime =
12757 frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
12758 LOGD("setting sensorExpTime %lld", sensorExpTime);
12759 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_EXPOSURE_TIME,
12760 sensorExpTime)) {
12761 rc = BAD_VALUE;
12762 }
12763 }
12764
12765 if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) {
12766 int64_t sensorFrameDuration =
12767 frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
Thierry Strudel3d639192016-09-09 11:52:26 -070012768 sensorFrameDuration = MAX(sensorFrameDuration, minFrameDuration);
12769 if (sensorFrameDuration > gCamCapability[mCameraId]->max_frame_duration)
12770 sensorFrameDuration = gCamCapability[mCameraId]->max_frame_duration;
12771 LOGD("clamp sensorFrameDuration to %lld", sensorFrameDuration);
12772 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_FRAME_DURATION,
12773 sensorFrameDuration)) {
12774 rc = BAD_VALUE;
12775 }
12776 }
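// Worked example of the clamping above (all values hypothetical): a request
// asking for a 60 fps frame duration on streams whose minimum duration is
// ~33.3 ms is raised to the stream minimum, and would additionally be capped
// at the sensor's max_frame_duration.
//
//   int64_t requested        = 16666666LL;               // ~60 fps
//   int64_t minFrameDuration = 33333333LL;               // streams allow only 30 fps
//   int64_t clamped = MAX(requested, minFrameDuration);  // -> 33333333 ns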
12777
12778 if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
12779 int32_t sensorSensitivity = frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
12780 if (sensorSensitivity < gCamCapability[mCameraId]->sensitivity_range.min_sensitivity)
12781 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.min_sensitivity;
12782 if (sensorSensitivity > gCamCapability[mCameraId]->sensitivity_range.max_sensitivity)
12783 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.max_sensitivity;
12784 LOGD("clamp sensorSensitivity to %d", sensorSensitivity);
12785 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_SENSITIVITY,
12786 sensorSensitivity)) {
12787 rc = BAD_VALUE;
12788 }
12789 }
12790
Thierry Strudel9e74aae2016-09-22 17:10:18 -070012791#ifndef USE_HAL_3_3
12792 if (frame_settings.exists(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST)) {
12793 int32_t ispSensitivity =
12794 frame_settings.find(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST).data.i32[0];
12795 if (ispSensitivity <
12796 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity) {
12797 ispSensitivity =
12798 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
12799 LOGD("clamp ispSensitivity to %d", ispSensitivity);
12800 }
12801 if (ispSensitivity >
12802 gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity) {
12803 ispSensitivity =
12804 gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity;
12805 LOGD("clamp ispSensitivity to %d", ispSensitivity);
12806 }
12807 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_ISP_SENSITIVITY,
12808 ispSensitivity)) {
12809 rc = BAD_VALUE;
12810 }
12811 }
12812#endif
12813
Thierry Strudel3d639192016-09-09 11:52:26 -070012814 if (frame_settings.exists(ANDROID_SHADING_MODE)) {
12815 uint8_t shadingMode = frame_settings.find(ANDROID_SHADING_MODE).data.u8[0];
12816 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SHADING_MODE, shadingMode)) {
12817 rc = BAD_VALUE;
12818 }
12819 }
12820
12821 if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
12822 uint8_t fwk_facedetectMode =
12823 frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];
12824
12825 int val = lookupHalName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
12826 fwk_facedetectMode);
12827
12828 if (NAME_NOT_FOUND != val) {
12829 uint8_t facedetectMode = (uint8_t)val;
12830 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_FACEDETECT_MODE,
12831 facedetectMode)) {
12832 rc = BAD_VALUE;
12833 }
12834 }
12835 }
12836
Thierry Strudel54dc9782017-02-15 12:12:10 -080012837 if (frame_settings.exists(QCAMERA3_HISTOGRAM_MODE)) {
Thierry Strudel3d639192016-09-09 11:52:26 -070012838 uint8_t histogramMode =
Thierry Strudel54dc9782017-02-15 12:12:10 -080012839 frame_settings.find(QCAMERA3_HISTOGRAM_MODE).data.u8[0];
Thierry Strudel3d639192016-09-09 11:52:26 -070012840 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
12841 histogramMode)) {
12842 rc = BAD_VALUE;
12843 }
12844 }
12845
12846 if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) {
12847 uint8_t sharpnessMapMode =
12848 frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0];
12849 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,
12850 sharpnessMapMode)) {
12851 rc = BAD_VALUE;
12852 }
12853 }
12854
12855 if (frame_settings.exists(ANDROID_TONEMAP_MODE)) {
12856 uint8_t tonemapMode =
12857 frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0];
12858 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_MODE, tonemapMode)) {
12859 rc = BAD_VALUE;
12860 }
12861 }
12862 /* Tonemap curve channels ch0 = G, ch 1 = B, ch 2 = R */
12863 /*All tonemap channels will have the same number of points*/
12864 if (frame_settings.exists(ANDROID_TONEMAP_CURVE_GREEN) &&
12865 frame_settings.exists(ANDROID_TONEMAP_CURVE_BLUE) &&
12866 frame_settings.exists(ANDROID_TONEMAP_CURVE_RED)) {
12867 cam_rgb_tonemap_curves tonemapCurves;
12868 tonemapCurves.tonemap_points_cnt = frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).count/2;
12869 if (tonemapCurves.tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
12870 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
12871 tonemapCurves.tonemap_points_cnt,
12872 CAM_MAX_TONEMAP_CURVE_SIZE);
12873 tonemapCurves.tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
12874 }
12875
12876 /* ch0 = G*/
12877 size_t point = 0;
12878 cam_tonemap_curve_t tonemapCurveGreen;
12879 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12880 for (size_t j = 0; j < 2; j++) {
12881 tonemapCurveGreen.tonemap_points[i][j] =
12882 frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).data.f[point];
12883 point++;
12884 }
12885 }
12886 tonemapCurves.curves[0] = tonemapCurveGreen;
12887
12888 /* ch 1 = B */
12889 point = 0;
12890 cam_tonemap_curve_t tonemapCurveBlue;
12891 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12892 for (size_t j = 0; j < 2; j++) {
12893 tonemapCurveBlue.tonemap_points[i][j] =
12894 frame_settings.find(ANDROID_TONEMAP_CURVE_BLUE).data.f[point];
12895 point++;
12896 }
12897 }
12898 tonemapCurves.curves[1] = tonemapCurveBlue;
12899
12900 /* ch 2 = R */
12901 point = 0;
12902 cam_tonemap_curve_t tonemapCurveRed;
12903 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12904 for (size_t j = 0; j < 2; j++) {
12905 tonemapCurveRed.tonemap_points[i][j] =
12906 frame_settings.find(ANDROID_TONEMAP_CURVE_RED).data.f[point];
12907 point++;
12908 }
12909 }
12910 tonemapCurves.curves[2] = tonemapCurveRed;
12911
12912 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_CURVES,
12913 tonemapCurves)) {
12914 rc = BAD_VALUE;
12915 }
12916 }
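// Illustrative sketch (not part of the original HAL): each ANDROID_TONEMAP_CURVE_*
// entry is a flat float array of interleaved (Pin, Pout) pairs, which is why the
// point count above is entry.count / 2. "settings" stands in for the request
// CameraMetadata; the three-point curve below is hypothetical.
//
//   float greenCurve[] = {0.0f, 0.0f,  0.5f, 0.7f,  1.0f, 1.0f};   // 3 pairs
//   settings.update(ANDROID_TONEMAP_CURVE_GREEN, greenCurve, 6);
//   // -> tonemap_points_cnt = 6 / 2 = 3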
12917
12918 if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
12919 uint8_t captureIntent = frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
12920 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_CAPTURE_INTENT,
12921 captureIntent)) {
12922 rc = BAD_VALUE;
12923 }
12924 }
12925
12926 if (frame_settings.exists(ANDROID_BLACK_LEVEL_LOCK)) {
12927 uint8_t blackLevelLock = frame_settings.find(ANDROID_BLACK_LEVEL_LOCK).data.u8[0];
12928 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_BLACK_LEVEL_LOCK,
12929 blackLevelLock)) {
12930 rc = BAD_VALUE;
12931 }
12932 }
12933
12934 if (frame_settings.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
12935 uint8_t lensShadingMapMode =
12936 frame_settings.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
12937 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_SHADING_MAP_MODE,
12938 lensShadingMapMode)) {
12939 rc = BAD_VALUE;
12940 }
12941 }
12942
12943 if (frame_settings.exists(ANDROID_CONTROL_AE_REGIONS)) {
12944 cam_area_t roi;
12945 bool reset = true;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080012946 convertFromRegions(roi, frame_settings, ANDROID_CONTROL_AE_REGIONS);
Thierry Strudel3d639192016-09-09 11:52:26 -070012947
12948 // Map coordinate system from active array to sensor output.
12949 mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
12950 roi.rect.height);
12951
12952 if (scalerCropSet) {
12953 reset = resetIfNeededROI(&roi, &scalerCropRegion);
12954 }
12955 if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_ROI, roi)) {
12956 rc = BAD_VALUE;
12957 }
12958 }
12959
12960 if (frame_settings.exists(ANDROID_CONTROL_AF_REGIONS)) {
12961 cam_area_t roi;
12962 bool reset = true;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080012963 convertFromRegions(roi, frame_settings, ANDROID_CONTROL_AF_REGIONS);
Thierry Strudel3d639192016-09-09 11:52:26 -070012964
12965 // Map coordinate system from active array to sensor output.
12966 mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
12967 roi.rect.height);
12968
12969 if (scalerCropSet) {
12970 reset = resetIfNeededROI(&roi, &scalerCropRegion);
12971 }
12972 if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_ROI, roi)) {
12973 rc = BAD_VALUE;
12974 }
12975 }
12976
12977 // CDS for non-HFR non-video mode
12978 if ((mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
12979 !(m_bIsVideo) && frame_settings.exists(QCAMERA3_CDS_MODE)) {
12980 int32_t *fwk_cds = frame_settings.find(QCAMERA3_CDS_MODE).data.i32;
12981 if ((CAM_CDS_MODE_MAX <= *fwk_cds) || (0 > *fwk_cds)) {
12982 LOGE("Invalid CDS mode %d!", *fwk_cds);
12983 } else {
12984 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12985 CAM_INTF_PARM_CDS_MODE, *fwk_cds)) {
12986 rc = BAD_VALUE;
12987 }
12988 }
12989 }
12990
Thierry Strudel04e026f2016-10-10 11:27:36 -070012991 // Video HDR
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012992 cam_video_hdr_mode_t vhdr = CAM_VIDEO_HDR_MODE_OFF;
Thierry Strudel04e026f2016-10-10 11:27:36 -070012993 if (frame_settings.exists(QCAMERA3_VIDEO_HDR_MODE)) {
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012994 vhdr = (cam_video_hdr_mode_t) frame_settings.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
12995 }
12996 if (m_bVideoHdrEnabled)
12997 vhdr = CAM_VIDEO_HDR_MODE_ON;
12998
Thierry Strudel54dc9782017-02-15 12:12:10 -080012999 int8_t curr_hdr_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) != 0);
13000
13001 if(vhdr != curr_hdr_state)
13002 LOGH("PROFILE_SET_HDR_MODE %d" ,vhdr);
13003
Mansoor Aftab93a66e52017-01-26 14:58:25 -080013004 rc = setVideoHdrMode(mParameters, vhdr);
13005 if (rc != NO_ERROR) {
13006        LOGE("setVideoHdrMode failed");
Thierry Strudel04e026f2016-10-10 11:27:36 -070013007 }
13008
13009 //IR
13010 if(frame_settings.exists(QCAMERA3_IR_MODE)) {
13011 cam_ir_mode_type_t fwk_ir = (cam_ir_mode_type_t)
13012 frame_settings.find(QCAMERA3_IR_MODE).data.i32[0];
Thierry Strudel54dc9782017-02-15 12:12:10 -080013013 uint8_t curr_ir_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_IR) != 0);
13014 uint8_t isIRon = 0;
13015
13016        isIRon = (fwk_ir > 0) ? 1 : 0;
Thierry Strudel04e026f2016-10-10 11:27:36 -070013017 if ((CAM_IR_MODE_MAX <= fwk_ir) || (0 > fwk_ir)) {
13018 LOGE("Invalid IR mode %d!", fwk_ir);
13019 } else {
Thierry Strudel54dc9782017-02-15 12:12:10 -080013020 if(isIRon != curr_ir_state )
13021 LOGH("PROFILE_SET_IR_MODE %d" ,isIRon);
13022
Thierry Strudel04e026f2016-10-10 11:27:36 -070013023 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13024 CAM_INTF_META_IR_MODE, fwk_ir)) {
13025 rc = BAD_VALUE;
13026 }
13027 }
13028 }
13029
Thierry Strudel54dc9782017-02-15 12:12:10 -080013030 //Binning Correction Mode
13031 if(frame_settings.exists(QCAMERA3_BINNING_CORRECTION_MODE)) {
13032 cam_binning_correction_mode_t fwk_binning_correction = (cam_binning_correction_mode_t)
13033 frame_settings.find(QCAMERA3_BINNING_CORRECTION_MODE).data.i32[0];
13034 if ((CAM_BINNING_CORRECTION_MODE_MAX <= fwk_binning_correction)
13035 || (0 > fwk_binning_correction)) {
13036 LOGE("Invalid binning correction mode %d!", fwk_binning_correction);
13037 } else {
13038 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13039 CAM_INTF_META_BINNING_CORRECTION_MODE, fwk_binning_correction)) {
13040 rc = BAD_VALUE;
13041 }
13042 }
13043 }
13044
Thierry Strudel269c81a2016-10-12 12:13:59 -070013045 if (frame_settings.exists(QCAMERA3_AEC_CONVERGENCE_SPEED)) {
13046 float aec_speed;
13047 aec_speed = frame_settings.find(QCAMERA3_AEC_CONVERGENCE_SPEED).data.f[0];
13048 LOGD("AEC Speed :%f", aec_speed);
13049 if ( aec_speed < 0 ) {
13050            LOGE("Invalid AEC convergence speed %f!", aec_speed);
13051 } else {
13052 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_CONVERGENCE_SPEED,
13053 aec_speed)) {
13054 rc = BAD_VALUE;
13055 }
13056 }
13057 }
13058
13059 if (frame_settings.exists(QCAMERA3_AWB_CONVERGENCE_SPEED)) {
13060 float awb_speed;
13061 awb_speed = frame_settings.find(QCAMERA3_AWB_CONVERGENCE_SPEED).data.f[0];
13062 LOGD("AWB Speed :%f", awb_speed);
13063 if ( awb_speed < 0 ) {
13064            LOGE("Invalid AWB convergence speed %f!", awb_speed);
13065 } else {
13066 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AWB_CONVERGENCE_SPEED,
13067 awb_speed)) {
13068 rc = BAD_VALUE;
13069 }
13070 }
13071 }
13072
Thierry Strudel3d639192016-09-09 11:52:26 -070013073 // TNR
13074 if (frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_ENABLE) &&
13075 frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE)) {
13076 uint8_t b_TnrRequested = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -080013077 uint8_t curr_tnr_state = ((mCurrFeatureState & CAM_QTI_FEATURE_SW_TNR) != 0);
Thierry Strudel3d639192016-09-09 11:52:26 -070013078 cam_denoise_param_t tnr;
13079 tnr.denoise_enable = frame_settings.find(QCAMERA3_TEMPORAL_DENOISE_ENABLE).data.u8[0];
13080 tnr.process_plates =
13081 (cam_denoise_process_type_t)frame_settings.find(
13082 QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE).data.i32[0];
13083 b_TnrRequested = tnr.denoise_enable;
Thierry Strudel54dc9782017-02-15 12:12:10 -080013084
13085 if(b_TnrRequested != curr_tnr_state)
13086 LOGH("PROFILE_SET_TNR_MODE %d" ,b_TnrRequested);
13087
Thierry Strudel3d639192016-09-09 11:52:26 -070013088 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_TEMPORAL_DENOISE, tnr)) {
13089 rc = BAD_VALUE;
13090 }
13091 }
13092
Thierry Strudel54dc9782017-02-15 12:12:10 -080013093 if (frame_settings.exists(QCAMERA3_EXPOSURE_METER)) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -070013094 int32_t* exposure_metering_mode =
Thierry Strudel54dc9782017-02-15 12:12:10 -080013095 frame_settings.find(QCAMERA3_EXPOSURE_METER).data.i32;
Thierry Strudel295a0ca2016-11-03 18:38:47 -070013096 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_ALGO_TYPE,
13097 *exposure_metering_mode)) {
13098 rc = BAD_VALUE;
13099 }
13100 }
13101
Thierry Strudel3d639192016-09-09 11:52:26 -070013102 if (frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_MODE)) {
13103 int32_t fwk_testPatternMode =
13104 frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_MODE).data.i32[0];
13105 int testPatternMode = lookupHalName(TEST_PATTERN_MAP,
13106 METADATA_MAP_SIZE(TEST_PATTERN_MAP), fwk_testPatternMode);
13107
13108 if (NAME_NOT_FOUND != testPatternMode) {
13109 cam_test_pattern_data_t testPatternData;
13110 memset(&testPatternData, 0, sizeof(testPatternData));
13111 testPatternData.mode = (cam_test_pattern_mode_t)testPatternMode;
13112 if (testPatternMode == CAM_TEST_PATTERN_SOLID_COLOR &&
13113 frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_DATA)) {
13114 int32_t *fwk_testPatternData =
13115 frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_DATA).data.i32;
13116 testPatternData.r = fwk_testPatternData[0];
13117 testPatternData.b = fwk_testPatternData[3];
13118 switch (gCamCapability[mCameraId]->color_arrangement) {
13119 case CAM_FILTER_ARRANGEMENT_RGGB:
13120 case CAM_FILTER_ARRANGEMENT_GRBG:
13121 testPatternData.gr = fwk_testPatternData[1];
13122 testPatternData.gb = fwk_testPatternData[2];
13123 break;
13124 case CAM_FILTER_ARRANGEMENT_GBRG:
13125 case CAM_FILTER_ARRANGEMENT_BGGR:
13126 testPatternData.gr = fwk_testPatternData[2];
13127 testPatternData.gb = fwk_testPatternData[1];
13128 break;
13129 default:
13130 LOGE("color arrangement %d is not supported",
13131 gCamCapability[mCameraId]->color_arrangement);
13132 break;
13133 }
13134 }
13135 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TEST_PATTERN_DATA,
13136 testPatternData)) {
13137 rc = BAD_VALUE;
13138 }
13139 } else {
13140 LOGE("Invalid framework sensor test pattern mode %d",
13141 fwk_testPatternMode);
13142 }
13143 }
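// Illustrative sketch (not part of the original HAL): for SOLID_COLOR the
// framework supplies four per-channel values; the code above always takes
// index 0 as R and index 3 as B, and swaps indices 1/2 between Gr and Gb
// depending on the Bayer arrangement. "settings" stands in for the request
// CameraMetadata; the channel values are hypothetical.
//
//   int32_t mode = ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR;
//   int32_t solidColor[4] = {1023, 0, 0, 1023};   // R and B full scale, G off
//   settings.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &mode, 1);
//   settings.update(ANDROID_SENSOR_TEST_PATTERN_DATA, solidColor, 4);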
13144
13145 if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
13146 size_t count = 0;
13147 camera_metadata_entry_t gps_coords = frame_settings.find(ANDROID_JPEG_GPS_COORDINATES);
13148 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_COORDINATES,
13149 gps_coords.data.d, gps_coords.count, count);
13150 if (gps_coords.count != count) {
13151 rc = BAD_VALUE;
13152 }
13153 }
13154
13155 if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
13156 char gps_methods[GPS_PROCESSING_METHOD_SIZE];
13157 size_t count = 0;
13158 const char *gps_methods_src = (const char *)
13159 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8;
13160 memset(gps_methods, '\0', sizeof(gps_methods));
13161 strlcpy(gps_methods, gps_methods_src, sizeof(gps_methods));
13162 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_PROC_METHODS,
13163 gps_methods, GPS_PROCESSING_METHOD_SIZE, count);
13164 if (GPS_PROCESSING_METHOD_SIZE != count) {
13165 rc = BAD_VALUE;
13166 }
13167 }
13168
13169 if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
13170 int64_t gps_timestamp = frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
13171 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_TIMESTAMP,
13172 gps_timestamp)) {
13173 rc = BAD_VALUE;
13174 }
13175 }
13176
13177 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
13178 int32_t orientation = frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
13179 cam_rotation_info_t rotation_info;
13180 if (orientation == 0) {
13181 rotation_info.rotation = ROTATE_0;
13182 } else if (orientation == 90) {
13183 rotation_info.rotation = ROTATE_90;
13184 } else if (orientation == 180) {
13185 rotation_info.rotation = ROTATE_180;
13186 } else if (orientation == 270) {
13187 rotation_info.rotation = ROTATE_270;
13188 }
Shuzhen Wang6ec8eac2016-07-28 23:09:23 -070013189 rotation_info.device_rotation = ROTATE_0;
Thierry Strudel3d639192016-09-09 11:52:26 -070013190 rotation_info.streamId = snapshotStreamId;
13191 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_ORIENTATION, orientation);
13192 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ROTATION, rotation_info)) {
13193 rc = BAD_VALUE;
13194 }
13195 }
13196
13197 if (frame_settings.exists(ANDROID_JPEG_QUALITY)) {
13198 uint32_t quality = (uint32_t) frame_settings.find(ANDROID_JPEG_QUALITY).data.u8[0];
13199 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_QUALITY, quality)) {
13200 rc = BAD_VALUE;
13201 }
13202 }
13203
13204 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY)) {
13205 uint32_t thumb_quality = (uint32_t)
13206 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8[0];
13207 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_QUALITY,
13208 thumb_quality)) {
13209 rc = BAD_VALUE;
13210 }
13211 }
13212
13213 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
13214 cam_dimension_t dim;
13215 dim.width = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
13216 dim.height = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
13217 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_SIZE, dim)) {
13218 rc = BAD_VALUE;
13219 }
13220 }
13221
13222 // Internal metadata
13223 if (frame_settings.exists(QCAMERA3_PRIVATEDATA_REPROCESS)) {
13224 size_t count = 0;
13225 camera_metadata_entry_t privatedata = frame_settings.find(QCAMERA3_PRIVATEDATA_REPROCESS);
13226 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_PRIVATE_DATA,
13227 privatedata.data.i32, privatedata.count, count);
13228 if (privatedata.count != count) {
13229 rc = BAD_VALUE;
13230 }
13231 }
13232
Thierry Strudel295a0ca2016-11-03 18:38:47 -070013233 // ISO/Exposure Priority
13234 if (frame_settings.exists(QCAMERA3_USE_ISO_EXP_PRIORITY) &&
13235 frame_settings.exists(QCAMERA3_SELECT_PRIORITY)) {
13236 cam_priority_mode_t mode =
13237 (cam_priority_mode_t)frame_settings.find(QCAMERA3_SELECT_PRIORITY).data.i32[0];
13238 if((CAM_ISO_PRIORITY == mode) || (CAM_EXP_PRIORITY == mode)) {
13239 cam_intf_parm_manual_3a_t use_iso_exp_pty;
13240 use_iso_exp_pty.previewOnly = FALSE;
13241 uint64_t* ptr = (uint64_t*)frame_settings.find(QCAMERA3_USE_ISO_EXP_PRIORITY).data.i64;
13242 use_iso_exp_pty.value = *ptr;
13243
13244 if(CAM_ISO_PRIORITY == mode) {
13245 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ISO,
13246 use_iso_exp_pty)) {
13247 rc = BAD_VALUE;
13248 }
13249 }
13250 else {
13251 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_TIME,
13252 use_iso_exp_pty)) {
13253 rc = BAD_VALUE;
13254 }
13255 }
Thierry Strudel54dc9782017-02-15 12:12:10 -080013256
13257 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ZSL_MODE, 1)) {
13258 rc = BAD_VALUE;
13259 }
13260 }
13261 } else {
13262 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ZSL_MODE, 0)) {
13263 rc = BAD_VALUE;
Thierry Strudel295a0ca2016-11-03 18:38:47 -070013264 }
13265 }
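// Illustrative sketch (not part of the original HAL): selecting ISO priority
// with a fixed ISO through the two vendor tags consumed above. The numeric
// priority value is an assumption standing in for CAM_ISO_PRIORITY, and the
// ISO value is hypothetical; "settings" stands in for the request CameraMetadata.
//
//   int32_t priority = 0;            // assumed to map to CAM_ISO_PRIORITY
//   int64_t isoValue = 800;
//   settings.update(QCAMERA3_SELECT_PRIORITY, &priority, 1);
//   settings.update(QCAMERA3_USE_ISO_EXP_PRIORITY, &isoValue, 1);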
13266
13267 // Saturation
13268 if (frame_settings.exists(QCAMERA3_USE_SATURATION)) {
13269 int32_t* use_saturation =
13270 frame_settings.find(QCAMERA3_USE_SATURATION).data.i32;
13271 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_SATURATION, *use_saturation)) {
13272 rc = BAD_VALUE;
13273 }
13274 }
13275
Thierry Strudel3d639192016-09-09 11:52:26 -070013276 // EV step
13277 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EV_STEP,
13278 gCamCapability[mCameraId]->exp_compensation_step)) {
13279 rc = BAD_VALUE;
13280 }
13281
13282 // CDS info
13283 if (frame_settings.exists(QCAMERA3_CDS_INFO)) {
13284 cam_cds_data_t *cdsData = (cam_cds_data_t *)
13285 frame_settings.find(QCAMERA3_CDS_INFO).data.u8;
13286
13287 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13288 CAM_INTF_META_CDS_DATA, *cdsData)) {
13289 rc = BAD_VALUE;
13290 }
13291 }
13292
Shuzhen Wang19463d72016-03-08 11:09:52 -080013293 // Hybrid AE
13294 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
13295 uint8_t *hybrid_ae = (uint8_t *)
13296 frame_settings.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8;
Shuzhen Wang77b049a2017-08-30 12:24:36 -070013297 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_HYBRID_AE, *hybrid_ae)) {
13298 rc = BAD_VALUE;
13299 }
Shuzhen Wang19463d72016-03-08 11:09:52 -080013300 }
13301
Shuzhen Wang14415f52016-11-16 18:26:18 -080013302 // Histogram
13303 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE)) {
13304 uint8_t histogramMode =
13305 frame_settings.find(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE).data.u8[0];
13306 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
13307 histogramMode)) {
13308 rc = BAD_VALUE;
13309 }
13310 }
13311
13312 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS)) {
13313 int32_t histogramBins =
13314 frame_settings.find(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS).data.i32[0];
13315 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_BINS,
13316 histogramBins)) {
13317 rc = BAD_VALUE;
13318 }
13319 }
13320
Shuzhen Wangcc386c52017-03-29 09:28:08 -070013321 // Tracking AF
13322 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER)) {
13323 uint8_t trackingAfTrigger =
13324 frame_settings.find(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER).data.u8[0];
13325 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TRACKING_AF_TRIGGER,
13326 trackingAfTrigger)) {
13327 rc = BAD_VALUE;
13328 }
13329 }
13330
Chien-Yu Chendbd619b2017-08-04 17:50:11 -070013331 // Makernote
13332 camera_metadata_entry entry = frame_settings.find(NEXUS_EXPERIMENTAL_2017_EXIF_MAKERNOTE);
13333 if (entry.count != 0) {
13334 if (entry.count <= MAX_MAKERNOTE_LENGTH) {
13335 cam_makernote_t makernote;
13336 makernote.length = entry.count;
13337 memcpy(makernote.data, entry.data.u8, makernote.length);
13338 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_MAKERNOTE, makernote)) {
13339 rc = BAD_VALUE;
13340 }
13341 } else {
13342 ALOGE("%s: Makernote length %u is larger than %d", __FUNCTION__, entry.count,
13343 MAX_MAKERNOTE_LENGTH);
13344 rc = BAD_VALUE;
13345 }
13346 }
13347
Thierry Strudel3d639192016-09-09 11:52:26 -070013348 return rc;
13349}
13350
13351/*===========================================================================
13352 * FUNCTION : captureResultCb
13353 *
13354 * DESCRIPTION: Callback handler for all channels (streams, as well as metadata)
13355 *
13356 * PARAMETERS :
13357 * @metadata : metadata information from mm-camera-interface
13358 * @buffer : actual gralloc buffer to be returned to frameworks. NULL if metadata.
13359 * @userdata: userdata
13360 *
13361 * RETURN : NONE
13362 *==========================================================================*/
13363void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata,
13364 camera3_stream_buffer_t *buffer,
13365 uint32_t frame_number, bool isInputBuffer, void *userdata)
13366{
13367 QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
13368 if (hw == NULL) {
13369 LOGE("Invalid hw %p", hw);
13370 return;
13371 }
13372
13373 hw->captureResultCb(metadata, buffer, frame_number, isInputBuffer);
13374 return;
13375}
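// Note on the trampoline pattern used above (member names in the sketch are
// hypothetical): the channel is handed captureResultCb together with "this" as
// opaque userdata at construction time (see addOfflineReprocChannel below),
// invokes the static shim from its C-style callback slot, and the shim casts
// userdata back to the QCamera3HardwareInterface instance before forwarding to
// the member overload.
//
//   // hypothetical call site inside a channel:
//   //   mResultCb(metadata, buffer, frameNumber, false /*isInputBuffer*/,
//   //             mUserData /* the QCamera3HardwareInterface* */);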
13376
Thierry Strudelc2ee3302016-11-17 12:33:12 -080013377/*===========================================================================
13378 * FUNCTION : setBufferErrorStatus
13379 *
13380 * DESCRIPTION: Callback handler for channels to report any buffer errors
13381 *
13382 * PARAMETERS :
13383 * @ch : Channel on which buffer error is reported from
13384 * @frame_number : frame number on which buffer error is reported on
13385 * @buffer_status : buffer error status
13386 * @userdata: userdata
13387 *
13388 * RETURN : NONE
13389 *==========================================================================*/
13390void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
13391 uint32_t frame_number, camera3_buffer_status_t err, void *userdata)
13392{
13393 QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
13394 if (hw == NULL) {
13395 LOGE("Invalid hw %p", hw);
13396 return;
13397 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013398
Thierry Strudelc2ee3302016-11-17 12:33:12 -080013399 hw->setBufferErrorStatus(ch, frame_number, err);
13400 return;
13401}
13402
13403void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
13404 uint32_t frameNumber, camera3_buffer_status_t err)
13405{
13406 LOGD("channel: %p, frame# %d, buf err: %d", ch, frameNumber, err);
13407 pthread_mutex_lock(&mMutex);
13408
13409 for (auto& req : mPendingBuffersMap.mPendingBuffersInRequest) {
13410 if (req.frame_number != frameNumber)
13411 continue;
13412 for (auto& k : req.mPendingBufferList) {
13413 if(k.stream->priv == ch) {
13414 k.bufStatus = CAMERA3_BUFFER_STATUS_ERROR;
13415 }
13416 }
13417 }
13418
13419 pthread_mutex_unlock(&mMutex);
13420 return;
13421}
Thierry Strudel3d639192016-09-09 11:52:26 -070013422/*===========================================================================
13423 * FUNCTION : initialize
13424 *
13425 * DESCRIPTION: Pass framework callback pointers to HAL
13426 *
13427 * PARAMETERS :
13428 *
13429 *
13430 * RETURN : Success : 0
13431 * Failure: -ENODEV
13432 *==========================================================================*/
13433
13434int QCamera3HardwareInterface::initialize(const struct camera3_device *device,
13435 const camera3_callback_ops_t *callback_ops)
13436{
13437 LOGD("E");
13438 QCamera3HardwareInterface *hw =
13439 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13440 if (!hw) {
13441 LOGE("NULL camera device");
13442 return -ENODEV;
13443 }
13444
13445 int rc = hw->initialize(callback_ops);
13446 LOGD("X");
13447 return rc;
13448}
13449
13450/*===========================================================================
13451 * FUNCTION : configure_streams
13452 *
13453 * DESCRIPTION:
13454 *
13455 * PARAMETERS :
13456 *
13457 *
13458 * RETURN : Success: 0
13459 * Failure: -EINVAL (if stream configuration is invalid)
13460 * -ENODEV (fatal error)
13461 *==========================================================================*/
13462
13463int QCamera3HardwareInterface::configure_streams(
13464 const struct camera3_device *device,
13465 camera3_stream_configuration_t *stream_list)
13466{
13467 LOGD("E");
13468 QCamera3HardwareInterface *hw =
13469 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13470 if (!hw) {
13471 LOGE("NULL camera device");
13472 return -ENODEV;
13473 }
13474 int rc = hw->configureStreams(stream_list);
13475 LOGD("X");
13476 return rc;
13477}
13478
13479/*===========================================================================
13480 * FUNCTION : construct_default_request_settings
13481 *
13482 * DESCRIPTION: Configure a settings buffer to meet the required use case
13483 *
13484 * PARAMETERS :
13485 *
13486 *
13487 * RETURN : Success: Return valid metadata
13488 * Failure: Return NULL
13489 *==========================================================================*/
13490const camera_metadata_t* QCamera3HardwareInterface::
13491 construct_default_request_settings(const struct camera3_device *device,
13492 int type)
13493{
13494
13495 LOGD("E");
13496 camera_metadata_t* fwk_metadata = NULL;
13497 QCamera3HardwareInterface *hw =
13498 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13499 if (!hw) {
13500 LOGE("NULL camera device");
13501 return NULL;
13502 }
13503
13504 fwk_metadata = hw->translateCapabilityToMetadata(type);
13505
13506 LOGD("X");
13507 return fwk_metadata;
13508}
13509
13510/*===========================================================================
13511 * FUNCTION : process_capture_request
13512 *
13513 * DESCRIPTION:
13514 *
13515 * PARAMETERS :
13516 *
13517 *
13518 * RETURN :
13519 *==========================================================================*/
13520int QCamera3HardwareInterface::process_capture_request(
13521 const struct camera3_device *device,
13522 camera3_capture_request_t *request)
13523{
13524 LOGD("E");
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013525 CAMSCOPE_UPDATE_FLAGS(CAMSCOPE_SECTION_HAL, kpi_camscope_flags);
Thierry Strudel3d639192016-09-09 11:52:26 -070013526 QCamera3HardwareInterface *hw =
13527 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13528 if (!hw) {
13529 LOGE("NULL camera device");
13530 return -EINVAL;
13531 }
13532
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013533 int rc = hw->orchestrateRequest(request);
Thierry Strudel3d639192016-09-09 11:52:26 -070013534 LOGD("X");
13535 return rc;
13536}
13537
13538/*===========================================================================
13539 * FUNCTION : dump
13540 *
13541 * DESCRIPTION:
13542 *
13543 * PARAMETERS :
13544 *
13545 *
13546 * RETURN :
13547 *==========================================================================*/
13548
13549void QCamera3HardwareInterface::dump(
13550 const struct camera3_device *device, int fd)
13551{
13552 /* Log level property is read when "adb shell dumpsys media.camera" is
13553 called so that the log level can be controlled without restarting
13554 the media server */
13555 getLogLevel();
13556
13557 LOGD("E");
13558 QCamera3HardwareInterface *hw =
13559 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13560 if (!hw) {
13561 LOGE("NULL camera device");
13562 return;
13563 }
13564
13565 hw->dump(fd);
13566 LOGD("X");
13567 return;
13568}
13569
13570/*===========================================================================
13571 * FUNCTION : flush
13572 *
13573 * DESCRIPTION:
13574 *
13575 * PARAMETERS :
13576 *
13577 *
13578 * RETURN :
13579 *==========================================================================*/
13580
13581int QCamera3HardwareInterface::flush(
13582 const struct camera3_device *device)
13583{
13584 int rc;
13585 LOGD("E");
13586 QCamera3HardwareInterface *hw =
13587 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13588 if (!hw) {
13589 LOGE("NULL camera device");
13590 return -EINVAL;
13591 }
13592
13593 pthread_mutex_lock(&hw->mMutex);
13594 // Validate current state
13595 switch (hw->mState) {
13596 case STARTED:
13597 /* valid state */
13598 break;
13599
13600 case ERROR:
13601 pthread_mutex_unlock(&hw->mMutex);
13602 hw->handleCameraDeviceError();
13603 return -ENODEV;
13604
13605 default:
13606 LOGI("Flush returned during state %d", hw->mState);
13607 pthread_mutex_unlock(&hw->mMutex);
13608 return 0;
13609 }
13610 pthread_mutex_unlock(&hw->mMutex);
13611
13612 rc = hw->flush(true /* restart channels */ );
13613 LOGD("X");
13614 return rc;
13615}
13616
13617/*===========================================================================
13618 * FUNCTION : close_camera_device
13619 *
13620 * DESCRIPTION:
13621 *
13622 * PARAMETERS :
13623 *
13624 *
13625 * RETURN :
13626 *==========================================================================*/
13627int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device)
13628{
13629 int ret = NO_ERROR;
13630 QCamera3HardwareInterface *hw =
13631 reinterpret_cast<QCamera3HardwareInterface *>(
13632 reinterpret_cast<camera3_device_t *>(device)->priv);
13633 if (!hw) {
13634 LOGE("NULL camera device");
13635 return BAD_VALUE;
13636 }
13637
13638 LOGI("[KPI Perf]: E camera id %d", hw->mCameraId);
13639 delete hw;
13640 LOGI("[KPI Perf]: X");
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013641 CAMSCOPE_DESTROY(CAMSCOPE_SECTION_HAL);
Thierry Strudel3d639192016-09-09 11:52:26 -070013642 return ret;
13643}
13644
13645/*===========================================================================
13646 * FUNCTION : getWaveletDenoiseProcessPlate
13647 *
13648 * DESCRIPTION: query wavelet denoise process plate
13649 *
13650 * PARAMETERS : None
13651 *
13652 * RETURN : WNR process plate value
13653 *==========================================================================*/
13654cam_denoise_process_type_t QCamera3HardwareInterface::getWaveletDenoiseProcessPlate()
13655{
13656 char prop[PROPERTY_VALUE_MAX];
13657 memset(prop, 0, sizeof(prop));
13658 property_get("persist.denoise.process.plates", prop, "0");
13659 int processPlate = atoi(prop);
13660 switch(processPlate) {
13661 case 0:
13662 return CAM_WAVELET_DENOISE_YCBCR_PLANE;
13663 case 1:
13664 return CAM_WAVELET_DENOISE_CBCR_ONLY;
13665 case 2:
13666 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13667 case 3:
13668 return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
13669 default:
13670 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13671 }
13672}
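// Usage note (illustrative, not from the original HAL): the switch above lets
// the WNR plate be changed at runtime through the persist property, e.g. setting
// persist.denoise.process.plates to 2 selects CAM_WAVELET_DENOISE_STREAMLINE_YCBCR,
// while any unrecognized value falls back to the same streamlined YCbCr default.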
13673
13674
13675/*===========================================================================
13676 * FUNCTION : getTemporalDenoiseProcessPlate
13677 *
13678 * DESCRIPTION: query temporal denoise process plate
13679 *
13680 * PARAMETERS : None
13681 *
13682 * RETURN : TNR process plate value
13683 *==========================================================================*/
13684cam_denoise_process_type_t QCamera3HardwareInterface::getTemporalDenoiseProcessPlate()
13685{
13686 char prop[PROPERTY_VALUE_MAX];
13687 memset(prop, 0, sizeof(prop));
13688 property_get("persist.tnr.process.plates", prop, "0");
13689 int processPlate = atoi(prop);
13690 switch(processPlate) {
13691 case 0:
13692 return CAM_WAVELET_DENOISE_YCBCR_PLANE;
13693 case 1:
13694 return CAM_WAVELET_DENOISE_CBCR_ONLY;
13695 case 2:
13696 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13697 case 3:
13698 return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
13699 default:
13700 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13701 }
13702}
13703
13704
13705/*===========================================================================
13706 * FUNCTION : extractSceneMode
13707 *
13708 * DESCRIPTION: Extract scene mode from frameworks set metadata
13709 *
13710 * PARAMETERS :
13711 * @frame_settings: CameraMetadata reference
13712 * @metaMode: ANDROID_CONTROL_MODE
13713 * @hal_metadata: hal metadata structure
13714 *
13715 * RETURN : int32_t type of status
13716 *==========================================================================*/
13717int32_t QCamera3HardwareInterface::extractSceneMode(
13718 const CameraMetadata &frame_settings, uint8_t metaMode,
13719 metadata_buffer_t *hal_metadata)
13720{
13721 int32_t rc = NO_ERROR;
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013722 uint8_t sceneMode = CAM_SCENE_MODE_OFF;
13723
13724 if (ANDROID_CONTROL_MODE_OFF_KEEP_STATE == metaMode) {
13725 LOGD("Ignoring control mode OFF_KEEP_STATE");
13726 return NO_ERROR;
13727 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013728
13729 if (metaMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
13730 camera_metadata_ro_entry entry =
13731 frame_settings.find(ANDROID_CONTROL_SCENE_MODE);
13732 if (0 == entry.count)
13733 return rc;
13734
13735 uint8_t fwk_sceneMode = entry.data.u8[0];
13736
13737 int val = lookupHalName(SCENE_MODES_MAP,
13738 sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
13739 fwk_sceneMode);
13740 if (NAME_NOT_FOUND != val) {
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013741 sceneMode = (uint8_t)val;
Thierry Strudel3d639192016-09-09 11:52:26 -070013742 LOGD("sceneMode: %d", sceneMode);
Thierry Strudel3d639192016-09-09 11:52:26 -070013743 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013744 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013745
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013746 if ((sceneMode == CAM_SCENE_MODE_HDR) || m_bSensorHDREnabled) {
13747 rc = setSensorHDR(hal_metadata, (sceneMode == CAM_SCENE_MODE_HDR));
13748 }
13749
13750 if ((rc == NO_ERROR) && !m_bSensorHDREnabled) {
13751 if (sceneMode == ANDROID_CONTROL_SCENE_MODE_HDR) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013752 cam_hdr_param_t hdr_params;
13753 hdr_params.hdr_enable = 1;
13754 hdr_params.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13755 hdr_params.hdr_need_1x = false;
13756 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13757 CAM_INTF_PARM_HAL_BRACKETING_HDR, hdr_params)) {
13758 rc = BAD_VALUE;
13759 }
13760 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013761
Thierry Strudel3d639192016-09-09 11:52:26 -070013762 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13763 CAM_INTF_PARM_BESTSHOT_MODE, sceneMode)) {
13764 rc = BAD_VALUE;
13765 }
13766 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013767
13768 if (mForceHdrSnapshot) {
13769 cam_hdr_param_t hdr_params;
13770 hdr_params.hdr_enable = 1;
13771 hdr_params.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13772 hdr_params.hdr_need_1x = false;
13773 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13774 CAM_INTF_PARM_HAL_BRACKETING_HDR, hdr_params)) {
13775 rc = BAD_VALUE;
13776 }
13777 }
13778
Thierry Strudel3d639192016-09-09 11:52:26 -070013779 return rc;
13780}
13781
13782/*===========================================================================
Thierry Strudel04e026f2016-10-10 11:27:36 -070013783 * FUNCTION : setVideoHdrMode
13784 *
13785 * DESCRIPTION: Set Video HDR mode from frameworks set metadata
13786 *
13787 * PARAMETERS :
13788 * @hal_metadata: hal metadata structure
13789 * @vhdr: video HDR mode (QCAMERA3_VIDEO_HDR_MODE) requested by the framework
13790 *
13791 * RETURN : int32_t type of status
13792 *==========================================================================*/
13793int32_t QCamera3HardwareInterface::setVideoHdrMode(
13794 metadata_buffer_t *hal_metadata, cam_video_hdr_mode_t vhdr)
13795{
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013796 if ( (vhdr >= CAM_VIDEO_HDR_MODE_OFF) && (vhdr < CAM_VIDEO_HDR_MODE_MAX)) {
13797 return setSensorHDR(hal_metadata, (vhdr == CAM_VIDEO_HDR_MODE_ON), true);
13798 }
13799
13800 LOGE("Invalid Video HDR mode %d!", vhdr);
13801 return BAD_VALUE;
13802}
13803
13804/*===========================================================================
13805 * FUNCTION : setSensorHDR
13806 *
13807 * DESCRIPTION: Enable/disable sensor HDR.
13808 *
13809 * PARAMETERS :
13810 * @hal_metadata: hal metadata structure
13811 * @enable: boolean whether to enable/disable sensor HDR
13812 *
13813 * RETURN : int32_t type of status
13814 *==========================================================================*/
13815int32_t QCamera3HardwareInterface::setSensorHDR(
13816 metadata_buffer_t *hal_metadata, bool enable, bool isVideoHdrEnable)
13817{
Thierry Strudel04e026f2016-10-10 11:27:36 -070013818 int32_t rc = NO_ERROR;
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013819 cam_sensor_hdr_type_t sensor_hdr = CAM_SENSOR_HDR_OFF;
13820
13821 if (enable) {
13822 char sensor_hdr_prop[PROPERTY_VALUE_MAX];
13823 memset(sensor_hdr_prop, 0, sizeof(sensor_hdr_prop));
13824 #ifdef _LE_CAMERA_
13825 //Default to staggered HDR for IOT
13826 property_get("persist.camera.sensor.hdr", sensor_hdr_prop, "3");
13827 #else
13828 property_get("persist.camera.sensor.hdr", sensor_hdr_prop, "0");
13829 #endif
13830 sensor_hdr = (cam_sensor_hdr_type_t) atoi(sensor_hdr_prop);
13831 }
13832
13833 bool isSupported = false;
13834 switch (sensor_hdr) {
13835 case CAM_SENSOR_HDR_IN_SENSOR:
13836 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13837 CAM_QCOM_FEATURE_SENSOR_HDR) {
13838 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070013839 LOGD("Setting HDR mode In Sensor");
Thierry Strudel04e026f2016-10-10 11:27:36 -070013840 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013841 break;
13842 case CAM_SENSOR_HDR_ZIGZAG:
13843 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13844 CAM_QCOM_FEATURE_ZIGZAG_HDR) {
13845 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070013846 LOGD("Setting HDR mode Zigzag");
Thierry Strudel04e026f2016-10-10 11:27:36 -070013847 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013848 break;
13849 case CAM_SENSOR_HDR_STAGGERED:
13850 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13851 CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) {
13852 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070013853 LOGD("Setting HDR mode Staggered");
Thierry Strudel04e026f2016-10-10 11:27:36 -070013854 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013855 break;
13856 case CAM_SENSOR_HDR_OFF:
13857 isSupported = true;
13858 LOGD("Turning off sensor HDR");
13859 break;
13860 default:
13861 LOGE("HDR mode %d not supported", sensor_hdr);
13862 rc = BAD_VALUE;
13863 break;
13864 }
13865
13866 if(isSupported) {
13867 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13868 CAM_INTF_PARM_SENSOR_HDR, sensor_hdr)) {
13869 rc = BAD_VALUE;
13870 } else {
13871 if(!isVideoHdrEnable)
13872 m_bSensorHDREnabled = (sensor_hdr != CAM_SENSOR_HDR_OFF);
Thierry Strudel04e026f2016-10-10 11:27:36 -070013873 }
13874 }
13875 return rc;
13876}
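// Usage note (illustrative, not from the original HAL): when HDR is requested,
// the concrete sensor HDR flavor comes from persist.camera.sensor.hdr, parsed
// directly into cam_sensor_hdr_type_t. The value 3 selects staggered HDR (the
// default already used for the _LE_CAMERA_ build above); assuming the usual
// enum layout, 0 keeps sensor HDR off. Flavors the sensor does not advertise in
// its capability mask are simply not applied, and values outside the enum range
// are rejected as BAD_VALUE by the checks above.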
13877
13878/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070013879 * FUNCTION : needRotationReprocess
13880 *
13881 * DESCRIPTION: if rotation needs to be done by reprocess in pp
13882 *
13883 * PARAMETERS : none
13884 *
13885 * RETURN : true: needed
13886 * false: no need
13887 *==========================================================================*/
13888bool QCamera3HardwareInterface::needRotationReprocess()
13889{
13890 if ((gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION) > 0) {
13891        // pp has the capability to process rotation, so route rotation through reprocess
13892 LOGH("need do reprocess for rotation");
13893 return true;
13894 }
13895
13896 return false;
13897}
13898
13899/*===========================================================================
13900 * FUNCTION : needReprocess
13901 *
13902 * DESCRIPTION: if reprocess is needed
13903 *
13904 * PARAMETERS : none
13905 *
13906 * RETURN : true: needed
13907 * false: no need
13908 *==========================================================================*/
13909bool QCamera3HardwareInterface::needReprocess(cam_feature_mask_t postprocess_mask)
13910{
13911 if (gCamCapability[mCameraId]->qcom_supported_feature_mask > 0) {
13912 // TODO: add for ZSL HDR later
13913 // pp module has min requirement for zsl reprocess, or WNR in ZSL mode
13914 if(postprocess_mask == CAM_QCOM_FEATURE_NONE){
13915 LOGH("need do reprocess for ZSL WNR or min PP reprocess");
13916 return true;
13917 } else {
13918 LOGH("already post processed frame");
13919 return false;
13920 }
13921 }
13922 return needRotationReprocess();
13923}
13924
13925/*===========================================================================
13926 * FUNCTION : needJpegExifRotation
13927 *
13928 * DESCRIPTION: if rotation from jpeg is needed
13929 *
13930 * PARAMETERS : none
13931 *
13932 * RETURN : true: needed
13933 * false: no need
13934 *==========================================================================*/
13935bool QCamera3HardwareInterface::needJpegExifRotation()
13936{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013937 /*If the pp does not have the ability to do rotation, enable jpeg rotation*/
Thierry Strudel3d639192016-09-09 11:52:26 -070013938 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
13939        LOGD("Need to use Jpeg EXIF Rotation");
13940 return true;
13941 }
13942 return false;
13943}
13944
13945/*===========================================================================
13946 * FUNCTION : addOfflineReprocChannel
13947 *
13948 * DESCRIPTION: add a reprocess channel that will do reprocess on frames
13949 * coming from input channel
13950 *
13951 * PARAMETERS :
13952 * @config : reprocess configuration
13953 * @inputChHandle : pointer to the input (source) channel
13954 *
13955 *
13956 * RETURN : Ptr to the newly created channel obj. NULL if failed.
13957 *==========================================================================*/
13958QCamera3ReprocessChannel *QCamera3HardwareInterface::addOfflineReprocChannel(
13959 const reprocess_config_t &config, QCamera3ProcessingChannel *inputChHandle)
13960{
13961 int32_t rc = NO_ERROR;
13962 QCamera3ReprocessChannel *pChannel = NULL;
13963
13964 pChannel = new QCamera3ReprocessChannel(mCameraHandle->camera_handle,
Thierry Strudelc2ee3302016-11-17 12:33:12 -080013965 mChannelHandle, mCameraHandle->ops, captureResultCb, setBufferErrorStatus,
13966 config.padding, CAM_QCOM_FEATURE_NONE, this, inputChHandle);
Thierry Strudel3d639192016-09-09 11:52:26 -070013967 if (NULL == pChannel) {
13968 LOGE("no mem for reprocess channel");
13969 return NULL;
13970 }
13971
13972 rc = pChannel->initialize(IS_TYPE_NONE);
13973 if (rc != NO_ERROR) {
13974 LOGE("init reprocess channel failed, ret = %d", rc);
13975 delete pChannel;
13976 return NULL;
13977 }
13978
13979 // pp feature config
13980 cam_pp_feature_config_t pp_config;
13981 memset(&pp_config, 0, sizeof(cam_pp_feature_config_t));
13982
13983 pp_config.feature_mask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
13984 if (gCamCapability[mCameraId]->qcom_supported_feature_mask
13985 & CAM_QCOM_FEATURE_DSDN) {
13986        //Use CPP CDS in case h/w supports it.
13987 pp_config.feature_mask &= ~CAM_QCOM_FEATURE_CDS;
13988 pp_config.feature_mask |= CAM_QCOM_FEATURE_DSDN;
13989 }
13990 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
13991 pp_config.feature_mask &= ~CAM_QCOM_FEATURE_ROTATION;
13992 }
13993
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013994 if (config.hdr_param.hdr_enable) {
13995 pp_config.feature_mask |= CAM_QCOM_FEATURE_HDR;
13996 pp_config.hdr_param = config.hdr_param;
13997 }
13998
13999 if (mForceHdrSnapshot) {
14000 pp_config.feature_mask |= CAM_QCOM_FEATURE_HDR;
14001 pp_config.hdr_param.hdr_enable = 1;
14002 pp_config.hdr_param.hdr_need_1x = 0;
14003 pp_config.hdr_param.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
14004 }
14005
Thierry Strudel3d639192016-09-09 11:52:26 -070014006 rc = pChannel->addReprocStreamsFromSource(pp_config,
14007 config,
14008 IS_TYPE_NONE,
14009 mMetadataChannel);
14010
14011 if (rc != NO_ERROR) {
14012 delete pChannel;
14013 return NULL;
14014 }
14015 return pChannel;
14016}
14017
14018/*===========================================================================
14019 * FUNCTION : getMobicatMask
14020 *
14021 * DESCRIPTION: returns mobicat mask
14022 *
14023 * PARAMETERS : none
14024 *
14025 * RETURN : mobicat mask
14026 *
14027 *==========================================================================*/
14028uint8_t QCamera3HardwareInterface::getMobicatMask()
14029{
14030 return m_MobicatMask;
14031}
14032
14033/*===========================================================================
14034 * FUNCTION : setMobicat
14035 *
14036 * DESCRIPTION: set Mobicat on/off.
14037 *
14038 * PARAMETERS :
14039 * @params : none
14040 *
14041 * RETURN : int32_t type of status
14042 * NO_ERROR -- success
14043 * none-zero failure code
14044 *==========================================================================*/
14045int32_t QCamera3HardwareInterface::setMobicat()
14046{
Thierry Strudel3d639192016-09-09 11:52:26 -070014047 int32_t ret = NO_ERROR;
Thierry Strudel3d639192016-09-09 11:52:26 -070014048
Shuzhen Wangb57ec912017-07-31 13:24:27 -070014049 if (m_MobicatMask) {
Thierry Strudel3d639192016-09-09 11:52:26 -070014050 tune_cmd_t tune_cmd;
14051 tune_cmd.type = SET_RELOAD_CHROMATIX;
14052 tune_cmd.module = MODULE_ALL;
14053 tune_cmd.value = TRUE;
14054 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
14055 CAM_INTF_PARM_SET_VFE_COMMAND,
14056 tune_cmd);
14057
14058 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
14059 CAM_INTF_PARM_SET_PP_COMMAND,
14060 tune_cmd);
14061 }
Thierry Strudel3d639192016-09-09 11:52:26 -070014062
14063 return ret;
14064}
14065
14066/*===========================================================================
14067* FUNCTION : getLogLevel
14068*
14069* DESCRIPTION: Reads the log level property into a variable
14070*
14071* PARAMETERS :
14072* None
14073*
14074* RETURN :
14075* None
14076*==========================================================================*/
14077void QCamera3HardwareInterface::getLogLevel()
14078{
14079 char prop[PROPERTY_VALUE_MAX];
14080 uint32_t globalLogLevel = 0;
14081
14082 property_get("persist.camera.hal.debug", prop, "0");
14083 int val = atoi(prop);
14084 if (0 <= val) {
14085 gCamHal3LogLevel = (uint32_t)val;
14086 }
14087
Thierry Strudel9ec39c62016-12-28 11:30:05 -080014088 property_get("persist.camera.kpi.debug", prop, "0");
Thierry Strudel3d639192016-09-09 11:52:26 -070014089 gKpiDebugLevel = atoi(prop);
14090
14091 property_get("persist.camera.global.debug", prop, "0");
14092 val = atoi(prop);
14093 if (0 <= val) {
14094 globalLogLevel = (uint32_t)val;
14095 }
14096
14097 /* Highest log level among hal.logs and global.logs is selected */
14098 if (gCamHal3LogLevel < globalLogLevel)
14099 gCamHal3LogLevel = globalLogLevel;
14100
14101 return;
14102}
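// Worked example (property values hypothetical): with persist.camera.hal.debug
// set to 2 and persist.camera.global.debug set to 4, the code above keeps the
// higher of the two, so gCamHal3LogLevel ends up as 4; with both properties
// unset it stays at the default of 0.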
14103
14104/*===========================================================================
14105 * FUNCTION : validateStreamRotations
14106 *
14107 * DESCRIPTION: Check if the rotations requested are supported
14108 *
14109 * PARAMETERS :
14110 * @stream_list : streams to be configured
14111 *
14112 * RETURN : NO_ERROR on success
14113 * -EINVAL on failure
14114 *
14115 *==========================================================================*/
14116int QCamera3HardwareInterface::validateStreamRotations(
14117 camera3_stream_configuration_t *streamList)
14118{
14119 int rc = NO_ERROR;
14120
14121 /*
14122 * Loop through all streams requested in configuration
14123 * Check if unsupported rotations have been requested on any of them
14124 */
14125 for (size_t j = 0; j < streamList->num_streams; j++){
14126 camera3_stream_t *newStream = streamList->streams[j];
14127
Emilian Peev35ceeed2017-06-29 11:58:56 -070014128 switch(newStream->rotation) {
14129 case CAMERA3_STREAM_ROTATION_0:
14130 case CAMERA3_STREAM_ROTATION_90:
14131 case CAMERA3_STREAM_ROTATION_180:
14132 case CAMERA3_STREAM_ROTATION_270:
14133 //Expected values
14134 break;
14135 default:
14136 ALOGE("%s: Error: Unsupported rotation of %d requested for stream"
14137                    " type:%d and stream format:%d", __func__,
14138 newStream->rotation, newStream->stream_type,
14139 newStream->format);
14140 return -EINVAL;
14141 }
14142
Thierry Strudel3d639192016-09-09 11:52:26 -070014143 bool isRotated = (newStream->rotation != CAMERA3_STREAM_ROTATION_0);
14144 bool isImplDef = (newStream->format ==
14145 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED);
14146 bool isZsl = (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
14147 isImplDef);
14148
14149 if (isRotated && (!isImplDef || isZsl)) {
14150 LOGE("Error: Unsupported rotation of %d requested for stream"
14151                    " type:%d and stream format:%d",
14152 newStream->rotation, newStream->stream_type,
14153 newStream->format);
14154 rc = -EINVAL;
14155 break;
14156 }
14157 }
14158
14159 return rc;
14160}
14161
14162/*===========================================================================
14163* FUNCTION : getFlashInfo
14164*
14165* DESCRIPTION: Retrieve information about whether the device has a flash.
14166*
14167* PARAMETERS :
14168* @cameraId : Camera id to query
14169* @hasFlash : Boolean indicating whether there is a flash device
14170* associated with given camera
14171* @flashNode : If a flash device exists, this will be its device node.
14172*
14173* RETURN :
14174* None
14175*==========================================================================*/
14176void QCamera3HardwareInterface::getFlashInfo(const int cameraId,
14177 bool& hasFlash,
14178 char (&flashNode)[QCAMERA_MAX_FILEPATH_LENGTH])
14179{
14180 cam_capability_t* camCapability = gCamCapability[cameraId];
14181 if (NULL == camCapability) {
14182 hasFlash = false;
14183 flashNode[0] = '\0';
14184 } else {
14185 hasFlash = camCapability->flash_available;
14186 strlcpy(flashNode,
14187 (char*)camCapability->flash_dev_name,
14188 QCAMERA_MAX_FILEPATH_LENGTH);
14189 }
14190}
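/*
 * Illustrative caller-side sketch (variable names assumed, not taken from a
 * real call site): query flash availability and the flash device node for a
 * camera id before attempting to drive the torch.
 *
 *     bool hasFlash = false;
 *     char flashNode[QCAMERA_MAX_FILEPATH_LENGTH] = {};
 *     getFlashInfo(cameraId, hasFlash, flashNode);
 *     if (hasFlash) {
 *         ALOGD("Camera %d flash node: %s", cameraId, flashNode);
 *     }
 */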
14191
14192/*===========================================================================
14193* FUNCTION : getEepromVersionInfo
14194*
14195* DESCRIPTION: Retrieve version info of the sensor EEPROM data
14196*
14197* PARAMETERS : None
14198*
14199* RETURN : string describing EEPROM version
14200* "\0" if no such info available
14201*==========================================================================*/
14202const char *QCamera3HardwareInterface::getEepromVersionInfo()
14203{
14204 return (const char *)&gCamCapability[mCameraId]->eeprom_version_info[0];
14205}
14206
14207/*===========================================================================
14208* FUNCTION : getLdafCalib
14209*
14210* DESCRIPTION: Retrieve Laser AF calibration data
14211*
14212* PARAMETERS : None
14213*
14214* RETURN     : Pointer to two uint32_t values describing laser AF calibration data
14215* NULL if none is available.
14216*==========================================================================*/
14217const uint32_t *QCamera3HardwareInterface::getLdafCalib()
14218{
14219 if (mLdafCalibExist) {
14220 return &mLdafCalib[0];
14221 } else {
14222 return NULL;
14223 }
14224}
14225
14226/*===========================================================================
Arnd Geis082a4d72017-08-24 10:33:07 -070014227* FUNCTION : getEaselFwVersion
14228*
14229* DESCRIPTION: Retrieve Easel firmware version
14230*
14231* PARAMETERS : None
14232*
14233* RETURN : string describing Firmware version
Arnd Geis8cbfc182017-09-07 14:46:41 -070014234*              NULL if the firmware version is not available (firmware not updated yet)
Arnd Geis082a4d72017-08-24 10:33:07 -070014235*==========================================================================*/
14236const char *QCamera3HardwareInterface::getEaselFwVersion()
14237{
Arnd Geis8cbfc182017-09-07 14:46:41 -070014238 if (mEaselFwUpdated) {
14239 return (const char *)&mEaselFwVersion[0];
14240 } else {
14241 return NULL;
Arnd Geis082a4d72017-08-24 10:33:07 -070014242 }
Arnd Geis082a4d72017-08-24 10:33:07 -070014243}
14244
14245/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070014246 * FUNCTION : dynamicUpdateMetaStreamInfo
14247 *
14248 * DESCRIPTION: This function:
14249 * (1) stops all the channels
14250 * (2) returns error on pending requests and buffers
14251 * (3) sends metastream_info in setparams
14252 * (4) starts all channels
14253 * This is useful when sensor has to be restarted to apply any
14254 * settings such as frame rate from a different sensor mode
14255 *
14256 * PARAMETERS : None
14257 *
14258 * RETURN : NO_ERROR on success
14259 * Error codes on failure
14260 *
14261 *==========================================================================*/
14262int32_t QCamera3HardwareInterface::dynamicUpdateMetaStreamInfo()
14263{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014264 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_DYN_UPDATE_META_STRM_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -070014265 int rc = NO_ERROR;
14266
14267 LOGD("E");
14268
14269 rc = stopAllChannels();
14270 if (rc < 0) {
14271 LOGE("stopAllChannels failed");
14272 return rc;
14273 }
14274
14275 rc = notifyErrorForPendingRequests();
14276 if (rc < 0) {
14277 LOGE("notifyErrorForPendingRequests failed");
14278 return rc;
14279 }
14280
14281 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
14282 LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%x"
14283                ", Format:%d",
14284 mStreamConfigInfo.type[i],
14285 mStreamConfigInfo.stream_sizes[i].width,
14286 mStreamConfigInfo.stream_sizes[i].height,
14287 mStreamConfigInfo.postprocess_mask[i],
14288 mStreamConfigInfo.format[i]);
14289 }
14290
14291 /* Send meta stream info once again so that ISP can start */
14292 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
14293 CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
14294 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
14295 mParameters);
14296 if (rc < 0) {
14297        LOGE("Setting meta stream info failed. Sensor mode will not change");
14298 }
14299
14300 rc = startAllChannels();
14301 if (rc < 0) {
14302 LOGE("startAllChannels failed");
14303 return rc;
14304 }
14305
14306 LOGD("X");
14307 return rc;
14308}
14309
14310/*===========================================================================
14311 * FUNCTION : stopAllChannels
14312 *
14313 * DESCRIPTION: This function stops (equivalent to stream-off) all channels
14314 *
14315 * PARAMETERS : None
14316 *
14317 * RETURN : NO_ERROR on success
14318 * Error codes on failure
14319 *
14320 *==========================================================================*/
14321int32_t QCamera3HardwareInterface::stopAllChannels()
14322{
14323 int32_t rc = NO_ERROR;
14324
14325 LOGD("Stopping all channels");
14326 // Stop the Streams/Channels
14327 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
14328 it != mStreamInfo.end(); it++) {
14329 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
14330 if (channel) {
14331 channel->stop();
14332 }
14333 (*it)->status = INVALID;
14334 }
14335
14336 if (mSupportChannel) {
14337 mSupportChannel->stop();
14338 }
14339 if (mAnalysisChannel) {
14340 mAnalysisChannel->stop();
14341 }
14342 if (mRawDumpChannel) {
14343 mRawDumpChannel->stop();
14344 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014345 if (mHdrPlusRawSrcChannel) {
14346 mHdrPlusRawSrcChannel->stop();
14347 }
Thierry Strudel3d639192016-09-09 11:52:26 -070014348 if (mMetadataChannel) {
14349        /* If mStreamInfo is not empty, there is a metadata stream */
14350 mMetadataChannel->stop();
14351 }
14352
14353 LOGD("All channels stopped");
14354 return rc;
14355}
14356
14357/*===========================================================================
14358 * FUNCTION : startAllChannels
14359 *
14360 * DESCRIPTION: This function starts (equivalent to stream-on) all channels
14361 *
14362 * PARAMETERS : None
14363 *
14364 * RETURN : NO_ERROR on success
14365 * Error codes on failure
14366 *
14367 *==========================================================================*/
14368int32_t QCamera3HardwareInterface::startAllChannels()
14369{
14370 int32_t rc = NO_ERROR;
14371
14372 LOGD("Start all channels ");
14373 // Start the Streams/Channels
14374 if (mMetadataChannel) {
14375        /* If mStreamInfo is not empty, there is a metadata stream */
14376 rc = mMetadataChannel->start();
14377 if (rc < 0) {
14378 LOGE("META channel start failed");
14379 return rc;
14380 }
14381 }
14382 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
14383 it != mStreamInfo.end(); it++) {
14384 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
14385 if (channel) {
14386 rc = channel->start();
14387 if (rc < 0) {
14388 LOGE("channel start failed");
14389 return rc;
14390 }
14391 }
14392 }
14393 if (mAnalysisChannel) {
14394 mAnalysisChannel->start();
14395 }
14396 if (mSupportChannel) {
14397 rc = mSupportChannel->start();
14398 if (rc < 0) {
14399 LOGE("Support channel start failed");
14400 return rc;
14401 }
14402 }
14403 if (mRawDumpChannel) {
14404 rc = mRawDumpChannel->start();
14405 if (rc < 0) {
14406 LOGE("RAW dump channel start failed");
14407 return rc;
14408 }
14409 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014410 if (mHdrPlusRawSrcChannel) {
14411 rc = mHdrPlusRawSrcChannel->start();
14412 if (rc < 0) {
14413 LOGE("HDR+ RAW channel start failed");
14414 return rc;
14415 }
14416 }
Thierry Strudel3d639192016-09-09 11:52:26 -070014417
14418 LOGD("All channels started");
14419 return rc;
14420}
14421
14422/*===========================================================================
14423 * FUNCTION : notifyErrorForPendingRequests
14424 *
14425 * DESCRIPTION: This function sends error for all the pending requests/buffers
14426 *
14427 * PARAMETERS : None
14428 *
14429 * RETURN : Error codes
14430 * NO_ERROR on success
14431 *
14432 *==========================================================================*/
14433int32_t QCamera3HardwareInterface::notifyErrorForPendingRequests()
14434{
Emilian Peev7650c122017-01-19 08:24:33 -080014435 notifyErrorFoPendingDepthData(mDepthChannel);
14436
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014437 auto pendingRequest = mPendingRequestsList.begin();
14438 auto pendingBuffer = mPendingBuffersMap.mPendingBuffersInRequest.begin();
Thierry Strudel3d639192016-09-09 11:52:26 -070014439
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014440 // Iterate through pending requests (for which result metadata isn't sent yet) and pending
14441 // buffers (for which buffers aren't sent yet).
14442 while (pendingRequest != mPendingRequestsList.end() ||
14443 pendingBuffer != mPendingBuffersMap.mPendingBuffersInRequest.end()) {
14444 if (pendingRequest == mPendingRequestsList.end() ||
14445 pendingBuffer->frame_number < pendingRequest->frame_number) {
14446            // If metadata for this frame was already sent, notify about a buffer error and
14447            // return the buffers with an error status.
14448 for (auto &info : pendingBuffer->mPendingBufferList) {
14449 // Send a buffer error for this frame number.
Thierry Strudel3d639192016-09-09 11:52:26 -070014450 camera3_notify_msg_t notify_msg;
14451 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
14452 notify_msg.type = CAMERA3_MSG_ERROR;
14453 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014454 notify_msg.message.error.error_stream = info.stream;
14455 notify_msg.message.error.frame_number = pendingBuffer->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014456 orchestrateNotify(&notify_msg);
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014457
14458 camera3_stream_buffer_t buffer = {};
14459 buffer.acquire_fence = -1;
14460 buffer.release_fence = -1;
14461 buffer.buffer = info.buffer;
14462 buffer.status = CAMERA3_BUFFER_STATUS_ERROR;
14463 buffer.stream = info.stream;
14464 mOutputBufferDispatcher.markBufferReady(pendingBuffer->frame_number, buffer);
Thierry Strudel3d639192016-09-09 11:52:26 -070014465 }
14466
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014467 pendingBuffer = mPendingBuffersMap.mPendingBuffersInRequest.erase(pendingBuffer);
14468 } else if (pendingBuffer == mPendingBuffersMap.mPendingBuffersInRequest.end() ||
14469 pendingBuffer->frame_number > pendingRequest->frame_number) {
14470 // If the buffers for this frame were sent already, notify about a result error.
Thierry Strudel3d639192016-09-09 11:52:26 -070014471 camera3_notify_msg_t notify_msg;
14472 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
14473 notify_msg.type = CAMERA3_MSG_ERROR;
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014474 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_RESULT;
14475 notify_msg.message.error.error_stream = nullptr;
14476 notify_msg.message.error.frame_number = pendingRequest->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014477 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -070014478
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014479 if (pendingRequest->input_buffer != nullptr) {
14480 camera3_capture_result result = {};
14481 result.frame_number = pendingRequest->frame_number;
14482 result.result = nullptr;
14483 result.input_buffer = pendingRequest->input_buffer;
14484 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -070014485 }
14486
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014487 mShutterDispatcher.clear(pendingRequest->frame_number);
14488 pendingRequest = mPendingRequestsList.erase(pendingRequest);
14489 } else {
14490 // If both buffers and result metadata weren't sent yet, notify about a request error
14491 // and return buffers with error.
14492 for (auto &info : pendingBuffer->mPendingBufferList) {
14493 camera3_notify_msg_t notify_msg;
14494 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
14495 notify_msg.type = CAMERA3_MSG_ERROR;
14496 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_REQUEST;
14497 notify_msg.message.error.error_stream = info.stream;
14498 notify_msg.message.error.frame_number = pendingBuffer->frame_number;
14499 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -070014500
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014501 camera3_stream_buffer_t buffer = {};
14502 buffer.acquire_fence = -1;
14503 buffer.release_fence = -1;
14504 buffer.buffer = info.buffer;
14505 buffer.status = CAMERA3_BUFFER_STATUS_ERROR;
14506 buffer.stream = info.stream;
14507 mOutputBufferDispatcher.markBufferReady(pendingBuffer->frame_number, buffer);
14508 }
14509
14510 if (pendingRequest->input_buffer != nullptr) {
14511 camera3_capture_result result = {};
14512 result.frame_number = pendingRequest->frame_number;
14513 result.result = nullptr;
14514 result.input_buffer = pendingRequest->input_buffer;
14515 orchestrateResult(&result);
14516 }
14517
14518 mShutterDispatcher.clear(pendingRequest->frame_number);
14519 pendingBuffer = mPendingBuffersMap.mPendingBuffersInRequest.erase(pendingBuffer);
14520 pendingRequest = mPendingRequestsList.erase(pendingRequest);
Thierry Strudel3d639192016-09-09 11:52:26 -070014521 }
14522 }
14523
14524 /* Reset pending frame Drop list and requests list */
14525 mPendingFrameDropList.clear();
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014526 mShutterDispatcher.clear();
14527 mOutputBufferDispatcher.clear(/*clearConfiguredStreams*/false);
Thierry Strudel3d639192016-09-09 11:52:26 -070014528 mPendingBuffersMap.mPendingBuffersInRequest.clear();
Emilian Peev30522a12017-08-03 14:36:33 +010014529 mExpectedFrameDuration = 0;
14530 mExpectedInflightDuration = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -070014531 LOGH("Cleared all the pending buffers ");
14532
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014533 return NO_ERROR;
Thierry Strudel3d639192016-09-09 11:52:26 -070014534}
14535
14536bool QCamera3HardwareInterface::isOnEncoder(
14537 const cam_dimension_t max_viewfinder_size,
14538 uint32_t width, uint32_t height)
14539{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014540 return ((width > (uint32_t)max_viewfinder_size.width) ||
14541 (height > (uint32_t)max_viewfinder_size.height) ||
14542 (width > (uint32_t)VIDEO_4K_WIDTH) ||
14543 (height > (uint32_t)VIDEO_4K_HEIGHT));
Thierry Strudel3d639192016-09-09 11:52:26 -070014544}
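/*
 * Illustrative example (sizes assumed for the sketch only; member calls shown
 * without the receiver object for brevity): with a maximum viewfinder size of
 * 1920x1080, a 1280x720 stream is not considered "on encoder", while a
 * 4032x3024 snapshot stream is, since it exceeds the viewfinder bound (and
 * any dimension beyond 4K also forces the encoder path):
 *
 *     cam_dimension_t maxVf = {1920, 1080};
 *     isOnEncoder(maxVf, 1280, 720);    // false
 *     isOnEncoder(maxVf, 4032, 3024);   // true
 */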
14545
14546/*===========================================================================
14547 * FUNCTION : setBundleInfo
14548 *
14549 * DESCRIPTION: Set bundle info for all streams that are bundled.
14550 *
14551 * PARAMETERS : None
14552 *
14553 * RETURN : NO_ERROR on success
14554 * Error codes on failure
14555 *==========================================================================*/
14556int32_t QCamera3HardwareInterface::setBundleInfo()
14557{
14558 int32_t rc = NO_ERROR;
14559
14560 if (mChannelHandle) {
14561 cam_bundle_config_t bundleInfo;
14562 memset(&bundleInfo, 0, sizeof(bundleInfo));
14563 rc = mCameraHandle->ops->get_bundle_info(
14564 mCameraHandle->camera_handle, mChannelHandle, &bundleInfo);
14565 if (rc != NO_ERROR) {
14566 LOGE("get_bundle_info failed");
14567 return rc;
14568 }
14569 if (mAnalysisChannel) {
14570 mAnalysisChannel->setBundleInfo(bundleInfo);
14571 }
14572 if (mSupportChannel) {
14573 mSupportChannel->setBundleInfo(bundleInfo);
14574 }
14575 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
14576 it != mStreamInfo.end(); it++) {
14577 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
14578 channel->setBundleInfo(bundleInfo);
14579 }
14580 if (mRawDumpChannel) {
14581 mRawDumpChannel->setBundleInfo(bundleInfo);
14582 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014583 if (mHdrPlusRawSrcChannel) {
14584 mHdrPlusRawSrcChannel->setBundleInfo(bundleInfo);
14585 }
Thierry Strudel3d639192016-09-09 11:52:26 -070014586 }
14587
14588 return rc;
14589}
14590
14591/*===========================================================================
Thierry Strudel295a0ca2016-11-03 18:38:47 -070014592 * FUNCTION : setInstantAEC
14593 *
14594 * DESCRIPTION: Set Instant AEC related params.
14595 *
14596 * PARAMETERS :
14597 * @meta: CameraMetadata reference
14598 *
14599 * RETURN : NO_ERROR on success
14600 * Error codes on failure
14601 *==========================================================================*/
14602int32_t QCamera3HardwareInterface::setInstantAEC(const CameraMetadata &meta)
14603{
14604 int32_t rc = NO_ERROR;
14605 uint8_t val = 0;
14606 char prop[PROPERTY_VALUE_MAX];
14607
14608 // First try to configure instant AEC from framework metadata
14609 if (meta.exists(QCAMERA3_INSTANT_AEC_MODE)) {
14610 val = (uint8_t)meta.find(QCAMERA3_INSTANT_AEC_MODE).data.i32[0];
14611 }
14612
14613    // If the framework did not set this value, try to read it from the system property.
14614 if (val == 0) {
14615 memset(prop, 0, sizeof(prop));
14616 property_get("persist.camera.instant.aec", prop, "0");
14617 val = (uint8_t)atoi(prop);
14618 }
14619
14620 if ((val >= (uint8_t)CAM_AEC_NORMAL_CONVERGENCE) &&
14621 ( val < (uint8_t)CAM_AEC_CONVERGENCE_MAX)) {
14622 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_INSTANT_AEC, val);
14623 mInstantAEC = val;
14624 mInstantAECSettledFrameNumber = 0;
14625 mInstantAecFrameIdxCount = 0;
14626 LOGH("instantAEC value set %d",val);
14627 if (mInstantAEC) {
14628 memset(prop, 0, sizeof(prop));
14629 property_get("persist.camera.ae.instant.bound", prop, "10");
14630 int32_t aec_frame_skip_cnt = atoi(prop);
14631 if (aec_frame_skip_cnt >= 0) {
14632 mAecSkipDisplayFrameBound = (uint8_t)aec_frame_skip_cnt;
14633 } else {
14634 LOGE("Invalid prop for aec frame bound %d", aec_frame_skip_cnt);
14635 rc = BAD_VALUE;
14636 }
14637 }
14638 } else {
14639 LOGE("Bad instant aec value set %d", val);
14640 rc = BAD_VALUE;
14641 }
14642 return rc;
14643}
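/*
 * Usage note (illustrative): when the framework does not provide
 * QCAMERA3_INSTANT_AEC_MODE, the mode falls back to the
 * persist.camera.instant.aec property, and the number of preview frames to
 * skip while AEC converges comes from persist.camera.ae.instant.bound
 * (default 10). On a debuggable build one might set, for example:
 *
 *     adb shell setprop persist.camera.instant.aec 1
 *     adb shell setprop persist.camera.ae.instant.bound 8
 *
 * The value must fall inside the [CAM_AEC_NORMAL_CONVERGENCE,
 * CAM_AEC_CONVERGENCE_MAX) range checked above.
 */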
14644
14645/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070014646 * FUNCTION : get_num_overall_buffers
14647 *
14648 * DESCRIPTION: Estimate number of pending buffers across all requests.
14649 *
14650 * PARAMETERS : None
14651 *
14652 * RETURN : Number of overall pending buffers
14653 *
14654 *==========================================================================*/
14655uint32_t PendingBuffersMap::get_num_overall_buffers()
14656{
14657 uint32_t sum_buffers = 0;
14658 for (auto &req : mPendingBuffersInRequest) {
14659 sum_buffers += req.mPendingBufferList.size();
14660 }
14661 return sum_buffers;
14662}
14663
14664/*===========================================================================
14665 * FUNCTION : removeBuf
14666 *
14667 * DESCRIPTION: Remove a matching buffer from tracker.
14668 *
14669 * PARAMETERS : @buffer: image buffer for the callback
14670 *
14671 * RETURN : None
14672 *
14673 *==========================================================================*/
14674void PendingBuffersMap::removeBuf(buffer_handle_t *buffer)
14675{
14676 bool buffer_found = false;
14677 for (auto req = mPendingBuffersInRequest.begin();
14678 req != mPendingBuffersInRequest.end(); req++) {
14679 for (auto k = req->mPendingBufferList.begin();
14680 k != req->mPendingBufferList.end(); k++ ) {
14681 if (k->buffer == buffer) {
14682 LOGD("Frame %d: Found Frame buffer %p, take it out from mPendingBufferList",
14683 req->frame_number, buffer);
14684 k = req->mPendingBufferList.erase(k);
14685 if (req->mPendingBufferList.empty()) {
14686 // Remove this request from Map
14687 req = mPendingBuffersInRequest.erase(req);
14688 }
14689 buffer_found = true;
14690 break;
14691 }
14692 }
14693 if (buffer_found) {
14694 break;
14695 }
14696 }
14697 LOGD("mPendingBuffersMap.num_overall_buffers = %d",
14698 get_num_overall_buffers());
14699}
14700
14701/*===========================================================================
Thierry Strudelc2ee3302016-11-17 12:33:12 -080014702 * FUNCTION : getBufErrStatus
14703 *
14704 * DESCRIPTION: get buffer error status
14705 *
14706 * PARAMETERS : @buffer: buffer handle
14707 *
14708 * RETURN : Error status
14709 *
14710 *==========================================================================*/
14711int32_t PendingBuffersMap::getBufErrStatus(buffer_handle_t *buffer)
14712{
14713 for (auto& req : mPendingBuffersInRequest) {
14714 for (auto& k : req.mPendingBufferList) {
14715 if (k.buffer == buffer)
14716 return k.bufStatus;
14717 }
14718 }
14719 return CAMERA3_BUFFER_STATUS_OK;
14720}
14721
14722/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070014723 * FUNCTION : setPAAFSupport
14724 *
14725 * DESCRIPTION: Set the preview-assisted auto focus support bit in
14726 * feature mask according to stream type and filter
14727 * arrangement
14728 *
14729 * PARAMETERS : @feature_mask: current feature mask, which may be modified
14730 * @stream_type: stream type
14731 * @filter_arrangement: filter arrangement
14732 *
14733 * RETURN : None
14734 *==========================================================================*/
14735void QCamera3HardwareInterface::setPAAFSupport(
14736 cam_feature_mask_t& feature_mask,
14737 cam_stream_type_t stream_type,
14738 cam_color_filter_arrangement_t filter_arrangement)
14739{
Thierry Strudel3d639192016-09-09 11:52:26 -070014740 switch (filter_arrangement) {
14741 case CAM_FILTER_ARRANGEMENT_RGGB:
14742 case CAM_FILTER_ARRANGEMENT_GRBG:
14743 case CAM_FILTER_ARRANGEMENT_GBRG:
14744 case CAM_FILTER_ARRANGEMENT_BGGR:
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014745 if ((stream_type == CAM_STREAM_TYPE_PREVIEW) ||
14746 (stream_type == CAM_STREAM_TYPE_ANALYSIS) ||
Thierry Strudel3d639192016-09-09 11:52:26 -070014747 (stream_type == CAM_STREAM_TYPE_VIDEO)) {
Thierry Strudel2896d122017-02-23 19:18:03 -080014748 if (!(feature_mask & CAM_QTI_FEATURE_PPEISCORE))
14749 feature_mask |= CAM_QCOM_FEATURE_PAAF;
Thierry Strudel3d639192016-09-09 11:52:26 -070014750 }
14751 break;
14752 case CAM_FILTER_ARRANGEMENT_Y:
14753 if (stream_type == CAM_STREAM_TYPE_ANALYSIS) {
14754 feature_mask |= CAM_QCOM_FEATURE_PAAF;
14755 }
14756 break;
14757 default:
14758 break;
14759 }
Shuzhen Wang3b457d92016-08-03 08:46:59 -070014760 LOGD("feature_mask=0x%llx; stream_type=%d, filter_arrangement=%d",
14761 feature_mask, stream_type, filter_arrangement);
14762
14763
Thierry Strudel3d639192016-09-09 11:52:26 -070014764}
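/*
 * Illustrative sketch (a minimal example, not taken from a call site; the
 * color_arrangement field of the capability table is assumed as used
 * elsewhere in this HAL): for a Bayer sensor the PAAF bit is ORed into
 * preview/analysis/video feature masks unless CAM_QTI_FEATURE_PPEISCORE has
 * already claimed the stream, while a mono (Y) sensor only gets it on the
 * analysis stream.
 *
 *     cam_feature_mask_t mask = 0;
 *     setPAAFSupport(mask, CAM_STREAM_TYPE_PREVIEW,
 *             gCamCapability[mCameraId]->color_arrangement);
 *     // For an RGGB sensor, mask now contains CAM_QCOM_FEATURE_PAAF.
 */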
14765
14766/*===========================================================================
14767* FUNCTION : getSensorMountAngle
14768*
14769* DESCRIPTION: Retrieve sensor mount angle
14770*
14771* PARAMETERS : None
14772*
14773* RETURN : sensor mount angle in uint32_t
14774*==========================================================================*/
14775uint32_t QCamera3HardwareInterface::getSensorMountAngle()
14776{
14777 return gCamCapability[mCameraId]->sensor_mount_angle;
14778}
14779
14780/*===========================================================================
14781* FUNCTION : getRelatedCalibrationData
14782*
14783* DESCRIPTION: Retrieve related system calibration data
14784*
14785* PARAMETERS : None
14786*
14787* RETURN : Pointer of related system calibration data
14788*==========================================================================*/
14789const cam_related_system_calibration_data_t *QCamera3HardwareInterface::getRelatedCalibrationData()
14790{
14791 return (const cam_related_system_calibration_data_t *)
14792 &(gCamCapability[mCameraId]->related_cam_calibration);
14793}
Shuzhen Wangf6890e02016-08-12 14:28:54 -070014794
14795/*===========================================================================
14796 * FUNCTION : is60HzZone
14797 *
14798 * DESCRIPTION: Whether the phone is in a zone with 60 Hz mains electricity frequency
14799 *
14800 * PARAMETERS : None
14801 *
14802 * RETURN : True if in 60Hz zone, False otherwise
14803 *==========================================================================*/
14804bool QCamera3HardwareInterface::is60HzZone()
14805{
14806 time_t t = time(NULL);
14807 struct tm lt;
14808
14809 struct tm* r = localtime_r(&t, &lt);
14810
14811 if (r == NULL || lt.tm_gmtoff <= -2*60*60 || lt.tm_gmtoff >= 8*60*60)
14812 return true;
14813 else
14814 return false;
14815}
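/*
 * Example of the UTC-offset heuristic above (illustrative): with tm_gmtoff
 * expressed in seconds,
 *
 *     UTC-08:00 (gmtoff = -28800)  -> 60 Hz zone (offset <= -2h, Americas)
 *     UTC+01:00 (gmtoff =  +3600)  -> 50 Hz zone (between -2h and +8h)
 *     UTC+09:00 (gmtoff = +32400)  -> 60 Hz zone (offset >= +8h, East Asia)
 *
 * If localtime_r() fails, the function defaults to reporting a 60 Hz zone.
 */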
Shuzhen Wanga5da1022016-07-13 20:18:42 -070014816
14817/*===========================================================================
14818 * FUNCTION : adjustBlackLevelForCFA
14819 *
14820 * DESCRIPTION: Adjust the black level pattern in the order of RGGB to the order
14821 *              of the Bayer CFA (Color Filter Array).
14822 *
14823 * PARAMETERS : @input: black level pattern in the order of RGGB
14824 * @output: black level pattern in the order of CFA
14825 * @color_arrangement: CFA color arrangement
14826 *
14827 * RETURN : None
14828 *==========================================================================*/
14829template<typename T>
14830void QCamera3HardwareInterface::adjustBlackLevelForCFA(
14831 T input[BLACK_LEVEL_PATTERN_CNT],
14832 T output[BLACK_LEVEL_PATTERN_CNT],
14833 cam_color_filter_arrangement_t color_arrangement)
14834{
14835 switch (color_arrangement) {
14836 case CAM_FILTER_ARRANGEMENT_GRBG:
14837 output[0] = input[1];
14838 output[1] = input[0];
14839 output[2] = input[3];
14840 output[3] = input[2];
14841 break;
14842 case CAM_FILTER_ARRANGEMENT_GBRG:
14843 output[0] = input[2];
14844 output[1] = input[3];
14845 output[2] = input[0];
14846 output[3] = input[1];
14847 break;
14848 case CAM_FILTER_ARRANGEMENT_BGGR:
14849 output[0] = input[3];
14850 output[1] = input[2];
14851 output[2] = input[1];
14852 output[3] = input[0];
14853 break;
14854 case CAM_FILTER_ARRANGEMENT_RGGB:
14855 output[0] = input[0];
14856 output[1] = input[1];
14857 output[2] = input[2];
14858 output[3] = input[3];
14859 break;
14860 default:
14861 LOGE("Invalid color arrangement to derive dynamic blacklevel");
14862 break;
14863 }
14864}
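/*
 * Worked example (values chosen for illustration only; member call shown
 * without the receiver object for brevity): for a GRBG sensor, RGGB-ordered
 * black levels input = {R, Gr, Gb, B} = {64, 65, 66, 67} are re-ordered to
 * match the CFA layout:
 *
 *     uint16_t in[BLACK_LEVEL_PATTERN_CNT]  = {64, 65, 66, 67};
 *     uint16_t out[BLACK_LEVEL_PATTERN_CNT] = {};
 *     adjustBlackLevelForCFA(in, out, CAM_FILTER_ARRANGEMENT_GRBG);
 *     // out == {65, 64, 67, 66}, i.e. {Gr, R, B, Gb}.
 */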
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014865
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014866void QCamera3HardwareInterface::updateHdrPlusResultMetadata(
14867 CameraMetadata &resultMetadata,
14868 std::shared_ptr<metadata_buffer_t> settings)
14869{
14870 if (settings == nullptr) {
14871 ALOGE("%s: settings is nullptr.", __FUNCTION__);
14872 return;
14873 }
14874
14875 IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, settings) {
14876 resultMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
14877 }
14878
14879 IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, settings) {
14880 String8 str((const char *)gps_methods);
14881 resultMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
14882 }
14883
14884 IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, settings) {
14885 resultMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
14886 }
14887
14888 IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, settings) {
14889 resultMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
14890 }
14891
14892 IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, settings) {
14893 uint8_t fwk_jpeg_quality = static_cast<uint8_t>(*jpeg_quality);
14894 resultMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
14895 }
14896
14897 IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, settings) {
14898 uint8_t fwk_thumb_quality = static_cast<uint8_t>(*thumb_quality);
14899 resultMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
14900 }
14901
14902 IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, settings) {
14903 int32_t fwk_thumb_size[2];
14904 fwk_thumb_size[0] = thumb_size->width;
14905 fwk_thumb_size[1] = thumb_size->height;
14906 resultMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
14907 }
14908
14909 IF_META_AVAILABLE(uint32_t, intent, CAM_INTF_META_CAPTURE_INTENT, settings) {
14910 uint8_t fwk_intent = intent[0];
14911 resultMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &fwk_intent, 1);
14912 }
14913}
14914
Chien-Yu Chenad9b6632017-08-22 19:09:23 -070014915bool QCamera3HardwareInterface::isRequestHdrPlusCompatible(
14916 const camera3_capture_request_t &request, const CameraMetadata &metadata) {
Chien-Yu Chenec328c82017-08-30 16:41:08 -070014917 if (metadata.exists(NEXUS_EXPERIMENTAL_2017_DISABLE_HDRPLUS) &&
14918 metadata.find(NEXUS_EXPERIMENTAL_2017_DISABLE_HDRPLUS).data.i32[0] == 1) {
14919 ALOGV("%s: NEXUS_EXPERIMENTAL_2017_DISABLE_HDRPLUS is 1", __FUNCTION__);
14920 return false;
14921 }
14922
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014923 if (!metadata.exists(ANDROID_NOISE_REDUCTION_MODE) ||
14924 metadata.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0] !=
14925 ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY) {
Chien-Yu Chenad9b6632017-08-22 19:09:23 -070014926 ALOGV("%s: ANDROID_NOISE_REDUCTION_MODE is not HQ: %d", __FUNCTION__,
Chien-Yu Chenee335912017-02-09 17:53:20 -080014927 metadata.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0]);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014928 return false;
14929 }
14930
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014931 if (!metadata.exists(ANDROID_EDGE_MODE) ||
Chien-Yu Chenad9b6632017-08-22 19:09:23 -070014932 metadata.find(ANDROID_EDGE_MODE).data.u8[0] != ANDROID_EDGE_MODE_HIGH_QUALITY) {
14933 ALOGV("%s: ANDROID_EDGE_MODE is not HQ.", __FUNCTION__);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014934 return false;
14935 }
14936
Chien-Yu Chenad9b6632017-08-22 19:09:23 -070014937 if (!metadata.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE) ||
14938 metadata.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0] !=
14939 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY) {
14940 ALOGV("%s: ANDROID_COLOR_CORRECTION_ABERRATION_MODE is not HQ.", __FUNCTION__);
14941 return false;
14942 }
14943
14944 if (!metadata.exists(ANDROID_CONTROL_AE_MODE) ||
14945 (metadata.find(ANDROID_CONTROL_AE_MODE).data.u8[0] != ANDROID_CONTROL_AE_MODE_ON &&
14946 metadata.find(ANDROID_CONTROL_AE_MODE).data.u8[0] !=
14947 ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH)) {
14948 ALOGV("%s: ANDROID_CONTROL_AE_MODE is not ON or ON_AUTO_FLASH.", __FUNCTION__);
14949 return false;
14950 }
14951
14952 if (!metadata.exists(ANDROID_CONTROL_AWB_MODE) ||
14953 metadata.find(ANDROID_CONTROL_AWB_MODE).data.u8[0] != ANDROID_CONTROL_AWB_MODE_AUTO) {
14954 ALOGV("%s: ANDROID_CONTROL_AWB_MODE is not AUTO.", __FUNCTION__);
14955 return false;
14956 }
14957
14958 if (!metadata.exists(ANDROID_CONTROL_EFFECT_MODE) ||
14959 metadata.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0] !=
14960 ANDROID_CONTROL_EFFECT_MODE_OFF) {
14961        ALOGV("%s: ANDROID_CONTROL_EFFECT_MODE is not OFF.", __FUNCTION__);
14962 return false;
14963 }
14964
14965 if (!metadata.exists(ANDROID_CONTROL_MODE) ||
14966 (metadata.find(ANDROID_CONTROL_MODE).data.u8[0] != ANDROID_CONTROL_MODE_AUTO &&
14967 metadata.find(ANDROID_CONTROL_MODE).data.u8[0] !=
14968 ANDROID_CONTROL_MODE_USE_SCENE_MODE)) {
14969 ALOGV("%s: ANDROID_CONTROL_MODE is not AUTO or USE_SCENE_MODE.", __FUNCTION__);
14970 return false;
14971 }
14972
14973 // TODO (b/32585046): support non-ZSL.
14974 if (!metadata.exists(ANDROID_CONTROL_ENABLE_ZSL) ||
14975 metadata.find(ANDROID_CONTROL_ENABLE_ZSL).data.u8[0] != ANDROID_CONTROL_ENABLE_ZSL_TRUE) {
14976 ALOGV("%s: ANDROID_CONTROL_ENABLE_ZSL is not true.", __FUNCTION__);
14977 return false;
14978 }
14979
14980 // TODO (b/32586081): support flash.
14981 if (!metadata.exists(ANDROID_FLASH_MODE) ||
14982 metadata.find(ANDROID_FLASH_MODE).data.u8[0] != ANDROID_FLASH_MODE_OFF) {
14983 ALOGV("%s: ANDROID_FLASH_MODE is not OFF.", __FUNCTION__);
14984 return false;
14985 }
14986
Chien-Yu Chenad9b6632017-08-22 19:09:23 -070014987 if (!metadata.exists(ANDROID_TONEMAP_MODE) ||
14988 metadata.find(ANDROID_TONEMAP_MODE).data.u8[0] != ANDROID_TONEMAP_MODE_HIGH_QUALITY) {
14989 ALOGV("%s: ANDROID_TONEMAP_MODE is not HQ.", __FUNCTION__);
14990 return false;
14991 }
14992
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070014993
Chien-Yu Chenad9b6632017-08-22 19:09:23 -070014994 // TODO (b/36693254, b/36690506): support other outputs.
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070014995 if (!gEnableMultipleHdrplusOutputs && request.num_output_buffers != 1) {
14996 ALOGV("%s: Only support 1 output: %d", __FUNCTION__, request.num_output_buffers);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014997 return false;
14998 }
14999
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070015000 switch (request.output_buffers[0].stream->format) {
15001 case HAL_PIXEL_FORMAT_BLOB:
15002 break;
15003 case HAL_PIXEL_FORMAT_YCbCr_420_888:
15004 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
15005 // TODO (b/36693254): Only support full size.
15006 if (!gEnableMultipleHdrplusOutputs) {
15007 if (static_cast<int>(request.output_buffers[0].stream->width) !=
15008 gCamCapability[mCameraId]->picture_sizes_tbl[0].width ||
15009 static_cast<int>(request.output_buffers[0].stream->height) !=
15010 gCamCapability[mCameraId]->picture_sizes_tbl[0].height) {
15011 ALOGV("%s: Only full size is supported.", __FUNCTION__);
15012 return false;
15013 }
15014 }
15015 break;
15016 default:
15017            ALOGV("%s: Not an HDR+ request: Only JPEG and YUV outputs are supported.", __FUNCTION__);
15018            for (uint32_t i = 0; i < request.num_output_buffers; i++) {
15019                ALOGV("%s: output_buffers[%u]: %dx%d format %d", __FUNCTION__, i,
15020                        request.output_buffers[i].stream->width,
15021                        request.output_buffers[i].stream->height,
15022                        request.output_buffers[i].stream->format);
15023 }
15024 return false;
15025 }
15026
Chien-Yu Chenad9b6632017-08-22 19:09:23 -070015027 return true;
15028}
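/*
 * Summary of the checks above (for reference): a request is treated as HDR+
 * compatible only when HDR+ is not explicitly disabled and the settings ask
 * for noise reduction, edge, aberration and tonemap modes of HIGH_QUALITY,
 * AE mode ON or ON_AUTO_FLASH, AWB mode AUTO, effect mode OFF, control mode
 * AUTO or USE_SCENE_MODE, ZSL enabled, flash mode OFF, and (unless multiple
 * HDR+ outputs are enabled) a single JPEG or full-size YUV/implementation-
 * defined output buffer.
 */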
15029
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070015030void QCamera3HardwareInterface::abortPendingHdrplusRequest(HdrPlusPendingRequest *hdrPlusRequest) {
15031 if (hdrPlusRequest == nullptr) return;
15032
15033 for (auto & outputBufferIter : hdrPlusRequest->outputBuffers) {
15034 // Find the stream for this buffer.
15035 for (auto streamInfo : mStreamInfo) {
15036 if (streamInfo->id == outputBufferIter.first) {
15037 if (streamInfo->channel == mPictureChannel) {
15038 // For picture channel, this buffer is internally allocated so return this
15039 // buffer to picture channel.
15040 mPictureChannel->returnYuvBuffer(outputBufferIter.second.get());
15041 } else {
15042 // Unregister this buffer for other channels.
15043 streamInfo->channel->unregisterBuffer(outputBufferIter.second.get());
15044 }
15045 break;
15046 }
15047 }
15048 }
15049
15050 hdrPlusRequest->outputBuffers.clear();
15051 hdrPlusRequest->frameworkOutputBuffers.clear();
15052}
15053
Chien-Yu Chenad9b6632017-08-22 19:09:23 -070015054bool QCamera3HardwareInterface::trySubmittingHdrPlusRequestLocked(
15055 HdrPlusPendingRequest *hdrPlusRequest, const camera3_capture_request_t &request,
15056 const CameraMetadata &metadata)
15057{
15058 if (hdrPlusRequest == nullptr) return false;
15059 if (!isRequestHdrPlusCompatible(request, metadata)) return false;
15060
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070015061 status_t res = OK;
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080015062 pbcamera::CaptureRequest pbRequest;
15063 pbRequest.id = request.frame_number;
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070015064 // Iterate through all requested output buffers and add them to an HDR+ request.
15065 for (uint32_t i = 0; i < request.num_output_buffers; i++) {
15066 // Find the index of the stream in mStreamInfo.
15067 uint32_t pbStreamId = 0;
15068 bool found = false;
15069 for (auto streamInfo : mStreamInfo) {
15070 if (streamInfo->stream == request.output_buffers[i].stream) {
15071 pbStreamId = streamInfo->id;
15072 found = true;
15073 break;
15074 }
15075 }
15076
15077 if (!found) {
15078 ALOGE("%s: requested stream was not configured.", __FUNCTION__);
15079 abortPendingHdrplusRequest(hdrPlusRequest);
15080 return false;
15081 }
15082 auto outBuffer = std::make_shared<mm_camera_buf_def_t>();
15083 switch (request.output_buffers[i].stream->format) {
15084 case HAL_PIXEL_FORMAT_BLOB:
15085 {
15086 // For jpeg output, get a YUV buffer from pic channel.
15087 QCamera3PicChannel *picChannel =
15088 (QCamera3PicChannel*)request.output_buffers[i].stream->priv;
15089 res = picChannel->getYuvBufferForRequest(outBuffer.get(), request.frame_number);
15090 if (res != OK) {
15091 ALOGE("%s: Getting an available YUV buffer from pic channel failed: %s (%d)",
15092 __FUNCTION__, strerror(-res), res);
15093 abortPendingHdrplusRequest(hdrPlusRequest);
15094 return false;
15095 }
15096 break;
15097 }
15098 case HAL_PIXEL_FORMAT_YCbCr_420_888:
15099 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
15100 {
15101 // For YUV output, register the buffer and get the buffer def from the channel.
15102 QCamera3ProcessingChannel *channel =
15103 (QCamera3ProcessingChannel*)request.output_buffers[i].stream->priv;
15104 res = channel->registerBufferAndGetBufDef(request.output_buffers[i].buffer,
15105 outBuffer.get());
15106 if (res != OK) {
15107 ALOGE("%s: Getting the buffer def failed: %s (%d)", __FUNCTION__,
15108 strerror(-res), res);
15109 abortPendingHdrplusRequest(hdrPlusRequest);
15110 return false;
15111 }
15112 break;
15113 }
15114 default:
15115 abortPendingHdrplusRequest(hdrPlusRequest);
15116 return false;
15117 }
15118
15119 pbcamera::StreamBuffer buffer;
15120 buffer.streamId = pbStreamId;
15121 buffer.dmaBufFd = outBuffer->fd;
15122 buffer.data = outBuffer->fd == -1 ? outBuffer->buffer : nullptr;
15123 buffer.dataSize = outBuffer->frame_len;
15124
15125 pbRequest.outputBuffers.push_back(buffer);
15126
15127 hdrPlusRequest->outputBuffers.emplace(pbStreamId, outBuffer);
15128 hdrPlusRequest->frameworkOutputBuffers.emplace(pbStreamId, request.output_buffers[i]);
15129 }
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080015130
15131 // Submit an HDR+ capture request to HDR+ service.
Chien-Yu Chen17cec362017-07-05 17:10:31 -070015132 res = gHdrPlusClient->submitCaptureRequest(&pbRequest, metadata);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080015133 if (res != OK) {
15134 ALOGE("%s: %d: Submitting a capture request failed: %s (%d)", __FUNCTION__, __LINE__,
15135 strerror(-res), res);
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070015136 abortPendingHdrplusRequest(hdrPlusRequest);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080015137 return false;
15138 }
15139
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080015140 return true;
15141}
15142
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070015143status_t QCamera3HardwareInterface::openHdrPlusClientAsyncLocked()
15144{
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015145 if (gHdrPlusClientOpening || gHdrPlusClient != nullptr) {
15146 return OK;
15147 }
15148
Chien-Yu Chend77a5462017-06-02 18:00:38 -070015149 status_t res = gEaselManagerClient->openHdrPlusClientAsync(this);
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015150 if (res != OK) {
15151 ALOGE("%s: Opening HDR+ client asynchronously failed: %s (%d)", __FUNCTION__,
15152 strerror(-res), res);
15153 return res;
15154 }
15155 gHdrPlusClientOpening = true;
15156
15157 return OK;
15158}
15159
Chien-Yu Chenee335912017-02-09 17:53:20 -080015160status_t QCamera3HardwareInterface::enableHdrPlusModeLocked()
15161{
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070015162 status_t res;
Chien-Yu Chenee335912017-02-09 17:53:20 -080015163
Chien-Yu Chena6c99062017-05-23 13:45:06 -070015164 if (mHdrPlusModeEnabled) {
15165 return OK;
15166 }
15167
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015168 // Check if gHdrPlusClient is opened or being opened.
15169 if (gHdrPlusClient == nullptr) {
15170 if (gHdrPlusClientOpening) {
15171 // HDR+ client is being opened. HDR+ mode will be enabled when it's opened.
15172 return OK;
15173 }
15174
15175 res = openHdrPlusClientAsyncLocked();
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070015176 if (res != OK) {
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015177 ALOGE("%s: Failed to open HDR+ client asynchronously: %s (%d).", __FUNCTION__,
15178 strerror(-res), res);
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070015179 return res;
15180 }
15181
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015182 // When opening HDR+ client completes, HDR+ mode will be enabled.
15183 return OK;
15184
Chien-Yu Chenee335912017-02-09 17:53:20 -080015185 }
15186
15187 // Configure stream for HDR+.
15188 res = configureHdrPlusStreamsLocked();
15189 if (res != OK) {
15190 LOGE("%s: Failed to configure HDR+ streams: %s (%d)", __FUNCTION__, strerror(-res), res);
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070015191 return res;
15192 }
15193
15194 // Enable HDR+ mode so Easel will start capturing ZSL raw buffers.
15195 res = gHdrPlusClient->setZslHdrPlusMode(true);
15196 if (res != OK) {
15197 LOGE("%s: Failed to enable HDR+ mode: %s (%d)", __FUNCTION__, strerror(-res), res);
Chien-Yu Chenee335912017-02-09 17:53:20 -080015198 return res;
15199 }
15200
15201 mHdrPlusModeEnabled = true;
15202 ALOGD("%s: HDR+ mode enabled", __FUNCTION__);
15203
15204 return OK;
15205}
15206
Chien-Yu Chen77ccd022017-06-23 12:00:36 -070015207void QCamera3HardwareInterface::finishHdrPlusClientOpeningLocked(std::unique_lock<std::mutex> &lock)
15208{
15209 if (gHdrPlusClientOpening) {
15210 gHdrPlusClientOpenCond.wait(lock, [&] { return !gHdrPlusClientOpening; });
15211 }
15212}
15213
Chien-Yu Chenee335912017-02-09 17:53:20 -080015214void QCamera3HardwareInterface::disableHdrPlusModeLocked()
15215{
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070015216 // Disable HDR+ mode.
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080015217 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070015218 status_t res = gHdrPlusClient->setZslHdrPlusMode(false);
15219 if (res != OK) {
15220 ALOGE("%s: Failed to disable HDR+ mode: %s (%d)", __FUNCTION__, strerror(-res), res);
15221 }
Chien-Yu Chen3b17c672017-04-24 12:49:52 -070015222
15223 // Close HDR+ client so Easel can enter low power mode.
Chien-Yu Chend77a5462017-06-02 18:00:38 -070015224 gEaselManagerClient->closeHdrPlusClient(std::move(gHdrPlusClient));
Chien-Yu Chen3b17c672017-04-24 12:49:52 -070015225 gHdrPlusClient = nullptr;
Chien-Yu Chenee335912017-02-09 17:53:20 -080015226 }
15227
15228 mHdrPlusModeEnabled = false;
15229 ALOGD("%s: HDR+ mode disabled", __FUNCTION__);
15230}
15231
Chien-Yu Chendeaebad2017-06-30 11:46:34 -070015232bool QCamera3HardwareInterface::isSessionHdrPlusModeCompatible()
15233{
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070015234 // Check that at least one YUV or one JPEG output is configured.
15235 // TODO: Support RAW (b/36690506)
15236 for (auto streamInfo : mStreamInfo) {
15237 if (streamInfo != nullptr && streamInfo->stream != nullptr) {
15238 if (streamInfo->stream->stream_type == CAMERA3_STREAM_OUTPUT &&
15239 (streamInfo->stream->format == HAL_PIXEL_FORMAT_BLOB ||
15240 streamInfo->stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888 ||
15241 streamInfo->stream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED)) {
15242 return true;
15243 }
15244 }
Chien-Yu Chendeaebad2017-06-30 11:46:34 -070015245 }
15246
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070015247 return false;
Chien-Yu Chendeaebad2017-06-30 11:46:34 -070015248}
15249
Chien-Yu Chenee335912017-02-09 17:53:20 -080015250status_t QCamera3HardwareInterface::configureHdrPlusStreamsLocked()
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080015251{
15252 pbcamera::InputConfiguration inputConfig;
15253 std::vector<pbcamera::StreamConfiguration> outputStreamConfigs;
15254 status_t res = OK;
15255
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070015256 // Sensor MIPI will send data to Easel.
15257 inputConfig.isSensorInput = true;
15258 inputConfig.sensorMode.cameraId = mCameraId;
15259 inputConfig.sensorMode.pixelArrayWidth = mSensorModeInfo.pixel_array_size.width;
15260 inputConfig.sensorMode.pixelArrayHeight = mSensorModeInfo.pixel_array_size.height;
15261 inputConfig.sensorMode.activeArrayWidth = mSensorModeInfo.active_array_size.width;
15262 inputConfig.sensorMode.activeArrayHeight = mSensorModeInfo.active_array_size.height;
15263 inputConfig.sensorMode.outputPixelClkHz = mSensorModeInfo.op_pixel_clk;
15264 inputConfig.sensorMode.timestampOffsetNs = mSensorModeInfo.timestamp_offset;
15265 if (mSensorModeInfo.num_raw_bits != 10) {
15266 ALOGE("%s: Only RAW10 is supported but this sensor mode has %d raw bits.", __FUNCTION__,
15267 mSensorModeInfo.num_raw_bits);
15268 return BAD_VALUE;
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080015269 }
15270
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070015271 inputConfig.sensorMode.format = HAL_PIXEL_FORMAT_RAW10;
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080015272
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070015273 // Iterate through configured output streams in HAL and configure those streams in HDR+
15274 // service.
15275 for (auto streamInfo : mStreamInfo) {
15276 pbcamera::StreamConfiguration outputConfig;
15277 if (streamInfo->stream->stream_type == CAMERA3_STREAM_OUTPUT) {
15278 switch (streamInfo->stream->format) {
15279 case HAL_PIXEL_FORMAT_BLOB:
15280 case HAL_PIXEL_FORMAT_YCbCr_420_888:
15281 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
15282 res = fillPbStreamConfig(&outputConfig, streamInfo->id,
15283 streamInfo->channel, /*stream index*/0);
15284 if (res != OK) {
15285                        LOGE("%s: Failed to fill stream config for YUV stream: %s (%d)",
15286 __FUNCTION__, strerror(-res), res);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080015287
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070015288 return res;
15289 }
15290
15291 outputStreamConfigs.push_back(outputConfig);
15292 break;
15293 default:
15294 // TODO: handle RAW16 outputs if mRawChannel was created. (b/36690506)
15295 break;
15296 }
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080015297 }
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080015298 }
15299
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080015300 res = gHdrPlusClient->configureStreams(inputConfig, outputStreamConfigs);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080015301 if (res != OK) {
15302        LOGE("%s: Failed to configure streams with HDR+ client: %s (%d)", __FUNCTION__,
15303 strerror(-res), res);
15304 return res;
15305 }
15306
15307 return OK;
15308}
15309
Chien-Yu Chene80574b2017-09-08 19:05:20 -070015310void QCamera3HardwareInterface::handleEaselFatalError()
Chien-Yu Chen90f1fc12017-07-14 14:31:53 -070015311{
Chien-Yu Chen90f1fc12017-07-14 14:31:53 -070015312 pthread_mutex_lock(&mMutex);
15313 mState = ERROR;
15314 pthread_mutex_unlock(&mMutex);
15315
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -070015316 handleCameraDeviceError(/*stopChannelImmediately*/true);
Chien-Yu Chen90f1fc12017-07-14 14:31:53 -070015317}
15318
Chien-Yu Chene80574b2017-09-08 19:05:20 -070015319void QCamera3HardwareInterface::handleEaselFatalErrorAsync()
15320{
15321 if (mEaselErrorFuture.valid()) {
15322        // The Easel error handler has already been launched; nothing more to do.
15323 return;
15324 }
15325
15326 // Launch a future to handle the fatal error.
15327 mEaselErrorFuture = std::async(std::launch::async,
15328 &QCamera3HardwareInterface::handleEaselFatalError, this);
15329}
15330
15331void QCamera3HardwareInterface::onEaselFatalError(std::string errMsg)
15332{
15333 ALOGE("%s: Got an Easel fatal error: %s", __FUNCTION__, errMsg.c_str());
15334 handleEaselFatalErrorAsync();
15335}
15336
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070015337void QCamera3HardwareInterface::onOpened(std::unique_ptr<HdrPlusClient> client)
15338{
Arnd Geis8cbfc182017-09-07 14:46:41 -070015339 int rc = NO_ERROR;
15340
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015341 if (client == nullptr) {
15342 ALOGE("%s: Opened client is null.", __FUNCTION__);
15343 return;
15344 }
15345
Chien-Yu Chene96475e2017-04-11 11:53:26 -070015346 logEaselEvent("EASEL_STARTUP_LATENCY", "HDR+ client opened.");
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015347 ALOGI("%s: HDR+ client opened.", __FUNCTION__);
15348
Chien-Yu Chen77ccd022017-06-23 12:00:36 -070015349 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070015350 if (!gHdrPlusClientOpening) {
15351 ALOGW("%s: HDR+ is disabled while HDR+ client is being opened.", __FUNCTION__);
15352 return;
15353 }
15354
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015355 gHdrPlusClient = std::move(client);
15356 gHdrPlusClientOpening = false;
Chien-Yu Chen77ccd022017-06-23 12:00:36 -070015357 gHdrPlusClientOpenCond.notify_one();
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015358
15359 // Set static metadata.
15360 status_t res = gHdrPlusClient->setStaticMetadata(*gStaticMetadata[mCameraId]);
15361 if (res != OK) {
15362 LOGE("%s: Failed to set static metadata in HDR+ client: %s (%d). Closing HDR+ client.",
15363 __FUNCTION__, strerror(-res), res);
Chien-Yu Chend77a5462017-06-02 18:00:38 -070015364 gEaselManagerClient->closeHdrPlusClient(std::move(gHdrPlusClient));
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015365 gHdrPlusClient = nullptr;
15366 return;
15367 }
15368
15369 // Enable HDR+ mode.
15370 res = enableHdrPlusModeLocked();
15371 if (res != OK) {
15372 LOGE("%s: Failed to configure HDR+ streams.", __FUNCTION__);
15373 }
Arnd Geis8cbfc182017-09-07 14:46:41 -070015374
15375 // Get Easel firmware version
15376 if (EaselManagerClientOpened) {
15377 rc = gEaselManagerClient->getFwVersion(mEaselFwVersion);
15378 if (rc != OK) {
15379 ALOGD("%s: Failed to query Easel firmware version", __FUNCTION__);
15380 } else {
15381 mEaselFwUpdated = true;
15382 }
15383 }
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015384}
15385
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070015386void QCamera3HardwareInterface::onOpenFailed(status_t err)
15387{
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015388 ALOGE("%s: Opening HDR+ client failed: %s (%d)", __FUNCTION__, strerror(-err), err);
Chien-Yu Chen77ccd022017-06-23 12:00:36 -070015389 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015390 gHdrPlusClientOpening = false;
Chien-Yu Chen77ccd022017-06-23 12:00:36 -070015391 gHdrPlusClientOpenCond.notify_one();
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015392}
15393
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070015394void QCamera3HardwareInterface::onFatalError()
15395{
Chien-Yu Chene80574b2017-09-08 19:05:20 -070015396 ALOGE("%s: HDR+ client encountered a fatal error.", __FUNCTION__);
15397 handleEaselFatalErrorAsync();
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070015398}
15399
Chien-Yu Chen29fd1d72017-04-27 18:42:09 -070015400void QCamera3HardwareInterface::onShutter(uint32_t requestId, int64_t apSensorTimestampNs)
15401{
15402 ALOGV("%s: %d: Received a shutter for HDR+ request %d timestamp %" PRId64, __FUNCTION__,
15403 __LINE__, requestId, apSensorTimestampNs);
15404
15405 mShutterDispatcher.markShutterReady(requestId, apSensorTimestampNs);
15406}
15407
Chien-Yu Chendaf68892017-08-25 12:56:40 -070015408void QCamera3HardwareInterface::onNextCaptureReady(uint32_t requestId)
15409{
15410 pthread_mutex_lock(&mMutex);
15411
15412 // Find the pending request for this result metadata.
15413 auto requestIter = mPendingRequestsList.begin();
15414 while (requestIter != mPendingRequestsList.end() && requestIter->frame_number != requestId) {
15415 requestIter++;
15416 }
15417
15418 if (requestIter == mPendingRequestsList.end()) {
15419 ALOGE("%s: Cannot find a pending request for frame number %u.", __FUNCTION__, requestId);
15420 pthread_mutex_unlock(&mMutex);
15421 return;
15422 }
15423
15424 requestIter->partial_result_cnt++;
15425
15426 CameraMetadata metadata;
15427 uint8_t ready = true;
15428 metadata.update(NEXUS_EXPERIMENTAL_2017_NEXT_STILL_INTENT_REQUEST_READY, &ready, 1);
15429
15430 // Send it to framework.
15431 camera3_capture_result_t result = {};
15432
15433 result.result = metadata.getAndLock();
15434 // Populate metadata result
15435 result.frame_number = requestId;
15436 result.num_output_buffers = 0;
15437 result.output_buffers = NULL;
15438 result.partial_result = requestIter->partial_result_cnt;
15439
15440 orchestrateResult(&result);
15441 metadata.unlock(result.result);
15442
15443 pthread_mutex_unlock(&mMutex);
15444}
15445
Chien-Yu Chen0a921f92017-08-27 17:25:33 -070015446void QCamera3HardwareInterface::onPostview(uint32_t requestId,
15447 std::unique_ptr<std::vector<uint8_t>> postview, uint32_t width, uint32_t height,
15448 uint32_t stride, int32_t format)
15449{
15450 if (property_get_bool("persist.camera.hdrplus.dump_postview", false)) {
15451 ALOGI("%s: %d: Received a postview %dx%d for HDR+ request %d", __FUNCTION__,
15452 __LINE__, width, height, requestId);
15453 char buf[FILENAME_MAX] = {};
15454 snprintf(buf, sizeof(buf), QCAMERA_DUMP_FRM_LOCATION"postview_%d_%dx%d.ppm",
15455 requestId, width, height);
15456
15457 pbcamera::StreamConfiguration config = {};
15458 config.image.width = width;
15459 config.image.height = height;
15460 config.image.format = format;
15461
15462 pbcamera::PlaneConfiguration plane = {};
15463 plane.stride = stride;
15464 plane.scanline = height;
15465
15466 config.image.planes.push_back(plane);
15467
15468 pbcamera::StreamBuffer buffer = {};
15469 buffer.streamId = 0;
15470 buffer.dmaBufFd = -1;
15471 buffer.data = postview->data();
15472 buffer.dataSize = postview->size();
15473
15474 hdrplus_client_utils::writePpm(buf, config, buffer);
15475 }
15476
15477 pthread_mutex_lock(&mMutex);
15478
15479 // Find the pending request for this result metadata.
15480 auto requestIter = mPendingRequestsList.begin();
15481 while (requestIter != mPendingRequestsList.end() && requestIter->frame_number != requestId) {
15482 requestIter++;
15483 }
15484
15485 if (requestIter == mPendingRequestsList.end()) {
15486 ALOGE("%s: Cannot find a pending request for frame number %u.", __FUNCTION__, requestId);
15487 pthread_mutex_unlock(&mMutex);
15488 return;
15489 }
15490
15491 requestIter->partial_result_cnt++;
15492
15493 CameraMetadata metadata;
15494 int32_t config[3] = {static_cast<int32_t>(width), static_cast<int32_t>(height),
15495 static_cast<int32_t>(stride)};
15496 metadata.update(NEXUS_EXPERIMENTAL_2017_POSTVIEW_CONFIG, config, 3);
15497 metadata.update(NEXUS_EXPERIMENTAL_2017_POSTVIEW_DATA, postview->data(), postview->size());
15498
15499 // Send it to framework.
15500 camera3_capture_result_t result = {};
15501
15502 result.result = metadata.getAndLock();
15503 // Populate metadata result
15504 result.frame_number = requestId;
15505 result.num_output_buffers = 0;
15506 result.output_buffers = NULL;
15507 result.partial_result = requestIter->partial_result_cnt;
15508
15509 orchestrateResult(&result);
15510 metadata.unlock(result.result);
15511
15512 pthread_mutex_unlock(&mMutex);
15513}
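/*
 * Usage note (illustrative): postview dumping is gated by the
 * persist.camera.hdrplus.dump_postview property; when enabled, each HDR+
 * postview is written as a .ppm file under QCAMERA_DUMP_FRM_LOCATION. On a
 * debuggable build it can be enabled with, for example:
 *
 *     adb shell setprop persist.camera.hdrplus.dump_postview 1
 */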
15514
void QCamera3HardwareInterface::onCaptureResult(pbcamera::CaptureResult *result,
        const camera_metadata_t &resultMetadata)
{
    if (result == nullptr) {
        ALOGE("%s: result is nullptr.", __FUNCTION__);
        return;
    }

    // Find the pending HDR+ request.
    HdrPlusPendingRequest pendingRequest;
    {
        Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
        auto req = mHdrPlusPendingRequests.find(result->requestId);
        if (req == mHdrPlusPendingRequests.end()) {
            ALOGE("%s: Couldn't find pending request %d", __FUNCTION__, result->requestId);
            return;
        }
        pendingRequest = req->second;
    }

    // Update the result metadata with the settings of the HDR+ still capture request because
    // the result metadata belongs to a ZSL buffer.
    CameraMetadata metadata;
    metadata = &resultMetadata;
    updateHdrPlusResultMetadata(metadata, pendingRequest.settings);
    camera_metadata_t* updatedResultMetadata = metadata.release();

    uint32_t halSnapshotStreamId = 0;
    if (mPictureChannel != nullptr) {
        halSnapshotStreamId = mPictureChannel->getStreamID(mPictureChannel->getStreamTypeMask());
    }

    auto halMetadata = std::make_shared<metadata_buffer_t>();
    clear_metadata_buffer(halMetadata.get());

    // Convert updated result metadata to HAL metadata.
    status_t res = translateFwkMetadataToHalMetadata(updatedResultMetadata, halMetadata.get(),
            halSnapshotStreamId, /*minFrameDuration*/0);
    if (res != 0) {
        ALOGE("%s: Translating metadata failed: %s (%d)", __FUNCTION__, strerror(-res), res);
    }

    for (auto &outputBuffer : result->outputBuffers) {
        uint32_t streamId = outputBuffer.streamId;

        // Find the framework output buffer in the pending request.
        auto frameworkOutputBufferIter = pendingRequest.frameworkOutputBuffers.find(streamId);
        if (frameworkOutputBufferIter == pendingRequest.frameworkOutputBuffers.end()) {
            ALOGE("%s: Couldn't find framework output buffers for stream id %u", __FUNCTION__,
                    streamId);
            continue;
        }

        camera3_stream_buffer_t *frameworkOutputBuffer = &frameworkOutputBufferIter->second;

        // Find the channel for the output buffer.
        QCamera3ProcessingChannel *channel =
                (QCamera3ProcessingChannel*)frameworkOutputBuffer->stream->priv;

        // Find the output buffer def.
        auto outputBufferIter = pendingRequest.outputBuffers.find(streamId);
        if (outputBufferIter == pendingRequest.outputBuffers.end()) {
            ALOGE("%s: Cannot find output buffer", __FUNCTION__);
            continue;
        }

        std::shared_ptr<mm_camera_buf_def_t> outputBufferDef = outputBufferIter->second;

        // Check whether to dump the buffer.
        if (frameworkOutputBuffer->stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888 ||
                frameworkOutputBuffer->stream->format == HAL_PIXEL_FORMAT_BLOB) {
            // If the stream format is YUV or JPEG, check if dumping HDR+ YUV output is enabled.
            char prop[PROPERTY_VALUE_MAX];
            property_get("persist.camera.hdrplus.dump_yuv", prop, "0");
            bool dumpYuvOutput = atoi(prop);

            if (dumpYuvOutput) {
                // Dump YUV buffer to a PPM file.
                pbcamera::StreamConfiguration outputConfig;
                status_t rc = fillPbStreamConfig(&outputConfig, streamId,
                        channel, /*stream index*/0);
                if (rc == OK) {
                    char buf[FILENAME_MAX] = {};
                    snprintf(buf, sizeof(buf), QCAMERA_DUMP_FRM_LOCATION"s_%d_%d_%dx%d.ppm",
                            result->requestId, streamId,
                            outputConfig.image.width, outputConfig.image.height);

                    hdrplus_client_utils::writePpm(buf, outputConfig, outputBuffer);
                } else {
                    LOGW("%s: Couldn't dump YUV buffer because getting stream config failed: "
                            "%s (%d).", __FUNCTION__, strerror(-rc), rc);
                }
            }
        }

        if (channel == mPictureChannel) {
            // Return the buffer to pic channel for encoding.
            mPictureChannel->returnYuvBufferAndEncode(outputBufferDef.get(),
                    frameworkOutputBuffer->buffer, result->requestId,
                    halMetadata);
        } else {
            // Return the buffer to camera framework.
            pthread_mutex_lock(&mMutex);
            handleBufferWithLock(frameworkOutputBuffer, result->requestId);
            pthread_mutex_unlock(&mMutex);

            channel->unregisterBuffer(outputBufferDef.get());
        }
    }

    // Send HDR+ metadata to framework.
    {
        pthread_mutex_lock(&mMutex);

        // updatedResultMetadata will be freed in handlePendingResultMetadataWithLock.
        handlePendingResultMetadataWithLock(result->requestId, updatedResultMetadata);
        pthread_mutex_unlock(&mMutex);
    }

    // Remove the HDR+ pending request.
    {
        Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
        auto req = mHdrPlusPendingRequests.find(result->requestId);
        mHdrPlusPendingRequests.erase(req);
    }
}

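/*
 * onFailedCaptureResult
 *
 * Callback for an HDR+ capture that failed. Output buffers are returned to
 * their channels (the picture channel gets its YUV buffer back without
 * encoding), a CAMERA3_MSG_ERROR_BUFFER notification and a result carrying
 * CAMERA3_BUFFER_STATUS_ERROR are sent to the framework for every pending
 * buffer of that frame, and the pending HDR+ and HAL request entries are
 * removed.
 */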
void QCamera3HardwareInterface::onFailedCaptureResult(pbcamera::CaptureResult *failedResult)
{
    if (failedResult == nullptr) {
        ALOGE("%s: Got an empty failed result.", __FUNCTION__);
        return;
    }

    ALOGE("%s: Got a failed HDR+ result for request %d", __FUNCTION__, failedResult->requestId);

    // Find the pending HDR+ request.
    HdrPlusPendingRequest pendingRequest;
    {
        Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
        auto req = mHdrPlusPendingRequests.find(failedResult->requestId);
        if (req == mHdrPlusPendingRequests.end()) {
            ALOGE("%s: Couldn't find pending request %d", __FUNCTION__, failedResult->requestId);
            return;
        }
        pendingRequest = req->second;
    }

    for (auto &outputBuffer : failedResult->outputBuffers) {
        uint32_t streamId = outputBuffer.streamId;

        // Find the framework output buffer in the pending request.
        auto frameworkOutputBufferIter = pendingRequest.frameworkOutputBuffers.find(streamId);
        if (frameworkOutputBufferIter == pendingRequest.frameworkOutputBuffers.end()) {
            ALOGE("%s: Couldn't find framework output buffers for stream id %u", __FUNCTION__,
                    streamId);
            continue;
        }

        camera3_stream_buffer_t *frameworkOutputBuffer = &frameworkOutputBufferIter->second;

        // Find the channel for the output buffer.
        QCamera3ProcessingChannel *channel =
                (QCamera3ProcessingChannel*)frameworkOutputBuffer->stream->priv;

        // Find the output buffer def.
        auto outputBufferIter = pendingRequest.outputBuffers.find(streamId);
        if (outputBufferIter == pendingRequest.outputBuffers.end()) {
            ALOGE("%s: Cannot find output buffer", __FUNCTION__);
            continue;
        }

        std::shared_ptr<mm_camera_buf_def_t> outputBufferDef = outputBufferIter->second;

        if (channel == mPictureChannel) {
            // Return the buffer to pic channel.
            mPictureChannel->returnYuvBuffer(outputBufferDef.get());
        } else {
            channel->unregisterBuffer(outputBufferDef.get());
        }
    }

    // Remove the HDR+ pending request.
    {
        Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
        auto req = mHdrPlusPendingRequests.find(failedResult->requestId);
        mHdrPlusPendingRequests.erase(req);
    }

    pthread_mutex_lock(&mMutex);

    // Find the pending buffers.
    auto pendingBuffers = mPendingBuffersMap.mPendingBuffersInRequest.begin();
    while (pendingBuffers != mPendingBuffersMap.mPendingBuffersInRequest.end()) {
        if (pendingBuffers->frame_number == failedResult->requestId) {
            break;
        }
        pendingBuffers++;
    }

    // Send out buffer errors for the pending buffers.
    if (pendingBuffers != mPendingBuffersMap.mPendingBuffersInRequest.end()) {
        std::vector<camera3_stream_buffer_t> streamBuffers;
        for (auto &buffer : pendingBuffers->mPendingBufferList) {
            // Prepare a stream buffer.
            camera3_stream_buffer_t streamBuffer = {};
            streamBuffer.stream = buffer.stream;
            streamBuffer.buffer = buffer.buffer;
            streamBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
            streamBuffer.acquire_fence = -1;
            streamBuffer.release_fence = -1;

            streamBuffers.push_back(streamBuffer);

            // Send out error buffer event.
            camera3_notify_msg_t notify_msg = {};
            notify_msg.type = CAMERA3_MSG_ERROR;
            notify_msg.message.error.frame_number = pendingBuffers->frame_number;
            notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
            notify_msg.message.error.error_stream = buffer.stream;

            orchestrateNotify(&notify_msg);
        }

        camera3_capture_result_t result = {};
        result.frame_number = pendingBuffers->frame_number;
        result.num_output_buffers = streamBuffers.size();
        result.output_buffers = &streamBuffers[0];

        // Send out result with buffer errors.
        orchestrateResult(&result);

        // Remove pending buffers.
        mPendingBuffersMap.mPendingBuffersInRequest.erase(pendingBuffers);
    }

    // Remove pending request.
    auto halRequest = mPendingRequestsList.begin();
    while (halRequest != mPendingRequestsList.end()) {
        if (halRequest->frame_number == failedResult->requestId) {
            mPendingRequestsList.erase(halRequest);
            break;
        }
        halRequest++;
    }

    pthread_mutex_unlock(&mMutex);
}


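/*
 * ShutterDispatcher
 *
 * Queues one shutter entry per frame number and sends CAMERA3_MSG_SHUTTER
 * notifications to the framework strictly in frame-number order. Regular and
 * reprocess requests are tracked in separate maps so each sequence is ordered
 * independently.
 *
 * Intended call sequence (inferred from the methods below; the actual call
 * sites live elsewhere in this HAL):
 *
 *   dispatcher.expectShutter(frameNumber, isReprocess);  // request queued
 *   dispatcher.markShutterReady(frameNumber, timestamp); // timestamp known
 *   dispatcher.clear(frameNumber);                       // or clear() on flush/teardown
 */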
ShutterDispatcher::ShutterDispatcher(QCamera3HardwareInterface *parent) :
        mParent(parent) {}

void ShutterDispatcher::expectShutter(uint32_t frameNumber, bool isReprocess)
{
    std::lock_guard<std::mutex> lock(mLock);

    if (isReprocess) {
        mReprocessShutters.emplace(frameNumber, Shutter());
    } else {
        mShutters.emplace(frameNumber, Shutter());
    }
}

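/*
 * Marks the shutter for a frame ready and flushes the queue. Because
 * mShutters/mReprocessShutters are std::map instances keyed by frame number,
 * iterating from begin() visits frames in ascending order; notifications are
 * emitted for every leading ready entry and stop at the first entry whose
 * timestamp has not arrived yet, which is what guarantees in-order shutter
 * delivery.
 */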
void ShutterDispatcher::markShutterReady(uint32_t frameNumber, uint64_t timestamp)
{
    std::lock_guard<std::mutex> lock(mLock);

    std::map<uint32_t, Shutter> *shutters = nullptr;

    // Find the shutter entry.
    auto shutter = mShutters.find(frameNumber);
    if (shutter == mShutters.end()) {
        shutter = mReprocessShutters.find(frameNumber);
        if (shutter == mReprocessShutters.end()) {
            // Shutter was already sent.
            return;
        }
        shutters = &mReprocessShutters;
    } else {
        shutters = &mShutters;
    }

    // Make this frame's shutter ready.
    shutter->second.ready = true;
    shutter->second.timestamp = timestamp;

    // Iterate through the shutters and send them out until reaching one that's not ready yet.
    shutter = shutters->begin();
    while (shutter != shutters->end()) {
        if (!shutter->second.ready) {
            // If this shutter is not ready, the following shutters can't be sent.
            break;
        }

        camera3_notify_msg_t msg = {};
        msg.type = CAMERA3_MSG_SHUTTER;
        msg.message.shutter.frame_number = shutter->first;
        msg.message.shutter.timestamp = shutter->second.timestamp;
        mParent->orchestrateNotify(&msg);

        shutter = shutters->erase(shutter);
    }
}

15816
15817void ShutterDispatcher::clear(uint32_t frameNumber)
15818{
15819 std::lock_guard<std::mutex> lock(mLock);
15820 mShutters.erase(frameNumber);
Chien-Yu Chena7f98612017-06-20 16:54:10 -070015821 mReprocessShutters.erase(frameNumber);
Chien-Yu Chen3f303522017-05-19 15:21:45 -070015822}
15823
15824void ShutterDispatcher::clear()
15825{
15826 std::lock_guard<std::mutex> lock(mLock);
15827
15828 // Log errors for stale shutters.
15829 for (auto &shutter : mShutters) {
15830 ALOGE("%s: stale shutter: frame number %u, ready %d, timestamp %" PRId64,
15831 __FUNCTION__, shutter.first, shutter.second.ready,
15832 shutter.second.timestamp);
15833 }
Chien-Yu Chena7f98612017-06-20 16:54:10 -070015834
15835 // Log errors for stale reprocess shutters.
15836 for (auto &shutter : mReprocessShutters) {
15837 ALOGE("%s: stale reprocess shutter: frame number %u, ready %d, timestamp %" PRId64,
15838 __FUNCTION__, shutter.first, shutter.second.ready,
15839 shutter.second.timestamp);
15840 }
15841
Chien-Yu Chen3f303522017-05-19 15:21:45 -070015842 mShutters.clear();
Chien-Yu Chena7f98612017-06-20 16:54:10 -070015843 mReprocessShutters.clear();
Chien-Yu Chen3f303522017-05-19 15:21:45 -070015844}
15845
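/*
 * OutputBufferDispatcher
 *
 * Keeps a "frame-number -> buffer" map per configured stream and returns each
 * stream's output buffers to the framework in frame-number order, mirroring
 * what ShutterDispatcher does for shutter notifications. configureStreams()
 * resets the maps whenever the stream configuration changes.
 */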
OutputBufferDispatcher::OutputBufferDispatcher(QCamera3HardwareInterface *parent) :
        mParent(parent) {}

status_t OutputBufferDispatcher::configureStreams(camera3_stream_configuration_t *streamList)
{
    std::lock_guard<std::mutex> lock(mLock);
    mStreamBuffers.clear();
    if (!streamList) {
        ALOGE("%s: streamList is nullptr.", __FUNCTION__);
        return -EINVAL;
    }

    // Create a "frame-number -> buffer" map for each stream.
    for (uint32_t i = 0; i < streamList->num_streams; i++) {
        mStreamBuffers.emplace(streamList->streams[i], std::map<uint32_t, Buffer>());
    }

    return OK;
}

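/*
 * Registers a placeholder (not yet ready) buffer for the given frame number on
 * the given stream. Creating the placeholder up front means that a buffer
 * completing out of order is simply marked ready and held until all earlier
 * frames on the same stream have been delivered.
 */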
status_t OutputBufferDispatcher::expectBuffer(uint32_t frameNumber, camera3_stream_t *stream)
{
    std::lock_guard<std::mutex> lock(mLock);

    // Find the "frame-number -> buffer" map for the stream.
    auto buffers = mStreamBuffers.find(stream);
    if (buffers == mStreamBuffers.end()) {
        ALOGE("%s: Stream %p was not configured.", __FUNCTION__, stream);
        return -EINVAL;
    }

    // Create an unready buffer for this frame number.
    buffers->second.emplace(frameNumber, Buffer());
    return OK;
}

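/*
 * Marks the buffer for a frame ready and, as in ShutterDispatcher, drains the
 * per-stream map from the smallest frame number until the first entry that is
 * not ready, sending each ready buffer to the framework as a single-buffer
 * capture result.
 */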
void OutputBufferDispatcher::markBufferReady(uint32_t frameNumber,
        const camera3_stream_buffer_t &buffer)
{
    std::lock_guard<std::mutex> lock(mLock);

    // Find the "frame-number -> buffer" map for the stream.
    auto buffers = mStreamBuffers.find(buffer.stream);
    if (buffers == mStreamBuffers.end()) {
        ALOGE("%s: Cannot find pending buffers for stream %p.", __FUNCTION__, buffer.stream);
        return;
    }

    // Find the unready buffer for this frame number and mark it ready.
    auto pendingBuffer = buffers->second.find(frameNumber);
    if (pendingBuffer == buffers->second.end()) {
        ALOGE("%s: Cannot find the pending buffer for frame number %u.", __FUNCTION__, frameNumber);
        return;
    }

    pendingBuffer->second.ready = true;
    pendingBuffer->second.buffer = buffer;

    // Iterate through the buffers and send them out until reaching one that's not ready yet.
    pendingBuffer = buffers->second.begin();
    while (pendingBuffer != buffers->second.end()) {
        if (!pendingBuffer->second.ready) {
            // If this buffer is not ready, the following buffers can't be sent.
            break;
        }

        camera3_capture_result_t result = {};
        result.frame_number = pendingBuffer->first;
        result.num_output_buffers = 1;
        result.output_buffers = &pendingBuffer->second.buffer;

        // Send out the result containing this buffer.
        mParent->orchestrateResult(&result);

        pendingBuffer = buffers->second.erase(pendingBuffer);
    }
}

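/*
 * Drops all pending buffers, logging an error for each stale entry. When
 * clearConfiguredStreams is true, the per-stream maps themselves are removed
 * as well, so configureStreams() must be called again before new buffers can
 * be expected.
 */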
void OutputBufferDispatcher::clear(bool clearConfiguredStreams)
{
    std::lock_guard<std::mutex> lock(mLock);

    // Log errors for stale buffers.
    for (auto &buffers : mStreamBuffers) {
        for (auto &buffer : buffers.second) {
            ALOGE("%s: stale buffer: stream %p, frame number %u, ready %d",
                    __FUNCTION__, buffers.first, buffer.first, buffer.second.ready);
        }
        buffers.second.clear();
    }

    if (clearConfiguredStreams) {
        mStreamBuffers.clear();
    }
}

}; //end namespace qcamera