/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*     * Redistributions of source code must retain the above copyright
*       notice, this list of conditions and the following disclaimer.
*     * Redistributions in binary form must reproduce the above
*       copyright notice, this list of conditions and the following
*       disclaimer in the documentation and/or other materials provided
*       with the distribution.
*     * Neither the name of The Linux Foundation nor the names of its
*       contributors may be used to endorse or promote products derived
*       from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
*/

#define LOG_TAG "QCamera3HWI"
//#define LOG_NDEBUG 0

#define __STDC_LIMIT_MACROS

// To remove
#include <cutils/properties.h>

// System dependencies
#include <dlfcn.h>
#include <fcntl.h>
#include <stdio.h>
#include <stdlib.h>
#include "utils/Timers.h"
#include "sys/ioctl.h"
#include <time.h>
#include <sync/sync.h>
#include "gralloc_priv.h"
#include <map>

// Display dependencies
#include "qdMetaData.h"

// Camera dependencies
#include "android/QCamera3External.h"
#include "util/QCameraFlash.h"
#include "QCamera3HWI.h"
#include "QCamera3VendorTags.h"
#include "QCameraTrace.h"

#include "HdrPlusClientUtils.h"

extern "C" {
#include "mm_camera_dbg.h"
}
#include "cam_cond.h"

using ::android::hardware::camera::common::V1_0::helper::CameraMetadata;
using namespace android;

namespace qcamera {

#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )

#define EMPTY_PIPELINE_DELAY 2
// mm_camera has 2 partial results: 3A, and final result.
// HDR+ requests have 3 partial results: postview, next request ready, and final result.
#define PARTIAL_RESULT_COUNT 3
#define FRAME_SKIP_DELAY 0

#define MAX_VALUE_8BIT ((1<<8)-1)
#define MAX_VALUE_10BIT ((1<<10)-1)
#define MAX_VALUE_12BIT ((1<<12)-1)

#define VIDEO_4K_WIDTH  3840
#define VIDEO_4K_HEIGHT 2160

#define MAX_EIS_WIDTH 3840
#define MAX_EIS_HEIGHT 2160

#define MAX_RAW_STREAMS 1
#define MAX_STALLING_STREAMS 1
#define MAX_PROCESSED_STREAMS 3
/* Batch mode is enabled only if FPS set is equal to or greater than this */
#define MIN_FPS_FOR_BATCH_MODE (120)
#define PREVIEW_FPS_FOR_HFR (30)
#define DEFAULT_VIDEO_FPS (30.0)
#define TEMPLATE_MAX_PREVIEW_FPS (30.0)
#define MAX_HFR_BATCH_SIZE (8)
#define REGIONS_TUPLE_COUNT 5
// Set a threshold for detection of missing buffers //seconds
#define MISSING_REQUEST_BUF_TIMEOUT 5
#define MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT 30
#define FLUSH_TIMEOUT 3
#define METADATA_MAP_SIZE(MAP) (sizeof(MAP)/sizeof(MAP[0]))
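// For example, METADATA_MAP_SIZE(EFFECT_MODES_MAP) evaluates to the number of entries in the
// EFFECT_MODES_MAP table defined below; the same idiom is generally used when iterating the
// QCameraMap lookup tables in this file.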

#define CAM_QCOM_FEATURE_PP_SUPERSET_HAL3 ( CAM_QCOM_FEATURE_DENOISE2D |\
                                            CAM_QCOM_FEATURE_CROP |\
                                            CAM_QCOM_FEATURE_ROTATION |\
                                            CAM_QCOM_FEATURE_SHARPNESS |\
                                            CAM_QCOM_FEATURE_SCALE |\
                                            CAM_QCOM_FEATURE_CAC |\
                                            CAM_QCOM_FEATURE_CDS )
/* Per configuration size for static metadata length*/
#define PER_CONFIGURATION_SIZE_3 (3)

#define TIMEOUT_NEVER -1

/* Face rect indices */
#define FACE_LEFT 0
#define FACE_TOP 1
#define FACE_RIGHT 2
#define FACE_BOTTOM 3
#define FACE_WEIGHT 4

/* Face landmarks indices */
#define LEFT_EYE_X 0
#define LEFT_EYE_Y 1
#define RIGHT_EYE_X 2
#define RIGHT_EYE_Y 3
#define MOUTH_X 4
#define MOUTH_Y 5
#define TOTAL_LANDMARK_INDICES 6

// Max preferred zoom
#define MAX_PREFERRED_ZOOM_RATIO 7.0

// Whether to check for the GPU stride padding, or use the default
//#define CHECK_GPU_PIXEL_ALIGNMENT

cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS];
extern pthread_mutex_t gCamLock;
volatile uint32_t gCamHal3LogLevel = 1;
extern uint8_t gNumCameraSessions;

// Note that this doesn't support concurrent front and back camera b/35960155.
// The following Easel related variables must be protected by gHdrPlusClientLock.
std::unique_ptr<EaselManagerClient> gEaselManagerClient;
bool EaselManagerClientOpened = false; // If gEaselManagerClient is opened.
std::unique_ptr<HdrPlusClient> gHdrPlusClient = nullptr;
bool gHdrPlusClientOpening = false; // If HDR+ client is being opened.
std::condition_variable gHdrPlusClientOpenCond; // Used to synchronize HDR+ client opening.
bool gEaselProfilingEnabled = false; // If Easel profiling is enabled.
bool gExposeEnableZslKey = false; // If HAL makes android.control.enableZsl available.
bool gEnableMultipleHdrplusOutputs = false; // Whether to enable multiple output from Easel HDR+.

// If Easel is in bypass only mode. If true, Easel HDR+ won't be enabled.
bool gEaselBypassOnly;

std::mutex gHdrPlusClientLock; // Protect above Easel related variables.


const QCamera3HardwareInterface::QCameraPropMap QCamera3HardwareInterface::CDS_MAP [] = {
    {"On", CAM_CDS_MODE_ON},
    {"Off", CAM_CDS_MODE_OFF},
    {"Auto",CAM_CDS_MODE_AUTO}
};
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_video_hdr_mode_t,
        cam_video_hdr_mode_t> QCamera3HardwareInterface::VIDEO_HDR_MODES_MAP[] = {
    { QCAMERA3_VIDEO_HDR_MODE_OFF, CAM_VIDEO_HDR_MODE_OFF },
    { QCAMERA3_VIDEO_HDR_MODE_ON, CAM_VIDEO_HDR_MODE_ON }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_binning_correction_mode_t,
        cam_binning_correction_mode_t> QCamera3HardwareInterface::BINNING_CORRECTION_MODES_MAP[] = {
    { QCAMERA3_BINNING_CORRECTION_MODE_OFF, CAM_BINNING_CORRECTION_MODE_OFF },
    { QCAMERA3_BINNING_CORRECTION_MODE_ON, CAM_BINNING_CORRECTION_MODE_ON }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_ir_mode_t,
        cam_ir_mode_type_t> QCamera3HardwareInterface::IR_MODES_MAP [] = {
    {QCAMERA3_IR_MODE_OFF, CAM_IR_MODE_OFF},
    {QCAMERA3_IR_MODE_ON, CAM_IR_MODE_ON},
    {QCAMERA3_IR_MODE_AUTO, CAM_IR_MODE_AUTO}
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_effect_mode_t,
        cam_effect_mode_type> QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
    { ANDROID_CONTROL_EFFECT_MODE_OFF, CAM_EFFECT_MODE_OFF },
    { ANDROID_CONTROL_EFFECT_MODE_MONO, CAM_EFFECT_MODE_MONO },
    { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE, CAM_EFFECT_MODE_NEGATIVE },
    { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE, CAM_EFFECT_MODE_SOLARIZE },
    { ANDROID_CONTROL_EFFECT_MODE_SEPIA, CAM_EFFECT_MODE_SEPIA },
    { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE, CAM_EFFECT_MODE_POSTERIZE },
    { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_AQUA, CAM_EFFECT_MODE_AQUA }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_awb_mode_t,
        cam_wb_mode_type> QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
    { ANDROID_CONTROL_AWB_MODE_OFF, CAM_WB_MODE_OFF },
    { ANDROID_CONTROL_AWB_MODE_AUTO, CAM_WB_MODE_AUTO },
    { ANDROID_CONTROL_AWB_MODE_INCANDESCENT, CAM_WB_MODE_INCANDESCENT },
    { ANDROID_CONTROL_AWB_MODE_FLUORESCENT, CAM_WB_MODE_FLUORESCENT },
    { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT,CAM_WB_MODE_WARM_FLUORESCENT},
    { ANDROID_CONTROL_AWB_MODE_DAYLIGHT, CAM_WB_MODE_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_TWILIGHT, CAM_WB_MODE_TWILIGHT },
    { ANDROID_CONTROL_AWB_MODE_SHADE, CAM_WB_MODE_SHADE }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_scene_mode_t,
        cam_scene_mode_type> QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
    { ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY, CAM_SCENE_MODE_FACE_PRIORITY },
    { ANDROID_CONTROL_SCENE_MODE_ACTION, CAM_SCENE_MODE_ACTION },
    { ANDROID_CONTROL_SCENE_MODE_PORTRAIT, CAM_SCENE_MODE_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE, CAM_SCENE_MODE_LANDSCAPE },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT, CAM_SCENE_MODE_NIGHT },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_THEATRE, CAM_SCENE_MODE_THEATRE },
    { ANDROID_CONTROL_SCENE_MODE_BEACH, CAM_SCENE_MODE_BEACH },
    { ANDROID_CONTROL_SCENE_MODE_SNOW, CAM_SCENE_MODE_SNOW },
    { ANDROID_CONTROL_SCENE_MODE_SUNSET, CAM_SCENE_MODE_SUNSET },
    { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO, CAM_SCENE_MODE_ANTISHAKE },
    { ANDROID_CONTROL_SCENE_MODE_FIREWORKS , CAM_SCENE_MODE_FIREWORKS },
    { ANDROID_CONTROL_SCENE_MODE_SPORTS , CAM_SCENE_MODE_SPORTS },
    { ANDROID_CONTROL_SCENE_MODE_PARTY, CAM_SCENE_MODE_PARTY },
    { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT, CAM_SCENE_MODE_CANDLELIGHT },
    { ANDROID_CONTROL_SCENE_MODE_BARCODE, CAM_SCENE_MODE_BARCODE},
    { ANDROID_CONTROL_SCENE_MODE_HDR, CAM_SCENE_MODE_HDR}
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_af_mode_t,
        cam_focus_mode_type> QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
    { ANDROID_CONTROL_AF_MODE_OFF, CAM_FOCUS_MODE_OFF },
    { ANDROID_CONTROL_AF_MODE_OFF, CAM_FOCUS_MODE_FIXED },
    { ANDROID_CONTROL_AF_MODE_AUTO, CAM_FOCUS_MODE_AUTO },
    { ANDROID_CONTROL_AF_MODE_MACRO, CAM_FOCUS_MODE_MACRO },
    { ANDROID_CONTROL_AF_MODE_EDOF, CAM_FOCUS_MODE_EDOF },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO, CAM_FOCUS_MODE_CONTINOUS_VIDEO }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_color_correction_aberration_mode_t,
        cam_aberration_mode_t> QCamera3HardwareInterface::COLOR_ABERRATION_MAP[] = {
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
            CAM_COLOR_CORRECTION_ABERRATION_OFF },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
            CAM_COLOR_CORRECTION_ABERRATION_FAST },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY,
            CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY },
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_antibanding_mode_t,
        cam_antibanding_mode_type> QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF, CAM_ANTIBANDING_MODE_OFF },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = {
    { ANDROID_CONTROL_AE_MODE_OFF, CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON, CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH, CAM_FLASH_MODE_AUTO},
    { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH, CAM_FLASH_MODE_ON },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO},
    { (camera_metadata_enum_android_control_ae_mode_t)
            NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH, CAM_FLASH_MODE_OFF }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_flash_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
    { ANDROID_FLASH_MODE_OFF, CAM_FLASH_MODE_OFF },
    { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_SINGLE },
    { ANDROID_FLASH_MODE_TORCH, CAM_FLASH_MODE_TORCH }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_statistics_face_detect_mode_t,
        cam_face_detect_mode_t> QCamera3HardwareInterface::FACEDETECT_MODES_MAP[] = {
    { ANDROID_STATISTICS_FACE_DETECT_MODE_OFF, CAM_FACE_DETECT_MODE_OFF },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE, CAM_FACE_DETECT_MODE_SIMPLE },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_FULL, CAM_FACE_DETECT_MODE_FULL }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_info_focus_distance_calibration_t,
        cam_focus_calibration_t> QCamera3HardwareInterface::FOCUS_CALIBRATION_MAP[] = {
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED,
            CAM_FOCUS_UNCALIBRATED },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_APPROXIMATE,
            CAM_FOCUS_APPROXIMATE },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED,
            CAM_FOCUS_CALIBRATED }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_state_t,
        cam_af_lens_state_t> QCamera3HardwareInterface::LENS_STATE_MAP[] = {
    { ANDROID_LENS_STATE_STATIONARY, CAM_AF_LENS_STATE_STATIONARY},
    { ANDROID_LENS_STATE_MOVING, CAM_AF_LENS_STATE_MOVING}
};

const int32_t available_thumbnail_sizes[] = {0, 0,
                                             176, 144,
                                             240, 144,
                                             256, 144,
                                             240, 160,
                                             256, 154,
                                             240, 240,
                                             320, 240};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_test_pattern_mode_t,
        cam_test_pattern_mode_t> QCamera3HardwareInterface::TEST_PATTERN_MAP[] = {
    { ANDROID_SENSOR_TEST_PATTERN_MODE_OFF, CAM_TEST_PATTERN_OFF },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR, CAM_TEST_PATTERN_SOLID_COLOR },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS, CAM_TEST_PATTERN_COLOR_BARS },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS_FADE_TO_GRAY, CAM_TEST_PATTERN_COLOR_BARS_FADE_TO_GRAY },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_PN9, CAM_TEST_PATTERN_PN9 },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_CUSTOM1, CAM_TEST_PATTERN_CUSTOM1},
};

/* Since there is no mapping for all the options, some Android enums are not listed.
 * Also, the order in this list is important: when mapping from HAL to Android the code
 * traverses from lower to higher index, so for HAL values that map to multiple
 * Android values, the traversal logic will select the first one found.
 */
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_reference_illuminant1_t,
        cam_illuminat_t> QCamera3HardwareInterface::REFERENCE_ILLUMINANT_MAP[] = {
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FLUORESCENT, CAM_AWB_WARM_FLO},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_COOL_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_A, CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D55, CAM_AWB_NOON },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D65, CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D75, CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D50, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_ISO_STUDIO_TUNGSTEN, CAM_AWB_CUSTOM_A},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_TUNGSTEN, CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FINE_WEATHER, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_CLOUDY_WEATHER, CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_SHADE, CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAY_WHITE_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO},
};
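// A minimal sketch of the HAL-to-Android reverse lookup the comment above describes
// (the member names used here are illustrative, not necessarily the exact fields of
// QCameraMap): traversal starts at index 0, so the lowest-index match wins.
//
//     for (size_t i = 0; i < METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP); i++) {
//         if (REFERENCE_ILLUMINANT_MAP[i].hal_value == halValue) {
//             return REFERENCE_ILLUMINANT_MAP[i].fwk_value; // first match is selected
//         }
//     }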

const QCamera3HardwareInterface::QCameraMap<
        int32_t, cam_hfr_mode_t> QCamera3HardwareInterface::HFR_MODE_MAP[] = {
    { 60, CAM_HFR_MODE_60FPS},
    { 90, CAM_HFR_MODE_90FPS},
    { 120, CAM_HFR_MODE_120FPS},
    { 150, CAM_HFR_MODE_150FPS},
    { 180, CAM_HFR_MODE_180FPS},
    { 210, CAM_HFR_MODE_210FPS},
    { 240, CAM_HFR_MODE_240FPS},
    { 480, CAM_HFR_MODE_480FPS},
};

const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_instant_aec_mode_t,
        cam_aec_convergence_type> QCamera3HardwareInterface::INSTANT_AEC_MODES_MAP[] = {
    { QCAMERA3_INSTANT_AEC_NORMAL_CONVERGENCE, CAM_AEC_NORMAL_CONVERGENCE},
    { QCAMERA3_INSTANT_AEC_AGGRESSIVE_CONVERGENCE, CAM_AEC_AGGRESSIVE_CONVERGENCE},
    { QCAMERA3_INSTANT_AEC_FAST_CONVERGENCE, CAM_AEC_FAST_CONVERGENCE},
};

const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_exposure_meter_mode_t,
        cam_auto_exposure_mode_type> QCamera3HardwareInterface::AEC_MODES_MAP[] = {
    { QCAMERA3_EXP_METER_MODE_FRAME_AVERAGE, CAM_AEC_MODE_FRAME_AVERAGE },
    { QCAMERA3_EXP_METER_MODE_CENTER_WEIGHTED, CAM_AEC_MODE_CENTER_WEIGHTED },
    { QCAMERA3_EXP_METER_MODE_SPOT_METERING, CAM_AEC_MODE_SPOT_METERING },
    { QCAMERA3_EXP_METER_MODE_SMART_METERING, CAM_AEC_MODE_SMART_METERING },
    { QCAMERA3_EXP_METER_MODE_USER_METERING, CAM_AEC_MODE_USER_METERING },
    { QCAMERA3_EXP_METER_MODE_SPOT_METERING_ADV, CAM_AEC_MODE_SPOT_METERING_ADV },
    { QCAMERA3_EXP_METER_MODE_CENTER_WEIGHTED_ADV, CAM_AEC_MODE_CENTER_WEIGHTED_ADV },
};

const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_iso_mode_t,
        cam_iso_mode_type> QCamera3HardwareInterface::ISO_MODES_MAP[] = {
    { QCAMERA3_ISO_MODE_AUTO, CAM_ISO_MODE_AUTO },
    { QCAMERA3_ISO_MODE_DEBLUR, CAM_ISO_MODE_DEBLUR },
    { QCAMERA3_ISO_MODE_100, CAM_ISO_MODE_100 },
    { QCAMERA3_ISO_MODE_200, CAM_ISO_MODE_200 },
    { QCAMERA3_ISO_MODE_400, CAM_ISO_MODE_400 },
    { QCAMERA3_ISO_MODE_800, CAM_ISO_MODE_800 },
    { QCAMERA3_ISO_MODE_1600, CAM_ISO_MODE_1600 },
    { QCAMERA3_ISO_MODE_3200, CAM_ISO_MODE_3200 },
};

camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
    .initialize = QCamera3HardwareInterface::initialize,
    .configure_streams = QCamera3HardwareInterface::configure_streams,
    .register_stream_buffers = NULL,
    .construct_default_request_settings = QCamera3HardwareInterface::construct_default_request_settings,
    .process_capture_request = QCamera3HardwareInterface::process_capture_request,
    .get_metadata_vendor_tag_ops = NULL,
    .dump = QCamera3HardwareInterface::dump,
    .flush = QCamera3HardwareInterface::flush,
    .reserved = {0},
};

// initialise to some default value
uint32_t QCamera3HardwareInterface::sessionId[] = {0xDEADBEEF, 0xDEADBEEF, 0xDEADBEEF};
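// 0xDEADBEEF acts as the "no active session" sentinel: closeCamera() below resets a camera's
// entry back to this value once its session id is no longer valid.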

static inline void logEaselEvent(const char *tag, const char *event) {
    if (CC_UNLIKELY(gEaselProfilingEnabled)) {
        struct timespec ts = {};
        static int64_t kMsPerSec = 1000;
        static int64_t kNsPerMs = 1000000;
        status_t res = clock_gettime(CLOCK_BOOTTIME, &ts);
        if (res != OK) {
            ALOGE("[%s] Failed to get boot time for <%s>.", tag, event);
        } else {
            int64_t now = static_cast<int64_t>(ts.tv_sec) * kMsPerSec + ts.tv_nsec / kNsPerMs;
            ALOGI("[%s] %s at %" PRId64 " ms", tag, event, now);
        }
    }
}
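// Typical call site (see openCamera() below): logEaselEvent("EASEL_STARTUP_LATENCY", "Camera Open");
// With gEaselProfilingEnabled left at its default of false, the CC_UNLIKELY branch is skipped and
// the call is essentially free.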

/*===========================================================================
 * FUNCTION : QCamera3HardwareInterface
 *
 * DESCRIPTION: constructor of QCamera3HardwareInterface
 *
 * PARAMETERS :
 *   @cameraId : camera ID
 *
 * RETURN : none
 *==========================================================================*/
QCamera3HardwareInterface::QCamera3HardwareInterface(uint32_t cameraId,
        const camera_module_callbacks_t *callbacks)
    : mCameraId(cameraId),
      mCameraHandle(NULL),
      mCameraInitialized(false),
      mCallbackOps(NULL),
      mMetadataChannel(NULL),
      mPictureChannel(NULL),
      mRawChannel(NULL),
      mSupportChannel(NULL),
      mAnalysisChannel(NULL),
      mRawDumpChannel(NULL),
      mHdrPlusRawSrcChannel(NULL),
      mDummyBatchChannel(NULL),
      mDepthChannel(NULL),
      mDepthCloudMode(CAM_PD_DATA_SKIP),
      mPerfLockMgr(),
      mChannelHandle(0),
      mFirstConfiguration(true),
      mFlush(false),
      mFlushPerf(false),
      mParamHeap(NULL),
      mParameters(NULL),
      mPrevParameters(NULL),
      m_bIsVideo(false),
      m_bIs4KVideo(false),
      m_bEisSupportedSize(false),
      m_bEisEnable(false),
      m_bEis3PropertyEnabled(false),
      m_bAVTimerEnabled(false),
      m_MobicatMask(0),
      mShutterDispatcher(this),
      mOutputBufferDispatcher(this),
      mMinProcessedFrameDuration(0),
      mMinJpegFrameDuration(0),
      mMinRawFrameDuration(0),
      mExpectedFrameDuration(0),
      mExpectedInflightDuration(0),
      mMetaFrameCount(0U),
      mUpdateDebugLevel(false),
      mCallbacks(callbacks),
      mCaptureIntent(0),
      mCacMode(0),
      /* DevCamDebug metadata internal m control*/
      mDevCamDebugMetaEnable(0),
      /* DevCamDebug metadata end */
      mBatchSize(0),
      mToBeQueuedVidBufs(0),
      mHFRVideoFps(DEFAULT_VIDEO_FPS),
      mOpMode(CAMERA3_STREAM_CONFIGURATION_NORMAL_MODE),
      mStreamConfig(false),
      mCommon(),
      mFirstFrameNumberInBatch(0),
      mNeedSensorRestart(false),
      mPreviewStarted(false),
      mMinInFlightRequests(MIN_INFLIGHT_REQUESTS),
      mMaxInFlightRequests(MAX_INFLIGHT_REQUESTS),
      mPDSupported(false),
      mPDIndex(0),
      mInstantAEC(false),
      mResetInstantAEC(false),
      mInstantAECSettledFrameNumber(0),
      mAecSkipDisplayFrameBound(0),
      mInstantAecFrameIdxCount(0),
      mLastRequestedLensShadingMapMode(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF),
      mCurrFeatureState(0),
      mLdafCalibExist(false),
      mLastCustIntentFrmNum(-1),
      mFirstMetadataCallback(true),
      mState(CLOSED),
      mIsDeviceLinked(false),
      mIsMainCamera(true),
      mLinkedCameraId(0),
      m_pDualCamCmdHeap(NULL),
      m_pDualCamCmdPtr(NULL),
      mHdrPlusModeEnabled(false),
      mZslEnabled(false),
      mEaselMipiStarted(false),
      mIsApInputUsedForHdrPlus(false),
      mFirstPreviewIntentSeen(false),
      m_bSensorHDREnabled(false),
      mAfTrigger(),
      mSceneDistance(-1)
{
    getLogLevel();
    mCommon.init(gCamCapability[cameraId]);
    mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
#ifndef USE_HAL_3_3
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_4;
#else
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_3;
#endif
    mCameraDevice.common.close = close_camera_device;
    mCameraDevice.ops = &mCameraOps;
    mCameraDevice.priv = this;
    gCamCapability[cameraId]->version = CAM_HAL_V3;
    // TODO: hardcode for now until mctl adds support for min_num_pp_bufs
    //TBD - To see if this hardcoding is needed. Check by printing if this is filled by mctl to 3
    gCamCapability[cameraId]->min_num_pp_bufs = 3;

    PTHREAD_COND_INIT(&mBuffersCond);

    PTHREAD_COND_INIT(&mRequestCond);
    mPendingLiveRequest = 0;
    mCurrentRequestId = -1;
    pthread_mutex_init(&mMutex, NULL);

    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        mDefaultMetadata[i] = NULL;

    // Getting system props of different kinds
    char prop[PROPERTY_VALUE_MAX];
    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.raw.dump", prop, "0");
    mEnableRawDump = atoi(prop);
    property_get("persist.camera.hal3.force.hdr", prop, "0");
    mForceHdrSnapshot = atoi(prop);

    if (mEnableRawDump)
        LOGD("Raw dump from Camera HAL enabled");

    memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
    memset(mLdafCalib, 0, sizeof(mLdafCalib));

    memset(mEaselFwVersion, 0, sizeof(mEaselFwVersion));
    mEaselFwUpdated = false;

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.preview", prop, "0");
    m_bTnrPreview = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.swtnr.preview", prop, "1");
    m_bSwTnrPreview = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.video", prop, "1");
    m_bTnrVideo = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.avtimer.debug", prop, "0");
    m_debug_avtimer = (uint8_t)atoi(prop);
    LOGI("AV timer enabled: %d", m_debug_avtimer);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.cacmode.disable", prop, "0");
    m_cacModeDisabled = (uint8_t)atoi(prop);

    m_bForceInfinityAf = property_get_bool("persist.camera.af.infinity", 0);
    m_MobicatMask = (uint8_t)property_get_int32("persist.camera.mobicat", 0);

    //Load and read GPU library.
    lib_surface_utils = NULL;
    LINK_get_surface_pixel_alignment = NULL;
    mSurfaceStridePadding = CAM_PAD_TO_64;
#ifdef CHECK_GPU_PIXEL_ALIGNMENT
    lib_surface_utils = dlopen("libadreno_utils.so", RTLD_NOW);
    if (lib_surface_utils) {
        *(void **)&LINK_get_surface_pixel_alignment =
                dlsym(lib_surface_utils, "get_gpu_pixel_alignment");
        if (LINK_get_surface_pixel_alignment) {
            mSurfaceStridePadding = LINK_get_surface_pixel_alignment();
        }
        dlclose(lib_surface_utils);
    }
#endif
    mPDIndex = getPDStatIndex(gCamCapability[cameraId]);
    mPDSupported = (0 <= mPDIndex) ? true : false;

    m60HzZone = is60HzZone();
}

/*===========================================================================
 * FUNCTION : ~QCamera3HardwareInterface
 *
 * DESCRIPTION: destructor of QCamera3HardwareInterface
 *
 * PARAMETERS : none
 *
 * RETURN : none
 *==========================================================================*/
QCamera3HardwareInterface::~QCamera3HardwareInterface()
{
    LOGD("E");

    int32_t rc = 0;

    // Disable power hint and enable the perf lock for close camera
    mPerfLockMgr.releasePerfLock(PERF_LOCK_POWERHINT_ENCODE);
    mPerfLockMgr.acquirePerfLock(PERF_LOCK_CLOSE_CAMERA);

    // Close HDR+ client first before destroying HAL.
    {
        std::unique_lock<std::mutex> l(gHdrPlusClientLock);
        finishHdrPlusClientOpeningLocked(l);
        if (gHdrPlusClient != nullptr) {
            // Disable HDR+ mode.
            disableHdrPlusModeLocked();
            // Disconnect Easel if it's connected.
            gEaselManagerClient->closeHdrPlusClient(std::move(gHdrPlusClient));
            gHdrPlusClient = nullptr;
        }
    }

    // unlink of dualcam during close camera
    if (mIsDeviceLinked) {
        cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
                &m_pDualCamCmdPtr->bundle_info;
        m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
        m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
        pthread_mutex_lock(&gCamLock);

        if (mIsMainCamera == 1) {
            m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
            m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
            m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
            // related session id should be session id of linked session
            m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
        } else {
            m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
            m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
            m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
            m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
        }
        m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
        pthread_mutex_unlock(&gCamLock);

        rc = mCameraHandle->ops->set_dual_cam_cmd(
                mCameraHandle->camera_handle);
        if (rc < 0) {
            LOGE("Dualcam: Unlink failed, but still proceed to close");
        }
    }

    /* We need to stop all streams before deleting any stream */
    if (mRawDumpChannel) {
        mRawDumpChannel->stop();
    }

    if (mHdrPlusRawSrcChannel) {
        mHdrPlusRawSrcChannel->stop();
    }

    // NOTE: 'camera3_stream_t *' objects are already freed at
    //        this stage by the framework
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel) {
            channel->stop();
        }
    }
    if (mSupportChannel)
        mSupportChannel->stop();

    if (mAnalysisChannel) {
        mAnalysisChannel->stop();
    }
    if (mMetadataChannel) {
        mMetadataChannel->stop();
    }
    if (mChannelHandle) {
        stopChannelLocked(/*stop_immediately*/false);
    }

    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel)
            delete channel;
        free (*it);
    }
    if (mSupportChannel) {
        delete mSupportChannel;
        mSupportChannel = NULL;
    }

    if (mAnalysisChannel) {
        delete mAnalysisChannel;
        mAnalysisChannel = NULL;
    }
    if (mRawDumpChannel) {
        delete mRawDumpChannel;
        mRawDumpChannel = NULL;
    }
    if (mHdrPlusRawSrcChannel) {
        delete mHdrPlusRawSrcChannel;
        mHdrPlusRawSrcChannel = NULL;
    }
    if (mDummyBatchChannel) {
        delete mDummyBatchChannel;
        mDummyBatchChannel = NULL;
    }

    mPictureChannel = NULL;
    mDepthChannel = NULL;

    if (mMetadataChannel) {
        delete mMetadataChannel;
        mMetadataChannel = NULL;
    }

    /* Clean up all channels */
    if (mCameraInitialized) {
        if(!mFirstConfiguration){
            //send the last unconfigure
            cam_stream_size_info_t stream_config_info;
            memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
            stream_config_info.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
            stream_config_info.buffer_info.max_buffers =
                    m_bIs4KVideo ? 0 :
                    m_bEis3PropertyEnabled && m_bIsVideo ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
            clear_metadata_buffer(mParameters);
            ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_INFO,
                    stream_config_info);
            int rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
            if (rc < 0) {
                LOGE("set_parms failed for unconfigure");
            }
        }
        deinitParameters();
    }

    if (mChannelHandle) {
        mCameraHandle->ops->delete_channel(mCameraHandle->camera_handle,
                mChannelHandle);
        LOGH("deleting channel %d", mChannelHandle);
        mChannelHandle = 0;
    }

    if (mState != CLOSED)
        closeCamera();

    for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
        req.mPendingBufferList.clear();
    }
    mPendingBuffersMap.mPendingBuffersInRequest.clear();
    for (pendingRequestIterator i = mPendingRequestsList.begin();
            i != mPendingRequestsList.end();) {
        i = erasePendingRequest(i);
    }
    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        if (mDefaultMetadata[i])
            free_camera_metadata(mDefaultMetadata[i]);

    mPerfLockMgr.releasePerfLock(PERF_LOCK_CLOSE_CAMERA);

    pthread_cond_destroy(&mRequestCond);

    pthread_cond_destroy(&mBuffersCond);

    pthread_mutex_destroy(&mMutex);
    LOGD("X");
}

/*===========================================================================
 * FUNCTION : erasePendingRequest
 *
 * DESCRIPTION: function to erase a desired pending request after freeing any
 *              allocated memory
 *
 * PARAMETERS :
 *   @i : iterator pointing to pending request to be erased
 *
 * RETURN : iterator pointing to the next request
 *==========================================================================*/
QCamera3HardwareInterface::pendingRequestIterator
        QCamera3HardwareInterface::erasePendingRequest (pendingRequestIterator i)
{
    if (i->input_buffer != NULL) {
        free(i->input_buffer);
        i->input_buffer = NULL;
    }
    if (i->settings != NULL)
        free_camera_metadata((camera_metadata_t*)i->settings);

    mExpectedInflightDuration -= i->expectedFrameDuration;
    if (mExpectedInflightDuration < 0) {
        LOGE("Negative expected in-flight duration!");
        mExpectedInflightDuration = 0;
    }

    return mPendingRequestsList.erase(i);
}

/*===========================================================================
 * FUNCTION : camEvtHandle
 *
 * DESCRIPTION: Function registered to mm-camera-interface to handle events
 *
 * PARAMETERS :
 *   @camera_handle : interface layer camera handle
 *   @evt : ptr to event
 *   @user_data : user data ptr
 *
 * RETURN : none
 *==========================================================================*/
void QCamera3HardwareInterface::camEvtHandle(uint32_t /*camera_handle*/,
        mm_camera_event_t *evt,
        void *user_data)
{
    QCamera3HardwareInterface *obj = (QCamera3HardwareInterface *)user_data;
    if (obj && evt) {
        switch(evt->server_event_type) {
        case CAM_EVENT_TYPE_DAEMON_DIED:
            pthread_mutex_lock(&obj->mMutex);
            obj->mState = ERROR;
            pthread_mutex_unlock(&obj->mMutex);
            LOGE("Fatal, camera daemon died");
            break;

        case CAM_EVENT_TYPE_DAEMON_PULL_REQ:
            LOGD("HAL got request pull from Daemon");
            pthread_mutex_lock(&obj->mMutex);
            obj->mWokenUpByDaemon = true;
            obj->unblockRequestIfNecessary();
            pthread_mutex_unlock(&obj->mMutex);
            break;

        default:
            LOGW("Warning: Unhandled event %d",
                    evt->server_event_type);
            break;
        }
    } else {
        LOGE("NULL user_data/evt");
    }
}

/*===========================================================================
 * FUNCTION : openCamera
 *
 * DESCRIPTION: open camera
 *
 * PARAMETERS :
 *   @hw_device : double ptr for camera device struct
 *
 * RETURN : int32_t type of status
 *          NO_ERROR -- success
 *          non-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device)
{
    int rc = 0;
    if (mState != CLOSED) {
        *hw_device = NULL;
        return PERMISSION_DENIED;
    }

    logEaselEvent("EASEL_STARTUP_LATENCY", "Camera Open");
    mPerfLockMgr.acquirePerfLock(PERF_LOCK_OPEN_CAMERA);
    LOGI("[KPI Perf]: E PROFILE_OPEN_CAMERA camera id %d",
            mCameraId);

    if (mCameraHandle) {
        LOGE("Failure: Camera already opened");
        return ALREADY_EXISTS;
    }

    {
        std::unique_lock<std::mutex> l(gHdrPlusClientLock);
        if (gEaselManagerClient != nullptr && gEaselManagerClient->isEaselPresentOnDevice()) {
            logEaselEvent("EASEL_STARTUP_LATENCY", "Resume");
            rc = gEaselManagerClient->resume(this);
            if (rc != 0) {
                ALOGE("%s: Resuming Easel failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
                return rc;
            }
            mEaselFwUpdated = false;
        }
    }

    rc = openCamera();
    if (rc == 0) {
        *hw_device = &mCameraDevice.common;
    } else {
        *hw_device = NULL;

        // Suspend Easel because opening camera failed.
        {
            std::unique_lock<std::mutex> l(gHdrPlusClientLock);
            if (gEaselManagerClient != nullptr && gEaselManagerClient->isEaselPresentOnDevice()) {
                status_t suspendErr = gEaselManagerClient->suspend();
                if (suspendErr != 0) {
                    ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__,
                            strerror(-suspendErr), suspendErr);
                }
            }
        }
    }

    LOGI("[KPI Perf]: X PROFILE_OPEN_CAMERA camera id %d, rc: %d",
            mCameraId, rc);

    if (rc == NO_ERROR) {
        mState = OPENED;
    }

    return rc;
}

/*===========================================================================
 * FUNCTION : openCamera
 *
 * DESCRIPTION: open camera
 *
 * PARAMETERS : none
 *
 * RETURN : int32_t type of status
 *          NO_ERROR -- success
 *          non-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::openCamera()
{
    int rc = 0;
    char value[PROPERTY_VALUE_MAX];

    KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_OPENCAMERA);

    rc = QCameraFlash::getInstance().reserveFlashForCamera(mCameraId);
    if (rc < 0) {
        LOGE("Failed to reserve flash for camera id: %d",
                mCameraId);
        return UNKNOWN_ERROR;
    }

    rc = camera_open((uint8_t)mCameraId, &mCameraHandle);
    if (rc) {
        LOGE("camera_open failed. rc = %d, mCameraHandle = %p", rc, mCameraHandle);
        return rc;
    }

    if (!mCameraHandle) {
        LOGE("camera_open failed. mCameraHandle = %p", mCameraHandle);
        return -ENODEV;
    }

    rc = mCameraHandle->ops->register_event_notify(mCameraHandle->camera_handle,
            camEvtHandle, (void *)this);

    if (rc < 0) {
        LOGE("Error, failed to register event callback");
        /* Not closing camera here since it is already handled in destructor */
        return FAILED_TRANSACTION;
    }

    mExifParams.debug_params =
            (mm_jpeg_debug_exif_params_t *) malloc (sizeof(mm_jpeg_debug_exif_params_t));
    if (mExifParams.debug_params) {
        memset(mExifParams.debug_params, 0, sizeof(mm_jpeg_debug_exif_params_t));
    } else {
        LOGE("Out of Memory. Allocation failed for 3A debug exif params");
        return NO_MEMORY;
    }
    mFirstConfiguration = true;

    //Notify display HAL that a camera session is active.
    //But avoid calling the same during bootup because camera service might open/close
    //cameras at boot time during its initialization and display service will also internally
    //wait for camera service to initialize first while calling this display API, resulting in a
    //deadlock situation. Since boot time camera open/close calls are made only to fetch
    //capabilities, no need of this display bw optimization.
    //Use "service.bootanim.exit" property to know boot status.
    property_get("service.bootanim.exit", value, "0");
    if (atoi(value) == 1) {
        pthread_mutex_lock(&gCamLock);
        if (gNumCameraSessions++ == 0) {
            setCameraLaunchStatus(true);
        }
        pthread_mutex_unlock(&gCamLock);
    }

    //fill the session id needed while linking dual cam
    pthread_mutex_lock(&gCamLock);
    rc = mCameraHandle->ops->get_session_id(mCameraHandle->camera_handle,
            &sessionId[mCameraId]);
    pthread_mutex_unlock(&gCamLock);

    if (rc < 0) {
        LOGE("Error, failed to get session id");
        return UNKNOWN_ERROR;
    } else {
        //Allocate related cam sync buffer
        //this is needed for the payload that goes along with bundling cmd for related
        //camera use cases
        m_pDualCamCmdHeap = new QCamera3HeapMemory(1);
        rc = m_pDualCamCmdHeap->allocate(sizeof(cam_dual_camera_cmd_info_t));
        if(rc != OK) {
            rc = NO_MEMORY;
            LOGE("Dualcam: Failed to allocate Related cam sync Heap memory");
            return NO_MEMORY;
        }

        //Map memory for related cam sync buffer
        rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
                CAM_MAPPING_BUF_TYPE_DUAL_CAM_CMD_BUF,
                m_pDualCamCmdHeap->getFd(0),
                sizeof(cam_dual_camera_cmd_info_t),
                m_pDualCamCmdHeap->getPtr(0));
        if(rc < 0) {
            LOGE("Dualcam: failed to map Related cam sync buffer");
            rc = FAILED_TRANSACTION;
            return NO_MEMORY;
        }
        m_pDualCamCmdPtr =
                (cam_dual_camera_cmd_info_t*) DATA_PTR(m_pDualCamCmdHeap,0);
    }

    LOGH("mCameraId=%d",mCameraId);

    return NO_ERROR;
}

/*===========================================================================
 * FUNCTION : closeCamera
 *
 * DESCRIPTION: close camera
 *
 * PARAMETERS : none
 *
 * RETURN : int32_t type of status
 *          NO_ERROR -- success
 *          non-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::closeCamera()
{
    KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CLOSECAMERA);
    int rc = NO_ERROR;
    char value[PROPERTY_VALUE_MAX];

    LOGI("[KPI Perf]: E PROFILE_CLOSE_CAMERA camera id %d",
            mCameraId);

    // unmap memory for related cam sync buffer
    mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
            CAM_MAPPING_BUF_TYPE_DUAL_CAM_CMD_BUF);
    if (NULL != m_pDualCamCmdHeap) {
        m_pDualCamCmdHeap->deallocate();
        delete m_pDualCamCmdHeap;
        m_pDualCamCmdHeap = NULL;
        m_pDualCamCmdPtr = NULL;
    }

    rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
    mCameraHandle = NULL;

    //reset session id to some invalid id
    pthread_mutex_lock(&gCamLock);
    sessionId[mCameraId] = 0xDEADBEEF;
    pthread_mutex_unlock(&gCamLock);

    //Notify display HAL that there is no active camera session
    //but avoid calling the same during bootup. Refer to openCamera
    //for more details.
    property_get("service.bootanim.exit", value, "0");
    if (atoi(value) == 1) {
        pthread_mutex_lock(&gCamLock);
        if (--gNumCameraSessions == 0) {
            setCameraLaunchStatus(false);
        }
        pthread_mutex_unlock(&gCamLock);
    }

    if (mExifParams.debug_params) {
        free(mExifParams.debug_params);
        mExifParams.debug_params = NULL;
    }
    if (QCameraFlash::getInstance().releaseFlashFromCamera(mCameraId) != 0) {
        LOGW("Failed to release flash for camera id: %d",
                mCameraId);
    }
    mState = CLOSED;
    LOGI("[KPI Perf]: X PROFILE_CLOSE_CAMERA camera id %d, rc: %d",
            mCameraId, rc);

    {
        std::unique_lock<std::mutex> l(gHdrPlusClientLock);
        if (EaselManagerClientOpened) {
            rc = gEaselManagerClient->suspend();
            if (rc != 0) {
                ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
            }
        }
    }

    return rc;
}

/*===========================================================================
 * FUNCTION : initialize
 *
 * DESCRIPTION: Initialize frameworks callback functions
 *
 * PARAMETERS :
 *   @callback_ops : callback function to frameworks
 *
 * RETURN :
 *
 *==========================================================================*/
int QCamera3HardwareInterface::initialize(
        const struct camera3_callback_ops *callback_ops)
{
    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_INIT);
    int rc;

    LOGI("E :mCameraId = %d mState = %d", mCameraId, mState);
    pthread_mutex_lock(&mMutex);

    // Validate current state
    switch (mState) {
    case OPENED:
        /* valid state */
        break;
    default:
        LOGE("Invalid state %d", mState);
        rc = -ENODEV;
        goto err1;
    }

    rc = initParameters();
    if (rc < 0) {
        LOGE("initParameters failed %d", rc);
        goto err1;
    }
    mCallbackOps = callback_ops;

    mChannelHandle = mCameraHandle->ops->add_channel(
            mCameraHandle->camera_handle, NULL, NULL, this);
    if (mChannelHandle == 0) {
        LOGE("add_channel failed");
        rc = -ENOMEM;
        pthread_mutex_unlock(&mMutex);
        return rc;
    }

    pthread_mutex_unlock(&mMutex);
    mCameraInitialized = true;
    mState = INITIALIZED;
    LOGI("X");
    return 0;

err1:
    pthread_mutex_unlock(&mMutex);
    return rc;
}
1194
1195/*===========================================================================
1196 * FUNCTION : validateStreamDimensions
1197 *
1198 * DESCRIPTION: Check if the configuration requested are those advertised
1199 *
1200 * PARAMETERS :
1201 * @stream_list : streams to be configured
1202 *
1203 * RETURN :
1204 *
1205 *==========================================================================*/
1206int QCamera3HardwareInterface::validateStreamDimensions(
1207 camera3_stream_configuration_t *streamList)
1208{
1209 int rc = NO_ERROR;
1210 size_t count = 0;
Emilian Peev0f3c3162017-03-15 12:57:46 +00001211 uint32_t depthWidth = 0;
1212 uint32_t depthHeight = 0;
1213 if (mPDSupported) {
1214 depthWidth = gCamCapability[mCameraId]->raw_meta_dim[mPDIndex].width;
1215 depthHeight = gCamCapability[mCameraId]->raw_meta_dim[mPDIndex].height;
1216 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001217
1218 camera3_stream_t *inputStream = NULL;
1219 /*
1220 * Loop through all streams to find input stream if it exists*
1221 */
1222 for (size_t i = 0; i< streamList->num_streams; i++) {
1223 if (streamList->streams[i]->stream_type == CAMERA3_STREAM_INPUT) {
1224 if (inputStream != NULL) {
1225 LOGE("Error, Multiple input streams requested");
1226 return -EINVAL;
1227 }
1228 inputStream = streamList->streams[i];
1229 }
1230 }
1231 /*
1232 * Loop through all streams requested in configuration
1233 * Check if unsupported sizes have been requested on any of them
1234 */
1235 for (size_t j = 0; j < streamList->num_streams; j++) {
1236 bool sizeFound = false;
1237 camera3_stream_t *newStream = streamList->streams[j];
1238
1239 uint32_t rotatedHeight = newStream->height;
1240 uint32_t rotatedWidth = newStream->width;
1241 if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
1242 (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
1243 rotatedHeight = newStream->width;
1244 rotatedWidth = newStream->height;
1245 }
1246
1247 /*
1248 * Sizes are different for each type of stream format check against
1249 * appropriate table.
1250 */
1251 switch (newStream->format) {
1252 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
1253 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
1254 case HAL_PIXEL_FORMAT_RAW10:
Emilian Peev0f3c3162017-03-15 12:57:46 +00001255 if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
1256 (ANDROID_SCALER_AVAILABLE_FORMATS_RAW16 == newStream->format) &&
1257 mPDSupported) {
1258 if ((depthWidth == newStream->width) &&
1259 (depthHeight == newStream->height)) {
1260 sizeFound = true;
1261 }
1262 break;
1263 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001264 count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
1265 for (size_t i = 0; i < count; i++) {
1266 if ((gCamCapability[mCameraId]->raw_dim[i].width == (int32_t)rotatedWidth) &&
1267 (gCamCapability[mCameraId]->raw_dim[i].height == (int32_t)rotatedHeight)) {
1268 sizeFound = true;
1269 break;
1270 }
1271 }
1272 break;
1273 case HAL_PIXEL_FORMAT_BLOB:
Emilian Peev0f3c3162017-03-15 12:57:46 +00001274 if ((newStream->data_space == HAL_DATASPACE_DEPTH) &&
1275 mPDSupported) {
Emilian Peev7650c122017-01-19 08:24:33 -08001276 //As per spec. depth cloud should be sample count / 16
Emilian Peev0f3c3162017-03-15 12:57:46 +00001277 uint32_t depthSamplesCount = (depthWidth * depthHeight * 2) / 16;
Emilian Peev7650c122017-01-19 08:24:33 -08001278 if ((depthSamplesCount == newStream->width) &&
1279 (1 == newStream->height)) {
1280 sizeFound = true;
1281 }
1282 break;
1283 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001284 count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
1285 /* Verify set size against generated sizes table */
1286 for (size_t i = 0; i < count; i++) {
1287 if (((int32_t)rotatedWidth ==
1288 gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
1289 ((int32_t)rotatedHeight ==
1290 gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
1291 sizeFound = true;
1292 break;
1293 }
1294 }
1295 break;
1296 case HAL_PIXEL_FORMAT_YCbCr_420_888:
1297 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
1298 default:
1299 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
1300 || newStream->stream_type == CAMERA3_STREAM_INPUT
1301 || IS_USAGE_ZSL(newStream->usage)) {
1302 if (((int32_t)rotatedWidth ==
1303 gCamCapability[mCameraId]->active_array_size.width) &&
1304 ((int32_t)rotatedHeight ==
1305 gCamCapability[mCameraId]->active_array_size.height)) {
1306 sizeFound = true;
1307 break;
1308 }
 1309                /* We could potentially break here to enforce that the ZSL stream
 1310                 * set by the framework is always the full active array size, but it
 1311                 * is not clear from the spec whether the framework will always follow
 1312                 * that. We also have logic to override to the full array size, so
 1313                 * keep the check lenient at the moment.
1314 */
1315 }
1316 count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt,
1317 MAX_SIZES_CNT);
1318 for (size_t i = 0; i < count; i++) {
1319 if (((int32_t)rotatedWidth ==
1320 gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
1321 ((int32_t)rotatedHeight ==
1322 gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
1323 sizeFound = true;
1324 break;
1325 }
1326 }
1327 break;
1328 } /* End of switch(newStream->format) */
1329
1330 /* We error out even if a single stream has unsupported size set */
1331 if (!sizeFound) {
1332 LOGE("Error: Unsupported size: %d x %d type: %d array size: %d x %d",
1333 rotatedWidth, rotatedHeight, newStream->format,
1334 gCamCapability[mCameraId]->active_array_size.width,
1335 gCamCapability[mCameraId]->active_array_size.height);
1336 rc = -EINVAL;
1337 break;
1338 }
1339 } /* End of for each stream */
1340 return rc;
1341}
1342
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001343/*===========================================================================
1344 * FUNCTION : validateUsageFlags
1345 *
1346 * DESCRIPTION: Check if the configuration usage flags map to same internal format.
1347 *
1348 * PARAMETERS :
1349 * @stream_list : streams to be configured
1350 *
1351 * RETURN :
1352 * NO_ERROR if the usage flags are supported
1353 * error code if usage flags are not supported
1354 *
1355 *==========================================================================*/
1356int QCamera3HardwareInterface::validateUsageFlags(
1357 const camera3_stream_configuration_t* streamList)
1358{
1359 for (size_t j = 0; j < streamList->num_streams; j++) {
1360 const camera3_stream_t *newStream = streamList->streams[j];
1361
1362 if (newStream->format != HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
1363 (newStream->stream_type != CAMERA3_STREAM_OUTPUT &&
1364 newStream->stream_type != CAMERA3_STREAM_BIDIRECTIONAL)) {
1365 continue;
1366 }
1367
Jason Leec4cf5032017-05-24 18:31:41 -07001368 // Here we only care whether it's EIS3 or not
1369 char is_type_value[PROPERTY_VALUE_MAX];
1370 property_get("persist.camera.is_type", is_type_value, "4");
1371 cam_is_type_t isType = atoi(is_type_value) == IS_TYPE_EIS_3_0 ? IS_TYPE_EIS_3_0 : IS_TYPE_NONE;
1372 if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
1373 mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
1374 isType = IS_TYPE_NONE;
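        // The resolved IS type is passed to getStreamDefaultFormat() below, since it
        // can influence the default format chosen for the video/preview/snapshot streams.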
1375
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001376 bool isVideo = IS_USAGE_VIDEO(newStream->usage);
1377 bool isPreview = IS_USAGE_PREVIEW(newStream->usage);
1378 bool isZSL = IS_USAGE_ZSL(newStream->usage);
1379 bool forcePreviewUBWC = true;
1380 if (isVideo && !QCameraCommon::isVideoUBWCEnabled()) {
1381 forcePreviewUBWC = false;
1382 }
1383 cam_format_t videoFormat = QCamera3Channel::getStreamDefaultFormat(
Jason Leec4cf5032017-05-24 18:31:41 -07001384 CAM_STREAM_TYPE_VIDEO, newStream->width, newStream->height, forcePreviewUBWC, isType);
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001385 cam_format_t previewFormat = QCamera3Channel::getStreamDefaultFormat(
Jason Leec4cf5032017-05-24 18:31:41 -07001386 CAM_STREAM_TYPE_PREVIEW, newStream->width, newStream->height, forcePreviewUBWC, isType);
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001387 cam_format_t zslFormat = QCamera3Channel::getStreamDefaultFormat(
Jason Leec4cf5032017-05-24 18:31:41 -07001388 CAM_STREAM_TYPE_SNAPSHOT, newStream->width, newStream->height, forcePreviewUBWC, isType);
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001389
1390 // Color space for this camera device is guaranteed to be ITU_R_601_FR.
1391 // So color spaces will always match.
1392
1393 // Check whether underlying formats of shared streams match.
1394 if (isVideo && isPreview && videoFormat != previewFormat) {
1395 LOGE("Combined video and preview usage flag is not supported");
1396 return -EINVAL;
1397 }
1398 if (isPreview && isZSL && previewFormat != zslFormat) {
1399 LOGE("Combined preview and zsl usage flag is not supported");
1400 return -EINVAL;
1401 }
1402 if (isVideo && isZSL && videoFormat != zslFormat) {
1403 LOGE("Combined video and zsl usage flag is not supported");
1404 return -EINVAL;
1405 }
1406 }
1407 return NO_ERROR;
1408}
1409
1410/*===========================================================================
1411 * FUNCTION : validateUsageFlagsForEis
1412 *
1413 * DESCRIPTION: Check if the configuration usage flags conflict with Eis
1414 *
1415 * PARAMETERS :
1416 * @stream_list : streams to be configured
1417 *
1418 * RETURN :
1419 * NO_ERROR if the usage flags are supported
1420 * error code if usage flags are not supported
1421 *
1422 *==========================================================================*/
1423int QCamera3HardwareInterface::validateUsageFlagsForEis(
1424 const camera3_stream_configuration_t* streamList)
1425{
1426 for (size_t j = 0; j < streamList->num_streams; j++) {
1427 const camera3_stream_t *newStream = streamList->streams[j];
1428
1429 bool isVideo = IS_USAGE_VIDEO(newStream->usage);
1430 bool isPreview = IS_USAGE_PREVIEW(newStream->usage);
1431
 1432        // Because EIS is "hard-coded" for certain use cases, and the current
1433 // implementation doesn't support shared preview and video on the same
1434 // stream, return failure if EIS is forced on.
1435 if (isPreview && isVideo && m_bEisEnable && m_bEisSupportedSize) {
1436 LOGE("Combined video and preview usage flag is not supported due to EIS");
1437 return -EINVAL;
1438 }
1439 }
1440 return NO_ERROR;
1441}
1442
Thierry Strudel3d639192016-09-09 11:52:26 -07001443/*==============================================================================
1444 * FUNCTION : isSupportChannelNeeded
1445 *
 1446 * DESCRIPTION: Simple heuristic function to determine if a support channel is needed
1447 *
1448 * PARAMETERS :
1449 * @stream_list : streams to be configured
1450 * @stream_config_info : the config info for streams to be configured
1451 *
 1452 * RETURN     : Boolean true/false decision
1453 *
1454 *==========================================================================*/
1455bool QCamera3HardwareInterface::isSupportChannelNeeded(
1456 camera3_stream_configuration_t *streamList,
1457 cam_stream_size_info_t stream_config_info)
1458{
1459 uint32_t i;
1460 bool pprocRequested = false;
1461 /* Check for conditions where PProc pipeline does not have any streams*/
1462 for (i = 0; i < stream_config_info.num_streams; i++) {
1463 if (stream_config_info.type[i] != CAM_STREAM_TYPE_ANALYSIS &&
1464 stream_config_info.postprocess_mask[i] != CAM_QCOM_FEATURE_NONE) {
1465 pprocRequested = true;
1466 break;
1467 }
1468 }
1469
1470 if (pprocRequested == false )
1471 return true;
1472
1473 /* Dummy stream needed if only raw or jpeg streams present */
1474 for (i = 0; i < streamList->num_streams; i++) {
1475 switch(streamList->streams[i]->format) {
1476 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1477 case HAL_PIXEL_FORMAT_RAW10:
1478 case HAL_PIXEL_FORMAT_RAW16:
1479 case HAL_PIXEL_FORMAT_BLOB:
1480 break;
1481 default:
1482 return false;
1483 }
1484 }
1485 return true;
1486}
1487
1488/*==============================================================================
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001489 * FUNCTION   : getSensorModeInfo
Thierry Strudel3d639192016-09-09 11:52:26 -07001490 *
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001491 * DESCRIPTION: Get sensor mode information based on current stream configuration
Thierry Strudel3d639192016-09-09 11:52:26 -07001492 *
1493 * PARAMETERS :
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001494 * @sensor_mode_info : sensor mode information (output)
Thierry Strudel3d639192016-09-09 11:52:26 -07001495 *
1496 * RETURN : int32_t type of status
1497 * NO_ERROR -- success
 1498 *              non-zero failure code
1499 *
1500 *==========================================================================*/
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001501int32_t QCamera3HardwareInterface::getSensorModeInfo(cam_sensor_mode_info_t &sensorModeInfo)
Thierry Strudel3d639192016-09-09 11:52:26 -07001502{
1503 int32_t rc = NO_ERROR;
1504
1505 cam_dimension_t max_dim = {0, 0};
1506 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
1507 if (mStreamConfigInfo.stream_sizes[i].width > max_dim.width)
1508 max_dim.width = mStreamConfigInfo.stream_sizes[i].width;
1509 if (mStreamConfigInfo.stream_sizes[i].height > max_dim.height)
1510 max_dim.height = mStreamConfigInfo.stream_sizes[i].height;
1511 }
1512
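    // Publish the largest configured stream dimension via CAM_INTF_PARM_MAX_DIMENSION
    // first (presumably so the backend selects a sensor mode that covers all streams),
    // then query the resulting sensor mode info below.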
1513 clear_metadata_buffer(mParameters);
1514
1515 rc = ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_MAX_DIMENSION,
1516 max_dim);
1517 if (rc != NO_ERROR) {
1518 LOGE("Failed to update table for CAM_INTF_PARM_MAX_DIMENSION");
1519 return rc;
1520 }
1521
1522 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
1523 if (rc != NO_ERROR) {
1524 LOGE("Failed to set CAM_INTF_PARM_MAX_DIMENSION");
1525 return rc;
1526 }
1527
1528 clear_metadata_buffer(mParameters);
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001529 ADD_GET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_SENSOR_MODE_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -07001530
1531 rc = mCameraHandle->ops->get_parms(mCameraHandle->camera_handle,
1532 mParameters);
1533 if (rc != NO_ERROR) {
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001534 LOGE("Failed to get CAM_INTF_PARM_SENSOR_MODE_INFO");
Thierry Strudel3d639192016-09-09 11:52:26 -07001535 return rc;
1536 }
1537
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001538 READ_PARAM_ENTRY(mParameters, CAM_INTF_PARM_SENSOR_MODE_INFO, sensorModeInfo);
Chien-Yu Chenee335912017-02-09 17:53:20 -08001539 LOGH("%s: active array size %dx%d, pixel array size %dx%d, output pixel clock %u, "
1540 "raw bits: %d", __FUNCTION__, sensorModeInfo.active_array_size.width,
1541 sensorModeInfo.active_array_size.height, sensorModeInfo.pixel_array_size.width,
1542 sensorModeInfo.pixel_array_size.height, sensorModeInfo.op_pixel_clk,
1543 sensorModeInfo.num_raw_bits);
Thierry Strudel3d639192016-09-09 11:52:26 -07001544
1545 return rc;
1546}
1547
1548/*==============================================================================
Chien-Yu Chen605c3872017-06-14 11:09:23 -07001549 * FUNCTION : getCurrentSensorModeInfo
1550 *
1551 * DESCRIPTION: Get sensor mode information that is currently selected.
1552 *
1553 * PARAMETERS :
1554 * @sensorModeInfo : sensor mode information (output)
1555 *
1556 * RETURN : int32_t type of status
1557 * NO_ERROR -- success
 1558 *              non-zero failure code
1559 *
1560 *==========================================================================*/
1561int32_t QCamera3HardwareInterface::getCurrentSensorModeInfo(cam_sensor_mode_info_t &sensorModeInfo)
1562{
1563 int32_t rc = NO_ERROR;
1564
1565 clear_metadata_buffer(mParameters);
1566 ADD_GET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_CURRENT_SENSOR_MODE_INFO);
1567
1568 rc = mCameraHandle->ops->get_parms(mCameraHandle->camera_handle,
1569 mParameters);
1570 if (rc != NO_ERROR) {
 1571        LOGE("Failed to get CAM_INTF_PARM_CURRENT_SENSOR_MODE_INFO");
1572 return rc;
1573 }
1574
1575 READ_PARAM_ENTRY(mParameters, CAM_INTF_PARM_CURRENT_SENSOR_MODE_INFO, sensorModeInfo);
1576 LOGH("%s: active array size %dx%d, pixel array size %dx%d, output pixel clock %u, "
1577 "raw bits: %d", __FUNCTION__, sensorModeInfo.active_array_size.width,
1578 sensorModeInfo.active_array_size.height, sensorModeInfo.pixel_array_size.width,
1579 sensorModeInfo.pixel_array_size.height, sensorModeInfo.op_pixel_clk,
1580 sensorModeInfo.num_raw_bits);
1581
1582 return rc;
1583}
1584
1585/*==============================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07001586 * FUNCTION : addToPPFeatureMask
1587 *
1588 * DESCRIPTION: add additional features to pp feature mask based on
1589 * stream type and usecase
1590 *
1591 * PARAMETERS :
1592 * @stream_format : stream type for feature mask
1593 * @stream_idx : stream idx within postprocess_mask list to change
1594 *
1595 * RETURN : NULL
1596 *
1597 *==========================================================================*/
1598void QCamera3HardwareInterface::addToPPFeatureMask(int stream_format,
1599 uint32_t stream_idx)
1600{
1601 char feature_mask_value[PROPERTY_VALUE_MAX];
1602 cam_feature_mask_t feature_mask;
1603 int args_converted;
1604 int property_len;
1605
1606 /* Get feature mask from property */
Thierry Strudel269c81a2016-10-12 12:13:59 -07001607#ifdef _LE_CAMERA_
1608 char swtnr_feature_mask_value[PROPERTY_VALUE_MAX];
1609 snprintf(swtnr_feature_mask_value, PROPERTY_VALUE_MAX, "%lld", CAM_QTI_FEATURE_SW_TNR);
1610 property_len = property_get("persist.camera.hal3.feature",
1611 feature_mask_value, swtnr_feature_mask_value);
1612#else
Thierry Strudel3d639192016-09-09 11:52:26 -07001613 property_len = property_get("persist.camera.hal3.feature",
1614 feature_mask_value, "0");
Thierry Strudel269c81a2016-10-12 12:13:59 -07001615#endif
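    /* The override mask may be specified either in hex ("0x...") or as a decimal value */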
Thierry Strudel3d639192016-09-09 11:52:26 -07001616 if ((property_len > 2) && (feature_mask_value[0] == '0') &&
1617 (feature_mask_value[1] == 'x')) {
1618 args_converted = sscanf(feature_mask_value, "0x%llx", &feature_mask);
1619 } else {
1620 args_converted = sscanf(feature_mask_value, "%lld", &feature_mask);
1621 }
1622 if (1 != args_converted) {
1623 feature_mask = 0;
1624 LOGE("Wrong feature mask %s", feature_mask_value);
1625 return;
1626 }
1627
1628 switch (stream_format) {
1629 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED: {
1630 /* Add LLVD to pp feature mask only if video hint is enabled */
1631 if ((m_bIsVideo) && (feature_mask & CAM_QTI_FEATURE_SW_TNR)) {
1632 mStreamConfigInfo.postprocess_mask[stream_idx]
1633 |= CAM_QTI_FEATURE_SW_TNR;
1634 LOGH("Added SW TNR to pp feature mask");
1635 } else if ((m_bIsVideo) && (feature_mask & CAM_QCOM_FEATURE_LLVD)) {
1636 mStreamConfigInfo.postprocess_mask[stream_idx]
1637 |= CAM_QCOM_FEATURE_LLVD;
1638 LOGH("Added LLVD SeeMore to pp feature mask");
1639 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001640 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
1641 CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) {
1642 mStreamConfigInfo.postprocess_mask[stream_idx] |= CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
1643 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08001644 if ((m_bIsVideo) && (gCamCapability[mCameraId]->qcom_supported_feature_mask &
1645 CAM_QTI_FEATURE_BINNING_CORRECTION)) {
1646 mStreamConfigInfo.postprocess_mask[stream_idx] |=
1647 CAM_QTI_FEATURE_BINNING_CORRECTION;
1648 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001649 break;
1650 }
1651 default:
1652 break;
1653 }
1654 LOGD("PP feature mask %llx",
1655 mStreamConfigInfo.postprocess_mask[stream_idx]);
1656}
1657
1658/*==============================================================================
1659 * FUNCTION : updateFpsInPreviewBuffer
1660 *
1661 * DESCRIPTION: update FPS information in preview buffer.
1662 *
1663 * PARAMETERS :
1664 * @metadata : pointer to metadata buffer
1665 * @frame_number: frame_number to look for in pending buffer list
1666 *
1667 * RETURN : None
1668 *
1669 *==========================================================================*/
1670void QCamera3HardwareInterface::updateFpsInPreviewBuffer(metadata_buffer_t *metadata,
1671 uint32_t frame_number)
1672{
1673 // Mark all pending buffers for this particular request
1674 // with corresponding framerate information
1675 for (List<PendingBuffersInRequest>::iterator req =
1676 mPendingBuffersMap.mPendingBuffersInRequest.begin();
1677 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
1678 for(List<PendingBufferInfo>::iterator j =
1679 req->mPendingBufferList.begin();
1680 j != req->mPendingBufferList.end(); j++) {
1681 QCamera3Channel *channel = (QCamera3Channel *)j->stream->priv;
1682 if ((req->frame_number == frame_number) &&
1683 (channel->getStreamTypeMask() &
1684 (1U << CAM_STREAM_TYPE_PREVIEW))) {
1685 IF_META_AVAILABLE(cam_fps_range_t, float_range,
1686 CAM_INTF_PARM_FPS_RANGE, metadata) {
1687 typeof (MetaData_t::refreshrate) cameraFps = float_range->max_fps;
1688 struct private_handle_t *priv_handle =
1689 (struct private_handle_t *)(*(j->buffer));
1690 setMetaData(priv_handle, UPDATE_REFRESH_RATE, &cameraFps);
1691 }
1692 }
1693 }
1694 }
1695}
1696
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07001697/*==============================================================================
1698 * FUNCTION : updateTimeStampInPendingBuffers
1699 *
1700 * DESCRIPTION: update timestamp in display metadata for all pending buffers
1701 * of a frame number
1702 *
1703 * PARAMETERS :
1704 * @frame_number: frame_number. Timestamp will be set on pending buffers of this frame number
1705 * @timestamp : timestamp to be set
1706 *
1707 * RETURN : None
1708 *
1709 *==========================================================================*/
1710void QCamera3HardwareInterface::updateTimeStampInPendingBuffers(
1711 uint32_t frameNumber, nsecs_t timestamp)
1712{
1713 for (auto req = mPendingBuffersMap.mPendingBuffersInRequest.begin();
1714 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
Binhao Lin09245482017-08-31 18:25:29 -07001715 // WAR: save the av_timestamp to the next frame
1716 if(req->frame_number == frameNumber + 1) {
1717 req->av_timestamp = timestamp;
1718 }
1719
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07001720 if (req->frame_number != frameNumber)
1721 continue;
1722
1723 for (auto k = req->mPendingBufferList.begin();
1724 k != req->mPendingBufferList.end(); k++ ) {
Binhao Lin09245482017-08-31 18:25:29 -07001725 // WAR: update timestamp when it's not VT usecase
1726 QCamera3Channel *channel = (QCamera3Channel *)k->stream->priv;
1727 if (!((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask() &&
1728 m_bAVTimerEnabled)) {
1729 struct private_handle_t *priv_handle =
1730 (struct private_handle_t *) (*(k->buffer));
1731 setMetaData(priv_handle, SET_VT_TIMESTAMP, &timestamp);
1732 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07001733 }
1734 }
1735 return;
1736}
1737
Thierry Strudel3d639192016-09-09 11:52:26 -07001738/*===========================================================================
1739 * FUNCTION : configureStreams
1740 *
1741 * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input
1742 * and output streams.
1743 *
1744 * PARAMETERS :
1745 * @stream_list : streams to be configured
1746 *
1747 * RETURN :
1748 *
1749 *==========================================================================*/
1750int QCamera3HardwareInterface::configureStreams(
1751 camera3_stream_configuration_t *streamList)
1752{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001753 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CFG_STRMS);
Thierry Strudel3d639192016-09-09 11:52:26 -07001754 int rc = 0;
1755
1756 // Acquire perfLock before configure streams
Thierry Strudelc2ee3302016-11-17 12:33:12 -08001757 mPerfLockMgr.acquirePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07001758 rc = configureStreamsPerfLocked(streamList);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08001759 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07001760
1761 return rc;
1762}
1763
1764/*===========================================================================
1765 * FUNCTION : configureStreamsPerfLocked
1766 *
1767 * DESCRIPTION: configureStreams while perfLock is held.
1768 *
1769 * PARAMETERS :
1770 * @stream_list : streams to be configured
1771 *
1772 * RETURN : int32_t type of status
1773 * NO_ERROR -- success
 1774 *              non-zero failure code
1775 *==========================================================================*/
1776int QCamera3HardwareInterface::configureStreamsPerfLocked(
1777 camera3_stream_configuration_t *streamList)
1778{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001779 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CFG_STRMS_PERF_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07001780 int rc = 0;
1781
1782 // Sanity check stream_list
1783 if (streamList == NULL) {
1784 LOGE("NULL stream configuration");
1785 return BAD_VALUE;
1786 }
1787 if (streamList->streams == NULL) {
1788 LOGE("NULL stream list");
1789 return BAD_VALUE;
1790 }
1791
1792 if (streamList->num_streams < 1) {
1793 LOGE("Bad number of streams requested: %d",
1794 streamList->num_streams);
1795 return BAD_VALUE;
1796 }
1797
1798 if (streamList->num_streams >= MAX_NUM_STREAMS) {
1799 LOGE("Maximum number of streams %d exceeded: %d",
1800 MAX_NUM_STREAMS, streamList->num_streams);
1801 return BAD_VALUE;
1802 }
1803
Jason Leec4cf5032017-05-24 18:31:41 -07001804 mOpMode = streamList->operation_mode;
1805 LOGD("mOpMode: %d", mOpMode);
1806
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001807 rc = validateUsageFlags(streamList);
1808 if (rc != NO_ERROR) {
1809 return rc;
1810 }
1811
Chien-Yu Chen11c8edc2017-09-11 20:54:24 -07001812    // Disable HDR+ if it's enabled.
Chien-Yu Chen153c5172017-09-08 11:33:19 -07001813 {
1814 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
1815 finishHdrPlusClientOpeningLocked(l);
1816 disableHdrPlusModeLocked();
1817 }
1818
Thierry Strudel3d639192016-09-09 11:52:26 -07001819    /* first invalidate all the streams in mStreamInfo;
1820 * if they appear again, they will be validated */
1821 for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
1822 it != mStreamInfo.end(); it++) {
1823 QCamera3ProcessingChannel *channel = (QCamera3ProcessingChannel*)(*it)->stream->priv;
1824 if (channel) {
1825 channel->stop();
1826 }
1827 (*it)->status = INVALID;
1828 }
1829
1830 if (mRawDumpChannel) {
1831 mRawDumpChannel->stop();
1832 delete mRawDumpChannel;
1833 mRawDumpChannel = NULL;
1834 }
1835
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001836 if (mHdrPlusRawSrcChannel) {
1837 mHdrPlusRawSrcChannel->stop();
1838 delete mHdrPlusRawSrcChannel;
1839 mHdrPlusRawSrcChannel = NULL;
1840 }
1841
Thierry Strudel3d639192016-09-09 11:52:26 -07001842 if (mSupportChannel)
1843 mSupportChannel->stop();
1844
1845 if (mAnalysisChannel) {
1846 mAnalysisChannel->stop();
1847 }
1848 if (mMetadataChannel) {
 1849        /* If mStreamInfo is not empty, there is a metadata stream */
1850 mMetadataChannel->stop();
1851 }
1852 if (mChannelHandle) {
Chien-Yu Chen153c5172017-09-08 11:33:19 -07001853 stopChannelLocked(/*stop_immediately*/false);
Thierry Strudel3d639192016-09-09 11:52:26 -07001854 }
1855
1856 pthread_mutex_lock(&mMutex);
1857
Chien-Yu Chendeaebad2017-06-30 11:46:34 -07001858 mPictureChannel = NULL;
1859
Thierry Strudel3d639192016-09-09 11:52:26 -07001860 // Check state
1861 switch (mState) {
1862 case INITIALIZED:
1863 case CONFIGURED:
1864 case STARTED:
1865 /* valid state */
1866 break;
1867 default:
1868 LOGE("Invalid state %d", mState);
1869 pthread_mutex_unlock(&mMutex);
1870 return -ENODEV;
1871 }
1872
1873 /* Check whether we have video stream */
1874 m_bIs4KVideo = false;
1875 m_bIsVideo = false;
1876 m_bEisSupportedSize = false;
1877 m_bTnrEnabled = false;
Mansoor Aftab93a66e52017-01-26 14:58:25 -08001878 m_bVideoHdrEnabled = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07001879 bool isZsl = false;
Emilian Peev7650c122017-01-19 08:24:33 -08001880 bool depthPresent = false;
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001881 bool isPreview = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07001882 uint32_t videoWidth = 0U;
1883 uint32_t videoHeight = 0U;
1884 size_t rawStreamCnt = 0;
1885 size_t stallStreamCnt = 0;
1886 size_t processedStreamCnt = 0;
1887 // Number of streams on ISP encoder path
1888 size_t numStreamsOnEncoder = 0;
1889 size_t numYuv888OnEncoder = 0;
1890 bool bYuv888OverrideJpeg = false;
1891 cam_dimension_t largeYuv888Size = {0, 0};
1892 cam_dimension_t maxViewfinderSize = {0, 0};
1893 bool bJpegExceeds4K = false;
1894 bool bJpegOnEncoder = false;
1895 bool bUseCommonFeatureMask = false;
1896 cam_feature_mask_t commonFeatureMask = 0;
1897 bool bSmallJpegSize = false;
1898 uint32_t width_ratio;
1899 uint32_t height_ratio;
1900 maxViewfinderSize = gCamCapability[mCameraId]->max_viewfinder_size;
1901 camera3_stream_t *inputStream = NULL;
1902 bool isJpeg = false;
1903 cam_dimension_t jpegSize = {0, 0};
Thierry Strudel9ec39c62016-12-28 11:30:05 -08001904 cam_dimension_t previewSize = {0, 0};
Emilian Peev0f3c3162017-03-15 12:57:46 +00001905 size_t pdStatCount = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07001906
1907 cam_padding_info_t padding_info = gCamCapability[mCameraId]->padding_info;
1908
1909 /*EIS configuration*/
Thierry Strudel3d639192016-09-09 11:52:26 -07001910 uint8_t eis_prop_set;
1911 uint32_t maxEisWidth = 0;
1912 uint32_t maxEisHeight = 0;
1913
Thierry Strudel295a0ca2016-11-03 18:38:47 -07001914 // Initialize all instant AEC related variables
1915 mInstantAEC = false;
1916 mResetInstantAEC = false;
1917 mInstantAECSettledFrameNumber = 0;
1918 mAecSkipDisplayFrameBound = 0;
1919 mInstantAecFrameIdxCount = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -08001920 mCurrFeatureState = 0;
1921 mStreamConfig = true;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07001922
Binhao Lin09245482017-08-31 18:25:29 -07001923 m_bAVTimerEnabled = false;
1924
Thierry Strudel3d639192016-09-09 11:52:26 -07001925 memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
1926
1927 size_t count = IS_TYPE_MAX;
1928 count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
1929 for (size_t i = 0; i < count; i++) {
1930 if ((gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001931 (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0)) {
1932 m_bEisSupported = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07001933 break;
1934 }
1935 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001936
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001937 if (m_bEisSupported) {
Thierry Strudel3d639192016-09-09 11:52:26 -07001938 maxEisWidth = MAX_EIS_WIDTH;
1939 maxEisHeight = MAX_EIS_HEIGHT;
1940 }
1941
1942 /* EIS setprop control */
1943 char eis_prop[PROPERTY_VALUE_MAX];
1944 memset(eis_prop, 0, sizeof(eis_prop));
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001945 property_get("persist.camera.eis.enable", eis_prop, "1");
Thierry Strudel3d639192016-09-09 11:52:26 -07001946 eis_prop_set = (uint8_t)atoi(eis_prop);
1947
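    // EIS is enabled only when the sensor supports it, the setprop requests it,
    // and we are not in constrained high-speed mode.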
Mansoor Aftabb9370df2017-03-15 17:09:34 -07001948 m_bEisEnable = eis_prop_set && m_bEisSupported &&
Thierry Strudel3d639192016-09-09 11:52:26 -07001949 (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE);
1950
Mansoor Aftabb9370df2017-03-15 17:09:34 -07001951 LOGD("m_bEisEnable: %d, eis_prop_set: %d, m_bEisSupported: %d",
1952 m_bEisEnable, eis_prop_set, m_bEisSupported);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001953
Thierry Strudel3d639192016-09-09 11:52:26 -07001954 /* stream configurations */
1955 for (size_t i = 0; i < streamList->num_streams; i++) {
1956 camera3_stream_t *newStream = streamList->streams[i];
1957 LOGI("stream[%d] type = %d, format = %d, width = %d, "
1958 "height = %d, rotation = %d, usage = 0x%x",
1959 i, newStream->stream_type, newStream->format,
1960 newStream->width, newStream->height, newStream->rotation,
1961 newStream->usage);
1962 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1963 newStream->stream_type == CAMERA3_STREAM_INPUT){
1964 isZsl = true;
1965 }
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001966 if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
1967 IS_USAGE_PREVIEW(newStream->usage)) {
1968 isPreview = true;
1969 }
1970
Thierry Strudel3d639192016-09-09 11:52:26 -07001971 if (newStream->stream_type == CAMERA3_STREAM_INPUT){
1972 inputStream = newStream;
1973 }
1974
Emilian Peev7650c122017-01-19 08:24:33 -08001975 if ((newStream->format == HAL_PIXEL_FORMAT_BLOB) &&
1976 (newStream->data_space != HAL_DATASPACE_DEPTH)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07001977 isJpeg = true;
1978 jpegSize.width = newStream->width;
1979 jpegSize.height = newStream->height;
1980 if (newStream->width > VIDEO_4K_WIDTH ||
1981 newStream->height > VIDEO_4K_HEIGHT)
1982 bJpegExceeds4K = true;
1983 }
1984
1985 if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
1986 (newStream->usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER)) {
1987 m_bIsVideo = true;
Thierry Strudel2896d122017-02-23 19:18:03 -08001988 // In HAL3 we can have multiple different video streams.
1989 // The variables video width and height are used below as
1990 // dimensions of the biggest of them
1991 if (videoWidth < newStream->width ||
1992 videoHeight < newStream->height) {
1993 videoWidth = newStream->width;
1994 videoHeight = newStream->height;
1995 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001996 if ((VIDEO_4K_WIDTH <= newStream->width) &&
1997 (VIDEO_4K_HEIGHT <= newStream->height)) {
1998 m_bIs4KVideo = true;
1999 }
2000 m_bEisSupportedSize = (newStream->width <= maxEisWidth) &&
2001 (newStream->height <= maxEisHeight);
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002002
Thierry Strudel3d639192016-09-09 11:52:26 -07002003 }
2004 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
2005 newStream->stream_type == CAMERA3_STREAM_OUTPUT) {
2006 switch (newStream->format) {
2007 case HAL_PIXEL_FORMAT_BLOB:
Emilian Peev7650c122017-01-19 08:24:33 -08002008 if (newStream->data_space == HAL_DATASPACE_DEPTH) {
2009 depthPresent = true;
2010 break;
2011 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002012 stallStreamCnt++;
2013 if (isOnEncoder(maxViewfinderSize, newStream->width,
2014 newStream->height)) {
2015 numStreamsOnEncoder++;
2016 bJpegOnEncoder = true;
2017 }
2018 width_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.width,
2019 newStream->width);
2020 height_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.height,
 2021                    newStream->height);
2022 FATAL_IF(gCamCapability[mCameraId]->max_downscale_factor == 0,
2023 "FATAL: max_downscale_factor cannot be zero and so assert");
2024 if ( (width_ratio > gCamCapability[mCameraId]->max_downscale_factor) ||
2025 (height_ratio > gCamCapability[mCameraId]->max_downscale_factor)) {
2026 LOGH("Setting small jpeg size flag to true");
2027 bSmallJpegSize = true;
2028 }
2029 break;
2030 case HAL_PIXEL_FORMAT_RAW10:
2031 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2032 case HAL_PIXEL_FORMAT_RAW16:
2033 rawStreamCnt++;
Emilian Peev0f3c3162017-03-15 12:57:46 +00002034 if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
2035 (HAL_PIXEL_FORMAT_RAW16 == newStream->format)) {
2036 pdStatCount++;
2037 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002038 break;
2039 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
2040 processedStreamCnt++;
2041 if (isOnEncoder(maxViewfinderSize, newStream->width,
2042 newStream->height)) {
2043 if (newStream->stream_type != CAMERA3_STREAM_BIDIRECTIONAL &&
2044 !IS_USAGE_ZSL(newStream->usage)) {
2045 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2046 }
2047 numStreamsOnEncoder++;
2048 }
2049 break;
2050 case HAL_PIXEL_FORMAT_YCbCr_420_888:
2051 processedStreamCnt++;
2052 if (isOnEncoder(maxViewfinderSize, newStream->width,
2053 newStream->height)) {
2054 // If Yuv888 size is not greater than 4K, set feature mask
 2055                    // to SUPERSET so that it supports concurrent requests on
2056 // YUV and JPEG.
2057 if (newStream->width <= VIDEO_4K_WIDTH &&
2058 newStream->height <= VIDEO_4K_HEIGHT) {
2059 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2060 }
2061 numStreamsOnEncoder++;
2062 numYuv888OnEncoder++;
2063 largeYuv888Size.width = newStream->width;
2064 largeYuv888Size.height = newStream->height;
2065 }
2066 break;
2067 default:
2068 processedStreamCnt++;
2069 if (isOnEncoder(maxViewfinderSize, newStream->width,
2070 newStream->height)) {
2071 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2072 numStreamsOnEncoder++;
2073 }
2074 break;
2075 }
2076
2077 }
2078 }
2079
2080 if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
2081 gCamCapability[mCameraId]->position == CAM_POSITION_FRONT_AUX ||
2082 !m_bIsVideo) {
2083 m_bEisEnable = false;
2084 }
2085
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002086 if (validateUsageFlagsForEis(streamList) != NO_ERROR) {
2087 pthread_mutex_unlock(&mMutex);
2088 return -EINVAL;
2089 }
2090
Thierry Strudel54dc9782017-02-15 12:12:10 -08002091 uint8_t forceEnableTnr = 0;
2092 char tnr_prop[PROPERTY_VALUE_MAX];
2093 memset(tnr_prop, 0, sizeof(tnr_prop));
2094 property_get("debug.camera.tnr.forceenable", tnr_prop, "0");
2095 forceEnableTnr = (uint8_t)atoi(tnr_prop);
2096
Thierry Strudel3d639192016-09-09 11:52:26 -07002097 /* Logic to enable/disable TNR based on specific config size/etc.*/
2098 if ((m_bTnrPreview || m_bTnrVideo) && m_bIsVideo &&
Thierry Strudel3d639192016-09-09 11:52:26 -07002099 (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE))
2100 m_bTnrEnabled = true;
Thierry Strudel54dc9782017-02-15 12:12:10 -08002101 else if (forceEnableTnr)
2102 m_bTnrEnabled = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07002103
Mansoor Aftab93a66e52017-01-26 14:58:25 -08002104 char videoHdrProp[PROPERTY_VALUE_MAX];
2105 memset(videoHdrProp, 0, sizeof(videoHdrProp));
2106 property_get("persist.camera.hdr.video", videoHdrProp, "0");
2107 uint8_t hdr_mode_prop = (uint8_t)atoi(videoHdrProp);
2108
2109 if (hdr_mode_prop == 1 && m_bIsVideo &&
2110 mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2111 m_bVideoHdrEnabled = true;
2112 else
2113 m_bVideoHdrEnabled = false;
2114
2115
Thierry Strudel3d639192016-09-09 11:52:26 -07002116 /* Check if num_streams is sane */
2117 if (stallStreamCnt > MAX_STALLING_STREAMS ||
2118 rawStreamCnt > MAX_RAW_STREAMS ||
2119 processedStreamCnt > MAX_PROCESSED_STREAMS) {
2120 LOGE("Invalid stream configu: stall: %d, raw: %d, processed %d",
2121 stallStreamCnt, rawStreamCnt, processedStreamCnt);
2122 pthread_mutex_unlock(&mMutex);
2123 return -EINVAL;
2124 }
2125 /* Check whether we have zsl stream or 4k video case */
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002126 if (isZsl && m_bIs4KVideo) {
2127 LOGE("Currently invalid configuration ZSL & 4K Video!");
Thierry Strudel3d639192016-09-09 11:52:26 -07002128 pthread_mutex_unlock(&mMutex);
2129 return -EINVAL;
2130 }
2131 /* Check if stream sizes are sane */
2132 if (numStreamsOnEncoder > 2) {
2133 LOGE("Number of streams on ISP encoder path exceeds limits of 2");
2134 pthread_mutex_unlock(&mMutex);
2135 return -EINVAL;
2136 } else if (1 < numStreamsOnEncoder){
2137 bUseCommonFeatureMask = true;
2138 LOGH("Multiple streams above max viewfinder size, common mask needed");
2139 }
2140
2141 /* Check if BLOB size is greater than 4k in 4k recording case */
2142 if (m_bIs4KVideo && bJpegExceeds4K) {
2143 LOGE("HAL doesn't support Blob size greater than 4k in 4k recording");
2144 pthread_mutex_unlock(&mMutex);
2145 return -EINVAL;
2146 }
2147
Emilian Peev7650c122017-01-19 08:24:33 -08002148 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2149 depthPresent) {
2150 LOGE("HAL doesn't support depth streams in HFR mode!");
2151 pthread_mutex_unlock(&mMutex);
2152 return -EINVAL;
2153 }
2154
Thierry Strudel3d639192016-09-09 11:52:26 -07002155 // When JPEG and preview streams share VFE output, CPP will not apply CAC2
2156 // on JPEG stream. So disable such configurations to ensure CAC2 is applied.
2157 // Don't fail for reprocess configurations. Also don't fail if bJpegExceeds4K
2158 // is not true. Otherwise testMandatoryOutputCombinations will fail with following
2159 // configurations:
2160 // {[PRIV, PREVIEW] [PRIV, RECORD] [JPEG, RECORD]}
2161 // {[PRIV, PREVIEW] [YUV, RECORD] [JPEG, RECORD]}
2162 // (These two configurations will not have CAC2 enabled even in HQ modes.)
2163 if (!isZsl && bJpegOnEncoder && bJpegExceeds4K && bUseCommonFeatureMask) {
2164 ALOGE("%s: Blob size greater than 4k and multiple streams are on encoder output",
2165 __func__);
2166 pthread_mutex_unlock(&mMutex);
2167 return -EINVAL;
2168 }
2169
2170 // If jpeg stream is available, and a YUV 888 stream is on Encoder path, and
2171 // the YUV stream's size is greater or equal to the JPEG size, set common
2172 // postprocess mask to NONE, so that we can take advantage of postproc bypass.
2173 if (numYuv888OnEncoder && isOnEncoder(maxViewfinderSize,
2174 jpegSize.width, jpegSize.height) &&
2175 largeYuv888Size.width > jpegSize.width &&
2176 largeYuv888Size.height > jpegSize.height) {
2177 bYuv888OverrideJpeg = true;
2178 } else if (!isJpeg && numStreamsOnEncoder > 1) {
2179 commonFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2180 }
2181
2182 LOGH("max viewfinder width %d height %d isZsl %d bUseCommonFeature %x commonFeatureMask %llx",
2183 maxViewfinderSize.width, maxViewfinderSize.height, isZsl, bUseCommonFeatureMask,
2184 commonFeatureMask);
2185 LOGH("numStreamsOnEncoder %d, processedStreamCnt %d, stallcnt %d bSmallJpegSize %d",
2186 numStreamsOnEncoder, processedStreamCnt, stallStreamCnt, bSmallJpegSize);
2187
2188 rc = validateStreamDimensions(streamList);
2189 if (rc == NO_ERROR) {
2190 rc = validateStreamRotations(streamList);
2191 }
2192 if (rc != NO_ERROR) {
2193 LOGE("Invalid stream configuration requested!");
2194 pthread_mutex_unlock(&mMutex);
2195 return rc;
2196 }
2197
Emilian Peev0f3c3162017-03-15 12:57:46 +00002198 if (1 < pdStatCount) {
2199 LOGE("HAL doesn't support multiple PD streams");
2200 pthread_mutex_unlock(&mMutex);
2201 return -EINVAL;
2202 }
2203
2204 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2205 (1 == pdStatCount)) {
2206 LOGE("HAL doesn't support PD streams in HFR mode!");
2207 pthread_mutex_unlock(&mMutex);
2208 return -EINVAL;
2209 }
2210
Thierry Strudel3d639192016-09-09 11:52:26 -07002211 camera3_stream_t *zslStream = NULL; //Only use this for size and not actual handle!
2212 for (size_t i = 0; i < streamList->num_streams; i++) {
2213 camera3_stream_t *newStream = streamList->streams[i];
2214 LOGH("newStream type = %d, stream format = %d "
2215 "stream size : %d x %d, stream rotation = %d",
2216 newStream->stream_type, newStream->format,
2217 newStream->width, newStream->height, newStream->rotation);
2218 //if the stream is in the mStreamList validate it
2219 bool stream_exists = false;
2220 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
2221 it != mStreamInfo.end(); it++) {
2222 if ((*it)->stream == newStream) {
2223 QCamera3ProcessingChannel *channel =
2224 (QCamera3ProcessingChannel*)(*it)->stream->priv;
2225 stream_exists = true;
2226 if (channel)
2227 delete channel;
2228 (*it)->status = VALID;
2229 (*it)->stream->priv = NULL;
2230 (*it)->channel = NULL;
2231 }
2232 }
2233 if (!stream_exists && newStream->stream_type != CAMERA3_STREAM_INPUT) {
2234 //new stream
2235 stream_info_t* stream_info;
2236 stream_info = (stream_info_t* )malloc(sizeof(stream_info_t));
2237 if (!stream_info) {
2238 LOGE("Could not allocate stream info");
2239 rc = -ENOMEM;
2240 pthread_mutex_unlock(&mMutex);
2241 return rc;
2242 }
2243 stream_info->stream = newStream;
2244 stream_info->status = VALID;
2245 stream_info->channel = NULL;
Chien-Yu Chen14d3e392017-07-10 18:27:05 -07002246 stream_info->id = i;
Thierry Strudel3d639192016-09-09 11:52:26 -07002247 mStreamInfo.push_back(stream_info);
2248 }
2249 /* Covers Opaque ZSL and API1 F/W ZSL */
2250 if (IS_USAGE_ZSL(newStream->usage)
2251 || newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ) {
2252 if (zslStream != NULL) {
2253 LOGE("Multiple input/reprocess streams requested!");
2254 pthread_mutex_unlock(&mMutex);
2255 return BAD_VALUE;
2256 }
2257 zslStream = newStream;
2258 }
2259 /* Covers YUV reprocess */
2260 if (inputStream != NULL) {
2261 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT
2262 && newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
2263 && inputStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
2264 && inputStream->width == newStream->width
2265 && inputStream->height == newStream->height) {
2266 if (zslStream != NULL) {
 2267                    /* This scenario indicates that multiple YUV streams with the
 2268                     * same size as the input stream have been requested. Since the
 2269                     * zsl stream handle is used solely to override the size of streams
 2270                     * which share h/w streams, we just make a guess here as to which
 2271                     * stream is the ZSL stream; this will be refactored once we have
 2272                     * generic logic for streams sharing encoder output
2273 */
2274 LOGH("Warning, Multiple ip/reprocess streams requested!");
2275 }
2276 zslStream = newStream;
2277 }
2278 }
2279 }
2280
2281 /* If a zsl stream is set, we know that we have configured at least one input or
2282 bidirectional stream */
2283 if (NULL != zslStream) {
2284 mInputStreamInfo.dim.width = (int32_t)zslStream->width;
2285 mInputStreamInfo.dim.height = (int32_t)zslStream->height;
2286 mInputStreamInfo.format = zslStream->format;
2287 mInputStreamInfo.usage = zslStream->usage;
2288 LOGD("Input stream configured! %d x %d, format %d, usage %d",
2289 mInputStreamInfo.dim.width,
2290 mInputStreamInfo.dim.height,
2291 mInputStreamInfo.format, mInputStreamInfo.usage);
2292 }
2293
2294 cleanAndSortStreamInfo();
2295 if (mMetadataChannel) {
2296 delete mMetadataChannel;
2297 mMetadataChannel = NULL;
2298 }
2299 if (mSupportChannel) {
2300 delete mSupportChannel;
2301 mSupportChannel = NULL;
2302 }
2303
2304 if (mAnalysisChannel) {
2305 delete mAnalysisChannel;
2306 mAnalysisChannel = NULL;
2307 }
2308
2309 if (mDummyBatchChannel) {
2310 delete mDummyBatchChannel;
2311 mDummyBatchChannel = NULL;
2312 }
2313
Emilian Peev7650c122017-01-19 08:24:33 -08002314 if (mDepthChannel) {
2315 mDepthChannel = NULL;
2316 }
Emilian Peev656e4fa2017-06-02 16:47:04 +01002317 mDepthCloudMode = CAM_PD_DATA_SKIP;
Emilian Peev7650c122017-01-19 08:24:33 -08002318
Chien-Yu Chen3f303522017-05-19 15:21:45 -07002319 mShutterDispatcher.clear();
2320 mOutputBufferDispatcher.clear();
2321
Thierry Strudel2896d122017-02-23 19:18:03 -08002322 char is_type_value[PROPERTY_VALUE_MAX];
2323 property_get("persist.camera.is_type", is_type_value, "4");
2324 m_bEis3PropertyEnabled = (atoi(is_type_value) == IS_TYPE_EIS_3_0);
2325
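    // persist.camera.gzoom.at is a bitmask: bit 0 enables Google zoom for the video
    // stream and bit 1 for the preview stream (back camera only); persist.camera.gzoom.4k
    // additionally allows it for 4K video.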
Binhao Line406f062017-05-03 14:39:44 -07002326 char property_value[PROPERTY_VALUE_MAX];
2327 property_get("persist.camera.gzoom.at", property_value, "0");
2328 int goog_zoom_at = atoi(property_value);
Jason Leec4cf5032017-05-24 18:31:41 -07002329 bool is_goog_zoom_video_enabled = ((goog_zoom_at & 1) > 0) &&
2330 gCamCapability[mCameraId]->position == CAM_POSITION_BACK;
2331 bool is_goog_zoom_preview_enabled = ((goog_zoom_at & 2) > 0) &&
2332 gCamCapability[mCameraId]->position == CAM_POSITION_BACK;
Binhao Line406f062017-05-03 14:39:44 -07002333
2334 property_get("persist.camera.gzoom.4k", property_value, "0");
2335 bool is_goog_zoom_4k_enabled = (atoi(property_value) > 0);
2336
Thierry Strudel3d639192016-09-09 11:52:26 -07002337 //Create metadata channel and initialize it
2338 cam_feature_mask_t metadataFeatureMask = CAM_QCOM_FEATURE_NONE;
2339 setPAAFSupport(metadataFeatureMask, CAM_STREAM_TYPE_METADATA,
2340 gCamCapability[mCameraId]->color_arrangement);
2341 mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
2342 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002343 setBufferErrorStatus, &padding_info, metadataFeatureMask, this);
Thierry Strudel3d639192016-09-09 11:52:26 -07002344 if (mMetadataChannel == NULL) {
2345 LOGE("failed to allocate metadata channel");
2346 rc = -ENOMEM;
2347 pthread_mutex_unlock(&mMutex);
2348 return rc;
2349 }
Emilian Peev662c05e2017-05-16 10:00:04 +01002350 mMetadataChannel->enableDepthData(depthPresent);
Thierry Strudel3d639192016-09-09 11:52:26 -07002351 rc = mMetadataChannel->initialize(IS_TYPE_NONE);
2352 if (rc < 0) {
2353 LOGE("metadata channel initialization failed");
2354 delete mMetadataChannel;
2355 mMetadataChannel = NULL;
2356 pthread_mutex_unlock(&mMutex);
2357 return rc;
2358 }
2359
Thierry Strudel2896d122017-02-23 19:18:03 -08002360 cam_feature_mask_t zsl_ppmask = CAM_QCOM_FEATURE_NONE;
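    // zsl_ppmask records the postprocess mask chosen for the ZSL snapshot stream; it is
    // reused further down when configuring the BLOB (JPEG) stream in the ZSL case.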
Thierry Strudel3d639192016-09-09 11:52:26 -07002361 bool isRawStreamRequested = false;
Thierry Strudel2896d122017-02-23 19:18:03 -08002362 bool onlyRaw = true;
Binhao Lincdb362a2017-04-20 13:31:54 -07002363 // Keep track of preview/video streams indices.
2364 // There could be more than one preview streams, but only one video stream.
2365 int32_t video_stream_idx = -1;
2366 int32_t preview_stream_idx[streamList->num_streams];
2367 size_t preview_stream_cnt = 0;
Jason Leea52b77e2017-06-27 16:16:17 -07002368 bool previewTnr[streamList->num_streams];
2369 memset(previewTnr, 0, sizeof(bool) * streamList->num_streams);
2370 bool isFront = gCamCapability[mCameraId]->position == CAM_POSITION_FRONT;
2371 // Loop through once to determine preview TNR conditions before creating channels.
2372 for (size_t i = 0; i < streamList->num_streams; i++) {
2373 camera3_stream_t *newStream = streamList->streams[i];
2374 uint32_t stream_usage = newStream->usage;
2375 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT &&
2376 newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {
2377 if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER)
2378 video_stream_idx = (int32_t)i;
2379 else
2380 preview_stream_idx[preview_stream_cnt++] = (int32_t)i;
2381 }
2382 }
2383 // By default, preview stream TNR is disabled.
2384 // Enable TNR to the preview stream if all conditions below are satisfied:
2385 // 1. preview resolution == video resolution.
2386 // 2. video stream TNR is enabled.
2387 // 3. EIS2.0 OR is front camera (which wouldn't use EIS3 even if it's set)
2388 for (size_t i = 0; i < preview_stream_cnt && video_stream_idx != -1; i++) {
2389 camera3_stream_t *video_stream = streamList->streams[video_stream_idx];
2390 camera3_stream_t *preview_stream = streamList->streams[preview_stream_idx[i]];
2391 if (m_bTnrEnabled && m_bTnrVideo &&
2392 (isFront || (atoi(is_type_value) == IS_TYPE_EIS_2_0)) &&
2393 video_stream->width == preview_stream->width &&
2394 video_stream->height == preview_stream->height) {
2395 previewTnr[preview_stream_idx[i]] = true;
2396 }
2397 }
2398
Thierry Strudel3d639192016-09-09 11:52:26 -07002399 memset(&mStreamConfigInfo, 0, sizeof(cam_stream_size_info_t));
2400 /* Allocate channel objects for the requested streams */
2401 for (size_t i = 0; i < streamList->num_streams; i++) {
Binhao Line406f062017-05-03 14:39:44 -07002402
Thierry Strudel3d639192016-09-09 11:52:26 -07002403 camera3_stream_t *newStream = streamList->streams[i];
2404 uint32_t stream_usage = newStream->usage;
2405 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)newStream->width;
2406 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)newStream->height;
2407 struct camera_info *p_info = NULL;
2408 pthread_mutex_lock(&gCamLock);
2409 p_info = get_cam_info(mCameraId, &mStreamConfigInfo.sync_type);
2410 pthread_mutex_unlock(&gCamLock);
2411 if ((newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
2412 || IS_USAGE_ZSL(newStream->usage)) &&
2413 newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED){
Thierry Strudel2896d122017-02-23 19:18:03 -08002414 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002415 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
Thierry Strudel2896d122017-02-23 19:18:03 -08002416 if (isOnEncoder(maxViewfinderSize, newStream->width, newStream->height)) {
2417 if (bUseCommonFeatureMask)
2418 zsl_ppmask = commonFeatureMask;
2419 else
2420 zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002421 } else {
Thierry Strudel2896d122017-02-23 19:18:03 -08002422 if (numStreamsOnEncoder > 0)
2423 zsl_ppmask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2424 else
2425 zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002426 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002427 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = zsl_ppmask;
Thierry Strudel3d639192016-09-09 11:52:26 -07002428 } else if(newStream->stream_type == CAMERA3_STREAM_INPUT) {
Thierry Strudel2896d122017-02-23 19:18:03 -08002429 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002430 LOGH("Input stream configured, reprocess config");
2431 } else {
2432 //for non zsl streams find out the format
2433 switch (newStream->format) {
2434 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED :
2435 {
Thierry Strudel2896d122017-02-23 19:18:03 -08002436 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002437 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2438 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2439 /* add additional features to pp feature mask */
2440 addToPPFeatureMask(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
2441 mStreamConfigInfo.num_streams);
2442
2443 if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) {
2444 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2445 CAM_STREAM_TYPE_VIDEO;
2446 if (m_bTnrEnabled && m_bTnrVideo) {
2447 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2448 CAM_QCOM_FEATURE_CPP_TNR;
2449 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
2450 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2451 ~CAM_QCOM_FEATURE_CDS;
2452 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002453 if (m_bEis3PropertyEnabled /* hint for EIS 3 needed here */) {
2454 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2455 CAM_QTI_FEATURE_PPEISCORE;
2456 }
Binhao Line406f062017-05-03 14:39:44 -07002457 if (is_goog_zoom_video_enabled && (is_goog_zoom_4k_enabled || !m_bIs4KVideo)) {
2458 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2459 CAM_QCOM_FEATURE_GOOG_ZOOM;
2460 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002461 } else {
2462 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2463 CAM_STREAM_TYPE_PREVIEW;
Jason Leea52b77e2017-06-27 16:16:17 -07002464 if (m_bTnrEnabled && (previewTnr[i] || m_bTnrPreview)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002465 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2466 CAM_QCOM_FEATURE_CPP_TNR;
2467 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
2468 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2469 ~CAM_QCOM_FEATURE_CDS;
2470 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002471 if(!m_bSwTnrPreview) {
2472 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2473 ~CAM_QTI_FEATURE_SW_TNR;
2474 }
Binhao Line406f062017-05-03 14:39:44 -07002475 if (is_goog_zoom_preview_enabled) {
2476 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2477 CAM_QCOM_FEATURE_GOOG_ZOOM;
2478 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002479 padding_info.width_padding = mSurfaceStridePadding;
2480 padding_info.height_padding = CAM_PAD_TO_2;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002481 previewSize.width = (int32_t)newStream->width;
2482 previewSize.height = (int32_t)newStream->height;
Thierry Strudel3d639192016-09-09 11:52:26 -07002483 }
2484 if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
2485 (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
2486 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2487 newStream->height;
2488 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2489 newStream->width;
2490 }
2491 }
2492 break;
2493 case HAL_PIXEL_FORMAT_YCbCr_420_888:
Thierry Strudel2896d122017-02-23 19:18:03 -08002494 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002495 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_CALLBACK;
2496 if (isOnEncoder(maxViewfinderSize, newStream->width, newStream->height)) {
2497 if (bUseCommonFeatureMask)
2498 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2499 commonFeatureMask;
2500 else
2501 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2502 CAM_QCOM_FEATURE_NONE;
2503 } else {
2504 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2505 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2506 }
2507 break;
2508 case HAL_PIXEL_FORMAT_BLOB:
Thierry Strudel2896d122017-02-23 19:18:03 -08002509 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002510 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
2511 // No need to check bSmallJpegSize if ZSL is present since JPEG uses ZSL stream
2512 if ((m_bIs4KVideo && !isZsl) || (bSmallJpegSize && !isZsl)) {
2513 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2514 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel2896d122017-02-23 19:18:03 -08002515 /* Remove rotation if it is not supported
2516 for 4K LiveVideo snapshot case (online processing) */
2517 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask &
2518 CAM_QCOM_FEATURE_ROTATION)) {
2519 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams]
2520 &= ~CAM_QCOM_FEATURE_ROTATION;
2521 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002522 } else {
2523 if (bUseCommonFeatureMask &&
2524 isOnEncoder(maxViewfinderSize, newStream->width,
2525 newStream->height)) {
2526 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = commonFeatureMask;
2527 } else {
2528 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2529 }
2530 }
2531 if (isZsl) {
2532 if (zslStream) {
2533 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2534 (int32_t)zslStream->width;
2535 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2536 (int32_t)zslStream->height;
Thierry Strudel2896d122017-02-23 19:18:03 -08002537 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2538 zsl_ppmask;
Thierry Strudel3d639192016-09-09 11:52:26 -07002539 } else {
2540 LOGE("Error, No ZSL stream identified");
2541 pthread_mutex_unlock(&mMutex);
2542 return -EINVAL;
2543 }
2544 } else if (m_bIs4KVideo) {
2545 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)videoWidth;
2546 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)videoHeight;
2547 } else if (bYuv888OverrideJpeg) {
2548 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2549 (int32_t)largeYuv888Size.width;
2550 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2551 (int32_t)largeYuv888Size.height;
2552 }
2553 break;
2554 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2555 case HAL_PIXEL_FORMAT_RAW16:
2556 case HAL_PIXEL_FORMAT_RAW10:
2557 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
2558 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2559 isRawStreamRequested = true;
Emilian Peev0f3c3162017-03-15 12:57:46 +00002560 if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
2561 (HAL_PIXEL_FORMAT_RAW16 == newStream->format)) {
2562 mStreamConfigInfo.sub_format_type[mStreamConfigInfo.num_streams] =
2563 gCamCapability[mCameraId]->sub_fmt[mPDIndex];
2564 mStreamConfigInfo.format[mStreamConfigInfo.num_streams] =
2565 gCamCapability[mCameraId]->supported_meta_raw_fmts[mPDIndex];
2566 mStreamConfigInfo.dt[mStreamConfigInfo.num_streams] =
2567 gCamCapability[mCameraId]->dt[mPDIndex];
2568 mStreamConfigInfo.vc[mStreamConfigInfo.num_streams] =
2569 gCamCapability[mCameraId]->vc[mPDIndex];
2570 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002571 break;
2572 default:
Thierry Strudel2896d122017-02-23 19:18:03 -08002573 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002574 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_DEFAULT;
2575 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2576 break;
2577 }
2578 }
2579
2580 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2581 (cam_stream_type_t) mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2582 gCamCapability[mCameraId]->color_arrangement);
2583
2584 if (newStream->priv == NULL) {
2585 //New stream, construct channel
2586 switch (newStream->stream_type) {
2587 case CAMERA3_STREAM_INPUT:
2588 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ;
2589 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE; //write access also required for in-place algorithms
2590 break;
2591 case CAMERA3_STREAM_BIDIRECTIONAL:
2592 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ |
2593 GRALLOC_USAGE_HW_CAMERA_WRITE;
2594 break;
2595 case CAMERA3_STREAM_OUTPUT:
2596 /* For video encoding stream, set read/write rarely
2597 * flags so that the buffers may be allocated un-cached */
2598 if (newStream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER)
2599 newStream->usage |=
2600 (GRALLOC_USAGE_SW_READ_RARELY |
2601 GRALLOC_USAGE_SW_WRITE_RARELY |
2602 GRALLOC_USAGE_HW_CAMERA_WRITE);
2603 else if (IS_USAGE_ZSL(newStream->usage))
2604 {
2605 LOGD("ZSL usage flag skipping");
2606 }
2607 else if (newStream == zslStream
2608 || newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
2609 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_ZSL;
2610 } else
2611 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;
2612 break;
2613 default:
2614 LOGE("Invalid stream_type %d", newStream->stream_type);
2615 break;
2616 }
2617
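 /* UBWC is assumed for preview output by default; it is turned off below when
  * the preview size matches the video size (and video UBWC is disabled), so
  * CPP duplication can be reused, or when goog_zoom is linked to the stream. */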
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002618 bool forcePreviewUBWC = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07002619 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT ||
2620 newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
2621 QCamera3ProcessingChannel *channel = NULL;
2622 switch (newStream->format) {
2623 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
2624 if ((newStream->usage &
2625 private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) &&
2626 (streamList->operation_mode ==
2627 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2628 ) {
2629 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2630 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002631 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002632 this,
2633 newStream,
2634 (cam_stream_type_t)
2635 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2636 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2637 mMetadataChannel,
2638 0); //heap buffers are not required for HFR video channel
2639 if (channel == NULL) {
2640 LOGE("allocation of channel failed");
2641 pthread_mutex_unlock(&mMutex);
2642 return -ENOMEM;
2643 }
2644 //channel->getNumBuffers() will return 0 here so use
2645 //MAX_INFLIGHT_HFR_REQUESTS
2646 newStream->max_buffers = MAX_INFLIGHT_HFR_REQUESTS;
2647 newStream->priv = channel;
2648 LOGI("num video buffers in HFR mode: %d",
2649 MAX_INFLIGHT_HFR_REQUESTS);
2650 } else {
2651 /* Copy stream contents in HFR preview only case to create
2652 * dummy batch channel so that sensor streaming is in
2653 * HFR mode */
2654 if (!m_bIsVideo && (streamList->operation_mode ==
2655 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)) {
2656 mDummyBatchStream = *newStream;
2657 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002658 int bufferCount = MAX_INFLIGHT_REQUESTS;
2659 if (mStreamConfigInfo.type[mStreamConfigInfo.num_streams] ==
2660 CAM_STREAM_TYPE_VIDEO) {
Zhijun He6cdf6372017-07-15 14:59:58 -07002661 if (m_bEis3PropertyEnabled /* hint for EIS 3 needed here */) {
2662 // WAR: 4K video can only run <=30fps, reduce the buffer count.
2663 bufferCount = m_bIs4KVideo ?
2664 MAX_30FPS_VIDEO_BUFFERS : MAX_VIDEO_BUFFERS;
2665 }
2666
Thierry Strudel2896d122017-02-23 19:18:03 -08002667 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002668 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2669 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002670 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002671 this,
2672 newStream,
2673 (cam_stream_type_t)
2674 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2675 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2676 mMetadataChannel,
Thierry Strudel2896d122017-02-23 19:18:03 -08002677 bufferCount);
Thierry Strudel3d639192016-09-09 11:52:26 -07002678 if (channel == NULL) {
2679 LOGE("allocation of channel failed");
2680 pthread_mutex_unlock(&mMutex);
2681 return -ENOMEM;
2682 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002683 /* disable UBWC for preview, though supported,
2684 * to take advantage of CPP duplication */
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002685 if (m_bIsVideo && (!QCameraCommon::isVideoUBWCEnabled()) &&
Thierry Strudel2896d122017-02-23 19:18:03 -08002686 (previewSize.width == (int32_t)videoWidth)&&
2687 (previewSize.height == (int32_t)videoHeight)){
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002688 forcePreviewUBWC = false;
Thierry Strudel2896d122017-02-23 19:18:03 -08002689 }
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002690 channel->setUBWCEnabled(forcePreviewUBWC);
Binhao Line406f062017-05-03 14:39:44 -07002691 /* When goog_zoom is linked to the preview or video stream,
2692 * disable UBWC for the linked stream */
2693 if ((mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &
2694 CAM_QCOM_FEATURE_GOOG_ZOOM) != 0) {
2695 channel->setUBWCEnabled(false);
2696 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002697 newStream->max_buffers = channel->getNumBuffers();
2698 newStream->priv = channel;
2699 }
2700 break;
2701 case HAL_PIXEL_FORMAT_YCbCr_420_888: {
2702 channel = new QCamera3YUVChannel(mCameraHandle->camera_handle,
2703 mChannelHandle,
2704 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002705 setBufferErrorStatus, &padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002706 this,
2707 newStream,
2708 (cam_stream_type_t)
2709 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2710 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2711 mMetadataChannel);
2712 if (channel == NULL) {
2713 LOGE("allocation of YUV channel failed");
2714 pthread_mutex_unlock(&mMutex);
2715 return -ENOMEM;
2716 }
2717 newStream->max_buffers = channel->getNumBuffers();
2718 newStream->priv = channel;
2719 break;
2720 }
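 /* RAW capture goes through a dedicated QCamera3RawChannel. isRAW16 is true only
  * for plain RAW16 output; a RAW16 stream with a depth dataspace is handled as
  * the PD/depth-data case instead. */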
2721 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2722 case HAL_PIXEL_FORMAT_RAW16:
Emilian Peev0f3c3162017-03-15 12:57:46 +00002723 case HAL_PIXEL_FORMAT_RAW10: {
2724 bool isRAW16 = ((newStream->format == HAL_PIXEL_FORMAT_RAW16) &&
2725 (HAL_DATASPACE_DEPTH != newStream->data_space))
2726 ? true : false;
Thierry Strudel3d639192016-09-09 11:52:26 -07002727 mRawChannel = new QCamera3RawChannel(
2728 mCameraHandle->camera_handle, mChannelHandle,
2729 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002730 setBufferErrorStatus, &padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002731 this, newStream,
2732 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
Emilian Peev0f3c3162017-03-15 12:57:46 +00002733 mMetadataChannel, isRAW16);
Thierry Strudel3d639192016-09-09 11:52:26 -07002734 if (mRawChannel == NULL) {
2735 LOGE("allocation of raw channel failed");
2736 pthread_mutex_unlock(&mMutex);
2737 return -ENOMEM;
2738 }
2739 newStream->max_buffers = mRawChannel->getNumBuffers();
2740 newStream->priv = (QCamera3ProcessingChannel*)mRawChannel;
2741 break;
Emilian Peev0f3c3162017-03-15 12:57:46 +00002742 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002743 case HAL_PIXEL_FORMAT_BLOB:
Emilian Peev7650c122017-01-19 08:24:33 -08002744 if (newStream->data_space == HAL_DATASPACE_DEPTH) {
2745 mDepthChannel = new QCamera3DepthChannel(
2746 mCameraHandle->camera_handle, mChannelHandle,
2747 mCameraHandle->ops, NULL, NULL, &padding_info,
2748 0, this, MAX_INFLIGHT_REQUESTS, newStream,
2749 mMetadataChannel);
2750 if (NULL == mDepthChannel) {
2751 LOGE("Allocation of depth channel failed");
2752 pthread_mutex_unlock(&mMutex);
2753 return NO_MEMORY;
2754 }
2755 newStream->priv = mDepthChannel;
2756 newStream->max_buffers = MAX_INFLIGHT_REQUESTS;
2757 } else {
2758 // Max live snapshot inflight buffer is 1. This is to mitigate
2759 // frame drop issues for video snapshot. The more buffers being
2760 // allocated, the more frame drops there are.
2761 mPictureChannel = new QCamera3PicChannel(
2762 mCameraHandle->camera_handle, mChannelHandle,
2763 mCameraHandle->ops, captureResultCb,
2764 setBufferErrorStatus, &padding_info, this, newStream,
2765 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2766 m_bIs4KVideo, isZsl, mMetadataChannel,
2767 (m_bIsVideo ? 1 : MAX_INFLIGHT_BLOB));
2768 if (mPictureChannel == NULL) {
2769 LOGE("allocation of channel failed");
2770 pthread_mutex_unlock(&mMutex);
2771 return -ENOMEM;
2772 }
2773 newStream->priv = (QCamera3ProcessingChannel*)mPictureChannel;
2774 newStream->max_buffers = mPictureChannel->getNumBuffers();
2775 mPictureChannel->overrideYuvSize(
2776 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width,
2777 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height);
Thierry Strudel3d639192016-09-09 11:52:26 -07002778 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002779 break;
2780
2781 default:
2782 LOGE("not a supported format 0x%x", newStream->format);
Thierry Strudel73e91562017-05-15 09:16:18 -07002783 pthread_mutex_unlock(&mMutex);
2784 return -EINVAL;
Thierry Strudel3d639192016-09-09 11:52:26 -07002785 }
2786 } else if (newStream->stream_type == CAMERA3_STREAM_INPUT) {
2787 newStream->max_buffers = MAX_INFLIGHT_REPROCESS_REQUESTS;
2788 } else {
2789 LOGE("Error, Unknown stream type");
2790 pthread_mutex_unlock(&mMutex);
2791 return -EINVAL;
2792 }
2793
2794 QCamera3Channel *channel = (QCamera3Channel*) newStream->priv;
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002795 if (channel != NULL && QCamera3Channel::isUBWCEnabled()) {
Jason Leec4cf5032017-05-24 18:31:41 -07002796 // Here we only care whether it's EIS3 or not
2797 cam_is_type_t isType = m_bEis3PropertyEnabled ? IS_TYPE_EIS_3_0 : IS_TYPE_NONE;
2798 if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
2799 mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2800 isType = IS_TYPE_NONE;
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002801 cam_format_t fmt = QCamera3Channel::getStreamDefaultFormat(
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07002802 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
Jason Leec4cf5032017-05-24 18:31:41 -07002803 newStream->width, newStream->height, forcePreviewUBWC, isType);
Thierry Strudel3d639192016-09-09 11:52:26 -07002804 if(fmt == CAM_FORMAT_YUV_420_NV12_UBWC) {
2805 newStream->usage |= GRALLOC_USAGE_PRIVATE_ALLOC_UBWC;
2806 }
2807 }
2808
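 /* Link the freshly created channel back to the corresponding mStreamInfo entry
  * so that subsequent capture requests can be routed to it. */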
2809 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
2810 it != mStreamInfo.end(); it++) {
2811 if ((*it)->stream == newStream) {
2812 (*it)->channel = (QCamera3ProcessingChannel*) newStream->priv;
2813 break;
2814 }
2815 }
2816 } else {
2817 // Channel already exists for this stream
2818 // Do nothing for now
2819 }
2820 padding_info = gCamCapability[mCameraId]->padding_info;
2821
Emilian Peev7650c122017-01-19 08:24:33 -08002822 /* Do not add entries for input and depth streams in the meta stream info
Thierry Strudel3d639192016-09-09 11:52:26 -07002823 * since there is no real stream associated with them
2824 */
Emilian Peev7650c122017-01-19 08:24:33 -08002825 if ((newStream->stream_type != CAMERA3_STREAM_INPUT) &&
Emilian Peev0f3c3162017-03-15 12:57:46 +00002826 !((newStream->data_space == HAL_DATASPACE_DEPTH) &&
2827 (newStream->format == HAL_PIXEL_FORMAT_BLOB))) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002828 mStreamConfigInfo.num_streams++;
Emilian Peev7650c122017-01-19 08:24:33 -08002829 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002830 }
2831
Chien-Yu Chen3f303522017-05-19 15:21:45 -07002832 // Let buffer dispatcher know the configured streams.
2833 mOutputBufferDispatcher.configureStreams(streamList);
2834
Thierry Strudel2896d122017-02-23 19:18:03 -08002835 if (mOpMode != QCAMERA3_VENDOR_STREAM_CONFIGURATION_RAW_ONLY_MODE) {
2836 onlyRaw = false;
2837 }
2838
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002839 // Create analysis stream all the time, even when h/w support is not available
Thierry Strudel2896d122017-02-23 19:18:03 -08002840 if (!onlyRaw) {
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002841 cam_feature_mask_t analysisFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002842 cam_analysis_info_t analysisInfo;
2843 int32_t ret = NO_ERROR;
2844 ret = mCommon.getAnalysisInfo(
2845 FALSE,
2846 analysisFeatureMask,
2847 &analysisInfo);
2848 if (ret == NO_ERROR) {
Shuzhen Wang3b457d92016-08-03 08:46:59 -07002849 cam_color_filter_arrangement_t analysis_color_arrangement =
2850 (analysisInfo.analysis_format == CAM_FORMAT_Y_ONLY ?
2851 CAM_FILTER_ARRANGEMENT_Y :
2852 gCamCapability[mCameraId]->color_arrangement);
2853 setPAAFSupport(analysisFeatureMask, CAM_STREAM_TYPE_ANALYSIS,
2854 analysis_color_arrangement);
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002855 cam_dimension_t analysisDim;
2856 analysisDim = mCommon.getMatchingDimension(previewSize,
2857 analysisInfo.analysis_recommended_res);
2858
2859 mAnalysisChannel = new QCamera3SupportChannel(
2860 mCameraHandle->camera_handle,
2861 mChannelHandle,
2862 mCameraHandle->ops,
2863 &analysisInfo.analysis_padding_info,
2864 analysisFeatureMask,
2865 CAM_STREAM_TYPE_ANALYSIS,
2866 &analysisDim,
2867 (analysisInfo.analysis_format
2868 == CAM_FORMAT_Y_ONLY ? CAM_FORMAT_Y_ONLY
2869 : CAM_FORMAT_YUV_420_NV21),
2870 analysisInfo.hw_analysis_supported,
2871 gCamCapability[mCameraId]->color_arrangement,
2872 this,
2873 0); // force buffer count to 0
2874 } else {
2875 LOGW("getAnalysisInfo failed, ret = %d", ret);
2876 }
2877 if (!mAnalysisChannel) {
2878 LOGW("Analysis channel cannot be created");
2879 }
2880 }
2881
Thierry Strudel3d639192016-09-09 11:52:26 -07002882 //RAW DUMP channel
2883 if (mEnableRawDump && isRawStreamRequested == false){
2884 cam_dimension_t rawDumpSize;
2885 rawDumpSize = getMaxRawSize(mCameraId);
2886 cam_feature_mask_t rawDumpFeatureMask = CAM_QCOM_FEATURE_NONE;
2887 setPAAFSupport(rawDumpFeatureMask,
2888 CAM_STREAM_TYPE_RAW,
2889 gCamCapability[mCameraId]->color_arrangement);
2890 mRawDumpChannel = new QCamera3RawDumpChannel(mCameraHandle->camera_handle,
2891 mChannelHandle,
2892 mCameraHandle->ops,
2893 rawDumpSize,
2894 &padding_info,
2895 this, rawDumpFeatureMask);
2896 if (!mRawDumpChannel) {
2897 LOGE("Raw Dump channel cannot be created");
2898 pthread_mutex_unlock(&mMutex);
2899 return -ENOMEM;
2900 }
2901 }
2902
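 /* The remaining entries appended to mStreamConfigInfo are internal streams not
  * visible to the framework: analysis, callback (support), RAW dump, the HDR+
  * RAW source, and the dummy batch stream for HFR preview-only sessions. */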
Thierry Strudel3d639192016-09-09 11:52:26 -07002903 if (mAnalysisChannel) {
2904 cam_analysis_info_t analysisInfo;
2905 memset(&analysisInfo, 0, sizeof(cam_analysis_info_t));
2906 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2907 CAM_STREAM_TYPE_ANALYSIS;
2908 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2909 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002910 rc = mCommon.getAnalysisInfo(FALSE,
Thierry Strudel3d639192016-09-09 11:52:26 -07002911 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2912 &analysisInfo);
2913 if (rc != NO_ERROR) {
2914 LOGE("getAnalysisInfo failed, ret = %d", rc);
2915 pthread_mutex_unlock(&mMutex);
2916 return rc;
2917 }
Shuzhen Wang3b457d92016-08-03 08:46:59 -07002918 cam_color_filter_arrangement_t analysis_color_arrangement =
2919 (analysisInfo.analysis_format == CAM_FORMAT_Y_ONLY ?
2920 CAM_FILTER_ARRANGEMENT_Y :
2921 gCamCapability[mCameraId]->color_arrangement);
2922 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2923 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2924 analysis_color_arrangement);
2925
Thierry Strudel3d639192016-09-09 11:52:26 -07002926 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002927 mCommon.getMatchingDimension(previewSize,
2928 analysisInfo.analysis_recommended_res);
Thierry Strudel3d639192016-09-09 11:52:26 -07002929 mStreamConfigInfo.num_streams++;
2930 }
2931
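 /* A dummy callback (support) channel is created when the stream combination
  * requires one (per isSupportChannelNeeded), presumably so the backend still
  * has a processed stream to operate on; it uses the fixed
  * QCamera3SupportChannel::kDim size and is never exposed to the framework. */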
Thierry Strudel2896d122017-02-23 19:18:03 -08002932 if (!onlyRaw && isSupportChannelNeeded(streamList, mStreamConfigInfo)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002933 cam_analysis_info_t supportInfo;
2934 memset(&supportInfo, 0, sizeof(cam_analysis_info_t));
2935 cam_feature_mask_t callbackFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2936 setPAAFSupport(callbackFeatureMask,
2937 CAM_STREAM_TYPE_CALLBACK,
2938 gCamCapability[mCameraId]->color_arrangement);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002939 int32_t ret = NO_ERROR;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002940 ret = mCommon.getAnalysisInfo(FALSE, callbackFeatureMask, &supportInfo);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002941 if (ret != NO_ERROR) {
2942 /* Ignore the error for Mono camera
2943 * because the PAAF bit mask is only set
2944 * for CAM_STREAM_TYPE_ANALYSIS stream type
2945 */
2946 if (gCamCapability[mCameraId]->color_arrangement != CAM_FILTER_ARRANGEMENT_Y) {
2947 LOGW("getAnalysisInfo failed, ret = %d", ret);
2948 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002949 }
2950 mSupportChannel = new QCamera3SupportChannel(
2951 mCameraHandle->camera_handle,
2952 mChannelHandle,
2953 mCameraHandle->ops,
2954 &gCamCapability[mCameraId]->padding_info,
2955 callbackFeatureMask,
2956 CAM_STREAM_TYPE_CALLBACK,
2957 &QCamera3SupportChannel::kDim,
2958 CAM_FORMAT_YUV_420_NV21,
2959 supportInfo.hw_analysis_supported,
2960 gCamCapability[mCameraId]->color_arrangement,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002961 this, 0);
Thierry Strudel3d639192016-09-09 11:52:26 -07002962 if (!mSupportChannel) {
2963 LOGE("dummy channel cannot be created");
2964 pthread_mutex_unlock(&mMutex);
2965 return -ENOMEM;
2966 }
2967 }
2968
2969 if (mSupportChannel) {
2970 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2971 QCamera3SupportChannel::kDim;
2972 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2973 CAM_STREAM_TYPE_CALLBACK;
2974 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2975 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2976 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2977 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2978 gCamCapability[mCameraId]->color_arrangement);
2979 mStreamConfigInfo.num_streams++;
2980 }
2981
2982 if (mRawDumpChannel) {
2983 cam_dimension_t rawSize;
2984 rawSize = getMaxRawSize(mCameraId);
2985 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2986 rawSize;
2987 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2988 CAM_STREAM_TYPE_RAW;
2989 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2990 CAM_QCOM_FEATURE_NONE;
2991 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2992 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2993 gCamCapability[mCameraId]->color_arrangement);
2994 mStreamConfigInfo.num_streams++;
2995 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07002996
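 /* If an HDR+ RAW source channel exists, reserve a matching full-size RAW entry
  * in the backend stream configuration for it as well. */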
2997 if (mHdrPlusRawSrcChannel) {
2998 cam_dimension_t rawSize;
2999 rawSize = getMaxRawSize(mCameraId);
3000 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] = rawSize;
3001 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
3002 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
3003 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
3004 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
3005 gCamCapability[mCameraId]->color_arrangement);
3006 mStreamConfigInfo.num_streams++;
3007 }
3008
Thierry Strudel3d639192016-09-09 11:52:26 -07003009 /* In HFR mode, if video stream is not added, create a dummy channel so that
3010 * ISP can create a batch mode even for preview only case. This channel is
3011 * never 'start'ed (no stream-on), it is only 'initialized' */
3012 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
3013 !m_bIsVideo) {
3014 cam_feature_mask_t dummyFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
3015 setPAAFSupport(dummyFeatureMask,
3016 CAM_STREAM_TYPE_VIDEO,
3017 gCamCapability[mCameraId]->color_arrangement);
3018 mDummyBatchChannel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
3019 mChannelHandle,
3020 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003021 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07003022 this,
3023 &mDummyBatchStream,
3024 CAM_STREAM_TYPE_VIDEO,
3025 dummyFeatureMask,
3026 mMetadataChannel);
3027 if (NULL == mDummyBatchChannel) {
3028 LOGE("creation of mDummyBatchChannel failed."
3029 " Preview will use non-hfr sensor mode");
3030 }
3031 }
3032 if (mDummyBatchChannel) {
3033 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
3034 mDummyBatchStream.width;
3035 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
3036 mDummyBatchStream.height;
3037 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
3038 CAM_STREAM_TYPE_VIDEO;
3039 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
3040 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
3041 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
3042 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
3043 gCamCapability[mCameraId]->color_arrangement);
3044 mStreamConfigInfo.num_streams++;
3045 }
3046
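 /* Overall buffer budget reported to the backend: max_buffers is 0 for 4K video,
  * MAX_VIDEO_BUFFERS for EIS3 video sessions, and MAX_INFLIGHT_REQUESTS otherwise. */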
3047 mStreamConfigInfo.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
3048 mStreamConfigInfo.buffer_info.max_buffers =
Thierry Strudel2896d122017-02-23 19:18:03 -08003049 m_bIs4KVideo ? 0 :
Jason Leea46ad5e2017-07-07 15:20:56 -07003050 m_bEis3PropertyEnabled && m_bIsVideo ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07003051
3052 /* Initialize mPendingRequestInfo and mPendingBuffersMap */
3053 for (pendingRequestIterator i = mPendingRequestsList.begin();
3054 i != mPendingRequestsList.end();) {
3055 i = erasePendingRequest(i);
3056 }
3057 mPendingFrameDropList.clear();
3058 // Initialize/Reset the pending buffers list
3059 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
3060 req.mPendingBufferList.clear();
3061 }
3062 mPendingBuffersMap.mPendingBuffersInRequest.clear();
Emilian Peev30522a12017-08-03 14:36:33 +01003063 mExpectedInflightDuration = 0;
3064 mExpectedFrameDuration = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07003065
Thierry Strudel3d639192016-09-09 11:52:26 -07003066 mCurJpegMeta.clear();
3067 //Get min frame duration for this streams configuration
3068 deriveMinFrameDuration();
3069
Chien-Yu Chenee335912017-02-09 17:53:20 -08003070 mFirstPreviewIntentSeen = false;
3071
Thierry Strudel3d639192016-09-09 11:52:26 -07003072 // Update state
3073 mState = CONFIGURED;
3074
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003075 mFirstMetadataCallback = true;
3076
Thierry Strudel3d639192016-09-09 11:52:26 -07003077 pthread_mutex_unlock(&mMutex);
3078
3079 return rc;
3080}
3081
3082/*===========================================================================
3083 * FUNCTION : validateCaptureRequest
3084 *
3085 * DESCRIPTION: validate a capture request from camera service
3086 *
3087 * PARAMETERS :
3088 * @request : request from framework to process
3089 *
3090 * RETURN :
3091 *
3092 *==========================================================================*/
3093int QCamera3HardwareInterface::validateCaptureRequest(
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003094 camera3_capture_request_t *request,
3095 List<InternalRequest> &internallyRequestedStreams)
Thierry Strudel3d639192016-09-09 11:52:26 -07003096{
3097 ssize_t idx = 0;
3098 const camera3_stream_buffer_t *b;
3099 CameraMetadata meta;
3100
3101 /* Sanity check the request */
3102 if (request == NULL) {
3103 LOGE("NULL capture request");
3104 return BAD_VALUE;
3105 }
3106
3107 if ((request->settings == NULL) && (mState == CONFIGURED)) {
3108 /*settings cannot be null for the first request*/
3109 return BAD_VALUE;
3110 }
3111
3112 uint32_t frameNumber = request->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003113 if ((request->num_output_buffers < 1 || request->output_buffers == NULL)
3114 && (internallyRequestedStreams.size() == 0)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003115 LOGE("%s: Request %d: No output buffers provided!",
3116 __FUNCTION__, frameNumber);
3117 return BAD_VALUE;
3118 }
3119 if (request->num_output_buffers >= MAX_NUM_STREAMS) {
3120 LOGE("Number of buffers %d equals or exceeds maximum number of streams %d!",
3121 request->num_output_buffers, MAX_NUM_STREAMS);
3122 return BAD_VALUE;
3123 }
3124 if (request->input_buffer != NULL) {
3125 b = request->input_buffer;
3126 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
3127 LOGE("Request %d: Buffer %ld: Status not OK!",
3128 frameNumber, (long)idx);
3129 return BAD_VALUE;
3130 }
3131 if (b->release_fence != -1) {
3132 LOGE("Request %d: Buffer %ld: Has a release fence!",
3133 frameNumber, (long)idx);
3134 return BAD_VALUE;
3135 }
3136 if (b->buffer == NULL) {
3137 LOGE("Request %d: Buffer %ld: NULL buffer handle!",
3138 frameNumber, (long)idx);
3139 return BAD_VALUE;
3140 }
3141 }
3142
3143 // Validate all buffers
3144 b = request->output_buffers;
Thierry Strudel54dc9782017-02-15 12:12:10 -08003145 if (b == NULL) {
3146 return BAD_VALUE;
3147 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003148 while (idx < (ssize_t)request->num_output_buffers) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003149 QCamera3ProcessingChannel *channel =
3150 static_cast<QCamera3ProcessingChannel*>(b->stream->priv);
3151 if (channel == NULL) {
3152 LOGE("Request %d: Buffer %ld: Unconfigured stream!",
3153 frameNumber, (long)idx);
3154 return BAD_VALUE;
3155 }
3156 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
3157 LOGE("Request %d: Buffer %ld: Status not OK!",
3158 frameNumber, (long)idx);
3159 return BAD_VALUE;
3160 }
3161 if (b->release_fence != -1) {
3162 LOGE("Request %d: Buffer %ld: Has a release fence!",
3163 frameNumber, (long)idx);
3164 return BAD_VALUE;
3165 }
3166 if (b->buffer == NULL) {
3167 LOGE("Request %d: Buffer %ld: NULL buffer handle!",
3168 frameNumber, (long)idx);
3169 return BAD_VALUE;
3170 }
3171 if (*(b->buffer) == NULL) {
3172 LOGE("Request %d: Buffer %ld: NULL private handle!",
3173 frameNumber, (long)idx);
3174 return BAD_VALUE;
3175 }
3176 idx++;
3177 b = request->output_buffers + idx;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003178 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003179 return NO_ERROR;
3180}
3181
3182/*===========================================================================
3183 * FUNCTION : deriveMinFrameDuration
3184 *
3185 * DESCRIPTION: derive minimum processed, jpeg, and raw frame durations based
3186 * on currently configured streams.
3187 *
3188 * PARAMETERS : NONE
3189 *
3190 * RETURN : NONE
3191 *
3192 *==========================================================================*/
3193void QCamera3HardwareInterface::deriveMinFrameDuration()
3194{
3195 int32_t maxJpegDim, maxProcessedDim, maxRawDim;
Jason Lee2d0ab112017-06-21 18:03:05 -07003196 bool hasRaw = false;
3197
3198 mMinRawFrameDuration = 0;
3199 mMinJpegFrameDuration = 0;
3200 mMinProcessedFrameDuration = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07003201
3202 maxJpegDim = 0;
3203 maxProcessedDim = 0;
3204 maxRawDim = 0;
3205
3206 // Figure out maximum jpeg, processed, and raw dimensions
3207 for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
3208 it != mStreamInfo.end(); it++) {
3209
3210 // Input stream doesn't have valid stream_type
3211 if ((*it)->stream->stream_type == CAMERA3_STREAM_INPUT)
3212 continue;
3213
3214 int32_t dimension = (int32_t)((*it)->stream->width * (*it)->stream->height);
3215 if ((*it)->stream->format == HAL_PIXEL_FORMAT_BLOB) {
3216 if (dimension > maxJpegDim)
3217 maxJpegDim = dimension;
3218 } else if ((*it)->stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
3219 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW10 ||
3220 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW16) {
Jason Lee2d0ab112017-06-21 18:03:05 -07003221 hasRaw = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07003222 if (dimension > maxRawDim)
3223 maxRawDim = dimension;
3224 } else {
3225 if (dimension > maxProcessedDim)
3226 maxProcessedDim = dimension;
3227 }
3228 }
3229
3230 size_t count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt,
3231 MAX_SIZES_CNT);
3232
3233 //Assume all jpeg dimensions are in processed dimensions.
3234 if (maxJpegDim > maxProcessedDim)
3235 maxProcessedDim = maxJpegDim;
3236 //Find the smallest raw dimension that is greater than or equal to the jpeg dimension
Jason Lee2d0ab112017-06-21 18:03:05 -07003237 if (hasRaw && maxProcessedDim > maxRawDim) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003238 maxRawDim = INT32_MAX;
3239
3240 for (size_t i = 0; i < count; i++) {
3241 int32_t dimension = gCamCapability[mCameraId]->raw_dim[i].width *
3242 gCamCapability[mCameraId]->raw_dim[i].height;
3243 if (dimension >= maxProcessedDim && dimension < maxRawDim)
3244 maxRawDim = dimension;
3245 }
3246 }
3247
3248 //Find minimum durations for processed, jpeg, and raw
3249 for (size_t i = 0; i < count; i++) {
3250 if (maxRawDim == gCamCapability[mCameraId]->raw_dim[i].width *
3251 gCamCapability[mCameraId]->raw_dim[i].height) {
3252 mMinRawFrameDuration = gCamCapability[mCameraId]->raw_min_duration[i];
3253 break;
3254 }
3255 }
3256 count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
3257 for (size_t i = 0; i < count; i++) {
3258 if (maxProcessedDim ==
3259 gCamCapability[mCameraId]->picture_sizes_tbl[i].width *
3260 gCamCapability[mCameraId]->picture_sizes_tbl[i].height) {
3261 mMinProcessedFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
3262 mMinJpegFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
3263 break;
3264 }
3265 }
3266}
3267
3268/*===========================================================================
3269 * FUNCTION : getMinFrameDuration
3270 *
3271 * DESCRIPTION: get minimum frame duration based on the previously derived per-stream-type
3272 * minimum frame durations and the current request configuration.
3273 *
3274 * PARAMETERS : @request: request sent by the framework
3275 *
3276 * RETURN : min frame duration for a particular request
3277 *
3278 *==========================================================================*/
3279int64_t QCamera3HardwareInterface::getMinFrameDuration(const camera3_capture_request_t *request)
3280{
3281 bool hasJpegStream = false;
3282 bool hasRawStream = false;
3283 for (uint32_t i = 0; i < request->num_output_buffers; i ++) {
3284 const camera3_stream_t *stream = request->output_buffers[i].stream;
3285 if (stream->format == HAL_PIXEL_FORMAT_BLOB)
3286 hasJpegStream = true;
3287 else if (stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
3288 stream->format == HAL_PIXEL_FORMAT_RAW10 ||
3289 stream->format == HAL_PIXEL_FORMAT_RAW16)
3290 hasRawStream = true;
3291 }
3292
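 /* A request can run no faster than the slowest stream type it contains, so
  * return the max of the applicable per-type minimum durations. */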
3293 if (!hasJpegStream)
3294 return MAX(mMinRawFrameDuration, mMinProcessedFrameDuration);
3295 else
3296 return MAX(MAX(mMinRawFrameDuration, mMinProcessedFrameDuration), mMinJpegFrameDuration);
3297}
3298
3299/*===========================================================================
3300 * FUNCTION : handleBuffersDuringFlushLock
3301 *
3302 * DESCRIPTION: Account for buffers returned from back-end during flush
3303 * This function is executed while mMutex is held by the caller.
3304 *
3305 * PARAMETERS :
3306 * @buffer: image buffer for the callback
3307 *
3308 * RETURN :
3309 *==========================================================================*/
3310void QCamera3HardwareInterface::handleBuffersDuringFlushLock(camera3_stream_buffer_t *buffer)
3311{
3312 bool buffer_found = false;
3313 for (List<PendingBuffersInRequest>::iterator req =
3314 mPendingBuffersMap.mPendingBuffersInRequest.begin();
3315 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
3316 for (List<PendingBufferInfo>::iterator i =
3317 req->mPendingBufferList.begin();
3318 i != req->mPendingBufferList.end(); i++) {
3319 if (i->buffer == buffer->buffer) {
3320 mPendingBuffersMap.numPendingBufsAtFlush--;
3321 LOGD("Found buffer %p for Frame %d, numPendingBufsAtFlush = %d",
3322 buffer->buffer, req->frame_number,
3323 mPendingBuffersMap.numPendingBufsAtFlush);
3324 buffer_found = true;
3325 break;
3326 }
3327 }
3328 if (buffer_found) {
3329 break;
3330 }
3331 }
3332 if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
3333 //signal the flush()
3334 LOGD("All buffers returned to HAL. Continue flush");
3335 pthread_cond_signal(&mBuffersCond);
3336 }
3337}
3338
Thierry Strudel3d639192016-09-09 11:52:26 -07003339/*===========================================================================
3340 * FUNCTION : handleBatchMetadata
3341 *
3342 * DESCRIPTION: Handles metadata buffer callback in batch mode
3343 *
3344 * PARAMETERS : @metadata_buf: metadata buffer
3345 * @free_and_bufdone_meta_buf: Buf done on the meta buf and free
3346 * the meta buf in this method
3347 *
3348 * RETURN :
3349 *
3350 *==========================================================================*/
3351void QCamera3HardwareInterface::handleBatchMetadata(
3352 mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf)
3353{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003354 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_BATCH_METADATA);
Thierry Strudel3d639192016-09-09 11:52:26 -07003355
3356 if (NULL == metadata_buf) {
3357 LOGE("metadata_buf is NULL");
3358 return;
3359 }
3360 /* In batch mode, the metadata will contain the frame number and timestamp of
3361 * the last frame in the batch. E.g.: a batch containing buffers from requests
3362 * 5, 6, 7 and 8 will have the frame number and timestamp corresponding to 8.
3363 * multiple process_capture_requests => 1 set_param => 1 handleBatchMetadata =>
3364 * multiple process_capture_results */
3365 metadata_buffer_t *metadata =
3366 (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3367 int32_t frame_number_valid = 0, urgent_frame_number_valid = 0;
3368 uint32_t last_frame_number = 0, last_urgent_frame_number = 0;
3369 uint32_t first_frame_number = 0, first_urgent_frame_number = 0;
3370 uint32_t frame_number = 0, urgent_frame_number = 0;
3371 int64_t last_frame_capture_time = 0, first_frame_capture_time, capture_time;
3372 bool invalid_metadata = false;
3373 size_t urgentFrameNumDiff = 0, frameNumDiff = 0;
3374 size_t loopCount = 1;
Thierry Strudel54dc9782017-02-15 12:12:10 -08003375 bool is_metabuf_queued = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07003376
3377 int32_t *p_frame_number_valid =
3378 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3379 uint32_t *p_frame_number =
3380 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3381 int64_t *p_capture_time =
3382 POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
3383 int32_t *p_urgent_frame_number_valid =
3384 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
3385 uint32_t *p_urgent_frame_number =
3386 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
3387
3388 if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) ||
3389 (NULL == p_capture_time) || (NULL == p_urgent_frame_number_valid) ||
3390 (NULL == p_urgent_frame_number)) {
3391 LOGE("Invalid metadata");
3392 invalid_metadata = true;
3393 } else {
3394 frame_number_valid = *p_frame_number_valid;
3395 last_frame_number = *p_frame_number;
3396 last_frame_capture_time = *p_capture_time;
3397 urgent_frame_number_valid = *p_urgent_frame_number_valid;
3398 last_urgent_frame_number = *p_urgent_frame_number;
3399 }
3400
3401 /* In batch mode, when no video buffers are requested, set_parms are sent
3402 * for every capture_request. The difference between consecutive urgent
3403 * frame numbers and frame numbers should be used to interpolate the
3404 * corresponding frame numbers and time stamps */
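 /* Example: for a batch carrying requests 5..8, this metadata reports frame
  * number 8; first_frame_number is looked up as 5, frameNumDiff becomes 4, and
  * frames 5, 6 and 7 are re-synthesized below by offsetting the reported frame
  * number and timestamp. */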
3405 pthread_mutex_lock(&mMutex);
3406 if (urgent_frame_number_valid) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003407 ssize_t idx = mPendingBatchMap.indexOfKey(last_urgent_frame_number);
3408 if(idx < 0) {
3409 LOGE("Invalid urgent frame number received: %d. Irrecoverable error",
3410 last_urgent_frame_number);
3411 mState = ERROR;
3412 pthread_mutex_unlock(&mMutex);
3413 return;
3414 }
3415 first_urgent_frame_number = mPendingBatchMap.valueAt(idx);
Thierry Strudel3d639192016-09-09 11:52:26 -07003416 urgentFrameNumDiff = last_urgent_frame_number + 1 -
3417 first_urgent_frame_number;
3418
3419 LOGD("urgent_frm: valid: %d frm_num: %d - %d",
3420 urgent_frame_number_valid,
3421 first_urgent_frame_number, last_urgent_frame_number);
3422 }
3423
3424 if (frame_number_valid) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003425 ssize_t idx = mPendingBatchMap.indexOfKey(last_frame_number);
3426 if(idx < 0) {
3427 LOGE("Invalid frame number received: %d. Irrecoverable error",
3428 last_frame_number);
3429 mState = ERROR;
3430 pthread_mutex_unlock(&mMutex);
3431 return;
3432 }
3433 first_frame_number = mPendingBatchMap.valueAt(idx);
Thierry Strudel3d639192016-09-09 11:52:26 -07003434 frameNumDiff = last_frame_number + 1 -
3435 first_frame_number;
3436 mPendingBatchMap.removeItem(last_frame_number);
3437
3438 LOGD("frm: valid: %d frm_num: %d - %d",
3439 frame_number_valid,
3440 first_frame_number, last_frame_number);
3441
3442 }
3443 pthread_mutex_unlock(&mMutex);
3444
3445 if (urgent_frame_number_valid || frame_number_valid) {
3446 loopCount = MAX(urgentFrameNumDiff, frameNumDiff);
3447 if (urgentFrameNumDiff > MAX_HFR_BATCH_SIZE)
3448 LOGE("urgentFrameNumDiff: %d urgentFrameNum: %d",
3449 urgentFrameNumDiff, last_urgent_frame_number);
3450 if (frameNumDiff > MAX_HFR_BATCH_SIZE)
3451 LOGE("frameNumDiff: %d frameNum: %d",
3452 frameNumDiff, last_frame_number);
3453 }
3454
3455 for (size_t i = 0; i < loopCount; i++) {
3456 /* handleMetadataWithLock is called even for invalid_metadata for
3457 * pipeline depth calculation */
3458 if (!invalid_metadata) {
3459 /* Infer frame number. Batch metadata contains frame number of the
3460 * last frame */
3461 if (urgent_frame_number_valid) {
3462 if (i < urgentFrameNumDiff) {
3463 urgent_frame_number =
3464 first_urgent_frame_number + i;
3465 LOGD("inferred urgent frame_number: %d",
3466 urgent_frame_number);
3467 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3468 CAM_INTF_META_URGENT_FRAME_NUMBER, urgent_frame_number);
3469 } else {
3470 /* This is to handle when urgentFrameNumDiff < frameNumDiff */
3471 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3472 CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, 0);
3473 }
3474 }
3475
3476 /* Infer frame number. Batch metadata contains frame number of the
3477 * last frame */
3478 if (frame_number_valid) {
3479 if (i < frameNumDiff) {
3480 frame_number = first_frame_number + i;
3481 LOGD("inferred frame_number: %d", frame_number);
3482 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3483 CAM_INTF_META_FRAME_NUMBER, frame_number);
3484 } else {
3485 /* This is to handle when urgentFrameNumDiff > frameNumDiff */
3486 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3487 CAM_INTF_META_FRAME_NUMBER_VALID, 0);
3488 }
3489 }
3490
3491 if (last_frame_capture_time) {
3492 //Infer timestamp
3493 first_frame_capture_time = last_frame_capture_time -
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003494 (((loopCount - 1) * NSEC_PER_SEC) / (double) mHFRVideoFps);
Thierry Strudel3d639192016-09-09 11:52:26 -07003495 capture_time =
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003496 first_frame_capture_time + (i * NSEC_PER_SEC / (double) mHFRVideoFps);
Thierry Strudel3d639192016-09-09 11:52:26 -07003497 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3498 CAM_INTF_META_SENSOR_TIMESTAMP, capture_time);
3499 LOGD("batch capture_time: %lld, capture_time: %lld",
3500 last_frame_capture_time, capture_time);
3501 }
3502 }
3503 pthread_mutex_lock(&mMutex);
3504 handleMetadataWithLock(metadata_buf,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003505 false /* free_and_bufdone_meta_buf */,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003506 (i == urgentFrameNumDiff-1), /* last urgent metadata in the batch */
3507 (i == frameNumDiff-1), /* last metadata in the batch metadata */
Thierry Strudel54dc9782017-02-15 12:12:10 -08003508 &is_metabuf_queued /* whether the metabuf is queued or not */);
Thierry Strudel3d639192016-09-09 11:52:26 -07003509 pthread_mutex_unlock(&mMutex);
3510 }
3511
3512 /* BufDone metadata buffer */
Thierry Strudel54dc9782017-02-15 12:12:10 -08003513 if (free_and_bufdone_meta_buf && !is_metabuf_queued) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003514 mMetadataChannel->bufDone(metadata_buf);
3515 free(metadata_buf);
Thierry Strudel54dc9782017-02-15 12:12:10 -08003516 metadata_buf = NULL;
Thierry Strudel3d639192016-09-09 11:52:26 -07003517 }
3518}
3519
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003520void QCamera3HardwareInterface::notifyError(uint32_t frameNumber,
3521 camera3_error_msg_code_t errorCode)
3522{
3523 camera3_notify_msg_t notify_msg;
3524 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
3525 notify_msg.type = CAMERA3_MSG_ERROR;
3526 notify_msg.message.error.error_code = errorCode;
3527 notify_msg.message.error.error_stream = NULL;
3528 notify_msg.message.error.frame_number = frameNumber;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003529 orchestrateNotify(&notify_msg);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003530
3531 return;
3532}
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003533
3534/*===========================================================================
3535 * FUNCTION : sendPartialMetadataWithLock
3536 *
3537 * DESCRIPTION: Send partial capture result callback with mMutex lock held.
3538 *
3539 * PARAMETERS : @metadata: metadata buffer
3540 * @requestIter: The iterator for the pending capture request for
3541 * which the partial result is being sent
3542 * @lastUrgentMetadataInBatch: Boolean to indicate whether this is the
3543 * last urgent metadata in a batch. Always true for non-batch mode
Shuzhen Wang485e2442017-08-02 12:21:08 -07003544 * @isJumpstartMetadata: Whether this is a partial metadata for
3545 * jumpstart, i.e. even though it doesn't map to a valid partial
3546 * frame number, its metadata entries should be kept.
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003547 *
3548 * RETURN :
3549 *
3550 *==========================================================================*/
3551
3552void QCamera3HardwareInterface::sendPartialMetadataWithLock(
3553 metadata_buffer_t *metadata,
3554 const pendingRequestIterator requestIter,
Shuzhen Wang485e2442017-08-02 12:21:08 -07003555 bool lastUrgentMetadataInBatch,
3556 bool isJumpstartMetadata)
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003557{
3558 camera3_capture_result_t result;
3559 memset(&result, 0, sizeof(camera3_capture_result_t));
3560
3561 requestIter->partial_result_cnt++;
3562
3563 // Extract 3A metadata
3564 result.result = translateCbUrgentMetadataToResultMetadata(
Shuzhen Wang485e2442017-08-02 12:21:08 -07003565 metadata, lastUrgentMetadataInBatch, requestIter->frame_number,
3566 isJumpstartMetadata);
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003567 // Populate metadata result
3568 result.frame_number = requestIter->frame_number;
3569 result.num_output_buffers = 0;
3570 result.output_buffers = NULL;
3571 result.partial_result = requestIter->partial_result_cnt;
3572
3573 {
Chien-Yu Chen77ccd022017-06-23 12:00:36 -07003574 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003575 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
3576 // Notify HDR+ client about the partial metadata.
3577 gHdrPlusClient->notifyFrameMetadata(result.frame_number, *result.result,
3578 result.partial_result == PARTIAL_RESULT_COUNT);
3579 }
3580 }
3581
3582 orchestrateResult(&result);
3583 LOGD("urgent frame_number = %u", result.frame_number);
3584 free_camera_metadata((camera_metadata_t *)result.result);
3585}
3586
Thierry Strudel3d639192016-09-09 11:52:26 -07003587/*===========================================================================
3588 * FUNCTION : handleMetadataWithLock
3589 *
3590 * DESCRIPTION: Handles metadata buffer callback with mMutex lock held.
3591 *
3592 * PARAMETERS : @metadata_buf: metadata buffer
3593 * @free_and_bufdone_meta_buf: Buf done on the meta buf and free
3594 * the meta buf in this method
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003595 * @lastUrgentMetadataInBatch: Boolean to indicate whether this is the
3596 * last urgent metadata in a batch. Always true for non-batch mode
3597 * @lastMetadataInBatch: Boolean to indicate whether this is the
3598 * last metadata in a batch. Always true for non-batch mode
Thierry Strudel54dc9782017-02-15 12:12:10 -08003599 * @p_is_metabuf_queued: Pointer to Boolean to check if metadata
3600 * buffer is enqueued or not.
Thierry Strudel3d639192016-09-09 11:52:26 -07003601 *
3602 * RETURN :
3603 *
3604 *==========================================================================*/
3605void QCamera3HardwareInterface::handleMetadataWithLock(
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003606 mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003607 bool lastUrgentMetadataInBatch, bool lastMetadataInBatch,
3608 bool *p_is_metabuf_queued)
Thierry Strudel3d639192016-09-09 11:52:26 -07003609{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003610 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_METADATA_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07003611 if ((mFlushPerf) || (ERROR == mState) || (DEINIT == mState)) {
3612 //during flush do not send metadata from this thread
3613 LOGD("not sending metadata during flush or when mState is error");
3614 if (free_and_bufdone_meta_buf) {
3615 mMetadataChannel->bufDone(metadata_buf);
3616 free(metadata_buf);
3617 }
3618 return;
3619 }
3620
3621 //not in flush
3622 metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3623 int32_t frame_number_valid, urgent_frame_number_valid;
3624 uint32_t frame_number, urgent_frame_number;
Jason Lee603176d2017-05-31 11:43:27 -07003625 int64_t capture_time, capture_time_av;
Thierry Strudel3d639192016-09-09 11:52:26 -07003626 nsecs_t currentSysTime;
3627
3628 int32_t *p_frame_number_valid =
3629 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3630 uint32_t *p_frame_number = POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3631 int64_t *p_capture_time = POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
Jason Lee603176d2017-05-31 11:43:27 -07003632 int64_t *p_capture_time_av = POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP_AV, metadata);
Thierry Strudel3d639192016-09-09 11:52:26 -07003633 int32_t *p_urgent_frame_number_valid =
3634 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
3635 uint32_t *p_urgent_frame_number =
3636 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
3637 IF_META_AVAILABLE(cam_stream_ID_t, p_cam_frame_drop, CAM_INTF_META_FRAME_DROPPED,
3638 metadata) {
3639 LOGD("Dropped frame info for frame_number_valid %d, frame_number %d",
3640 *p_frame_number_valid, *p_frame_number);
3641 }
3642
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003643 camera_metadata_t *resultMetadata = nullptr;
3644
Thierry Strudel3d639192016-09-09 11:52:26 -07003645 if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) || (NULL == p_capture_time) ||
3646 (NULL == p_urgent_frame_number_valid) || (NULL == p_urgent_frame_number)) {
3647 LOGE("Invalid metadata");
3648 if (free_and_bufdone_meta_buf) {
3649 mMetadataChannel->bufDone(metadata_buf);
3650 free(metadata_buf);
3651 }
3652 goto done_metadata;
3653 }
3654 frame_number_valid = *p_frame_number_valid;
3655 frame_number = *p_frame_number;
3656 capture_time = *p_capture_time;
Jason Lee603176d2017-05-31 11:43:27 -07003657 capture_time_av = *p_capture_time_av;
Thierry Strudel3d639192016-09-09 11:52:26 -07003658 urgent_frame_number_valid = *p_urgent_frame_number_valid;
3659 urgent_frame_number = *p_urgent_frame_number;
3660 currentSysTime = systemTime(CLOCK_MONOTONIC);
3661
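 /* When the sensor timestamp is not calibrated to the monotonic clock, estimate
  * the boottime-to-monotonic offset by bracketing a BOOTTIME read between two
  * MONOTONIC reads (best of three attempts) and subtract that offset from the
  * capture time. */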
Jason Lee603176d2017-05-31 11:43:27 -07003662 if (!gCamCapability[mCameraId]->timestamp_calibrated) {
3663 const int tries = 3;
3664 nsecs_t bestGap, measured;
3665 for (int i = 0; i < tries; ++i) {
3666 const nsecs_t tmono = systemTime(SYSTEM_TIME_MONOTONIC);
3667 const nsecs_t tbase = systemTime(SYSTEM_TIME_BOOTTIME);
3668 const nsecs_t tmono2 = systemTime(SYSTEM_TIME_MONOTONIC);
3669 const nsecs_t gap = tmono2 - tmono;
3670 if (i == 0 || gap < bestGap) {
3671 bestGap = gap;
3672 measured = tbase - ((tmono + tmono2) >> 1);
3673 }
3674 }
3675 capture_time -= measured;
3676 }
3677
Thierry Strudel3d639192016-09-09 11:52:26 -07003678 // Detect if buffers from any requests are overdue
3679 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003680 int64_t timeout;
3681 {
3682 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
3683 // If there is a pending HDR+ request, the following requests may be blocked until the
3684 // HDR+ request is done. So allow a longer timeout.
3685 timeout = (mHdrPlusPendingRequests.size() > 0) ?
3686 MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT : MISSING_REQUEST_BUF_TIMEOUT;
Emilian Peev30522a12017-08-03 14:36:33 +01003687 if (timeout < mExpectedInflightDuration) {
3688 timeout = mExpectedInflightDuration;
3689 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003690 }
3691
3692 if ( (currentSysTime - req.timestamp) > s2ns(timeout) ) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003693 for (auto &missed : req.mPendingBufferList) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003694 assert(missed.stream->priv);
3695 if (missed.stream->priv) {
3696 QCamera3Channel *ch = (QCamera3Channel *)(missed.stream->priv);
3697 assert(ch->mStreams[0]);
3698 if (ch->mStreams[0]) {
3699 LOGE("Cancel missing frame = %d, buffer = %p,"
3700 "stream type = %d, stream format = %d",
3701 req.frame_number, missed.buffer,
3702 ch->mStreams[0]->getMyType(), missed.stream->format);
3703 ch->timeoutFrame(req.frame_number);
3704 }
3705 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003706 }
3707 }
3708 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003709 //For the very first metadata callback, regardless of whether it contains a valid
3710 //frame number, send the partial metadata for the jumpstarting requests.
3711 //Note that this has to be done even if the metadata doesn't contain valid
3712 //urgent frame number, because in the case only 1 request is ever submitted
3713 //to HAL, there won't be subsequent valid urgent frame number.
3714 if (mFirstMetadataCallback) {
3715 for (pendingRequestIterator i =
3716 mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
3717 if (i->bUseFirstPartial) {
Shuzhen Wang485e2442017-08-02 12:21:08 -07003718 sendPartialMetadataWithLock(metadata, i, lastUrgentMetadataInBatch,
3719 true /*isJumpstartMetadata*/);
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003720 }
3721 }
3722 mFirstMetadataCallback = false;
3723 }
3724
Thierry Strudel3d639192016-09-09 11:52:26 -07003725 //Partial result on process_capture_result for timestamp
3726 if (urgent_frame_number_valid) {
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003727 LOGD("valid urgent frame_number = %u", urgent_frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003728
3729 //Received an urgent frame number, handle it
3730 //using partial results
3731 for (pendingRequestIterator i =
3732 mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
3733 LOGD("Iterator Frame = %d urgent frame = %d",
3734 i->frame_number, urgent_frame_number);
3735
Chien-Yu Chen29fd1d72017-04-27 18:42:09 -07003736 if ((!i->input_buffer) && (!i->hdrplus) && (i->frame_number < urgent_frame_number) &&
Thierry Strudel3d639192016-09-09 11:52:26 -07003737 (i->partial_result_cnt == 0)) {
3738 LOGE("Error: HAL missed urgent metadata for frame number %d",
3739 i->frame_number);
Shuzhen Wang1ee712a2017-03-22 17:51:26 -07003740 i->partial_result_cnt++;
Thierry Strudel3d639192016-09-09 11:52:26 -07003741 }
3742
3743 if (i->frame_number == urgent_frame_number &&
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003744 i->partial_result_cnt == 0) {
Shuzhen Wang485e2442017-08-02 12:21:08 -07003745 sendPartialMetadataWithLock(metadata, i, lastUrgentMetadataInBatch,
3746 false /*isJumpstartMetadata*/);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003747 if (mResetInstantAEC && mInstantAECSettledFrameNumber == 0) {
3748 // Instant AEC settled for this frame.
3749 LOGH("instant AEC settled for frame number %d", urgent_frame_number);
3750 mInstantAECSettledFrameNumber = urgent_frame_number;
3751 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003752 break;
3753 }
3754 }
3755 }
3756
3757 if (!frame_number_valid) {
3758 LOGD("Not a valid normal frame number, used as SOF only");
3759 if (free_and_bufdone_meta_buf) {
3760 mMetadataChannel->bufDone(metadata_buf);
3761 free(metadata_buf);
3762 }
3763 goto done_metadata;
3764 }
3765 LOGH("valid frame_number = %u, capture_time = %lld",
3766 frame_number, capture_time);
3767
Emilian Peev4e0fe952017-06-30 12:40:09 -07003768 handleDepthDataLocked(metadata->depth_data, frame_number,
3769 metadata->is_depth_data_valid);
Emilian Peev7650c122017-01-19 08:24:33 -08003770
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003771 // Check whether any stream buffer corresponding to this is dropped or not
3772 // If dropped, then send the ERROR_BUFFER for the corresponding stream
3773 // OR, if instant AEC is enabled, frames need to be dropped until AEC has settled.
3774 for (auto & pendingRequest : mPendingRequestsList) {
3775 if (p_cam_frame_drop || (mInstantAEC || pendingRequest.frame_number <
3776 mInstantAECSettledFrameNumber)) {
3777 camera3_notify_msg_t notify_msg = {};
3778 for (auto & buffer : pendingRequest.buffers) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003779 bool dropFrame = false;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003780 QCamera3ProcessingChannel *channel =
3781 (QCamera3ProcessingChannel *)buffer.stream->priv;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003782 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003783 if (p_cam_frame_drop) {
3784 for (uint32_t k = 0; k < p_cam_frame_drop->num_streams; k++) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003785 if (streamID == p_cam_frame_drop->stream_request[k].streamID) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003786 // Got the stream ID for drop frame.
3787 dropFrame = true;
3788 break;
3789 }
3790 }
3791 } else {
3792 // This is instant AEC case.
3793 // For instant AEC, drop the stream until AEC is settled.
3794 dropFrame = true;
3795 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003796
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003797 if (dropFrame) {
3798 // Send Error notify to frameworks with CAMERA3_MSG_ERROR_BUFFER
3799 if (p_cam_frame_drop) {
3800 // Treat msg as error for system buffer drops
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003801 LOGE("Start of reporting error frame#=%u, streamID=%u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003802 pendingRequest.frame_number, streamID);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003803 } else {
3804 // For instant AEC, inform frame drop and frame number
3805 LOGH("Start of reporting error frame#=%u for instant AEC, streamID=%u, "
3806 "AEC settled frame number = %u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003807 pendingRequest.frame_number, streamID,
3808 mInstantAECSettledFrameNumber);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003809 }
3810 notify_msg.type = CAMERA3_MSG_ERROR;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003811 notify_msg.message.error.frame_number = pendingRequest.frame_number;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003812 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER ;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003813 notify_msg.message.error.error_stream = buffer.stream;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003814 orchestrateNotify(&notify_msg);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003815 if (p_cam_frame_drop) {
3816 // Treat msg as error for system buffer drops
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003817 LOGE("End of reporting error frame#=%u, streamID=%u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003818 pendingRequest.frame_number, streamID);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003819 } else {
3820 // For instant AEC, inform frame drop and frame number
3821 LOGH("End of reporting error frame#=%u for instant AEC, streamID=%u, "
3822 "AEC settled frame number = %u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003823 pendingRequest.frame_number, streamID,
3824 mInstantAECSettledFrameNumber);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003825 }
3826 PendingFrameDropInfo PendingFrameDrop;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003827 PendingFrameDrop.frame_number = pendingRequest.frame_number;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003828 PendingFrameDrop.stream_ID = streamID;
3829 // Add the Frame drop info to mPendingFrameDropList
3830 mPendingFrameDropList.push_back(PendingFrameDrop);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003831 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003832 }
3833 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003834 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003835
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003836 for (auto & pendingRequest : mPendingRequestsList) {
3837 // Find the pending request with the frame number.
3838 if (pendingRequest.frame_number == frame_number) {
3839 // Update the sensor timestamp.
3840 pendingRequest.timestamp = capture_time;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003841
Thierry Strudel3d639192016-09-09 11:52:26 -07003842
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07003843 /* Set the timestamp in display metadata so that clients that are aware of
3844 private_handle, such as VT, can use these unmodified timestamps.
3845 The camera framework is unaware of this timestamp and cannot change it. */
Jason Lee603176d2017-05-31 11:43:27 -07003846 updateTimeStampInPendingBuffers(pendingRequest.frame_number, capture_time_av);
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07003847
Thierry Strudel3d639192016-09-09 11:52:26 -07003848 // Find channel requiring metadata, meaning internal offline postprocess
3849 // is needed.
3850 //TODO: for now, we don't support two streams requiring metadata at the same time.
3851 // (because we are not making copies, and the metadata buffer is not reference counted).
3852 bool internalPproc = false;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003853 for (pendingBufferIterator iter = pendingRequest.buffers.begin();
3854 iter != pendingRequest.buffers.end(); iter++) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003855 if (iter->need_metadata) {
3856 internalPproc = true;
3857 QCamera3ProcessingChannel *channel =
3858 (QCamera3ProcessingChannel *)iter->stream->priv;
3859 channel->queueReprocMetadata(metadata_buf);
Thierry Strudel54dc9782017-02-15 12:12:10 -08003860 if(p_is_metabuf_queued != NULL) {
3861 *p_is_metabuf_queued = true;
3862 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003863 break;
3864 }
3865 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003866 for (auto itr = pendingRequest.internalRequestList.begin();
3867 itr != pendingRequest.internalRequestList.end(); itr++) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003868 if (itr->need_metadata) {
3869 internalPproc = true;
3870 QCamera3ProcessingChannel *channel =
3871 (QCamera3ProcessingChannel *)itr->stream->priv;
3872 channel->queueReprocMetadata(metadata_buf);
3873 break;
3874 }
3875 }
3876
Thierry Strudel54dc9782017-02-15 12:12:10 -08003877 saveExifParams(metadata);
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07003878
3879 bool *enableZsl = nullptr;
3880 if (gExposeEnableZslKey) {
3881 enableZsl = &pendingRequest.enableZsl;
3882 }
3883
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003884 resultMetadata = translateFromHalMetadata(metadata,
Shuzhen Wang181c57b2017-07-21 11:39:44 -07003885 pendingRequest, internalPproc,
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07003886 lastMetadataInBatch, enableZsl);
Thierry Strudel3d639192016-09-09 11:52:26 -07003887
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003888 updateFpsInPreviewBuffer(metadata, pendingRequest.frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003889
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003890 if (pendingRequest.blob_request) {
3891 //Dump tuning metadata if enabled and available
3892 char prop[PROPERTY_VALUE_MAX];
3893 memset(prop, 0, sizeof(prop));
3894 property_get("persist.camera.dumpmetadata", prop, "0");
3895 int32_t enabled = atoi(prop);
3896 if (enabled && metadata->is_tuning_params_valid) {
3897 dumpMetadataToFile(metadata->tuning_params,
3898 mMetaFrameCount,
3899 enabled,
3900 "Snapshot",
3901 frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003902 }
3903 }
3904
3905 if (!internalPproc) {
3906 LOGD("couldn't find need_metadata for this metadata");
3907 // Return metadata buffer
3908 if (free_and_bufdone_meta_buf) {
3909 mMetadataChannel->bufDone(metadata_buf);
3910 free(metadata_buf);
3911 }
3912 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003913
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003914 break;
Thierry Strudel3d639192016-09-09 11:52:26 -07003915 }
3916 }
3917
Chien-Yu Chen3f303522017-05-19 15:21:45 -07003918 mShutterDispatcher.markShutterReady(frame_number, capture_time);
3919
3920 // Try to send out capture result metadata.
3921 handlePendingResultMetadataWithLock(frame_number, resultMetadata);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003922 return;
3923
Thierry Strudel3d639192016-09-09 11:52:26 -07003924done_metadata:
3925 for (pendingRequestIterator i = mPendingRequestsList.begin();
3926 i != mPendingRequestsList.end() ;i++) {
3927 i->pipeline_depth++;
3928 }
3929 LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
3930 unblockRequestIfNecessary();
3931}
3932
3933/*===========================================================================
Emilian Peev7650c122017-01-19 08:24:33 -08003934 * FUNCTION : handleDepthDataLocked
3935 *
3936 * DESCRIPTION: Handles incoming depth data
3937 *
3938 * PARAMETERS : @depthData : Depth data
3939 * @frameNumber: Frame number of the incoming depth data
Emilian Peev4e0fe952017-06-30 12:40:09 -07003940 * @valid : Valid flag for the incoming data
Emilian Peev7650c122017-01-19 08:24:33 -08003941 *
3942 * RETURN :
3943 *
3944 *==========================================================================*/
3945void QCamera3HardwareInterface::handleDepthDataLocked(
Emilian Peev4e0fe952017-06-30 12:40:09 -07003946 const cam_depth_data_t &depthData, uint32_t frameNumber, uint8_t valid) {
Emilian Peev7650c122017-01-19 08:24:33 -08003947 uint32_t currentFrameNumber;
3948 buffer_handle_t *depthBuffer;
3949
3950 if (nullptr == mDepthChannel) {
Emilian Peev7650c122017-01-19 08:24:33 -08003951 return;
3952 }
3953
3954 camera3_stream_buffer_t resultBuffer =
3955 {.acquire_fence = -1,
3956 .release_fence = -1,
3957 .status = CAMERA3_BUFFER_STATUS_OK,
3958 .buffer = nullptr,
3959 .stream = mDepthChannel->getStream()};
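    // Drain the depth channel's queued buffers in order. Buffers older than the incoming
    // frame number have no depth data and are returned with an error status (plus an
    // ERROR_BUFFER notify); the buffer matching this frame number is populated with the
    // incoming depth data, or marked as an error when the data is not valid.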
Emilian Peev7650c122017-01-19 08:24:33 -08003960 do {
3961 depthBuffer = mDepthChannel->getOldestFrame(currentFrameNumber);
3962 if (nullptr == depthBuffer) {
3963 break;
3964 }
3965
Emilian Peev7650c122017-01-19 08:24:33 -08003966 resultBuffer.buffer = depthBuffer;
3967 if (currentFrameNumber == frameNumber) {
Emilian Peev4e0fe952017-06-30 12:40:09 -07003968 if (valid) {
3969 int32_t rc = mDepthChannel->populateDepthData(depthData,
3970 frameNumber);
3971 if (NO_ERROR != rc) {
3972 resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
3973 } else {
3974 resultBuffer.status = CAMERA3_BUFFER_STATUS_OK;
3975 }
Emilian Peev7650c122017-01-19 08:24:33 -08003976 } else {
Emilian Peev4e0fe952017-06-30 12:40:09 -07003977 resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
Emilian Peev7650c122017-01-19 08:24:33 -08003978 }
3979 } else if (currentFrameNumber > frameNumber) {
3980 break;
3981 } else {
3982 camera3_notify_msg_t notify_msg = {.type = CAMERA3_MSG_ERROR,
3983 {{currentFrameNumber, mDepthChannel->getStream(),
3984 CAMERA3_MSG_ERROR_BUFFER}}};
3985 orchestrateNotify(&notify_msg);
3986
3987 LOGE("Depth buffer for frame number: %d is missing "
3988 "returning back!", currentFrameNumber);
3989 resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
3990 }
3991 mDepthChannel->unmapBuffer(currentFrameNumber);
Chien-Yu Chen3f303522017-05-19 15:21:45 -07003992 mOutputBufferDispatcher.markBufferReady(currentFrameNumber, resultBuffer);
Emilian Peev7650c122017-01-19 08:24:33 -08003993 } while (currentFrameNumber < frameNumber);
3994}
3995
3996/*===========================================================================
3997 * FUNCTION : notifyErrorFoPendingDepthData
3998 *
3999 * DESCRIPTION: Returns error for any pending depth buffers
4000 *
4001 * PARAMETERS : depthCh - depth channel that needs to get flushed
4002 *
4003 * RETURN :
4004 *
4005 *==========================================================================*/
4006void QCamera3HardwareInterface::notifyErrorFoPendingDepthData(
4007 QCamera3DepthChannel *depthCh) {
4008 uint32_t currentFrameNumber;
4009 buffer_handle_t *depthBuffer;
4010
4011 if (nullptr == depthCh) {
4012 return;
4013 }
4014
4015 camera3_notify_msg_t notify_msg =
4016 {.type = CAMERA3_MSG_ERROR,
4017 {{0, depthCh->getStream(), CAMERA3_MSG_ERROR_BUFFER}}};
4018 camera3_stream_buffer_t resultBuffer =
4019 {.acquire_fence = -1,
4020 .release_fence = -1,
4021 .buffer = nullptr,
4022 .stream = depthCh->getStream(),
4023 .status = CAMERA3_BUFFER_STATUS_ERROR};
Emilian Peev7650c122017-01-19 08:24:33 -08004024
4025 while (nullptr !=
4026 (depthBuffer = depthCh->getOldestFrame(currentFrameNumber))) {
4027 depthCh->unmapBuffer(currentFrameNumber);
4028
4029 notify_msg.message.error.frame_number = currentFrameNumber;
4030 orchestrateNotify(&notify_msg);
4031
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004032 mOutputBufferDispatcher.markBufferReady(currentFrameNumber, resultBuffer);
Emilian Peev7650c122017-01-19 08:24:33 -08004033 };
4034}
4035
4036/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07004037 * FUNCTION : hdrPlusPerfLock
4038 *
4039 * DESCRIPTION: perf lock for HDR+ using custom intent
4040 *
4041 * PARAMETERS : @metadata_buf: Metadata super_buf pointer
4042 *
4043 * RETURN : None
4044 *
4045 *==========================================================================*/
4046void QCamera3HardwareInterface::hdrPlusPerfLock(
4047 mm_camera_super_buf_t *metadata_buf)
4048{
4049 if (NULL == metadata_buf) {
4050 LOGE("metadata_buf is NULL");
4051 return;
4052 }
4053 metadata_buffer_t *metadata =
4054 (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
4055 int32_t *p_frame_number_valid =
4056 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
4057 uint32_t *p_frame_number =
4058 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
4059
4060 if (p_frame_number_valid == NULL || p_frame_number == NULL) {
4061 LOGE("%s: Invalid metadata", __func__);
4062 return;
4063 }
4064
Wei Wang01385482017-08-03 10:49:34 -07004065 //acquire perf lock for 2 secs after the last HDR frame is captured
4066 constexpr uint32_t HDR_PLUS_PERF_TIME_OUT = 2000;
Thierry Strudel3d639192016-09-09 11:52:26 -07004067 if ((p_frame_number_valid != NULL) && *p_frame_number_valid) {
4068 if ((p_frame_number != NULL) &&
4069 (mLastCustIntentFrmNum == (int32_t)*p_frame_number)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004070 mPerfLockMgr.acquirePerfLock(PERF_LOCK_TAKE_SNAPSHOT, HDR_PLUS_PERF_TIME_OUT);
Thierry Strudel3d639192016-09-09 11:52:26 -07004071 }
4072 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004073}
4074
4075/*===========================================================================
4076 * FUNCTION : handleInputBufferWithLock
4077 *
4078 * DESCRIPTION: Handles input buffer and shutter callback with mMutex lock held.
4079 *
4080 * PARAMETERS : @frame_number: frame number of the input buffer
4081 *
4082 * RETURN :
4083 *
4084 *==========================================================================*/
4085void QCamera3HardwareInterface::handleInputBufferWithLock(uint32_t frame_number)
4086{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004087 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_IN_BUF_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07004088 pendingRequestIterator i = mPendingRequestsList.begin();
4089 while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
4090 i++;
4091 }
4092 if (i != mPendingRequestsList.end() && i->input_buffer) {
4093 //found the right request
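        // For a reprocess request, the shutter timestamp comes from ANDROID_SENSOR_TIMESTAMP
        // in the input settings; fall back to the current monotonic time if it is missing.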
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004094 CameraMetadata settings;
4095 nsecs_t capture_time = systemTime(CLOCK_MONOTONIC);
4096 if(i->settings) {
4097 settings = i->settings;
4098 if (settings.exists(ANDROID_SENSOR_TIMESTAMP)) {
4099 capture_time = settings.find(ANDROID_SENSOR_TIMESTAMP).data.i64[0];
Thierry Strudel3d639192016-09-09 11:52:26 -07004100 } else {
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004101 LOGE("No timestamp in input settings! Using current one.");
Thierry Strudel3d639192016-09-09 11:52:26 -07004102 }
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004103 } else {
4104 LOGE("Input settings missing!");
Thierry Strudel3d639192016-09-09 11:52:26 -07004105 }
4106
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004107 mShutterDispatcher.markShutterReady(frame_number, capture_time);
4108 LOGD("Input request metadata notify frame_number = %u, capture_time = %llu",
4109 i->frame_number, capture_time);
Thierry Strudel3d639192016-09-09 11:52:26 -07004110
4111 camera3_capture_result result;
4112 memset(&result, 0, sizeof(camera3_capture_result));
4113 result.frame_number = frame_number;
4114 result.result = i->settings;
4115 result.input_buffer = i->input_buffer;
4116 result.partial_result = PARTIAL_RESULT_COUNT;
4117
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004118 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -07004119 LOGD("Input request metadata and input buffer frame_number = %u",
4120 i->frame_number);
4121 i = erasePendingRequest(i);
Chien-Yu Chen588cc852017-06-23 18:39:51 -07004122
4123 // Dispatch result metadata that may be just unblocked by this reprocess result.
4124 dispatchResultMetadataWithLock(frame_number, /*isLiveRequest*/false);
Thierry Strudel3d639192016-09-09 11:52:26 -07004125 } else {
4126 LOGE("Could not find input request for frame number %d", frame_number);
4127 }
4128}
4129
4130/*===========================================================================
4131 * FUNCTION : handleBufferWithLock
4132 *
4133 * DESCRIPTION: Handles image buffer callback with mMutex lock held.
4134 *
4135 * PARAMETERS : @buffer: image buffer for the callback
4136 * @frame_number: frame number of the image buffer
4137 *
4138 * RETURN :
4139 *
4140 *==========================================================================*/
4141void QCamera3HardwareInterface::handleBufferWithLock(
4142 camera3_stream_buffer_t *buffer, uint32_t frame_number)
4143{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004144 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_BUF_LKD);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004145
4146 if (buffer->stream->format == HAL_PIXEL_FORMAT_BLOB) {
4147 mPerfLockMgr.releasePerfLock(PERF_LOCK_TAKE_SNAPSHOT);
4148 }
4149
Thierry Strudel3d639192016-09-09 11:52:26 -07004150 /* Nothing to be done during error state */
4151 if ((ERROR == mState) || (DEINIT == mState)) {
4152 return;
4153 }
4154 if (mFlushPerf) {
4155 handleBuffersDuringFlushLock(buffer);
4156 return;
4157 }
4158 //not in flush
4159 // If the frame number doesn't exist in the pending request list,
4160 // directly send the buffer to the frameworks, and update pending buffers map
4161 // Otherwise, book-keep the buffer.
4162 pendingRequestIterator i = mPendingRequestsList.begin();
4163 while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
4164 i++;
4165 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004166
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004167 if (i != mPendingRequestsList.end()) {
Thierry Strudel3d639192016-09-09 11:52:26 -07004168 if (i->input_buffer) {
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004169 // For a reprocessing request, try to send out result metadata.
4170 handlePendingResultMetadataWithLock(frame_number, nullptr);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004171 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004172 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004173
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004174 // Check if this frame was dropped.
4175 for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
4176 m != mPendingFrameDropList.end(); m++) {
4177 QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
4178 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
4179 if((m->stream_ID == streamID) && (m->frame_number==frame_number) ) {
4180 buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
4181 LOGD("Stream STATUS_ERROR frame_number=%d, streamID=%d",
4182 frame_number, streamID);
4183 m = mPendingFrameDropList.erase(m);
4184 break;
4185 }
4186 }
4187
Binhao Lin09245482017-08-31 18:25:29 -07004188 // WAR for encoder avtimer timestamp issue
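    // When the AV timer is enabled for the video stream, stamp the buffer's private handle
    // with the AV timestamp recorded for this request so the encoder sees the unmodified
    // time; if no AV timestamp was captured, flag the buffer as an error.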
4189 QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
4190 if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask() &&
4191 m_bAVTimerEnabled) {
4192 for (auto req = mPendingBuffersMap.mPendingBuffersInRequest.begin();
4193 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
4194 if (req->frame_number != frame_number)
4195 continue;
4196 if(req->av_timestamp == 0) {
4197 buffer->status |= CAMERA3_BUFFER_STATUS_ERROR;
4198 }
4199 else {
4200 struct private_handle_t *priv_handle =
4201 (struct private_handle_t *) (*(buffer->buffer));
4202 setMetaData(priv_handle, SET_VT_TIMESTAMP, &(req->av_timestamp));
4203 }
4204 }
4205 }
4206
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004207 buffer->status |= mPendingBuffersMap.getBufErrStatus(buffer->buffer);
4208 LOGH("result frame_number = %d, buffer = %p",
4209 frame_number, buffer->buffer);
4210
4211 mPendingBuffersMap.removeBuf(buffer->buffer);
4212 mOutputBufferDispatcher.markBufferReady(frame_number, *buffer);
4213
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004214 if (mPreviewStarted == false) {
4215 QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
4216 if ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask()) {
Chien-Yu Chen509314b2017-04-07 15:27:55 -07004217 logEaselEvent("EASEL_STARTUP_LATENCY", "Preview Started");
4218
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004219 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
4220 mPerfLockMgr.releasePerfLock(PERF_LOCK_OPEN_CAMERA);
4221 mPreviewStarted = true;
4222
4223 // Set power hint for preview
4224 mPerfLockMgr.acquirePerfLock(PERF_LOCK_POWERHINT_ENCODE, 0);
4225 }
4226 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004227}
4228
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004229void QCamera3HardwareInterface::handlePendingResultMetadataWithLock(uint32_t frameNumber,
Chien-Yu Chenbc730232017-07-12 14:49:55 -07004230 camera_metadata_t *resultMetadata)
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004231{
4232 // Find the pending request for this result metadata.
4233 auto requestIter = mPendingRequestsList.begin();
4234 while (requestIter != mPendingRequestsList.end() && requestIter->frame_number != frameNumber) {
4235 requestIter++;
4236 }
4237
4238 if (requestIter == mPendingRequestsList.end()) {
4239 ALOGE("%s: Cannot find a pending request for frame number %u.", __FUNCTION__, frameNumber);
4240 return;
4241 }
4242
4243 // Update the result metadata
4244 requestIter->resultMetadata = resultMetadata;
4245
4246 // Check what type of request this is.
4247 bool liveRequest = false;
4248 if (requestIter->hdrplus) {
Chien-Yu Chen9264fe92017-04-29 03:28:46 +00004249 // HDR+ request doesn't have partial results.
4250 requestIter->partial_result_cnt = PARTIAL_RESULT_COUNT;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004251 } else if (requestIter->input_buffer != nullptr) {
4252 // Reprocessing request result is the same as settings.
4253 requestIter->resultMetadata = requestIter->settings;
4254 // Reprocessing request doesn't have partial results.
4255 requestIter->partial_result_cnt = PARTIAL_RESULT_COUNT;
4256 } else {
4257 liveRequest = true;
Chien-Yu Chen0a921f92017-08-27 17:25:33 -07004258 requestIter->partial_result_cnt = PARTIAL_RESULT_COUNT;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004259 mPendingLiveRequest--;
4260
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07004261 {
Chien-Yu Chen77ccd022017-06-23 12:00:36 -07004262 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07004263 // For a live request, send the metadata to HDR+ client.
4264 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
4265 gHdrPlusClient->notifyFrameMetadata(frameNumber, *resultMetadata,
4266 requestIter->partial_result_cnt == PARTIAL_RESULT_COUNT);
4267 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004268 }
4269 }
4270
Chien-Yu Chenbc730232017-07-12 14:49:55 -07004271 // Remove the lens shading map if it was not requested.
4272 if (requestIter->requestedLensShadingMapMode == ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF) {
4273 CameraMetadata metadata;
4274 metadata.acquire(resultMetadata);
4275 metadata.erase(ANDROID_STATISTICS_LENS_SHADING_MAP);
4276 metadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE,
4277 &requestIter->requestedLensShadingMapMode, 1);
4278
4279 requestIter->resultMetadata = metadata.release();
4280 }
4281
Chien-Yu Chen588cc852017-06-23 18:39:51 -07004282 dispatchResultMetadataWithLock(frameNumber, liveRequest);
4283}
4284
4285void QCamera3HardwareInterface::dispatchResultMetadataWithLock(uint32_t frameNumber,
4286 bool isLiveRequest) {
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004287 // The pending requests are ordered by increasing frame numbers. The result metadata are ready
4288 // to be sent if all previous pending requests are ready to be sent.
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004289 bool readyToSend = true;
4290
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004291 // Iterate through the pending requests to send out result metadata that are ready. Also if
4292 // this result metadata belongs to a live request, notify errors for previous live requests
4293 // that don't have result metadata yet.
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004294 auto iter = mPendingRequestsList.begin();
4295 while (iter != mPendingRequestsList.end()) {
4296 // Check if current pending request is ready. If it's not ready, the following pending
4297 // requests are also not ready.
4298 if (readyToSend && iter->resultMetadata == nullptr) {
4299 readyToSend = false;
4300 }
4301
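        // A request is considered live if it is neither an HDR+ request nor a reprocess
        // (input buffer) request; only live requests are counted in mPendingLiveRequest
        // and can be failed with ERROR_RESULT below.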
4302 bool thisLiveRequest = iter->hdrplus == false && iter->input_buffer == nullptr;
4303
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004304 camera3_capture_result_t result = {};
4305 result.frame_number = iter->frame_number;
4306 result.result = iter->resultMetadata;
4307 result.partial_result = iter->partial_result_cnt;
4308
4309 // If this pending buffer has result metadata, we may be able to send out shutter callback
4310 // and result metadata.
4311 if (iter->resultMetadata != nullptr) {
4312 if (!readyToSend) {
4313 // If any of the previous pending request is not ready, this pending request is
4314 // also not ready to send in order to keep shutter callbacks and result metadata
4315 // in order.
4316 iter++;
4317 continue;
4318 }
Chien-Yu Chen588cc852017-06-23 18:39:51 -07004319 } else if (iter->frame_number < frameNumber && isLiveRequest && thisLiveRequest) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004320 // If the result metadata belongs to a live request, notify errors for previous pending
4321 // live requests.
4322 mPendingLiveRequest--;
4323
4324 CameraMetadata dummyMetadata;
4325 dummyMetadata.update(ANDROID_REQUEST_ID, &(iter->request_id), 1);
4326 result.result = dummyMetadata.release();
4327
4328 notifyError(iter->frame_number, CAMERA3_MSG_ERROR_RESULT);
Shuzhen Wang1ee712a2017-03-22 17:51:26 -07004329
4330 // partial_result should be PARTIAL_RESULT_COUNT in case of
4331 // ERROR_RESULT.
4332 iter->partial_result_cnt = PARTIAL_RESULT_COUNT;
4333 result.partial_result = PARTIAL_RESULT_COUNT;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004334 } else {
4335 iter++;
4336 continue;
4337 }
4338
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004339 result.output_buffers = nullptr;
4340 result.num_output_buffers = 0;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004341 orchestrateResult(&result);
4342
4343 // For reprocessing, result metadata is the same as settings so do not free it here to
4344 // avoid double free.
4345 if (result.result != iter->settings) {
4346 free_camera_metadata((camera_metadata_t *)result.result);
4347 }
4348 iter->resultMetadata = nullptr;
4349 iter = erasePendingRequest(iter);
4350 }
4351
Chien-Yu Chen588cc852017-06-23 18:39:51 -07004352 if (isLiveRequest) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004353 for (auto &iter : mPendingRequestsList) {
4354 // Increment pipeline depth for the following pending requests.
4355 if (iter.frame_number > frameNumber) {
4356 iter.pipeline_depth++;
4357 }
4358 }
4359 }
4360
4361 unblockRequestIfNecessary();
4362}
4363
Thierry Strudel3d639192016-09-09 11:52:26 -07004364/*===========================================================================
4365 * FUNCTION : unblockRequestIfNecessary
4366 *
4367 * DESCRIPTION: Unblock capture_request if max_buffer hasn't been reached. Note
4368 * that mMutex is held when this function is called.
4369 *
4370 * PARAMETERS :
4371 *
4372 * RETURN :
4373 *
4374 *==========================================================================*/
4375void QCamera3HardwareInterface::unblockRequestIfNecessary()
4376{
4377 // Unblock process_capture_request
4378 pthread_cond_signal(&mRequestCond);
4379}
4380
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004381/*===========================================================================
4382 * FUNCTION : isHdrSnapshotRequest
4383 *
4384 * DESCRIPTION: Function to determine if the request is for an HDR snapshot
4385 *
4386 * PARAMETERS : camera3 request structure
4387 *
4388 * RETURN : boolean decision variable
4389 *
4390 *==========================================================================*/
4391bool QCamera3HardwareInterface::isHdrSnapshotRequest(camera3_capture_request *request)
4392{
4393 if (request == NULL) {
4394 LOGE("Invalid request handle");
4395 assert(0);
4396 return false;
4397 }
4398
4399 if (!mForceHdrSnapshot) {
4400 CameraMetadata frame_settings;
4401 frame_settings = request->settings;
4402
4403 if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
4404 uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
4405 if (metaMode != ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
4406 return false;
4407 }
4408 } else {
4409 return false;
4410 }
4411
4412 if (frame_settings.exists(ANDROID_CONTROL_SCENE_MODE)) {
4413 uint8_t fwk_sceneMode = frame_settings.find(ANDROID_CONTROL_SCENE_MODE).data.u8[0];
4414 if (fwk_sceneMode != ANDROID_CONTROL_SCENE_MODE_HDR) {
4415 return false;
4416 }
4417 } else {
4418 return false;
4419 }
4420 }
4421
4422 for (uint32_t i = 0; i < request->num_output_buffers; i++) {
4423 if (request->output_buffers[i].stream->format
4424 == HAL_PIXEL_FORMAT_BLOB) {
4425 return true;
4426 }
4427 }
4428
4429 return false;
4430}
4431/*===========================================================================
4432 * FUNCTION : orchestrateRequest
4433 *
4434 * DESCRIPTION: Orchestrates a capture request from camera service
4435 *
4436 * PARAMETERS :
4437 * @request : request from framework to process
4438 *
4439 * RETURN : Error status codes
4440 *
4441 *==========================================================================*/
4442int32_t QCamera3HardwareInterface::orchestrateRequest(
4443 camera3_capture_request_t *request)
4444{
4445
4446 uint32_t originalFrameNumber = request->frame_number;
4447 uint32_t originalOutputCount = request->num_output_buffers;
4448 const camera_metadata_t *original_settings = request->settings;
4449 List<InternalRequest> internallyRequestedStreams;
4450 List<InternalRequest> emptyInternalList;
4451
4452 if (isHdrSnapshotRequest(request) && request->input_buffer == NULL) {
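        // HDR snapshot: expand the single framework request into a bracketed sequence of
        // internal captures (settling/-2x, 0x, and 2x frames), alternating metering-only
        // requests with requests that need metadata for offline postprocessing. Only the
        // original framework frame number is mapped to a deliverable result; the purely
        // internal frame numbers map to EMPTY_FRAMEWORK_FRAME_NUMBER and their results
        // are dropped in orchestrateResult.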
4453 LOGD("Framework requested:%d buffers in HDR snapshot", request->num_output_buffers);
4454 uint32_t internalFrameNumber;
4455 CameraMetadata modified_meta;
4456
4457
4458 /* Add Blob channel to list of internally requested streams */
4459 for (uint32_t i = 0; i < request->num_output_buffers; i++) {
4460 if (request->output_buffers[i].stream->format
4461 == HAL_PIXEL_FORMAT_BLOB) {
4462 InternalRequest streamRequested;
4463 streamRequested.meteringOnly = 1;
4464 streamRequested.need_metadata = 0;
4465 streamRequested.stream = request->output_buffers[i].stream;
4466 internallyRequestedStreams.push_back(streamRequested);
4467 }
4468 }
4469 request->num_output_buffers = 0;
4470 auto itr = internallyRequestedStreams.begin();
4471
4472 /* Modify setting to set compensation */
4473 modified_meta = request->settings;
4474 int32_t expCompensation = GB_HDR_HALF_STEP_EV;
4475 uint8_t aeLock = 1;
4476 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4477 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4478 camera_metadata_t *modified_settings = modified_meta.release();
4479 request->settings = modified_settings;
4480
4481 /* Capture Settling & -2x frame */
4482 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4483 request->frame_number = internalFrameNumber;
4484 processCaptureRequest(request, internallyRequestedStreams);
4485
4486 request->num_output_buffers = originalOutputCount;
4487 _orchestrationDb.allocStoreInternalFrameNumber(originalFrameNumber, internalFrameNumber);
4488 request->frame_number = internalFrameNumber;
4489 processCaptureRequest(request, emptyInternalList);
4490 request->num_output_buffers = 0;
4491
4492 modified_meta = modified_settings;
4493 expCompensation = 0;
4494 aeLock = 1;
4495 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4496 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4497 modified_settings = modified_meta.release();
4498 request->settings = modified_settings;
4499
4500 /* Capture Settling & 0X frame */
4501
4502 itr = internallyRequestedStreams.begin();
4503 if (itr == internallyRequestedStreams.end()) {
4504 LOGE("Error Internally Requested Stream list is empty");
4505 assert(0);
4506 } else {
4507 itr->need_metadata = 0;
4508 itr->meteringOnly = 1;
4509 }
4510
4511 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4512 request->frame_number = internalFrameNumber;
4513 processCaptureRequest(request, internallyRequestedStreams);
4514
4515 itr = internallyRequestedStreams.begin();
4516 if (itr == internallyRequestedStreams.end()) {
4517 ALOGE("Error Internally Requested Stream list is empty");
4518 assert(0);
4519 } else {
4520 itr->need_metadata = 1;
4521 itr->meteringOnly = 0;
4522 }
4523
4524 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4525 request->frame_number = internalFrameNumber;
4526 processCaptureRequest(request, internallyRequestedStreams);
4527
4528 /* Capture 2X frame*/
4529 modified_meta = modified_settings;
4530 expCompensation = GB_HDR_2X_STEP_EV;
4531 aeLock = 1;
4532 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4533 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4534 modified_settings = modified_meta.release();
4535 request->settings = modified_settings;
4536
4537 itr = internallyRequestedStreams.begin();
4538 if (itr == internallyRequestedStreams.end()) {
4539 ALOGE("Error Internally Requested Stream list is empty");
4540 assert(0);
4541 } else {
4542 itr->need_metadata = 0;
4543 itr->meteringOnly = 1;
4544 }
4545 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4546 request->frame_number = internalFrameNumber;
4547 processCaptureRequest(request, internallyRequestedStreams);
4548
4549 itr = internallyRequestedStreams.begin();
4550 if (itr == internallyRequestedStreams.end()) {
4551 ALOGE("Error Internally Requested Stream list is empty");
4552 assert(0);
4553 } else {
4554 itr->need_metadata = 1;
4555 itr->meteringOnly = 0;
4556 }
4557
4558 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4559 request->frame_number = internalFrameNumber;
4560 processCaptureRequest(request, internallyRequestedStreams);
4561
4562
4563 /* Capture 2X on original streaming config*/
4564 internallyRequestedStreams.clear();
4565
4566 /* Restore original settings pointer */
4567 request->settings = original_settings;
4568 } else {
4569 uint32_t internalFrameNumber;
4570 _orchestrationDb.allocStoreInternalFrameNumber(request->frame_number, internalFrameNumber);
4571 request->frame_number = internalFrameNumber;
4572 return processCaptureRequest(request, internallyRequestedStreams);
4573 }
4574
4575 return NO_ERROR;
4576}
4577
4578/*===========================================================================
4579 * FUNCTION : orchestrateResult
4580 *
4581 * DESCRIPTION: Orchestrates a capture result to camera service
4582 *
4583 * PARAMETERS :
4584 * @request : request from framework to process
4585 * @result : capture result to be sent to the framework
4586 * RETURN :
4587 *
4588 *==========================================================================*/
4589void QCamera3HardwareInterface::orchestrateResult(
4590 camera3_capture_result_t *result)
4591{
4592 uint32_t frameworkFrameNumber;
4593 int32_t rc = _orchestrationDb.getFrameworkFrameNumber(result->frame_number,
4594 frameworkFrameNumber);
4595 if (rc != NO_ERROR) {
4596 LOGE("Cannot find translated frameworkFrameNumber");
4597 assert(0);
4598 } else {
4599 if (frameworkFrameNumber == EMPTY_FRAMEWORK_FRAME_NUMBER) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08004600 LOGD("Internal Request drop the result");
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004601 } else {
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004602 if (result->result != NULL) {
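            // If the result metadata carries ANDROID_SYNC_FRAME_NUMBER, rewrite it to the
            // framework frame number so the framework never sees internal numbering.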
Binhao Lin299ffc92017-04-27 11:22:47 -07004603 camera_metadata_t *metadata = const_cast<camera_metadata_t*>(result->result);
4604 camera_metadata_entry_t entry;
4605 int ret = find_camera_metadata_entry(metadata, ANDROID_SYNC_FRAME_NUMBER, &entry);
4606 if (ret == OK) {
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004607 int64_t sync_frame_number = frameworkFrameNumber;
Binhao Lin299ffc92017-04-27 11:22:47 -07004608 ret = update_camera_metadata_entry(metadata, entry.index, &sync_frame_number, 1, &entry);
4609 if (ret != OK)
4610 LOGE("Update ANDROID_SYNC_FRAME_NUMBER Error!");
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004611 }
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004612 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004613 result->frame_number = frameworkFrameNumber;
4614 mCallbackOps->process_capture_result(mCallbackOps, result);
4615 }
4616 }
4617}
4618
4619/*===========================================================================
4620 * FUNCTION : orchestrateNotify
4621 *
4622 * DESCRIPTION: Orchestrates a notify to camera service
4623 *
4624 * PARAMETERS :
4625 * @notify_msg : notify message to be sent to the framework
4626 *
4627 * RETURN :
4628 *
4629 *==========================================================================*/
4630void QCamera3HardwareInterface::orchestrateNotify(camera3_notify_msg_t *notify_msg)
4631{
4632 uint32_t frameworkFrameNumber;
4633 uint32_t internalFrameNumber = notify_msg->message.shutter.frame_number;
Thierry Strudel2896d122017-02-23 19:18:03 -08004634 int32_t rc = NO_ERROR;
4635
4636 rc = _orchestrationDb.getFrameworkFrameNumber(internalFrameNumber,
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004637 frameworkFrameNumber);
Thierry Strudel2896d122017-02-23 19:18:03 -08004638
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004639 if (rc != NO_ERROR) {
Thierry Strudel2896d122017-02-23 19:18:03 -08004640 if (notify_msg->message.error.error_code == CAMERA3_MSG_ERROR_DEVICE) {
4641 LOGD("Sending CAMERA3_MSG_ERROR_DEVICE to framework");
4642 frameworkFrameNumber = 0;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004643 } else {
Thierry Strudel2896d122017-02-23 19:18:03 -08004644 LOGE("Cannot find translated frameworkFrameNumber");
4645 assert(0);
4646 return;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004647 }
4648 }
Thierry Strudel2896d122017-02-23 19:18:03 -08004649
4650 if (frameworkFrameNumber == EMPTY_FRAMEWORK_FRAME_NUMBER) {
4651 LOGD("Internal Request drop the notifyCb");
4652 } else {
4653 notify_msg->message.shutter.frame_number = frameworkFrameNumber;
4654 mCallbackOps->notify(mCallbackOps, notify_msg);
4655 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004656}
4657
4658/*===========================================================================
4659 * FUNCTION : FrameNumberRegistry
4660 *
4661 * DESCRIPTION: Constructor
4662 *
4663 * PARAMETERS :
4664 *
4665 * RETURN :
4666 *
4667 *==========================================================================*/
4668FrameNumberRegistry::FrameNumberRegistry()
4669{
4670 _nextFreeInternalNumber = INTERNAL_FRAME_STARTING_NUMBER;
4671}
4672
4673/*===========================================================================
4674 * FUNCTION : ~FrameNumberRegistry
4675 *
4676 * DESCRIPTION: Destructor
4677 *
4678 * PARAMETERS :
4679 *
4680 * RETURN :
4681 *
4682 *==========================================================================*/
4683FrameNumberRegistry::~FrameNumberRegistry()
4684{
4685}
4686
4687/*===========================================================================
4688 * FUNCTION : PurgeOldEntriesLocked
4689 *
4690 * DESCRIPTION: Maintenance function to trigger the LRU cleanup mechanism
4691 *
4692 * PARAMETERS :
4693 *
4694 * RETURN : NONE
4695 *
4696 *==========================================================================*/
4697void FrameNumberRegistry::purgeOldEntriesLocked()
4698{
4699 while (_register.begin() != _register.end()) {
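    // Entries are keyed by the monotonically increasing internal frame number, so the
    // oldest entries sit at the front; erase everything that has fallen outside the
    // FRAME_REGISTER_LRU_SIZE window and stop at the first entry still inside it.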
4700 auto itr = _register.begin();
4701 if (itr->first < (_nextFreeInternalNumber - FRAME_REGISTER_LRU_SIZE)) {
4702 _register.erase(itr);
4703 } else {
4704 return;
4705 }
4706 }
4707}
4708
4709/*===========================================================================
4710 * FUNCTION : allocStoreInternalFrameNumber
4711 *
4712 * DESCRIPTION: Method to record a framework request and associate a new
4713 * internal request number with it
4714 *
4715 * PARAMETERS :
4716 * @fFrameNumber: Identifier given by framework
4717 * @internalFN : Output parameter which will have the newly generated internal
4718 * entry
4719 *
4720 * RETURN : Error code
4721 *
4722 *==========================================================================*/
4723int32_t FrameNumberRegistry::allocStoreInternalFrameNumber(uint32_t frameworkFrameNumber,
4724 uint32_t &internalFrameNumber)
4725{
4726 Mutex::Autolock lock(mRegistryLock);
4727 internalFrameNumber = _nextFreeInternalNumber++;
4728 LOGD("Storing ff#:%d, with internal:%d", frameworkFrameNumber, internalFrameNumber);
4729 _register.insert(std::pair<uint32_t,uint32_t>(internalFrameNumber, frameworkFrameNumber));
4730 purgeOldEntriesLocked();
4731 return NO_ERROR;
4732}
4733
4734/*===========================================================================
4735 * FUNCTION : generateStoreInternalFrameNumber
4736 *
4737 * DESCRIPTION: Method to allocate a new internal request number independent
4738 * of any association with framework requests
4739 *
4740 * PARAMETERS :
4741 * @internalFrame#: Output parameter which will hold the newly generated internal frame number
4742 *
4743 *
4744 * RETURN : Error code
4745 *
4746 *==========================================================================*/
4747int32_t FrameNumberRegistry::generateStoreInternalFrameNumber(uint32_t &internalFrameNumber)
4748{
4749 Mutex::Autolock lock(mRegistryLock);
4750 internalFrameNumber = _nextFreeInternalNumber++;
4751 LOGD("Generated internal framenumber:%d", internalFrameNumber);
4752 _register.insert(std::pair<uint32_t,uint32_t>(internalFrameNumber, EMPTY_FRAMEWORK_FRAME_NUMBER));
4753 purgeOldEntriesLocked();
4754 return NO_ERROR;
4755}
4756
4757/*===========================================================================
4758 * FUNCTION : getFrameworkFrameNumber
4759 *
4760 * DESCRIPTION: Method to query the framework frame number given an internal #
4761 *
4762 * PARAMETERS :
4763 * @internalFrame#: Internal reference
4764 * @frameworkframenumber: Output parameter holding framework frame entry
4765 *
4766 * RETURN : Error code
4767 *
4768 *==========================================================================*/
4769int32_t FrameNumberRegistry::getFrameworkFrameNumber(uint32_t internalFrameNumber,
4770 uint32_t &frameworkFrameNumber)
4771{
4772 Mutex::Autolock lock(mRegistryLock);
4773 auto itr = _register.find(internalFrameNumber);
4774 if (itr == _register.end()) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08004775 LOGE("Cannot find internal#: %d", internalFrameNumber);
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004776 return -ENOENT;
4777 }
4778
4779 frameworkFrameNumber = itr->second;
4780 purgeOldEntriesLocked();
4781 return NO_ERROR;
4782}
Thierry Strudel3d639192016-09-09 11:52:26 -07004783
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004784status_t QCamera3HardwareInterface::fillPbStreamConfig(
Chien-Yu Chen14d3e392017-07-10 18:27:05 -07004785 pbcamera::StreamConfiguration *config, uint32_t pbStreamId, QCamera3Channel *channel,
4786 uint32_t streamIndex) {
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004787 if (config == nullptr) {
4788 LOGE("%s: config is null", __FUNCTION__);
4789 return BAD_VALUE;
4790 }
4791
4792 if (channel == nullptr) {
4793 LOGE("%s: channel is null", __FUNCTION__);
4794 return BAD_VALUE;
4795 }
4796
4797 QCamera3Stream *stream = channel->getStreamByIndex(streamIndex);
4798 if (stream == nullptr) {
4799 LOGE("%s: Failed to get stream %d in channel.", __FUNCTION__, streamIndex);
4800 return NAME_NOT_FOUND;
4801 }
4802
4803 const cam_stream_info_t* streamInfo = stream->getStreamInfo();
4804 if (streamInfo == nullptr) {
4805 LOGE("%s: Failed to get stream info for stream %d in channel.", __FUNCTION__, streamIndex);
4806 return NAME_NOT_FOUND;
4807 }
4808
4809 config->id = pbStreamId;
4810 config->image.width = streamInfo->dim.width;
4811 config->image.height = streamInfo->dim.height;
4812 config->image.padding = 0;
Chien-Yu Chen14d3e392017-07-10 18:27:05 -07004813
4814 int bytesPerPixel = 0;
4815
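    // Both supported formats below are 8-bit semi-planar YUV, so each plane stores one
    // byte per sample; bytesPerPixel is used to convert the per-plane stride to bytes.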
4816 switch (streamInfo->fmt) {
4817 case CAM_FORMAT_YUV_420_NV21:
4818 config->image.format = HAL_PIXEL_FORMAT_YCrCb_420_SP;
4819 bytesPerPixel = 1;
4820 break;
4821 case CAM_FORMAT_YUV_420_NV12:
4822 case CAM_FORMAT_YUV_420_NV12_VENUS:
4823 config->image.format = HAL_PIXEL_FORMAT_YCbCr_420_SP;
4824 bytesPerPixel = 1;
4825 break;
4826 default:
4827 ALOGE("%s: Stream format %d not supported.", __FUNCTION__, streamInfo->fmt);
4828 return BAD_VALUE;
4829 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004830
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004831 uint32_t totalPlaneSize = 0;
4832
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004833 // Fill plane information.
4834 for (uint32_t i = 0; i < streamInfo->buf_planes.plane_info.num_planes; i++) {
4835 pbcamera::PlaneConfiguration plane;
Chien-Yu Chen14d3e392017-07-10 18:27:05 -07004836 plane.stride = streamInfo->buf_planes.plane_info.mp[i].stride * bytesPerPixel;
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004837 plane.scanline = streamInfo->buf_planes.plane_info.mp[i].scanline;
4838 config->image.planes.push_back(plane);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004839
4840 totalPlaneSize += (plane.stride * plane.scanline);
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004841 }
4842
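    // Whatever the frame length holds beyond the summed (stride x scanline) plane sizes
    // is reported as padding.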
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004843 config->image.padding = streamInfo->buf_planes.plane_info.frame_len - totalPlaneSize;
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004844 return OK;
4845}
4846
Thierry Strudel3d639192016-09-09 11:52:26 -07004847/*===========================================================================
4848 * FUNCTION : processCaptureRequest
4849 *
4850 * DESCRIPTION: process a capture request from camera service
4851 *
4852 * PARAMETERS :
4853 * @request : request from framework to process
4854 *
4855 * RETURN :
4856 *
4857 *==========================================================================*/
4858int QCamera3HardwareInterface::processCaptureRequest(
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004859 camera3_capture_request_t *request,
4860 List<InternalRequest> &internallyRequestedStreams)
Thierry Strudel3d639192016-09-09 11:52:26 -07004861{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004862 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_PROC_CAP_REQ);
Thierry Strudel3d639192016-09-09 11:52:26 -07004863 int rc = NO_ERROR;
4864 int32_t request_id;
4865 CameraMetadata meta;
Thierry Strudel3d639192016-09-09 11:52:26 -07004866 bool isVidBufRequested = false;
4867 camera3_stream_buffer_t *pInputBuffer = NULL;
Thierry Strudel54dc9782017-02-15 12:12:10 -08004868 char prop[PROPERTY_VALUE_MAX];
Thierry Strudel3d639192016-09-09 11:52:26 -07004869
4870 pthread_mutex_lock(&mMutex);
4871
4872 // Validate current state
4873 switch (mState) {
4874 case CONFIGURED:
4875 case STARTED:
4876 /* valid state */
4877 break;
4878
4879 case ERROR:
4880 pthread_mutex_unlock(&mMutex);
4881 handleCameraDeviceError();
4882 return -ENODEV;
4883
4884 default:
4885 LOGE("Invalid state %d", mState);
4886 pthread_mutex_unlock(&mMutex);
4887 return -ENODEV;
4888 }
4889
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004890 rc = validateCaptureRequest(request, internallyRequestedStreams);
Thierry Strudel3d639192016-09-09 11:52:26 -07004891 if (rc != NO_ERROR) {
4892 LOGE("incoming request is not valid");
4893 pthread_mutex_unlock(&mMutex);
4894 return rc;
4895 }
4896
4897 meta = request->settings;
4898
4899 // For first capture request, send capture intent, and
4900 // stream on all streams
4901 if (mState == CONFIGURED) {
Chien-Yu Chene96475e2017-04-11 11:53:26 -07004902 logEaselEvent("EASEL_STARTUP_LATENCY", "First request");
Thierry Strudel3d639192016-09-09 11:52:26 -07004903 // send an unconfigure to the backend so that the isp
4904 // resources are deallocated
4905 if (!mFirstConfiguration) {
4906 cam_stream_size_info_t stream_config_info;
4907 int32_t hal_version = CAM_HAL_V3;
4908 memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
4909 stream_config_info.buffer_info.min_buffers =
4910 MIN_INFLIGHT_REQUESTS;
4911 stream_config_info.buffer_info.max_buffers =
Thierry Strudel2896d122017-02-23 19:18:03 -08004912 m_bIs4KVideo ? 0 :
Jason Leea46ad5e2017-07-07 15:20:56 -07004913 m_bEis3PropertyEnabled && m_bIsVideo ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07004914 clear_metadata_buffer(mParameters);
4915 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4916 CAM_INTF_PARM_HAL_VERSION, hal_version);
4917 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4918 CAM_INTF_META_STREAM_INFO, stream_config_info);
4919 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
4920 mParameters);
4921 if (rc < 0) {
4922 LOGE("set_parms for unconfigure failed");
4923 pthread_mutex_unlock(&mMutex);
4924 return rc;
4925 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07004926
Thierry Strudel3d639192016-09-09 11:52:26 -07004927 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004928 mPerfLockMgr.acquirePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07004929 /* get eis information for stream configuration */
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004930 cam_is_type_t isTypeVideo, isTypePreview, is_type=IS_TYPE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07004931 char is_type_value[PROPERTY_VALUE_MAX];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004932 property_get("persist.camera.is_type", is_type_value, "4");
4933 isTypeVideo = static_cast<cam_is_type_t>(atoi(is_type_value));
4934 // Make default value for preview IS_TYPE as IS_TYPE_EIS_2_0
4935 property_get("persist.camera.is_type_preview", is_type_value, "4");
4936 isTypePreview = static_cast<cam_is_type_t>(atoi(is_type_value));
4937 LOGD("isTypeVideo: %d isTypePreview: %d", isTypeVideo, isTypePreview);
Thierry Strudel3d639192016-09-09 11:52:26 -07004938
4939 if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
4940 int32_t hal_version = CAM_HAL_V3;
4941 uint8_t captureIntent =
4942 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
4943 mCaptureIntent = captureIntent;
4944 clear_metadata_buffer(mParameters);
4945 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version);
4946 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_CAPTURE_INTENT, captureIntent);
4947 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07004948 if (mFirstConfiguration) {
4949 // configure instant AEC
4950 // Instant AEC is a session based parameter and it is needed only
4951 // once per complete session after open camera.
4952 // i.e. This is set only once for the first capture request, after open camera.
4953 setInstantAEC(meta);
4954 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004955 uint8_t fwkVideoStabMode=0;
4956 if (meta.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
4957 fwkVideoStabMode = meta.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
4958 }
4959
Xue Tuecac74e2017-04-17 13:58:15 -07004960 // If the EIS setprop is enabled, turn EIS on only for video/preview streams
4961 bool setEis = m_bEisEnable && m_bEisSupportedSize &&
Jason Lee603176d2017-05-31 11:43:27 -07004962 (isTypeVideo >= IS_TYPE_EIS_2_0) && !meta.exists(QCAMERA3_USE_AV_TIMER);
Thierry Strudel3d639192016-09-09 11:52:26 -07004963 int32_t vsMode;
4964 vsMode = (setEis)? DIS_ENABLE: DIS_DISABLE;
4965 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_DIS_ENABLE, vsMode)) {
4966 rc = BAD_VALUE;
4967 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004968 LOGD("setEis %d", setEis);
4969 bool eis3Supported = false;
4970 size_t count = IS_TYPE_MAX;
4971 count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
4972 for (size_t i = 0; i < count; i++) {
4973 if (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0) {
4974 eis3Supported = true;
4975 break;
4976 }
4977 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004978
4979 //IS type will be 0 unless EIS is supported. If EIS is supported
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004980 //it could either be 4 or 5 depending on the stream and video size
Thierry Strudel3d639192016-09-09 11:52:26 -07004981 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
4982 if (setEis) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004983 if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_PREVIEW) {
4984 is_type = isTypePreview;
4985 } else if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_VIDEO ) {
4986 if ( (isTypeVideo == IS_TYPE_EIS_3_0) && (eis3Supported == FALSE) ) {
4987 LOGW(" EIS_3.0 is not supported and so setting EIS_2.0");
Thierry Strudel3d639192016-09-09 11:52:26 -07004988 is_type = IS_TYPE_EIS_2_0;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004989 } else {
4990 is_type = isTypeVideo;
Thierry Strudel3d639192016-09-09 11:52:26 -07004991 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004992 } else {
4993 is_type = IS_TYPE_NONE;
4994 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004995 mStreamConfigInfo.is_type[i] = is_type;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004996 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07004997 mStreamConfigInfo.is_type[i] = IS_TYPE_NONE;
4998 }
4999 }
5000
5001 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5002 CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
5003
Thierry Strudel54dc9782017-02-15 12:12:10 -08005004 //Disable tintless only if the property is set to 0
5005 memset(prop, 0, sizeof(prop));
5006 property_get("persist.camera.tintless.enable", prop, "1");
5007 int32_t tintless_value = atoi(prop);
5008
Thierry Strudel3d639192016-09-09 11:52:26 -07005009 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5010 CAM_INTF_PARM_TINTLESS, tintless_value);
Thierry Strudel54dc9782017-02-15 12:12:10 -08005011
Thierry Strudel3d639192016-09-09 11:52:26 -07005012 //Disable CDS for HFR mode or if DIS/EIS is on.
5013 //CDS is a session parameter in the backend/ISP, so need to be set/reset
5014 //after every configure_stream
5015 if ((CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) ||
5016 (m_bIsVideo)) {
5017 int32_t cds = CAM_CDS_MODE_OFF;
5018 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5019 CAM_INTF_PARM_CDS_MODE, cds))
5020 LOGE("Failed to disable CDS for HFR mode");
5021
5022 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005023
5024 if (m_debug_avtimer || meta.exists(QCAMERA3_USE_AV_TIMER)) {
5025 uint8_t* use_av_timer = NULL;
5026
5027 if (m_debug_avtimer){
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005028 LOGI(" Enabling AV timer through setprop");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005029 use_av_timer = &m_debug_avtimer;
Binhao Lin09245482017-08-31 18:25:29 -07005030 m_bAVTimerEnabled = true;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005031 }
5032 else{
5033 use_av_timer =
5034 meta.find(QCAMERA3_USE_AV_TIMER).data.u8;
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005035 if (use_av_timer) {
Binhao Lin09245482017-08-31 18:25:29 -07005036 m_bAVTimerEnabled = true;
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005037 LOGI("Enabling AV timer through Metadata: use_av_timer: %d", *use_av_timer);
5038 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005039 }
5040
5041 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_USE_AV_TIMER, *use_av_timer)) {
5042 rc = BAD_VALUE;
5043 }
5044 }
5045
Thierry Strudel3d639192016-09-09 11:52:26 -07005046 setMobicat();
5047
Emilian Peev49c4c6b2017-04-24 10:21:34 +01005048 uint8_t nrMode = 0;
5049 if (meta.exists(ANDROID_NOISE_REDUCTION_MODE)) {
5050 nrMode = meta.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
5051 }
5052
Thierry Strudel3d639192016-09-09 11:52:26 -07005053 /* Set fps and hfr mode while sending meta stream info so that sensor
5054 * can configure appropriate streaming mode */
5055 mHFRVideoFps = DEFAULT_VIDEO_FPS;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005056 mMinInFlightRequests = MIN_INFLIGHT_REQUESTS;
5057 mMaxInFlightRequests = MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07005058 if (meta.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
5059 rc = setHalFpsRange(meta, mParameters);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005060 if (rc == NO_ERROR) {
5061 int32_t max_fps =
5062 (int32_t) meta.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
Zhijun He21b864a2016-06-24 13:41:19 -07005063 if (max_fps == 60 || mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005064 mMinInFlightRequests = MIN_INFLIGHT_60FPS_REQUESTS;
5065 }
5066 /* For HFR, more buffers are dequeued upfront to improve the performance */
5067 if (mBatchSize) {
5068 mMinInFlightRequests = MIN_INFLIGHT_HFR_REQUESTS;
5069 mMaxInFlightRequests = MAX_INFLIGHT_HFR_REQUESTS;
5070 }
5071 }
5072 else {
Thierry Strudel3d639192016-09-09 11:52:26 -07005073 LOGE("setHalFpsRange failed");
5074 }
5075 }
5076 if (meta.exists(ANDROID_CONTROL_MODE)) {
5077 uint8_t metaMode = meta.find(ANDROID_CONTROL_MODE).data.u8[0];
5078 rc = extractSceneMode(meta, metaMode, mParameters);
5079 if (rc != NO_ERROR) {
5080 LOGE("extractSceneMode failed");
5081 }
5082 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005083 memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07005084
Thierry Strudel04e026f2016-10-10 11:27:36 -07005085 if (meta.exists(QCAMERA3_VIDEO_HDR_MODE)) {
5086 cam_video_hdr_mode_t vhdr = (cam_video_hdr_mode_t)
5087 meta.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
5088 rc = setVideoHdrMode(mParameters, vhdr);
5089 if (rc != NO_ERROR) {
5090 LOGE("setVideoHDR is failed");
5091 }
5092 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005093
Ranjith Kagathi Anandae5df3752017-04-28 11:22:51 -07005094 if (meta.exists(TANGO_MODE_DATA_SENSOR_FULLFOV)) {
Ranjith Kagathi Ananda0533b682017-03-24 17:52:46 -07005095 uint8_t sensorModeFullFov =
Ranjith Kagathi Anandae5df3752017-04-28 11:22:51 -07005096 meta.find(TANGO_MODE_DATA_SENSOR_FULLFOV).data.u8[0];
Ranjith Kagathi Ananda0533b682017-03-24 17:52:46 -07005097 LOGD("SENSOR_MODE_FULLFOV %d" , sensorModeFullFov);
5098 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_SENSOR_MODE_FULLFOV,
5099 sensorModeFullFov)) {
5100 rc = BAD_VALUE;
5101 }
5102 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005103 //TODO: validate the arguments, HSV scenemode should have only the
5104 //advertised fps ranges
5105
5106 /* Set the capture intent, HAL version, tintless, stream info,
5107 * and DIS enable parameters to the backend */
5108 LOGD("set_parms META_STREAM_INFO " );
5109 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
Thierry Strudel2896d122017-02-23 19:18:03 -08005110 LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%" PRIx64
5111 ", Format:%d is_type: %d",
Thierry Strudel3d639192016-09-09 11:52:26 -07005112 mStreamConfigInfo.type[i],
5113 mStreamConfigInfo.stream_sizes[i].width,
5114 mStreamConfigInfo.stream_sizes[i].height,
5115 mStreamConfigInfo.postprocess_mask[i],
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005116 mStreamConfigInfo.format[i],
5117 mStreamConfigInfo.is_type[i]);
Thierry Strudel3d639192016-09-09 11:52:26 -07005118 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005119
Thierry Strudel3d639192016-09-09 11:52:26 -07005120 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
5121 mParameters);
5122 if (rc < 0) {
5123 LOGE("set_parms failed for hal version, stream info");
5124 }
5125
Chien-Yu Chen605c3872017-06-14 11:09:23 -07005126 cam_sensor_mode_info_t sensorModeInfo = {};
5127 rc = getSensorModeInfo(sensorModeInfo);
Thierry Strudel3d639192016-09-09 11:52:26 -07005128 if (rc != NO_ERROR) {
5129         LOGE("Failed to get sensor mode info");
5130 pthread_mutex_unlock(&mMutex);
5131 goto error_exit;
5132 }
5133
5134 mCropRegionMapper.update(gCamCapability[mCameraId]->active_array_size.width,
5135 gCamCapability[mCameraId]->active_array_size.height,
Chien-Yu Chen605c3872017-06-14 11:09:23 -07005136 sensorModeInfo.active_array_size.width,
5137 sensorModeInfo.active_array_size.height);
Thierry Strudel3d639192016-09-09 11:52:26 -07005138
5139 /* Set batchmode before initializing channel. Since registerBuffer
5140      * internally initializes some of the channels, it is better to set
5141      * batchmode even before the first registerBuffer call */
5142 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5143 it != mStreamInfo.end(); it++) {
5144 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
5145 if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
5146 && mBatchSize) {
5147 rc = channel->setBatchSize(mBatchSize);
5148 //Disable per frame map unmap for HFR/batchmode case
5149 rc |= channel->setPerFrameMapUnmap(false);
5150 if (NO_ERROR != rc) {
5151 LOGE("Channel init failed %d", rc);
5152 pthread_mutex_unlock(&mMutex);
5153 goto error_exit;
5154 }
5155 }
5156 }
5157
5158 //First initialize all streams
5159 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5160 it != mStreamInfo.end(); it++) {
5161 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
Emilian Peev49c4c6b2017-04-24 10:21:34 +01005162
5163 /* Initial value of NR mode is needed before stream on */
5164 channel->setNRMode(nrMode);
Thierry Strudel3d639192016-09-09 11:52:26 -07005165 if ((((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) ||
5166 ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask())) &&
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005167 setEis) {
5168 for (size_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
5169 if ( (1U << mStreamConfigInfo.type[i]) == channel->getStreamTypeMask() ) {
5170 is_type = mStreamConfigInfo.is_type[i];
5171 break;
5172 }
5173 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005174 rc = channel->initialize(is_type);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005175 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07005176 rc = channel->initialize(IS_TYPE_NONE);
5177 }
5178 if (NO_ERROR != rc) {
5179 LOGE("Channel initialization failed %d", rc);
5180 pthread_mutex_unlock(&mMutex);
5181 goto error_exit;
5182 }
5183 }
5184
5185 if (mRawDumpChannel) {
5186 rc = mRawDumpChannel->initialize(IS_TYPE_NONE);
5187 if (rc != NO_ERROR) {
5188 LOGE("Error: Raw Dump Channel init failed");
5189 pthread_mutex_unlock(&mMutex);
5190 goto error_exit;
5191 }
5192 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07005193 if (mHdrPlusRawSrcChannel) {
5194 rc = mHdrPlusRawSrcChannel->initialize(IS_TYPE_NONE);
5195 if (rc != NO_ERROR) {
5196 LOGE("Error: HDR+ RAW Source Channel init failed");
5197 pthread_mutex_unlock(&mMutex);
5198 goto error_exit;
5199 }
5200 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005201 if (mSupportChannel) {
5202 rc = mSupportChannel->initialize(IS_TYPE_NONE);
5203 if (rc < 0) {
5204 LOGE("Support channel initialization failed");
5205 pthread_mutex_unlock(&mMutex);
5206 goto error_exit;
5207 }
5208 }
5209 if (mAnalysisChannel) {
5210 rc = mAnalysisChannel->initialize(IS_TYPE_NONE);
5211 if (rc < 0) {
5212 LOGE("Analysis channel initialization failed");
5213 pthread_mutex_unlock(&mMutex);
5214 goto error_exit;
5215 }
5216 }
5217 if (mDummyBatchChannel) {
5218 rc = mDummyBatchChannel->setBatchSize(mBatchSize);
5219 if (rc < 0) {
5220 LOGE("mDummyBatchChannel setBatchSize failed");
5221 pthread_mutex_unlock(&mMutex);
5222 goto error_exit;
5223 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005224 rc = mDummyBatchChannel->initialize(IS_TYPE_NONE);
Thierry Strudel3d639192016-09-09 11:52:26 -07005225 if (rc < 0) {
5226 LOGE("mDummyBatchChannel initialization failed");
5227 pthread_mutex_unlock(&mMutex);
5228 goto error_exit;
5229 }
5230 }
5231
5232 // Set bundle info
5233 rc = setBundleInfo();
5234 if (rc < 0) {
5235 LOGE("setBundleInfo failed %d", rc);
5236 pthread_mutex_unlock(&mMutex);
5237 goto error_exit;
5238 }
5239
5240 //update settings from app here
5241 if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
5242 mIsDeviceLinked = meta.find(QCAMERA3_DUALCAM_LINK_ENABLE).data.u8[0];
5243 LOGH("Dualcam: setting On=%d id =%d", mIsDeviceLinked, mCameraId);
5244 }
5245 if (meta.exists(QCAMERA3_DUALCAM_LINK_IS_MAIN)) {
5246 mIsMainCamera = meta.find(QCAMERA3_DUALCAM_LINK_IS_MAIN).data.u8[0];
5247 LOGH("Dualcam: Is this main camera = %d id =%d", mIsMainCamera, mCameraId);
5248 }
5249 if (meta.exists(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID)) {
5250 mLinkedCameraId = meta.find(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID).data.u8[0];
5251 LOGH("Dualcam: Linked camera Id %d id =%d", mLinkedCameraId, mCameraId);
5252
5253 if ( (mLinkedCameraId >= MM_CAMERA_MAX_NUM_SENSORS) &&
5254 (mLinkedCameraId != mCameraId) ) {
5255 LOGE("Dualcam: mLinkedCameraId %d is invalid, current cam id = %d",
5256 mLinkedCameraId, mCameraId);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005257 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005258 goto error_exit;
5259 }
5260 }
5261
5262 // add bundle related cameras
5263 LOGH("%s: Dualcam: id =%d, mIsDeviceLinked=%d", __func__,mCameraId, mIsDeviceLinked);
5264 if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005265 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
5266 &m_pDualCamCmdPtr->bundle_info;
5267 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
Thierry Strudel3d639192016-09-09 11:52:26 -07005268 if (mIsDeviceLinked)
5269 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_ON;
5270 else
5271 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
5272
5273 pthread_mutex_lock(&gCamLock);
5274
5275 if (sessionId[mLinkedCameraId] == 0xDEADBEEF) {
5276 LOGE("Dualcam: Invalid Session Id ");
5277 pthread_mutex_unlock(&gCamLock);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005278 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005279 goto error_exit;
5280 }
5281
5282 if (mIsMainCamera == 1) {
5283 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
5284 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
Thierry Strudel269c81a2016-10-12 12:13:59 -07005285 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005286 m_pRelCamSyncBuf->cam_role = CAM_ROLE_BAYER;
Thierry Strudel3d639192016-09-09 11:52:26 -07005287 // related session id should be session id of linked session
5288 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5289 } else {
5290 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
5291 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
Thierry Strudel269c81a2016-10-12 12:13:59 -07005292 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005293 m_pRelCamSyncBuf->cam_role = CAM_ROLE_MONO;
Thierry Strudel3d639192016-09-09 11:52:26 -07005294 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5295 }
Thierry Strudel2896d122017-02-23 19:18:03 -08005296 m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
Thierry Strudel3d639192016-09-09 11:52:26 -07005297 pthread_mutex_unlock(&gCamLock);
5298
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005299 rc = mCameraHandle->ops->set_dual_cam_cmd(
5300 mCameraHandle->camera_handle);
Thierry Strudel3d639192016-09-09 11:52:26 -07005301 if (rc < 0) {
5302 LOGE("Dualcam: link failed");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005303 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005304 goto error_exit;
5305 }
5306 }
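        // First-request stream/session configuration succeeded; skip the error
        // path below and finish the one-time setup.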
Thierry Strudel3d639192016-09-09 11:52:26 -07005307 goto no_error;
5308error_exit:
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005309 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07005310 return rc;
5311no_error:
Thierry Strudel3d639192016-09-09 11:52:26 -07005312 mWokenUpByDaemon = false;
5313 mPendingLiveRequest = 0;
5314 mFirstConfiguration = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07005315 }
5316
5317 uint32_t frameNumber = request->frame_number;
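    // Set of stream IDs (and the buffer indices chosen for them) requested in
    // this capture; sent to the backend later as CAM_INTF_META_STREAM_ID.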
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005318 cam_stream_ID_t streamsArray;
Thierry Strudel3d639192016-09-09 11:52:26 -07005319
5320 if (mFlushPerf) {
5321 //we cannot accept any requests during flush
5322 LOGE("process_capture_request cannot proceed during flush");
5323 pthread_mutex_unlock(&mMutex);
5324 return NO_ERROR; //should return an error
5325 }
5326
5327 if (meta.exists(ANDROID_REQUEST_ID)) {
5328 request_id = meta.find(ANDROID_REQUEST_ID).data.i32[0];
5329 mCurrentRequestId = request_id;
5330 LOGD("Received request with id: %d", request_id);
5331 } else if (mState == CONFIGURED || mCurrentRequestId == -1){
5332         LOGE("Unable to find request id field, "
5333                 "& no previous id available");
5334 pthread_mutex_unlock(&mMutex);
5335 return NAME_NOT_FOUND;
5336 } else {
5337 LOGD("Re-using old request id");
5338 request_id = mCurrentRequestId;
5339 }
5340
5341 LOGH("num_output_buffers = %d input_buffer = %p frame_number = %d",
5342 request->num_output_buffers,
5343 request->input_buffer,
5344 frameNumber);
5345 // Acquire all request buffers first
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005346 streamsArray.num_streams = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07005347 int blob_request = 0;
Emilian Peev7650c122017-01-19 08:24:33 -08005348 bool depthRequestPresent = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07005349 uint32_t snapshotStreamId = 0;
5350 for (size_t i = 0; i < request->num_output_buffers; i++) {
5351 const camera3_stream_buffer_t& output = request->output_buffers[i];
5352 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
5353
Emilian Peev7650c122017-01-19 08:24:33 -08005354 if ((output.stream->format == HAL_PIXEL_FORMAT_BLOB) &&
5355 (output.stream->data_space != HAL_DATASPACE_DEPTH)) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005356 //FIXME??:Call function to store local copy of jpeg data for encode params.
Thierry Strudel3d639192016-09-09 11:52:26 -07005357 blob_request = 1;
5358 snapshotStreamId = channel->getStreamID(channel->getStreamTypeMask());
5359 }
5360
5361 if (output.acquire_fence != -1) {
5362 rc = sync_wait(output.acquire_fence, TIMEOUT_NEVER);
5363 close(output.acquire_fence);
5364 if (rc != OK) {
5365 LOGE("sync wait failed %d", rc);
5366 pthread_mutex_unlock(&mMutex);
5367 return rc;
5368 }
5369 }
5370
Emilian Peev0f3c3162017-03-15 12:57:46 +00005371 if ((output.stream->format == HAL_PIXEL_FORMAT_BLOB) &&
5372 (output.stream->data_space == HAL_DATASPACE_DEPTH)) {
Emilian Peev7650c122017-01-19 08:24:33 -08005373 depthRequestPresent = true;
5374 continue;
5375 }
5376
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005377 streamsArray.stream_request[streamsArray.num_streams++].streamID =
Thierry Strudel3d639192016-09-09 11:52:26 -07005378 channel->getStreamID(channel->getStreamTypeMask());
Thierry Strudel3d639192016-09-09 11:52:26 -07005379
5380 if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
5381 isVidBufRequested = true;
5382 }
5383 }
5384
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005385     //FIXME: Add checks to ensure no dups in validateCaptureRequest
5386 for (auto itr = internallyRequestedStreams.begin(); itr != internallyRequestedStreams.end();
5387 itr++) {
5388 QCamera3Channel *channel = (QCamera3Channel *)(*itr).stream->priv;
5389 streamsArray.stream_request[streamsArray.num_streams++].streamID =
5390 channel->getStreamID(channel->getStreamTypeMask());
5391
5392 if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
5393 isVidBufRequested = true;
5394 }
5395 }
5396
Thierry Strudel3d639192016-09-09 11:52:26 -07005397 if (blob_request) {
Shuzhen Wang850a7c22017-05-02 14:48:23 -07005398 ATRACE_ASYNC_BEGIN("SNAPSHOT", frameNumber);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005399 mPerfLockMgr.acquirePerfLock(PERF_LOCK_TAKE_SNAPSHOT);
Thierry Strudel3d639192016-09-09 11:52:26 -07005400 }
5401 if (blob_request && mRawDumpChannel) {
5402 LOGD("Trigger Raw based on blob request if Raw dump is enabled");
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005403 streamsArray.stream_request[streamsArray.num_streams].streamID =
Thierry Strudel3d639192016-09-09 11:52:26 -07005404 mRawDumpChannel->getStreamID(mRawDumpChannel->getStreamTypeMask());
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005405 streamsArray.stream_request[streamsArray.num_streams++].buf_index = CAM_FREERUN_IDX;
Thierry Strudel3d639192016-09-09 11:52:26 -07005406 }
5407
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005408 {
5409 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5410 // Request a RAW buffer if
5411 // 1. mHdrPlusRawSrcChannel is valid.
5412 // 2. frameNumber is multiples of kHdrPlusRawPeriod (in order to limit RAW capture rate.)
5413 // 3. There is no pending HDR+ request.
5414 if (mHdrPlusRawSrcChannel && frameNumber % kHdrPlusRawPeriod == 0 &&
5415 mHdrPlusPendingRequests.size() == 0) {
5416 streamsArray.stream_request[streamsArray.num_streams].streamID =
5417 mHdrPlusRawSrcChannel->getStreamID(mHdrPlusRawSrcChannel->getStreamTypeMask());
5418 streamsArray.stream_request[streamsArray.num_streams++].buf_index = CAM_FREERUN_IDX;
5419 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07005420 }
5421
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005422 //extract capture intent
5423 if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
5424 mCaptureIntent =
5425 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
5426 }
5427
5428 if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
5429 mCacMode =
5430 meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
5431 }
5432
Chien-Yu Chenbc730232017-07-12 14:49:55 -07005433 uint8_t requestedLensShadingMapMode;
5434 // Get the shading map mode.
5435 if (meta.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
5436 mLastRequestedLensShadingMapMode = requestedLensShadingMapMode =
5437 meta.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
5438 } else {
5439 requestedLensShadingMapMode = mLastRequestedLensShadingMapMode;
5440 }
5441
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005442 bool hdrPlusRequest = false;
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005443 HdrPlusPendingRequest pendingHdrPlusRequest = {};
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005444
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07005445 {
Chien-Yu Chen77ccd022017-06-23 12:00:36 -07005446 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07005447 // If this request has a still capture intent, try to submit an HDR+ request.
5448 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled &&
5449 mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE) {
5450 hdrPlusRequest = trySubmittingHdrPlusRequestLocked(&pendingHdrPlusRequest, *request, meta);
5451 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005452 }
5453
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005454 if (hdrPlusRequest) {
5455 // For a HDR+ request, just set the frame parameters.
5456 rc = setFrameParameters(request, streamsArray, blob_request, snapshotStreamId);
5457 if (rc < 0) {
5458 LOGE("fail to set frame parameters");
5459 pthread_mutex_unlock(&mMutex);
5460 return rc;
5461 }
5462 } else if(request->input_buffer == NULL) {
Thierry Strudel3d639192016-09-09 11:52:26 -07005463 /* Parse the settings:
5464 * - For every request in NORMAL MODE
5465 * - For every request in HFR mode during preview only case
5466 * - For first request of every batch in HFR mode during video
5467      * recording. In batchmode the same settings, except the frame number,
5468      * are repeated in each request of the batch.
5469 */
5470 if (!mBatchSize ||
5471 (mBatchSize && !isVidBufRequested) ||
5472 (mBatchSize && isVidBufRequested && !mToBeQueuedVidBufs)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005473 rc = setFrameParameters(request, streamsArray, blob_request, snapshotStreamId);
Thierry Strudel3d639192016-09-09 11:52:26 -07005474 if (rc < 0) {
5475 LOGE("fail to set frame parameters");
5476 pthread_mutex_unlock(&mMutex);
5477 return rc;
5478 }
Chien-Yu Chenbc730232017-07-12 14:49:55 -07005479
5480 {
5481 // If HDR+ mode is enabled, override lens shading mode to ON so lens shading map
5482 // will be reported in result metadata.
5483 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
5484 if (mHdrPlusModeEnabled) {
5485 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_LENS_SHADING_MAP_MODE,
5486 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON);
5487 }
5488 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005489 }
5490 /* For batchMode HFR, setFrameParameters is not called for every
5491 * request. But only frame number of the latest request is parsed.
5492 * Keep track of first and last frame numbers in a batch so that
5493 * metadata for the frame numbers of batch can be duplicated in
5494      * handleBatchMetadata */
5495 if (mBatchSize) {
5496 if (!mToBeQueuedVidBufs) {
5497 //start of the batch
5498 mFirstFrameNumberInBatch = request->frame_number;
5499 }
5500 if(ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5501 CAM_INTF_META_FRAME_NUMBER, request->frame_number)) {
5502 LOGE("Failed to set the frame number in the parameters");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005503 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005504 return BAD_VALUE;
5505 }
5506 }
5507 if (mNeedSensorRestart) {
5508 /* Unlock the mutex as restartSensor waits on the channels to be
5509 * stopped, which in turn calls stream callback functions -
5510 * handleBufferWithLock and handleMetadataWithLock */
5511 pthread_mutex_unlock(&mMutex);
5512 rc = dynamicUpdateMetaStreamInfo();
5513 if (rc != NO_ERROR) {
5514 LOGE("Restarting the sensor failed");
5515 return BAD_VALUE;
5516 }
5517 mNeedSensorRestart = false;
5518 pthread_mutex_lock(&mMutex);
5519 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005520 if(mResetInstantAEC) {
5521 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5522 CAM_INTF_PARM_INSTANT_AEC, (uint8_t)CAM_AEC_NORMAL_CONVERGENCE);
5523 mResetInstantAEC = false;
5524 }
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005525 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07005526 if (request->input_buffer->acquire_fence != -1) {
5527 rc = sync_wait(request->input_buffer->acquire_fence, TIMEOUT_NEVER);
5528 close(request->input_buffer->acquire_fence);
5529 if (rc != OK) {
5530 LOGE("input buffer sync wait failed %d", rc);
5531 pthread_mutex_unlock(&mMutex);
5532 return rc;
5533 }
5534 }
5535 }
5536
5537 if (mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM) {
5538 mLastCustIntentFrmNum = frameNumber;
5539 }
5540 /* Update pending request list and pending buffers map */
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005541 PendingRequestInfo pendingRequest = {};
Thierry Strudel3d639192016-09-09 11:52:26 -07005542 pendingRequestIterator latestRequest;
5543 pendingRequest.frame_number = frameNumber;
Emilian Peev7650c122017-01-19 08:24:33 -08005544 pendingRequest.num_buffers = depthRequestPresent ?
5545 (request->num_output_buffers - 1 ) : request->num_output_buffers;
Thierry Strudel3d639192016-09-09 11:52:26 -07005546 pendingRequest.request_id = request_id;
5547 pendingRequest.blob_request = blob_request;
5548 pendingRequest.timestamp = 0;
Chien-Yu Chenbc730232017-07-12 14:49:55 -07005549 pendingRequest.requestedLensShadingMapMode = requestedLensShadingMapMode;
Thierry Strudel3d639192016-09-09 11:52:26 -07005550 if (request->input_buffer) {
5551 pendingRequest.input_buffer =
5552 (camera3_stream_buffer_t*)malloc(sizeof(camera3_stream_buffer_t));
5553 *(pendingRequest.input_buffer) = *(request->input_buffer);
5554 pInputBuffer = pendingRequest.input_buffer;
5555 } else {
5556 pendingRequest.input_buffer = NULL;
5557 pInputBuffer = NULL;
5558 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005559 pendingRequest.bUseFirstPartial = (mState == CONFIGURED && !request->input_buffer);
Thierry Strudel3d639192016-09-09 11:52:26 -07005560
5561 pendingRequest.pipeline_depth = 0;
5562 pendingRequest.partial_result_cnt = 0;
5563 extractJpegMetadata(mCurJpegMeta, request);
5564 pendingRequest.jpegMetadata = mCurJpegMeta;
5565 pendingRequest.settings = saveRequestSettings(mCurJpegMeta, request);
Thierry Strudel3d639192016-09-09 11:52:26 -07005566 pendingRequest.capture_intent = mCaptureIntent;
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07005567 if (meta.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
Shuzhen Wang77b049a2017-08-30 12:24:36 -07005568 pendingRequest.hybrid_ae_enable =
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07005569 meta.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8[0];
5570 }
Chien-Yu Chen98b126c2017-03-14 14:55:32 -07005571
Samuel Ha68ba5172016-12-15 18:41:12 -08005572 /* DevCamDebug metadata processCaptureRequest */
5573 if (meta.exists(DEVCAMDEBUG_META_ENABLE)) {
5574 mDevCamDebugMetaEnable =
5575 meta.find(DEVCAMDEBUG_META_ENABLE).data.u8[0];
5576 }
5577 pendingRequest.DevCamDebug_meta_enable = mDevCamDebugMetaEnable;
5578 /* DevCamDebug metadata end */
Thierry Strudel3d639192016-09-09 11:52:26 -07005579
5580 //extract CAC info
5581 if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
5582 mCacMode =
5583 meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
5584 }
5585 pendingRequest.fwkCacMode = mCacMode;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005586 pendingRequest.hdrplus = hdrPlusRequest;
Emilian Peev30522a12017-08-03 14:36:33 +01005587 pendingRequest.expectedFrameDuration = mExpectedFrameDuration;
5588 mExpectedInflightDuration += mExpectedFrameDuration;
Thierry Strudel3d639192016-09-09 11:52:26 -07005589
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07005590 // extract enableZsl info
5591 if (gExposeEnableZslKey) {
5592 if (meta.exists(ANDROID_CONTROL_ENABLE_ZSL)) {
5593 pendingRequest.enableZsl = meta.find(ANDROID_CONTROL_ENABLE_ZSL).data.u8[0];
5594 mZslEnabled = pendingRequest.enableZsl;
5595 } else {
5596 pendingRequest.enableZsl = mZslEnabled;
5597 }
5598 }
5599
Thierry Strudel3d639192016-09-09 11:52:26 -07005600 PendingBuffersInRequest bufsForCurRequest;
5601 bufsForCurRequest.frame_number = frameNumber;
5602 // Mark current timestamp for the new request
5603 bufsForCurRequest.timestamp = systemTime(CLOCK_MONOTONIC);
Binhao Lin09245482017-08-31 18:25:29 -07005604 bufsForCurRequest.av_timestamp = 0;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005605 bufsForCurRequest.hdrplus = hdrPlusRequest;
Thierry Strudel3d639192016-09-09 11:52:26 -07005606
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005607 if (hdrPlusRequest) {
5608 // Save settings for this request.
5609 pendingHdrPlusRequest.settings = std::make_shared<metadata_buffer_t>();
5610 memcpy(pendingHdrPlusRequest.settings.get(), mParameters, sizeof(metadata_buffer_t));
5611
5612 // Add to pending HDR+ request queue.
5613 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5614 mHdrPlusPendingRequests.emplace(frameNumber, pendingHdrPlusRequest);
5615
5616 ALOGD("%s: frame number %u is an HDR+ request.", __FUNCTION__, frameNumber);
5617 }
5618
Thierry Strudel3d639192016-09-09 11:52:26 -07005619 for (size_t i = 0; i < request->num_output_buffers; i++) {
Emilian Peev0f3c3162017-03-15 12:57:46 +00005620 if ((request->output_buffers[i].stream->data_space ==
5621 HAL_DATASPACE_DEPTH) &&
5622 (HAL_PIXEL_FORMAT_BLOB ==
5623 request->output_buffers[i].stream->format)) {
Emilian Peev7650c122017-01-19 08:24:33 -08005624 continue;
5625 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005626 RequestedBufferInfo requestedBuf;
5627 memset(&requestedBuf, 0, sizeof(requestedBuf));
5628 requestedBuf.stream = request->output_buffers[i].stream;
5629 requestedBuf.buffer = NULL;
5630 pendingRequest.buffers.push_back(requestedBuf);
5631
5632 // Add to buffer handle the pending buffers list
5633 PendingBufferInfo bufferInfo;
5634 bufferInfo.buffer = request->output_buffers[i].buffer;
5635 bufferInfo.stream = request->output_buffers[i].stream;
5636 bufsForCurRequest.mPendingBufferList.push_back(bufferInfo);
5637 QCamera3Channel *channel = (QCamera3Channel *)bufferInfo.stream->priv;
5638 LOGD("frame = %d, buffer = %p, streamTypeMask = %d, stream format = %d",
5639 frameNumber, bufferInfo.buffer,
5640 channel->getStreamTypeMask(), bufferInfo.stream->format);
5641 }
5642 // Add this request packet into mPendingBuffersMap
5643 mPendingBuffersMap.mPendingBuffersInRequest.push_back(bufsForCurRequest);
5644 LOGD("mPendingBuffersMap.num_overall_buffers = %d",
5645 mPendingBuffersMap.get_num_overall_buffers());
5646
5647 latestRequest = mPendingRequestsList.insert(
5648 mPendingRequestsList.end(), pendingRequest);
Chien-Yu Chen3f303522017-05-19 15:21:45 -07005649
5650 // Let shutter dispatcher and buffer dispatcher know shutter and output buffers are expected
5651 // for the frame number.
Chien-Yu Chena7f98612017-06-20 16:54:10 -07005652 mShutterDispatcher.expectShutter(frameNumber, request->input_buffer != nullptr);
Chien-Yu Chen3f303522017-05-19 15:21:45 -07005653 for (size_t i = 0; i < request->num_output_buffers; i++) {
5654 mOutputBufferDispatcher.expectBuffer(frameNumber, request->output_buffers[i].stream);
5655 }
5656
Thierry Strudel3d639192016-09-09 11:52:26 -07005657 if(mFlush) {
5658 LOGI("mFlush is true");
5659 pthread_mutex_unlock(&mMutex);
5660 return NO_ERROR;
5661 }
5662
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005663 // If this is not an HDR+ request, send the request to metadata and each output buffer's
5664 // channel.
5665 if (!hdrPlusRequest) {
5666 int indexUsed;
5667 // Notify metadata channel we receive a request
5668 mMetadataChannel->request(NULL, frameNumber, indexUsed);
Thierry Strudel3d639192016-09-09 11:52:26 -07005669
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005670 if(request->input_buffer != NULL){
5671 LOGD("Input request, frame_number %d", frameNumber);
5672 rc = setReprocParameters(request, &mReprocMeta, snapshotStreamId);
5673 if (NO_ERROR != rc) {
5674 LOGE("fail to set reproc parameters");
5675 pthread_mutex_unlock(&mMutex);
5676 return rc;
5677 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005678 }
5679
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005680 // Call request on other streams
5681 uint32_t streams_need_metadata = 0;
5682 pendingBufferIterator pendingBufferIter = latestRequest->buffers.begin();
5683 for (size_t i = 0; i < request->num_output_buffers; i++) {
5684 const camera3_stream_buffer_t& output = request->output_buffers[i];
5685 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
5686
5687 if (channel == NULL) {
5688 LOGW("invalid channel pointer for stream");
5689 continue;
5690 }
5691
5692 if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
5693 LOGD("snapshot request with output buffer %p, input buffer %p, frame_number %d",
5694 output.buffer, request->input_buffer, frameNumber);
5695 if(request->input_buffer != NULL){
Thierry Strudel3d639192016-09-09 11:52:26 -07005696 rc = channel->request(output.buffer, frameNumber,
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005697 pInputBuffer, &mReprocMeta, indexUsed, false, false);
5698 if (rc < 0) {
5699 LOGE("Fail to request on picture channel");
5700 pthread_mutex_unlock(&mMutex);
5701 return rc;
5702 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005703 } else {
Emilian Peev7650c122017-01-19 08:24:33 -08005704 if (HAL_DATASPACE_DEPTH == output.stream->data_space) {
5705 assert(NULL != mDepthChannel);
5706 assert(mDepthChannel == output.stream->priv);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005707
Emilian Peev7650c122017-01-19 08:24:33 -08005708 rc = mDepthChannel->mapBuffer(output.buffer, request->frame_number);
5709 if (rc < 0) {
5710 LOGE("Fail to map on depth buffer");
5711 pthread_mutex_unlock(&mMutex);
5712 return rc;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005713 }
Emilian Peev4e0fe952017-06-30 12:40:09 -07005714 continue;
Emilian Peev7650c122017-01-19 08:24:33 -08005715 } else {
5716 LOGD("snapshot request with buffer %p, frame_number %d",
5717 output.buffer, frameNumber);
5718 if (!request->settings) {
5719 rc = channel->request(output.buffer, frameNumber,
5720 NULL, mPrevParameters, indexUsed);
5721 } else {
5722 rc = channel->request(output.buffer, frameNumber,
5723 NULL, mParameters, indexUsed);
5724 }
5725 if (rc < 0) {
5726 LOGE("Fail to request on picture channel");
5727 pthread_mutex_unlock(&mMutex);
5728 return rc;
5729 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005730
Emilian Peev7650c122017-01-19 08:24:33 -08005731 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5732 uint32_t j = 0;
5733 for (j = 0; j < streamsArray.num_streams; j++) {
5734 if (streamsArray.stream_request[j].streamID == streamId) {
5735 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5736 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5737 else
5738 streamsArray.stream_request[j].buf_index = indexUsed;
5739 break;
5740 }
5741 }
5742 if (j == streamsArray.num_streams) {
5743 LOGE("Did not find matching stream to update index");
5744 assert(0);
5745 }
5746
5747 pendingBufferIter->need_metadata = true;
5748 streams_need_metadata++;
5749 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005750 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005751 } else if (output.stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
5752 bool needMetadata = false;
5753 QCamera3YUVChannel *yuvChannel = (QCamera3YUVChannel *)channel;
5754 rc = yuvChannel->request(output.buffer, frameNumber,
5755 pInputBuffer, (pInputBuffer ? &mReprocMeta : mParameters),
5756 needMetadata, indexUsed, false, false);
Thierry Strudel3d639192016-09-09 11:52:26 -07005757 if (rc < 0) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005758 LOGE("Fail to request on YUV channel");
Thierry Strudel3d639192016-09-09 11:52:26 -07005759 pthread_mutex_unlock(&mMutex);
5760 return rc;
5761 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005762
5763 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5764 uint32_t j = 0;
5765 for (j = 0; j < streamsArray.num_streams; j++) {
5766 if (streamsArray.stream_request[j].streamID == streamId) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005767 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5768 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5769 else
5770 streamsArray.stream_request[j].buf_index = indexUsed;
5771 break;
5772 }
5773 }
5774 if (j == streamsArray.num_streams) {
5775 LOGE("Did not find matching stream to update index");
5776 assert(0);
5777 }
5778
5779 pendingBufferIter->need_metadata = needMetadata;
5780 if (needMetadata)
5781 streams_need_metadata += 1;
5782 LOGD("calling YUV channel request, need_metadata is %d",
5783 needMetadata);
5784 } else {
5785 LOGD("request with buffer %p, frame_number %d",
5786 output.buffer, frameNumber);
5787
5788 rc = channel->request(output.buffer, frameNumber, indexUsed);
5789
5790 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5791 uint32_t j = 0;
5792 for (j = 0; j < streamsArray.num_streams; j++) {
5793 if (streamsArray.stream_request[j].streamID == streamId) {
5794 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5795 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5796 else
5797 streamsArray.stream_request[j].buf_index = indexUsed;
5798 break;
5799 }
5800 }
5801 if (j == streamsArray.num_streams) {
5802 LOGE("Did not find matching stream to update index");
5803 assert(0);
5804 }
5805
5806 if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
5807 && mBatchSize) {
5808 mToBeQueuedVidBufs++;
5809 if (mToBeQueuedVidBufs == mBatchSize) {
5810 channel->queueBatchBuf();
5811 }
5812 }
5813 if (rc < 0) {
5814 LOGE("request failed");
5815 pthread_mutex_unlock(&mMutex);
5816 return rc;
5817 }
5818 }
5819 pendingBufferIter++;
5820 }
5821
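        // Issue requests for streams the HAL requested internally (e.g.
        // metering-only captures), mirroring the handling of framework-requested
        // buffers above.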
5822 for (auto itr = internallyRequestedStreams.begin(); itr != internallyRequestedStreams.end();
5823 itr++) {
5824 QCamera3Channel *channel = (QCamera3Channel *)(*itr).stream->priv;
5825
5826 if (channel == NULL) {
5827 LOGE("invalid channel pointer for stream");
5828 assert(0);
Shuzhen Wang3a1b92d2017-08-09 13:39:47 -07005829 pthread_mutex_unlock(&mMutex);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005830 return BAD_VALUE;
5831 }
5832
5833 InternalRequest requestedStream;
5834 requestedStream = (*itr);
5835
5836
5837 if ((*itr).stream->format == HAL_PIXEL_FORMAT_BLOB) {
5838 LOGD("snapshot request internally input buffer %p, frame_number %d",
5839 request->input_buffer, frameNumber);
5840 if(request->input_buffer != NULL){
5841 rc = channel->request(NULL, frameNumber,
5842 pInputBuffer, &mReprocMeta, indexUsed, true,
5843 requestedStream.meteringOnly);
5844 if (rc < 0) {
5845 LOGE("Fail to request on picture channel");
5846 pthread_mutex_unlock(&mMutex);
5847 return rc;
5848 }
5849 } else {
5850 LOGD("snapshot request with frame_number %d", frameNumber);
5851 if (!request->settings) {
5852 rc = channel->request(NULL, frameNumber,
5853 NULL, mPrevParameters, indexUsed, true,
5854 requestedStream.meteringOnly);
5855 } else {
5856 rc = channel->request(NULL, frameNumber,
5857 NULL, mParameters, indexUsed, true, requestedStream.meteringOnly);
5858 }
5859 if (rc < 0) {
5860 LOGE("Fail to request on picture channel");
5861 pthread_mutex_unlock(&mMutex);
5862 return rc;
5863 }
5864
5865 if ((*itr).meteringOnly != 1) {
5866 requestedStream.need_metadata = 1;
5867 streams_need_metadata++;
5868 }
5869 }
5870
5871 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5872 uint32_t j = 0;
5873 for (j = 0; j < streamsArray.num_streams; j++) {
5874 if (streamsArray.stream_request[j].streamID == streamId) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005875 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5876 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5877 else
5878 streamsArray.stream_request[j].buf_index = indexUsed;
5879 break;
5880 }
5881 }
5882 if (j == streamsArray.num_streams) {
5883 LOGE("Did not find matching stream to update index");
5884 assert(0);
5885 }
5886
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005887 } else {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005888 LOGE("Internal requests not supported on this stream type");
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005889 assert(0);
Shuzhen Wang3a1b92d2017-08-09 13:39:47 -07005890 pthread_mutex_unlock(&mMutex);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005891 return INVALID_OPERATION;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005892 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005893 latestRequest->internalRequestList.push_back(requestedStream);
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005894 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005895
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005896 //If 2 streams have need_metadata set to true, fail the request, unless
5897 //we copy/reference count the metadata buffer
5898 if (streams_need_metadata > 1) {
5899             LOGE("not supporting request in which two streams require"
5900 " 2 HAL metadata for reprocessing");
5901 pthread_mutex_unlock(&mMutex);
5902 return -EINVAL;
5903 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005904
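    /* Decide how PD (phase-detection) data is delivered for this request: skip
     * it by default when a depth channel exists, and honor the per-request
     * PD_DATA_ENABLE setting (falling back to the last configured depth cloud
     * mode) when a depth buffer is actually requested. */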
Emilian Peev656e4fa2017-06-02 16:47:04 +01005905 cam_sensor_pd_data_t pdafEnable = (nullptr != mDepthChannel) ?
5906 CAM_PD_DATA_SKIP : CAM_PD_DATA_DISABLED;
5907 if (depthRequestPresent && mDepthChannel) {
5908 if (request->settings) {
5909 camera_metadata_ro_entry entry;
5910 if (find_camera_metadata_ro_entry(request->settings,
5911 NEXUS_EXPERIMENTAL_2017_PD_DATA_ENABLE, &entry) == 0) {
5912 if (entry.data.u8[0]) {
5913 pdafEnable = CAM_PD_DATA_ENABLED;
5914 } else {
5915 pdafEnable = CAM_PD_DATA_SKIP;
5916 }
5917 mDepthCloudMode = pdafEnable;
5918 } else {
5919 pdafEnable = mDepthCloudMode;
5920 }
5921 } else {
5922 pdafEnable = mDepthCloudMode;
5923 }
5924 }
5925
Emilian Peev7650c122017-01-19 08:24:33 -08005926 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5927 CAM_INTF_META_PDAF_DATA_ENABLE, pdafEnable)) {
5928 LOGE("%s: Failed to enable PDAF data in parameters!", __func__);
5929 pthread_mutex_unlock(&mMutex);
5930 return BAD_VALUE;
5931 }
Emilian Peev656e4fa2017-06-02 16:47:04 +01005932
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005933 if (request->input_buffer == NULL) {
5934 /* Set the parameters to backend:
5935 * - For every request in NORMAL MODE
5936 * - For every request in HFR mode during preview only case
5937 * - Once every batch in HFR mode during video recording
5938 */
5939 if (!mBatchSize ||
5940 (mBatchSize && !isVidBufRequested) ||
5941 (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize))) {
5942 LOGD("set_parms batchSz: %d IsVidBufReq: %d vidBufTobeQd: %d ",
5943 mBatchSize, isVidBufRequested,
5944 mToBeQueuedVidBufs);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005945
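            /* Merge this request's streams into mBatchedStreamsArray
             * (de-duplicated by streamID) so the batched set_parms below carries
             * the union of streams requested across the whole batch. */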
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005946 if(mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize)) {
5947 for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
5948 uint32_t m = 0;
5949 for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
5950 if (streamsArray.stream_request[k].streamID ==
5951 mBatchedStreamsArray.stream_request[m].streamID)
5952 break;
5953 }
5954 if (m == mBatchedStreamsArray.num_streams) {
5955                         mBatchedStreamsArray.stream_request
5956                                 [mBatchedStreamsArray.num_streams].streamID =
5957                                     streamsArray.stream_request[k].streamID;
5958                         mBatchedStreamsArray.stream_request
5959                                 [mBatchedStreamsArray.num_streams].buf_index =
5960                                     streamsArray.stream_request[k].buf_index;
5961 mBatchedStreamsArray.num_streams =
5962 mBatchedStreamsArray.num_streams + 1;
5963 }
5964 }
5965 streamsArray = mBatchedStreamsArray;
5966 }
5967 /* Update stream id of all the requested buffers */
5968 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID,
5969 streamsArray)) {
5970                 LOGE("Failed to set stream IDs in the parameters");
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005971 pthread_mutex_unlock(&mMutex);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005972 return BAD_VALUE;
5973 }
5974
5975 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
5976 mParameters);
5977 if (rc < 0) {
5978 LOGE("set_parms failed");
5979 }
5980 /* reset to zero coz, the batch is queued */
5981 mToBeQueuedVidBufs = 0;
5982 mPendingBatchMap.add(frameNumber, mFirstFrameNumberInBatch);
5983 memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
5984 } else if (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs != mBatchSize)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005985 for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
5986 uint32_t m = 0;
5987 for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
5988 if (streamsArray.stream_request[k].streamID ==
5989 mBatchedStreamsArray.stream_request[m].streamID)
5990 break;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005991 }
5992 if (m == mBatchedStreamsArray.num_streams) {
5993 mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].
5994 streamID = streamsArray.stream_request[k].streamID;
5995 mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].
5996 buf_index = streamsArray.stream_request[k].buf_index;
5997 mBatchedStreamsArray.num_streams = mBatchedStreamsArray.num_streams + 1;
5998 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005999 }
6000 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08006001 mPendingLiveRequest++;
Shuzhen Wang3c077d72017-04-20 22:48:59 -07006002
6003 // Start all streams after the first setting is sent, so that the
6004 // setting can be applied sooner: (0 + apply_delay)th frame.
6005 if (mState == CONFIGURED && mChannelHandle) {
6006 //Then start them.
6007 LOGH("Start META Channel");
6008 rc = mMetadataChannel->start();
6009 if (rc < 0) {
6010 LOGE("META channel start failed");
6011 pthread_mutex_unlock(&mMutex);
6012 return rc;
6013 }
6014
6015 if (mAnalysisChannel) {
6016 rc = mAnalysisChannel->start();
6017 if (rc < 0) {
6018 LOGE("Analysis channel start failed");
6019 mMetadataChannel->stop();
6020 pthread_mutex_unlock(&mMutex);
6021 return rc;
6022 }
6023 }
6024
6025 if (mSupportChannel) {
6026 rc = mSupportChannel->start();
6027 if (rc < 0) {
6028 LOGE("Support channel start failed");
6029 mMetadataChannel->stop();
6030 /* Although support and analysis are mutually exclusive today
6031                     adding it in any case for future proofing */
6032 if (mAnalysisChannel) {
6033 mAnalysisChannel->stop();
6034 }
6035 pthread_mutex_unlock(&mMutex);
6036 return rc;
6037 }
6038 }
6039 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6040 it != mStreamInfo.end(); it++) {
6041 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
6042 LOGH("Start Processing Channel mask=%d",
6043 channel->getStreamTypeMask());
6044 rc = channel->start();
6045 if (rc < 0) {
6046 LOGE("channel start failed");
6047 pthread_mutex_unlock(&mMutex);
6048 return rc;
6049 }
6050 }
6051
6052 if (mRawDumpChannel) {
6053 LOGD("Starting raw dump stream");
6054 rc = mRawDumpChannel->start();
6055 if (rc != NO_ERROR) {
6056 LOGE("Error Starting Raw Dump Channel");
6057 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6058 it != mStreamInfo.end(); it++) {
6059 QCamera3Channel *channel =
6060 (QCamera3Channel *)(*it)->stream->priv;
6061 LOGH("Stopping Processing Channel mask=%d",
6062 channel->getStreamTypeMask());
6063 channel->stop();
6064 }
6065 if (mSupportChannel)
6066 mSupportChannel->stop();
6067 if (mAnalysisChannel) {
6068 mAnalysisChannel->stop();
6069 }
6070 mMetadataChannel->stop();
6071 pthread_mutex_unlock(&mMutex);
6072 return rc;
6073 }
6074 }
6075
Chien-Yu Chence5b8662017-05-09 17:17:17 -07006076 // Configure modules for stream on.
Chien-Yu Chen153c5172017-09-08 11:33:19 -07006077 rc = startChannelLocked();
Shuzhen Wang3c077d72017-04-20 22:48:59 -07006078 if (rc != NO_ERROR) {
Chien-Yu Chen153c5172017-09-08 11:33:19 -07006079 LOGE("startChannelLocked failed %d", rc);
Chien-Yu Chence5b8662017-05-09 17:17:17 -07006080 pthread_mutex_unlock(&mMutex);
6081 return rc;
6082 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07006083 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006084 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006085 }
6086
Chien-Yu Chenfadf40e2017-09-15 14:33:57 -07006087 // Enable HDR+ mode for the first PREVIEW_INTENT request that doesn't disable HDR+.
Chenjie Luo4a761802017-06-13 17:35:54 +00006088 {
Chien-Yu Chen77ccd022017-06-23 12:00:36 -07006089 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
Chien-Yu Chend77a5462017-06-02 18:00:38 -07006090 if (gEaselManagerClient != nullptr && gEaselManagerClient->isEaselPresentOnDevice() &&
Chien-Yu Chenfe023bc2017-05-12 17:19:26 -07006091 !gEaselBypassOnly && !mFirstPreviewIntentSeen &&
6092 meta.exists(ANDROID_CONTROL_CAPTURE_INTENT) &&
6093 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0] ==
Chien-Yu Chenfadf40e2017-09-15 14:33:57 -07006094 ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW &&
6095 meta.exists(NEXUS_EXPERIMENTAL_2017_DISABLE_HDRPLUS) &&
6096 meta.find(NEXUS_EXPERIMENTAL_2017_DISABLE_HDRPLUS).data.i32[0] == 0) {
Chien-Yu Chendeaebad2017-06-30 11:46:34 -07006097
6098 if (isSessionHdrPlusModeCompatible()) {
6099 rc = enableHdrPlusModeLocked();
6100 if (rc != OK) {
6101 LOGE("%s: Failed to open HDR+ asynchronously", __FUNCTION__);
6102 pthread_mutex_unlock(&mMutex);
6103 return rc;
6104 }
Chien-Yu Chenfe023bc2017-05-12 17:19:26 -07006105 }
6106
6107 mFirstPreviewIntentSeen = true;
6108 }
6109 }
6110
Thierry Strudel3d639192016-09-09 11:52:26 -07006111 LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
6112
6113 mState = STARTED;
6114 // Added a timed condition wait
6115 struct timespec ts;
6116 uint8_t isValidTimeout = 1;
Shuzhen Wangfb961e52016-11-28 11:48:02 -08006117 rc = clock_gettime(CLOCK_MONOTONIC, &ts);
Thierry Strudel3d639192016-09-09 11:52:26 -07006118 if (rc < 0) {
6119 isValidTimeout = 0;
6120 LOGE("Error reading the real time clock!!");
6121 }
6122 else {
6123 // Make timeout as 5 sec for request to be honored
Chien-Yu Chene687bd02016-12-07 18:30:26 -08006124         // Use a 5 second timeout for the request to be honored
6125 {
6126 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
6127 // If there is a pending HDR+ request, the following requests may be blocked until the
6128 // HDR+ request is done. So allow a longer timeout.
6129 if (mHdrPlusPendingRequests.size() > 0) {
6130 timeout = MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT;
6131 }
6132 }
6133 ts.tv_sec += timeout;
Thierry Strudel3d639192016-09-09 11:52:26 -07006134 }
6135 //Block on conditional variable
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006136 while ((mPendingLiveRequest >= mMinInFlightRequests) && !pInputBuffer &&
Thierry Strudel3d639192016-09-09 11:52:26 -07006137 (mState != ERROR) && (mState != DEINIT)) {
6138 if (!isValidTimeout) {
6139 LOGD("Blocking on conditional wait");
6140 pthread_cond_wait(&mRequestCond, &mMutex);
6141 }
6142 else {
6143 LOGD("Blocking on timed conditional wait");
6144 rc = pthread_cond_timedwait(&mRequestCond, &mMutex, &ts);
6145 if (rc == ETIMEDOUT) {
6146 rc = -ENODEV;
6147 LOGE("Unblocked on timeout!!!!");
6148 break;
6149 }
6150 }
6151 LOGD("Unblocked");
6152 if (mWokenUpByDaemon) {
6153 mWokenUpByDaemon = false;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006154 if (mPendingLiveRequest < mMaxInFlightRequests)
Thierry Strudel3d639192016-09-09 11:52:26 -07006155 break;
6156 }
6157 }
6158 pthread_mutex_unlock(&mMutex);
6159
6160 return rc;
6161}
6162
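/*===========================================================================
 * FUNCTION   : startChannelLocked
 *
 * DESCRIPTION: Configures the backend channel for stream on (without starting
 *              sensor streaming), queries the selected sensor mode, starts
 *              Easel MIPI when an Easel manager client is open, and finally
 *              starts sensor streaming.
 *
 * PARAMETERS : None
 *
 * RETURN     : 0 on success
 *              Error code on failure
 *==========================================================================*/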
Chien-Yu Chen153c5172017-09-08 11:33:19 -07006163int32_t QCamera3HardwareInterface::startChannelLocked()
6164{
6165 // Configure modules for stream on.
6166 int32_t rc = mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
6167 mChannelHandle, /*start_sensor_streaming*/false);
6168 if (rc != NO_ERROR) {
6169 LOGE("start_channel failed %d", rc);
6170 return rc;
6171 }
6172
6173 {
6174 // Configure Easel for stream on.
6175 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
6176
6177 // Now that sensor mode should have been selected, get the selected sensor mode
6178 // info.
6179 memset(&mSensorModeInfo, 0, sizeof(mSensorModeInfo));
6180 getCurrentSensorModeInfo(mSensorModeInfo);
6181
6182 if (EaselManagerClientOpened) {
6183 logEaselEvent("EASEL_STARTUP_LATENCY", "Starting MIPI");
6184 rc = gEaselManagerClient->startMipi(mCameraId, mSensorModeInfo.op_pixel_clk,
6185 /*enableCapture*/true);
6186 if (rc != OK) {
6187 ALOGE("%s: Failed to start MIPI rate for camera %u to %u", __FUNCTION__,
6188 mCameraId, mSensorModeInfo.op_pixel_clk);
6189 return rc;
6190 }
6191 logEaselEvent("EASEL_STARTUP_LATENCY", "Starting MIPI done");
6192 mEaselMipiStarted = true;
6193 }
6194 }
6195
6196 // Start sensor streaming.
6197 rc = mCameraHandle->ops->start_sensor_streaming(mCameraHandle->camera_handle,
6198 mChannelHandle);
6199 if (rc != NO_ERROR) {
6200 LOGE("start_sensor_stream_on failed %d", rc);
6201 return rc;
6202 }
6203
6204 return 0;
6205}
6206
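/*===========================================================================
 * FUNCTION   : stopChannelLocked
 *
 * DESCRIPTION: Stops the backend channel and, if Easel MIPI was started for
 *              this camera, stops MIPI as well.
 *
 * PARAMETERS :
 *   @stopChannelImmediately : stop the channel immediately without waiting
 *                             for a frame boundary.
 *
 * RETURN     : None
 *==========================================================================*/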
6207void QCamera3HardwareInterface::stopChannelLocked(bool stopChannelImmediately)
6208{
6209 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
6210 mChannelHandle, stopChannelImmediately);
6211
6212 {
6213 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
6214 if (EaselManagerClientOpened && mEaselMipiStarted) {
6215 int32_t rc = gEaselManagerClient->stopMipi(mCameraId);
6216 if (rc != 0) {
6217 ALOGE("%s: Stopping MIPI failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
6218 }
6219 mEaselMipiStarted = false;
6220 }
6221 }
6222}
6223
Thierry Strudel3d639192016-09-09 11:52:26 -07006224/*===========================================================================
6225 * FUNCTION : dump
6226 *
6227 * DESCRIPTION:
6228 *
6229 * PARAMETERS :
6230 *
6231 *
6232 * RETURN :
6233 *==========================================================================*/
6234void QCamera3HardwareInterface::dump(int fd)
6235{
6236 pthread_mutex_lock(&mMutex);
6237 dprintf(fd, "\n Camera HAL3 information Begin \n");
6238
6239 dprintf(fd, "\nNumber of pending requests: %zu \n",
6240 mPendingRequestsList.size());
6241 dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
6242 dprintf(fd, " Frame | Number of Buffers | Req Id: | Blob Req | Input buffer present\n");
6243 dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
6244 for(pendingRequestIterator i = mPendingRequestsList.begin();
6245 i != mPendingRequestsList.end(); i++) {
6246 dprintf(fd, " %5d | %17d | %11d | %8d | %p \n",
6247 i->frame_number, i->num_buffers, i->request_id, i->blob_request,
6248 i->input_buffer);
6249 }
6250 dprintf(fd, "\nPending buffer map: Number of buffers: %u\n",
6251 mPendingBuffersMap.get_num_overall_buffers());
6252 dprintf(fd, "-------+------------------\n");
6253 dprintf(fd, " Frame | Stream type mask \n");
6254 dprintf(fd, "-------+------------------\n");
6255 for(auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
6256 for(auto &j : req.mPendingBufferList) {
6257 QCamera3Channel *channel = (QCamera3Channel *)(j.stream->priv);
6258 dprintf(fd, " %5d | %11d \n",
6259 req.frame_number, channel->getStreamTypeMask());
6260 }
6261 }
6262 dprintf(fd, "-------+------------------\n");
6263
6264 dprintf(fd, "\nPending frame drop list: %zu\n",
6265 mPendingFrameDropList.size());
6266 dprintf(fd, "-------+-----------\n");
6267 dprintf(fd, " Frame | Stream ID \n");
6268 dprintf(fd, "-------+-----------\n");
6269 for(List<PendingFrameDropInfo>::iterator i = mPendingFrameDropList.begin();
6270 i != mPendingFrameDropList.end(); i++) {
6271 dprintf(fd, " %5d | %9d \n",
6272 i->frame_number, i->stream_ID);
6273 }
6274 dprintf(fd, "-------+-----------\n");
6275
6276 dprintf(fd, "\n Camera HAL3 information End \n");
6277
6278 /* use dumpsys media.camera as trigger to send update debug level event */
6279 mUpdateDebugLevel = true;
6280 pthread_mutex_unlock(&mMutex);
6281 return;
6282}
6283
6284/*===========================================================================
6285 * FUNCTION : flush
6286 *
6287 * DESCRIPTION: Calls stopAllChannels, notifyErrorForPendingRequests and
6288 * conditionally restarts channels
6289 *
6290 * PARAMETERS :
6291 * @ restartChannels: re-start all channels
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -07006292 * @ stopChannelImmediately: stop the channel immediately. This should be used
6293 * when device encountered an error and MIPI may has
6294 * been stopped.
Thierry Strudel3d639192016-09-09 11:52:26 -07006295 *
6296 * RETURN :
6297 * 0 on success
6298 * Error code on failure
6299 *==========================================================================*/
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -07006300int QCamera3HardwareInterface::flush(bool restartChannels, bool stopChannelImmediately)
Thierry Strudel3d639192016-09-09 11:52:26 -07006301{
Thierry Strudel9ec39c62016-12-28 11:30:05 -08006302 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_STOP_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07006303 int32_t rc = NO_ERROR;
6304
6305 LOGD("Unblocking Process Capture Request");
6306 pthread_mutex_lock(&mMutex);
6307 mFlush = true;
6308 pthread_mutex_unlock(&mMutex);
6309
Chien-Yu Chen11c8edc2017-09-11 20:54:24 -07006310     // Disable HDR+ if it's enabled.
6311 {
6312 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
6313 finishHdrPlusClientOpeningLocked(l);
6314 disableHdrPlusModeLocked();
6315 }
6316
Thierry Strudel3d639192016-09-09 11:52:26 -07006317 rc = stopAllChannels();
6318 // unlink of dualcam
6319 if (mIsDeviceLinked) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006320 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
6321 &m_pDualCamCmdPtr->bundle_info;
6322 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
Thierry Strudel3d639192016-09-09 11:52:26 -07006323 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
6324 pthread_mutex_lock(&gCamLock);
6325
6326 if (mIsMainCamera == 1) {
6327 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
6328 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
Thierry Strudel269c81a2016-10-12 12:13:59 -07006329 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel3d639192016-09-09 11:52:26 -07006330 // related session id should be session id of linked session
6331 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
6332 } else {
6333 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
6334 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
Thierry Strudel269c81a2016-10-12 12:13:59 -07006335 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel3d639192016-09-09 11:52:26 -07006336 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
6337 }
Thierry Strudel2896d122017-02-23 19:18:03 -08006338 m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
Thierry Strudel3d639192016-09-09 11:52:26 -07006339 pthread_mutex_unlock(&gCamLock);
6340
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006341 rc = mCameraHandle->ops->set_dual_cam_cmd(
6342 mCameraHandle->camera_handle);
Thierry Strudel3d639192016-09-09 11:52:26 -07006343 if (rc < 0) {
6344 LOGE("Dualcam: Unlink failed, but still proceed to close");
6345 }
6346 }
6347
6348 if (rc < 0) {
6349 LOGE("stopAllChannels failed");
6350 return rc;
6351 }
6352 if (mChannelHandle) {
Chien-Yu Chen153c5172017-09-08 11:33:19 -07006353 stopChannelLocked(stopChannelImmediately);
Thierry Strudel3d639192016-09-09 11:52:26 -07006354 }
6355
6356 // Reset bundle info
6357 rc = setBundleInfo();
6358 if (rc < 0) {
6359 LOGE("setBundleInfo failed %d", rc);
6360 return rc;
6361 }
6362
6363 // Mutex Lock
6364 pthread_mutex_lock(&mMutex);
6365
6366 // Unblock process_capture_request
6367 mPendingLiveRequest = 0;
6368 pthread_cond_signal(&mRequestCond);
6369
6370 rc = notifyErrorForPendingRequests();
6371 if (rc < 0) {
6372 LOGE("notifyErrorForPendingRequests failed");
6373 pthread_mutex_unlock(&mMutex);
6374 return rc;
6375 }
6376
6377 mFlush = false;
6378
6379 // Start the Streams/Channels
6380 if (restartChannels) {
6381 rc = startAllChannels();
6382 if (rc < 0) {
6383 LOGE("startAllChannels failed");
6384 pthread_mutex_unlock(&mMutex);
6385 return rc;
6386 }
Thierry Strudel2896d122017-02-23 19:18:03 -08006387 if (mChannelHandle) {
Chien-Yu Chen153c5172017-09-08 11:33:19 -07006388 // Configure modules for stream on.
6389 rc = startChannelLocked();
Thierry Strudel2896d122017-02-23 19:18:03 -08006390 if (rc < 0) {
Chien-Yu Chen153c5172017-09-08 11:33:19 -07006391 LOGE("startChannelLocked failed");
Thierry Strudel2896d122017-02-23 19:18:03 -08006392 pthread_mutex_unlock(&mMutex);
6393 return rc;
6394 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006395 }
6396 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006397 pthread_mutex_unlock(&mMutex);
6398
6399 return 0;
6400}
6401
6402/*===========================================================================
6403 * FUNCTION : flushPerf
6404 *
6405 * DESCRIPTION: This is the performance optimization version of flush that does
6406 *              not use stream off; instead, it flushes the system
6407 *
6408 * PARAMETERS :
6409 *
6410 *
6411 * RETURN : 0 : success
6412 * -EINVAL: input is malformed (device is not valid)
6413 * -ENODEV: if the device has encountered a serious error
6414 *==========================================================================*/
6415int QCamera3HardwareInterface::flushPerf()
6416{
Thierry Strudel9ec39c62016-12-28 11:30:05 -08006417 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_STOP_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07006418 int32_t rc = 0;
6419 struct timespec timeout;
6420 bool timed_wait = false;
6421
6422 pthread_mutex_lock(&mMutex);
6423 mFlushPerf = true;
6424 mPendingBuffersMap.numPendingBufsAtFlush =
6425 mPendingBuffersMap.get_num_overall_buffers();
6426 LOGD("Calling flush. Wait for %d buffers to return",
6427 mPendingBuffersMap.numPendingBufsAtFlush);
6428
6429 /* send the flush event to the backend */
6430 rc = mCameraHandle->ops->flush(mCameraHandle->camera_handle);
6431 if (rc < 0) {
6432 LOGE("Error in flush: IOCTL failure");
6433 mFlushPerf = false;
6434 pthread_mutex_unlock(&mMutex);
6435 return -ENODEV;
6436 }
6437
6438 if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
6439 LOGD("No pending buffers in HAL, return flush");
6440 mFlushPerf = false;
6441 pthread_mutex_unlock(&mMutex);
6442 return rc;
6443 }
6444
6445 /* wait on a signal that buffers were received */
Shuzhen Wangfb961e52016-11-28 11:48:02 -08006446 rc = clock_gettime(CLOCK_MONOTONIC, &timeout);
Thierry Strudel3d639192016-09-09 11:52:26 -07006447 if (rc < 0) {
6448 LOGE("Error reading the monotonic clock, cannot use timed wait");
6449 } else {
6450 timeout.tv_sec += FLUSH_TIMEOUT;
6451 timed_wait = true;
6452 }
6453
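    // Note: the timed wait below assumes mBuffersCond was initialized with a
    // CLOCK_MONOTONIC clock attribute (presumably via cam_cond.h), so the absolute
    // timeout computed from CLOCK_MONOTONIC above is interpreted on the same clock.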
6454 //Block on conditional variable
6455 while (mPendingBuffersMap.numPendingBufsAtFlush != 0) {
6456 LOGD("Waiting on mBuffersCond");
6457 if (!timed_wait) {
6458 rc = pthread_cond_wait(&mBuffersCond, &mMutex);
6459 if (rc != 0) {
6460 LOGE("pthread_cond_wait failed due to rc = %s",
6461 strerror(rc));
6462 break;
6463 }
6464 } else {
6465 rc = pthread_cond_timedwait(&mBuffersCond, &mMutex, &timeout);
6466 if (rc != 0) {
6467 LOGE("pthread_cond_timedwait failed due to rc = %s",
6468 strerror(rc));
6469 break;
6470 }
6471 }
6472 }
6473 if (rc != 0) {
6474 mFlushPerf = false;
6475 pthread_mutex_unlock(&mMutex);
6476 return -ENODEV;
6477 }
6478
6479 LOGD("Received buffers, now safe to return them");
6480
6481 //make sure the channels handle flush
6482 //currently only required for the picture channel to release snapshot resources
6483 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6484 it != mStreamInfo.end(); it++) {
6485 QCamera3Channel *channel = (*it)->channel;
6486 if (channel) {
6487 rc = channel->flush();
6488 if (rc) {
6489 LOGE("Flushing the channels failed with error %d", rc);
6490 // even though the channel flush failed we need to continue and
6491 // return the buffers we have to the framework, however the return
6492 // value will be an error
6493 rc = -ENODEV;
6494 }
6495 }
6496 }
6497
6498 /* notify the frameworks and send errored results */
6499 rc = notifyErrorForPendingRequests();
6500 if (rc < 0) {
6501 LOGE("notifyErrorForPendingRequests failed");
6502 pthread_mutex_unlock(&mMutex);
6503 return rc;
6504 }
6505
6506 //unblock process_capture_request
6507 mPendingLiveRequest = 0;
6508 unblockRequestIfNecessary();
6509
6510 mFlushPerf = false;
6511 pthread_mutex_unlock(&mMutex);
6512 LOGD ("Flush Operation complete. rc = %d", rc);
6513 return rc;
6514}
6515
6516/*===========================================================================
6517 * FUNCTION : handleCameraDeviceError
6518 *
6519 * DESCRIPTION: This function calls internal flush and notifies the error to
6520 * framework and updates the state variable.
6521 *
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -07006522 * PARAMETERS :
6523 * @stopChannelImmediately : stop channels immediately without waiting for
6524 * frame boundary.
Thierry Strudel3d639192016-09-09 11:52:26 -07006525 *
6526 * RETURN : NO_ERROR on Success
6527 * Error code on failure
6528 *==========================================================================*/
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -07006529int32_t QCamera3HardwareInterface::handleCameraDeviceError(bool stopChannelImmediately)
Thierry Strudel3d639192016-09-09 11:52:26 -07006530{
6531 int32_t rc = NO_ERROR;
6532
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006533 {
6534 Mutex::Autolock lock(mFlushLock);
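        // Assumption: mFlushLock is held across the whole error-handling sequence so
        // that an external flush() call cannot interleave with the internal flush and
        // the ERROR -> DEINIT state transition below.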
6535 pthread_mutex_lock(&mMutex);
6536 if (mState != ERROR) {
6537 //if mState != ERROR, nothing to be done
6538 pthread_mutex_unlock(&mMutex);
6539 return NO_ERROR;
6540 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006541 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07006542
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -07006543 rc = flush(false /* restart channels */, stopChannelImmediately);
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006544 if (NO_ERROR != rc) {
6545 LOGE("internal flush to handle mState = ERROR failed");
6546 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006547
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006548 pthread_mutex_lock(&mMutex);
6549 mState = DEINIT;
6550 pthread_mutex_unlock(&mMutex);
6551 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006552
6553 camera3_notify_msg_t notify_msg;
6554 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
6555 notify_msg.type = CAMERA3_MSG_ERROR;
6556 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_DEVICE;
6557 notify_msg.message.error.error_stream = NULL;
6558 notify_msg.message.error.frame_number = 0;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006559 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -07006560
6561 return rc;
6562}
6563
6564/*===========================================================================
6565 * FUNCTION : captureResultCb
6566 *
6567 * DESCRIPTION: Callback handler for all capture result
6568 * (streams, as well as metadata)
6569 *
6570 * PARAMETERS :
6571 * @metadata : metadata information
6572 * @buffer : actual gralloc buffer to be returned to frameworks.
6573 * NULL if metadata.
 * @frame_number : frame number of the request this result belongs to
 * @isInputBuffer : true if this callback is for the request's input buffer
6574 *
6575 * RETURN : NONE
6576 *==========================================================================*/
6577void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf,
6578 camera3_stream_buffer_t *buffer, uint32_t frame_number, bool isInputBuffer)
6579{
6580 if (metadata_buf) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006581 pthread_mutex_lock(&mMutex);
6582 uint8_t batchSize = mBatchSize;
6583 pthread_mutex_unlock(&mMutex);
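        // mBatchSize is copied to a local under mMutex so the batch path below can
        // run without holding the lock while handleBatchMetadata() does its work.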
6584 if (batchSize) {
Thierry Strudel3d639192016-09-09 11:52:26 -07006585 handleBatchMetadata(metadata_buf,
6586 true /* free_and_bufdone_meta_buf */);
6587 } else { /* mBatchSize = 0 */
6588 hdrPlusPerfLock(metadata_buf);
6589 pthread_mutex_lock(&mMutex);
6590 handleMetadataWithLock(metadata_buf,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006591 true /* free_and_bufdone_meta_buf */,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006592 true /* last urgent frame of batch metadata */,
6593 true /* last frame of batch metadata */,
Thierry Strudel54dc9782017-02-15 12:12:10 -08006594 NULL);
Thierry Strudel3d639192016-09-09 11:52:26 -07006595 pthread_mutex_unlock(&mMutex);
6596 }
6597 } else if (isInputBuffer) {
6598 pthread_mutex_lock(&mMutex);
6599 handleInputBufferWithLock(frame_number);
6600 pthread_mutex_unlock(&mMutex);
6601 } else {
6602 pthread_mutex_lock(&mMutex);
6603 handleBufferWithLock(buffer, frame_number);
6604 pthread_mutex_unlock(&mMutex);
6605 }
6606 return;
6607}
6608
6609/*===========================================================================
6610 * FUNCTION : getReprocessibleOutputStreamId
6611 *
6612 * DESCRIPTION: Get source output stream id for the input reprocess stream
6613 * based on size and format, which would be the largest
6614 * output stream if an input stream exists.
6615 *
6616 * PARAMETERS :
6617 * @id : return the stream id if found
6618 *
6619 * RETURN : int32_t type of status
6620 * NO_ERROR -- success
6621 * non-zero failure code
6622 *==========================================================================*/
6623int32_t QCamera3HardwareInterface::getReprocessibleOutputStreamId(uint32_t &id)
6624{
6625 /* check if there is any output or bidirectional stream with the same size
6626 and format as the input stream, and return that stream */
6627 if ((mInputStreamInfo.dim.width > 0) &&
6628 (mInputStreamInfo.dim.height > 0)) {
6629 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6630 it != mStreamInfo.end(); it++) {
6631
6632 camera3_stream_t *stream = (*it)->stream;
6633 if ((stream->width == (uint32_t)mInputStreamInfo.dim.width) &&
6634 (stream->height == (uint32_t)mInputStreamInfo.dim.height) &&
6635 (stream->format == mInputStreamInfo.format)) {
6636 // Usage flag for an input stream and the source output stream
6637 // may be different.
6638 LOGD("Found reprocessible output stream! %p", *it);
6639 LOGD("input stream usage 0x%x, current stream usage 0x%x",
6640 stream->usage, mInputStreamInfo.usage);
6641
6642 QCamera3Channel *channel = (QCamera3Channel *)stream->priv;
6643 if (channel != NULL && channel->mStreams[0]) {
6644 id = channel->mStreams[0]->getMyServerID();
6645 return NO_ERROR;
6646 }
6647 }
6648 }
6649 } else {
6650 LOGD("No input stream, so no reprocessible output stream");
6651 }
6652 return NAME_NOT_FOUND;
6653}
6654
6655/*===========================================================================
6656 * FUNCTION : lookupFwkName
6657 *
6658 * DESCRIPTION: In case the enum values differ between the framework and the
6659 * backend, make sure the parameter is correctly propagated
6660 *
6661 * PARAMETERS :
6662 * @arr : map between the two enums
6663 * @len : len of the map
6664 * @hal_name : name of the hal_parm to map
6665 *
6666 * RETURN : int type of status
6667 * fwk_name -- success
6668 * non-zero failure code
6669 *==========================================================================*/
6670template <typename halType, class mapType> int lookupFwkName(const mapType *arr,
6671 size_t len, halType hal_name)
6672{
6673
6674 for (size_t i = 0; i < len; i++) {
6675 if (arr[i].hal_name == hal_name) {
6676 return arr[i].fwk_name;
6677 }
6678 }
6679
6680 /* Not finding a matching framework type is not necessarily an error.
6681 * This happens when mm-camera supports more attributes than the
6682 * framework does */
6683 LOGH("Cannot find matching framework type");
6684 return NAME_NOT_FOUND;
6685}
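// Illustrative usage, mirroring the calls made later in this file (e.g. for
// ANDROID_CONTROL_SCENE_MODE):
//     int val = lookupFwkName(SCENE_MODES_MAP,
//             METADATA_MAP_SIZE(SCENE_MODES_MAP), *sceneMode);
//     if (NAME_NOT_FOUND != val) { /* use (uint8_t)val as the framework enum */ }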
6686
6687/*===========================================================================
6688 * FUNCTION : lookupHalName
6689 *
6690 * DESCRIPTION: In case the enum values differ between the framework and the
6691 * backend, make sure the parameter is correctly propagated
6692 *
6693 * PARAMETERS :
6694 * @arr : map between the two enums
6695 * @len : len of the map
6696 * @fwk_name : name of the framework parameter to map
6697 *
6698 * RETURN : int32_t type of status
6699 * hal_name -- success
6700 * non-zero failure code
6701 *==========================================================================*/
6702template <typename fwkType, class mapType> int lookupHalName(const mapType *arr,
6703 size_t len, fwkType fwk_name)
6704{
6705 for (size_t i = 0; i < len; i++) {
6706 if (arr[i].fwk_name == fwk_name) {
6707 return arr[i].hal_name;
6708 }
6709 }
6710
6711 LOGE("Cannot find matching hal type fwk_name=%d", fwk_name);
6712 return NAME_NOT_FOUND;
6713}
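// Illustrative usage (fwkFlashMode is a hypothetical framework value here;
// FLASH_MODES_MAP is one of the enum maps used elsewhere in this file):
//     int halVal = lookupHalName(FLASH_MODES_MAP,
//             METADATA_MAP_SIZE(FLASH_MODES_MAP), fwkFlashMode);
//     if (NAME_NOT_FOUND != halVal) { /* program halVal into the backend */ }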
6714
6715/*===========================================================================
6716 * FUNCTION : lookupProp
6717 *
6718 * DESCRIPTION: lookup a value by its name
6719 *
6720 * PARAMETERS :
6721 * @arr : map between the two enums
6722 * @len : size of the map
6723 * @name : name to be looked up
6724 *
6725 * RETURN : Value if found
6726 * CAM_CDS_MODE_MAX if not found
6727 *==========================================================================*/
6728template <class mapType> cam_cds_mode_type_t lookupProp(const mapType *arr,
6729 size_t len, const char *name)
6730{
6731 if (name) {
6732 for (size_t i = 0; i < len; i++) {
6733 if (!strcmp(arr[i].desc, name)) {
6734 return arr[i].val;
6735 }
6736 }
6737 }
6738 return CAM_CDS_MODE_MAX;
6739}
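// Illustrative usage (CDS_MAP and prop are hypothetical names, shown only to
// demonstrate the lookup-by-name pattern used for setprop-driven settings):
//     cam_cds_mode_type_t cds = lookupProp(CDS_MAP, METADATA_MAP_SIZE(CDS_MAP), prop);
//     if (CAM_CDS_MODE_MAX != cds) { /* a valid CDS mode string was matched */ }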
6740
6741/*===========================================================================
6742 * FUNCTION   : translateFromHalMetadata
 *
6743 * DESCRIPTION: Translate the metadata reported by the HAL/backend into the
 *              camera_metadata_t format expected by the framework
6744 *
6745 * PARAMETERS :
6746 * @metadata : metadata information from callback
Shuzhen Wang181c57b2017-07-21 11:39:44 -07006747 * @pendingRequest: pending request for this metadata
Thierry Strudel3d639192016-09-09 11:52:26 -07006748 * @pprocDone: whether internal offline postprocessing is done
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006749 * @lastMetadataInBatch: Boolean to indicate whether this is the last metadata
6750 * in a batch. Always true for non-batch mode.
 * @enableZsl: pointer to the ZSL-enable flag to report for this request; may be null
Thierry Strudel3d639192016-09-09 11:52:26 -07006751 *
6752 * RETURN : camera_metadata_t*
6753 * metadata in a format specified by fwk
6754 *==========================================================================*/
6755camera_metadata_t*
6756QCamera3HardwareInterface::translateFromHalMetadata(
6757 metadata_buffer_t *metadata,
Shuzhen Wang181c57b2017-07-21 11:39:44 -07006758 const PendingRequestInfo& pendingRequest,
Thierry Strudel3d639192016-09-09 11:52:26 -07006759 bool pprocDone,
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07006760 bool lastMetadataInBatch,
6761 const bool *enableZsl)
Thierry Strudel3d639192016-09-09 11:52:26 -07006762{
6763 CameraMetadata camMetadata;
6764 camera_metadata_t *resultMetadata;
6765
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006766 if (!lastMetadataInBatch) {
Shuzhen Wang8fe62792017-03-20 16:10:25 -07006767 /* In batch mode, if this is not the last metadata in the batch, populate only
6768 * SENSOR_TIMESTAMP; the timestamp is needed for the shutter notify calculation.
6769 */
Shuzhen Wang181c57b2017-07-21 11:39:44 -07006770 camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &pendingRequest.timestamp, 1);
Shuzhen Wang8fe62792017-03-20 16:10:25 -07006771 resultMetadata = camMetadata.release();
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006772 return resultMetadata;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006773 }
6774
Shuzhen Wang181c57b2017-07-21 11:39:44 -07006775 if (pendingRequest.jpegMetadata.entryCount())
6776 camMetadata.append(pendingRequest.jpegMetadata);
Thierry Strudel3d639192016-09-09 11:52:26 -07006777
Shuzhen Wang181c57b2017-07-21 11:39:44 -07006778 camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &pendingRequest.timestamp, 1);
6779 camMetadata.update(ANDROID_REQUEST_ID, &pendingRequest.request_id, 1);
6780 camMetadata.update(ANDROID_REQUEST_PIPELINE_DEPTH, &pendingRequest.pipeline_depth, 1);
6781 camMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &pendingRequest.capture_intent, 1);
6782 camMetadata.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &pendingRequest.hybrid_ae_enable, 1);
Samuel Ha68ba5172016-12-15 18:41:12 -08006783 if (mBatchSize == 0) {
6784 // DevCamDebug metadata translateFromHalMetadata. Only update this one for non-HFR mode
Shuzhen Wang181c57b2017-07-21 11:39:44 -07006785 camMetadata.update(DEVCAMDEBUG_META_ENABLE, &pendingRequest.DevCamDebug_meta_enable, 1);
Samuel Ha68ba5172016-12-15 18:41:12 -08006786 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006787
Samuel Ha68ba5172016-12-15 18:41:12 -08006788 // atrace_begin(ATRACE_TAG_ALWAYS, "DevCamDebugInfo");
6789 // Only update DevCamDebug metadata conditionally: non-HFR mode and when it is enabled.
Shuzhen Wang181c57b2017-07-21 11:39:44 -07006790 if (mBatchSize == 0 && pendingRequest.DevCamDebug_meta_enable != 0) {
Samuel Ha68ba5172016-12-15 18:41:12 -08006791 // DevCamDebug metadata translateFromHalMetadata AF
6792 IF_META_AVAILABLE(int32_t, DevCamDebug_af_lens_position,
6793 CAM_INTF_META_DEV_CAM_AF_LENS_POSITION, metadata) {
6794 int32_t fwk_DevCamDebug_af_lens_position = *DevCamDebug_af_lens_position;
6795 camMetadata.update(DEVCAMDEBUG_AF_LENS_POSITION, &fwk_DevCamDebug_af_lens_position, 1);
6796 }
6797 IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_confidence,
Shuzhen Wang3569d4a2017-09-04 19:10:28 -07006798 CAM_INTF_META_AF_TOF_CONFIDENCE, metadata) {
Samuel Ha68ba5172016-12-15 18:41:12 -08006799 int32_t fwk_DevCamDebug_af_tof_confidence = *DevCamDebug_af_tof_confidence;
6800 camMetadata.update(DEVCAMDEBUG_AF_TOF_CONFIDENCE, &fwk_DevCamDebug_af_tof_confidence, 1);
6801 }
6802 IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_distance,
Shuzhen Wang3569d4a2017-09-04 19:10:28 -07006803 CAM_INTF_META_AF_TOF_DISTANCE, metadata) {
Samuel Ha68ba5172016-12-15 18:41:12 -08006804 int32_t fwk_DevCamDebug_af_tof_distance = *DevCamDebug_af_tof_distance;
6805 camMetadata.update(DEVCAMDEBUG_AF_TOF_DISTANCE, &fwk_DevCamDebug_af_tof_distance, 1);
6806 }
6807 IF_META_AVAILABLE(int32_t, DevCamDebug_af_luma,
6808 CAM_INTF_META_DEV_CAM_AF_LUMA, metadata) {
6809 int32_t fwk_DevCamDebug_af_luma = *DevCamDebug_af_luma;
6810 camMetadata.update(DEVCAMDEBUG_AF_LUMA, &fwk_DevCamDebug_af_luma, 1);
6811 }
6812 IF_META_AVAILABLE(int32_t, DevCamDebug_af_haf_state,
6813 CAM_INTF_META_DEV_CAM_AF_HAF_STATE, metadata) {
6814 int32_t fwk_DevCamDebug_af_haf_state = *DevCamDebug_af_haf_state;
6815 camMetadata.update(DEVCAMDEBUG_AF_HAF_STATE, &fwk_DevCamDebug_af_haf_state, 1);
6816 }
6817 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_target_pos,
6818 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_TARGET_POS, metadata) {
6819 int32_t fwk_DevCamDebug_af_monitor_pdaf_target_pos =
6820 *DevCamDebug_af_monitor_pdaf_target_pos;
6821 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
6822 &fwk_DevCamDebug_af_monitor_pdaf_target_pos, 1);
6823 }
6824 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_confidence,
6825 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_CONFIDENCE, metadata) {
6826 int32_t fwk_DevCamDebug_af_monitor_pdaf_confidence =
6827 *DevCamDebug_af_monitor_pdaf_confidence;
6828 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
6829 &fwk_DevCamDebug_af_monitor_pdaf_confidence, 1);
6830 }
6831 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_refocus,
6832 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_REFOCUS, metadata) {
6833 int32_t fwk_DevCamDebug_af_monitor_pdaf_refocus = *DevCamDebug_af_monitor_pdaf_refocus;
6834 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
6835 &fwk_DevCamDebug_af_monitor_pdaf_refocus, 1);
6836 }
6837 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_target_pos,
6838 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_TARGET_POS, metadata) {
6839 int32_t fwk_DevCamDebug_af_monitor_tof_target_pos =
6840 *DevCamDebug_af_monitor_tof_target_pos;
6841 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
6842 &fwk_DevCamDebug_af_monitor_tof_target_pos, 1);
6843 }
6844 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_confidence,
6845 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_CONFIDENCE, metadata) {
6846 int32_t fwk_DevCamDebug_af_monitor_tof_confidence =
6847 *DevCamDebug_af_monitor_tof_confidence;
6848 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
6849 &fwk_DevCamDebug_af_monitor_tof_confidence, 1);
6850 }
6851 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_refocus,
6852 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_REFOCUS, metadata) {
6853 int32_t fwk_DevCamDebug_af_monitor_tof_refocus = *DevCamDebug_af_monitor_tof_refocus;
6854 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
6855 &fwk_DevCamDebug_af_monitor_tof_refocus, 1);
6856 }
6857 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_type_select,
6858 CAM_INTF_META_DEV_CAM_AF_MONITOR_TYPE_SELECT, metadata) {
6859 int32_t fwk_DevCamDebug_af_monitor_type_select = *DevCamDebug_af_monitor_type_select;
6860 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
6861 &fwk_DevCamDebug_af_monitor_type_select, 1);
6862 }
6863 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_refocus,
6864 CAM_INTF_META_DEV_CAM_AF_MONITOR_REFOCUS, metadata) {
6865 int32_t fwk_DevCamDebug_af_monitor_refocus = *DevCamDebug_af_monitor_refocus;
6866 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_REFOCUS,
6867 &fwk_DevCamDebug_af_monitor_refocus, 1);
6868 }
6869 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_target_pos,
6870 CAM_INTF_META_DEV_CAM_AF_MONITOR_TARGET_POS, metadata) {
6871 int32_t fwk_DevCamDebug_af_monitor_target_pos = *DevCamDebug_af_monitor_target_pos;
6872 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
6873 &fwk_DevCamDebug_af_monitor_target_pos, 1);
6874 }
6875 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_target_pos,
6876 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_TARGET_POS, metadata) {
6877 int32_t fwk_DevCamDebug_af_search_pdaf_target_pos =
6878 *DevCamDebug_af_search_pdaf_target_pos;
6879 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
6880 &fwk_DevCamDebug_af_search_pdaf_target_pos, 1);
6881 }
6882 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_next_pos,
6883 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEXT_POS, metadata) {
6884 int32_t fwk_DevCamDebug_af_search_pdaf_next_pos = *DevCamDebug_af_search_pdaf_next_pos;
6885 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
6886 &fwk_DevCamDebug_af_search_pdaf_next_pos, 1);
6887 }
6888 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_near_pos,
6889 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEAR_POS, metadata) {
6890 int32_t fwk_DevCamDebug_af_search_pdaf_near_pos = *DevCamDebug_af_search_pdaf_near_pos;
6891 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
6892 &fwk_DevCamDebug_af_search_pdaf_near_pos, 1);
6893 }
6894 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_far_pos,
6895 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_FAR_POS, metadata) {
6896 int32_t fwk_DevCamDebug_af_search_pdaf_far_pos = *DevCamDebug_af_search_pdaf_far_pos;
6897 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
6898 &fwk_DevCamDebug_af_search_pdaf_far_pos, 1);
6899 }
6900 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_confidence,
6901 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_CONFIDENCE, metadata) {
6902 int32_t fwk_DevCamDebug_af_search_pdaf_confidence = *DevCamDebug_af_search_pdaf_confidence;
6903 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
6904 &fwk_DevCamDebug_af_search_pdaf_confidence, 1);
6905 }
6906 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_target_pos,
6907 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_TARGET_POS, metadata) {
6908 int32_t fwk_DevCamDebug_af_search_tof_target_pos =
6909 *DevCamDebug_af_search_tof_target_pos;
6910 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
6911 &fwk_DevCamDebug_af_search_tof_target_pos, 1);
6912 }
6913 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_next_pos,
6914 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEXT_POS, metadata) {
6915 int32_t fwk_DevCamDebug_af_search_tof_next_pos = *DevCamDebug_af_search_tof_next_pos;
6916 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
6917 &fwk_DevCamDebug_af_search_tof_next_pos, 1);
6918 }
6919 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_near_pos,
6920 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEAR_POS, metadata) {
6921 int32_t fwk_DevCamDebug_af_search_tof_near_pos = *DevCamDebug_af_search_tof_near_pos;
6922 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
6923 &fwk_DevCamDebug_af_search_tof_near_pos, 1);
6924 }
6925 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_far_pos,
6926 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_FAR_POS, metadata) {
6927 int32_t fwk_DevCamDebug_af_search_tof_far_pos = *DevCamDebug_af_search_tof_far_pos;
6928 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
6929 &fwk_DevCamDebug_af_search_tof_far_pos, 1);
6930 }
6931 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_confidence,
6932 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_CONFIDENCE, metadata) {
6933 int32_t fwk_DevCamDebug_af_search_tof_confidence = *DevCamDebug_af_search_tof_confidence;
6934 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
6935 &fwk_DevCamDebug_af_search_tof_confidence, 1);
6936 }
6937 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_type_select,
6938 CAM_INTF_META_DEV_CAM_AF_SEARCH_TYPE_SELECT, metadata) {
6939 int32_t fwk_DevCamDebug_af_search_type_select = *DevCamDebug_af_search_type_select;
6940 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
6941 &fwk_DevCamDebug_af_search_type_select, 1);
6942 }
6943 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_next_pos,
6944 CAM_INTF_META_DEV_CAM_AF_SEARCH_NEXT_POS, metadata) {
6945 int32_t fwk_DevCamDebug_af_search_next_pos = *DevCamDebug_af_search_next_pos;
6946 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
6947 &fwk_DevCamDebug_af_search_next_pos, 1);
6948 }
6949 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_target_pos,
6950 CAM_INTF_META_DEV_CAM_AF_SEARCH_TARGET_POS, metadata) {
6951 int32_t fwk_DevCamDebug_af_search_target_pos = *DevCamDebug_af_search_target_pos;
6952 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
6953 &fwk_DevCamDebug_af_search_target_pos, 1);
6954 }
6955 // DevCamDebug metadata translateFromHalMetadata AEC
6956 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_target_luma,
6957 CAM_INTF_META_DEV_CAM_AEC_TARGET_LUMA, metadata) {
6958 int32_t fwk_DevCamDebug_aec_target_luma = *DevCamDebug_aec_target_luma;
6959 camMetadata.update(DEVCAMDEBUG_AEC_TARGET_LUMA, &fwk_DevCamDebug_aec_target_luma, 1);
6960 }
6961 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_comp_luma,
6962 CAM_INTF_META_DEV_CAM_AEC_COMP_LUMA, metadata) {
6963 int32_t fwk_DevCamDebug_aec_comp_luma = *DevCamDebug_aec_comp_luma;
6964 camMetadata.update(DEVCAMDEBUG_AEC_COMP_LUMA, &fwk_DevCamDebug_aec_comp_luma, 1);
6965 }
6966 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_avg_luma,
6967 CAM_INTF_META_DEV_CAM_AEC_AVG_LUMA, metadata) {
6968 int32_t fwk_DevCamDebug_aec_avg_luma = *DevCamDebug_aec_avg_luma;
6969 camMetadata.update(DEVCAMDEBUG_AEC_AVG_LUMA, &fwk_DevCamDebug_aec_avg_luma, 1);
6970 }
6971 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_cur_luma,
6972 CAM_INTF_META_DEV_CAM_AEC_CUR_LUMA, metadata) {
6973 int32_t fwk_DevCamDebug_aec_cur_luma = *DevCamDebug_aec_cur_luma;
6974 camMetadata.update(DEVCAMDEBUG_AEC_CUR_LUMA, &fwk_DevCamDebug_aec_cur_luma, 1);
6975 }
6976 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_linecount,
6977 CAM_INTF_META_DEV_CAM_AEC_LINECOUNT, metadata) {
6978 int32_t fwk_DevCamDebug_aec_linecount = *DevCamDebug_aec_linecount;
6979 camMetadata.update(DEVCAMDEBUG_AEC_LINECOUNT, &fwk_DevCamDebug_aec_linecount, 1);
6980 }
6981 IF_META_AVAILABLE(float, DevCamDebug_aec_real_gain,
6982 CAM_INTF_META_DEV_CAM_AEC_REAL_GAIN, metadata) {
6983 float fwk_DevCamDebug_aec_real_gain = *DevCamDebug_aec_real_gain;
6984 camMetadata.update(DEVCAMDEBUG_AEC_REAL_GAIN, &fwk_DevCamDebug_aec_real_gain, 1);
6985 }
6986 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_exp_index,
6987 CAM_INTF_META_DEV_CAM_AEC_EXP_INDEX, metadata) {
6988 int32_t fwk_DevCamDebug_aec_exp_index = *DevCamDebug_aec_exp_index;
6989 camMetadata.update(DEVCAMDEBUG_AEC_EXP_INDEX, &fwk_DevCamDebug_aec_exp_index, 1);
6990 }
6991 IF_META_AVAILABLE(float, DevCamDebug_aec_lux_idx,
6992 CAM_INTF_META_DEV_CAM_AEC_LUX_IDX, metadata) {
6993 float fwk_DevCamDebug_aec_lux_idx = *DevCamDebug_aec_lux_idx;
6994 camMetadata.update(DEVCAMDEBUG_AEC_LUX_IDX, &fwk_DevCamDebug_aec_lux_idx, 1);
6995 }
Samuel Ha34229982017-02-17 13:51:11 -08006996 // DevCamDebug metadata translateFromHalMetadata zzHDR
6997 IF_META_AVAILABLE(float, DevCamDebug_aec_l_real_gain,
6998 CAM_INTF_META_DEV_CAM_AEC_L_REAL_GAIN, metadata) {
6999 float fwk_DevCamDebug_aec_l_real_gain = *DevCamDebug_aec_l_real_gain;
7000 camMetadata.update(DEVCAMDEBUG_AEC_L_REAL_GAIN, &fwk_DevCamDebug_aec_l_real_gain, 1);
7001 }
7002 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_l_linecount,
7003 CAM_INTF_META_DEV_CAM_AEC_L_LINECOUNT, metadata) {
Samuel Hae80fbd52017-03-29 16:14:46 -07007004 int32_t fwk_DevCamDebug_aec_l_linecount = *DevCamDebug_aec_l_linecount;
Samuel Ha34229982017-02-17 13:51:11 -08007005 camMetadata.update(DEVCAMDEBUG_AEC_L_LINECOUNT, &fwk_DevCamDebug_aec_l_linecount, 1);
7006 }
7007 IF_META_AVAILABLE(float, DevCamDebug_aec_s_real_gain,
7008 CAM_INTF_META_DEV_CAM_AEC_S_REAL_GAIN, metadata) {
7009 float fwk_DevCamDebug_aec_s_real_gain = *DevCamDebug_aec_s_real_gain;
7010 camMetadata.update(DEVCAMDEBUG_AEC_S_REAL_GAIN, &fwk_DevCamDebug_aec_s_real_gain, 1);
7011 }
7012 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_s_linecount,
7013 CAM_INTF_META_DEV_CAM_AEC_S_LINECOUNT, metadata) {
Samuel Hae80fbd52017-03-29 16:14:46 -07007014 int32_t fwk_DevCamDebug_aec_s_linecount = *DevCamDebug_aec_s_linecount;
Samuel Ha34229982017-02-17 13:51:11 -08007015 camMetadata.update(DEVCAMDEBUG_AEC_S_LINECOUNT, &fwk_DevCamDebug_aec_s_linecount, 1);
7016 }
7017 IF_META_AVAILABLE(float, DevCamDebug_aec_hdr_sensitivity_ratio,
7018 CAM_INTF_META_DEV_CAM_AEC_HDR_SENSITIVITY_RATIO, metadata) {
7019 float fwk_DevCamDebug_aec_hdr_sensitivity_ratio =
7020 *DevCamDebug_aec_hdr_sensitivity_ratio;
7021 camMetadata.update(DEVCAMDEBUG_AEC_HDR_SENSITIVITY_RATIO,
7022 &fwk_DevCamDebug_aec_hdr_sensitivity_ratio, 1);
7023 }
7024 IF_META_AVAILABLE(float, DevCamDebug_aec_hdr_exp_time_ratio,
7025 CAM_INTF_META_DEV_CAM_AEC_HDR_EXP_TIME_RATIO, metadata) {
7026 float fwk_DevCamDebug_aec_hdr_exp_time_ratio = *DevCamDebug_aec_hdr_exp_time_ratio;
7027 camMetadata.update(DEVCAMDEBUG_AEC_HDR_EXP_TIME_RATIO,
7028 &fwk_DevCamDebug_aec_hdr_exp_time_ratio, 1);
7029 }
7030 // DevCamDebug metadata translateFromHalMetadata ADRC
7031 IF_META_AVAILABLE(float, DevCamDebug_aec_total_drc_gain,
7032 CAM_INTF_META_DEV_CAM_AEC_TOTAL_DRC_GAIN, metadata) {
7033 float fwk_DevCamDebug_aec_total_drc_gain = *DevCamDebug_aec_total_drc_gain;
7034 camMetadata.update(DEVCAMDEBUG_AEC_TOTAL_DRC_GAIN,
7035 &fwk_DevCamDebug_aec_total_drc_gain, 1);
7036 }
7037 IF_META_AVAILABLE(float, DevCamDebug_aec_color_drc_gain,
7038 CAM_INTF_META_DEV_CAM_AEC_COLOR_DRC_GAIN, metadata) {
7039 float fwk_DevCamDebug_aec_color_drc_gain = *DevCamDebug_aec_color_drc_gain;
7040 camMetadata.update(DEVCAMDEBUG_AEC_COLOR_DRC_GAIN,
7041 &fwk_DevCamDebug_aec_color_drc_gain, 1);
7042 }
7043 IF_META_AVAILABLE(float, DevCamDebug_aec_gtm_ratio,
7044 CAM_INTF_META_DEV_CAM_AEC_GTM_RATIO, metadata) {
7045 float fwk_DevCamDebug_aec_gtm_ratio = *DevCamDebug_aec_gtm_ratio;
7046 camMetadata.update(DEVCAMDEBUG_AEC_GTM_RATIO, &fwk_DevCamDebug_aec_gtm_ratio, 1);
7047 }
7048 IF_META_AVAILABLE(float, DevCamDebug_aec_ltm_ratio,
7049 CAM_INTF_META_DEV_CAM_AEC_LTM_RATIO, metadata) {
7050 float fwk_DevCamDebug_aec_ltm_ratio = *DevCamDebug_aec_ltm_ratio;
7051 camMetadata.update(DEVCAMDEBUG_AEC_LTM_RATIO, &fwk_DevCamDebug_aec_ltm_ratio, 1);
7052 }
7053 IF_META_AVAILABLE(float, DevCamDebug_aec_la_ratio,
7054 CAM_INTF_META_DEV_CAM_AEC_LA_RATIO, metadata) {
7055 float fwk_DevCamDebug_aec_la_ratio = *DevCamDebug_aec_la_ratio;
7056 camMetadata.update(DEVCAMDEBUG_AEC_LA_RATIO, &fwk_DevCamDebug_aec_la_ratio, 1);
7057 }
7058 IF_META_AVAILABLE(float, DevCamDebug_aec_gamma_ratio,
7059 CAM_INTF_META_DEV_CAM_AEC_GAMMA_RATIO, metadata) {
7060 float fwk_DevCamDebug_aec_gamma_ratio = *DevCamDebug_aec_gamma_ratio;
7061 camMetadata.update(DEVCAMDEBUG_AEC_GAMMA_RATIO, &fwk_DevCamDebug_aec_gamma_ratio, 1);
7062 }
Samuel Habdf4fac2017-07-28 17:21:18 -07007063 // DevCamDebug metadata translateFromHalMetadata AEC MOTION
7064 IF_META_AVAILABLE(float, DevCamDebug_aec_camera_motion_dx,
7065 CAM_INTF_META_DEV_CAM_AEC_CAMERA_MOTION_DX, metadata) {
7066 float fwk_DevCamDebug_aec_camera_motion_dx = *DevCamDebug_aec_camera_motion_dx;
7067 camMetadata.update(DEVCAMDEBUG_AEC_CAMERA_MOTION_DX,
7068 &fwk_DevCamDebug_aec_camera_motion_dx, 1);
7069 }
7070 IF_META_AVAILABLE(float, DevCamDebug_aec_camera_motion_dy,
7071 CAM_INTF_META_DEV_CAM_AEC_CAMERA_MOTION_DY, metadata) {
7072 float fwk_DevCamDebug_aec_camera_motion_dy = *DevCamDebug_aec_camera_motion_dy;
7073 camMetadata.update(DEVCAMDEBUG_AEC_CAMERA_MOTION_DY,
7074 &fwk_DevCamDebug_aec_camera_motion_dy, 1);
7075 }
7076 IF_META_AVAILABLE(float, DevCamDebug_aec_subject_motion,
7077 CAM_INTF_META_DEV_CAM_AEC_SUBJECT_MOTION, metadata) {
7078 float fwk_DevCamDebug_aec_subject_motion = *DevCamDebug_aec_subject_motion;
7079 camMetadata.update(DEVCAMDEBUG_AEC_SUBJECT_MOTION,
7080 &fwk_DevCamDebug_aec_subject_motion, 1);
7081 }
Samuel Ha68ba5172016-12-15 18:41:12 -08007082 // DevCamDebug metadata translateFromHalMetadata AWB
7083 IF_META_AVAILABLE(float, DevCamDebug_awb_r_gain,
7084 CAM_INTF_META_DEV_CAM_AWB_R_GAIN, metadata) {
7085 float fwk_DevCamDebug_awb_r_gain = *DevCamDebug_awb_r_gain;
7086 camMetadata.update(DEVCAMDEBUG_AWB_R_GAIN, &fwk_DevCamDebug_awb_r_gain, 1);
7087 }
7088 IF_META_AVAILABLE(float, DevCamDebug_awb_g_gain,
7089 CAM_INTF_META_DEV_CAM_AWB_G_GAIN, metadata) {
7090 float fwk_DevCamDebug_awb_g_gain = *DevCamDebug_awb_g_gain;
7091 camMetadata.update(DEVCAMDEBUG_AWB_G_GAIN, &fwk_DevCamDebug_awb_g_gain, 1);
7092 }
7093 IF_META_AVAILABLE(float, DevCamDebug_awb_b_gain,
7094 CAM_INTF_META_DEV_CAM_AWB_B_GAIN, metadata) {
7095 float fwk_DevCamDebug_awb_b_gain = *DevCamDebug_awb_b_gain;
7096 camMetadata.update(DEVCAMDEBUG_AWB_B_GAIN, &fwk_DevCamDebug_awb_b_gain, 1);
7097 }
7098 IF_META_AVAILABLE(int32_t, DevCamDebug_awb_cct,
7099 CAM_INTF_META_DEV_CAM_AWB_CCT, metadata) {
7100 int32_t fwk_DevCamDebug_awb_cct = *DevCamDebug_awb_cct;
7101 camMetadata.update(DEVCAMDEBUG_AWB_CCT, &fwk_DevCamDebug_awb_cct, 1);
7102 }
7103 IF_META_AVAILABLE(int32_t, DevCamDebug_awb_decision,
7104 CAM_INTF_META_DEV_CAM_AWB_DECISION, metadata) {
7105 int32_t fwk_DevCamDebug_awb_decision = *DevCamDebug_awb_decision;
7106 camMetadata.update(DEVCAMDEBUG_AWB_DECISION, &fwk_DevCamDebug_awb_decision, 1);
7107 }
7108 }
7109 // atrace_end(ATRACE_TAG_ALWAYS);
7110
Thierry Strudel3d639192016-09-09 11:52:26 -07007111 IF_META_AVAILABLE(uint32_t, frame_number, CAM_INTF_META_FRAME_NUMBER, metadata) {
7112 int64_t fwk_frame_number = *frame_number;
7113 camMetadata.update(ANDROID_SYNC_FRAME_NUMBER, &fwk_frame_number, 1);
7114 }
7115
7116 IF_META_AVAILABLE(cam_fps_range_t, float_range, CAM_INTF_PARM_FPS_RANGE, metadata) {
7117 int32_t fps_range[2];
7118 fps_range[0] = (int32_t)float_range->min_fps;
7119 fps_range[1] = (int32_t)float_range->max_fps;
7120 camMetadata.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
7121 fps_range, 2);
7122 LOGD("urgent Metadata : ANDROID_CONTROL_AE_TARGET_FPS_RANGE [%d, %d]",
7123 fps_range[0], fps_range[1]);
7124 }
7125
7126 IF_META_AVAILABLE(int32_t, expCompensation, CAM_INTF_PARM_EXPOSURE_COMPENSATION, metadata) {
7127 camMetadata.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, expCompensation, 1);
7128 }
7129
7130 IF_META_AVAILABLE(uint32_t, sceneMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
        // Keep the full int so the NAME_NOT_FOUND check below is not defeated by narrowing
7131 int val = lookupFwkName(SCENE_MODES_MAP,
7132 METADATA_MAP_SIZE(SCENE_MODES_MAP),
7133 *sceneMode);
7134 if (NAME_NOT_FOUND != val) {
7135 uint8_t fwkSceneMode = (uint8_t)val;
7136 camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkSceneMode, 1);
7137 LOGD("urgent Metadata : ANDROID_CONTROL_SCENE_MODE: %d",
7138 fwkSceneMode);
7139 }
7140 }
7141
7142 IF_META_AVAILABLE(uint32_t, ae_lock, CAM_INTF_PARM_AEC_LOCK, metadata) {
7143 uint8_t fwk_ae_lock = (uint8_t) *ae_lock;
7144 camMetadata.update(ANDROID_CONTROL_AE_LOCK, &fwk_ae_lock, 1);
7145 }
7146
7147 IF_META_AVAILABLE(uint32_t, awb_lock, CAM_INTF_PARM_AWB_LOCK, metadata) {
7148 uint8_t fwk_awb_lock = (uint8_t) *awb_lock;
7149 camMetadata.update(ANDROID_CONTROL_AWB_LOCK, &fwk_awb_lock, 1);
7150 }
7151
7152 IF_META_AVAILABLE(uint32_t, color_correct_mode, CAM_INTF_META_COLOR_CORRECT_MODE, metadata) {
7153 uint8_t fwk_color_correct_mode = (uint8_t) *color_correct_mode;
7154 camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, &fwk_color_correct_mode, 1);
7155 }
7156
7157 IF_META_AVAILABLE(cam_edge_application_t, edgeApplication,
7158 CAM_INTF_META_EDGE_MODE, metadata) {
7159 camMetadata.update(ANDROID_EDGE_MODE, &(edgeApplication->edge_mode), 1);
7160 }
7161
7162 IF_META_AVAILABLE(uint32_t, flashPower, CAM_INTF_META_FLASH_POWER, metadata) {
7163 uint8_t fwk_flashPower = (uint8_t) *flashPower;
7164 camMetadata.update(ANDROID_FLASH_FIRING_POWER, &fwk_flashPower, 1);
7165 }
7166
7167 IF_META_AVAILABLE(int64_t, flashFiringTime, CAM_INTF_META_FLASH_FIRING_TIME, metadata) {
7168 camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1);
7169 }
7170
7171 IF_META_AVAILABLE(int32_t, flashState, CAM_INTF_META_FLASH_STATE, metadata) {
7172 if (0 <= *flashState) {
7173 uint8_t fwk_flashState = (uint8_t) *flashState;
7174 if (!gCamCapability[mCameraId]->flash_available) {
7175 fwk_flashState = ANDROID_FLASH_STATE_UNAVAILABLE;
7176 }
7177 camMetadata.update(ANDROID_FLASH_STATE, &fwk_flashState, 1);
7178 }
7179 }
7180
7181 IF_META_AVAILABLE(uint32_t, flashMode, CAM_INTF_META_FLASH_MODE, metadata) {
7182 int val = lookupFwkName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP), *flashMode);
7183 if (NAME_NOT_FOUND != val) {
7184 uint8_t fwk_flashMode = (uint8_t)val;
7185 camMetadata.update(ANDROID_FLASH_MODE, &fwk_flashMode, 1);
7186 }
7187 }
7188
7189 IF_META_AVAILABLE(uint32_t, hotPixelMode, CAM_INTF_META_HOTPIXEL_MODE, metadata) {
7190 uint8_t fwk_hotPixelMode = (uint8_t) *hotPixelMode;
7191 camMetadata.update(ANDROID_HOT_PIXEL_MODE, &fwk_hotPixelMode, 1);
7192 }
7193
7194 IF_META_AVAILABLE(float, lensAperture, CAM_INTF_META_LENS_APERTURE, metadata) {
7195 camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1);
7196 }
7197
7198 IF_META_AVAILABLE(float, filterDensity, CAM_INTF_META_LENS_FILTERDENSITY, metadata) {
7199 camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1);
7200 }
7201
7202 IF_META_AVAILABLE(float, focalLength, CAM_INTF_META_LENS_FOCAL_LENGTH, metadata) {
7203 camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1);
7204 }
7205
7206 IF_META_AVAILABLE(uint32_t, opticalStab, CAM_INTF_META_LENS_OPT_STAB_MODE, metadata) {
7207 uint8_t fwk_opticalStab = (uint8_t) *opticalStab;
7208 camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &fwk_opticalStab, 1);
7209 }
7210
7211 IF_META_AVAILABLE(uint32_t, videoStab, CAM_INTF_META_VIDEO_STAB_MODE, metadata) {
7212 uint8_t fwk_videoStab = (uint8_t) *videoStab;
7213 LOGD("fwk_videoStab = %d", fwk_videoStab);
7214 camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwk_videoStab, 1);
7215 } else {
7216 // Regardless of whether video stabilization is supported or not, CTS expects the EIS
7217 // result to be non-NULL, so hardcode the video stabilization result to OFF mode.
7218 uint8_t fwkVideoStabMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
7219 camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwkVideoStabMode, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007220 LOGD("EIS result default to OFF mode");
Thierry Strudel3d639192016-09-09 11:52:26 -07007221 }
7222
7223 IF_META_AVAILABLE(uint32_t, noiseRedMode, CAM_INTF_META_NOISE_REDUCTION_MODE, metadata) {
7224 uint8_t fwk_noiseRedMode = (uint8_t) *noiseRedMode;
7225 camMetadata.update(ANDROID_NOISE_REDUCTION_MODE, &fwk_noiseRedMode, 1);
7226 }
7227
7228 IF_META_AVAILABLE(float, effectiveExposureFactor, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR, metadata) {
7229 camMetadata.update(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR, effectiveExposureFactor, 1);
7230 }
7231
Thierry Strudel3d639192016-09-09 11:52:26 -07007232 IF_META_AVAILABLE(cam_black_level_metadata_t, blackLevelAppliedPattern,
7233 CAM_INTF_META_BLACK_LEVEL_APPLIED_PATTERN, metadata) {
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007234 float fwk_blackLevelInd[BLACK_LEVEL_PATTERN_CNT];
Thierry Strudel3d639192016-09-09 11:52:26 -07007235
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007236 adjustBlackLevelForCFA(blackLevelAppliedPattern->cam_black_level, fwk_blackLevelInd,
7237 gCamCapability[mCameraId]->color_arrangement);
Thierry Strudel3d639192016-09-09 11:52:26 -07007238
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007239 LOGD("applied dynamicblackLevel in RGGB order = %f %f %f %f",
Thierry Strudel3d639192016-09-09 11:52:26 -07007240 blackLevelAppliedPattern->cam_black_level[0],
7241 blackLevelAppliedPattern->cam_black_level[1],
7242 blackLevelAppliedPattern->cam_black_level[2],
7243 blackLevelAppliedPattern->cam_black_level[3]);
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007244 camMetadata.update(QCAMERA3_SENSOR_DYNAMIC_BLACK_LEVEL_PATTERN, fwk_blackLevelInd,
7245 BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007246
7247#ifndef USE_HAL_3_3
7248 // Update the ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL
Jason Lee4f3d96e2017-02-28 19:24:14 +05307249 // Convert the internal 14-bit depth to the sensor's 10-bit raw
Zhijun Heb753c672016-06-15 14:50:48 -07007250 // depth space.
Jason Lee4f3d96e2017-02-28 19:24:14 +05307251 fwk_blackLevelInd[0] /= 16.0;
7252 fwk_blackLevelInd[1] /= 16.0;
7253 fwk_blackLevelInd[2] /= 16.0;
7254 fwk_blackLevelInd[3] /= 16.0;
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007255 camMetadata.update(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL, fwk_blackLevelInd,
7256 BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007257#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07007258 }
7259
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007260#ifndef USE_HAL_3_3
7261 // Fixed whitelevel is used by ISP/Sensor
7262 camMetadata.update(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL,
7263 &gCamCapability[mCameraId]->white_level, 1);
7264#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07007265
7266 IF_META_AVAILABLE(cam_crop_region_t, hScalerCropRegion,
7267 CAM_INTF_META_SCALER_CROP_REGION, metadata) {
7268 int32_t scalerCropRegion[4];
7269 scalerCropRegion[0] = hScalerCropRegion->left;
7270 scalerCropRegion[1] = hScalerCropRegion->top;
7271 scalerCropRegion[2] = hScalerCropRegion->width;
7272 scalerCropRegion[3] = hScalerCropRegion->height;
7273
7274 // Adjust crop region from sensor output coordinate system to active
7275 // array coordinate system.
7276 mCropRegionMapper.toActiveArray(scalerCropRegion[0], scalerCropRegion[1],
7277 scalerCropRegion[2], scalerCropRegion[3]);
7278
7279 camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 4);
7280 }
7281
7282 IF_META_AVAILABLE(int64_t, sensorExpTime, CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata) {
7283 LOGD("sensorExpTime = %lld", *sensorExpTime);
7284 camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1);
7285 }
7286
Shuzhen Wang6a1dd612017-08-05 15:03:53 -07007287 IF_META_AVAILABLE(float, expTimeBoost, CAM_INTF_META_EXP_TIME_BOOST, metadata) {
7288 LOGD("expTimeBoost = %f", *expTimeBoost);
7289 camMetadata.update(NEXUS_EXPERIMENTAL_2017_EXP_TIME_BOOST, expTimeBoost, 1);
7290 }
7291
Thierry Strudel3d639192016-09-09 11:52:26 -07007292 IF_META_AVAILABLE(int64_t, sensorFameDuration,
7293 CAM_INTF_META_SENSOR_FRAME_DURATION, metadata) {
7294 LOGD("sensorFameDuration = %lld", *sensorFameDuration);
7295 camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFameDuration, 1);
7296 }
7297
7298 IF_META_AVAILABLE(int64_t, sensorRollingShutterSkew,
7299 CAM_INTF_META_SENSOR_ROLLING_SHUTTER_SKEW, metadata) {
7300 LOGD("sensorRollingShutterSkew = %lld", *sensorRollingShutterSkew);
7301 camMetadata.update(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW,
7302 sensorRollingShutterSkew, 1);
7303 }
7304
7305 IF_META_AVAILABLE(int32_t, sensorSensitivity, CAM_INTF_META_SENSOR_SENSITIVITY, metadata) {
7306 LOGD("sensorSensitivity = %d", *sensorSensitivity);
7307 camMetadata.update(ANDROID_SENSOR_SENSITIVITY, sensorSensitivity, 1);
7308
7309 //calculate the noise profile based on sensitivity
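        // ANDROID_SENSOR_NOISE_PROFILE carries one (S, O) coefficient pair per color
        // channel, modeling the pixel noise as variance ~= S * signal + O.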
7310 double noise_profile_S = computeNoiseModelEntryS(*sensorSensitivity);
7311 double noise_profile_O = computeNoiseModelEntryO(*sensorSensitivity);
7312 double noise_profile[2 * gCamCapability[mCameraId]->num_color_channels];
7313 for (int i = 0; i < 2 * gCamCapability[mCameraId]->num_color_channels; i += 2) {
7314 noise_profile[i] = noise_profile_S;
7315 noise_profile[i+1] = noise_profile_O;
7316 }
7317 LOGD("noise model entry (S, O) is (%f, %f)",
7318 noise_profile_S, noise_profile_O);
7319 camMetadata.update(ANDROID_SENSOR_NOISE_PROFILE, noise_profile,
7320 (size_t) (2 * gCamCapability[mCameraId]->num_color_channels));
7321 }
7322
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007323#ifndef USE_HAL_3_3
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07007324 int32_t fwk_ispSensitivity = 100;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007325 IF_META_AVAILABLE(int32_t, ispSensitivity, CAM_INTF_META_ISP_SENSITIVITY, metadata) {
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07007326 fwk_ispSensitivity = (int32_t) *ispSensitivity;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007327 }
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07007328 IF_META_AVAILABLE(float, postStatsSensitivity, CAM_INTF_META_ISP_POST_STATS_SENSITIVITY, metadata) {
7329 fwk_ispSensitivity = (int32_t) (*postStatsSensitivity * fwk_ispSensitivity);
7330 }
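    // POST_RAW_SENSITIVITY_BOOST is reported on a scale where 100 means no boost;
    // the ISP digital gain and the post-stats sensitivity factor are combined here.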
7331 camMetadata.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &fwk_ispSensitivity, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007332#endif
7333
Thierry Strudel3d639192016-09-09 11:52:26 -07007334 IF_META_AVAILABLE(uint32_t, shadingMode, CAM_INTF_META_SHADING_MODE, metadata) {
7335 uint8_t fwk_shadingMode = (uint8_t) *shadingMode;
7336 camMetadata.update(ANDROID_SHADING_MODE, &fwk_shadingMode, 1);
7337 }
7338
7339 IF_META_AVAILABLE(uint32_t, faceDetectMode, CAM_INTF_META_STATS_FACEDETECT_MODE, metadata) {
7340 int val = lookupFwkName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
7341 *faceDetectMode);
7342 if (NAME_NOT_FOUND != val) {
7343 uint8_t fwk_faceDetectMode = (uint8_t)val;
7344 camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &fwk_faceDetectMode, 1);
7345
7346 if (fwk_faceDetectMode != ANDROID_STATISTICS_FACE_DETECT_MODE_OFF) {
7347 IF_META_AVAILABLE(cam_face_detection_data_t, faceDetectionInfo,
7348 CAM_INTF_META_FACE_DETECTION, metadata) {
7349 uint8_t numFaces = MIN(
7350 faceDetectionInfo->num_faces_detected, MAX_ROI);
7351 int32_t faceIds[MAX_ROI];
7352 uint8_t faceScores[MAX_ROI];
7353 int32_t faceRectangles[MAX_ROI * 4];
7354 int32_t faceLandmarks[MAX_ROI * 6];
7355 size_t j = 0, k = 0;
7356
7357 for (size_t i = 0; i < numFaces; i++) {
7358 faceScores[i] = (uint8_t)faceDetectionInfo->faces[i].score;
7359 // Adjust crop region from sensor output coordinate system to active
7360 // array coordinate system.
Shuzhen Wang91c14e82017-08-21 17:56:57 -07007361 cam_rect_t rect = faceDetectionInfo->faces[i].face_boundary;
Thierry Strudel3d639192016-09-09 11:52:26 -07007362 mCropRegionMapper.toActiveArray(rect.left, rect.top,
7363 rect.width, rect.height);
7364
Shuzhen Wang91c14e82017-08-21 17:56:57 -07007365 convertToRegions(rect, faceRectangles+j, -1);
Thierry Strudel3d639192016-09-09 11:52:26 -07007366
Jason Lee8ce36fa2017-04-19 19:40:37 -07007367 LOGL("FD_DEBUG : Frame[%d] Face[%d] : top-left (%d, %d), "
7368 "bottom-right (%d, %d)",
7369 faceDetectionInfo->frame_id, i,
7370 faceRectangles[j + FACE_LEFT], faceRectangles[j + FACE_TOP],
7371 faceRectangles[j + FACE_RIGHT], faceRectangles[j + FACE_BOTTOM]);
7372
Thierry Strudel3d639192016-09-09 11:52:26 -07007373 j+= 4;
7374 }
7375 if (numFaces <= 0) {
7376 memset(faceIds, 0, sizeof(int32_t) * MAX_ROI);
7377 memset(faceScores, 0, sizeof(uint8_t) * MAX_ROI);
7378 memset(faceRectangles, 0, sizeof(int32_t) * MAX_ROI * 4);
7379 memset(faceLandmarks, 0, sizeof(int32_t) * MAX_ROI * 6);
7380 }
7381
7382 camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores,
7383 numFaces);
7384 camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES,
7385 faceRectangles, numFaces * 4U);
7386 if (fwk_faceDetectMode ==
7387 ANDROID_STATISTICS_FACE_DETECT_MODE_FULL) {
7388 IF_META_AVAILABLE(cam_face_landmarks_data_t, landmarks,
7389 CAM_INTF_META_FACE_LANDMARK, metadata) {
7390
7391 for (size_t i = 0; i < numFaces; i++) {
Shuzhen Wang91c14e82017-08-21 17:56:57 -07007392 cam_face_landmarks_info_t face_landmarks = landmarks->face_landmarks[i];
Thierry Strudel3d639192016-09-09 11:52:26 -07007393 // Map the co-ordinate sensor output coordinate system to active
7394 // array coordinate system.
7395 mCropRegionMapper.toActiveArray(
Shuzhen Wang91c14e82017-08-21 17:56:57 -07007396 face_landmarks.left_eye_center.x,
7397 face_landmarks.left_eye_center.y);
Thierry Strudel3d639192016-09-09 11:52:26 -07007398 mCropRegionMapper.toActiveArray(
Shuzhen Wang91c14e82017-08-21 17:56:57 -07007399 face_landmarks.right_eye_center.x,
7400 face_landmarks.right_eye_center.y);
Thierry Strudel3d639192016-09-09 11:52:26 -07007401 mCropRegionMapper.toActiveArray(
Shuzhen Wang91c14e82017-08-21 17:56:57 -07007402 face_landmarks.mouth_center.x,
7403 face_landmarks.mouth_center.y);
Thierry Strudel3d639192016-09-09 11:52:26 -07007404
Shuzhen Wang91c14e82017-08-21 17:56:57 -07007405 convertLandmarks(face_landmarks, faceLandmarks+k);
Jason Lee8ce36fa2017-04-19 19:40:37 -07007406
7407 LOGL("FD_DEBUG LANDMARK : Frame[%d] Face[%d] : "
7408 "left-eye (%d, %d), right-eye (%d, %d), mouth (%d, %d)",
7409 faceDetectionInfo->frame_id, i,
7410 faceLandmarks[k + LEFT_EYE_X],
7411 faceLandmarks[k + LEFT_EYE_Y],
7412 faceLandmarks[k + RIGHT_EYE_X],
7413 faceLandmarks[k + RIGHT_EYE_Y],
7414 faceLandmarks[k + MOUTH_X],
7415 faceLandmarks[k + MOUTH_Y]);
7416
Thierry Strudel04e026f2016-10-10 11:27:36 -07007417 k+= TOTAL_LANDMARK_INDICES;
7418 }
7419 } else {
7420 for (size_t i = 0; i < numFaces; i++) {
7421 setInvalidLandmarks(faceLandmarks+k);
7422 k+= TOTAL_LANDMARK_INDICES;
Thierry Strudel3d639192016-09-09 11:52:26 -07007423 }
7424 }
7425
Jason Lee49619db2017-04-13 12:07:22 -07007426 for (size_t i = 0; i < numFaces; i++) {
7427 faceIds[i] = faceDetectionInfo->faces[i].face_id;
7428
7429 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : faceIds=%d",
7430 faceDetectionInfo->frame_id, i, faceIds[i]);
7431 }
7432
Thierry Strudel3d639192016-09-09 11:52:26 -07007433 camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces);
7434 camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS,
7435 faceLandmarks, numFaces * 6U);
Jason Lee49619db2017-04-13 12:07:22 -07007436 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08007437 IF_META_AVAILABLE(cam_face_blink_data_t, blinks,
7438 CAM_INTF_META_FACE_BLINK, metadata) {
7439 uint8_t detected[MAX_ROI];
7440 uint8_t degree[MAX_ROI * 2];
7441 for (size_t i = 0; i < numFaces; i++) {
7442 detected[i] = blinks->blink[i].blink_detected;
7443 degree[2 * i] = blinks->blink[i].left_blink;
7444 degree[2 * i + 1] = blinks->blink[i].right_blink;
Jason Lee8ce36fa2017-04-19 19:40:37 -07007445
Jason Lee49619db2017-04-13 12:07:22 -07007446 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : "
7447 "blink_detected=%d, leye_blink=%d, reye_blink=%d",
7448 faceDetectionInfo->frame_id, i, detected[i], degree[2 * i],
7449 degree[2 * i + 1]);
Thierry Strudel54dc9782017-02-15 12:12:10 -08007450 }
7451 camMetadata.update(QCAMERA3_STATS_BLINK_DETECTED,
7452 detected, numFaces);
7453 camMetadata.update(QCAMERA3_STATS_BLINK_DEGREE,
7454 degree, numFaces * 2);
7455 }
7456 IF_META_AVAILABLE(cam_face_smile_data_t, smiles,
7457 CAM_INTF_META_FACE_SMILE, metadata) {
7458 uint8_t degree[MAX_ROI];
7459 uint8_t confidence[MAX_ROI];
7460 for (size_t i = 0; i < numFaces; i++) {
7461 degree[i] = smiles->smile[i].smile_degree;
7462 confidence[i] = smiles->smile[i].smile_confidence;
Jason Lee8ce36fa2017-04-19 19:40:37 -07007463
Jason Lee49619db2017-04-13 12:07:22 -07007464 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : "
7465 "smile_degree=%d, smile_score=%d",
7466 faceDetectionInfo->frame_id, i, degree[i], confidence[i]);
Thierry Strudel54dc9782017-02-15 12:12:10 -08007467 }
7468 camMetadata.update(QCAMERA3_STATS_SMILE_DEGREE,
7469 degree, numFaces);
7470 camMetadata.update(QCAMERA3_STATS_SMILE_CONFIDENCE,
7471 confidence, numFaces);
7472 }
7473 IF_META_AVAILABLE(cam_face_gaze_data_t, gazes,
7474 CAM_INTF_META_FACE_GAZE, metadata) {
7475 int8_t angle[MAX_ROI];
7476 int32_t direction[MAX_ROI * 3];
7477 int8_t degree[MAX_ROI * 2];
7478 for (size_t i = 0; i < numFaces; i++) {
7479 angle[i] = gazes->gaze[i].gaze_angle;
7480 direction[3 * i] = gazes->gaze[i].updown_dir;
7481 direction[3 * i + 1] = gazes->gaze[i].leftright_dir;
7482 direction[3 * i + 2] = gazes->gaze[i].roll_dir;
7483 degree[2 * i] = gazes->gaze[i].left_right_gaze;
7484 degree[2 * i + 1] = gazes->gaze[i].top_bottom_gaze;
Jason Lee8ce36fa2017-04-19 19:40:37 -07007485
7486 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : gaze_angle=%d, "
7487 "updown_dir=%d, leftright_dir=%d,, roll_dir=%d, "
7488 "left_right_gaze=%d, top_bottom_gaze=%d",
7489 faceDetectionInfo->frame_id, i, angle[i],
7490 direction[3 * i], direction[3 * i + 1],
7491 direction[3 * i + 2],
7492 degree[2 * i], degree[2 * i + 1]);
Thierry Strudel54dc9782017-02-15 12:12:10 -08007493 }
7494 camMetadata.update(QCAMERA3_STATS_GAZE_ANGLE,
7495 (uint8_t *)angle, numFaces);
7496 camMetadata.update(QCAMERA3_STATS_GAZE_DIRECTION,
7497 direction, numFaces * 3);
7498 camMetadata.update(QCAMERA3_STATS_GAZE_DEGREE,
7499 (uint8_t *)degree, numFaces * 2);
7500 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007501 }
7502 }
7503 }
7504 }
7505
7506 IF_META_AVAILABLE(uint32_t, histogramMode, CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata) {
7507 uint8_t fwk_histogramMode = (uint8_t) *histogramMode;
Shuzhen Wang14415f52016-11-16 18:26:18 -08007508 int32_t histogramBins = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007509 camMetadata.update(QCAMERA3_HISTOGRAM_MODE, &fwk_histogramMode, 1);
Shuzhen Wang14415f52016-11-16 18:26:18 -08007510 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE, &fwk_histogramMode, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007511
Shuzhen Wang14415f52016-11-16 18:26:18 -08007512 IF_META_AVAILABLE(int32_t, histBins, CAM_INTF_META_STATS_HISTOGRAM_BINS, metadata) {
7513 histogramBins = *histBins;
7514 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS, &histogramBins, 1);
7515 }
7516
7517 if (fwk_histogramMode == QCAMERA3_HISTOGRAM_MODE_ON && histogramBins > 0) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007518 IF_META_AVAILABLE(cam_hist_stats_t, stats_data, CAM_INTF_META_HISTOGRAM, metadata) {
7519 // process histogram statistics info
Shuzhen Wang14415f52016-11-16 18:26:18 -08007520 int32_t* histogramData = NULL;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007521
7522 switch (stats_data->type) {
7523 case CAM_HISTOGRAM_TYPE_BAYER:
7524 switch (stats_data->bayer_stats.data_type) {
7525 case CAM_STATS_CHANNEL_GR:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007526 histogramData = (int32_t *)stats_data->bayer_stats.gr_stats.hist_buf;
7527 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007528 case CAM_STATS_CHANNEL_GB:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007529 histogramData = (int32_t *)stats_data->bayer_stats.gb_stats.hist_buf;
7530 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007531 case CAM_STATS_CHANNEL_B:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007532 histogramData = (int32_t *)stats_data->bayer_stats.b_stats.hist_buf;
7533 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007534 case CAM_STATS_CHANNEL_Y:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007535 case CAM_STATS_CHANNEL_ALL:
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007536 case CAM_STATS_CHANNEL_R:
7537 default:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007538 histogramData = (int32_t *)stats_data->bayer_stats.r_stats.hist_buf;
7539 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007540 }
7541 break;
7542 case CAM_HISTOGRAM_TYPE_YUV:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007543 histogramData = (int32_t *)stats_data->yuv_stats.hist_buf;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007544 break;
7545 }
7546
Shuzhen Wang14415f52016-11-16 18:26:18 -08007547 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM, histogramData, histogramBins);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007548 }
7549 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007550 }
7551
7552 IF_META_AVAILABLE(uint32_t, sharpnessMapMode,
7553 CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata) {
7554 uint8_t fwk_sharpnessMapMode = (uint8_t) *sharpnessMapMode;
7555 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &fwk_sharpnessMapMode, 1);
7556 }
7557
7558 IF_META_AVAILABLE(cam_sharpness_map_t, sharpnessMap,
7559 CAM_INTF_META_STATS_SHARPNESS_MAP, metadata) {
7560 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP, (int32_t *)sharpnessMap->sharpness,
7561 CAM_MAX_MAP_WIDTH * CAM_MAX_MAP_HEIGHT * 3);
7562 }
7563
7564 IF_META_AVAILABLE(cam_lens_shading_map_t, lensShadingMap,
7565 CAM_INTF_META_LENS_SHADING_MAP, metadata) {
7566 size_t map_height = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.height,
7567 CAM_MAX_SHADING_MAP_HEIGHT);
7568 size_t map_width = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.width,
7569 CAM_MAX_SHADING_MAP_WIDTH);
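        // Each grid point in the shading map carries four gain factors
        // ([R, Geven, Godd, B] in the framework definition), hence the 4U multiplier.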
7570 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP,
7571 lensShadingMap->lens_shading, 4U * map_width * map_height);
7572 }
7573
7574 IF_META_AVAILABLE(uint32_t, toneMapMode, CAM_INTF_META_TONEMAP_MODE, metadata) {
7575 uint8_t fwk_toneMapMode = (uint8_t) *toneMapMode;
7576 camMetadata.update(ANDROID_TONEMAP_MODE, &fwk_toneMapMode, 1);
7577 }
7578
7579 IF_META_AVAILABLE(cam_rgb_tonemap_curves, tonemap, CAM_INTF_META_TONEMAP_CURVES, metadata) {
7580 //Populate CAM_INTF_META_TONEMAP_CURVES
7581 /* ch0 = G, ch 1 = B, ch 2 = R*/
7582 if (tonemap->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
7583 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
7584 tonemap->tonemap_points_cnt,
7585 CAM_MAX_TONEMAP_CURVE_SIZE);
7586 tonemap->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
7587 }
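        // Each tonemap point is an (input, output) pair, so every curve is reported
        // as tonemap_points_cnt * 2 float values.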
7588
7589 camMetadata.update(ANDROID_TONEMAP_CURVE_GREEN,
7590 &tonemap->curves[0].tonemap_points[0][0],
7591 tonemap->tonemap_points_cnt * 2);
7592
7593 camMetadata.update(ANDROID_TONEMAP_CURVE_BLUE,
7594 &tonemap->curves[1].tonemap_points[0][0],
7595 tonemap->tonemap_points_cnt * 2);
7596
7597 camMetadata.update(ANDROID_TONEMAP_CURVE_RED,
7598 &tonemap->curves[2].tonemap_points[0][0],
7599 tonemap->tonemap_points_cnt * 2);
7600 }
7601
7602 IF_META_AVAILABLE(cam_color_correct_gains_t, colorCorrectionGains,
7603 CAM_INTF_META_COLOR_CORRECT_GAINS, metadata) {
7604 camMetadata.update(ANDROID_COLOR_CORRECTION_GAINS, colorCorrectionGains->gains,
7605 CC_GAIN_MAX);
7606 }
7607
7608 IF_META_AVAILABLE(cam_color_correct_matrix_t, colorCorrectionMatrix,
7609 CAM_INTF_META_COLOR_CORRECT_TRANSFORM, metadata) {
7610 camMetadata.update(ANDROID_COLOR_CORRECTION_TRANSFORM,
7611 (camera_metadata_rational_t *)(void *)colorCorrectionMatrix->transform_matrix,
7612 CC_MATRIX_COLS * CC_MATRIX_ROWS);
7613 }
7614
7615 IF_META_AVAILABLE(cam_profile_tone_curve, toneCurve,
7616 CAM_INTF_META_PROFILE_TONE_CURVE, metadata) {
7617 if (toneCurve->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
7618 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
7619 toneCurve->tonemap_points_cnt,
7620 CAM_MAX_TONEMAP_CURVE_SIZE);
7621 toneCurve->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
7622 }
7623 camMetadata.update(ANDROID_SENSOR_PROFILE_TONE_CURVE,
7624 (float*)toneCurve->curve.tonemap_points,
7625 toneCurve->tonemap_points_cnt * 2);
7626 }
7627
7628 IF_META_AVAILABLE(cam_color_correct_gains_t, predColorCorrectionGains,
7629 CAM_INTF_META_PRED_COLOR_CORRECT_GAINS, metadata) {
7630 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_GAINS,
7631 predColorCorrectionGains->gains, 4);
7632 }
7633
7634 IF_META_AVAILABLE(cam_color_correct_matrix_t, predColorCorrectionMatrix,
7635 CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM, metadata) {
7636 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
7637 (camera_metadata_rational_t *)(void *)predColorCorrectionMatrix->transform_matrix,
7638 CC_MATRIX_ROWS * CC_MATRIX_COLS);
7639 }
7640
7641 IF_META_AVAILABLE(float, otpWbGrGb, CAM_INTF_META_OTP_WB_GRGB, metadata) {
7642 camMetadata.update(ANDROID_SENSOR_GREEN_SPLIT, otpWbGrGb, 1);
7643 }
7644
7645 IF_META_AVAILABLE(uint32_t, blackLevelLock, CAM_INTF_META_BLACK_LEVEL_LOCK, metadata) {
7646 uint8_t fwk_blackLevelLock = (uint8_t) *blackLevelLock;
7647 camMetadata.update(ANDROID_BLACK_LEVEL_LOCK, &fwk_blackLevelLock, 1);
7648 }
7649
7650 IF_META_AVAILABLE(uint32_t, sceneFlicker, CAM_INTF_META_SCENE_FLICKER, metadata) {
7651 uint8_t fwk_sceneFlicker = (uint8_t) *sceneFlicker;
7652 camMetadata.update(ANDROID_STATISTICS_SCENE_FLICKER, &fwk_sceneFlicker, 1);
7653 }
7654
7655 IF_META_AVAILABLE(uint32_t, effectMode, CAM_INTF_PARM_EFFECT, metadata) {
7656 int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
7657 *effectMode);
7658 if (NAME_NOT_FOUND != val) {
7659 uint8_t fwk_effectMode = (uint8_t)val;
7660 camMetadata.update(ANDROID_CONTROL_EFFECT_MODE, &fwk_effectMode, 1);
7661 }
7662 }
7663
7664 IF_META_AVAILABLE(cam_test_pattern_data_t, testPatternData,
7665 CAM_INTF_META_TEST_PATTERN_DATA, metadata) {
7666 int32_t fwk_testPatternMode = lookupFwkName(TEST_PATTERN_MAP,
7667 METADATA_MAP_SIZE(TEST_PATTERN_MAP), testPatternData->mode);
7668 if (NAME_NOT_FOUND != fwk_testPatternMode) {
7669 camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &fwk_testPatternMode, 1);
7670 }
7671 int32_t fwk_testPatternData[4];
7672 fwk_testPatternData[0] = testPatternData->r;
7673 fwk_testPatternData[3] = testPatternData->b;
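        // Entries 1 and 2 carry the two green channels; their order depends on
        // the sensor's color filter arrangement, handled in the switch below.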
7674 switch (gCamCapability[mCameraId]->color_arrangement) {
7675 case CAM_FILTER_ARRANGEMENT_RGGB:
7676 case CAM_FILTER_ARRANGEMENT_GRBG:
7677 fwk_testPatternData[1] = testPatternData->gr;
7678 fwk_testPatternData[2] = testPatternData->gb;
7679 break;
7680 case CAM_FILTER_ARRANGEMENT_GBRG:
7681 case CAM_FILTER_ARRANGEMENT_BGGR:
7682 fwk_testPatternData[2] = testPatternData->gr;
7683 fwk_testPatternData[1] = testPatternData->gb;
7684 break;
7685 default:
7686 LOGE("color arrangement %d is not supported",
7687 gCamCapability[mCameraId]->color_arrangement);
7688 break;
7689 }
7690 camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_DATA, fwk_testPatternData, 4);
7691 }
7692
7693 IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, metadata) {
7694 camMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
7695 }
7696
7697 IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, metadata) {
7698 String8 str((const char *)gps_methods);
7699 camMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
7700 }
7701
7702 IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, metadata) {
7703 camMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
7704 }
7705
7706 IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, metadata) {
7707 camMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
7708 }
7709
7710 IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, metadata) {
7711 uint8_t fwk_jpeg_quality = (uint8_t) *jpeg_quality;
7712 camMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
7713 }
7714
7715 IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, metadata) {
7716 uint8_t fwk_thumb_quality = (uint8_t) *thumb_quality;
7717 camMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
7718 }
7719
7720 IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, metadata) {
7721 int32_t fwk_thumb_size[2];
7722 fwk_thumb_size[0] = thumb_size->width;
7723 fwk_thumb_size[1] = thumb_size->height;
7724 camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
7725 }
7726
Shuzhen Wang2fea89e2017-05-08 17:02:15 -07007727 // Skip reprocess metadata if there is no input stream.
7728 if (mInputStreamInfo.dim.width > 0 && mInputStreamInfo.dim.height > 0) {
7729 IF_META_AVAILABLE(int32_t, privateData, CAM_INTF_META_PRIVATE_DATA, metadata) {
7730 camMetadata.update(QCAMERA3_PRIVATEDATA_REPROCESS,
7731 privateData,
7732 MAX_METADATA_PRIVATE_PAYLOAD_SIZE_IN_BYTES / sizeof(int32_t));
7733 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007734 }
7735
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007736 IF_META_AVAILABLE(int32_t, meteringMode, CAM_INTF_PARM_AEC_ALGO_TYPE, metadata) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08007737 camMetadata.update(QCAMERA3_EXPOSURE_METER,
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007738 meteringMode, 1);
7739 }
7740
Thierry Strudel54dc9782017-02-15 12:12:10 -08007741 IF_META_AVAILABLE(cam_asd_hdr_scene_data_t, hdr_scene_data,
7742 CAM_INTF_META_ASD_HDR_SCENE_DATA, metadata) {
7743 LOGD("hdr_scene_data: %d %f\n",
7744 hdr_scene_data->is_hdr_scene, hdr_scene_data->hdr_confidence);
7745 uint8_t isHdr = hdr_scene_data->is_hdr_scene;
7746 float isHdrConfidence = hdr_scene_data->hdr_confidence;
7747 camMetadata.update(QCAMERA3_STATS_IS_HDR_SCENE,
7748 &isHdr, 1);
7749 camMetadata.update(QCAMERA3_STATS_IS_HDR_SCENE_CONFIDENCE,
7750 &isHdrConfidence, 1);
7751 }
7752
7753
7754
Thierry Strudel3d639192016-09-09 11:52:26 -07007755 if (metadata->is_tuning_params_valid) {
7756 uint8_t tuning_meta_data_blob[sizeof(tuning_params_t)];
7757 uint8_t *data = (uint8_t *)&tuning_meta_data_blob[0];
7758 metadata->tuning_params.tuning_data_version = TUNING_DATA_VERSION;
7759
7760
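        // Blob layout: data version, then five size fields (sensor, VFE, CPP,
        // CAC, mod3), followed by the variable-length sensor/VFE/CPP/CAC payloads.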
7761 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_data_version),
7762 sizeof(uint32_t));
7763 data += sizeof(uint32_t);
7764
7765 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_sensor_data_size),
7766 sizeof(uint32_t));
7767 LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
7768 data += sizeof(uint32_t);
7769
7770 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_vfe_data_size),
7771 sizeof(uint32_t));
7772 LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
7773 data += sizeof(uint32_t);
7774
7775 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cpp_data_size),
7776 sizeof(uint32_t));
7777 LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
7778 data += sizeof(uint32_t);
7779
7780 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cac_data_size),
7781 sizeof(uint32_t));
7782 LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
7783 data += sizeof(uint32_t);
7784
7785 metadata->tuning_params.tuning_mod3_data_size = 0;
7786 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_mod3_data_size),
7787 sizeof(uint32_t));
7788 LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
7789 data += sizeof(uint32_t);
7790
7791 size_t count = MIN(metadata->tuning_params.tuning_sensor_data_size,
7792 TUNING_SENSOR_DATA_MAX);
7793 memcpy(data, ((uint8_t *)&metadata->tuning_params.data),
7794 count);
7795 data += count;
7796
7797 count = MIN(metadata->tuning_params.tuning_vfe_data_size,
7798 TUNING_VFE_DATA_MAX);
7799 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_VFE_DATA_OFFSET]),
7800 count);
7801 data += count;
7802
7803 count = MIN(metadata->tuning_params.tuning_cpp_data_size,
7804 TUNING_CPP_DATA_MAX);
7805 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CPP_DATA_OFFSET]),
7806 count);
7807 data += count;
7808
7809 count = MIN(metadata->tuning_params.tuning_cac_data_size,
7810 TUNING_CAC_DATA_MAX);
7811 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CAC_DATA_OFFSET]),
7812 count);
7813 data += count;
7814
7815 camMetadata.update(QCAMERA3_TUNING_META_DATA_BLOB,
7816 (int32_t *)(void *)tuning_meta_data_blob,
7817 (size_t)(data-tuning_meta_data_blob) / sizeof(uint32_t));
7818 }
7819
7820 IF_META_AVAILABLE(cam_neutral_col_point_t, neuColPoint,
7821 CAM_INTF_META_NEUTRAL_COL_POINT, metadata) {
7822 camMetadata.update(ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
7823 (camera_metadata_rational_t *)(void *)neuColPoint->neutral_col_point,
7824 NEUTRAL_COL_POINTS);
7825 }
7826
7827 IF_META_AVAILABLE(uint32_t, shadingMapMode, CAM_INTF_META_LENS_SHADING_MAP_MODE, metadata) {
7828 uint8_t fwk_shadingMapMode = (uint8_t) *shadingMapMode;
7829 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &fwk_shadingMapMode, 1);
7830 }
7831
7832 IF_META_AVAILABLE(cam_area_t, hAeRegions, CAM_INTF_META_AEC_ROI, metadata) {
7833 int32_t aeRegions[REGIONS_TUPLE_COUNT];
7834 // Adjust AE region from sensor output coordinate system to active
7835 // array coordinate system.
Shuzhen Wang91c14e82017-08-21 17:56:57 -07007836 cam_rect_t hAeRect = hAeRegions->rect;
7837 mCropRegionMapper.toActiveArray(hAeRect.left, hAeRect.top,
7838 hAeRect.width, hAeRect.height);
Thierry Strudel3d639192016-09-09 11:52:26 -07007839
Shuzhen Wang91c14e82017-08-21 17:56:57 -07007840 convertToRegions(hAeRect, aeRegions, hAeRegions->weight);
Thierry Strudel3d639192016-09-09 11:52:26 -07007841 camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions,
7842 REGIONS_TUPLE_COUNT);
7843 LOGD("Metadata : ANDROID_CONTROL_AE_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
7844 aeRegions[0], aeRegions[1], aeRegions[2], aeRegions[3],
Shuzhen Wang91c14e82017-08-21 17:56:57 -07007845 hAeRect.left, hAeRect.top, hAeRect.width,
7846 hAeRect.height);
Thierry Strudel3d639192016-09-09 11:52:26 -07007847 }
7848
Shuzhen Wang181c57b2017-07-21 11:39:44 -07007849 if (!pendingRequest.focusStateSent) {
7850 if (pendingRequest.focusStateValid) {
7851 camMetadata.update(ANDROID_CONTROL_AF_STATE, &pendingRequest.focusState, 1);
7852 LOGD("Metadata : ANDROID_CONTROL_AF_STATE %u", pendingRequest.focusState);
Shuzhen Wang0cb8cdf2016-07-14 11:56:49 -07007853 } else {
Shuzhen Wang181c57b2017-07-21 11:39:44 -07007854 IF_META_AVAILABLE(uint32_t, afState, CAM_INTF_META_AF_STATE, metadata) {
7855 uint8_t fwk_afState = (uint8_t) *afState;
7856 camMetadata.update(ANDROID_CONTROL_AF_STATE, &fwk_afState, 1);
7857 LOGD("Metadata : ANDROID_CONTROL_AF_STATE %u", *afState);
7858 }
Shuzhen Wang0cb8cdf2016-07-14 11:56:49 -07007859 }
7860 }
7861
Thierry Strudel3d639192016-09-09 11:52:26 -07007862 IF_META_AVAILABLE(float, focusDistance, CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata) {
7863 camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1);
7864 }
7865
7866 IF_META_AVAILABLE(float, focusRange, CAM_INTF_META_LENS_FOCUS_RANGE, metadata) {
7867 camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 2);
7868 }
7869
7870 IF_META_AVAILABLE(cam_af_lens_state_t, lensState, CAM_INTF_META_LENS_STATE, metadata) {
7871 uint8_t fwk_lensState = *lensState;
7872 camMetadata.update(ANDROID_LENS_STATE , &fwk_lensState, 1);
7873 }
7874
Thierry Strudel3d639192016-09-09 11:52:26 -07007875 IF_META_AVAILABLE(uint32_t, hal_ab_mode, CAM_INTF_PARM_ANTIBANDING, metadata) {
Shuzhen Wangf6890e02016-08-12 14:28:54 -07007876 uint32_t ab_mode = *hal_ab_mode;
7877 if (ab_mode == CAM_ANTIBANDING_MODE_AUTO_60HZ ||
7878 ab_mode == CAM_ANTIBANDING_MODE_AUTO_50HZ) {
7879 ab_mode = CAM_ANTIBANDING_MODE_AUTO;
7880 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007881 int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
Shuzhen Wangf6890e02016-08-12 14:28:54 -07007882 ab_mode);
Thierry Strudel3d639192016-09-09 11:52:26 -07007883 if (NAME_NOT_FOUND != val) {
7884 uint8_t fwk_ab_mode = (uint8_t)val;
7885 camMetadata.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &fwk_ab_mode, 1);
7886 }
7887 }
7888
7889 IF_META_AVAILABLE(uint32_t, bestshotMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
7890 int val = lookupFwkName(SCENE_MODES_MAP,
7891 METADATA_MAP_SIZE(SCENE_MODES_MAP), *bestshotMode);
7892 if (NAME_NOT_FOUND != val) {
7893 uint8_t fwkBestshotMode = (uint8_t)val;
7894 camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkBestshotMode, 1);
7895 LOGD("Metadata : ANDROID_CONTROL_SCENE_MODE");
7896 } else {
7897 LOGH("Metadata not found : ANDROID_CONTROL_SCENE_MODE");
7898 }
7899 }
7900
7901 IF_META_AVAILABLE(uint32_t, mode, CAM_INTF_META_MODE, metadata) {
7902 uint8_t fwk_mode = (uint8_t) *mode;
7903 camMetadata.update(ANDROID_CONTROL_MODE, &fwk_mode, 1);
7904 }
7905
7906 /* Constant metadata values to be updated */
7907 uint8_t hotPixelModeFast = ANDROID_HOT_PIXEL_MODE_FAST;
7908 camMetadata.update(ANDROID_HOT_PIXEL_MODE, &hotPixelModeFast, 1);
7909
7910 uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
7911 camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
7912
7913 int32_t hotPixelMap[2];
7914 camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP, &hotPixelMap[0], 0);
7915
7916 // CDS
7917 IF_META_AVAILABLE(int32_t, cds, CAM_INTF_PARM_CDS_MODE, metadata) {
7918 camMetadata.update(QCAMERA3_CDS_MODE, cds, 1);
7919 }
7920
Thierry Strudel04e026f2016-10-10 11:27:36 -07007921 IF_META_AVAILABLE(cam_sensor_hdr_type_t, vhdr, CAM_INTF_PARM_SENSOR_HDR, metadata) {
7922 int32_t fwk_hdr;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007923 int8_t curr_hdr_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) != 0);
Thierry Strudel04e026f2016-10-10 11:27:36 -07007924 if(*vhdr == CAM_SENSOR_HDR_OFF) {
7925 fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_OFF;
7926 } else {
7927 fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_ON;
7928 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08007929
7930 if(fwk_hdr != curr_hdr_state) {
7931 LOGH("PROFILE_META_HDR_TOGGLED value=%d", fwk_hdr);
7932 if(fwk_hdr)
7933 mCurrFeatureState |= CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
7934 else
7935 mCurrFeatureState &= ~CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
7936 }
Thierry Strudel04e026f2016-10-10 11:27:36 -07007937 camMetadata.update(QCAMERA3_VIDEO_HDR_MODE, &fwk_hdr, 1);
7938 }
7939
Thierry Strudel54dc9782017-02-15 12:12:10 -08007940 //binning correction
7941 IF_META_AVAILABLE(cam_binning_correction_mode_t, bin_correction,
7942 CAM_INTF_META_BINNING_CORRECTION_MODE, metadata) {
7943 int32_t fwk_bin_mode = (int32_t) *bin_correction;
7944 camMetadata.update(QCAMERA3_BINNING_CORRECTION_MODE, &fwk_bin_mode, 1);
7945 }
7946
Thierry Strudel04e026f2016-10-10 11:27:36 -07007947 IF_META_AVAILABLE(cam_ir_mode_type_t, ir, CAM_INTF_META_IR_MODE, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007948 int32_t fwk_ir = (int32_t) *ir;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007949 int8_t curr_ir_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_IR ) != 0);
7950 int8_t is_ir_on = 0;
7951
7952 (fwk_ir > 0) ? (is_ir_on = 1) : (is_ir_on = 0) ;
7953 if(is_ir_on != curr_ir_state) {
7954 LOGH("PROFILE_META_IR_TOGGLED value=%d", fwk_ir);
7955 if(is_ir_on)
7956 mCurrFeatureState |= CAM_QCOM_FEATURE_IR;
7957 else
7958 mCurrFeatureState &= ~CAM_QCOM_FEATURE_IR;
7959 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007960 camMetadata.update(QCAMERA3_IR_MODE, &fwk_ir, 1);
Thierry Strudel04e026f2016-10-10 11:27:36 -07007961 }
7962
Thierry Strudel269c81a2016-10-12 12:13:59 -07007963 // AEC SPEED
7964 IF_META_AVAILABLE(float, aec, CAM_INTF_META_AEC_CONVERGENCE_SPEED, metadata) {
7965 camMetadata.update(QCAMERA3_AEC_CONVERGENCE_SPEED, aec, 1);
7966 }
7967
7968 // AWB SPEED
7969 IF_META_AVAILABLE(float, awb, CAM_INTF_META_AWB_CONVERGENCE_SPEED, metadata) {
7970 camMetadata.update(QCAMERA3_AWB_CONVERGENCE_SPEED, awb, 1);
7971 }
7972
Thierry Strudel3d639192016-09-09 11:52:26 -07007973 // TNR
7974 IF_META_AVAILABLE(cam_denoise_param_t, tnr, CAM_INTF_PARM_TEMPORAL_DENOISE, metadata) {
7975 uint8_t tnr_enable = tnr->denoise_enable;
7976 int32_t tnr_process_type = (int32_t)tnr->process_plates;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007977 int8_t curr_tnr_state = ((mCurrFeatureState & CAM_QTI_FEATURE_SW_TNR) != 0) ;
7978 int8_t is_tnr_on = 0;
7979
7980 (tnr_enable > 0) ? (is_tnr_on = 1) : (is_tnr_on = 0);
7981 if(is_tnr_on != curr_tnr_state) {
7982 LOGH("PROFILE_META_TNR_TOGGLED value=%d", tnr_enable);
7983 if(is_tnr_on)
7984 mCurrFeatureState |= CAM_QTI_FEATURE_SW_TNR;
7985 else
7986 mCurrFeatureState &= ~CAM_QTI_FEATURE_SW_TNR;
7987 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007988
7989 camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
7990 camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
7991 }
7992
7993 // Reprocess crop data
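    // Only the crop entry (and its ROI map) that matches the reprocessible
    // output stream is forwarded to the framework.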
7994 IF_META_AVAILABLE(cam_crop_data_t, crop_data, CAM_INTF_META_CROP_DATA, metadata) {
7995 uint8_t cnt = crop_data->num_of_streams;
7996 if ( (0 >= cnt) || (cnt > MAX_NUM_STREAMS)) {
7997 // mm-qcamera-daemon only posts crop_data for streams
7998 // not linked to pproc, so the absence of valid crop
7999 // metadata is not necessarily an error case.
8000 LOGD("No valid crop metadata entries");
8001 } else {
8002 uint32_t reproc_stream_id;
8003 if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
8004 LOGD("No reprocessible stream found, ignore crop data");
8005 } else {
8006 int rc = NO_ERROR;
8007 Vector<int32_t> roi_map;
8008 int32_t *crop = new int32_t[cnt*4];
8009 if (NULL == crop) {
8010 rc = NO_MEMORY;
8011 }
8012 if (NO_ERROR == rc) {
8013 int32_t streams_found = 0;
8014 for (size_t i = 0; i < cnt; i++) {
8015 if (crop_data->crop_info[i].stream_id == reproc_stream_id) {
8016 if (pprocDone) {
8017 // HAL already does internal reprocessing,
8018 // either via reprocessing before JPEG encoding,
8019 // or offline postprocessing for pproc bypass case.
8020 crop[0] = 0;
8021 crop[1] = 0;
8022 crop[2] = mInputStreamInfo.dim.width;
8023 crop[3] = mInputStreamInfo.dim.height;
8024 } else {
8025 crop[0] = crop_data->crop_info[i].crop.left;
8026 crop[1] = crop_data->crop_info[i].crop.top;
8027 crop[2] = crop_data->crop_info[i].crop.width;
8028 crop[3] = crop_data->crop_info[i].crop.height;
8029 }
8030 roi_map.add(crop_data->crop_info[i].roi_map.left);
8031 roi_map.add(crop_data->crop_info[i].roi_map.top);
8032 roi_map.add(crop_data->crop_info[i].roi_map.width);
8033 roi_map.add(crop_data->crop_info[i].roi_map.height);
8034 streams_found++;
8035 LOGD("Adding reprocess crop data for stream %dx%d, %dx%d",
8036 crop[0], crop[1], crop[2], crop[3]);
8037 LOGD("Adding reprocess crop roi map for stream %dx%d, %dx%d",
8038 crop_data->crop_info[i].roi_map.left,
8039 crop_data->crop_info[i].roi_map.top,
8040 crop_data->crop_info[i].roi_map.width,
8041 crop_data->crop_info[i].roi_map.height);
8042 break;
8043
8044 }
8045 }
8046 camMetadata.update(QCAMERA3_CROP_COUNT_REPROCESS,
8047 &streams_found, 1);
8048 camMetadata.update(QCAMERA3_CROP_REPROCESS,
8049 crop, (size_t)(streams_found * 4));
8050 if (roi_map.array()) {
8051 camMetadata.update(QCAMERA3_CROP_ROI_MAP_REPROCESS,
8052 roi_map.array(), roi_map.size());
8053 }
8054 }
8055 if (crop) {
8056 delete [] crop;
8057 }
8058 }
8059 }
8060 }
8061
8062 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
8063 // Regardless of whether CAC is supported, CTS expects the CAC result to be non-NULL,
8064 // so hardcode the CAC result to OFF mode.
8065 uint8_t fwkCacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
8066 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &fwkCacMode, 1);
8067 } else {
8068 IF_META_AVAILABLE(cam_aberration_mode_t, cacMode, CAM_INTF_PARM_CAC, metadata) {
8069 int val = lookupFwkName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
8070 *cacMode);
8071 if (NAME_NOT_FOUND != val) {
8072 uint8_t resultCacMode = (uint8_t)val;
8073 // Check whether the CAC result from the callback matches the framework-set CAC mode.
8074 // If not, report the CAC mode that came in the corresponding request.
Shuzhen Wang181c57b2017-07-21 11:39:44 -07008075 if (pendingRequest.fwkCacMode != resultCacMode) {
8076 resultCacMode = pendingRequest.fwkCacMode;
Thierry Strudel3d639192016-09-09 11:52:26 -07008077 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08008078 //Check if CAC is disabled by property
8079 if (m_cacModeDisabled) {
8080 resultCacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
8081 }
8082
Shuzhen Wang181c57b2017-07-21 11:39:44 -07008083 LOGD("fwk_cacMode=%d resultCacMode=%d", pendingRequest.fwkCacMode, resultCacMode);
Thierry Strudel3d639192016-09-09 11:52:26 -07008084 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &resultCacMode, 1);
8085 } else {
8086 LOGE("Invalid CAC camera parameter: %d", *cacMode);
8087 }
8088 }
8089 }
8090
8091 // Post blob of cam_cds_data through vendor tag.
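    // The override blob carries a single entry: the session-wide CDS enable
    // state plus the CDS enable flag of the reprocessible stream, if one exists.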
8092 IF_META_AVAILABLE(cam_cds_data_t, cdsInfo, CAM_INTF_META_CDS_DATA, metadata) {
8093 uint8_t cnt = cdsInfo->num_of_streams;
8094 cam_cds_data_t cdsDataOverride;
8095 memset(&cdsDataOverride, 0, sizeof(cdsDataOverride));
8096 cdsDataOverride.session_cds_enable = cdsInfo->session_cds_enable;
8097 cdsDataOverride.num_of_streams = 1;
8098 if ((0 < cnt) && (cnt <= MAX_NUM_STREAMS)) {
8099 uint32_t reproc_stream_id;
8100 if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
8101 LOGD("No reprocessible stream found, ignore cds data");
8102 } else {
8103 for (size_t i = 0; i < cnt; i++) {
8104 if (cdsInfo->cds_info[i].stream_id ==
8105 reproc_stream_id) {
8106 cdsDataOverride.cds_info[0].cds_enable =
8107 cdsInfo->cds_info[i].cds_enable;
8108 break;
8109 }
8110 }
8111 }
8112 } else {
8113 LOGD("Invalid stream count %d in CDS_DATA", cnt);
8114 }
8115 camMetadata.update(QCAMERA3_CDS_INFO,
8116 (uint8_t *)&cdsDataOverride,
8117 sizeof(cam_cds_data_t));
8118 }
8119
8120 // Ldaf calibration data
8121 if (!mLdafCalibExist) {
8122 IF_META_AVAILABLE(uint32_t, ldafCalib,
8123 CAM_INTF_META_LDAF_EXIF, metadata) {
8124 mLdafCalibExist = true;
8125 mLdafCalib[0] = ldafCalib[0];
8126 mLdafCalib[1] = ldafCalib[1];
8127 LOGD("ldafCalib[0] is %d, ldafCalib[1] is %d",
8128 ldafCalib[0], ldafCalib[1]);
8129 }
8130 }
8131
Thierry Strudel54dc9782017-02-15 12:12:10 -08008132 // EXIF debug data through vendor tag
8133 /*
8134 * Mobicat Mask can assume 3 values:
8135 * 1 refers to Mobicat data,
8136 * 2 refers to Stats Debug and Exif Debug Data
8137 * 3 refers to Mobicat and Stats Debug Data
8138 * We want to make sure that we are sending Exif debug data
8139 * only when Mobicat Mask is 2.
8140 */
8141 if ((mExifParams.debug_params != NULL) && (getMobicatMask() == 2)) {
8142 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB,
8143 (uint8_t *)(void *)mExifParams.debug_params,
8144 sizeof(mm_jpeg_debug_exif_params_t));
8145 }
8146
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008147 // Reprocess and DDM debug data through vendor tag
8148 cam_reprocess_info_t repro_info;
8149 memset(&repro_info, 0, sizeof(cam_reprocess_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008150 IF_META_AVAILABLE(cam_stream_crop_info_t, sensorCropInfo,
8151 CAM_INTF_META_SNAP_CROP_INFO_SENSOR, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008152 memcpy(&(repro_info.sensor_crop_info), sensorCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008153 }
8154 IF_META_AVAILABLE(cam_stream_crop_info_t, camifCropInfo,
8155 CAM_INTF_META_SNAP_CROP_INFO_CAMIF, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008156 memcpy(&(repro_info.camif_crop_info), camifCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008157 }
8158 IF_META_AVAILABLE(cam_stream_crop_info_t, ispCropInfo,
8159 CAM_INTF_META_SNAP_CROP_INFO_ISP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008160 memcpy(&(repro_info.isp_crop_info), ispCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008161 }
8162 IF_META_AVAILABLE(cam_stream_crop_info_t, cppCropInfo,
8163 CAM_INTF_META_SNAP_CROP_INFO_CPP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008164 memcpy(&(repro_info.cpp_crop_info), cppCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008165 }
8166 IF_META_AVAILABLE(cam_focal_length_ratio_t, ratio,
8167 CAM_INTF_META_AF_FOCAL_LENGTH_RATIO, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008168 memcpy(&(repro_info.af_focal_length_ratio), ratio, sizeof(cam_focal_length_ratio_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008169 }
8170 IF_META_AVAILABLE(int32_t, flip, CAM_INTF_PARM_FLIP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008171 memcpy(&(repro_info.pipeline_flip), flip, sizeof(int32_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008172 }
8173 IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
8174 CAM_INTF_PARM_ROTATION, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008175 memcpy(&(repro_info.rotation_info), rotationInfo, sizeof(cam_rotation_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008176 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008177 IF_META_AVAILABLE(cam_area_t, afRoi, CAM_INTF_META_AF_ROI, metadata) {
8178 memcpy(&(repro_info.af_roi), afRoi, sizeof(cam_area_t));
8179 }
8180 IF_META_AVAILABLE(cam_dyn_img_data_t, dynMask, CAM_INTF_META_IMG_DYN_FEAT, metadata) {
8181 memcpy(&(repro_info.dyn_mask), dynMask, sizeof(cam_dyn_img_data_t));
8182 }
8183 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB,
8184 (uint8_t *)&repro_info, sizeof(cam_reprocess_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008185
Thierry Strudel295a0ca2016-11-03 18:38:47 -07008186 // INSTANT AEC MODE
8187 IF_META_AVAILABLE(uint8_t, instant_aec_mode,
8188 CAM_INTF_PARM_INSTANT_AEC, metadata) {
8189 camMetadata.update(QCAMERA3_INSTANT_AEC_MODE, instant_aec_mode, 1);
8190 }
8191
Shuzhen Wange763e802016-03-31 10:24:29 -07008192 // AF scene change
8193 IF_META_AVAILABLE(uint8_t, afSceneChange, CAM_INTF_META_AF_SCENE_CHANGE, metadata) {
8194 camMetadata.update(NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE, afSceneChange, 1);
8195 }
8196
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07008197 // Enable ZSL
8198 if (enableZsl != nullptr) {
8199 uint8_t value = *enableZsl ?
8200 ANDROID_CONTROL_ENABLE_ZSL_TRUE : ANDROID_CONTROL_ENABLE_ZSL_FALSE;
8201 camMetadata.update(ANDROID_CONTROL_ENABLE_ZSL, &value, 1);
8202 }
8203
Xu Han821ea9c2017-05-23 09:00:40 -07008204 // OIS Data
8205 IF_META_AVAILABLE(cam_frame_ois_info_t, frame_ois_data, CAM_INTF_META_FRAME_OIS_DATA, metadata) {
8206 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_FRAME_TIMESTAMP_VSYNC,
8207 &(frame_ois_data->frame_sof_timestamp_vsync), 1);
8208 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_FRAME_TIMESTAMP_BOOTTIME,
8209 &(frame_ois_data->frame_sof_timestamp_boottime), 1);
8210 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_TIMESTAMPS_BOOTTIME,
8211 frame_ois_data->ois_sample_timestamp_boottime, frame_ois_data->num_ois_sample);
8212 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_SHIFT_X,
8213 frame_ois_data->ois_sample_shift_x, frame_ois_data->num_ois_sample);
8214 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_SHIFT_Y,
8215 frame_ois_data->ois_sample_shift_y, frame_ois_data->num_ois_sample);
Xue Tu2c3e9142017-08-18 16:23:52 -07008216 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_SHIFT_PIXEL_X,
8217 frame_ois_data->ois_sample_shift_pixel_x, frame_ois_data->num_ois_sample);
8218 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_SHIFT_PIXEL_Y,
8219 frame_ois_data->ois_sample_shift_pixel_y, frame_ois_data->num_ois_sample);
Xu Han821ea9c2017-05-23 09:00:40 -07008220 }
8221
Thierry Strudel3d639192016-09-09 11:52:26 -07008222 resultMetadata = camMetadata.release();
8223 return resultMetadata;
8224}
8225
8226/*===========================================================================
8227 * FUNCTION : saveExifParams
8228 *
8229 * DESCRIPTION: Cache 3A/stats EXIF debug parameters received in the metadata callback
8230 *
8231 * PARAMETERS :
8232 * @metadata : metadata information from callback
8233 *
8234 * RETURN : none
8235 *
8236 *==========================================================================*/
8237void QCamera3HardwareInterface::saveExifParams(metadata_buffer_t *metadata)
8238{
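    // Each block below caches the corresponding debug parameters in
    // mExifParams.debug_params and marks them valid, so that they can later be
    // emitted through the EXIF debug data blob when the mobicat mask allows it.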
8239 IF_META_AVAILABLE(cam_ae_exif_debug_t, ae_exif_debug_params,
8240 CAM_INTF_META_EXIF_DEBUG_AE, metadata) {
8241 if (mExifParams.debug_params) {
8242 mExifParams.debug_params->ae_debug_params = *ae_exif_debug_params;
8243 mExifParams.debug_params->ae_debug_params_valid = TRUE;
8244 }
8245 }
8246 IF_META_AVAILABLE(cam_awb_exif_debug_t,awb_exif_debug_params,
8247 CAM_INTF_META_EXIF_DEBUG_AWB, metadata) {
8248 if (mExifParams.debug_params) {
8249 mExifParams.debug_params->awb_debug_params = *awb_exif_debug_params;
8250 mExifParams.debug_params->awb_debug_params_valid = TRUE;
8251 }
8252 }
8253 IF_META_AVAILABLE(cam_af_exif_debug_t,af_exif_debug_params,
8254 CAM_INTF_META_EXIF_DEBUG_AF, metadata) {
8255 if (mExifParams.debug_params) {
8256 mExifParams.debug_params->af_debug_params = *af_exif_debug_params;
8257 mExifParams.debug_params->af_debug_params_valid = TRUE;
8258 }
8259 }
8260 IF_META_AVAILABLE(cam_asd_exif_debug_t, asd_exif_debug_params,
8261 CAM_INTF_META_EXIF_DEBUG_ASD, metadata) {
8262 if (mExifParams.debug_params) {
8263 mExifParams.debug_params->asd_debug_params = *asd_exif_debug_params;
8264 mExifParams.debug_params->asd_debug_params_valid = TRUE;
8265 }
8266 }
8267 IF_META_AVAILABLE(cam_stats_buffer_exif_debug_t,stats_exif_debug_params,
8268 CAM_INTF_META_EXIF_DEBUG_STATS, metadata) {
8269 if (mExifParams.debug_params) {
8270 mExifParams.debug_params->stats_debug_params = *stats_exif_debug_params;
8271 mExifParams.debug_params->stats_debug_params_valid = TRUE;
8272 }
8273 }
8274 IF_META_AVAILABLE(cam_bestats_buffer_exif_debug_t,bestats_exif_debug_params,
8275 CAM_INTF_META_EXIF_DEBUG_BESTATS, metadata) {
8276 if (mExifParams.debug_params) {
8277 mExifParams.debug_params->bestats_debug_params = *bestats_exif_debug_params;
8278 mExifParams.debug_params->bestats_debug_params_valid = TRUE;
8279 }
8280 }
8281 IF_META_AVAILABLE(cam_bhist_buffer_exif_debug_t, bhist_exif_debug_params,
8282 CAM_INTF_META_EXIF_DEBUG_BHIST, metadata) {
8283 if (mExifParams.debug_params) {
8284 mExifParams.debug_params->bhist_debug_params = *bhist_exif_debug_params;
8285 mExifParams.debug_params->bhist_debug_params_valid = TRUE;
8286 }
8287 }
8288 IF_META_AVAILABLE(cam_q3a_tuning_info_t, q3a_tuning_exif_debug_params,
8289 CAM_INTF_META_EXIF_DEBUG_3A_TUNING, metadata) {
8290 if (mExifParams.debug_params) {
8291 mExifParams.debug_params->q3a_tuning_debug_params = *q3a_tuning_exif_debug_params;
8292 mExifParams.debug_params->q3a_tuning_debug_params_valid = TRUE;
8293 }
8294 }
8295}
8296
8297/*===========================================================================
8298 * FUNCTION : get3AExifParams
8299 *
8300 * DESCRIPTION: Return the cached EXIF parameters
8301 *
8302 * PARAMETERS : none
8303 *
8304 *
8305 * RETURN : mm_jpeg_exif_params_t
8306 *
8307 *==========================================================================*/
8308mm_jpeg_exif_params_t QCamera3HardwareInterface::get3AExifParams()
8309{
8310 return mExifParams;
8311}
8312
8313/*===========================================================================
8314 * FUNCTION : translateCbUrgentMetadataToResultMetadata
8315 *
8316 * DESCRIPTION: Translate urgent (partial) metadata from the camera backend into framework result metadata
8317 *
8318 * PARAMETERS :
8319 * @metadata : metadata information from callback
Shuzhen Wang94ddf072017-03-12 19:47:23 -07008320 * @lastUrgentMetadataInBatch: Boolean to indicate whether this is the last
8321 * urgent metadata in a batch. Always true for
8322 * non-batch mode.
Shuzhen Wang181c57b2017-07-21 11:39:44 -07008323 * @frame_number : frame number for this urgent metadata
Shuzhen Wang485e2442017-08-02 12:21:08 -07008324 * @isJumpstartMetadata: Whether this is a partial metadata for jumpstart,
8325 * i.e. even though it doesn't map to a valid partial
8326 * frame number, its metadata entries should be kept.
Thierry Strudel3d639192016-09-09 11:52:26 -07008327 * RETURN : camera_metadata_t*
8328 * metadata in a format specified by fwk
8329 *==========================================================================*/
8330camera_metadata_t*
8331QCamera3HardwareInterface::translateCbUrgentMetadataToResultMetadata
Shuzhen Wang181c57b2017-07-21 11:39:44 -07008332 (metadata_buffer_t *metadata, bool lastUrgentMetadataInBatch,
Shuzhen Wang485e2442017-08-02 12:21:08 -07008333 uint32_t frame_number, bool isJumpstartMetadata)
Thierry Strudel3d639192016-09-09 11:52:26 -07008334{
8335 CameraMetadata camMetadata;
8336 camera_metadata_t *resultMetadata;
8337
Shuzhen Wang485e2442017-08-02 12:21:08 -07008338 if (!lastUrgentMetadataInBatch && !isJumpstartMetadata) {
Shuzhen Wang94ddf072017-03-12 19:47:23 -07008339 /* In batch mode, use empty metadata if this is not the last in batch
8340 */
8341 resultMetadata = allocate_camera_metadata(0, 0);
8342 return resultMetadata;
8343 }
Thierry Strudel3d639192016-09-09 11:52:26 -07008344
8345 IF_META_AVAILABLE(uint32_t, whiteBalanceState, CAM_INTF_META_AWB_STATE, metadata) {
8346 uint8_t fwk_whiteBalanceState = (uint8_t) *whiteBalanceState;
8347 camMetadata.update(ANDROID_CONTROL_AWB_STATE, &fwk_whiteBalanceState, 1);
8348 LOGD("urgent Metadata : ANDROID_CONTROL_AWB_STATE %u", *whiteBalanceState);
8349 }
8350
8351 IF_META_AVAILABLE(cam_trigger_t, aecTrigger, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER, metadata) {
8352 camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
8353 &aecTrigger->trigger, 1);
8354 camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID,
8355 &aecTrigger->trigger_id, 1);
8356 LOGD("urgent Metadata : CAM_INTF_META_AEC_PRECAPTURE_TRIGGER: %d",
8357 aecTrigger->trigger);
8358 LOGD("urgent Metadata : ANDROID_CONTROL_AE_PRECAPTURE_ID: %d",
8359 aecTrigger->trigger_id);
8360 }
8361
8362 IF_META_AVAILABLE(uint32_t, ae_state, CAM_INTF_META_AEC_STATE, metadata) {
8363 uint8_t fwk_ae_state = (uint8_t) *ae_state;
8364 camMetadata.update(ANDROID_CONTROL_AE_STATE, &fwk_ae_state, 1);
8365 LOGD("urgent Metadata : ANDROID_CONTROL_AE_STATE %u", *ae_state);
8366 }
8367
Shuzhen Wang181c57b2017-07-21 11:39:44 -07008368 IF_META_AVAILABLE(uint32_t, focusMode, CAM_INTF_PARM_FOCUS_MODE, metadata) {
8369 int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP), *focusMode);
8370 if (NAME_NOT_FOUND != val) {
8371 uint8_t fwkAfMode = (uint8_t)val;
8372 camMetadata.update(ANDROID_CONTROL_AF_MODE, &fwkAfMode, 1);
8373 LOGD("urgent Metadata : ANDROID_CONTROL_AF_MODE %d", val);
8374 } else {
8375 LOGH("urgent Metadata not found : ANDROID_CONTROL_AF_MODE %d",
8376 val);
8377 }
Thierry Strudel3d639192016-09-09 11:52:26 -07008378 }
8379
Shuzhen Wang181c57b2017-07-21 11:39:44 -07008380 IF_META_AVAILABLE(cam_trigger_t, af_trigger, CAM_INTF_META_AF_TRIGGER, metadata) {
8381 LOGD("urgent Metadata : CAM_INTF_META_AF_TRIGGER = %d",
8382 af_trigger->trigger);
8383 LOGD("urgent Metadata : ANDROID_CONTROL_AF_TRIGGER_ID = %d",
8384 af_trigger->trigger_id);
8385
8386 IF_META_AVAILABLE(uint32_t, afState, CAM_INTF_META_AF_STATE, metadata) {
8387 mAfTrigger = *af_trigger;
8388 uint32_t fwk_AfState = (uint32_t) *afState;
8389
8390 // If this is the result for a new trigger, check if there is new early
8391 // af state. If there is, use the last af state for all results
8392 // preceding current partial frame number.
8393 for (auto & pendingRequest : mPendingRequestsList) {
8394 if (pendingRequest.frame_number < frame_number) {
8395 pendingRequest.focusStateValid = true;
8396 pendingRequest.focusState = fwk_AfState;
8397 } else if (pendingRequest.frame_number == frame_number) {
8398 IF_META_AVAILABLE(uint32_t, earlyAfState, CAM_INTF_META_EARLY_AF_STATE, metadata) {
8399 // Check if early AF state for trigger exists. If yes, send AF state as
8400 // partial result for better latency.
8401 uint8_t fwkEarlyAfState = (uint8_t) *earlyAfState;
8402 pendingRequest.focusStateSent = true;
8403 camMetadata.update(ANDROID_CONTROL_AF_STATE, &fwkEarlyAfState, 1);
8404 LOGD("urgent Metadata(%d) : ANDROID_CONTROL_AF_STATE %u",
8405 frame_number, fwkEarlyAfState);
8406 }
8407 }
8408 }
8409 }
8410 }
8411 camMetadata.update(ANDROID_CONTROL_AF_TRIGGER,
8412 &mAfTrigger.trigger, 1);
8413 camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, &mAfTrigger.trigger_id, 1);
8414
Shuzhen Wang6ce35e62017-03-27 18:00:41 -07008415 IF_META_AVAILABLE(cam_area_t, hAfRegions, CAM_INTF_META_AF_ROI, metadata) {
8416 /*af regions*/
Shuzhen Wang91c14e82017-08-21 17:56:57 -07008417 cam_rect_t hAfRect = hAfRegions->rect;
Shuzhen Wang6ce35e62017-03-27 18:00:41 -07008418 int32_t afRegions[REGIONS_TUPLE_COUNT];
8419 // Adjust AF region from sensor output coordinate system to active
8420 // array coordinate system.
Shuzhen Wang91c14e82017-08-21 17:56:57 -07008421 mCropRegionMapper.toActiveArray(hAfRect.left, hAfRect.top,
8422 hAfRect.width, hAfRect.height);
Shuzhen Wang6ce35e62017-03-27 18:00:41 -07008423
Shuzhen Wang91c14e82017-08-21 17:56:57 -07008424 convertToRegions(hAfRect, afRegions, hAfRegions->weight);
Shuzhen Wang6ce35e62017-03-27 18:00:41 -07008425 camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions,
8426 REGIONS_TUPLE_COUNT);
8427 LOGD("Metadata : ANDROID_CONTROL_AF_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
8428 afRegions[0], afRegions[1], afRegions[2], afRegions[3],
Shuzhen Wang91c14e82017-08-21 17:56:57 -07008429 hAfRect.left, hAfRect.top, hAfRect.width,
8430 hAfRect.height);
Shuzhen Wang6ce35e62017-03-27 18:00:41 -07008431 }
8432
Shuzhen Wangcc386c52017-03-29 09:28:08 -07008433 // AF region confidence
8434 IF_META_AVAILABLE(int32_t, afRegionConfidence, CAM_INTF_META_AF_REGIONS_CONFIDENCE, metadata) {
8435 camMetadata.update(NEXUS_EXPERIMENTAL_2017_AF_REGIONS_CONFIDENCE, afRegionConfidence, 1);
8436 }
8437
Thierry Strudel3d639192016-09-09 11:52:26 -07008438 IF_META_AVAILABLE(int32_t, whiteBalance, CAM_INTF_PARM_WHITE_BALANCE, metadata) {
8439 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
8440 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP), *whiteBalance);
8441 if (NAME_NOT_FOUND != val) {
8442 uint8_t fwkWhiteBalanceMode = (uint8_t)val;
8443 camMetadata.update(ANDROID_CONTROL_AWB_MODE, &fwkWhiteBalanceMode, 1);
8444 LOGD("urgent Metadata : ANDROID_CONTROL_AWB_MODE %d", val);
8445 } else {
8446 LOGH("urgent Metadata not found : ANDROID_CONTROL_AWB_MODE");
8447 }
8448 }
8449
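    // Derive ANDROID_CONTROL_AE_MODE: red-eye reduction takes precedence, then
    // an AUTO/ON flash mode, then the plain AE on/off/external-flash state.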
8450 uint8_t fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
8451 uint32_t aeMode = CAM_AE_MODE_MAX;
8452 int32_t flashMode = CAM_FLASH_MODE_MAX;
8453 int32_t redeye = -1;
8454 IF_META_AVAILABLE(uint32_t, pAeMode, CAM_INTF_META_AEC_MODE, metadata) {
8455 aeMode = *pAeMode;
8456 }
8457 IF_META_AVAILABLE(int32_t, pFlashMode, CAM_INTF_PARM_LED_MODE, metadata) {
8458 flashMode = *pFlashMode;
8459 }
8460 IF_META_AVAILABLE(int32_t, pRedeye, CAM_INTF_PARM_REDEYE_REDUCTION, metadata) {
8461 redeye = *pRedeye;
8462 }
8463
8464 if (1 == redeye) {
8465 fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
8466 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8467 } else if ((CAM_FLASH_MODE_AUTO == flashMode) || (CAM_FLASH_MODE_ON == flashMode)) {
8468 int val = lookupFwkName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
8469 flashMode);
8470 if (NAME_NOT_FOUND != val) {
8471 fwk_aeMode = (uint8_t)val;
8472 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8473 } else {
8474 LOGE("Unsupported flash mode %d", flashMode);
8475 }
8476 } else if (aeMode == CAM_AE_MODE_ON) {
8477 fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON;
8478 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8479 } else if (aeMode == CAM_AE_MODE_OFF) {
8480 fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
8481 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -08008482 } else if (aeMode == CAM_AE_MODE_ON_EXTERNAL_FLASH) {
8483 fwk_aeMode = NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH;
8484 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -07008485 } else {
8486 LOGE("Not enough info to deduce ANDROID_CONTROL_AE_MODE redeye:%d, "
8487 "flashMode:%d, aeMode:%u!!!",
8488 redeye, flashMode, aeMode);
8489 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07008490 if (mInstantAEC) {
8491 // Increment frame index count until a bound is reached for instant AEC.
8492 mInstantAecFrameIdxCount++;
8493 IF_META_AVAILABLE(cam_3a_params_t, ae_params,
8494 CAM_INTF_META_AEC_INFO, metadata) {
8495 LOGH("ae_params->settled = %d",ae_params->settled);
8496 // If AEC settled, or if number of frames reached bound value,
8497 // should reset instant AEC.
8498 if (ae_params->settled ||
8499 (mInstantAecFrameIdxCount > mAecSkipDisplayFrameBound)) {
8500 LOGH("AEC settled or Frames reached instantAEC bound, resetting instantAEC");
8501 mInstantAEC = false;
8502 mResetInstantAEC = true;
8503 mInstantAecFrameIdxCount = 0;
8504 }
8505 }
8506 }
Shuzhen Wang3569d4a2017-09-04 19:10:28 -07008507
8508 IF_META_AVAILABLE(int32_t, af_tof_confidence,
8509 CAM_INTF_META_AF_TOF_CONFIDENCE, metadata) {
8510 IF_META_AVAILABLE(int32_t, af_tof_distance,
8511 CAM_INTF_META_AF_TOF_DISTANCE, metadata) {
8512 int32_t fwk_af_tof_confidence = *af_tof_confidence;
8513 int32_t fwk_af_tof_distance = *af_tof_distance;
8514 if (fwk_af_tof_confidence == 1) {
8515 mSceneDistance = fwk_af_tof_distance;
8516 } else {
8517 mSceneDistance = -1;
8518 }
8519 LOGD("tof_distance %d, tof_confidence %d, mSceneDistance %d",
8520 fwk_af_tof_distance, fwk_af_tof_confidence, mSceneDistance);
8521 }
8522 }
8523 camMetadata.update(NEXUS_EXPERIMENTAL_2017_SCENE_DISTANCE, &mSceneDistance, 1);
8524
Thierry Strudel3d639192016-09-09 11:52:26 -07008525 resultMetadata = camMetadata.release();
8526 return resultMetadata;
8527}
8528
8529/*===========================================================================
8530 * FUNCTION : dumpMetadataToFile
8531 *
8532 * DESCRIPTION: Dumps tuning metadata to file system
8533 *
8534 * PARAMETERS :
8535 * @meta : tuning metadata
8536 * @dumpFrameCount : current dump frame count
8537 * @enabled : Enable mask
8538 *
8539 *==========================================================================*/
8540void QCamera3HardwareInterface::dumpMetadataToFile(tuning_params_t &meta,
8541 uint32_t &dumpFrameCount,
8542 bool enabled,
8543 const char *type,
8544 uint32_t frameNumber)
8545{
8546 //Some sanity checks
8547 if (meta.tuning_sensor_data_size > TUNING_SENSOR_DATA_MAX) {
8548 LOGE("Tuning sensor data size bigger than expected %d: %d",
8549 meta.tuning_sensor_data_size,
8550 TUNING_SENSOR_DATA_MAX);
8551 return;
8552 }
8553
8554 if (meta.tuning_vfe_data_size > TUNING_VFE_DATA_MAX) {
8555 LOGE("Tuning VFE data size bigger than expected %d: %d",
8556 meta.tuning_vfe_data_size,
8557 TUNING_VFE_DATA_MAX);
8558 return;
8559 }
8560
8561 if (meta.tuning_cpp_data_size > TUNING_CPP_DATA_MAX) {
8562 LOGE("Tuning CPP data size bigger than expected %d: %d",
8563 meta.tuning_cpp_data_size,
8564 TUNING_CPP_DATA_MAX);
8565 return;
8566 }
8567
8568 if (meta.tuning_cac_data_size > TUNING_CAC_DATA_MAX) {
8569 LOGE("Tuning CAC data size bigger than expected %d: %d",
8570 meta.tuning_cac_data_size,
8571 TUNING_CAC_DATA_MAX);
8572 return;
8573 }
8574 //
8575
8576 if(enabled){
8577 char timeBuf[FILENAME_MAX];
8578 char buf[FILENAME_MAX];
8579 memset(buf, 0, sizeof(buf));
8580 memset(timeBuf, 0, sizeof(timeBuf));
8581 time_t current_time;
8582 struct tm * timeinfo;
8583 time (&current_time);
8584 timeinfo = localtime (&current_time);
8585 if (timeinfo != NULL) {
8586 strftime (timeBuf, sizeof(timeBuf),
8587 QCAMERA_DUMP_FRM_LOCATION"%Y%m%d%H%M%S", timeinfo);
8588 }
8589 String8 filePath(timeBuf);
8590 snprintf(buf,
8591 sizeof(buf),
8592 "%dm_%s_%d.bin",
8593 dumpFrameCount,
8594 type,
8595 frameNumber);
8596 filePath.append(buf);
8597 int file_fd = open(filePath.string(), O_RDWR | O_CREAT, 0777);
8598 if (file_fd >= 0) {
8599 ssize_t written_len = 0;
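            // The dump mirrors the in-memory tuning blob layout: version, the
            // five size fields, then the sensor/VFE/CPP/CAC payloads at their
            // fixed offsets.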
8600 meta.tuning_data_version = TUNING_DATA_VERSION;
8601 void *data = (void *)((uint8_t *)&meta.tuning_data_version);
8602 written_len += write(file_fd, data, sizeof(uint32_t));
8603 data = (void *)((uint8_t *)&meta.tuning_sensor_data_size);
8604 LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
8605 written_len += write(file_fd, data, sizeof(uint32_t));
8606 data = (void *)((uint8_t *)&meta.tuning_vfe_data_size);
8607 LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
8608 written_len += write(file_fd, data, sizeof(uint32_t));
8609 data = (void *)((uint8_t *)&meta.tuning_cpp_data_size);
8610 LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
8611 written_len += write(file_fd, data, sizeof(uint32_t));
8612 data = (void *)((uint8_t *)&meta.tuning_cac_data_size);
8613 LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
8614 written_len += write(file_fd, data, sizeof(uint32_t));
8615 meta.tuning_mod3_data_size = 0;
8616 data = (void *)((uint8_t *)&meta.tuning_mod3_data_size);
8617 LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
8618 written_len += write(file_fd, data, sizeof(uint32_t));
8619 size_t total_size = meta.tuning_sensor_data_size;
8620 data = (void *)((uint8_t *)&meta.data);
8621 written_len += write(file_fd, data, total_size);
8622 total_size = meta.tuning_vfe_data_size;
8623 data = (void *)((uint8_t *)&meta.data[TUNING_VFE_DATA_OFFSET]);
8624 written_len += write(file_fd, data, total_size);
8625 total_size = meta.tuning_cpp_data_size;
8626 data = (void *)((uint8_t *)&meta.data[TUNING_CPP_DATA_OFFSET]);
8627 written_len += write(file_fd, data, total_size);
8628 total_size = meta.tuning_cac_data_size;
8629 data = (void *)((uint8_t *)&meta.data[TUNING_CAC_DATA_OFFSET]);
8630 written_len += write(file_fd, data, total_size);
8631 close(file_fd);
8632 }else {
8633 LOGE("fail to open file for metadata dumping");
8634 }
8635 }
8636}
8637
8638/*===========================================================================
8639 * FUNCTION : cleanAndSortStreamInfo
8640 *
8641 * DESCRIPTION: helper method to clean up invalid streams in stream_info,
8642 * and sort them such that raw streams are at the end of the list.
8643 * This is a workaround for a camera daemon constraint.
8644 *
8645 * PARAMETERS : None
8646 *
8647 *==========================================================================*/
8648void QCamera3HardwareInterface::cleanAndSortStreamInfo()
8649{
8650 List<stream_info_t *> newStreamInfo;
8651
8652 /*clean up invalid streams*/
8653 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
8654 it != mStreamInfo.end();) {
8655 if(((*it)->status) == INVALID){
8656 QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
8657 delete channel;
8658 free(*it);
8659 it = mStreamInfo.erase(it);
8660 } else {
8661 it++;
8662 }
8663 }
8664
8665 // Move preview/video/callback/snapshot streams into newList
8666 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
8667 it != mStreamInfo.end();) {
8668 if ((*it)->stream->format != HAL_PIXEL_FORMAT_RAW_OPAQUE &&
8669 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW10 &&
8670 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW16) {
8671 newStreamInfo.push_back(*it);
8672 it = mStreamInfo.erase(it);
8673 } else
8674 it++;
8675 }
8676 // Move raw streams into newList
8677 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
8678 it != mStreamInfo.end();) {
8679 newStreamInfo.push_back(*it);
8680 it = mStreamInfo.erase(it);
8681 }
8682
8683 mStreamInfo = newStreamInfo;
8684}
8685
8686/*===========================================================================
8687 * FUNCTION : extractJpegMetadata
8688 *
8689 * DESCRIPTION: helper method to extract Jpeg metadata from capture request.
8690 * JPEG metadata is cached in HAL, and return as part of capture
8691 * result when metadata is returned from camera daemon.
8692 *
8693 * PARAMETERS : @jpegMetadata: jpeg metadata to be extracted
8694 * @request: capture request
8695 *
8696 *==========================================================================*/
8697void QCamera3HardwareInterface::extractJpegMetadata(
8698 CameraMetadata& jpegMetadata,
8699 const camera3_capture_request_t *request)
8700{
8701 CameraMetadata frame_settings;
8702 frame_settings = request->settings;
8703
8704 if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES))
8705 jpegMetadata.update(ANDROID_JPEG_GPS_COORDINATES,
8706 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d,
8707 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).count);
8708
8709 if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD))
8710 jpegMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD,
8711 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8,
8712 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).count);
8713
8714 if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP))
8715 jpegMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP,
8716 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64,
8717 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).count);
8718
8719 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION))
8720 jpegMetadata.update(ANDROID_JPEG_ORIENTATION,
8721 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32,
8722 frame_settings.find(ANDROID_JPEG_ORIENTATION).count);
8723
8724 if (frame_settings.exists(ANDROID_JPEG_QUALITY))
8725 jpegMetadata.update(ANDROID_JPEG_QUALITY,
8726 frame_settings.find(ANDROID_JPEG_QUALITY).data.u8,
8727 frame_settings.find(ANDROID_JPEG_QUALITY).count);
8728
8729 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY))
8730 jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY,
8731 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8,
8732 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).count);
8733
8734 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
8735 int32_t thumbnail_size[2];
8736 thumbnail_size[0] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
8737 thumbnail_size[1] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
8738 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
8739 int32_t orientation =
8740 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008741 if ((!needJpegExifRotation()) && ((orientation == 90) || (orientation == 270))) {
Thierry Strudel3d639192016-09-09 11:52:26 -07008742 //swap thumbnail dimensions for rotations 90 and 270 in jpeg metadata.
8743 int32_t temp;
8744 temp = thumbnail_size[0];
8745 thumbnail_size[0] = thumbnail_size[1];
8746 thumbnail_size[1] = temp;
8747 }
8748 }
8749 jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE,
8750 thumbnail_size,
8751 frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
8752 }
8753
8754}
8755
8756/*===========================================================================
8757 * FUNCTION : convertToRegions
8758 *
8759 * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array
8760 *
8761 * PARAMETERS :
8762 * @rect : cam_rect_t struct to convert
8763 * @region : int32_t destination array
8764 * @weight : if we are converting from cam_area_t, weight is valid
8765 * else weight = -1
8766 *
8767 *==========================================================================*/
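// e.g. rect {left=0, top=0, width=100, height=80} with weight 1 maps to
// region {0, 0, 100, 80, 1}.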
8768void QCamera3HardwareInterface::convertToRegions(cam_rect_t rect,
8769 int32_t *region, int weight)
8770{
Jason Lee8ce36fa2017-04-19 19:40:37 -07008771 region[FACE_LEFT] = rect.left;
8772 region[FACE_TOP] = rect.top;
8773 region[FACE_RIGHT] = rect.left + rect.width;
8774 region[FACE_BOTTOM] = rect.top + rect.height;
Thierry Strudel3d639192016-09-09 11:52:26 -07008775 if (weight > -1) {
Jason Lee8ce36fa2017-04-19 19:40:37 -07008776 region[FACE_WEIGHT] = weight;
Thierry Strudel3d639192016-09-09 11:52:26 -07008777 }
8778}
8779
8780/*===========================================================================
8781 * FUNCTION : convertFromRegions
8782 *
8783 * DESCRIPTION: helper method to convert a framework region array into cam_area_t
8784 *
8785 * PARAMETERS :
8786 * @roi : cam_area_t destination struct
8787 * @frame_settings : framework metadata containing the region tag
8788 * @tag : metadata tag whose data is [x_min, y_min, x_max, y_max,
8789 * weight]
8790 *
8791 *==========================================================================*/
8792void QCamera3HardwareInterface::convertFromRegions(cam_area_t &roi,
Chien-Yu Chen92724a82017-01-06 11:50:30 -08008793 const CameraMetadata &frame_settings, uint32_t tag)
Thierry Strudel3d639192016-09-09 11:52:26 -07008794{
Thierry Strudel3d639192016-09-09 11:52:26 -07008795 int32_t x_min = frame_settings.find(tag).data.i32[0];
8796 int32_t y_min = frame_settings.find(tag).data.i32[1];
8797 int32_t x_max = frame_settings.find(tag).data.i32[2];
8798 int32_t y_max = frame_settings.find(tag).data.i32[3];
8799 roi.weight = frame_settings.find(tag).data.i32[4];
8800 roi.rect.left = x_min;
8801 roi.rect.top = y_min;
8802 roi.rect.width = x_max - x_min;
8803 roi.rect.height = y_max - y_min;
8804}
8805
8806/*===========================================================================
8807 * FUNCTION : resetIfNeededROI
8808 *
8809 * DESCRIPTION: helper method to reset the roi if it is greater than scaler
8810 * crop region
8811 *
8812 * PARAMETERS :
8813 * @roi : cam_area_t struct to resize
8814 * @scalerCropRegion : cam_crop_region_t region to compare against
8815 *
8816 *
8817 *==========================================================================*/
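// e.g. an ROI {left=0, top=0, width=4000, height=3000} with weight 1 checked
// against a scaler crop region {1000, 750, 2000, 1500} is clipped to
// {1000, 750, 2000, 1500} and the function returns true.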
8818bool QCamera3HardwareInterface::resetIfNeededROI(cam_area_t* roi,
8819 const cam_crop_region_t* scalerCropRegion)
8820{
8821 int32_t roi_x_max = roi->rect.width + roi->rect.left;
8822 int32_t roi_y_max = roi->rect.height + roi->rect.top;
8823 int32_t crop_x_max = scalerCropRegion->width + scalerCropRegion->left;
8824 int32_t crop_y_max = scalerCropRegion->height + scalerCropRegion->top;
8825
8826 /* According to spec weight = 0 is used to indicate roi needs to be disabled
8827 * without having this check the calculations below to validate if the roi
8828 * is inside scalar crop region will fail resulting in the roi not being
8829 * reset causing algorithm to continue to use stale roi window
8830 */
8831 if (roi->weight == 0) {
8832 return true;
8833 }
8834
8835 if ((roi_x_max < scalerCropRegion->left) ||
8836 // right edge of roi window is left of scalar crop's left edge
8837 (roi_y_max < scalerCropRegion->top) ||
8838 // bottom edge of roi window is above scalar crop's top edge
8839 (roi->rect.left > crop_x_max) ||
8840 // left edge of roi window is beyond(right) of scalar crop's right edge
8841 (roi->rect.top > crop_y_max)){
8842 // top edge of roi window is below scalar crop's bottom edge
8843 return false;
8844 }
8845 if (roi->rect.left < scalerCropRegion->left) {
8846 roi->rect.left = scalerCropRegion->left;
8847 }
8848 if (roi->rect.top < scalerCropRegion->top) {
8849 roi->rect.top = scalerCropRegion->top;
8850 }
8851 if (roi_x_max > crop_x_max) {
8852 roi_x_max = crop_x_max;
8853 }
8854 if (roi_y_max > crop_y_max) {
8855 roi_y_max = crop_y_max;
8856 }
8857 roi->rect.width = roi_x_max - roi->rect.left;
8858 roi->rect.height = roi_y_max - roi->rect.top;
8859 return true;
8860}
8861
8862/*===========================================================================
8863 * FUNCTION : convertLandmarks
8864 *
8865 * DESCRIPTION: helper method to extract the landmarks from face detection info
8866 *
8867 * PARAMETERS :
8868 * @landmark_data : input landmark data to be converted
8869 * @landmarks : int32_t destination array
8870 *
8871 *
8872 *==========================================================================*/
8873void QCamera3HardwareInterface::convertLandmarks(
8874 cam_face_landmarks_info_t landmark_data,
8875 int32_t *landmarks)
8876{
Thierry Strudel04e026f2016-10-10 11:27:36 -07008877 if (landmark_data.is_left_eye_valid) {
8878 landmarks[LEFT_EYE_X] = (int32_t)landmark_data.left_eye_center.x;
8879 landmarks[LEFT_EYE_Y] = (int32_t)landmark_data.left_eye_center.y;
8880 } else {
8881 landmarks[LEFT_EYE_X] = FACE_INVALID_POINT;
8882 landmarks[LEFT_EYE_Y] = FACE_INVALID_POINT;
8883 }
8884
8885 if (landmark_data.is_right_eye_valid) {
8886 landmarks[RIGHT_EYE_X] = (int32_t)landmark_data.right_eye_center.x;
8887 landmarks[RIGHT_EYE_Y] = (int32_t)landmark_data.right_eye_center.y;
8888 } else {
8889 landmarks[RIGHT_EYE_X] = FACE_INVALID_POINT;
8890 landmarks[RIGHT_EYE_Y] = FACE_INVALID_POINT;
8891 }
8892
8893 if (landmark_data.is_mouth_valid) {
8894 landmarks[MOUTH_X] = (int32_t)landmark_data.mouth_center.x;
8895 landmarks[MOUTH_Y] = (int32_t)landmark_data.mouth_center.y;
8896 } else {
8897 landmarks[MOUTH_X] = FACE_INVALID_POINT;
8898 landmarks[MOUTH_Y] = FACE_INVALID_POINT;
8899 }
8900}
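
/* Note (illustrative, based on the Android metadata contract as understood
 * here): the array filled above is reported through
 * ANDROID_STATISTICS_FACE_LANDMARKS, which carries six int32 values per face,
 * assumed to be ordered (leftEyeX, leftEyeY, rightEyeX, rightEyeY, mouthX,
 * mouthY) to match the LEFT_EYE_X .. MOUTH_Y indices. Invalid landmarks are
 * published as FACE_INVALID_POINT rather than 0 so the framework can tell
 * "not detected" apart from a genuine coordinate at the origin.
 */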
8901
8902/*===========================================================================
8903 * FUNCTION : setInvalidLandmarks
8904 *
8905 * DESCRIPTION: helper method to set invalid landmarks
8906 *
8907 * PARAMETERS :
8908 * @landmarks : int32_t destination array
8909 *
8910 *
8911 *==========================================================================*/
8912void QCamera3HardwareInterface::setInvalidLandmarks(
8913 int32_t *landmarks)
8914{
8915 landmarks[LEFT_EYE_X] = FACE_INVALID_POINT;
8916 landmarks[LEFT_EYE_Y] = FACE_INVALID_POINT;
8917 landmarks[RIGHT_EYE_X] = FACE_INVALID_POINT;
8918 landmarks[RIGHT_EYE_Y] = FACE_INVALID_POINT;
8919 landmarks[MOUTH_X] = FACE_INVALID_POINT;
8920 landmarks[MOUTH_Y] = FACE_INVALID_POINT;
8921}
8922
8923#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
8924
8925/*===========================================================================
8926 * FUNCTION : getCapabilities
8927 *
8928 * DESCRIPTION: query camera capability from back-end
8929 *
8930 * PARAMETERS :
8931 * @ops : mm-interface ops structure
8932 * @cam_handle : camera handle for which we need capability
8933 *
8934 * RETURN : ptr type of capability structure
8935 * capability for success
8936 * NULL for failure
8937 *==========================================================================*/
8938cam_capability_t *QCamera3HardwareInterface::getCapabilities(mm_camera_ops_t *ops,
8939 uint32_t cam_handle)
8940{
8941 int rc = NO_ERROR;
8942 QCamera3HeapMemory *capabilityHeap = NULL;
8943 cam_capability_t *cap_ptr = NULL;
8944
8945 if (ops == NULL) {
8946 LOGE("Invalid arguments");
8947 return NULL;
8948 }
8949
8950 capabilityHeap = new QCamera3HeapMemory(1);
8951 if (capabilityHeap == NULL) {
8952 LOGE("creation of capabilityHeap failed");
8953 return NULL;
8954 }
8955
8956 /* Allocate memory for capability buffer */
8957 rc = capabilityHeap->allocate(sizeof(cam_capability_t));
8958 if(rc != OK) {
8959        LOGE("No memory for capability");
8960 goto allocate_failed;
8961 }
8962
8963 /* Map memory for capability buffer */
8964 memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t));
8965
8966 rc = ops->map_buf(cam_handle,
8967 CAM_MAPPING_BUF_TYPE_CAPABILITY, capabilityHeap->getFd(0),
8968 sizeof(cam_capability_t), capabilityHeap->getPtr(0));
8969 if(rc < 0) {
8970 LOGE("failed to map capability buffer");
8971 rc = FAILED_TRANSACTION;
8972 goto map_failed;
8973 }
8974
8975 /* Query Capability */
8976 rc = ops->query_capability(cam_handle);
8977 if(rc < 0) {
8978 LOGE("failed to query capability");
8979 rc = FAILED_TRANSACTION;
8980 goto query_failed;
8981 }
8982
8983 cap_ptr = (cam_capability_t *)malloc(sizeof(cam_capability_t));
8984 if (cap_ptr == NULL) {
8985 LOGE("out of memory");
8986 rc = NO_MEMORY;
8987 goto query_failed;
8988 }
8989
8990 memset(cap_ptr, 0, sizeof(cam_capability_t));
8991 memcpy(cap_ptr, DATA_PTR(capabilityHeap, 0), sizeof(cam_capability_t));
8992
8993 int index;
8994 for (index = 0; index < CAM_ANALYSIS_INFO_MAX; index++) {
8995 cam_analysis_info_t *p_analysis_info = &cap_ptr->analysis_info[index];
8996 p_analysis_info->analysis_padding_info.offset_info.offset_x = 0;
8997 p_analysis_info->analysis_padding_info.offset_info.offset_y = 0;
8998 }
8999
9000query_failed:
9001 ops->unmap_buf(cam_handle, CAM_MAPPING_BUF_TYPE_CAPABILITY);
9002map_failed:
9003 capabilityHeap->deallocate();
9004allocate_failed:
9005 delete capabilityHeap;
9006
9007 if (rc != NO_ERROR) {
9008 return NULL;
9009 } else {
9010 return cap_ptr;
9011 }
9012}
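
/* Usage sketch (illustrative): the returned cam_capability_t is allocated
 * with malloc(), so the caller owns it and must free() it when done, e.g.
 *
 *     cam_capability_t *caps = getCapabilities(cameraHandle->ops, handle);
 *     if (caps != NULL) {
 *         // ... consume caps ...
 *         free(caps);
 *     }
 *
 * initCapabilities() below keeps the pointer in gCamCapability[] instead of
 * freeing it, since the capability table is reused by the rest of the HAL.
 */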
9013
9014/*===========================================================================
9015 * FUNCTION : initCapabilities
9016 *
9017 * DESCRIPTION: initialize camera capabilities in static data struct
9018 *
9019 * PARAMETERS :
9020 * @cameraId : camera Id
9021 *
9022 * RETURN : int32_t type of status
9023 * NO_ERROR -- success
9024 * none-zero failure code
9025 *==========================================================================*/
9026int QCamera3HardwareInterface::initCapabilities(uint32_t cameraId)
9027{
9028 int rc = 0;
9029 mm_camera_vtbl_t *cameraHandle = NULL;
9030    uint32_t handle = 0;
9031
9032 rc = camera_open((uint8_t)cameraId, &cameraHandle);
9033 if (rc) {
9034 LOGE("camera_open failed. rc = %d", rc);
9035 goto open_failed;
9036 }
9037 if (!cameraHandle) {
9038 LOGE("camera_open failed. cameraHandle = %p", cameraHandle);
9039 goto open_failed;
9040 }
9041
9042    handle = get_main_camera_handle(cameraHandle->camera_handle);
9043 gCamCapability[cameraId] = getCapabilities(cameraHandle->ops, handle);
9044 if (gCamCapability[cameraId] == NULL) {
9045 rc = FAILED_TRANSACTION;
9046 goto failed_op;
9047    }
9048
9049    gCamCapability[cameraId]->camera_index = cameraId;
9050    if (is_dual_camera_by_idx(cameraId)) {
9051 handle = get_aux_camera_handle(cameraHandle->camera_handle);
9052 gCamCapability[cameraId]->aux_cam_cap =
9053 getCapabilities(cameraHandle->ops, handle);
9054 if (gCamCapability[cameraId]->aux_cam_cap == NULL) {
9055 rc = FAILED_TRANSACTION;
9056 free(gCamCapability[cameraId]);
9057 goto failed_op;
9058 }
9059
9060 // Copy the main camera capability to main_cam_cap struct
9061 gCamCapability[cameraId]->main_cam_cap =
9062 (cam_capability_t *)malloc(sizeof(cam_capability_t));
9063 if (gCamCapability[cameraId]->main_cam_cap == NULL) {
9064 LOGE("out of memory");
9065 rc = NO_MEMORY;
9066 goto failed_op;
9067 }
9068 memcpy(gCamCapability[cameraId]->main_cam_cap, gCamCapability[cameraId],
9069 sizeof(cam_capability_t));
9070    }
9071failed_op:
9072    cameraHandle->ops->close_camera(cameraHandle->camera_handle);
9073 cameraHandle = NULL;
9074open_failed:
9075 return rc;
9076}
9077
9078/*==========================================================================
9079 * FUNCTION   : get3AVersion
9080 *
9081 * DESCRIPTION: get the Q3A S/W version
9082 *
9083 * PARAMETERS :
9084 * @sw_version: Reference of Q3A structure which will hold version info upon
9085 * return
9086 *
9087 * RETURN : None
9088 *
9089 *==========================================================================*/
9090void QCamera3HardwareInterface::get3AVersion(cam_q3a_version_t &sw_version)
9091{
9092 if(gCamCapability[mCameraId])
9093 sw_version = gCamCapability[mCameraId]->q3a_version;
9094 else
9095 LOGE("Capability structure NULL!");
9096}
9097
9098
9099/*===========================================================================
9100 * FUNCTION : initParameters
9101 *
9102 * DESCRIPTION: initialize camera parameters
9103 *
9104 * PARAMETERS :
9105 *
9106 * RETURN : int32_t type of status
9107 * NO_ERROR -- success
9108 * none-zero failure code
9109 *==========================================================================*/
9110int QCamera3HardwareInterface::initParameters()
9111{
9112 int rc = 0;
9113
9114 //Allocate Set Param Buffer
9115 mParamHeap = new QCamera3HeapMemory(1);
9116 rc = mParamHeap->allocate(sizeof(metadata_buffer_t));
9117 if(rc != OK) {
9118 rc = NO_MEMORY;
9119 LOGE("Failed to allocate SETPARM Heap memory");
9120 delete mParamHeap;
9121 mParamHeap = NULL;
9122 return rc;
9123 }
9124
9125 //Map memory for parameters buffer
9126 rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
9127 CAM_MAPPING_BUF_TYPE_PARM_BUF,
9128 mParamHeap->getFd(0),
9129 sizeof(metadata_buffer_t),
9130 (metadata_buffer_t *) DATA_PTR(mParamHeap,0));
9131 if(rc < 0) {
9132 LOGE("failed to map SETPARM buffer");
9133 rc = FAILED_TRANSACTION;
9134 mParamHeap->deallocate();
9135 delete mParamHeap;
9136 mParamHeap = NULL;
9137 return rc;
9138 }
9139
9140 mParameters = (metadata_buffer_t *) DATA_PTR(mParamHeap,0);
9141
9142 mPrevParameters = (metadata_buffer_t *)malloc(sizeof(metadata_buffer_t));
9143 return rc;
9144}
9145
9146/*===========================================================================
9147 * FUNCTION : deinitParameters
9148 *
9149 * DESCRIPTION: de-initialize camera parameters
9150 *
9151 * PARAMETERS :
9152 *
9153 * RETURN : NONE
9154 *==========================================================================*/
9155void QCamera3HardwareInterface::deinitParameters()
9156{
9157 mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
9158 CAM_MAPPING_BUF_TYPE_PARM_BUF);
9159
9160 mParamHeap->deallocate();
9161 delete mParamHeap;
9162 mParamHeap = NULL;
9163
9164 mParameters = NULL;
9165
9166 free(mPrevParameters);
9167 mPrevParameters = NULL;
9168}
9169
9170/*===========================================================================
9171 * FUNCTION : calcMaxJpegSize
9172 *
9173 * DESCRIPTION: Calculates maximum jpeg size supported by the cameraId
9174 *
9175 * PARAMETERS :
9176 *
9177 * RETURN : max_jpeg_size
9178 *==========================================================================*/
9179size_t QCamera3HardwareInterface::calcMaxJpegSize(uint32_t camera_id)
9180{
9181 size_t max_jpeg_size = 0;
9182 size_t temp_width, temp_height;
9183 size_t count = MIN(gCamCapability[camera_id]->picture_sizes_tbl_cnt,
9184 MAX_SIZES_CNT);
9185 for (size_t i = 0; i < count; i++) {
9186 temp_width = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].width;
9187 temp_height = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].height;
9188 if (temp_width * temp_height > max_jpeg_size ) {
9189 max_jpeg_size = temp_width * temp_height;
9190 }
9191 }
9192 max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
9193 return max_jpeg_size;
9194}
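
/* Worked example (hypothetical sensor): if the largest picture size is
 * 4000x3000, then max_jpeg_size = 4000 * 3000 * 3 / 2 +
 * sizeof(camera3_jpeg_blob_t) = 18,000,000 bytes plus the blob header, i.e.
 * the worst-case YUV420 footprint of the frame, which safely bounds any JPEG
 * encoded from it.
 */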
9195
9196/*===========================================================================
9197 * FUNCTION : getMaxRawSize
9198 *
9199 * DESCRIPTION: Fetches maximum raw size supported by the cameraId
9200 *
9201 * PARAMETERS :
9202 *
9203 * RETURN : Largest supported Raw Dimension
9204 *==========================================================================*/
9205cam_dimension_t QCamera3HardwareInterface::getMaxRawSize(uint32_t camera_id)
9206{
9207 int max_width = 0;
9208 cam_dimension_t maxRawSize;
9209
9210 memset(&maxRawSize, 0, sizeof(cam_dimension_t));
9211 for (size_t i = 0; i < gCamCapability[camera_id]->supported_raw_dim_cnt; i++) {
9212 if (max_width < gCamCapability[camera_id]->raw_dim[i].width) {
9213 max_width = gCamCapability[camera_id]->raw_dim[i].width;
9214 maxRawSize = gCamCapability[camera_id]->raw_dim[i];
9215 }
9216 }
9217 return maxRawSize;
9218}
9219
9220
9221/*===========================================================================
9222 * FUNCTION : calcMaxJpegDim
9223 *
9224 * DESCRIPTION: Calculates maximum jpeg dimension supported by the cameraId
9225 *
9226 * PARAMETERS :
9227 *
9228 * RETURN : max_jpeg_dim
9229 *==========================================================================*/
9230cam_dimension_t QCamera3HardwareInterface::calcMaxJpegDim()
9231{
9232 cam_dimension_t max_jpeg_dim;
9233 cam_dimension_t curr_jpeg_dim;
9234 max_jpeg_dim.width = 0;
9235 max_jpeg_dim.height = 0;
9236 curr_jpeg_dim.width = 0;
9237 curr_jpeg_dim.height = 0;
9238 for (size_t i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
9239 curr_jpeg_dim.width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width;
9240 curr_jpeg_dim.height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height;
9241 if (curr_jpeg_dim.width * curr_jpeg_dim.height >
9242 max_jpeg_dim.width * max_jpeg_dim.height ) {
9243 max_jpeg_dim.width = curr_jpeg_dim.width;
9244 max_jpeg_dim.height = curr_jpeg_dim.height;
9245 }
9246 }
9247 return max_jpeg_dim;
9248}
9249
9250/*===========================================================================
9251 * FUNCTION : addStreamConfig
9252 *
9253 * DESCRIPTION: adds the stream configuration to the array
9254 *
9255 * PARAMETERS :
9256 * @available_stream_configs : pointer to stream configuration array
9257 * @scalar_format : scalar format
9258 * @dim : configuration dimension
9259 * @config_type : input or output configuration type
9260 *
9261 * RETURN : NONE
9262 *==========================================================================*/
9263void QCamera3HardwareInterface::addStreamConfig(Vector<int32_t> &available_stream_configs,
9264 int32_t scalar_format, const cam_dimension_t &dim, int32_t config_type)
9265{
9266 available_stream_configs.add(scalar_format);
9267 available_stream_configs.add(dim.width);
9268 available_stream_configs.add(dim.height);
9269 available_stream_configs.add(config_type);
9270}
9271
9272/*===========================================================================
9273 * FUNCTION   : supportBurstCapture
9274 *
9275 * DESCRIPTION: Whether a particular camera supports BURST_CAPTURE
9276 *
9277 * PARAMETERS :
9278 * @cameraId : camera Id
9279 *
9280 * RETURN : true if camera supports BURST_CAPTURE
9281 * false otherwise
9282 *==========================================================================*/
9283bool QCamera3HardwareInterface::supportBurstCapture(uint32_t cameraId)
9284{
9285 const int64_t highResDurationBound = 50000000; // 50 ms, 20 fps
9286 const int64_t fullResDurationBound = 100000000; // 100 ms, 10 fps
9287 const int32_t highResWidth = 3264;
9288 const int32_t highResHeight = 2448;
9289
9290 if (gCamCapability[cameraId]->picture_min_duration[0] > fullResDurationBound) {
9291 // Maximum resolution images cannot be captured at >= 10fps
9292 // -> not supporting BURST_CAPTURE
9293 return false;
9294 }
9295
9296 if (gCamCapability[cameraId]->picture_min_duration[0] <= highResDurationBound) {
9297 // Maximum resolution images can be captured at >= 20fps
9298 // --> supporting BURST_CAPTURE
9299 return true;
9300 }
9301
9302 // Find the smallest highRes resolution, or largest resolution if there is none
9303 size_t totalCnt = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt,
9304 MAX_SIZES_CNT);
9305 size_t highRes = 0;
9306 while ((highRes + 1 < totalCnt) &&
9307 (gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].width *
9308 gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].height >=
9309 highResWidth * highResHeight)) {
9310 highRes++;
9311 }
9312 if (gCamCapability[cameraId]->picture_min_duration[highRes] <= highResDurationBound) {
9313 return true;
9314 } else {
9315 return false;
9316 }
9317}
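
/* Illustrative reading of the bounds above: 50,000,000 ns = 50 ms (20 fps)
 * and 100,000,000 ns = 100 ms (10 fps). So a hypothetical sensor whose
 * full-resolution minimum frame duration is 66,666,666 ns (~15 fps) is
 * neither rejected nor accepted outright; the loop then finds the smallest
 * size that is still >= 3264x2448 (~8 MP) and BURST_CAPTURE is reported only
 * if that size can sustain at least 20 fps.
 */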
9318
9319/*===========================================================================
9320 * FUNCTION   : getPDStatIndex
9321 *
9322 * DESCRIPTION: Return the meta raw phase detection statistics index if present
9323 *
9324 * PARAMETERS :
9325 * @caps : camera capabilities
9326 *
9327 * RETURN : int32_t type
9328 * non-negative - on success
9329 * -1 - on failure
9330 *==========================================================================*/
9331int32_t QCamera3HardwareInterface::getPDStatIndex(cam_capability_t *caps) {
9332 if (nullptr == caps) {
9333 return -1;
9334 }
9335
9336 uint32_t metaRawCount = caps->meta_raw_channel_count;
9337 int32_t ret = -1;
9338 for (size_t i = 0; i < metaRawCount; i++) {
9339 if (CAM_FORMAT_SUBTYPE_PDAF_STATS == caps->sub_fmt[i]) {
9340 ret = i;
9341 break;
9342 }
9343 }
9344
9345 return ret;
9346}
9347
9348/*===========================================================================
9349 * FUNCTION   : initStaticMetadata
9350 *
9351 * DESCRIPTION: initialize the static metadata
9352 *
9353 * PARAMETERS :
9354 * @cameraId : camera Id
9355 *
9356 * RETURN : int32_t type of status
9357 * 0 -- success
9358 * non-zero failure code
9359 *==========================================================================*/
9360int QCamera3HardwareInterface::initStaticMetadata(uint32_t cameraId)
9361{
9362 int rc = 0;
9363 CameraMetadata staticInfo;
9364 size_t count = 0;
9365 bool limitedDevice = false;
9366 char prop[PROPERTY_VALUE_MAX];
9367 bool supportBurst = false;
9368
9369 supportBurst = supportBurstCapture(cameraId);
9370
9371 /* If sensor is YUV sensor (no raw support) or if per-frame control is not
9372     * guaranteed, or if the min fps at max resolution is less than 20 fps, it is
9373     * advertised as a limited device */
9374 limitedDevice = gCamCapability[cameraId]->no_per_frame_control_support ||
9375 (CAM_SENSOR_YUV == gCamCapability[cameraId]->sensor_type.sens_type) ||
9376 (CAM_SENSOR_MONO == gCamCapability[cameraId]->sensor_type.sens_type) ||
9377 !supportBurst;
9378
9379 uint8_t supportedHwLvl = limitedDevice ?
9380 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED :
9381#ifndef USE_HAL_3_3
9382 // LEVEL_3 - This device will support level 3.
9383 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_3;
9384#else
9385            ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL;
9386#endif
9387
9388 staticInfo.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
9389 &supportedHwLvl, 1);
9390
9391 bool facingBack = false;
9392 if ((gCamCapability[cameraId]->position == CAM_POSITION_BACK) ||
9393 (gCamCapability[cameraId]->position == CAM_POSITION_BACK_AUX)) {
9394 facingBack = true;
9395 }
9396 /*HAL 3 only*/
9397 staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
9398 &gCamCapability[cameraId]->min_focus_distance, 1);
9399
9400 staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
9401 &gCamCapability[cameraId]->hyper_focal_distance, 1);
9402
9403 /*should be using focal lengths but sensor doesn't provide that info now*/
9404 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
9405 &gCamCapability[cameraId]->focal_length,
9406 1);
9407
9408 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
9409 gCamCapability[cameraId]->apertures,
9410 MIN(CAM_APERTURES_MAX, gCamCapability[cameraId]->apertures_count));
9411
9412 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
9413 gCamCapability[cameraId]->filter_densities,
9414 MIN(CAM_FILTER_DENSITIES_MAX, gCamCapability[cameraId]->filter_densities_count));
9415
9416
9417    uint8_t available_opt_stab_modes[CAM_OPT_STAB_MAX];
9418 size_t mode_count =
9419 MIN((size_t)CAM_OPT_STAB_MAX, gCamCapability[cameraId]->optical_stab_modes_count);
9420 for (size_t i = 0; i < mode_count; i++) {
9421 available_opt_stab_modes[i] = gCamCapability[cameraId]->optical_stab_modes[i];
9422 }
9423    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
9424            available_opt_stab_modes, mode_count);
9425
9426 int32_t lens_shading_map_size[] = {
9427 MIN(CAM_MAX_SHADING_MAP_WIDTH, gCamCapability[cameraId]->lens_shading_map_size.width),
9428 MIN(CAM_MAX_SHADING_MAP_HEIGHT, gCamCapability[cameraId]->lens_shading_map_size.height)};
9429 staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE,
9430 lens_shading_map_size,
9431 sizeof(lens_shading_map_size)/sizeof(int32_t));
9432
9433 staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
9434 gCamCapability[cameraId]->sensor_physical_size, SENSOR_PHYSICAL_SIZE_CNT);
9435
9436 staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
9437 gCamCapability[cameraId]->exposure_time_range, EXPOSURE_TIME_RANGE_CNT);
9438
9439 staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
9440 &gCamCapability[cameraId]->max_frame_duration, 1);
9441
9442 camera_metadata_rational baseGainFactor = {
9443 gCamCapability[cameraId]->base_gain_factor.numerator,
9444 gCamCapability[cameraId]->base_gain_factor.denominator};
9445 staticInfo.update(ANDROID_SENSOR_BASE_GAIN_FACTOR,
9446 &baseGainFactor, 1);
9447
9448 staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
9449 (uint8_t *)&gCamCapability[cameraId]->color_arrangement, 1);
9450
9451 int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width,
9452 gCamCapability[cameraId]->pixel_array_size.height};
9453 staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
9454 pixel_array_size, sizeof(pixel_array_size)/sizeof(pixel_array_size[0]));
9455
9456 int32_t active_array_size[] = {gCamCapability[cameraId]->active_array_size.left,
9457 gCamCapability[cameraId]->active_array_size.top,
9458 gCamCapability[cameraId]->active_array_size.width,
9459 gCamCapability[cameraId]->active_array_size.height};
9460 staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
9461 active_array_size, sizeof(active_array_size)/sizeof(active_array_size[0]));
9462
9463 staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
9464 &gCamCapability[cameraId]->white_level, 1);
9465
9466    int32_t adjusted_bl_per_cfa[BLACK_LEVEL_PATTERN_CNT];
9467 adjustBlackLevelForCFA(gCamCapability[cameraId]->black_level_pattern, adjusted_bl_per_cfa,
9468 gCamCapability[cameraId]->color_arrangement);
9469    staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
9470            adjusted_bl_per_cfa, BLACK_LEVEL_PATTERN_CNT);
9471
9472#ifndef USE_HAL_3_3
9473 bool hasBlackRegions = false;
9474 if (gCamCapability[cameraId]->optical_black_region_count > MAX_OPTICAL_BLACK_REGIONS) {
9475 LOGW("black_region_count: %d is bounded to %d",
9476 gCamCapability[cameraId]->optical_black_region_count, MAX_OPTICAL_BLACK_REGIONS);
9477 gCamCapability[cameraId]->optical_black_region_count = MAX_OPTICAL_BLACK_REGIONS;
9478 }
9479 if (gCamCapability[cameraId]->optical_black_region_count != 0) {
9480 int32_t opticalBlackRegions[MAX_OPTICAL_BLACK_REGIONS * 4];
9481 for (size_t i = 0; i < gCamCapability[cameraId]->optical_black_region_count * 4; i++) {
9482 opticalBlackRegions[i] = gCamCapability[cameraId]->optical_black_regions[i];
9483 }
9484 staticInfo.update(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS,
9485 opticalBlackRegions, gCamCapability[cameraId]->optical_black_region_count * 4);
9486 hasBlackRegions = true;
9487 }
9488#endif
9489    staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION,
9490 &gCamCapability[cameraId]->flash_charge_duration, 1);
9491
9492 staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS,
9493 &gCamCapability[cameraId]->max_tone_map_curve_points, 1);
9494
9495    uint8_t timestampSource = (gCamCapability[cameraId]->timestamp_calibrated ?
9496 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME :
9497 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_UNKNOWN);
9498    staticInfo.update(ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
9499 &timestampSource, 1);
9500
9501    //update histogram vendor data
9502 staticInfo.update(QCAMERA3_HISTOGRAM_BUCKETS,
9503            &gCamCapability[cameraId]->histogram_size, 1);
9504
9505    staticInfo.update(QCAMERA3_HISTOGRAM_MAX_COUNT,
9506            &gCamCapability[cameraId]->max_histogram_count, 1);
9507
9508    //Set supported bins to be {max_bins, max_bins/2, max_bins/4, ...}
9509 //so that app can request fewer number of bins than the maximum supported.
9510 std::vector<int32_t> histBins;
9511 int32_t maxHistBins = gCamCapability[cameraId]->max_histogram_count;
9512 histBins.push_back(maxHistBins);
9513 while ((maxHistBins >> 1) >= MIN_CAM_HISTOGRAM_STATS_SIZE &&
9514 (maxHistBins & 0x1) == 0) {
9515 histBins.push_back(maxHistBins >> 1);
9516 maxHistBins >>= 1;
9517 }
9518 staticInfo.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_SUPPORTED_BINS,
9519 histBins.data(), histBins.size());
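    /* Illustrative expansion (hypothetical values): with
     * max_histogram_count = 256 and MIN_CAM_HISTOGRAM_STATS_SIZE = 64, the
     * loop above publishes {256, 128, 64}; halving stops once the next value
     * would drop below the minimum bin count or the current value is odd, so
     * only power-of-two subdivisions of the maximum are advertised. */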
9520
9521    int32_t sharpness_map_size[] = {
9522 gCamCapability[cameraId]->sharpness_map_size.width,
9523 gCamCapability[cameraId]->sharpness_map_size.height};
9524
9525 staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
9526 sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t));
9527
9528 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
9529 &gCamCapability[cameraId]->max_sharpness_map_value, 1);
9530
9531    int32_t indexPD = getPDStatIndex(gCamCapability[cameraId]);
9532 if (0 <= indexPD) {
9533 // Advertise PD stats data as part of the Depth capabilities
9534 int32_t depthWidth =
9535 gCamCapability[cameraId]->raw_meta_dim[indexPD].width;
9536 int32_t depthHeight =
9537 gCamCapability[cameraId]->raw_meta_dim[indexPD].height;
9538        int32_t depthStride =
9539 gCamCapability[cameraId]->raw_meta_dim[indexPD].width * 2;
9540        int32_t depthSamplesCount = (depthWidth * depthHeight * 2) / 16;
9541 assert(0 < depthSamplesCount);
9542 staticInfo.update(ANDROID_DEPTH_MAX_DEPTH_SAMPLES,
9543 &depthSamplesCount, 1);
9544
9545 int32_t depthConfigs[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9546 depthHeight,
9547 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
9548 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1,
9549 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT};
9550 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
9551 depthConfigs, sizeof(depthConfigs)/sizeof(depthConfigs[0]));
9552
9553 int64_t depthMinDuration[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9554 depthHeight, 33333333,
9555 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1, 33333333};
9556 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS,
9557 depthMinDuration,
9558 sizeof(depthMinDuration) / sizeof(depthMinDuration[0]));
9559
9560 int64_t depthStallDuration[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9561 depthHeight, 0,
9562 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1, 0};
9563 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS,
9564 depthStallDuration,
9565 sizeof(depthStallDuration) / sizeof(depthStallDuration[0]));
9566
9567 uint8_t depthExclusive = ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE_FALSE;
9568 staticInfo.update(ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE, &depthExclusive, 1);
9569
9570 int32_t pd_dimensions [] = {depthWidth, depthHeight, depthStride};
9571 staticInfo.update(NEXUS_EXPERIMENTAL_2017_PD_DATA_DIMENSIONS,
9572 pd_dimensions, sizeof(pd_dimensions) / sizeof(pd_dimensions[0]));
9573    }
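    /* Worked example (hypothetical dimensions): for a PD stats buffer of
     * 800x600 16-bit samples, depthStride = 800 * 2 = 1600 bytes per row and
     * depthSamplesCount = (800 * 600 * 2) / 16 = 60000, so the same data is
     * exported both as a RAW16 800x600 stream and as a BLOB stream of
     * 60000x1 depth samples in the configurations above. */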
9574
9575    int32_t scalar_formats[] = {
9576 ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE,
9577 ANDROID_SCALER_AVAILABLE_FORMATS_RAW16,
9578 ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888,
9579 ANDROID_SCALER_AVAILABLE_FORMATS_BLOB,
9580 HAL_PIXEL_FORMAT_RAW10,
9581 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED};
9582    size_t scalar_formats_count = sizeof(scalar_formats) / sizeof(scalar_formats[0]);
9583 staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS, scalar_formats,
9584 scalar_formats_count);
9585
9586 int32_t available_processed_sizes[MAX_SIZES_CNT * 2];
9587 count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
9588 makeTable(gCamCapability[cameraId]->picture_sizes_tbl,
9589 count, MAX_SIZES_CNT, available_processed_sizes);
9590 staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
9591 available_processed_sizes, count * 2);
9592
9593 int32_t available_raw_sizes[MAX_SIZES_CNT * 2];
9594 count = MIN(gCamCapability[cameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
9595 makeTable(gCamCapability[cameraId]->raw_dim,
9596 count, MAX_SIZES_CNT, available_raw_sizes);
9597 staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_SIZES,
9598 available_raw_sizes, count * 2);
9599
9600 int32_t available_fps_ranges[MAX_SIZES_CNT * 2];
9601 count = MIN(gCamCapability[cameraId]->fps_ranges_tbl_cnt, MAX_SIZES_CNT);
9602 makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl,
9603 count, MAX_SIZES_CNT, available_fps_ranges);
9604 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
9605 available_fps_ranges, count * 2);
9606
9607 camera_metadata_rational exposureCompensationStep = {
9608 gCamCapability[cameraId]->exp_compensation_step.numerator,
9609 gCamCapability[cameraId]->exp_compensation_step.denominator};
9610 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
9611 &exposureCompensationStep, 1);
9612
9613 Vector<uint8_t> availableVstabModes;
9614 availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF);
9615 char eis_prop[PROPERTY_VALUE_MAX];
9616    bool eisSupported = false;
9617    memset(eis_prop, 0, sizeof(eis_prop));
9618    property_get("persist.camera.eis.enable", eis_prop, "1");
9619    uint8_t eis_prop_set = (uint8_t)atoi(eis_prop);
9620    count = IS_TYPE_MAX;
9621 count = MIN(gCamCapability[cameraId]->supported_is_types_cnt, count);
9622 for (size_t i = 0; i < count; i++) {
9623 if ((gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
9624 (gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0)) {
9625 eisSupported = true;
9626 break;
9627 }
9628 }
9629 if (facingBack && eis_prop_set && eisSupported) {
9630        availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON);
9631 }
9632 staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
9633 availableVstabModes.array(), availableVstabModes.size());
9634
9635 /*HAL 1 and HAL 3 common*/
9636 uint32_t zoomSteps = gCamCapability[cameraId]->zoom_ratio_tbl_cnt;
9637 uint32_t maxZoomStep = gCamCapability[cameraId]->zoom_ratio_tbl[zoomSteps - 1];
9638 uint32_t minZoomStep = 100; //as per HAL1/API1 spec
9639    // Cap the max zoom to the max preferred value
9640 float maxZoom = MIN(maxZoomStep/minZoomStep, MAX_PREFERRED_ZOOM_RATIO);
9641    staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
9642 &maxZoom, 1);
9643
9644 uint8_t croppingType = ANDROID_SCALER_CROPPING_TYPE_CENTER_ONLY;
9645 staticInfo.update(ANDROID_SCALER_CROPPING_TYPE, &croppingType, 1);
9646
9647 int32_t max3aRegions[3] = {/*AE*/1,/*AWB*/ 0,/*AF*/ 1};
9648 if (gCamCapability[cameraId]->supported_focus_modes_cnt == 1)
9649 max3aRegions[2] = 0; /* AF not supported */
9650 staticInfo.update(ANDROID_CONTROL_MAX_REGIONS,
9651 max3aRegions, 3);
9652
9653 /* 0: OFF, 1: OFF+SIMPLE, 2: OFF+FULL, 3: OFF+SIMPLE+FULL */
9654 memset(prop, 0, sizeof(prop));
9655 property_get("persist.camera.facedetect", prop, "1");
9656 uint8_t supportedFaceDetectMode = (uint8_t)atoi(prop);
9657 LOGD("Support face detection mode: %d",
9658 supportedFaceDetectMode);
9659
9660 int32_t maxFaces = gCamCapability[cameraId]->max_num_roi;
9661    /* support mode should be OFF if max number of faces is 0 */
9662 if (maxFaces <= 0) {
9663 supportedFaceDetectMode = 0;
9664 }
9665    Vector<uint8_t> availableFaceDetectModes;
9666 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_OFF);
9667 if (supportedFaceDetectMode == 1) {
9668 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
9669 } else if (supportedFaceDetectMode == 2) {
9670 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
9671 } else if (supportedFaceDetectMode == 3) {
9672 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
9673 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
9674 } else {
9675 maxFaces = 0;
9676 }
9677 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
9678 availableFaceDetectModes.array(),
9679 availableFaceDetectModes.size());
9680 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
9681 (int32_t *)&maxFaces, 1);
9682    uint8_t face_bsgc = gCamCapability[cameraId]->face_bsgc;
9683 staticInfo.update(QCAMERA3_STATS_BSGC_AVAILABLE,
9684 &face_bsgc, 1);
9685
9686 int32_t exposureCompensationRange[] = {
9687 gCamCapability[cameraId]->exposure_compensation_min,
9688 gCamCapability[cameraId]->exposure_compensation_max};
9689 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
9690 exposureCompensationRange,
9691 sizeof(exposureCompensationRange)/sizeof(int32_t));
9692
9693 uint8_t lensFacing = (facingBack) ?
9694 ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
9695 staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1);
9696
9697 staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
9698 available_thumbnail_sizes,
9699 sizeof(available_thumbnail_sizes)/sizeof(int32_t));
9700
9701 /*all sizes will be clubbed into this tag*/
9702 count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
9703 /*android.scaler.availableStreamConfigurations*/
9704 Vector<int32_t> available_stream_configs;
9705 cam_dimension_t active_array_dim;
9706 active_array_dim.width = gCamCapability[cameraId]->active_array_size.width;
9707 active_array_dim.height = gCamCapability[cameraId]->active_array_size.height;
9708
9709 /*advertise list of input dimensions supported based on below property.
9710 By default all sizes upto 5MP will be advertised.
9711 Note that the setprop resolution format should be WxH.
9712 e.g: adb shell setprop persist.camera.input.minsize 1280x720
9713 To list all supported sizes, setprop needs to be set with "0x0" */
9714 cam_dimension_t minInputSize = {2592,1944}; //5MP
9715 memset(prop, 0, sizeof(prop));
9716 property_get("persist.camera.input.minsize", prop, "2592x1944");
9717 if (strlen(prop) > 0) {
9718 char *saveptr = NULL;
9719 char *token = strtok_r(prop, "x", &saveptr);
9720 if (token != NULL) {
9721 minInputSize.width = atoi(token);
9722 }
9723 token = strtok_r(NULL, "x", &saveptr);
9724 if (token != NULL) {
9725 minInputSize.height = atoi(token);
9726 }
9727 }
9728
9729    /* Add input/output stream configurations for each scalar format */
9730 for (size_t j = 0; j < scalar_formats_count; j++) {
9731 switch (scalar_formats[j]) {
9732 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
9733 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
9734 case HAL_PIXEL_FORMAT_RAW10:
9735 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9736 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
9737 addStreamConfig(available_stream_configs, scalar_formats[j],
9738 gCamCapability[cameraId]->raw_dim[i],
9739 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
9740 }
9741 break;
9742 case HAL_PIXEL_FORMAT_BLOB:
9743 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9744 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9745 addStreamConfig(available_stream_configs, scalar_formats[j],
9746 gCamCapability[cameraId]->picture_sizes_tbl[i],
9747 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
9748 }
9749 break;
9750 case HAL_PIXEL_FORMAT_YCbCr_420_888:
9751 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
9752 default:
9753 cam_dimension_t largest_picture_size;
9754 memset(&largest_picture_size, 0, sizeof(cam_dimension_t));
9755 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9756 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9757 addStreamConfig(available_stream_configs, scalar_formats[j],
9758 gCamCapability[cameraId]->picture_sizes_tbl[i],
9759 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
9760                /* For the below two formats we also support input streams for reprocessing; advertise those */
9761                if ((scalar_formats[j] == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
9762 scalar_formats[j] == HAL_PIXEL_FORMAT_YCbCr_420_888) && i == 0) {
9763                    if ((gCamCapability[cameraId]->picture_sizes_tbl[i].width
9764 >= minInputSize.width) || (gCamCapability[cameraId]->
9765 picture_sizes_tbl[i].height >= minInputSize.height)) {
9766 addStreamConfig(available_stream_configs, scalar_formats[j],
9767 gCamCapability[cameraId]->picture_sizes_tbl[i],
9768 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_INPUT);
9769 }
9770 }
9771            }
9772
9773            break;
9774 }
9775 }
9776
9777 staticInfo.update(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
9778 available_stream_configs.array(), available_stream_configs.size());
9779 static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
9780 staticInfo.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
9781
9782 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
9783 staticInfo.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
9784
9785 /* android.scaler.availableMinFrameDurations */
9786 Vector<int64_t> available_min_durations;
9787 for (size_t j = 0; j < scalar_formats_count; j++) {
9788 switch (scalar_formats[j]) {
9789 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
9790 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
9791 case HAL_PIXEL_FORMAT_RAW10:
9792 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9793 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
9794 available_min_durations.add(scalar_formats[j]);
9795 available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
9796 available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
9797 available_min_durations.add(gCamCapability[cameraId]->raw_min_duration[i]);
9798 }
9799 break;
9800 default:
9801 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9802 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9803 available_min_durations.add(scalar_formats[j]);
9804 available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
9805 available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
9806 available_min_durations.add(gCamCapability[cameraId]->picture_min_duration[i]);
9807 }
9808 break;
9809 }
9810 }
9811 staticInfo.update(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
9812 available_min_durations.array(), available_min_durations.size());
9813
9814 Vector<int32_t> available_hfr_configs;
9815 for (size_t i = 0; i < gCamCapability[cameraId]->hfr_tbl_cnt; i++) {
9816 int32_t fps = 0;
9817 switch (gCamCapability[cameraId]->hfr_tbl[i].mode) {
9818 case CAM_HFR_MODE_60FPS:
9819 fps = 60;
9820 break;
9821 case CAM_HFR_MODE_90FPS:
9822 fps = 90;
9823 break;
9824 case CAM_HFR_MODE_120FPS:
9825 fps = 120;
9826 break;
9827 case CAM_HFR_MODE_150FPS:
9828 fps = 150;
9829 break;
9830 case CAM_HFR_MODE_180FPS:
9831 fps = 180;
9832 break;
9833 case CAM_HFR_MODE_210FPS:
9834 fps = 210;
9835 break;
9836 case CAM_HFR_MODE_240FPS:
9837 fps = 240;
9838 break;
9839 case CAM_HFR_MODE_480FPS:
9840 fps = 480;
9841 break;
9842 case CAM_HFR_MODE_OFF:
9843 case CAM_HFR_MODE_MAX:
9844 default:
9845 break;
9846 }
9847
9848 /* Advertise only MIN_FPS_FOR_BATCH_MODE or above as HIGH_SPEED_CONFIGS */
9849 if (fps >= MIN_FPS_FOR_BATCH_MODE) {
9850 /* For each HFR frame rate, need to advertise one variable fps range
9851 * and one fixed fps range per dimension. Eg: for 120 FPS, advertise [30, 120]
9852 * and [120, 120]. While camcorder preview alone is running [30, 120] is
9853 * set by the app. When video recording is started, [120, 120] is
9854 * set. This way sensor configuration does not change when recording
9855 * is started */
9856
9857 /* (width, height, fps_min, fps_max, batch_size_max) */
9858 for (size_t j = 0; j < gCamCapability[cameraId]->hfr_tbl[i].dim_cnt &&
9859 j < MAX_SIZES_CNT; j++) {
9860 available_hfr_configs.add(
9861 gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
9862 available_hfr_configs.add(
9863 gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
9864 available_hfr_configs.add(PREVIEW_FPS_FOR_HFR);
9865 available_hfr_configs.add(fps);
9866 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
9867
9868 /* (width, height, fps_min, fps_max, batch_size_max) */
9869 available_hfr_configs.add(
9870 gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
9871 available_hfr_configs.add(
9872 gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
9873 available_hfr_configs.add(fps);
9874 available_hfr_configs.add(fps);
9875 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
9876 }
9877 }
9878 }
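    /* Illustrative expansion (assuming PREVIEW_FPS_FOR_HFR is 30): a 120 fps
     * HFR table entry for 1920x1080 would be advertised as two tuples,
     * (1920, 1080, 30, 120, 4) and (1920, 1080, 120, 120, 4), i.e. one
     * variable-rate range for preview-only operation and one fixed-rate range
     * for recording, each with batch_size_max = 120 / 30 = 4. */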
9879 //Advertise HFR capability only if the property is set
9880 memset(prop, 0, sizeof(prop));
9881 property_get("persist.camera.hal3hfr.enable", prop, "1");
9882 uint8_t hfrEnable = (uint8_t)atoi(prop);
9883
9884 if(hfrEnable && available_hfr_configs.array()) {
9885 staticInfo.update(
9886 ANDROID_CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS,
9887 available_hfr_configs.array(), available_hfr_configs.size());
9888 }
9889
9890 int32_t max_jpeg_size = (int32_t)calcMaxJpegSize(cameraId);
9891 staticInfo.update(ANDROID_JPEG_MAX_SIZE,
9892 &max_jpeg_size, 1);
9893
9894 uint8_t avail_effects[CAM_EFFECT_MODE_MAX];
9895 size_t size = 0;
9896 count = CAM_EFFECT_MODE_MAX;
9897 count = MIN(gCamCapability[cameraId]->supported_effects_cnt, count);
9898 for (size_t i = 0; i < count; i++) {
9899 int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
9900 gCamCapability[cameraId]->supported_effects[i]);
9901 if (NAME_NOT_FOUND != val) {
9902 avail_effects[size] = (uint8_t)val;
9903 size++;
9904 }
9905 }
9906 staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
9907 avail_effects,
9908 size);
9909
9910 uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX];
9911 uint8_t supported_indexes[CAM_SCENE_MODE_MAX];
9912 size_t supported_scene_modes_cnt = 0;
9913 count = CAM_SCENE_MODE_MAX;
9914 count = MIN(gCamCapability[cameraId]->supported_scene_modes_cnt, count);
9915 for (size_t i = 0; i < count; i++) {
9916 if (gCamCapability[cameraId]->supported_scene_modes[i] !=
9917 CAM_SCENE_MODE_OFF) {
9918 int val = lookupFwkName(SCENE_MODES_MAP,
9919 METADATA_MAP_SIZE(SCENE_MODES_MAP),
9920 gCamCapability[cameraId]->supported_scene_modes[i]);
9921
9922            if (NAME_NOT_FOUND != val) {
9923 avail_scene_modes[supported_scene_modes_cnt] = (uint8_t)val;
9924 supported_indexes[supported_scene_modes_cnt] = (uint8_t)i;
9925 supported_scene_modes_cnt++;
9926 }
9927 }
9928 }
9929 staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
9930 avail_scene_modes,
9931 supported_scene_modes_cnt);
9932
9933 uint8_t scene_mode_overrides[CAM_SCENE_MODE_MAX * 3];
9934 makeOverridesList(gCamCapability[cameraId]->scene_mode_overrides,
9935 supported_scene_modes_cnt,
9936 CAM_SCENE_MODE_MAX,
9937 scene_mode_overrides,
9938 supported_indexes,
9939 cameraId);
9940
9941 if (supported_scene_modes_cnt == 0) {
9942 supported_scene_modes_cnt = 1;
9943 avail_scene_modes[0] = ANDROID_CONTROL_SCENE_MODE_DISABLED;
9944 }
9945
9946 staticInfo.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
9947 scene_mode_overrides, supported_scene_modes_cnt * 3);
9948
9949 uint8_t available_control_modes[] = {ANDROID_CONTROL_MODE_OFF,
9950 ANDROID_CONTROL_MODE_AUTO,
9951 ANDROID_CONTROL_MODE_USE_SCENE_MODE};
9952 staticInfo.update(ANDROID_CONTROL_AVAILABLE_MODES,
9953 available_control_modes,
9954 3);
9955
9956 uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX];
9957 size = 0;
9958 count = CAM_ANTIBANDING_MODE_MAX;
9959 count = MIN(gCamCapability[cameraId]->supported_antibandings_cnt, count);
9960 for (size_t i = 0; i < count; i++) {
9961 int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
9962 gCamCapability[cameraId]->supported_antibandings[i]);
9963 if (NAME_NOT_FOUND != val) {
9964 avail_antibanding_modes[size] = (uint8_t)val;
9965 size++;
9966 }
9967
9968 }
9969 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
9970 avail_antibanding_modes,
9971 size);
9972
9973 uint8_t avail_abberation_modes[] = {
9974 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
9975 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
9976 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY};
9977 count = CAM_COLOR_CORRECTION_ABERRATION_MAX;
9978 count = MIN(gCamCapability[cameraId]->aberration_modes_count, count);
9979 if (0 == count) {
9980        // If no aberration correction modes are available for a device, advertise only the OFF mode
9981 size = 1;
9982 } else {
9983        // If count is not zero then at least one of FAST or HIGH_QUALITY is supported,
9984        // so advertise all 3 modes if at least one mode is supported, as per the
9985 // new M requirement
9986 size = 3;
9987 }
9988 staticInfo.update(ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
9989 avail_abberation_modes,
9990 size);
9991
9992 uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX];
9993 size = 0;
9994 count = CAM_FOCUS_MODE_MAX;
9995 count = MIN(gCamCapability[cameraId]->supported_focus_modes_cnt, count);
9996 for (size_t i = 0; i < count; i++) {
9997 int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
9998 gCamCapability[cameraId]->supported_focus_modes[i]);
9999 if (NAME_NOT_FOUND != val) {
10000 avail_af_modes[size] = (uint8_t)val;
10001 size++;
10002 }
10003 }
10004 staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
10005 avail_af_modes,
10006 size);
10007
10008 uint8_t avail_awb_modes[CAM_WB_MODE_MAX];
10009 size = 0;
10010 count = CAM_WB_MODE_MAX;
10011 count = MIN(gCamCapability[cameraId]->supported_white_balances_cnt, count);
10012 for (size_t i = 0; i < count; i++) {
10013 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
10014 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
10015 gCamCapability[cameraId]->supported_white_balances[i]);
10016 if (NAME_NOT_FOUND != val) {
10017 avail_awb_modes[size] = (uint8_t)val;
10018 size++;
10019 }
10020 }
10021 staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
10022 avail_awb_modes,
10023 size);
10024
10025 uint8_t available_flash_levels[CAM_FLASH_FIRING_LEVEL_MAX];
10026 count = CAM_FLASH_FIRING_LEVEL_MAX;
10027 count = MIN(gCamCapability[cameraId]->supported_flash_firing_level_cnt,
10028 count);
10029 for (size_t i = 0; i < count; i++) {
10030 available_flash_levels[i] =
10031 gCamCapability[cameraId]->supported_firing_levels[i];
10032 }
10033 staticInfo.update(ANDROID_FLASH_FIRING_POWER,
10034 available_flash_levels, count);
10035
10036 uint8_t flashAvailable;
10037 if (gCamCapability[cameraId]->flash_available)
10038 flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_TRUE;
10039 else
10040 flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_FALSE;
10041 staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE,
10042 &flashAvailable, 1);
10043
10044 Vector<uint8_t> avail_ae_modes;
10045 count = CAM_AE_MODE_MAX;
10046 count = MIN(gCamCapability[cameraId]->supported_ae_modes_cnt, count);
10047 for (size_t i = 0; i < count; i++) {
10048        uint8_t aeMode = gCamCapability[cameraId]->supported_ae_modes[i];
10049 if (aeMode == CAM_AE_MODE_ON_EXTERNAL_FLASH) {
10050 aeMode = NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH;
10051 }
10052 avail_ae_modes.add(aeMode);
10053    }
10054 if (flashAvailable) {
10055 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH);
10056 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH);
10057 }
10058 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
10059 avail_ae_modes.array(),
10060 avail_ae_modes.size());
10061
10062 int32_t sensitivity_range[2];
10063 sensitivity_range[0] = gCamCapability[cameraId]->sensitivity_range.min_sensitivity;
10064 sensitivity_range[1] = gCamCapability[cameraId]->sensitivity_range.max_sensitivity;
10065 staticInfo.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
10066 sensitivity_range,
10067 sizeof(sensitivity_range) / sizeof(int32_t));
10068
10069 staticInfo.update(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
10070 &gCamCapability[cameraId]->max_analog_sensitivity,
10071 1);
10072
10073 int32_t sensor_orientation = (int32_t)gCamCapability[cameraId]->sensor_mount_angle;
10074 staticInfo.update(ANDROID_SENSOR_ORIENTATION,
10075 &sensor_orientation,
10076 1);
10077
10078 int32_t max_output_streams[] = {
10079 MAX_STALLING_STREAMS,
10080 MAX_PROCESSED_STREAMS,
10081 MAX_RAW_STREAMS};
10082 staticInfo.update(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
10083 max_output_streams,
10084 sizeof(max_output_streams)/sizeof(max_output_streams[0]));
10085
10086 uint8_t avail_leds = 0;
10087 staticInfo.update(ANDROID_LED_AVAILABLE_LEDS,
10088 &avail_leds, 0);
10089
10090 uint8_t focus_dist_calibrated;
10091 int val = lookupFwkName(FOCUS_CALIBRATION_MAP, METADATA_MAP_SIZE(FOCUS_CALIBRATION_MAP),
10092 gCamCapability[cameraId]->focus_dist_calibrated);
10093 if (NAME_NOT_FOUND != val) {
10094 focus_dist_calibrated = (uint8_t)val;
10095 staticInfo.update(ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
10096 &focus_dist_calibrated, 1);
10097 }
10098
10099 int32_t avail_testpattern_modes[MAX_TEST_PATTERN_CNT];
10100 size = 0;
10101 count = MIN(gCamCapability[cameraId]->supported_test_pattern_modes_cnt,
10102 MAX_TEST_PATTERN_CNT);
10103 for (size_t i = 0; i < count; i++) {
10104 int testpatternMode = lookupFwkName(TEST_PATTERN_MAP, METADATA_MAP_SIZE(TEST_PATTERN_MAP),
10105 gCamCapability[cameraId]->supported_test_pattern_modes[i]);
10106 if (NAME_NOT_FOUND != testpatternMode) {
10107 avail_testpattern_modes[size] = testpatternMode;
10108 size++;
10109 }
10110 }
10111 staticInfo.update(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
10112 avail_testpattern_modes,
10113 size);
10114
10115 uint8_t max_pipeline_depth = (uint8_t)(MAX_INFLIGHT_REQUESTS + EMPTY_PIPELINE_DELAY + FRAME_SKIP_DELAY);
10116 staticInfo.update(ANDROID_REQUEST_PIPELINE_MAX_DEPTH,
10117 &max_pipeline_depth,
10118 1);
10119
10120 int32_t partial_result_count = PARTIAL_RESULT_COUNT;
10121 staticInfo.update(ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
10122 &partial_result_count,
10123 1);
10124
10125 int32_t max_stall_duration = MAX_REPROCESS_STALL;
10126 staticInfo.update(ANDROID_REPROCESS_MAX_CAPTURE_STALL, &max_stall_duration, 1);
10127
10128 Vector<uint8_t> available_capabilities;
10129 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE);
10130 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR);
10131 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_POST_PROCESSING);
10132 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS);
10133 if (supportBurst) {
10134 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE);
10135 }
10136 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING);
10137 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING);
10138 if (hfrEnable && available_hfr_configs.array()) {
10139 available_capabilities.add(
10140 ANDROID_REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO);
10141 }
10142
10143 if (CAM_SENSOR_YUV != gCamCapability[cameraId]->sensor_type.sens_type) {
10144 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_RAW);
10145 }
10146 staticInfo.update(ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
10147 available_capabilities.array(),
10148 available_capabilities.size());
10149
10150    //aeLockAvailable is set to true if the capabilities include MANUAL_SENSOR or BURST_CAPTURE.
10151 //Assumption is that all bayer cameras support MANUAL_SENSOR.
10152 uint8_t aeLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
10153 ANDROID_CONTROL_AE_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AE_LOCK_AVAILABLE_FALSE;
10154
10155 staticInfo.update(ANDROID_CONTROL_AE_LOCK_AVAILABLE,
10156 &aeLockAvailable, 1);
10157
10158    //awbLockAvailable is set to true if the capabilities include MANUAL_POST_PROCESSING or
10159 //BURST_CAPTURE. Assumption is that all bayer cameras support MANUAL_POST_PROCESSING.
10160 uint8_t awbLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
10161 ANDROID_CONTROL_AWB_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AWB_LOCK_AVAILABLE_FALSE;
10162
10163 staticInfo.update(ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
10164 &awbLockAvailable, 1);
10165
10166 int32_t max_input_streams = 1;
10167 staticInfo.update(ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
10168 &max_input_streams,
10169 1);
10170
10171 /* format of the map is : input format, num_output_formats, outputFormat1,..,outputFormatN */
10172 int32_t io_format_map[] = {HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 2,
10173 HAL_PIXEL_FORMAT_BLOB, HAL_PIXEL_FORMAT_YCbCr_420_888,
10174 HAL_PIXEL_FORMAT_YCbCr_420_888, 2, HAL_PIXEL_FORMAT_BLOB,
10175 HAL_PIXEL_FORMAT_YCbCr_420_888};
10176 staticInfo.update(ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
10177 io_format_map, sizeof(io_format_map)/sizeof(io_format_map[0]));
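    /* Decoded (illustrative): the map above states that an
     * IMPLEMENTATION_DEFINED input stream can be reprocessed into 2 output
     * formats (BLOB and YCbCr_420_888), and that a YCbCr_420_888 input stream
     * can likewise be reprocessed into BLOB and YCbCr_420_888, matching the
     * PRIVATE_REPROCESSING and YUV_REPROCESSING capabilities advertised
     * earlier. */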
10178
10179 int32_t max_latency = ANDROID_SYNC_MAX_LATENCY_PER_FRAME_CONTROL;
10180 staticInfo.update(ANDROID_SYNC_MAX_LATENCY,
10181 &max_latency,
10182 1);
10183
10184#ifndef USE_HAL_3_3
10185 int32_t isp_sensitivity_range[2];
10186 isp_sensitivity_range[0] =
10187 gCamCapability[cameraId]->isp_sensitivity_range.min_sensitivity;
10188 isp_sensitivity_range[1] =
10189 gCamCapability[cameraId]->isp_sensitivity_range.max_sensitivity;
10190 staticInfo.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
10191 isp_sensitivity_range,
10192 sizeof(isp_sensitivity_range) / sizeof(isp_sensitivity_range[0]));
10193#endif
10194
10195    uint8_t available_hot_pixel_modes[] = {ANDROID_HOT_PIXEL_MODE_FAST,
10196 ANDROID_HOT_PIXEL_MODE_HIGH_QUALITY};
10197 staticInfo.update(ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
10198 available_hot_pixel_modes,
10199 sizeof(available_hot_pixel_modes)/sizeof(available_hot_pixel_modes[0]));
10200
10201 uint8_t available_shading_modes[] = {ANDROID_SHADING_MODE_OFF,
10202 ANDROID_SHADING_MODE_FAST,
10203 ANDROID_SHADING_MODE_HIGH_QUALITY};
10204 staticInfo.update(ANDROID_SHADING_AVAILABLE_MODES,
10205 available_shading_modes,
10206 3);
10207
10208 uint8_t available_lens_shading_map_modes[] = {ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF,
10209 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON};
10210 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
10211 available_lens_shading_map_modes,
10212 2);
10213
10214 uint8_t available_edge_modes[] = {ANDROID_EDGE_MODE_OFF,
10215 ANDROID_EDGE_MODE_FAST,
10216 ANDROID_EDGE_MODE_HIGH_QUALITY,
10217 ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG};
10218 staticInfo.update(ANDROID_EDGE_AVAILABLE_EDGE_MODES,
10219 available_edge_modes,
10220 sizeof(available_edge_modes)/sizeof(available_edge_modes[0]));
10221
10222 uint8_t available_noise_red_modes[] = {ANDROID_NOISE_REDUCTION_MODE_OFF,
10223 ANDROID_NOISE_REDUCTION_MODE_FAST,
10224 ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY,
10225 ANDROID_NOISE_REDUCTION_MODE_MINIMAL,
10226 ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG};
10227 staticInfo.update(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
10228 available_noise_red_modes,
10229 sizeof(available_noise_red_modes)/sizeof(available_noise_red_modes[0]));
10230
10231 uint8_t available_tonemap_modes[] = {ANDROID_TONEMAP_MODE_CONTRAST_CURVE,
10232 ANDROID_TONEMAP_MODE_FAST,
10233 ANDROID_TONEMAP_MODE_HIGH_QUALITY};
10234 staticInfo.update(ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
10235 available_tonemap_modes,
10236 sizeof(available_tonemap_modes)/sizeof(available_tonemap_modes[0]));
10237
10238 uint8_t available_hot_pixel_map_modes[] = {ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF};
10239 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
10240 available_hot_pixel_map_modes,
10241 sizeof(available_hot_pixel_map_modes)/sizeof(available_hot_pixel_map_modes[0]));
10242
10243 val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
10244 gCamCapability[cameraId]->reference_illuminant1);
10245 if (NAME_NOT_FOUND != val) {
10246 uint8_t fwkReferenceIlluminant = (uint8_t)val;
10247 staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT1, &fwkReferenceIlluminant, 1);
10248 }
10249
10250 val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
10251 gCamCapability[cameraId]->reference_illuminant2);
10252 if (NAME_NOT_FOUND != val) {
10253 uint8_t fwkReferenceIlluminant = (uint8_t)val;
10254 staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT2, &fwkReferenceIlluminant, 1);
10255 }
10256
10257 staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX1, (camera_metadata_rational_t *)
10258 (void *)gCamCapability[cameraId]->forward_matrix1,
10259 FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
10260
10261 staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX2, (camera_metadata_rational_t *)
10262 (void *)gCamCapability[cameraId]->forward_matrix2,
10263 FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
10264
10265 staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM1, (camera_metadata_rational_t *)
10266 (void *)gCamCapability[cameraId]->color_transform1,
10267 COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
10268
10269 staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM2, (camera_metadata_rational_t *)
10270 (void *)gCamCapability[cameraId]->color_transform2,
10271 COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
10272
10273 staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM1, (camera_metadata_rational_t *)
10274 (void *)gCamCapability[cameraId]->calibration_transform1,
10275 CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
10276
10277 staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM2, (camera_metadata_rational_t *)
10278 (void *)gCamCapability[cameraId]->calibration_transform2,
10279 CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
10280
10281 int32_t request_keys_basic[] = {ANDROID_COLOR_CORRECTION_MODE,
10282 ANDROID_COLOR_CORRECTION_TRANSFORM, ANDROID_COLOR_CORRECTION_GAINS,
10283 ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
10284 ANDROID_CONTROL_AE_ANTIBANDING_MODE, ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
10285 ANDROID_CONTROL_AE_LOCK, ANDROID_CONTROL_AE_MODE,
10286 ANDROID_CONTROL_AE_REGIONS, ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
10287 ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, ANDROID_CONTROL_AF_MODE,
10288 ANDROID_CONTROL_AF_TRIGGER, ANDROID_CONTROL_AWB_LOCK,
10289 ANDROID_CONTROL_AWB_MODE, ANDROID_CONTROL_CAPTURE_INTENT,
10290 ANDROID_CONTROL_EFFECT_MODE, ANDROID_CONTROL_MODE,
10291 ANDROID_CONTROL_SCENE_MODE, ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
10292 ANDROID_DEMOSAIC_MODE, ANDROID_EDGE_MODE,
10293 ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
10294 ANDROID_JPEG_GPS_COORDINATES,
10295 ANDROID_JPEG_GPS_PROCESSING_METHOD, ANDROID_JPEG_GPS_TIMESTAMP,
10296 ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY, ANDROID_JPEG_THUMBNAIL_QUALITY,
10297 ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE, ANDROID_LENS_FILTER_DENSITY,
10298 ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
10299 ANDROID_LENS_OPTICAL_STABILIZATION_MODE, ANDROID_NOISE_REDUCTION_MODE,
10300 ANDROID_REQUEST_ID, ANDROID_REQUEST_TYPE,
10301 ANDROID_SCALER_CROP_REGION, ANDROID_SENSOR_EXPOSURE_TIME,
10302 ANDROID_SENSOR_FRAME_DURATION, ANDROID_HOT_PIXEL_MODE,
10303 ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE,
10304 ANDROID_SENSOR_SENSITIVITY, ANDROID_SHADING_MODE,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010305#ifndef USE_HAL_3_3
10306 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
10307#endif
Thierry Strudel3d639192016-09-09 11:52:26 -070010308 ANDROID_STATISTICS_FACE_DETECT_MODE,
Thierry Strudel54dc9782017-02-15 12:12:10 -080010309 ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
Thierry Strudel3d639192016-09-09 11:52:26 -070010310 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, ANDROID_TONEMAP_CURVE_BLUE,
10311 ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
Shuzhen Wang2abea3d2016-03-31 11:09:27 -070010312 ANDROID_BLACK_LEVEL_LOCK, NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE,
Samuel Ha68ba5172016-12-15 18:41:12 -080010313 /* DevCamDebug metadata request_keys_basic */
10314 DEVCAMDEBUG_META_ENABLE,
10315 /* DevCamDebug metadata end */
Shuzhen Wang14415f52016-11-16 18:26:18 -080010316 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE,
Ranjith Kagathi Ananda0533b682017-03-24 17:52:46 -070010317 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS,
Ranjith Kagathi Anandae5df3752017-04-28 11:22:51 -070010318 TANGO_MODE_DATA_SENSOR_FULLFOV,
Shuzhen Wangf1b4ddc2017-04-10 18:22:11 -070010319 NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER,
Emilian Peev656e4fa2017-06-02 16:47:04 +010010320 NEXUS_EXPERIMENTAL_2017_PD_DATA_ENABLE,
Samuel Ha68ba5172016-12-15 18:41:12 -080010321 };
Thierry Strudel3d639192016-09-09 11:52:26 -070010322
10323 size_t request_keys_cnt =
10324 sizeof(request_keys_basic)/sizeof(request_keys_basic[0]);
10325 Vector<int32_t> available_request_keys;
10326 available_request_keys.appendArray(request_keys_basic, request_keys_cnt);
10327 if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
10328 available_request_keys.add(ANDROID_CONTROL_AF_REGIONS);
10329 }
10330
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010331 if (gExposeEnableZslKey) {
Chenjie Luo4a761802017-06-13 17:35:54 +000010332 available_request_keys.add(ANDROID_CONTROL_ENABLE_ZSL);
Chien-Yu Chen0a921f92017-08-27 17:25:33 -070010333 available_request_keys.add(NEXUS_EXPERIMENTAL_2017_POSTVIEW);
Chien-Yu Chenb0981e32017-08-28 19:27:35 -070010334 available_request_keys.add(NEXUS_EXPERIMENTAL_2017_CONTINUOUS_ZSL_CAPTURE);
Chien-Yu Chenec328c82017-08-30 16:41:08 -070010335 available_request_keys.add(NEXUS_EXPERIMENTAL_2017_DISABLE_HDRPLUS);
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010336 }
10337
Thierry Strudel3d639192016-09-09 11:52:26 -070010338 staticInfo.update(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS,
10339 available_request_keys.array(), available_request_keys.size());
10340
10341 int32_t result_keys_basic[] = {ANDROID_COLOR_CORRECTION_TRANSFORM,
10342 ANDROID_COLOR_CORRECTION_GAINS, ANDROID_CONTROL_AE_MODE, ANDROID_CONTROL_AE_REGIONS,
10343 ANDROID_CONTROL_AE_STATE, ANDROID_CONTROL_AF_MODE,
10344 ANDROID_CONTROL_AF_STATE, ANDROID_CONTROL_AWB_MODE,
10345 ANDROID_CONTROL_AWB_STATE, ANDROID_CONTROL_MODE, ANDROID_EDGE_MODE,
10346 ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
10347 ANDROID_FLASH_STATE, ANDROID_JPEG_GPS_COORDINATES, ANDROID_JPEG_GPS_PROCESSING_METHOD,
10348 ANDROID_JPEG_GPS_TIMESTAMP, ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY,
10349 ANDROID_JPEG_THUMBNAIL_QUALITY, ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE,
10350 ANDROID_LENS_FILTER_DENSITY, ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
10351 ANDROID_LENS_FOCUS_RANGE, ANDROID_LENS_STATE, ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
10352 ANDROID_NOISE_REDUCTION_MODE, ANDROID_REQUEST_ID,
10353 ANDROID_SCALER_CROP_REGION, ANDROID_SHADING_MODE, ANDROID_SENSOR_EXPOSURE_TIME,
10354 ANDROID_SENSOR_FRAME_DURATION, ANDROID_SENSOR_SENSITIVITY,
10355 ANDROID_SENSOR_TIMESTAMP, ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
10356 ANDROID_SENSOR_PROFILE_TONE_CURVE, ANDROID_BLACK_LEVEL_LOCK, ANDROID_TONEMAP_CURVE_BLUE,
10357 ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
Thierry Strudel54dc9782017-02-15 12:12:10 -080010358 ANDROID_STATISTICS_FACE_DETECT_MODE,
Thierry Strudel3d639192016-09-09 11:52:26 -070010359 ANDROID_STATISTICS_SHARPNESS_MAP, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
10360 ANDROID_STATISTICS_PREDICTED_COLOR_GAINS, ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
10361 ANDROID_STATISTICS_SCENE_FLICKER, ANDROID_STATISTICS_FACE_RECTANGLES,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010362 ANDROID_STATISTICS_FACE_SCORES,
10363#ifndef USE_HAL_3_3
10364 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
10365#endif
Shuzhen Wang2abea3d2016-03-31 11:09:27 -070010366 NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE,
Shuzhen Wange763e802016-03-31 10:24:29 -070010367 NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE,
Samuel Ha68ba5172016-12-15 18:41:12 -080010368 // DevCamDebug metadata result_keys_basic
10369 DEVCAMDEBUG_META_ENABLE,
10370 // DevCamDebug metadata result_keys AF
10371 DEVCAMDEBUG_AF_LENS_POSITION,
10372 DEVCAMDEBUG_AF_TOF_CONFIDENCE,
10373 DEVCAMDEBUG_AF_TOF_DISTANCE,
10374 DEVCAMDEBUG_AF_LUMA,
10375 DEVCAMDEBUG_AF_HAF_STATE,
10376 DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
10377 DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
10378 DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
10379 DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
10380 DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
10381 DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
10382 DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
10383 DEVCAMDEBUG_AF_MONITOR_REFOCUS,
10384 DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
10385 DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
10386 DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
10387 DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
10388 DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
10389 DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
10390 DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
10391 DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
10392 DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
10393 DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
10394 DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
10395 DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
10396 DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
10397 DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
10398 // DevCamDebug metadata result_keys AEC
10399 DEVCAMDEBUG_AEC_TARGET_LUMA,
10400 DEVCAMDEBUG_AEC_COMP_LUMA,
10401 DEVCAMDEBUG_AEC_AVG_LUMA,
10402 DEVCAMDEBUG_AEC_CUR_LUMA,
10403 DEVCAMDEBUG_AEC_LINECOUNT,
10404 DEVCAMDEBUG_AEC_REAL_GAIN,
10405 DEVCAMDEBUG_AEC_EXP_INDEX,
10406 DEVCAMDEBUG_AEC_LUX_IDX,
Samuel Ha34229982017-02-17 13:51:11 -080010407 // DevCamDebug metadata result_keys zzHDR
10408 DEVCAMDEBUG_AEC_L_REAL_GAIN,
10409 DEVCAMDEBUG_AEC_L_LINECOUNT,
10410 DEVCAMDEBUG_AEC_S_REAL_GAIN,
10411 DEVCAMDEBUG_AEC_S_LINECOUNT,
10412 DEVCAMDEBUG_AEC_HDR_SENSITIVITY_RATIO,
10413 DEVCAMDEBUG_AEC_HDR_EXP_TIME_RATIO,
10414 // DevCamDebug metadata result_keys ADRC
10415 DEVCAMDEBUG_AEC_TOTAL_DRC_GAIN,
10416 DEVCAMDEBUG_AEC_COLOR_DRC_GAIN,
10417 DEVCAMDEBUG_AEC_GTM_RATIO,
10418 DEVCAMDEBUG_AEC_LTM_RATIO,
10419 DEVCAMDEBUG_AEC_LA_RATIO,
10420 DEVCAMDEBUG_AEC_GAMMA_RATIO,
Samuel Habdf4fac2017-07-28 17:21:18 -070010421 // DevCamDebug metadata result_keys AEC MOTION
10422 DEVCAMDEBUG_AEC_CAMERA_MOTION_DX,
10423 DEVCAMDEBUG_AEC_CAMERA_MOTION_DY,
10424 DEVCAMDEBUG_AEC_SUBJECT_MOTION,
Samuel Ha68ba5172016-12-15 18:41:12 -080010425 // DevCamDebug metadata result_keys AWB
10426 DEVCAMDEBUG_AWB_R_GAIN,
10427 DEVCAMDEBUG_AWB_G_GAIN,
10428 DEVCAMDEBUG_AWB_B_GAIN,
10429 DEVCAMDEBUG_AWB_CCT,
10430 DEVCAMDEBUG_AWB_DECISION,
10431 /* DevCamDebug metadata end */
Shuzhen Wang14415f52016-11-16 18:26:18 -080010432 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE,
10433 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS,
10434 NEXUS_EXPERIMENTAL_2017_HISTOGRAM,
Shuzhen Wangf1b4ddc2017-04-10 18:22:11 -070010435 NEXUS_EXPERIMENTAL_2017_AF_REGIONS_CONFIDENCE,
Shuzhen Wangc89c77e2017-08-07 15:50:12 -070010436 NEXUS_EXPERIMENTAL_2017_EXP_TIME_BOOST,
Shuzhen Wang3569d4a2017-09-04 19:10:28 -070010437 NEXUS_EXPERIMENTAL_2017_SCENE_DISTANCE,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010438 };
10439
Thierry Strudel3d639192016-09-09 11:52:26 -070010440 size_t result_keys_cnt =
10441 sizeof(result_keys_basic)/sizeof(result_keys_basic[0]);
10442
10443 Vector<int32_t> available_result_keys;
10444 available_result_keys.appendArray(result_keys_basic, result_keys_cnt);
10445 if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
10446 available_result_keys.add(ANDROID_CONTROL_AF_REGIONS);
10447 }
10448 if (CAM_SENSOR_RAW == gCamCapability[cameraId]->sensor_type.sens_type) {
10449 available_result_keys.add(ANDROID_SENSOR_NOISE_PROFILE);
10450 available_result_keys.add(ANDROID_SENSOR_GREEN_SPLIT);
10451 }
10452 if (supportedFaceDetectMode == 1) {
10453 available_result_keys.add(ANDROID_STATISTICS_FACE_RECTANGLES);
10454 available_result_keys.add(ANDROID_STATISTICS_FACE_SCORES);
10455 } else if ((supportedFaceDetectMode == 2) ||
10456 (supportedFaceDetectMode == 3)) {
10457 available_result_keys.add(ANDROID_STATISTICS_FACE_IDS);
10458 available_result_keys.add(ANDROID_STATISTICS_FACE_LANDMARKS);
10459 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010460#ifndef USE_HAL_3_3
10461 if (hasBlackRegions) {
10462 available_result_keys.add(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL);
10463 available_result_keys.add(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL);
10464 }
10465#endif
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010466
10467 if (gExposeEnableZslKey) {
10468 available_result_keys.add(ANDROID_CONTROL_ENABLE_ZSL);
Chien-Yu Chendaf68892017-08-25 12:56:40 -070010469 available_result_keys.add(NEXUS_EXPERIMENTAL_2017_NEXT_STILL_INTENT_REQUEST_READY);
Chien-Yu Chen0a921f92017-08-27 17:25:33 -070010470 available_result_keys.add(NEXUS_EXPERIMENTAL_2017_POSTVIEW_CONFIG);
10471 available_result_keys.add(NEXUS_EXPERIMENTAL_2017_POSTVIEW_DATA);
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010472 }
10473
Thierry Strudel3d639192016-09-09 11:52:26 -070010474 staticInfo.update(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
10475 available_result_keys.array(), available_result_keys.size());
10476
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010477 int32_t characteristics_keys_basic[] = {ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
Thierry Strudel3d639192016-09-09 11:52:26 -070010478 ANDROID_CONTROL_AE_AVAILABLE_MODES, ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
10479 ANDROID_CONTROL_AE_COMPENSATION_RANGE, ANDROID_CONTROL_AE_COMPENSATION_STEP,
10480 ANDROID_CONTROL_AF_AVAILABLE_MODES, ANDROID_CONTROL_AVAILABLE_EFFECTS,
10481 ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
10482 ANDROID_SCALER_CROPPING_TYPE,
10483 ANDROID_SYNC_MAX_LATENCY,
10484 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
10485 ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
10486 ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
10487 ANDROID_CONTROL_AWB_AVAILABLE_MODES, ANDROID_CONTROL_MAX_REGIONS,
10488 ANDROID_CONTROL_SCENE_MODE_OVERRIDES,ANDROID_FLASH_INFO_AVAILABLE,
10489 ANDROID_FLASH_INFO_CHARGE_DURATION, ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
10490 ANDROID_JPEG_MAX_SIZE, ANDROID_LENS_INFO_AVAILABLE_APERTURES,
10491 ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
10492 ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
10493 ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
10494 ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE, ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
10495 ANDROID_LENS_INFO_SHADING_MAP_SIZE, ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
10496 ANDROID_LENS_FACING,
10497 ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS, ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
10498 ANDROID_REQUEST_PIPELINE_MAX_DEPTH, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
10499 ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
10500 ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
10501 ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
10502 ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
10503 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
10504 /*ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,*/
10505 ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS, ANDROID_SENSOR_FORWARD_MATRIX1,
10506 ANDROID_SENSOR_REFERENCE_ILLUMINANT1, ANDROID_SENSOR_REFERENCE_ILLUMINANT2,
10507 ANDROID_SENSOR_FORWARD_MATRIX2, ANDROID_SENSOR_COLOR_TRANSFORM1,
10508 ANDROID_SENSOR_COLOR_TRANSFORM2, ANDROID_SENSOR_CALIBRATION_TRANSFORM1,
10509 ANDROID_SENSOR_CALIBRATION_TRANSFORM2, ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
10510 ANDROID_SENSOR_INFO_SENSITIVITY_RANGE, ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
10511 ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE, ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
10512 ANDROID_SENSOR_INFO_PHYSICAL_SIZE, ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
10513 ANDROID_SENSOR_INFO_WHITE_LEVEL, ANDROID_SENSOR_BASE_GAIN_FACTOR,
10514 ANDROID_SENSOR_BLACK_LEVEL_PATTERN, ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
10515 ANDROID_SENSOR_ORIENTATION, ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
10516 ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
Thierry Strudel54dc9782017-02-15 12:12:10 -080010517 ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
Thierry Strudel3d639192016-09-09 11:52:26 -070010518 ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
10519 ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE, ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
10520 ANDROID_EDGE_AVAILABLE_EDGE_MODES,
10521 ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
10522 ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
10523 ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
10524 ANDROID_TONEMAP_MAX_CURVE_POINTS,
10525 ANDROID_CONTROL_AVAILABLE_MODES,
10526 ANDROID_CONTROL_AE_LOCK_AVAILABLE,
10527 ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
10528 ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
10529 ANDROID_SHADING_AVAILABLE_MODES,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010530 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
10531#ifndef USE_HAL_3_3
10532 ANDROID_SENSOR_OPAQUE_RAW_SIZE,
10533 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
10534#endif
10535 };
10536
10537 Vector<int32_t> available_characteristics_keys;
10538 available_characteristics_keys.appendArray(characteristics_keys_basic,
10539 sizeof(characteristics_keys_basic)/sizeof(int32_t));
10540#ifndef USE_HAL_3_3
10541 if (hasBlackRegions) {
10542 available_characteristics_keys.add(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS);
10543 }
10544#endif
Emilian Peev0f3c3162017-03-15 12:57:46 +000010545
10546 if (0 <= indexPD) {
10547 int32_t depthKeys[] = {
10548 ANDROID_DEPTH_MAX_DEPTH_SAMPLES,
10549 ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
10550 ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS,
10551 ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS,
10552 ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE
10553 };
10554 available_characteristics_keys.appendArray(depthKeys,
10555 sizeof(depthKeys) / sizeof(depthKeys[0]));
10556 }
10557
Thierry Strudel3d639192016-09-09 11:52:26 -070010558 staticInfo.update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010559 available_characteristics_keys.array(),
10560 available_characteristics_keys.size());
Thierry Strudel3d639192016-09-09 11:52:26 -070010561
10562 /*available stall durations depend on the hw + sw and will be different for different devices */
10563    /*raw entries have to be added here after implementation*/
10564 int32_t stall_formats[] = {HAL_PIXEL_FORMAT_BLOB, ANDROID_SCALER_AVAILABLE_FORMATS_RAW16};
10565 size_t stall_formats_count = sizeof(stall_formats)/sizeof(int32_t);
10566
10567 Vector<int64_t> available_stall_durations;
10568 for (uint32_t j = 0; j < stall_formats_count; j++) {
10569 if (stall_formats[j] == HAL_PIXEL_FORMAT_BLOB) {
10570 for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
10571 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
10572 available_stall_durations.add(stall_formats[j]);
10573 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
10574 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
10575 available_stall_durations.add(gCamCapability[cameraId]->jpeg_stall_durations[i]);
10576 }
10577 } else {
10578 for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
10579 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10580 available_stall_durations.add(stall_formats[j]);
10581 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
10582 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
10583 available_stall_durations.add(gCamCapability[cameraId]->raw16_stall_durations[i]);
10584 }
10585 }
10586 }
10587 staticInfo.update(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,
10588 available_stall_durations.array(),
10589 available_stall_durations.size());
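    // Each stall entry added above is a (format, width, height, stall duration in ns)
    // quadruple, matching the ANDROID_SCALER_AVAILABLE_STALL_DURATIONS layout.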
10590
10591 //QCAMERA3_OPAQUE_RAW
10592 uint8_t raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
10593 cam_format_t fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
10594 switch (gCamCapability[cameraId]->opaque_raw_fmt) {
10595 case LEGACY_RAW:
10596 if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
10597 fmt = CAM_FORMAT_BAYER_QCOM_RAW_8BPP_GBRG;
10598 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
10599 fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
10600 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
10601 fmt = CAM_FORMAT_BAYER_QCOM_RAW_12BPP_GBRG;
10602 raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
10603 break;
10604 case MIPI_RAW:
10605 if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
10606 fmt = CAM_FORMAT_BAYER_MIPI_RAW_8BPP_GBRG;
10607 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
10608 fmt = CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG;
10609 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
10610 fmt = CAM_FORMAT_BAYER_MIPI_RAW_12BPP_GBRG;
10611 raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_MIPI;
10612 break;
10613 default:
10614 LOGE("unknown opaque_raw_format %d",
10615 gCamCapability[cameraId]->opaque_raw_fmt);
10616 break;
10617 }
10618 staticInfo.update(QCAMERA3_OPAQUE_RAW_FORMAT, &raw_format, 1);
10619
10620 Vector<int32_t> strides;
10621 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
10622 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10623 cam_stream_buf_plane_info_t buf_planes;
10624 strides.add(gCamCapability[cameraId]->raw_dim[i].width);
10625 strides.add(gCamCapability[cameraId]->raw_dim[i].height);
10626 mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
10627 &gCamCapability[cameraId]->padding_info, &buf_planes);
10628 strides.add(buf_planes.plane_info.mp[0].stride);
10629 }
10630 staticInfo.update(QCAMERA3_OPAQUE_RAW_STRIDES, strides.array(),
10631 strides.size());
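    // QCAMERA3_OPAQUE_RAW_STRIDES is published as (width, height, stride) triplets, one per
    // supported raw dimension, using the plane-0 stride from the calculated buffer layout.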
10632
Mansoor Aftab58465fa2017-01-26 15:02:44 -080010633 //TBD: remove the following line once backend advertises zzHDR in feature mask
10634 gCamCapability[cameraId]->qcom_supported_feature_mask |= CAM_QCOM_FEATURE_ZIGZAG_HDR;
Thierry Strudel04e026f2016-10-10 11:27:36 -070010635 //Video HDR default
10636 if ((gCamCapability[cameraId]->qcom_supported_feature_mask) &
10637 (CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR |
Mansoor Aftab58465fa2017-01-26 15:02:44 -080010638 CAM_QCOM_FEATURE_ZIGZAG_HDR | CAM_QCOM_FEATURE_SENSOR_HDR)) {
Thierry Strudel04e026f2016-10-10 11:27:36 -070010639 int32_t vhdr_mode[] = {
10640 QCAMERA3_VIDEO_HDR_MODE_OFF,
10641 QCAMERA3_VIDEO_HDR_MODE_ON};
10642
10643 size_t vhdr_mode_count = sizeof(vhdr_mode) / sizeof(int32_t);
10644 staticInfo.update(QCAMERA3_AVAILABLE_VIDEO_HDR_MODES,
10645 vhdr_mode, vhdr_mode_count);
10646 }
10647
Thierry Strudel3d639192016-09-09 11:52:26 -070010648 staticInfo.update(QCAMERA3_DUALCAM_CALIB_META_DATA_BLOB,
10649 (const uint8_t*)&gCamCapability[cameraId]->related_cam_calibration,
10650 sizeof(gCamCapability[cameraId]->related_cam_calibration));
10651
10652 uint8_t isMonoOnly =
10653 (gCamCapability[cameraId]->color_arrangement == CAM_FILTER_ARRANGEMENT_Y);
10654 staticInfo.update(QCAMERA3_SENSOR_IS_MONO_ONLY,
10655 &isMonoOnly, 1);
10656
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010657#ifndef USE_HAL_3_3
10658 Vector<int32_t> opaque_size;
10659 for (size_t j = 0; j < scalar_formats_count; j++) {
10660 if (scalar_formats[j] == ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE) {
10661 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
10662 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10663 cam_stream_buf_plane_info_t buf_planes;
10664
10665 rc = mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
10666 &gCamCapability[cameraId]->padding_info, &buf_planes);
10667
10668 if (rc == 0) {
10669 opaque_size.add(gCamCapability[cameraId]->raw_dim[i].width);
10670 opaque_size.add(gCamCapability[cameraId]->raw_dim[i].height);
10671 opaque_size.add(buf_planes.plane_info.frame_len);
10672            } else {
10673 LOGE("raw frame calculation failed!");
10674 }
10675 }
10676 }
10677 }
10678
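    // Each opaque RAW configuration collected above is a (width, height, frame length in
    // bytes) triplet, hence the PER_CONFIGURATION_SIZE_3 check before publishing the tag.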
10679 if ((opaque_size.size() > 0) &&
10680 (opaque_size.size() % PER_CONFIGURATION_SIZE_3 == 0))
10681 staticInfo.update(ANDROID_SENSOR_OPAQUE_RAW_SIZE, opaque_size.array(), opaque_size.size());
10682 else
10683        LOGW("Warning: ANDROID_SENSOR_OPAQUE_RAW_SIZE is using rough estimation (2 bytes/pixel)");
10684#endif
10685
Thierry Strudel04e026f2016-10-10 11:27:36 -070010686 if (gCamCapability[cameraId]->supported_ir_mode_cnt > 0) {
10687 int32_t avail_ir_modes[CAM_IR_MODE_MAX];
10688 size = 0;
10689 count = CAM_IR_MODE_MAX;
10690 count = MIN(gCamCapability[cameraId]->supported_ir_mode_cnt, count);
10691 for (size_t i = 0; i < count; i++) {
10692 int val = lookupFwkName(IR_MODES_MAP, METADATA_MAP_SIZE(IR_MODES_MAP),
10693 gCamCapability[cameraId]->supported_ir_modes[i]);
10694 if (NAME_NOT_FOUND != val) {
10695 avail_ir_modes[size] = (int32_t)val;
10696 size++;
10697 }
10698 }
10699 staticInfo.update(QCAMERA3_IR_AVAILABLE_MODES,
10700 avail_ir_modes, size);
10701 }
10702
Thierry Strudel295a0ca2016-11-03 18:38:47 -070010703 if (gCamCapability[cameraId]->supported_instant_aec_modes_cnt > 0) {
10704 int32_t available_instant_aec_modes[CAM_AEC_CONVERGENCE_MAX];
10705 size = 0;
10706 count = CAM_AEC_CONVERGENCE_MAX;
10707 count = MIN(gCamCapability[cameraId]->supported_instant_aec_modes_cnt, count);
10708 for (size_t i = 0; i < count; i++) {
10709 int val = lookupFwkName(INSTANT_AEC_MODES_MAP, METADATA_MAP_SIZE(INSTANT_AEC_MODES_MAP),
10710 gCamCapability[cameraId]->supported_instant_aec_modes[i]);
10711 if (NAME_NOT_FOUND != val) {
10712 available_instant_aec_modes[size] = (int32_t)val;
10713 size++;
10714 }
10715 }
10716 staticInfo.update(QCAMERA3_INSTANT_AEC_AVAILABLE_MODES,
10717 available_instant_aec_modes, size);
10718 }
10719
Thierry Strudel54dc9782017-02-15 12:12:10 -080010720 int32_t sharpness_range[] = {
10721 gCamCapability[cameraId]->sharpness_ctrl.min_value,
10722 gCamCapability[cameraId]->sharpness_ctrl.max_value};
10723 staticInfo.update(QCAMERA3_SHARPNESS_RANGE, sharpness_range, 2);
10724
10725 if (gCamCapability[cameraId]->supported_binning_correction_mode_cnt > 0) {
10726 int32_t avail_binning_modes[CAM_BINNING_CORRECTION_MODE_MAX];
10727 size = 0;
10728 count = CAM_BINNING_CORRECTION_MODE_MAX;
10729 count = MIN(gCamCapability[cameraId]->supported_binning_correction_mode_cnt, count);
10730 for (size_t i = 0; i < count; i++) {
10731 int val = lookupFwkName(BINNING_CORRECTION_MODES_MAP,
10732 METADATA_MAP_SIZE(BINNING_CORRECTION_MODES_MAP),
10733 gCamCapability[cameraId]->supported_binning_modes[i]);
10734 if (NAME_NOT_FOUND != val) {
10735 avail_binning_modes[size] = (int32_t)val;
10736 size++;
10737 }
10738 }
10739 staticInfo.update(QCAMERA3_AVAILABLE_BINNING_CORRECTION_MODES,
10740 avail_binning_modes, size);
10741 }
10742
10743 if (gCamCapability[cameraId]->supported_aec_modes_cnt > 0) {
10744 int32_t available_aec_modes[CAM_AEC_MODE_MAX];
10745 size = 0;
10746 count = MIN(gCamCapability[cameraId]->supported_aec_modes_cnt, CAM_AEC_MODE_MAX);
10747 for (size_t i = 0; i < count; i++) {
10748 int32_t val = lookupFwkName(AEC_MODES_MAP, METADATA_MAP_SIZE(AEC_MODES_MAP),
10749 gCamCapability[cameraId]->supported_aec_modes[i]);
10750 if (NAME_NOT_FOUND != val)
10751 available_aec_modes[size++] = val;
10752 }
10753 staticInfo.update(QCAMERA3_EXPOSURE_METER_AVAILABLE_MODES,
10754 available_aec_modes, size);
10755 }
10756
10757 if (gCamCapability[cameraId]->supported_iso_modes_cnt > 0) {
10758 int32_t available_iso_modes[CAM_ISO_MODE_MAX];
10759 size = 0;
10760 count = MIN(gCamCapability[cameraId]->supported_iso_modes_cnt, CAM_ISO_MODE_MAX);
10761 for (size_t i = 0; i < count; i++) {
10762 int32_t val = lookupFwkName(ISO_MODES_MAP, METADATA_MAP_SIZE(ISO_MODES_MAP),
10763 gCamCapability[cameraId]->supported_iso_modes[i]);
10764 if (NAME_NOT_FOUND != val)
10765 available_iso_modes[size++] = val;
10766 }
10767 staticInfo.update(QCAMERA3_ISO_AVAILABLE_MODES,
10768 available_iso_modes, size);
10769 }
10770
10771 int64_t available_exp_time_range[EXPOSURE_TIME_RANGE_CNT];
Jason Lee805955a2017-05-04 10:29:14 -070010772 for (size_t i = 0; i < EXPOSURE_TIME_RANGE_CNT; i++)
Thierry Strudel54dc9782017-02-15 12:12:10 -080010773 available_exp_time_range[i] = gCamCapability[cameraId]->exposure_time_range[i];
10774 staticInfo.update(QCAMERA3_EXP_TIME_RANGE,
10775 available_exp_time_range, EXPOSURE_TIME_RANGE_CNT);
10776
10777 int32_t available_saturation_range[4];
10778 available_saturation_range[0] = gCamCapability[cameraId]->saturation_ctrl.min_value;
10779 available_saturation_range[1] = gCamCapability[cameraId]->saturation_ctrl.max_value;
10780 available_saturation_range[2] = gCamCapability[cameraId]->saturation_ctrl.def_value;
10781 available_saturation_range[3] = gCamCapability[cameraId]->saturation_ctrl.step;
10782 staticInfo.update(QCAMERA3_SATURATION_RANGE,
10783 available_saturation_range, 4);
10784
10785 uint8_t is_hdr_values[2];
10786 is_hdr_values[0] = 0;
10787 is_hdr_values[1] = 1;
10788 staticInfo.update(QCAMERA3_STATS_IS_HDR_SCENE_VALUES,
10789 is_hdr_values, 2);
10790
10791 float is_hdr_confidence_range[2];
10792 is_hdr_confidence_range[0] = 0.0;
10793 is_hdr_confidence_range[1] = 1.0;
10794 staticInfo.update(QCAMERA3_STATS_IS_HDR_SCENE_CONFIDENCE_RANGE,
10795 is_hdr_confidence_range, 2);
10796
Emilian Peev0a972ef2017-03-16 10:25:53 +000010797 size_t eepromLength = strnlen(
10798 reinterpret_cast<const char *>(
10799 gCamCapability[cameraId]->eeprom_version_info),
10800 sizeof(gCamCapability[cameraId]->eeprom_version_info));
10801 if (0 < eepromLength) {
Zhijun Hea557c4c2017-03-16 18:37:53 -070010802 char easelInfo[] = ",E:N";
10803 char *eepromInfo = reinterpret_cast<char *>(gCamCapability[cameraId]->eeprom_version_info);
10804 if (eepromLength + sizeof(easelInfo) < MAX_EEPROM_VERSION_INFO_LEN) {
10805 eepromLength += sizeof(easelInfo);
Chien-Yu Chend77a5462017-06-02 18:00:38 -070010806 strlcat(eepromInfo, ((gEaselManagerClient != nullptr &&
Arnd Geis082a4d72017-08-24 10:33:07 -070010807 gEaselManagerClient->isEaselPresentOnDevice()) ? ",E-ver" : ",E:N"),
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010808 MAX_EEPROM_VERSION_INFO_LEN);
Zhijun Hea557c4c2017-03-16 18:37:53 -070010809 }
Emilian Peev0a972ef2017-03-16 10:25:53 +000010810 staticInfo.update(NEXUS_EXPERIMENTAL_2017_EEPROM_VERSION_INFO,
10811 gCamCapability[cameraId]->eeprom_version_info, eepromLength);
10812 }
10813
Thierry Strudel3d639192016-09-09 11:52:26 -070010814 gStaticMetadata[cameraId] = staticInfo.release();
10815 return rc;
10816}
10817
10818/*===========================================================================
10819 * FUNCTION : makeTable
10820 *
10821 * DESCRIPTION: make a table of sizes
10822 *
10823 * PARAMETERS :
10824 *
10825 *
10826 *==========================================================================*/
10827void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, size_t size,
10828 size_t max_size, int32_t *sizeTable)
10829{
10830 size_t j = 0;
10831 if (size > max_size) {
10832 size = max_size;
10833 }
10834 for (size_t i = 0; i < size; i++) {
10835 sizeTable[j] = dimTable[i].width;
10836 sizeTable[j+1] = dimTable[i].height;
10837 j+=2;
10838 }
10839}
10840
10841/*===========================================================================
10842 * FUNCTION : makeFPSTable
10843 *
10844 * DESCRIPTION: make a table of fps ranges
10845 *
10846 * PARAMETERS :
10847 *
10848 *==========================================================================*/
10849void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, size_t size,
10850 size_t max_size, int32_t *fpsRangesTable)
10851{
10852 size_t j = 0;
10853 if (size > max_size) {
10854 size = max_size;
10855 }
10856 for (size_t i = 0; i < size; i++) {
10857 fpsRangesTable[j] = (int32_t)fpsTable[i].min_fps;
10858 fpsRangesTable[j+1] = (int32_t)fpsTable[i].max_fps;
10859 j+=2;
10860 }
10861}
10862
10863/*===========================================================================
10864 * FUNCTION : makeOverridesList
10865 *
10866 * DESCRIPTION: make a list of scene mode overrides
10867 *
10868 * PARAMETERS :
10869 *
10870 *
10871 *==========================================================================*/
10872void QCamera3HardwareInterface::makeOverridesList(
10873 cam_scene_mode_overrides_t* overridesTable, size_t size, size_t max_size,
10874 uint8_t *overridesList, uint8_t *supported_indexes, uint32_t camera_id)
10875{
10876    /*The daemon gives a list of overrides for all scene modes.
10877      However, we should send the framework only the overrides for the scene modes
10878      it supports*/
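    // Each override written below is an {AE mode, AWB mode, AF mode} triplet per supported
    // scene mode, which is the layout ANDROID_CONTROL_SCENE_MODE_OVERRIDES expects.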
10879 size_t j = 0;
10880 if (size > max_size) {
10881 size = max_size;
10882 }
10883 size_t focus_count = CAM_FOCUS_MODE_MAX;
10884 focus_count = MIN(gCamCapability[camera_id]->supported_focus_modes_cnt,
10885 focus_count);
10886 for (size_t i = 0; i < size; i++) {
10887 bool supt = false;
10888 size_t index = supported_indexes[i];
10889 overridesList[j] = gCamCapability[camera_id]->flash_available ?
10890 ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH : ANDROID_CONTROL_AE_MODE_ON;
10891 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
10892 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
10893 overridesTable[index].awb_mode);
10894 if (NAME_NOT_FOUND != val) {
10895 overridesList[j+1] = (uint8_t)val;
10896 }
10897 uint8_t focus_override = overridesTable[index].af_mode;
10898 for (size_t k = 0; k < focus_count; k++) {
10899 if (gCamCapability[camera_id]->supported_focus_modes[k] == focus_override) {
10900 supt = true;
10901 break;
10902 }
10903 }
10904 if (supt) {
10905 val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
10906 focus_override);
10907 if (NAME_NOT_FOUND != val) {
10908 overridesList[j+2] = (uint8_t)val;
10909 }
10910 } else {
10911 overridesList[j+2] = ANDROID_CONTROL_AF_MODE_OFF;
10912 }
10913 j+=3;
10914 }
10915}
10916
10917/*===========================================================================
10918 * FUNCTION : filterJpegSizes
10919 *
10920 * DESCRIPTION: Returns the supported jpeg sizes that are no smaller than the
10921 *              active array size divided by the maximum downscale factor
10922 *
10923 * PARAMETERS :
10924 *
10925 * RETURN : length of jpegSizes array
10926 *==========================================================================*/
10927
10928size_t QCamera3HardwareInterface::filterJpegSizes(int32_t *jpegSizes, int32_t *processedSizes,
10929 size_t processedSizesCnt, size_t maxCount, cam_rect_t active_array_size,
10930 uint8_t downscale_factor)
10931{
10932 if (0 == downscale_factor) {
10933 downscale_factor = 1;
10934 }
10935
10936 int32_t min_width = active_array_size.width / downscale_factor;
10937 int32_t min_height = active_array_size.height / downscale_factor;
10938 size_t jpegSizesCnt = 0;
10939 if (processedSizesCnt > maxCount) {
10940 processedSizesCnt = maxCount;
10941 }
10942 for (size_t i = 0; i < processedSizesCnt; i+=2) {
10943 if (processedSizes[i] >= min_width && processedSizes[i+1] >= min_height) {
10944 jpegSizes[jpegSizesCnt] = processedSizes[i];
10945 jpegSizes[jpegSizesCnt+1] = processedSizes[i+1];
10946 jpegSizesCnt += 2;
10947 }
10948 }
10949 return jpegSizesCnt;
10950}
10951
10952/*===========================================================================
10953 * FUNCTION : computeNoiseModelEntryS
10954 *
10955 * DESCRIPTION: function to map a given sensitivity to the S noise
10956 * model parameters in the DNG noise model.
10957 *
10958 * PARAMETERS : sens : the sensor sensitivity
10959 *
10960 * RETURN     : S (sensor amplification) noise
10961 *
10962 *==========================================================================*/
10963double QCamera3HardwareInterface::computeNoiseModelEntryS(int32_t sens) {
10964 double s = gCamCapability[mCameraId]->gradient_S * sens +
10965 gCamCapability[mCameraId]->offset_S;
10966 return ((s < 0.0) ? 0.0 : s);
10967}
10968
10969/*===========================================================================
10970 * FUNCTION : computeNoiseModelEntryO
10971 *
10972 * DESCRIPTION: function to map a given sensitivity to the O noise
10973 * model parameters in the DNG noise model.
10974 *
10975 * PARAMETERS : sens : the sensor sensitivity
10976 *
10977 * RETURN     : O (sensor readout) noise
10978 *
10979 *==========================================================================*/
10980double QCamera3HardwareInterface::computeNoiseModelEntryO(int32_t sens) {
10981 int32_t max_analog_sens = gCamCapability[mCameraId]->max_analog_sensitivity;
10982 double digital_gain = (1.0 * sens / max_analog_sens) < 1.0 ?
10983 1.0 : (1.0 * sens / max_analog_sens);
10984 double o = gCamCapability[mCameraId]->gradient_O * sens * sens +
10985 gCamCapability[mCameraId]->offset_O * digital_gain * digital_gain;
10986 return ((o < 0.0) ? 0.0 : o);
10987}
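// The S and O values from the two helpers above are used to populate
// ANDROID_SENSOR_NOISE_PROFILE. In the DNG-style model the noise variance at a normalized
// signal level x is roughly S * x + O, so S tracks the analog amplification while O
// additionally grows with the square of the digital gain.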
10988
10989/*===========================================================================
10990 * FUNCTION : getSensorSensitivity
10991 *
10992 * DESCRIPTION: convert iso_mode to an integer value
10993 *
10994 * PARAMETERS : iso_mode : the iso_mode supported by sensor
10995 *
10996 * RETURN     : sensitivity supported by sensor
10997 *
10998 *==========================================================================*/
10999int32_t QCamera3HardwareInterface::getSensorSensitivity(int32_t iso_mode)
11000{
11001 int32_t sensitivity;
11002
11003 switch (iso_mode) {
11004 case CAM_ISO_MODE_100:
11005 sensitivity = 100;
11006 break;
11007 case CAM_ISO_MODE_200:
11008 sensitivity = 200;
11009 break;
11010 case CAM_ISO_MODE_400:
11011 sensitivity = 400;
11012 break;
11013 case CAM_ISO_MODE_800:
11014 sensitivity = 800;
11015 break;
11016 case CAM_ISO_MODE_1600:
11017 sensitivity = 1600;
11018 break;
11019 default:
11020 sensitivity = -1;
11021 break;
11022 }
11023 return sensitivity;
11024}
11025
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080011026int QCamera3HardwareInterface::initHdrPlusClientLocked() {
Chien-Yu Chend77a5462017-06-02 18:00:38 -070011027 if (gEaselManagerClient == nullptr) {
11028 gEaselManagerClient = EaselManagerClient::create();
11029 if (gEaselManagerClient == nullptr) {
11030 ALOGE("%s: Failed to create Easel manager client.", __FUNCTION__);
11031 return -ENODEV;
11032 }
11033 }
11034
11035 if (!EaselManagerClientOpened && gEaselManagerClient->isEaselPresentOnDevice()) {
Chien-Yu Chen08309b32017-03-13 17:41:32 -070011036 // Check if HAL should not power on Easel even if it's present. This is to allow HDR+ tests
11037 // to connect to Easel.
11038 bool doNotpowerOnEasel =
11039 property_get_bool("camera.hdrplus.donotpoweroneasel", false);
11040
11041 if (doNotpowerOnEasel) {
Chien-Yu Chen08309b32017-03-13 17:41:32 -070011042 ALOGI("%s: Easel is present but not powered on.", __FUNCTION__);
11043 return OK;
11044 }
11045
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080011046 // If Easel is present, power on Easel and suspend it immediately.
Chien-Yu Chend77a5462017-06-02 18:00:38 -070011047 status_t res = gEaselManagerClient->open();
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080011048 if (res != OK) {
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070011049 ALOGE("%s: Opening Easel manager client failed: %s (%d)", __FUNCTION__, strerror(-res),
11050 res);
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080011051 return res;
11052 }
11053
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070011054 EaselManagerClientOpened = true;
11055
Chien-Yu Chend77a5462017-06-02 18:00:38 -070011056 res = gEaselManagerClient->suspend();
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080011057 if (res != OK) {
11058 ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__, strerror(-res), res);
11059 }
11060
Zhijun Hedaacd8a2017-09-14 12:07:42 -070011061 gEaselBypassOnly = !property_get_bool("persist.camera.hdrplus.enable", false);
Chien-Yu Chen509314b2017-04-07 15:27:55 -070011062 gEaselProfilingEnabled = property_get_bool("persist.camera.hdrplus.profiling", false);
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070011063 gEnableMultipleHdrplusOutputs =
11064 property_get_bool("persist.camera.hdrplus.multiple_outputs", false);
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070011065
11066 // Expose enableZsl key only when HDR+ mode is enabled.
11067 gExposeEnableZslKey = !gEaselBypassOnly;
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080011068 }
11069
11070 return OK;
11071}
11072
Thierry Strudel3d639192016-09-09 11:52:26 -070011073/*===========================================================================
11074 * FUNCTION : getCamInfo
11075 *
11076 * DESCRIPTION: query camera capabilities
11077 *
11078 * PARAMETERS :
11079 * @cameraId : camera Id
11080 * @info : camera info struct to be filled in with camera capabilities
11081 *
11082 * RETURN : int type of status
11083 * NO_ERROR -- success
11084 *              non-zero failure code
11085 *==========================================================================*/
11086int QCamera3HardwareInterface::getCamInfo(uint32_t cameraId,
11087 struct camera_info *info)
11088{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080011089 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_GET_CAM_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -070011090 int rc = 0;
11091
11092 pthread_mutex_lock(&gCamLock);
Zhijun Hea557c4c2017-03-16 18:37:53 -070011093
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070011094 {
Chien-Yu Chen77ccd022017-06-23 12:00:36 -070011095 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070011096 rc = initHdrPlusClientLocked();
11097 if (rc != OK) {
11098 ALOGE("%s: initHdrPlusClientLocked failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
11099 pthread_mutex_unlock(&gCamLock);
11100 return rc;
11101 }
Zhijun Hea557c4c2017-03-16 18:37:53 -070011102 }
11103
Thierry Strudel3d639192016-09-09 11:52:26 -070011104 if (NULL == gCamCapability[cameraId]) {
11105 rc = initCapabilities(cameraId);
11106 if (rc < 0) {
11107 pthread_mutex_unlock(&gCamLock);
11108 return rc;
11109 }
11110 }
11111
11112 if (NULL == gStaticMetadata[cameraId]) {
11113 rc = initStaticMetadata(cameraId);
11114 if (rc < 0) {
11115 pthread_mutex_unlock(&gCamLock);
11116 return rc;
11117 }
11118 }
11119
11120 switch(gCamCapability[cameraId]->position) {
11121 case CAM_POSITION_BACK:
11122 case CAM_POSITION_BACK_AUX:
11123 info->facing = CAMERA_FACING_BACK;
11124 break;
11125
11126 case CAM_POSITION_FRONT:
11127 case CAM_POSITION_FRONT_AUX:
11128 info->facing = CAMERA_FACING_FRONT;
11129 break;
11130
11131 default:
11132 LOGE("Unknown position type %d for camera id:%d",
11133 gCamCapability[cameraId]->position, cameraId);
11134 rc = -1;
11135 break;
11136 }
11137
11138
11139 info->orientation = (int)gCamCapability[cameraId]->sensor_mount_angle;
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011140#ifndef USE_HAL_3_3
11141 info->device_version = CAMERA_DEVICE_API_VERSION_3_4;
11142#else
Thierry Strudel3d639192016-09-09 11:52:26 -070011143 info->device_version = CAMERA_DEVICE_API_VERSION_3_3;
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011144#endif
Thierry Strudel3d639192016-09-09 11:52:26 -070011145 info->static_camera_characteristics = gStaticMetadata[cameraId];
11146
11147 //For now assume both cameras can operate independently.
11148 info->conflicting_devices = NULL;
11149 info->conflicting_devices_length = 0;
11150
11151 //resource cost is 100 * MIN(1.0, m/M),
11152 //where m is throughput requirement with maximum stream configuration
11153 //and M is CPP maximum throughput.
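    // Hypothetical example: if the heaviest stream configuration needs 600 MP/s and the CPP
    // can sustain 1200 MP/s, then m/M = 0.5 and the reported resource cost is 50.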
11154 float max_fps = 0.0;
11155 for (uint32_t i = 0;
11156 i < gCamCapability[cameraId]->fps_ranges_tbl_cnt; i++) {
11157 if (max_fps < gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps)
11158 max_fps = gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps;
11159 }
11160 float ratio = 1.0 * MAX_PROCESSED_STREAMS *
11161 gCamCapability[cameraId]->active_array_size.width *
11162 gCamCapability[cameraId]->active_array_size.height * max_fps /
11163 gCamCapability[cameraId]->max_pixel_bandwidth;
11164 info->resource_cost = 100 * MIN(1.0, ratio);
11165 LOGI("camera %d resource cost is %d", cameraId,
11166 info->resource_cost);
11167
11168 pthread_mutex_unlock(&gCamLock);
11169 return rc;
11170}
11171
11172/*===========================================================================
11173 * FUNCTION : translateCapabilityToMetadata
11174 *
11175 * DESCRIPTION: translate the capability into camera_metadata_t
11176 *
11177 * PARAMETERS : type of the request
11178 *
11179 *
11180 * RETURN : success: camera_metadata_t*
11181 * failure: NULL
11182 *
11183 *==========================================================================*/
11184camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type)
11185{
11186 if (mDefaultMetadata[type] != NULL) {
11187 return mDefaultMetadata[type];
11188 }
11189 //first time we are handling this request
11190 //fill up the metadata structure using the wrapper class
11191 CameraMetadata settings;
11192 //translate from cam_capability_t to camera_metadata_tag_t
11193 static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
11194 settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
11195 int32_t defaultRequestID = 0;
11196 settings.update(ANDROID_REQUEST_ID, &defaultRequestID, 1);
11197
11198 /* OIS disable */
11199 char ois_prop[PROPERTY_VALUE_MAX];
11200 memset(ois_prop, 0, sizeof(ois_prop));
11201 property_get("persist.camera.ois.disable", ois_prop, "0");
11202 uint8_t ois_disable = (uint8_t)atoi(ois_prop);
11203
11204 /* Force video to use OIS */
11205 char videoOisProp[PROPERTY_VALUE_MAX];
11206 memset(videoOisProp, 0, sizeof(videoOisProp));
11207 property_get("persist.camera.ois.video", videoOisProp, "1");
11208 uint8_t forceVideoOis = (uint8_t)atoi(videoOisProp);
Shuzhen Wang19463d72016-03-08 11:09:52 -080011209
11210 // Hybrid AE enable/disable
11211 char hybrid_ae_prop[PROPERTY_VALUE_MAX];
11212 memset(hybrid_ae_prop, 0, sizeof(hybrid_ae_prop));
11213 property_get("persist.camera.hybrid_ae.enable", hybrid_ae_prop, "0");
Shuzhen Wang77b049a2017-08-30 12:24:36 -070011214 uint8_t hybrid_ae = (uint8_t)atoi(hybrid_ae_prop);
Shuzhen Wang19463d72016-03-08 11:09:52 -080011215
Thierry Strudel3d639192016-09-09 11:52:26 -070011216 uint8_t controlIntent = 0;
11217 uint8_t focusMode;
11218 uint8_t vsMode;
11219 uint8_t optStabMode;
11220 uint8_t cacMode;
11221 uint8_t edge_mode;
11222 uint8_t noise_red_mode;
11223 uint8_t tonemap_mode;
11224 bool highQualityModeEntryAvailable = FALSE;
11225 bool fastModeEntryAvailable = FALSE;
Shuzhen Wang14415f52016-11-16 18:26:18 -080011226 uint8_t histogramEnable = false;
Thierry Strudel3d639192016-09-09 11:52:26 -070011227 vsMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
11228 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Shuzhen Wang8f66c042016-08-17 14:50:26 -070011229 uint8_t shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
Shuzhen Wangcc386c52017-03-29 09:28:08 -070011230 uint8_t trackingAfTrigger = NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER_IDLE;
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070011231 uint8_t enableZsl = ANDROID_CONTROL_ENABLE_ZSL_FALSE;
Mansoor Aftabea39eba2017-01-26 14:58:25 -080011232
Thierry Strudel3d639192016-09-09 11:52:26 -070011233 switch (type) {
11234 case CAMERA3_TEMPLATE_PREVIEW:
11235 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
11236 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
11237 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11238 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11239 edge_mode = ANDROID_EDGE_MODE_FAST;
11240 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11241 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11242 break;
11243 case CAMERA3_TEMPLATE_STILL_CAPTURE:
11244 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
11245 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
11246 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11247 edge_mode = ANDROID_EDGE_MODE_HIGH_QUALITY;
11248 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY;
11249 tonemap_mode = ANDROID_TONEMAP_MODE_HIGH_QUALITY;
11250 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
11251        // Order of priority for default CAC is HIGH_QUALITY -> FAST -> OFF
11252 for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
11253 if (gCamCapability[mCameraId]->aberration_modes[i] ==
11254 CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
11255 highQualityModeEntryAvailable = TRUE;
11256 } else if (gCamCapability[mCameraId]->aberration_modes[i] ==
11257 CAM_COLOR_CORRECTION_ABERRATION_FAST) {
11258 fastModeEntryAvailable = TRUE;
11259 }
11260 }
11261 if (highQualityModeEntryAvailable) {
11262 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY;
11263 } else if (fastModeEntryAvailable) {
11264 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11265 }
Shuzhen Wang8f66c042016-08-17 14:50:26 -070011266 if (CAM_SENSOR_RAW == gCamCapability[mCameraId]->sensor_type.sens_type) {
11267 shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON;
11268 }
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070011269 enableZsl = ANDROID_CONTROL_ENABLE_ZSL_TRUE;
Thierry Strudel3d639192016-09-09 11:52:26 -070011270 break;
11271 case CAMERA3_TEMPLATE_VIDEO_RECORD:
11272 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
11273 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
11274 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Thierry Strudel3d639192016-09-09 11:52:26 -070011275 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11276 edge_mode = ANDROID_EDGE_MODE_FAST;
11277 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11278 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11279 if (forceVideoOis)
11280 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11281 break;
11282 case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
11283 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
11284 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
11285 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Thierry Strudel3d639192016-09-09 11:52:26 -070011286 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11287 edge_mode = ANDROID_EDGE_MODE_FAST;
11288 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11289 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11290 if (forceVideoOis)
11291 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11292 break;
11293 case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
11294 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
11295 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
11296 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11297 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11298 edge_mode = ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG;
11299 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG;
11300 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11301 break;
11302 case CAMERA3_TEMPLATE_MANUAL:
11303 edge_mode = ANDROID_EDGE_MODE_FAST;
11304 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11305 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11306 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11307 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_MANUAL;
11308 focusMode = ANDROID_CONTROL_AF_MODE_OFF;
11309 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
11310 break;
11311 default:
11312 edge_mode = ANDROID_EDGE_MODE_FAST;
11313 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11314 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11315 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11316 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
11317 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
11318 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
11319 break;
11320 }
Thierry Strudel04e026f2016-10-10 11:27:36 -070011321    // Set CAC to OFF if the underlying device doesn't support it
11322 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
11323 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
11324 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011325 settings.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &cacMode, 1);
11326 settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
11327 settings.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vsMode, 1);
11328 if (gCamCapability[mCameraId]->supported_focus_modes_cnt == 1) {
11329 focusMode = ANDROID_CONTROL_AF_MODE_OFF;
11330 }
11331 settings.update(ANDROID_CONTROL_AF_MODE, &focusMode, 1);
Shuzhen Wang14415f52016-11-16 18:26:18 -080011332 settings.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE, &histogramEnable, 1);
Shuzhen Wangcc386c52017-03-29 09:28:08 -070011333 settings.update(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER, &trackingAfTrigger, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011334
11335 if (gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
11336 gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_ON)
11337 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11338 else if ((gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
11339 gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_OFF)
11340 || ois_disable)
11341 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
11342 settings.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &optStabMode, 1);
Shuzhen Wang8f66c042016-08-17 14:50:26 -070011343 settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &shadingmap_mode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011344
11345 settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
11346 &gCamCapability[mCameraId]->exposure_compensation_default, 1);
11347
11348 static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
11349 settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
11350
11351 static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
11352 settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
11353
11354 static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
11355 settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
11356
11357 static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
11358 settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
11359
11360 static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
11361 settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
11362
11363 static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;
11364 settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
11365
11366 static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
11367 settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
11368
11369 /*flash*/
11370 static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
11371 settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
11372
11373 static const uint8_t flashFiringLevel = CAM_FLASH_FIRING_LEVEL_4;
11374 settings.update(ANDROID_FLASH_FIRING_POWER,
11375 &flashFiringLevel, 1);
11376
11377 /* lens */
11378 float default_aperture = gCamCapability[mCameraId]->apertures[0];
11379 settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1);
11380
11381 if (gCamCapability[mCameraId]->filter_densities_count) {
11382 float default_filter_density = gCamCapability[mCameraId]->filter_densities[0];
11383 settings.update(ANDROID_LENS_FILTER_DENSITY, &default_filter_density,
11384 gCamCapability[mCameraId]->filter_densities_count);
11385 }
11386
11387 float default_focal_length = gCamCapability[mCameraId]->focal_length;
11388 settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1);
11389
Thierry Strudel3d639192016-09-09 11:52:26 -070011390 static const uint8_t demosaicMode = ANDROID_DEMOSAIC_MODE_FAST;
11391 settings.update(ANDROID_DEMOSAIC_MODE, &demosaicMode, 1);
11392
11393 static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
11394 settings.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
11395
11396 static const int32_t testpatternMode = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;
11397 settings.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &testpatternMode, 1);
11398
11399 /* face detection (default to OFF) */
11400 static const uint8_t faceDetectMode = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
11401 settings.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &faceDetectMode, 1);
11402
Thierry Strudel54dc9782017-02-15 12:12:10 -080011403 static const uint8_t histogramMode = QCAMERA3_HISTOGRAM_MODE_OFF;
11404 settings.update(QCAMERA3_HISTOGRAM_MODE, &histogramMode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011405
11406 static const uint8_t sharpnessMapMode = ANDROID_STATISTICS_SHARPNESS_MAP_MODE_OFF;
11407 settings.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &sharpnessMapMode, 1);
11408
11409 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
11410 settings.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
11411
Thierry Strudel3d639192016-09-09 11:52:26 -070011412
11413 static const uint8_t blackLevelLock = ANDROID_BLACK_LEVEL_LOCK_OFF;
11414 settings.update(ANDROID_BLACK_LEVEL_LOCK, &blackLevelLock, 1);
11415
11416    /* Exposure time (Update the Min Exposure Time) */
11417 int64_t default_exposure_time = gCamCapability[mCameraId]->exposure_time_range[0];
11418 settings.update(ANDROID_SENSOR_EXPOSURE_TIME, &default_exposure_time, 1);
11419
11420 /* frame duration */
11421 static const int64_t default_frame_duration = NSEC_PER_33MSEC;
11422 settings.update(ANDROID_SENSOR_FRAME_DURATION, &default_frame_duration, 1);
11423
11424 /* sensitivity */
11425 static const int32_t default_sensitivity = 100;
11426 settings.update(ANDROID_SENSOR_SENSITIVITY, &default_sensitivity, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011427#ifndef USE_HAL_3_3
11428 static const int32_t default_isp_sensitivity =
11429 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
11430 settings.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &default_isp_sensitivity, 1);
11431#endif
Thierry Strudel3d639192016-09-09 11:52:26 -070011432
11433 /*edge mode*/
11434 settings.update(ANDROID_EDGE_MODE, &edge_mode, 1);
11435
11436 /*noise reduction mode*/
11437 settings.update(ANDROID_NOISE_REDUCTION_MODE, &noise_red_mode, 1);
11438
11439 /*color correction mode*/
11440 static const uint8_t color_correct_mode = ANDROID_COLOR_CORRECTION_MODE_FAST;
11441 settings.update(ANDROID_COLOR_CORRECTION_MODE, &color_correct_mode, 1);
11442
11443 /*tonemap mode*/
11444 settings.update(ANDROID_TONEMAP_MODE, &tonemap_mode, 1);
11445
11446 int32_t scaler_crop_region[4];
11447 scaler_crop_region[0] = 0;
11448 scaler_crop_region[1] = 0;
11449 scaler_crop_region[2] = gCamCapability[mCameraId]->active_array_size.width;
11450 scaler_crop_region[3] = gCamCapability[mCameraId]->active_array_size.height;
11451 settings.update(ANDROID_SCALER_CROP_REGION, scaler_crop_region, 4);
11452
11453 static const uint8_t antibanding_mode = ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
11454 settings.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &antibanding_mode, 1);
11455
11456 /*focus distance*/
11457 float focus_distance = 0.0;
11458 settings.update(ANDROID_LENS_FOCUS_DISTANCE, &focus_distance, 1);
11459
11460 /*target fps range: use maximum range for picture, and maximum fixed range for video*/
Thierry Strudele80ad7c2016-12-06 10:16:27 -080011461 /* Restrict template max_fps to 30 */
Thierry Strudel3d639192016-09-09 11:52:26 -070011462 float max_range = 0.0;
11463 float max_fixed_fps = 0.0;
11464 int32_t fps_range[2] = {0, 0};
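// For the preview, still-capture and ZSL templates pick the widest supported fps
// range (largest max_fps - min_fps); for the remaining (recording) templates pick
// the highest fixed range (min_fps == max_fps). Ranges whose max_fps exceeds
// TEMPLATE_MAX_PREVIEW_FPS are skipped.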
11465 for (uint32_t i = 0; i < gCamCapability[mCameraId]->fps_ranges_tbl_cnt;
11466 i++) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -080011467 if (gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps >
11468 TEMPLATE_MAX_PREVIEW_FPS) {
11469 continue;
11470 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011471 float range = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps -
11472 gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
11473 if (type == CAMERA3_TEMPLATE_PREVIEW ||
11474 type == CAMERA3_TEMPLATE_STILL_CAPTURE ||
11475 type == CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG) {
11476 if (range > max_range) {
11477 fps_range[0] =
11478 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
11479 fps_range[1] =
11480 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
11481 max_range = range;
11482 }
11483 } else {
11484 if (range < 0.01 && max_fixed_fps <
11485 gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps) {
11486 fps_range[0] =
11487 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
11488 fps_range[1] =
11489 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
11490 max_fixed_fps = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
11491 }
11492 }
11493 }
11494 settings.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, fps_range, 2);
11495
11496 /*precapture trigger*/
11497 uint8_t precapture_trigger = ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE;
11498 settings.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, &precapture_trigger, 1);
11499
11500 /*af trigger*/
11501 uint8_t af_trigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
11502 settings.update(ANDROID_CONTROL_AF_TRIGGER, &af_trigger, 1);
11503
11504 /* ae & af regions */
11505 int32_t active_region[] = {
11506 gCamCapability[mCameraId]->active_array_size.left,
11507 gCamCapability[mCameraId]->active_array_size.top,
11508 gCamCapability[mCameraId]->active_array_size.left +
11509 gCamCapability[mCameraId]->active_array_size.width,
11510 gCamCapability[mCameraId]->active_array_size.top +
11511 gCamCapability[mCameraId]->active_array_size.height,
11512 0};
11513 settings.update(ANDROID_CONTROL_AE_REGIONS, active_region,
11514 sizeof(active_region) / sizeof(active_region[0]));
11515 settings.update(ANDROID_CONTROL_AF_REGIONS, active_region,
11516 sizeof(active_region) / sizeof(active_region[0]));
11517
11518 /* black level lock */
11519 uint8_t blacklevel_lock = ANDROID_BLACK_LEVEL_LOCK_OFF;
11520 settings.update(ANDROID_BLACK_LEVEL_LOCK, &blacklevel_lock, 1);
11521
Thierry Strudel3d639192016-09-09 11:52:26 -070011522 //special defaults for manual template
11523 if (type == CAMERA3_TEMPLATE_MANUAL) {
11524 static const uint8_t manualControlMode = ANDROID_CONTROL_MODE_OFF;
11525 settings.update(ANDROID_CONTROL_MODE, &manualControlMode, 1);
11526
11527 static const uint8_t manualFocusMode = ANDROID_CONTROL_AF_MODE_OFF;
11528 settings.update(ANDROID_CONTROL_AF_MODE, &manualFocusMode, 1);
11529
11530 static const uint8_t manualAeMode = ANDROID_CONTROL_AE_MODE_OFF;
11531 settings.update(ANDROID_CONTROL_AE_MODE, &manualAeMode, 1);
11532
11533 static const uint8_t manualAwbMode = ANDROID_CONTROL_AWB_MODE_OFF;
11534 settings.update(ANDROID_CONTROL_AWB_MODE, &manualAwbMode, 1);
11535
11536 static const uint8_t manualTonemapMode = ANDROID_TONEMAP_MODE_FAST;
11537 settings.update(ANDROID_TONEMAP_MODE, &manualTonemapMode, 1);
11538
11539 static const uint8_t manualColorCorrectMode = ANDROID_COLOR_CORRECTION_MODE_TRANSFORM_MATRIX;
11540 settings.update(ANDROID_COLOR_CORRECTION_MODE, &manualColorCorrectMode, 1);
11541 }
11542
11543
11544 /* TNR
11545 * We use this location to determine for which templates TNR will be enabled.
11546 * TNR is turned on if either the preview or the video stream requires it.
11547 * This is not to be confused with per-stream linking; that decision is still
11548 * made per session and will be handled as part of stream configuration.
11549 */
11550 uint8_t tnr_enable = 0;
11551
11552 if (m_bTnrPreview || m_bTnrVideo) {
11553
11554 switch (type) {
11555 case CAMERA3_TEMPLATE_VIDEO_RECORD:
11556 tnr_enable = 1;
11557 break;
11558
11559 default:
11560 tnr_enable = 0;
11561 break;
11562 }
11563
11564 int32_t tnr_process_type = (int32_t)getTemporalDenoiseProcessPlate();
11565 settings.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
11566 settings.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
11567
11568 LOGD("TNR:%d with process plate %d for template:%d",
11569 tnr_enable, tnr_process_type, type);
11570 }
11571
11572 //Update Link tags to default
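// These defaults describe an unlinked (standalone) camera: no dual-cam sync,
// this camera treated as the main sensor, and the related camera id pointing
// back to itself.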
Shuzhen Wang920ea402017-05-03 08:49:39 -070011573 uint8_t sync_type = CAM_TYPE_STANDALONE;
Thierry Strudel3d639192016-09-09 11:52:26 -070011574 settings.update(QCAMERA3_DUALCAM_LINK_ENABLE, &sync_type, 1);
11575
Chien-Yu Chena3bbdc02017-05-05 11:31:47 -070011576 uint8_t is_main = 1;
Thierry Strudel3d639192016-09-09 11:52:26 -070011577 settings.update(QCAMERA3_DUALCAM_LINK_IS_MAIN, &is_main, 1);
11578
Shuzhen Wang920ea402017-05-03 08:49:39 -070011579 uint8_t related_camera_id = mCameraId;
11580 settings.update(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID, &related_camera_id, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011581
11582 /* CDS default */
11583 char prop[PROPERTY_VALUE_MAX];
11584 memset(prop, 0, sizeof(prop));
11585 property_get("persist.camera.CDS", prop, "Auto");
11586 cam_cds_mode_type_t cds_mode = CAM_CDS_MODE_AUTO;
11587 cds_mode = lookupProp(CDS_MAP, METADATA_MAP_SIZE(CDS_MAP), prop);
11588 if (CAM_CDS_MODE_MAX == cds_mode) {
11589 cds_mode = CAM_CDS_MODE_AUTO;
11590 }
11591
11592 /* Disabling CDS in templates which have TNR enabled*/
11593 if (tnr_enable)
11594 cds_mode = CAM_CDS_MODE_OFF;
11595
11596 int32_t mode = cds_mode;
11597 settings.update(QCAMERA3_CDS_MODE, &mode, 1);
Thierry Strudel04e026f2016-10-10 11:27:36 -070011598
Thierry Strudel269c81a2016-10-12 12:13:59 -070011599 /* Manual Convergence AEC Speed is disabled by default*/
11600 float default_aec_speed = 0;
11601 settings.update(QCAMERA3_AEC_CONVERGENCE_SPEED, &default_aec_speed, 1);
11602
11603 /* Manual Convergence AWB Speed is disabled by default*/
11604 float default_awb_speed = 0;
11605 settings.update(QCAMERA3_AWB_CONVERGENCE_SPEED, &default_awb_speed, 1);
11606
Thierry Strudel295a0ca2016-11-03 18:38:47 -070011607 // Set instant AEC to normal convergence by default
11608 int32_t instant_aec_mode = (int32_t)QCAMERA3_INSTANT_AEC_NORMAL_CONVERGENCE;
11609 settings.update(QCAMERA3_INSTANT_AEC_MODE, &instant_aec_mode, 1);
11610
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070011611 if (gExposeEnableZslKey) {
11612 settings.update(ANDROID_CONTROL_ENABLE_ZSL, &enableZsl, 1);
Chien-Yu Chen0a921f92017-08-27 17:25:33 -070011613 int32_t postview = 0;
11614 settings.update(NEXUS_EXPERIMENTAL_2017_POSTVIEW, &postview, 1);
Chien-Yu Chenb0981e32017-08-28 19:27:35 -070011615 int32_t continuousZslCapture = 0;
11616 settings.update(NEXUS_EXPERIMENTAL_2017_CONTINUOUS_ZSL_CAPTURE, &continuousZslCapture, 1);
Chien-Yu Chenfadf40e2017-09-15 14:33:57 -070011617 // Disable HDR+ for templates other than CAMERA3_TEMPLATE_STILL_CAPTURE and
11618 // CAMERA3_TEMPLATE_PREVIEW.
11619 int32_t disableHdrplus = (type == CAMERA3_TEMPLATE_STILL_CAPTURE ||
11620 type == CAMERA3_TEMPLATE_PREVIEW) ? 0 : 1;
Chien-Yu Chenec328c82017-08-30 16:41:08 -070011621 settings.update(NEXUS_EXPERIMENTAL_2017_DISABLE_HDRPLUS, &disableHdrplus, 1);
11622
Shuzhen Wang77b049a2017-08-30 12:24:36 -070011623 // Set hybrid_ae tag in PREVIEW and STILL_CAPTURE templates to 1 so that
11624 // hybrid ae is enabled for 3rd party app HDR+.
11625 if (type == CAMERA3_TEMPLATE_PREVIEW ||
11626 type == CAMERA3_TEMPLATE_STILL_CAPTURE) {
11627 hybrid_ae = 1;
11628 }
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070011629 }
Shuzhen Wang77b049a2017-08-30 12:24:36 -070011630 /* hybrid ae */
11631 settings.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &hybrid_ae, 1);
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070011632
Thierry Strudel3d639192016-09-09 11:52:26 -070011633 mDefaultMetadata[type] = settings.release();
11634
11635 return mDefaultMetadata[type];
11636}
11637
11638/*===========================================================================
Emilian Peev30522a12017-08-03 14:36:33 +010011639 * FUNCTION : getExpectedFrameDuration
11640 *
11641 * DESCRIPTION: Extract the maximum frame duration from either exposure or frame
11642 * duration
11643 *
11644 * PARAMETERS :
11645 * @request : request settings
11646 * @frameDuration : The maximum frame duration in nanoseconds
11647 *
11648 * RETURN : None
11649 *==========================================================================*/
11650void QCamera3HardwareInterface::getExpectedFrameDuration(
11651 const camera_metadata_t *request, nsecs_t *frameDuration /*out*/) {
11652 if (nullptr == frameDuration) {
11653 return;
11654 }
11655
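    // The expected duration is the larger of the requested exposure time and the
    // requested frame duration, for whichever of the two entries are present.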
11656 camera_metadata_ro_entry_t e = camera_metadata_ro_entry_t();
11657 find_camera_metadata_ro_entry(request,
11658 ANDROID_SENSOR_EXPOSURE_TIME,
11659 &e);
11660 if (e.count > 0) {
11661 *frameDuration = e.data.i64[0];
11662 }
11663 find_camera_metadata_ro_entry(request,
11664 ANDROID_SENSOR_FRAME_DURATION,
11665 &e);
11666 if (e.count > 0) {
11667 *frameDuration = std::max(e.data.i64[0], *frameDuration);
11668 }
11669}
11670
11671/*===========================================================================
11672 * FUNCTION : calculateMaxExpectedDuration
11673 *
11674 * DESCRIPTION: Calculate the expected frame duration in nanoseconds given the
11675 * current camera settings.
11676 *
11677 * PARAMETERS :
11678 * @request : request settings
11679 *
11680 * RETURN : Expected frame duration in nanoseconds.
11681 *==========================================================================*/
11682nsecs_t QCamera3HardwareInterface::calculateMaxExpectedDuration(
11683 const camera_metadata_t *request) {
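    // Start from the default expected duration. Only fully manual paths
    // (ANDROID_CONTROL_MODE_OFF, or MODE_AUTO with AE_MODE_OFF) derive the duration
    // from the requested exposure time / frame duration; in MODE_AUTO with AE
    // enabled it is derived from the AE target fps range instead.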
11684 nsecs_t maxExpectedDuration = kDefaultExpectedDuration;
11685 camera_metadata_ro_entry_t e = camera_metadata_ro_entry_t();
11686 find_camera_metadata_ro_entry(request, ANDROID_CONTROL_MODE, &e);
11687 if (e.count == 0) {
11688 return maxExpectedDuration;
11689 }
11690
11691 if (e.data.u8[0] == ANDROID_CONTROL_MODE_OFF) {
11692 getExpectedFrameDuration(request, &maxExpectedDuration /*out*/);
11693 }
11694
11695 if (e.data.u8[0] != ANDROID_CONTROL_MODE_AUTO) {
11696 return maxExpectedDuration;
11697 }
11698
11699 find_camera_metadata_ro_entry(request, ANDROID_CONTROL_AE_MODE, &e);
11700 if (e.count == 0) {
11701 return maxExpectedDuration;
11702 }
11703
11704 switch (e.data.u8[0]) {
11705 case ANDROID_CONTROL_AE_MODE_OFF:
11706 getExpectedFrameDuration(request, &maxExpectedDuration /*out*/);
11707 break;
11708 default:
11709 find_camera_metadata_ro_entry(request,
11710 ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
11711 &e);
11712 if (e.count > 1) {
11713 maxExpectedDuration = 1e9 / e.data.i32[0]; // fps range entries are int32; index 0 is the min fps
11714 }
11715 break;
11716 }
11717
11718 return maxExpectedDuration;
11719}
11720
11721/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070011722 * FUNCTION : setFrameParameters
11723 *
11724 * DESCRIPTION: set parameters per frame as requested in the metadata from
11725 * framework
11726 *
11727 * PARAMETERS :
11728 * @request : request that needs to be serviced
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011729 * @streamsArray : Stream IDs of all the requested streams
Thierry Strudel3d639192016-09-09 11:52:26 -070011730 * @blob_request: Whether this request is a blob request or not
11731 *
11732 * RETURN : success: NO_ERROR
11733 * failure:
11734 *==========================================================================*/
11735int QCamera3HardwareInterface::setFrameParameters(
11736 camera3_capture_request_t *request,
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011737 cam_stream_ID_t streamsArray,
Thierry Strudel3d639192016-09-09 11:52:26 -070011738 int blob_request,
11739 uint32_t snapshotStreamId)
11740{
11741 /*translate from camera_metadata_t type to parm_type_t*/
11742 int rc = 0;
11743 int32_t hal_version = CAM_HAL_V3;
11744
11745 clear_metadata_buffer(mParameters);
11746 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version)) {
11747 LOGE("Failed to set hal version in the parameters");
11748 return BAD_VALUE;
11749 }
11750
11751 /*we need to update the frame number in the parameters*/
11752 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_FRAME_NUMBER,
11753 request->frame_number)) {
11754 LOGE("Failed to set the frame number in the parameters");
11755 return BAD_VALUE;
11756 }
11757
11758 /* Update stream id of all the requested buffers */
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011759 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID, streamsArray)) {
Thierry Strudel3d639192016-09-09 11:52:26 -070011760 LOGE("Failed to set stream type mask in the parameters");
11761 return BAD_VALUE;
11762 }
11763
11764 if (mUpdateDebugLevel) {
11765 uint32_t dummyDebugLevel = 0;
11766 /* The value of dummyDebugLevel is irrelevant. On
11767 * CAM_INTF_PARM_UPDATE_DEBUG_LEVEL the debug property is read. */
11768 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_UPDATE_DEBUG_LEVEL,
11769 dummyDebugLevel)) {
11770 LOGE("Failed to set UPDATE_DEBUG_LEVEL");
11771 return BAD_VALUE;
11772 }
11773 mUpdateDebugLevel = false;
11774 }
11775
11776 if(request->settings != NULL){
Emilian Peev30522a12017-08-03 14:36:33 +010011777 mExpectedFrameDuration = calculateMaxExpectedDuration(request->settings);
Thierry Strudel3d639192016-09-09 11:52:26 -070011778 rc = translateToHalMetadata(request, mParameters, snapshotStreamId);
11779 if (blob_request)
11780 memcpy(mPrevParameters, mParameters, sizeof(metadata_buffer_t));
11781 }
11782
11783 return rc;
11784}
11785
11786/*===========================================================================
11787 * FUNCTION : setReprocParameters
11788 *
11789 * DESCRIPTION: Translate frameworks metadata to HAL metadata structure, and
11790 * return it.
11791 *
11792 * PARAMETERS :
11793 * @request : request that needs to be serviced
11794 *
11795 * RETURN : success: NO_ERROR
11796 * failure:
11797 *==========================================================================*/
11798int32_t QCamera3HardwareInterface::setReprocParameters(
11799 camera3_capture_request_t *request, metadata_buffer_t *reprocParam,
11800 uint32_t snapshotStreamId)
11801{
11802 /*translate from camera_metadata_t type to parm_type_t*/
11803 int rc = 0;
11804
11805 if (NULL == request->settings){
11806 LOGE("Reprocess settings cannot be NULL");
11807 return BAD_VALUE;
11808 }
11809
11810 if (NULL == reprocParam) {
11811 LOGE("Invalid reprocessing metadata buffer");
11812 return BAD_VALUE;
11813 }
11814 clear_metadata_buffer(reprocParam);
11815
11816 /*we need to update the frame number in the parameters*/
11817 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FRAME_NUMBER,
11818 request->frame_number)) {
11819 LOGE("Failed to set the frame number in the parameters");
11820 return BAD_VALUE;
11821 }
11822
11823 rc = translateToHalMetadata(request, reprocParam, snapshotStreamId);
11824 if (rc < 0) {
11825 LOGE("Failed to translate reproc request");
11826 return rc;
11827 }
11828
11829 CameraMetadata frame_settings;
11830 frame_settings = request->settings;
11831 if (frame_settings.exists(QCAMERA3_CROP_COUNT_REPROCESS) &&
11832 frame_settings.exists(QCAMERA3_CROP_REPROCESS)) {
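        // Reprocess requests carry the crop applied on the matching output stream
        // plus an ROI map; only a single stream's crop entry is forwarded to the
        // backend here.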
11833 int32_t *crop_count =
11834 frame_settings.find(QCAMERA3_CROP_COUNT_REPROCESS).data.i32;
11835 int32_t *crop_data =
11836 frame_settings.find(QCAMERA3_CROP_REPROCESS).data.i32;
11837 int32_t *roi_map =
11838 frame_settings.find(QCAMERA3_CROP_ROI_MAP_REPROCESS).data.i32;
11839 if ((0 < *crop_count) && (*crop_count < MAX_NUM_STREAMS)) {
11840 cam_crop_data_t crop_meta;
11841 memset(&crop_meta, 0, sizeof(cam_crop_data_t));
11842 crop_meta.num_of_streams = 1;
11843 crop_meta.crop_info[0].crop.left = crop_data[0];
11844 crop_meta.crop_info[0].crop.top = crop_data[1];
11845 crop_meta.crop_info[0].crop.width = crop_data[2];
11846 crop_meta.crop_info[0].crop.height = crop_data[3];
11847
11848 crop_meta.crop_info[0].roi_map.left =
11849 roi_map[0];
11850 crop_meta.crop_info[0].roi_map.top =
11851 roi_map[1];
11852 crop_meta.crop_info[0].roi_map.width =
11853 roi_map[2];
11854 crop_meta.crop_info[0].roi_map.height =
11855 roi_map[3];
11856
11857 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_CROP_DATA, crop_meta)) {
11858 rc = BAD_VALUE;
11859 }
11860 LOGD("Found reprocess crop data for stream %p %dx%d, %dx%d",
11861 request->input_buffer->stream,
11862 crop_meta.crop_info[0].crop.left,
11863 crop_meta.crop_info[0].crop.top,
11864 crop_meta.crop_info[0].crop.width,
11865 crop_meta.crop_info[0].crop.height);
11866 LOGD("Found reprocess roi map data for stream %p %dx%d, %dx%d",
11867 request->input_buffer->stream,
11868 crop_meta.crop_info[0].roi_map.left,
11869 crop_meta.crop_info[0].roi_map.top,
11870 crop_meta.crop_info[0].roi_map.width,
11871 crop_meta.crop_info[0].roi_map.height);
11872 } else {
11873 LOGE("Invalid reprocess crop count %d!", *crop_count);
11874 }
11875 } else {
11876 LOGE("No crop data from matching output stream");
11877 }
11878
11879 /* These settings are not needed for regular requests so handle them specially for
11880 reprocess requests; information needed for EXIF tags */
11881 if (frame_settings.exists(ANDROID_FLASH_MODE)) {
11882 int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
11883 (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
11884 if (NAME_NOT_FOUND != val) {
11885 uint32_t flashMode = (uint32_t)val;
11886 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_MODE, flashMode)) {
11887 rc = BAD_VALUE;
11888 }
11889 } else {
11890 LOGE("Could not map fwk flash mode %d to correct hal flash mode",
11891 frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
11892 }
11893 } else {
11894 LOGH("No flash mode in reprocess settings");
11895 }
11896
11897 if (frame_settings.exists(ANDROID_FLASH_STATE)) {
11898 int32_t flashState = (int32_t)frame_settings.find(ANDROID_FLASH_STATE).data.u8[0];
11899 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_STATE, flashState)) {
11900 rc = BAD_VALUE;
11901 }
11902 } else {
11903 LOGH("No flash state in reprocess settings");
11904 }
11905
11906 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS)) {
11907 uint8_t *reprocessFlags =
11908 frame_settings.find(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS).data.u8;
11909 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_REPROCESS_FLAGS,
11910 *reprocessFlags)) {
11911 rc = BAD_VALUE;
11912 }
11913 }
11914
Thierry Strudel54dc9782017-02-15 12:12:10 -080011915 // Add exif debug data to internal metadata
11916 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB)) {
11917 mm_jpeg_debug_exif_params_t *debug_params =
11918 (mm_jpeg_debug_exif_params_t *)frame_settings.find
11919 (QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB).data.u8;
11920 // AE
11921 if (debug_params->ae_debug_params_valid == TRUE) {
11922 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AE,
11923 debug_params->ae_debug_params);
11924 }
11925 // AWB
11926 if (debug_params->awb_debug_params_valid == TRUE) {
11927 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AWB,
11928 debug_params->awb_debug_params);
11929 }
11930 // AF
11931 if (debug_params->af_debug_params_valid == TRUE) {
11932 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AF,
11933 debug_params->af_debug_params);
11934 }
11935 // ASD
11936 if (debug_params->asd_debug_params_valid == TRUE) {
11937 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_ASD,
11938 debug_params->asd_debug_params);
11939 }
11940 // Stats
11941 if (debug_params->stats_debug_params_valid == TRUE) {
11942 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_STATS,
11943 debug_params->stats_debug_params);
11944 }
11945 // BE Stats
11946 if (debug_params->bestats_debug_params_valid == TRUE) {
11947 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_BESTATS,
11948 debug_params->bestats_debug_params);
11949 }
11950 // BHIST
11951 if (debug_params->bhist_debug_params_valid == TRUE) {
11952 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_BHIST,
11953 debug_params->bhist_debug_params);
11954 }
11955 // 3A Tuning
11956 if (debug_params->q3a_tuning_debug_params_valid == TRUE) {
11957 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_3A_TUNING,
11958 debug_params->q3a_tuning_debug_params);
11959 }
11960 }
11961
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011962 // Add metadata which reprocess needs
11963 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB)) {
11964 cam_reprocess_info_t *repro_info =
11965 (cam_reprocess_info_t *)frame_settings.find
11966 (QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB).data.u8;
Thierry Strudel3d639192016-09-09 11:52:26 -070011967 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_SENSOR,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011968 repro_info->sensor_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011969 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CAMIF,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011970 repro_info->camif_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011971 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_ISP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011972 repro_info->isp_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011973 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CPP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011974 repro_info->cpp_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011975 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_AF_FOCAL_LENGTH_RATIO,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011976 repro_info->af_focal_length_ratio);
Thierry Strudel3d639192016-09-09 11:52:26 -070011977 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_FLIP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011978 repro_info->pipeline_flip);
11979 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_AF_ROI,
11980 repro_info->af_roi);
11981 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_IMG_DYN_FEAT,
11982 repro_info->dyn_mask);
Thierry Strudel3d639192016-09-09 11:52:26 -070011983 /* If ANDROID_JPEG_ORIENTATION is present in the frame settings,
11984 CAM_INTF_PARM_ROTATION metadata has already been added in
11985 translateToHalMetadata and the HAL needs to keep this new rotation
11986 metadata. Otherwise, the old rotation info saved in the vendor tag
11987 is used. */
11988 IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
11989 CAM_INTF_PARM_ROTATION, reprocParam) {
11990 LOGD("CAM_INTF_PARM_ROTATION metadata is added in translateToHalMetadata");
11991 } else {
11992 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_ROTATION,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011993 repro_info->rotation_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011994 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011995 }
11996
11997 /* Add additional JPEG cropping information. Apps add QCAMERA3_JPEG_ENCODE_CROP_RECT
11998 to ask for cropping and use the ROI for downscale/upscale during HW JPEG encoding.
11999 roi.width and roi.height are the final JPEG size.
12000 For now, the HAL only checks this for reprocess requests. */
12001 if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ENABLE) &&
12002 frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_RECT)) {
12003 uint8_t *enable =
12004 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ENABLE).data.u8;
12005 if (*enable == TRUE) {
12006 int32_t *crop_data =
12007 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_RECT).data.i32;
12008 cam_stream_crop_info_t crop_meta;
12009 memset(&crop_meta, 0, sizeof(cam_stream_crop_info_t));
12010 crop_meta.stream_id = 0;
12011 crop_meta.crop.left = crop_data[0];
12012 crop_meta.crop.top = crop_data[1];
12013 crop_meta.crop.width = crop_data[2];
12014 crop_meta.crop.height = crop_data[3];
Thierry Strudel9e74aae2016-09-22 17:10:18 -070012015 // The JPEG crop roi should match cpp output size
12016 IF_META_AVAILABLE(cam_stream_crop_info_t, cpp_crop,
12017 CAM_INTF_META_SNAP_CROP_INFO_CPP, reprocParam) {
12018 crop_meta.roi_map.left = 0;
12019 crop_meta.roi_map.top = 0;
12020 crop_meta.roi_map.width = cpp_crop->crop.width;
12021 crop_meta.roi_map.height = cpp_crop->crop.height;
Thierry Strudel3d639192016-09-09 11:52:26 -070012022 }
12023 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_JPEG_ENCODE_CROP,
12024 crop_meta);
Thierry Strudel9e74aae2016-09-22 17:10:18 -070012025 LOGH("Add JPEG encode crop left %d, top %d, width %d, height %d, mCameraId %d",
Thierry Strudel3d639192016-09-09 11:52:26 -070012026 crop_meta.crop.left, crop_meta.crop.top,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070012027 crop_meta.crop.width, crop_meta.crop.height, mCameraId);
12028 LOGH("Add JPEG encode crop ROI left %d, top %d, width %d, height %d, mCameraId %d",
Thierry Strudel3d639192016-09-09 11:52:26 -070012029 crop_meta.roi_map.left, crop_meta.roi_map.top,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070012030 crop_meta.roi_map.width, crop_meta.roi_map.height, mCameraId);
12031
12032 // Add JPEG scale information
12033 cam_dimension_t scale_dim;
12034 memset(&scale_dim, 0, sizeof(cam_dimension_t));
12035 if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ROI)) {
12036 int32_t *roi =
12037 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ROI).data.i32;
12038 scale_dim.width = roi[2];
12039 scale_dim.height = roi[3];
12040 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_JPEG_SCALE_DIMENSION,
12041 scale_dim);
12042 LOGH("Add JPEG encode scale width %d, height %d, mCameraId %d",
12043 scale_dim.width, scale_dim.height, mCameraId);
12044 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012045 }
12046 }
12047
12048 return rc;
12049}
12050
12051/*===========================================================================
12052 * FUNCTION : saveRequestSettings
12053 *
12054 * DESCRIPTION: Add any settings that might have changed to the request settings
12055 * and save the settings to be applied on the frame
12056 *
12057 * PARAMETERS :
12058 * @jpegMetadata : the extracted and/or modified jpeg metadata
12059 * @request : request with initial settings
12060 *
12061 * RETURN :
12062 * camera_metadata_t* : pointer to the saved request settings
12063 *==========================================================================*/
12064camera_metadata_t* QCamera3HardwareInterface::saveRequestSettings(
12065 const CameraMetadata &jpegMetadata,
12066 camera3_capture_request_t *request)
12067{
12068 camera_metadata_t *resultMetadata;
12069 CameraMetadata camMetadata;
12070 camMetadata = request->settings;
12071
12072 if (jpegMetadata.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
12073 int32_t thumbnail_size[2];
12074 thumbnail_size[0] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
12075 thumbnail_size[1] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
12076 camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, thumbnail_size,
12077 jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
12078 }
12079
12080 if (request->input_buffer != NULL) {
12081 uint8_t reprocessFlags = 1;
12082 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS,
12083 (uint8_t*)&reprocessFlags,
12084 sizeof(reprocessFlags));
12085 }
12086
12087 resultMetadata = camMetadata.release();
12088 return resultMetadata;
12089}
12090
12091/*===========================================================================
12092 * FUNCTION : setHalFpsRange
12093 *
12094 * DESCRIPTION: set FPS range parameter
12095 *
12096 *
12097 * PARAMETERS :
12098 * @settings : Metadata from framework
12099 * @hal_metadata: Metadata buffer
12100 *
12101 *
12102 * RETURN : success: NO_ERROR
12103 * failure:
12104 *==========================================================================*/
12105int32_t QCamera3HardwareInterface::setHalFpsRange(const CameraMetadata &settings,
12106 metadata_buffer_t *hal_metadata)
12107{
12108 int32_t rc = NO_ERROR;
12109 cam_fps_range_t fps_range;
12110 fps_range.min_fps = (float)
12111 settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0];
12112 fps_range.max_fps = (float)
12113 settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
12114 fps_range.video_min_fps = fps_range.min_fps;
12115 fps_range.video_max_fps = fps_range.max_fps;
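    // Start with the video fps equal to the requested AE target range; the HFR
    // handling below overrides these for constrained high-speed sessions.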
12116
12117 LOGD("aeTargetFpsRange fps: [%f %f]",
12118 fps_range.min_fps, fps_range.max_fps);
12119 /* In CONSTRAINED_HFR_MODE, sensor_fps is derived from aeTargetFpsRange as
12120 * follows:
12121 * ---------------------------------------------------------------|
12122 * Video stream is absent in configure_streams |
12123 * (Camcorder preview before the first video record |
12124 * ---------------------------------------------------------------|
12125 * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
12126 * | | | vid_min/max_fps|
12127 * ---------------------------------------------------------------|
12128 * NO | [ 30, 240] | 240 | [240, 240] |
12129 * |-------------|-------------|----------------|
12130 * | [240, 240] | 240 | [240, 240] |
12131 * ---------------------------------------------------------------|
12132 * Video stream is present in configure_streams |
12133 * ---------------------------------------------------------------|
12134 * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
12135 * | | | vid_min/max_fps|
12136 * ---------------------------------------------------------------|
12137 * NO | [ 30, 240] | 240 | [240, 240] |
12138 * (camcorder prev |-------------|-------------|----------------|
12139 * after video rec | [240, 240] | 240 | [240, 240] |
12140 * is stopped) | | | |
12141 * ---------------------------------------------------------------|
12142 * YES | [ 30, 240] | 240 | [240, 240] |
12143 * |-------------|-------------|----------------|
12144 * | [240, 240] | 240 | [240, 240] |
12145 * ---------------------------------------------------------------|
12146 * When Video stream is absent in configure_streams,
12147 * preview fps = sensor_fps / batchsize
12148 * Eg: for 240fps at batchSize 4, preview = 60fps
12149 * for 120fps at batchSize 4, preview = 30fps
12150 *
12151 * When video stream is present in configure_streams, preview fps is as per
12152 * the ratio of preview buffers to video buffers requested in process
12153 * capture request
12154 */
12155 mBatchSize = 0;
12156 if (CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) {
12157 fps_range.min_fps = fps_range.video_max_fps;
12158 fps_range.video_min_fps = fps_range.video_max_fps;
12159 int val = lookupHalName(HFR_MODE_MAP, METADATA_MAP_SIZE(HFR_MODE_MAP),
12160 fps_range.max_fps);
12161 if (NAME_NOT_FOUND != val) {
12162 cam_hfr_mode_t hfrMode = (cam_hfr_mode_t)val;
12163 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
12164 return BAD_VALUE;
12165 }
12166
12167 if (fps_range.max_fps >= MIN_FPS_FOR_BATCH_MODE) {
12168 /* If batchmode is currently in progress and the fps changes,
12169 * set the flag to restart the sensor */
12170 if((mHFRVideoFps >= MIN_FPS_FOR_BATCH_MODE) &&
12171 (mHFRVideoFps != fps_range.max_fps)) {
12172 mNeedSensorRestart = true;
12173 }
12174 mHFRVideoFps = fps_range.max_fps;
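                // Batch size is the ratio of the HFR sensor fps to the preview
                // fps, clamped to MAX_HFR_BATCH_SIZE below.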
12175 mBatchSize = mHFRVideoFps / PREVIEW_FPS_FOR_HFR;
12176 if (mBatchSize > MAX_HFR_BATCH_SIZE) {
12177 mBatchSize = MAX_HFR_BATCH_SIZE;
12178 }
12179 }
12180 LOGD("hfrMode: %d batchSize: %d", hfrMode, mBatchSize);
12181
12182 }
12183 } else {
12184 /* HFR mode is a session param in the backend/ISP. It should be reset when
12185 * in non-HFR mode */
12186 cam_hfr_mode_t hfrMode = CAM_HFR_MODE_OFF;
12187 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
12188 return BAD_VALUE;
12189 }
12190 }
12191 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FPS_RANGE, fps_range)) {
12192 return BAD_VALUE;
12193 }
12194 LOGD("fps: [%f %f] vid_fps: [%f %f]", fps_range.min_fps,
12195 fps_range.max_fps, fps_range.video_min_fps, fps_range.video_max_fps);
12196 return rc;
12197}
12198
12199/*===========================================================================
12200 * FUNCTION : translateToHalMetadata
12201 *
12202 * DESCRIPTION: read from the camera_metadata_t and change to parm_type_t
12203 *
12204 *
12205 * PARAMETERS :
12206 * @request : request sent from framework
12207 *
12208 *
12209 * RETURN : success: NO_ERROR
12210 * failure:
12211 *==========================================================================*/
12212int QCamera3HardwareInterface::translateToHalMetadata
12213 (const camera3_capture_request_t *request,
12214 metadata_buffer_t *hal_metadata,
Chien-Yu Chen92724a82017-01-06 11:50:30 -080012215 uint32_t snapshotStreamId) {
12216 if (request == nullptr || hal_metadata == nullptr) {
12217 return BAD_VALUE;
12218 }
12219
12220 int64_t minFrameDuration = getMinFrameDuration(request);
12221
12222 return translateFwkMetadataToHalMetadata(request->settings, hal_metadata, snapshotStreamId,
12223 minFrameDuration);
12224}
12225
12226int QCamera3HardwareInterface::translateFwkMetadataToHalMetadata(
12227 const camera_metadata_t *frameworkMetadata, metadata_buffer_t *hal_metadata,
12228 uint32_t snapshotStreamId, int64_t minFrameDuration) {
12229
Thierry Strudel3d639192016-09-09 11:52:26 -070012230 int rc = 0;
12231 CameraMetadata frame_settings;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080012232 frame_settings = frameworkMetadata;
Thierry Strudel3d639192016-09-09 11:52:26 -070012233
12234 /* Do not change the order of the following list unless you know what you are
12235 * doing.
12236 * The order is laid out in such a way that parameters in the front of the table
12237 * may be used to override the parameters later in the table. Examples are:
12238 * 1. META_MODE should precede AEC/AWB/AF MODE
12239 * 2. AEC MODE should precede EXPOSURE_TIME/SENSITIVITY/FRAME_DURATION
12240 * 3. AWB_MODE should precede COLOR_CORRECTION_MODE
12241 * 4. Any mode should precede its corresponding settings
12242 */
12243 if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
12244 uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
12245 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_MODE, metaMode)) {
12246 rc = BAD_VALUE;
12247 }
12248 rc = extractSceneMode(frame_settings, metaMode, hal_metadata);
12249 if (rc != NO_ERROR) {
12250 LOGE("extractSceneMode failed");
12251 }
12252 }
12253
12254 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
12255 uint8_t fwk_aeMode =
12256 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
12257 uint8_t aeMode;
12258 int32_t redeye;
12259
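        // The framework AE mode maps onto several backend controls at once: the
        // AEC mode (off / on / external flash), red-eye reduction, and the LED
        // flash mode looked up from AE_FLASH_MODE_MAP.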
12260 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_OFF ) {
12261 aeMode = CAM_AE_MODE_OFF;
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -080012262 } else if (fwk_aeMode == NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH) {
12263 aeMode = CAM_AE_MODE_ON_EXTERNAL_FLASH;
Thierry Strudel3d639192016-09-09 11:52:26 -070012264 } else {
12265 aeMode = CAM_AE_MODE_ON;
12266 }
12267 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
12268 redeye = 1;
12269 } else {
12270 redeye = 0;
12271 }
12272
12273 int val = lookupHalName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
12274 fwk_aeMode);
12275 if (NAME_NOT_FOUND != val) {
12276 int32_t flashMode = (int32_t)val;
12277 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode);
12278 }
12279
12280 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_MODE, aeMode);
12281 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_REDEYE_REDUCTION, redeye)) {
12282 rc = BAD_VALUE;
12283 }
12284 }
12285
12286 if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) {
12287 uint8_t fwk_whiteLevel = frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0];
12288 int val = lookupHalName(WHITE_BALANCE_MODES_MAP, METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
12289 fwk_whiteLevel);
12290 if (NAME_NOT_FOUND != val) {
12291 uint8_t whiteLevel = (uint8_t)val;
12292 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_WHITE_BALANCE, whiteLevel)) {
12293 rc = BAD_VALUE;
12294 }
12295 }
12296 }
12297
12298 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
12299 uint8_t fwk_cacMode =
12300 frame_settings.find(
12301 ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
12302 int val = lookupHalName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
12303 fwk_cacMode);
12304 if (NAME_NOT_FOUND != val) {
12305 cam_aberration_mode_t cacMode = (cam_aberration_mode_t) val;
12306 bool entryAvailable = FALSE;
12307 // Check whether Frameworks set CAC mode is supported in device or not
12308 for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
12309 if (gCamCapability[mCameraId]->aberration_modes[i] == cacMode) {
12310 entryAvailable = TRUE;
12311 break;
12312 }
12313 }
12314 LOGD("FrameworksCacMode=%d entryAvailable=%d", cacMode, entryAvailable);
12315 // If the entry is not found, set a device-supported mode instead of the framework mode, i.e.:
12316 // Only HW ISP CAC + no SW CAC : advertise all 3, with HIGH doing the same as FAST on the ISP
12317 // No HW ISP CAC + only SW CAC : advertise all 3, with FAST doing the same as OFF
12318 if (entryAvailable == FALSE) {
12319 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
12320 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
12321 } else {
12322 if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
12323 // HIGH_QUALITY is not supported, so fall back to FAST; the spec says the
12324 // underlying device implementation may be the same for both modes.
12325 cacMode = CAM_COLOR_CORRECTION_ABERRATION_FAST;
12326 } else if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_FAST) {
12327 // Fast is not supported and so we cannot set HIGH or FAST but choose OFF
12328 // in order to avoid the fps drop due to high quality
12329 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
12330 } else {
12331 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
12332 }
12333 }
12334 }
12335 LOGD("Final cacMode is %d", cacMode);
12336 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_CAC, cacMode)) {
12337 rc = BAD_VALUE;
12338 }
12339 } else {
12340 LOGE("Invalid framework CAC mode: %d", fwk_cacMode);
12341 }
12342 }
12343
Jason Lee84ae9972017-02-24 13:24:24 -080012344 uint8_t fwk_focusMode = 0;
Shuzhen Wangb57ec912017-07-31 13:24:27 -070012345 if (m_bForceInfinityAf == 0) {
Thierry Strudel2896d122017-02-23 19:18:03 -080012346 if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) {
Jason Lee84ae9972017-02-24 13:24:24 -080012347 fwk_focusMode = frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0];
Thierry Strudel2896d122017-02-23 19:18:03 -080012348 int val = lookupHalName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
12349 fwk_focusMode);
12350 if (NAME_NOT_FOUND != val) {
12351 uint8_t focusMode = (uint8_t)val;
12352 LOGD("set focus mode %d", focusMode);
12353 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12354 CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
12355 rc = BAD_VALUE;
12356 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012357 }
12358 }
Thierry Strudel2896d122017-02-23 19:18:03 -080012359 } else {
12360 uint8_t focusMode = (uint8_t)CAM_FOCUS_MODE_INFINITY;
12361 LOGE("Focus forced to infinity %d", focusMode);
12362 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
12363 rc = BAD_VALUE;
12364 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012365 }
12366
Jason Lee84ae9972017-02-24 13:24:24 -080012367 if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE) &&
12368 fwk_focusMode == ANDROID_CONTROL_AF_MODE_OFF) {
Thierry Strudel3d639192016-09-09 11:52:26 -070012369 float focalDistance = frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0];
12370 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCUS_DISTANCE,
12371 focalDistance)) {
12372 rc = BAD_VALUE;
12373 }
12374 }
12375
12376 if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) {
12377 uint8_t fwk_antibandingMode =
12378 frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.u8[0];
12379 int val = lookupHalName(ANTIBANDING_MODES_MAP,
12380 METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP), fwk_antibandingMode);
12381 if (NAME_NOT_FOUND != val) {
12382 uint32_t hal_antibandingMode = (uint32_t)val;
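            // AUTO antibanding is refined by region: 60 Hz mains zones map to
            // AUTO_60HZ, everything else to AUTO_50HZ.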
Shuzhen Wangf6890e02016-08-12 14:28:54 -070012383 if (hal_antibandingMode == CAM_ANTIBANDING_MODE_AUTO) {
12384 if (m60HzZone) {
12385 hal_antibandingMode = CAM_ANTIBANDING_MODE_AUTO_60HZ;
12386 } else {
12387 hal_antibandingMode = CAM_ANTIBANDING_MODE_AUTO_50HZ;
12388 }
12389 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012390 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ANTIBANDING,
12391 hal_antibandingMode)) {
12392 rc = BAD_VALUE;
12393 }
12394 }
12395 }
12396
12397 if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
12398 int32_t expCompensation = frame_settings.find(
12399 ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
12400 if (expCompensation < gCamCapability[mCameraId]->exposure_compensation_min)
12401 expCompensation = gCamCapability[mCameraId]->exposure_compensation_min;
12402 if (expCompensation > gCamCapability[mCameraId]->exposure_compensation_max)
12403 expCompensation = gCamCapability[mCameraId]->exposure_compensation_max;
Thierry Strudel54dc9782017-02-15 12:12:10 -080012404 LOGD("Setting compensation:%d", expCompensation);
Thierry Strudel3d639192016-09-09 11:52:26 -070012405 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_COMPENSATION,
12406 expCompensation)) {
12407 rc = BAD_VALUE;
12408 }
12409 }
12410
12411 if (frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) {
12412 uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0];
12413 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_LOCK, aeLock)) {
12414 rc = BAD_VALUE;
12415 }
12416 }
12417 if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
12418 rc = setHalFpsRange(frame_settings, hal_metadata);
12419 if (rc != NO_ERROR) {
12420 LOGE("setHalFpsRange failed");
12421 }
12422 }
12423
12424 if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) {
12425 uint8_t awbLock = frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0];
12426 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AWB_LOCK, awbLock)) {
12427 rc = BAD_VALUE;
12428 }
12429 }
12430
12431 if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) {
12432 uint8_t fwk_effectMode = frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0];
12433 int val = lookupHalName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
12434 fwk_effectMode);
12435 if (NAME_NOT_FOUND != val) {
12436 uint8_t effectMode = (uint8_t)val;
12437 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EFFECT, effectMode)) {
12438 rc = BAD_VALUE;
12439 }
12440 }
12441 }
12442
12443 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) {
12444 uint8_t colorCorrectMode = frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0];
12445 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_MODE,
12446 colorCorrectMode)) {
12447 rc = BAD_VALUE;
12448 }
12449 }
12450
12451 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_GAINS)) {
12452 cam_color_correct_gains_t colorCorrectGains;
12453 for (size_t i = 0; i < CC_GAIN_MAX; i++) {
12454 colorCorrectGains.gains[i] =
12455 frame_settings.find(ANDROID_COLOR_CORRECTION_GAINS).data.f[i];
12456 }
12457 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_GAINS,
12458 colorCorrectGains)) {
12459 rc = BAD_VALUE;
12460 }
12461 }
12462
12463 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_TRANSFORM)) {
12464 cam_color_correct_matrix_t colorCorrectTransform;
12465 cam_rational_type_t transform_elem;
12466 size_t num = 0;
12467 for (size_t i = 0; i < CC_MATRIX_ROWS; i++) {
12468 for (size_t j = 0; j < CC_MATRIX_COLS; j++) {
12469 transform_elem.numerator =
12470 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].numerator;
12471 transform_elem.denominator =
12472 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].denominator;
12473 colorCorrectTransform.transform_matrix[i][j] = transform_elem;
12474 num++;
12475 }
12476 }
12477 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_TRANSFORM,
12478 colorCorrectTransform)) {
12479 rc = BAD_VALUE;
12480 }
12481 }
12482
12483 cam_trigger_t aecTrigger;
12484 aecTrigger.trigger = CAM_AEC_TRIGGER_IDLE;
12485 aecTrigger.trigger_id = -1;
12486 if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)&&
12487 frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_ID)) {
12488 aecTrigger.trigger =
12489 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0];
12490 aecTrigger.trigger_id =
12491 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID).data.i32[0];
12492 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER,
12493 aecTrigger)) {
12494 rc = BAD_VALUE;
12495 }
12496 LOGD("precaptureTrigger: %d precaptureTriggerID: %d",
12497 aecTrigger.trigger, aecTrigger.trigger_id);
12498 }
12499
12500 /*af_trigger must come with a trigger id*/
12501 if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER) &&
12502 frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER_ID)) {
12503 cam_trigger_t af_trigger;
12504 af_trigger.trigger =
12505 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0];
12506 af_trigger.trigger_id =
12507 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER_ID).data.i32[0];
12508 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_TRIGGER, af_trigger)) {
12509 rc = BAD_VALUE;
12510 }
12511 LOGD("AfTrigger: %d AfTriggerID: %d",
12512 af_trigger.trigger, af_trigger.trigger_id);
12513 }
12514
12515 if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) {
12516 int32_t demosaic = frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0];
12517 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_DEMOSAIC, demosaic)) {
12518 rc = BAD_VALUE;
12519 }
12520 }
12521 if (frame_settings.exists(ANDROID_EDGE_MODE)) {
12522 cam_edge_application_t edge_application;
12523 edge_application.edge_mode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0];
Thierry Strudel54dc9782017-02-15 12:12:10 -080012524
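        // Sharpness rides along with the edge mode: 0 when edge mode is OFF,
        // otherwise the capability default unless the app supplies an in-range
        // QCAMERA3_SHARPNESS_STRENGTH override.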
Thierry Strudel3d639192016-09-09 11:52:26 -070012525 if (edge_application.edge_mode == CAM_EDGE_MODE_OFF) {
12526 edge_application.sharpness = 0;
12527 } else {
Thierry Strudel54dc9782017-02-15 12:12:10 -080012528 edge_application.sharpness =
12529 gCamCapability[mCameraId]->sharpness_ctrl.def_value; //default
12530 if (frame_settings.exists(QCAMERA3_SHARPNESS_STRENGTH)) {
12531 int32_t sharpness =
12532 frame_settings.find(QCAMERA3_SHARPNESS_STRENGTH).data.i32[0];
12533 if (sharpness >= gCamCapability[mCameraId]->sharpness_ctrl.min_value &&
12534 sharpness <= gCamCapability[mCameraId]->sharpness_ctrl.max_value) {
12535 LOGD("Setting edge mode sharpness %d", sharpness);
12536 edge_application.sharpness = sharpness;
12537 }
12538 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012539 }
12540 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EDGE_MODE, edge_application)) {
12541 rc = BAD_VALUE;
12542 }
12543 }
12544
12545 if (frame_settings.exists(ANDROID_FLASH_MODE)) {
12546 int32_t respectFlashMode = 1;
12547 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
12548 uint8_t fwk_aeMode =
12549 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -080012550 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH ||
12551 fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH ||
12552 fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
Thierry Strudel3d639192016-09-09 11:52:26 -070012553 respectFlashMode = 0;
12554 LOGH("AE Mode controls flash, ignore android.flash.mode");
12555 }
12556 }
12557 if (respectFlashMode) {
12558 int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
12559 (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
12560 LOGH("flash mode after mapping %d", val);
12561 // To check: CAM_INTF_META_FLASH_MODE usage
12562 if (NAME_NOT_FOUND != val) {
12563 uint8_t flashMode = (uint8_t)val;
12564 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode)) {
12565 rc = BAD_VALUE;
12566 }
12567 }
12568 }
12569 }
12570
12571 if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) {
12572 uint8_t flashPower = frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0];
12573 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_POWER, flashPower)) {
12574 rc = BAD_VALUE;
12575 }
12576 }
12577
12578 if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) {
12579 int64_t flashFiringTime = frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0];
12580 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_FIRING_TIME,
12581 flashFiringTime)) {
12582 rc = BAD_VALUE;
12583 }
12584 }
12585
12586 if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) {
12587 uint8_t hotPixelMode = frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0];
12588 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_HOTPIXEL_MODE,
12589 hotPixelMode)) {
12590 rc = BAD_VALUE;
12591 }
12592 }
12593
12594 if (frame_settings.exists(ANDROID_LENS_APERTURE)) {
12595 float lensAperture = frame_settings.find( ANDROID_LENS_APERTURE).data.f[0];
12596 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_APERTURE,
12597 lensAperture)) {
12598 rc = BAD_VALUE;
12599 }
12600 }
12601
12602 if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) {
12603 float filterDensity = frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0];
12604 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FILTERDENSITY,
12605 filterDensity)) {
12606 rc = BAD_VALUE;
12607 }
12608 }
12609
12610 if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
12611 float focalLength = frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
12612 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCAL_LENGTH,
12613 focalLength)) {
12614 rc = BAD_VALUE;
12615 }
12616 }
12617
12618 if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) {
12619 uint8_t optStabMode =
12620 frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0];
12621 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_OPT_STAB_MODE,
12622 optStabMode)) {
12623 rc = BAD_VALUE;
12624 }
12625 }
12626
12627 if (frame_settings.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
12628 uint8_t videoStabMode =
12629 frame_settings.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
12630 LOGD("videoStabMode from APP = %d", videoStabMode);
12631 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_VIDEO_STAB_MODE,
12632 videoStabMode)) {
12633 rc = BAD_VALUE;
12634 }
12635 }
12636
12637
12638 if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) {
12639 uint8_t noiseRedMode = frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
12640 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_NOISE_REDUCTION_MODE,
12641 noiseRedMode)) {
12642 rc = BAD_VALUE;
12643 }
12644 }
12645
12646 if (frame_settings.exists(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR)) {
12647 float reprocessEffectiveExposureFactor =
12648 frame_settings.find(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR).data.f[0];
12649 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR,
12650 reprocessEffectiveExposureFactor)) {
12651 rc = BAD_VALUE;
12652 }
12653 }
12654
12655 cam_crop_region_t scalerCropRegion;
12656 bool scalerCropSet = false;
12657 if (frame_settings.exists(ANDROID_SCALER_CROP_REGION)) {
12658 scalerCropRegion.left = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0];
12659 scalerCropRegion.top = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1];
12660 scalerCropRegion.width = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2];
12661 scalerCropRegion.height = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[3];
12662
12663 // Map coordinate system from active array to sensor output.
12664 mCropRegionMapper.toSensor(scalerCropRegion.left, scalerCropRegion.top,
12665 scalerCropRegion.width, scalerCropRegion.height);
12666
12667 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SCALER_CROP_REGION,
12668 scalerCropRegion)) {
12669 rc = BAD_VALUE;
12670 }
12671 scalerCropSet = true;
12672 }
12673
12674 if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
12675 int64_t sensorExpTime =
12676 frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
12677 LOGD("setting sensorExpTime %lld", sensorExpTime);
12678 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_EXPOSURE_TIME,
12679 sensorExpTime)) {
12680 rc = BAD_VALUE;
12681 }
12682 }
12683
12684 if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) {
12685 int64_t sensorFrameDuration =
12686 frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
Thierry Strudel3d639192016-09-09 11:52:26 -070012687 sensorFrameDuration = MAX(sensorFrameDuration, minFrameDuration);
12688 if (sensorFrameDuration > gCamCapability[mCameraId]->max_frame_duration)
12689 sensorFrameDuration = gCamCapability[mCameraId]->max_frame_duration;
12690 LOGD("clamp sensorFrameDuration to %lld", sensorFrameDuration);
12691 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_FRAME_DURATION,
12692 sensorFrameDuration)) {
12693 rc = BAD_VALUE;
12694 }
12695 }
12696
12697 if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
12698 int32_t sensorSensitivity = frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
12699 if (sensorSensitivity < gCamCapability[mCameraId]->sensitivity_range.min_sensitivity)
12700 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.min_sensitivity;
12701 if (sensorSensitivity > gCamCapability[mCameraId]->sensitivity_range.max_sensitivity)
12702 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.max_sensitivity;
12703 LOGD("clamp sensorSensitivity to %d", sensorSensitivity);
12704 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_SENSITIVITY,
12705 sensorSensitivity)) {
12706 rc = BAD_VALUE;
12707 }
12708 }
12709
Thierry Strudel9e74aae2016-09-22 17:10:18 -070012710#ifndef USE_HAL_3_3
12711 if (frame_settings.exists(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST)) {
12712 int32_t ispSensitivity =
12713 frame_settings.find(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST).data.i32[0];
12714 if (ispSensitivity <
12715 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity) {
12716 ispSensitivity =
12717 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
12718 LOGD("clamp ispSensitivity to %d", ispSensitivity);
12719 }
12720 if (ispSensitivity >
12721 gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity) {
12722 ispSensitivity =
12723 gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity;
12724 LOGD("clamp ispSensitivity to %d", ispSensitivity);
12725 }
12726 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_ISP_SENSITIVITY,
12727 ispSensitivity)) {
12728 rc = BAD_VALUE;
12729 }
12730 }
12731#endif
12732
Thierry Strudel3d639192016-09-09 11:52:26 -070012733 if (frame_settings.exists(ANDROID_SHADING_MODE)) {
12734 uint8_t shadingMode = frame_settings.find(ANDROID_SHADING_MODE).data.u8[0];
12735 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SHADING_MODE, shadingMode)) {
12736 rc = BAD_VALUE;
12737 }
12738 }
12739
12740 if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
12741 uint8_t fwk_facedetectMode =
12742 frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];
12743
12744 int val = lookupHalName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
12745 fwk_facedetectMode);
12746
12747 if (NAME_NOT_FOUND != val) {
12748 uint8_t facedetectMode = (uint8_t)val;
12749 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_FACEDETECT_MODE,
12750 facedetectMode)) {
12751 rc = BAD_VALUE;
12752 }
12753 }
12754 }
12755
Thierry Strudel54dc9782017-02-15 12:12:10 -080012756 if (frame_settings.exists(QCAMERA3_HISTOGRAM_MODE)) {
Thierry Strudel3d639192016-09-09 11:52:26 -070012757 uint8_t histogramMode =
Thierry Strudel54dc9782017-02-15 12:12:10 -080012758 frame_settings.find(QCAMERA3_HISTOGRAM_MODE).data.u8[0];
Thierry Strudel3d639192016-09-09 11:52:26 -070012759 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
12760 histogramMode)) {
12761 rc = BAD_VALUE;
12762 }
12763 }
12764
12765 if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) {
12766 uint8_t sharpnessMapMode =
12767 frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0];
12768 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,
12769 sharpnessMapMode)) {
12770 rc = BAD_VALUE;
12771 }
12772 }
12773
12774 if (frame_settings.exists(ANDROID_TONEMAP_MODE)) {
12775 uint8_t tonemapMode =
12776 frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0];
12777 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_MODE, tonemapMode)) {
12778 rc = BAD_VALUE;
12779 }
12780 }
12781 /* Tonemap curve channels ch0 = G, ch 1 = B, ch 2 = R */
12782 /*All tonemap channels will have the same number of points*/
12783 if (frame_settings.exists(ANDROID_TONEMAP_CURVE_GREEN) &&
12784 frame_settings.exists(ANDROID_TONEMAP_CURVE_BLUE) &&
12785 frame_settings.exists(ANDROID_TONEMAP_CURVE_RED)) {
12786 cam_rgb_tonemap_curves tonemapCurves;
12787 tonemapCurves.tonemap_points_cnt = frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).count/2;
12788 if (tonemapCurves.tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
12789 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
12790 tonemapCurves.tonemap_points_cnt,
12791 CAM_MAX_TONEMAP_CURVE_SIZE);
12792 tonemapCurves.tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
12793 }
12794
12795 /* ch0 = G*/
12796 size_t point = 0;
12797 cam_tonemap_curve_t tonemapCurveGreen;
12798 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12799 for (size_t j = 0; j < 2; j++) {
12800 tonemapCurveGreen.tonemap_points[i][j] =
12801 frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).data.f[point];
12802 point++;
12803 }
12804 }
12805 tonemapCurves.curves[0] = tonemapCurveGreen;
12806
12807 /* ch 1 = B */
12808 point = 0;
12809 cam_tonemap_curve_t tonemapCurveBlue;
12810 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12811 for (size_t j = 0; j < 2; j++) {
12812 tonemapCurveBlue.tonemap_points[i][j] =
12813 frame_settings.find(ANDROID_TONEMAP_CURVE_BLUE).data.f[point];
12814 point++;
12815 }
12816 }
12817 tonemapCurves.curves[1] = tonemapCurveBlue;
12818
12819 /* ch 2 = R */
12820 point = 0;
12821 cam_tonemap_curve_t tonemapCurveRed;
12822 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12823 for (size_t j = 0; j < 2; j++) {
12824 tonemapCurveRed.tonemap_points[i][j] =
12825 frame_settings.find(ANDROID_TONEMAP_CURVE_RED).data.f[point];
12826 point++;
12827 }
12828 }
12829 tonemapCurves.curves[2] = tonemapCurveRed;
12830
12831 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_CURVES,
12832 tonemapCurves)) {
12833 rc = BAD_VALUE;
12834 }
12835 }
12836
12837 if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
12838 uint8_t captureIntent = frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
12839 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_CAPTURE_INTENT,
12840 captureIntent)) {
12841 rc = BAD_VALUE;
12842 }
12843 }
12844
12845 if (frame_settings.exists(ANDROID_BLACK_LEVEL_LOCK)) {
12846 uint8_t blackLevelLock = frame_settings.find(ANDROID_BLACK_LEVEL_LOCK).data.u8[0];
12847 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_BLACK_LEVEL_LOCK,
12848 blackLevelLock)) {
12849 rc = BAD_VALUE;
12850 }
12851 }
12852
12853 if (frame_settings.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
12854 uint8_t lensShadingMapMode =
12855 frame_settings.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
12856 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_SHADING_MAP_MODE,
12857 lensShadingMapMode)) {
12858 rc = BAD_VALUE;
12859 }
12860 }
12861
12862 if (frame_settings.exists(ANDROID_CONTROL_AE_REGIONS)) {
12863 cam_area_t roi;
12864 bool reset = true;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080012865 convertFromRegions(roi, frame_settings, ANDROID_CONTROL_AE_REGIONS);
Thierry Strudel3d639192016-09-09 11:52:26 -070012866
12867 // Map coordinate system from active array to sensor output.
12868 mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
12869 roi.rect.height);
12870
12871 if (scalerCropSet) {
12872 reset = resetIfNeededROI(&roi, &scalerCropRegion);
12873 }
12874 if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_ROI, roi)) {
12875 rc = BAD_VALUE;
12876 }
12877 }
12878
12879 if (frame_settings.exists(ANDROID_CONTROL_AF_REGIONS)) {
12880 cam_area_t roi;
12881 bool reset = true;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080012882 convertFromRegions(roi, frame_settings, ANDROID_CONTROL_AF_REGIONS);
Thierry Strudel3d639192016-09-09 11:52:26 -070012883
12884 // Map coordinate system from active array to sensor output.
12885 mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
12886 roi.rect.height);
12887
12888 if (scalerCropSet) {
12889 reset = resetIfNeededROI(&roi, &scalerCropRegion);
12890 }
12891 if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_ROI, roi)) {
12892 rc = BAD_VALUE;
12893 }
12894 }
12895
12896 // CDS for non-HFR non-video mode
12897 if ((mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
12898 !(m_bIsVideo) && frame_settings.exists(QCAMERA3_CDS_MODE)) {
12899 int32_t *fwk_cds = frame_settings.find(QCAMERA3_CDS_MODE).data.i32;
12900 if ((CAM_CDS_MODE_MAX <= *fwk_cds) || (0 > *fwk_cds)) {
12901 LOGE("Invalid CDS mode %d!", *fwk_cds);
12902 } else {
12903 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12904 CAM_INTF_PARM_CDS_MODE, *fwk_cds)) {
12905 rc = BAD_VALUE;
12906 }
12907 }
12908 }
12909
Thierry Strudel04e026f2016-10-10 11:27:36 -070012910 // Video HDR
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012911 cam_video_hdr_mode_t vhdr = CAM_VIDEO_HDR_MODE_OFF;
Thierry Strudel04e026f2016-10-10 11:27:36 -070012912 if (frame_settings.exists(QCAMERA3_VIDEO_HDR_MODE)) {
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012913 vhdr = (cam_video_hdr_mode_t) frame_settings.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
12914 }
12915 if (m_bVideoHdrEnabled)
12916 vhdr = CAM_VIDEO_HDR_MODE_ON;
12917
Thierry Strudel54dc9782017-02-15 12:12:10 -080012918 int8_t curr_hdr_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) != 0);
12919
12920 if(vhdr != curr_hdr_state)
12921 LOGH("PROFILE_SET_HDR_MODE %d" ,vhdr);
12922
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012923 rc = setVideoHdrMode(mParameters, vhdr);
12924 if (rc != NO_ERROR) {
12925         LOGE("setVideoHdrMode failed");
Thierry Strudel04e026f2016-10-10 11:27:36 -070012926 }
12927
12928 //IR
12929 if(frame_settings.exists(QCAMERA3_IR_MODE)) {
12930 cam_ir_mode_type_t fwk_ir = (cam_ir_mode_type_t)
12931 frame_settings.find(QCAMERA3_IR_MODE).data.i32[0];
Thierry Strudel54dc9782017-02-15 12:12:10 -080012932 uint8_t curr_ir_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_IR) != 0);
12933 uint8_t isIRon = 0;
12934
12935         isIRon = (fwk_ir > 0) ? 1 : 0;
Thierry Strudel04e026f2016-10-10 11:27:36 -070012936 if ((CAM_IR_MODE_MAX <= fwk_ir) || (0 > fwk_ir)) {
12937 LOGE("Invalid IR mode %d!", fwk_ir);
12938 } else {
Thierry Strudel54dc9782017-02-15 12:12:10 -080012939 if(isIRon != curr_ir_state )
12940 LOGH("PROFILE_SET_IR_MODE %d" ,isIRon);
12941
Thierry Strudel04e026f2016-10-10 11:27:36 -070012942 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12943 CAM_INTF_META_IR_MODE, fwk_ir)) {
12944 rc = BAD_VALUE;
12945 }
12946 }
12947 }
12948
Thierry Strudel54dc9782017-02-15 12:12:10 -080012949 //Binning Correction Mode
12950 if(frame_settings.exists(QCAMERA3_BINNING_CORRECTION_MODE)) {
12951 cam_binning_correction_mode_t fwk_binning_correction = (cam_binning_correction_mode_t)
12952 frame_settings.find(QCAMERA3_BINNING_CORRECTION_MODE).data.i32[0];
12953 if ((CAM_BINNING_CORRECTION_MODE_MAX <= fwk_binning_correction)
12954 || (0 > fwk_binning_correction)) {
12955 LOGE("Invalid binning correction mode %d!", fwk_binning_correction);
12956 } else {
12957 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12958 CAM_INTF_META_BINNING_CORRECTION_MODE, fwk_binning_correction)) {
12959 rc = BAD_VALUE;
12960 }
12961 }
12962 }
12963
Thierry Strudel269c81a2016-10-12 12:13:59 -070012964 if (frame_settings.exists(QCAMERA3_AEC_CONVERGENCE_SPEED)) {
12965 float aec_speed;
12966 aec_speed = frame_settings.find(QCAMERA3_AEC_CONVERGENCE_SPEED).data.f[0];
12967 LOGD("AEC Speed :%f", aec_speed);
12968 if ( aec_speed < 0 ) {
12969 LOGE("Invalid AEC mode %f!", aec_speed);
12970 } else {
12971 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_CONVERGENCE_SPEED,
12972 aec_speed)) {
12973 rc = BAD_VALUE;
12974 }
12975 }
12976 }
12977
12978 if (frame_settings.exists(QCAMERA3_AWB_CONVERGENCE_SPEED)) {
12979 float awb_speed;
12980 awb_speed = frame_settings.find(QCAMERA3_AWB_CONVERGENCE_SPEED).data.f[0];
12981 LOGD("AWB Speed :%f", awb_speed);
12982 if ( awb_speed < 0 ) {
12983 LOGE("Invalid AWB mode %f!", awb_speed);
12984 } else {
12985 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AWB_CONVERGENCE_SPEED,
12986 awb_speed)) {
12987 rc = BAD_VALUE;
12988 }
12989 }
12990 }
12991
Thierry Strudel3d639192016-09-09 11:52:26 -070012992 // TNR
12993 if (frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_ENABLE) &&
12994 frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE)) {
12995 uint8_t b_TnrRequested = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -080012996 uint8_t curr_tnr_state = ((mCurrFeatureState & CAM_QTI_FEATURE_SW_TNR) != 0);
Thierry Strudel3d639192016-09-09 11:52:26 -070012997 cam_denoise_param_t tnr;
12998 tnr.denoise_enable = frame_settings.find(QCAMERA3_TEMPORAL_DENOISE_ENABLE).data.u8[0];
12999 tnr.process_plates =
13000 (cam_denoise_process_type_t)frame_settings.find(
13001 QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE).data.i32[0];
13002 b_TnrRequested = tnr.denoise_enable;
Thierry Strudel54dc9782017-02-15 12:12:10 -080013003
13004 if(b_TnrRequested != curr_tnr_state)
13005 LOGH("PROFILE_SET_TNR_MODE %d" ,b_TnrRequested);
13006
Thierry Strudel3d639192016-09-09 11:52:26 -070013007 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_TEMPORAL_DENOISE, tnr)) {
13008 rc = BAD_VALUE;
13009 }
13010 }
13011
Thierry Strudel54dc9782017-02-15 12:12:10 -080013012 if (frame_settings.exists(QCAMERA3_EXPOSURE_METER)) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -070013013 int32_t* exposure_metering_mode =
Thierry Strudel54dc9782017-02-15 12:12:10 -080013014 frame_settings.find(QCAMERA3_EXPOSURE_METER).data.i32;
Thierry Strudel295a0ca2016-11-03 18:38:47 -070013015 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_ALGO_TYPE,
13016 *exposure_metering_mode)) {
13017 rc = BAD_VALUE;
13018 }
13019 }
13020
Thierry Strudel3d639192016-09-09 11:52:26 -070013021 if (frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_MODE)) {
13022 int32_t fwk_testPatternMode =
13023 frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_MODE).data.i32[0];
13024 int testPatternMode = lookupHalName(TEST_PATTERN_MAP,
13025 METADATA_MAP_SIZE(TEST_PATTERN_MAP), fwk_testPatternMode);
13026
13027 if (NAME_NOT_FOUND != testPatternMode) {
13028 cam_test_pattern_data_t testPatternData;
13029 memset(&testPatternData, 0, sizeof(testPatternData));
13030 testPatternData.mode = (cam_test_pattern_mode_t)testPatternMode;
13031 if (testPatternMode == CAM_TEST_PATTERN_SOLID_COLOR &&
13032 frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_DATA)) {
13033 int32_t *fwk_testPatternData =
13034 frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_DATA).data.i32;
13035 testPatternData.r = fwk_testPatternData[0];
13036 testPatternData.b = fwk_testPatternData[3];
13037 switch (gCamCapability[mCameraId]->color_arrangement) {
13038 case CAM_FILTER_ARRANGEMENT_RGGB:
13039 case CAM_FILTER_ARRANGEMENT_GRBG:
13040 testPatternData.gr = fwk_testPatternData[1];
13041 testPatternData.gb = fwk_testPatternData[2];
13042 break;
13043 case CAM_FILTER_ARRANGEMENT_GBRG:
13044 case CAM_FILTER_ARRANGEMENT_BGGR:
13045 testPatternData.gr = fwk_testPatternData[2];
13046 testPatternData.gb = fwk_testPatternData[1];
13047 break;
13048 default:
13049 LOGE("color arrangement %d is not supported",
13050 gCamCapability[mCameraId]->color_arrangement);
13051 break;
13052 }
13053 }
13054 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TEST_PATTERN_DATA,
13055 testPatternData)) {
13056 rc = BAD_VALUE;
13057 }
13058 } else {
13059 LOGE("Invalid framework sensor test pattern mode %d",
13060 fwk_testPatternMode);
13061 }
13062 }
13063
13064 if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
13065 size_t count = 0;
13066 camera_metadata_entry_t gps_coords = frame_settings.find(ANDROID_JPEG_GPS_COORDINATES);
13067 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_COORDINATES,
13068 gps_coords.data.d, gps_coords.count, count);
13069 if (gps_coords.count != count) {
13070 rc = BAD_VALUE;
13071 }
13072 }
13073
13074 if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
13075 char gps_methods[GPS_PROCESSING_METHOD_SIZE];
13076 size_t count = 0;
13077 const char *gps_methods_src = (const char *)
13078 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8;
13079 memset(gps_methods, '\0', sizeof(gps_methods));
13080 strlcpy(gps_methods, gps_methods_src, sizeof(gps_methods));
13081 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_PROC_METHODS,
13082 gps_methods, GPS_PROCESSING_METHOD_SIZE, count);
13083 if (GPS_PROCESSING_METHOD_SIZE != count) {
13084 rc = BAD_VALUE;
13085 }
13086 }
13087
13088 if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
13089 int64_t gps_timestamp = frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
13090 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_TIMESTAMP,
13091 gps_timestamp)) {
13092 rc = BAD_VALUE;
13093 }
13094 }
13095
13096 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
13097 int32_t orientation = frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
13098 cam_rotation_info_t rotation_info;
13099 if (orientation == 0) {
13100 rotation_info.rotation = ROTATE_0;
13101 } else if (orientation == 90) {
13102 rotation_info.rotation = ROTATE_90;
13103 } else if (orientation == 180) {
13104 rotation_info.rotation = ROTATE_180;
13105 } else if (orientation == 270) {
13106 rotation_info.rotation = ROTATE_270;
13107 }
Shuzhen Wang6ec8eac2016-07-28 23:09:23 -070013108 rotation_info.device_rotation = ROTATE_0;
Thierry Strudel3d639192016-09-09 11:52:26 -070013109 rotation_info.streamId = snapshotStreamId;
13110 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_ORIENTATION, orientation);
13111 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ROTATION, rotation_info)) {
13112 rc = BAD_VALUE;
13113 }
13114 }
13115
13116 if (frame_settings.exists(ANDROID_JPEG_QUALITY)) {
13117 uint32_t quality = (uint32_t) frame_settings.find(ANDROID_JPEG_QUALITY).data.u8[0];
13118 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_QUALITY, quality)) {
13119 rc = BAD_VALUE;
13120 }
13121 }
13122
13123 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY)) {
13124 uint32_t thumb_quality = (uint32_t)
13125 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8[0];
13126 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_QUALITY,
13127 thumb_quality)) {
13128 rc = BAD_VALUE;
13129 }
13130 }
13131
13132 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
13133 cam_dimension_t dim;
13134 dim.width = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
13135 dim.height = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
13136 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_SIZE, dim)) {
13137 rc = BAD_VALUE;
13138 }
13139 }
13140
13141 // Internal metadata
13142 if (frame_settings.exists(QCAMERA3_PRIVATEDATA_REPROCESS)) {
13143 size_t count = 0;
13144 camera_metadata_entry_t privatedata = frame_settings.find(QCAMERA3_PRIVATEDATA_REPROCESS);
13145 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_PRIVATE_DATA,
13146 privatedata.data.i32, privatedata.count, count);
13147 if (privatedata.count != count) {
13148 rc = BAD_VALUE;
13149 }
13150 }
13151
Thierry Strudel295a0ca2016-11-03 18:38:47 -070013152 // ISO/Exposure Priority
13153 if (frame_settings.exists(QCAMERA3_USE_ISO_EXP_PRIORITY) &&
13154 frame_settings.exists(QCAMERA3_SELECT_PRIORITY)) {
13155 cam_priority_mode_t mode =
13156 (cam_priority_mode_t)frame_settings.find(QCAMERA3_SELECT_PRIORITY).data.i32[0];
13157 if((CAM_ISO_PRIORITY == mode) || (CAM_EXP_PRIORITY == mode)) {
13158 cam_intf_parm_manual_3a_t use_iso_exp_pty;
13159 use_iso_exp_pty.previewOnly = FALSE;
13160 uint64_t* ptr = (uint64_t*)frame_settings.find(QCAMERA3_USE_ISO_EXP_PRIORITY).data.i64;
13161 use_iso_exp_pty.value = *ptr;
13162
13163 if(CAM_ISO_PRIORITY == mode) {
13164 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ISO,
13165 use_iso_exp_pty)) {
13166 rc = BAD_VALUE;
13167 }
13168 }
13169 else {
13170 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_TIME,
13171 use_iso_exp_pty)) {
13172 rc = BAD_VALUE;
13173 }
13174 }
Thierry Strudel54dc9782017-02-15 12:12:10 -080013175
13176 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ZSL_MODE, 1)) {
13177 rc = BAD_VALUE;
13178 }
13179 }
13180 } else {
13181 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ZSL_MODE, 0)) {
13182 rc = BAD_VALUE;
Thierry Strudel295a0ca2016-11-03 18:38:47 -070013183 }
13184 }
13185
13186 // Saturation
13187 if (frame_settings.exists(QCAMERA3_USE_SATURATION)) {
13188 int32_t* use_saturation =
13189 frame_settings.find(QCAMERA3_USE_SATURATION).data.i32;
13190 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_SATURATION, *use_saturation)) {
13191 rc = BAD_VALUE;
13192 }
13193 }
13194
Thierry Strudel3d639192016-09-09 11:52:26 -070013195 // EV step
13196 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EV_STEP,
13197 gCamCapability[mCameraId]->exp_compensation_step)) {
13198 rc = BAD_VALUE;
13199 }
13200
13201 // CDS info
13202 if (frame_settings.exists(QCAMERA3_CDS_INFO)) {
13203 cam_cds_data_t *cdsData = (cam_cds_data_t *)
13204 frame_settings.find(QCAMERA3_CDS_INFO).data.u8;
13205
13206 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13207 CAM_INTF_META_CDS_DATA, *cdsData)) {
13208 rc = BAD_VALUE;
13209 }
13210 }
13211
Shuzhen Wang19463d72016-03-08 11:09:52 -080013212 // Hybrid AE
13213 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
13214 uint8_t *hybrid_ae = (uint8_t *)
13215 frame_settings.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8;
Shuzhen Wang77b049a2017-08-30 12:24:36 -070013216 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_HYBRID_AE, *hybrid_ae)) {
13217 rc = BAD_VALUE;
13218 }
Shuzhen Wang19463d72016-03-08 11:09:52 -080013219 }
13220
Shuzhen Wang14415f52016-11-16 18:26:18 -080013221 // Histogram
13222 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE)) {
13223 uint8_t histogramMode =
13224 frame_settings.find(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE).data.u8[0];
13225 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
13226 histogramMode)) {
13227 rc = BAD_VALUE;
13228 }
13229 }
13230
13231 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS)) {
13232 int32_t histogramBins =
13233 frame_settings.find(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS).data.i32[0];
13234 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_BINS,
13235 histogramBins)) {
13236 rc = BAD_VALUE;
13237 }
13238 }
13239
Shuzhen Wangcc386c52017-03-29 09:28:08 -070013240 // Tracking AF
13241 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER)) {
13242 uint8_t trackingAfTrigger =
13243 frame_settings.find(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER).data.u8[0];
13244 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TRACKING_AF_TRIGGER,
13245 trackingAfTrigger)) {
13246 rc = BAD_VALUE;
13247 }
13248 }
13249
Chien-Yu Chendbd619b2017-08-04 17:50:11 -070013250 // Makernote
13251 camera_metadata_entry entry = frame_settings.find(NEXUS_EXPERIMENTAL_2017_EXIF_MAKERNOTE);
13252 if (entry.count != 0) {
13253 if (entry.count <= MAX_MAKERNOTE_LENGTH) {
13254 cam_makernote_t makernote;
13255 makernote.length = entry.count;
13256 memcpy(makernote.data, entry.data.u8, makernote.length);
13257 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_MAKERNOTE, makernote)) {
13258 rc = BAD_VALUE;
13259 }
13260 } else {
13261             ALOGE("%s: Makernote length %zu is larger than %d", __FUNCTION__, entry.count,
13262 MAX_MAKERNOTE_LENGTH);
13263 rc = BAD_VALUE;
13264 }
13265 }
13266
Thierry Strudel3d639192016-09-09 11:52:26 -070013267 return rc;
13268}
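/*
 * Illustrative framework-side sketch of a few of the settings consumed by the
 * translation above. This is not part of the HAL; the update() calls use the
 * public CameraMetadata API and the literal values are examples only.
 *
 *   CameraMetadata settings;
 *   int32_t jpegOrientation = 90;             // must be 0, 90, 180 or 270
 *   uint8_t jpegQuality     = 95;
 *   int32_t thumbSize[2]    = {320, 240};
 *   settings.update(ANDROID_JPEG_ORIENTATION, &jpegOrientation, 1);
 *   settings.update(ANDROID_JPEG_QUALITY, &jpegQuality, 1);
 *   settings.update(ANDROID_JPEG_THUMBNAIL_SIZE, thumbSize, 2);
 */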
13269
13270/*===========================================================================
13271 * FUNCTION : captureResultCb
13272 *
13273 * DESCRIPTION: Callback handler for all channels (streams, as well as metadata)
13274 *
13275 * PARAMETERS :
13276 * @frame : frame information from mm-camera-interface
13277 * @buffer : actual gralloc buffer to be returned to frameworks. NULL if metadata.
13278 * @userdata: userdata
13279 *
13280 * RETURN : NONE
13281 *==========================================================================*/
13282void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata,
13283 camera3_stream_buffer_t *buffer,
13284 uint32_t frame_number, bool isInputBuffer, void *userdata)
13285{
13286 QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
13287 if (hw == NULL) {
13288 LOGE("Invalid hw %p", hw);
13289 return;
13290 }
13291
13292 hw->captureResultCb(metadata, buffer, frame_number, isInputBuffer);
13293 return;
13294}
13295
Thierry Strudelc2ee3302016-11-17 12:33:12 -080013296/*===========================================================================
13297 * FUNCTION : setBufferErrorStatus
13298 *
13299 * DESCRIPTION: Callback handler for channels to report any buffer errors
13300 *
13301 * PARAMETERS :
13302 * @ch : Channel on which buffer error is reported from
13303 * @frame_number : frame number on which buffer error is reported on
13304 * @buffer_status : buffer error status
13305 * @userdata: userdata
13306 *
13307 * RETURN : NONE
13308 *==========================================================================*/
13309void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
13310 uint32_t frame_number, camera3_buffer_status_t err, void *userdata)
13311{
13312 QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
13313 if (hw == NULL) {
13314 LOGE("Invalid hw %p", hw);
13315 return;
13316 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013317
Thierry Strudelc2ee3302016-11-17 12:33:12 -080013318 hw->setBufferErrorStatus(ch, frame_number, err);
13319 return;
13320}
13321
13322void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
13323 uint32_t frameNumber, camera3_buffer_status_t err)
13324{
13325 LOGD("channel: %p, frame# %d, buf err: %d", ch, frameNumber, err);
13326 pthread_mutex_lock(&mMutex);
13327
13328 for (auto& req : mPendingBuffersMap.mPendingBuffersInRequest) {
13329 if (req.frame_number != frameNumber)
13330 continue;
13331 for (auto& k : req.mPendingBufferList) {
13332 if(k.stream->priv == ch) {
13333 k.bufStatus = CAMERA3_BUFFER_STATUS_ERROR;
13334 }
13335 }
13336 }
13337
13338 pthread_mutex_unlock(&mMutex);
13339 return;
13340}
Thierry Strudel3d639192016-09-09 11:52:26 -070013341/*===========================================================================
13342 * FUNCTION : initialize
13343 *
13344 * DESCRIPTION: Pass framework callback pointers to HAL
13345 *
13346 * PARAMETERS :
13347 *
13348 *
13349 * RETURN : Success : 0
13350 * Failure: -ENODEV
13351 *==========================================================================*/
13352
13353int QCamera3HardwareInterface::initialize(const struct camera3_device *device,
13354 const camera3_callback_ops_t *callback_ops)
13355{
13356 LOGD("E");
13357 QCamera3HardwareInterface *hw =
13358 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13359 if (!hw) {
13360 LOGE("NULL camera device");
13361 return -ENODEV;
13362 }
13363
13364 int rc = hw->initialize(callback_ops);
13365 LOGD("X");
13366 return rc;
13367}
13368
13369/*===========================================================================
13370 * FUNCTION : configure_streams
13371 *
13372 * DESCRIPTION:
13373 *
13374 * PARAMETERS :
13375 *
13376 *
13377 * RETURN : Success: 0
13378 * Failure: -EINVAL (if stream configuration is invalid)
13379 * -ENODEV (fatal error)
13380 *==========================================================================*/
13381
13382int QCamera3HardwareInterface::configure_streams(
13383 const struct camera3_device *device,
13384 camera3_stream_configuration_t *stream_list)
13385{
13386 LOGD("E");
13387 QCamera3HardwareInterface *hw =
13388 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13389 if (!hw) {
13390 LOGE("NULL camera device");
13391 return -ENODEV;
13392 }
13393 int rc = hw->configureStreams(stream_list);
13394 LOGD("X");
13395 return rc;
13396}
13397
13398/*===========================================================================
13399 * FUNCTION : construct_default_request_settings
13400 *
13401 * DESCRIPTION: Configure a settings buffer to meet the required use case
13402 *
13403 * PARAMETERS :
13404 *
13405 *
13406 * RETURN : Success: Return valid metadata
13407 * Failure: Return NULL
13408 *==========================================================================*/
13409const camera_metadata_t* QCamera3HardwareInterface::
13410 construct_default_request_settings(const struct camera3_device *device,
13411 int type)
13412{
13413
13414 LOGD("E");
13415 camera_metadata_t* fwk_metadata = NULL;
13416 QCamera3HardwareInterface *hw =
13417 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13418 if (!hw) {
13419 LOGE("NULL camera device");
13420 return NULL;
13421 }
13422
13423 fwk_metadata = hw->translateCapabilityToMetadata(type);
13424
13425 LOGD("X");
13426 return fwk_metadata;
13427}
13428
13429/*===========================================================================
13430 * FUNCTION : process_capture_request
13431 *
13432 * DESCRIPTION:
13433 *
13434 * PARAMETERS :
13435 *
13436 *
13437 * RETURN :
13438 *==========================================================================*/
13439int QCamera3HardwareInterface::process_capture_request(
13440 const struct camera3_device *device,
13441 camera3_capture_request_t *request)
13442{
13443 LOGD("E");
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013444 CAMSCOPE_UPDATE_FLAGS(CAMSCOPE_SECTION_HAL, kpi_camscope_flags);
Thierry Strudel3d639192016-09-09 11:52:26 -070013445 QCamera3HardwareInterface *hw =
13446 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13447 if (!hw) {
13448 LOGE("NULL camera device");
13449 return -EINVAL;
13450 }
13451
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013452 int rc = hw->orchestrateRequest(request);
Thierry Strudel3d639192016-09-09 11:52:26 -070013453 LOGD("X");
13454 return rc;
13455}
13456
13457/*===========================================================================
13458 * FUNCTION : dump
13459 *
13460 * DESCRIPTION:
13461 *
13462 * PARAMETERS :
13463 *
13464 *
13465 * RETURN :
13466 *==========================================================================*/
13467
13468void QCamera3HardwareInterface::dump(
13469 const struct camera3_device *device, int fd)
13470{
13471 /* Log level property is read when "adb shell dumpsys media.camera" is
13472 called so that the log level can be controlled without restarting
13473 the media server */
13474 getLogLevel();
13475
13476 LOGD("E");
13477 QCamera3HardwareInterface *hw =
13478 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13479 if (!hw) {
13480 LOGE("NULL camera device");
13481 return;
13482 }
13483
13484 hw->dump(fd);
13485 LOGD("X");
13486 return;
13487}
13488
13489/*===========================================================================
13490 * FUNCTION : flush
13491 *
13492 * DESCRIPTION:
13493 *
13494 * PARAMETERS :
13495 *
13496 *
13497 * RETURN :
13498 *==========================================================================*/
13499
13500int QCamera3HardwareInterface::flush(
13501 const struct camera3_device *device)
13502{
13503 int rc;
13504 LOGD("E");
13505 QCamera3HardwareInterface *hw =
13506 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13507 if (!hw) {
13508 LOGE("NULL camera device");
13509 return -EINVAL;
13510 }
13511
13512 pthread_mutex_lock(&hw->mMutex);
13513 // Validate current state
13514 switch (hw->mState) {
13515 case STARTED:
13516 /* valid state */
13517 break;
13518
13519 case ERROR:
13520 pthread_mutex_unlock(&hw->mMutex);
13521 hw->handleCameraDeviceError();
13522 return -ENODEV;
13523
13524 default:
13525 LOGI("Flush returned during state %d", hw->mState);
13526 pthread_mutex_unlock(&hw->mMutex);
13527 return 0;
13528 }
13529 pthread_mutex_unlock(&hw->mMutex);
13530
13531 rc = hw->flush(true /* restart channels */ );
13532 LOGD("X");
13533 return rc;
13534}
13535
13536/*===========================================================================
13537 * FUNCTION : close_camera_device
13538 *
13539 * DESCRIPTION:
13540 *
13541 * PARAMETERS :
13542 *
13543 *
13544 * RETURN :
13545 *==========================================================================*/
13546int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device)
13547{
13548 int ret = NO_ERROR;
13549 QCamera3HardwareInterface *hw =
13550 reinterpret_cast<QCamera3HardwareInterface *>(
13551 reinterpret_cast<camera3_device_t *>(device)->priv);
13552 if (!hw) {
13553 LOGE("NULL camera device");
13554 return BAD_VALUE;
13555 }
13556
13557 LOGI("[KPI Perf]: E camera id %d", hw->mCameraId);
13558 delete hw;
13559 LOGI("[KPI Perf]: X");
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013560 CAMSCOPE_DESTROY(CAMSCOPE_SECTION_HAL);
Thierry Strudel3d639192016-09-09 11:52:26 -070013561 return ret;
13562}
13563
13564/*===========================================================================
13565 * FUNCTION : getWaveletDenoiseProcessPlate
13566 *
13567 * DESCRIPTION: query wavelet denoise process plate
13568 *
13569 * PARAMETERS : None
13570 *
13571 * RETURN : WNR process plate value
13572 *==========================================================================*/
13573cam_denoise_process_type_t QCamera3HardwareInterface::getWaveletDenoiseProcessPlate()
13574{
13575 char prop[PROPERTY_VALUE_MAX];
13576 memset(prop, 0, sizeof(prop));
13577 property_get("persist.denoise.process.plates", prop, "0");
13578 int processPlate = atoi(prop);
13579 switch(processPlate) {
13580 case 0:
13581 return CAM_WAVELET_DENOISE_YCBCR_PLANE;
13582 case 1:
13583 return CAM_WAVELET_DENOISE_CBCR_ONLY;
13584 case 2:
13585 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13586 case 3:
13587 return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
13588 default:
13589 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13590 }
13591}
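/*
 * Illustrative tuning example for the property read above (mapping taken from
 * the switch statement; treat it as a debug aid, not a public interface):
 *
 *   adb shell setprop persist.denoise.process.plates 2
 *       -> CAM_WAVELET_DENOISE_STREAMLINE_YCBCR
 *
 * An unset property defaults to "0" (CAM_WAVELET_DENOISE_YCBCR_PLANE); any
 * out-of-range value falls back to CAM_WAVELET_DENOISE_STREAMLINE_YCBCR.
 */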
13592
13593
13594/*===========================================================================
13595 * FUNCTION : getTemporalDenoiseProcessPlate
13596 *
13597 * DESCRIPTION: query temporal denoise process plate
13598 *
13599 * PARAMETERS : None
13600 *
13601 * RETURN : TNR process plate value
13602 *==========================================================================*/
13603cam_denoise_process_type_t QCamera3HardwareInterface::getTemporalDenoiseProcessPlate()
13604{
13605 char prop[PROPERTY_VALUE_MAX];
13606 memset(prop, 0, sizeof(prop));
13607 property_get("persist.tnr.process.plates", prop, "0");
13608 int processPlate = atoi(prop);
13609 switch(processPlate) {
13610 case 0:
13611 return CAM_WAVELET_DENOISE_YCBCR_PLANE;
13612 case 1:
13613 return CAM_WAVELET_DENOISE_CBCR_ONLY;
13614 case 2:
13615 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13616 case 3:
13617 return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
13618 default:
13619 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13620 }
13621}
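/*
 * Illustrative tuning example, mirroring the WNR property above:
 *
 *   adb shell setprop persist.tnr.process.plates 1
 *       -> CAM_WAVELET_DENOISE_CBCR_ONLY (per the switch statement above)
 */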
13622
13623
13624/*===========================================================================
13625 * FUNCTION : extractSceneMode
13626 *
13627 * DESCRIPTION: Extract scene mode from frameworks set metadata
13628 *
13629 * PARAMETERS :
13630 * @frame_settings: CameraMetadata reference
13631 * @metaMode: ANDROID_CONTROL_MODE
13632 * @hal_metadata: hal metadata structure
13633 *
13634 * RETURN : int32_t type of status
13635 *==========================================================================*/
13636int32_t QCamera3HardwareInterface::extractSceneMode(
13637 const CameraMetadata &frame_settings, uint8_t metaMode,
13638 metadata_buffer_t *hal_metadata)
13639{
13640 int32_t rc = NO_ERROR;
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013641 uint8_t sceneMode = CAM_SCENE_MODE_OFF;
13642
13643 if (ANDROID_CONTROL_MODE_OFF_KEEP_STATE == metaMode) {
13644 LOGD("Ignoring control mode OFF_KEEP_STATE");
13645 return NO_ERROR;
13646 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013647
13648 if (metaMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
13649 camera_metadata_ro_entry entry =
13650 frame_settings.find(ANDROID_CONTROL_SCENE_MODE);
13651 if (0 == entry.count)
13652 return rc;
13653
13654 uint8_t fwk_sceneMode = entry.data.u8[0];
13655
13656 int val = lookupHalName(SCENE_MODES_MAP,
13657 sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
13658 fwk_sceneMode);
13659 if (NAME_NOT_FOUND != val) {
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013660 sceneMode = (uint8_t)val;
Thierry Strudel3d639192016-09-09 11:52:26 -070013661 LOGD("sceneMode: %d", sceneMode);
Thierry Strudel3d639192016-09-09 11:52:26 -070013662 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013663 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013664
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013665 if ((sceneMode == CAM_SCENE_MODE_HDR) || m_bSensorHDREnabled) {
13666 rc = setSensorHDR(hal_metadata, (sceneMode == CAM_SCENE_MODE_HDR));
13667 }
13668
13669 if ((rc == NO_ERROR) && !m_bSensorHDREnabled) {
13670         if (sceneMode == CAM_SCENE_MODE_HDR) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013671 cam_hdr_param_t hdr_params;
13672 hdr_params.hdr_enable = 1;
13673 hdr_params.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13674 hdr_params.hdr_need_1x = false;
13675 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13676 CAM_INTF_PARM_HAL_BRACKETING_HDR, hdr_params)) {
13677 rc = BAD_VALUE;
13678 }
13679 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013680
Thierry Strudel3d639192016-09-09 11:52:26 -070013681 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13682 CAM_INTF_PARM_BESTSHOT_MODE, sceneMode)) {
13683 rc = BAD_VALUE;
13684 }
13685 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013686
13687 if (mForceHdrSnapshot) {
13688 cam_hdr_param_t hdr_params;
13689 hdr_params.hdr_enable = 1;
13690 hdr_params.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13691 hdr_params.hdr_need_1x = false;
13692 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13693 CAM_INTF_PARM_HAL_BRACKETING_HDR, hdr_params)) {
13694 rc = BAD_VALUE;
13695 }
13696 }
13697
Thierry Strudel3d639192016-09-09 11:52:26 -070013698 return rc;
13699}
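/*
 * Illustrative framework-side sketch of a request that reaches the HDR scene
 * path above; not part of this HAL, shown only as an example:
 *
 *   uint8_t mode  = ANDROID_CONTROL_MODE_USE_SCENE_MODE;
 *   uint8_t scene = ANDROID_CONTROL_SCENE_MODE_HDR;
 *   settings.update(ANDROID_CONTROL_MODE, &mode, 1);
 *   settings.update(ANDROID_CONTROL_SCENE_MODE, &scene, 1);
 */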
13700
13701/*===========================================================================
Thierry Strudel04e026f2016-10-10 11:27:36 -070013702 * FUNCTION : setVideoHdrMode
13703 *
13704 * DESCRIPTION: Set Video HDR mode from frameworks set metadata
13705 *
13706 * PARAMETERS :
13707 * @hal_metadata: hal metadata structure
13708 * @vhdr: video HDR mode to set (QCAMERA3_VIDEO_HDR_MODE vendor tag value)
13709 *
13710 * RETURN : int32_t type of status
13711 *==========================================================================*/
13712int32_t QCamera3HardwareInterface::setVideoHdrMode(
13713 metadata_buffer_t *hal_metadata, cam_video_hdr_mode_t vhdr)
13714{
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013715 if ( (vhdr >= CAM_VIDEO_HDR_MODE_OFF) && (vhdr < CAM_VIDEO_HDR_MODE_MAX)) {
13716 return setSensorHDR(hal_metadata, (vhdr == CAM_VIDEO_HDR_MODE_ON), true);
13717 }
13718
13719 LOGE("Invalid Video HDR mode %d!", vhdr);
13720 return BAD_VALUE;
13721}
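/*
 * Illustrative caller sketch (the same call is made from the video HDR block
 * in the settings translation above):
 *
 *   rc = setVideoHdrMode(mParameters, CAM_VIDEO_HDR_MODE_ON);   // enable
 *   rc = setVideoHdrMode(mParameters, CAM_VIDEO_HDR_MODE_OFF);  // disable
 */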
13722
13723/*===========================================================================
13724 * FUNCTION : setSensorHDR
13725 *
13726 * DESCRIPTION: Enable/disable sensor HDR.
13727 *
13728 * PARAMETERS :
13729 * @hal_metadata: hal metadata structure
13730 * @enable: boolean whether to enable/disable sensor HDR
13731 *
13732 * RETURN : int32_t type of status
13733 *==========================================================================*/
13734int32_t QCamera3HardwareInterface::setSensorHDR(
13735 metadata_buffer_t *hal_metadata, bool enable, bool isVideoHdrEnable)
13736{
Thierry Strudel04e026f2016-10-10 11:27:36 -070013737 int32_t rc = NO_ERROR;
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013738 cam_sensor_hdr_type_t sensor_hdr = CAM_SENSOR_HDR_OFF;
13739
13740 if (enable) {
13741 char sensor_hdr_prop[PROPERTY_VALUE_MAX];
13742 memset(sensor_hdr_prop, 0, sizeof(sensor_hdr_prop));
13743 #ifdef _LE_CAMERA_
13744 //Default to staggered HDR for IOT
13745 property_get("persist.camera.sensor.hdr", sensor_hdr_prop, "3");
13746 #else
13747 property_get("persist.camera.sensor.hdr", sensor_hdr_prop, "0");
13748 #endif
13749 sensor_hdr = (cam_sensor_hdr_type_t) atoi(sensor_hdr_prop);
13750 }
13751
13752 bool isSupported = false;
13753 switch (sensor_hdr) {
13754 case CAM_SENSOR_HDR_IN_SENSOR:
13755 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13756 CAM_QCOM_FEATURE_SENSOR_HDR) {
13757 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070013758 LOGD("Setting HDR mode In Sensor");
Thierry Strudel04e026f2016-10-10 11:27:36 -070013759 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013760 break;
13761 case CAM_SENSOR_HDR_ZIGZAG:
13762 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13763 CAM_QCOM_FEATURE_ZIGZAG_HDR) {
13764 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070013765 LOGD("Setting HDR mode Zigzag");
Thierry Strudel04e026f2016-10-10 11:27:36 -070013766 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013767 break;
13768 case CAM_SENSOR_HDR_STAGGERED:
13769 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13770 CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) {
13771 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070013772 LOGD("Setting HDR mode Staggered");
Thierry Strudel04e026f2016-10-10 11:27:36 -070013773 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013774 break;
13775 case CAM_SENSOR_HDR_OFF:
13776 isSupported = true;
13777 LOGD("Turning off sensor HDR");
13778 break;
13779 default:
13780 LOGE("HDR mode %d not supported", sensor_hdr);
13781 rc = BAD_VALUE;
13782 break;
13783 }
13784
13785 if(isSupported) {
13786 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13787 CAM_INTF_PARM_SENSOR_HDR, sensor_hdr)) {
13788 rc = BAD_VALUE;
13789 } else {
13790 if(!isVideoHdrEnable)
13791 m_bSensorHDREnabled = (sensor_hdr != CAM_SENSOR_HDR_OFF);
Thierry Strudel04e026f2016-10-10 11:27:36 -070013792 }
13793 }
13794 return rc;
13795}
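/*
 * Illustrative tuning example (property name taken from the code above; the
 * numeric mapping to cam_sensor_hdr_type_t is assumed from cam_types.h):
 *
 *   adb shell setprop persist.camera.sensor.hdr 3   # staggered HDR (the _LE_CAMERA_ default)
 *   adb shell setprop persist.camera.sensor.hdr 0   # sensor HDR off
 *
 * A requested mode is applied only if the matching feature bit is present in
 * qcom_supported_feature_mask, as checked above.
 */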
13796
13797/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070013798 * FUNCTION : needRotationReprocess
13799 *
13800 * DESCRIPTION: if rotation needs to be done by reprocess in pp
13801 *
13802 * PARAMETERS : none
13803 *
13804 * RETURN : true: needed
13805 * false: no need
13806 *==========================================================================*/
13807bool QCamera3HardwareInterface::needRotationReprocess()
13808{
13809 if ((gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION) > 0) {
13810 // current rotation is not zero, and pp has the capability to process rotation
13811 LOGH("need do reprocess for rotation");
13812 return true;
13813 }
13814
13815 return false;
13816}
13817
13818/*===========================================================================
13819 * FUNCTION : needReprocess
13820 *
13821 * DESCRIPTION: if reprocess is needed
13822 *
13823 * PARAMETERS : none
13824 *
13825 * RETURN : true: needed
13826 * false: no need
13827 *==========================================================================*/
13828bool QCamera3HardwareInterface::needReprocess(cam_feature_mask_t postprocess_mask)
13829{
13830 if (gCamCapability[mCameraId]->qcom_supported_feature_mask > 0) {
13831 // TODO: add for ZSL HDR later
13832 // pp module has min requirement for zsl reprocess, or WNR in ZSL mode
13833 if(postprocess_mask == CAM_QCOM_FEATURE_NONE){
13834 LOGH("need do reprocess for ZSL WNR or min PP reprocess");
13835 return true;
13836 } else {
13837 LOGH("already post processed frame");
13838 return false;
13839 }
13840 }
13841 return needRotationReprocess();
13842}
13843
13844/*===========================================================================
13845 * FUNCTION : needJpegExifRotation
13846 *
13847 * DESCRIPTION: if rotation from jpeg is needed
13848 *
13849 * PARAMETERS : none
13850 *
13851 * RETURN : true: needed
13852 * false: no need
13853 *==========================================================================*/
13854bool QCamera3HardwareInterface::needJpegExifRotation()
13855{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013856 /*If the pp does not have the ability to do rotation, enable jpeg rotation*/
Thierry Strudel3d639192016-09-09 11:52:26 -070013857 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
13858 LOGD("Need use Jpeg EXIF Rotation");
13859 return true;
13860 }
13861 return false;
13862}
13863
13864/*===========================================================================
13865 * FUNCTION : addOfflineReprocChannel
13866 *
13867 * DESCRIPTION: add a reprocess channel that will do reprocess on frames
13868 * coming from input channel
13869 *
13870 * PARAMETERS :
13871 * @config : reprocess configuration
13872 * @inputChHandle : pointer to the input (source) channel
13873 *
13874 *
13875 * RETURN : Ptr to the newly created channel obj. NULL if failed.
13876 *==========================================================================*/
13877QCamera3ReprocessChannel *QCamera3HardwareInterface::addOfflineReprocChannel(
13878 const reprocess_config_t &config, QCamera3ProcessingChannel *inputChHandle)
13879{
13880 int32_t rc = NO_ERROR;
13881 QCamera3ReprocessChannel *pChannel = NULL;
13882
13883 pChannel = new QCamera3ReprocessChannel(mCameraHandle->camera_handle,
Thierry Strudelc2ee3302016-11-17 12:33:12 -080013884 mChannelHandle, mCameraHandle->ops, captureResultCb, setBufferErrorStatus,
13885 config.padding, CAM_QCOM_FEATURE_NONE, this, inputChHandle);
Thierry Strudel3d639192016-09-09 11:52:26 -070013886 if (NULL == pChannel) {
13887 LOGE("no mem for reprocess channel");
13888 return NULL;
13889 }
13890
13891 rc = pChannel->initialize(IS_TYPE_NONE);
13892 if (rc != NO_ERROR) {
13893 LOGE("init reprocess channel failed, ret = %d", rc);
13894 delete pChannel;
13895 return NULL;
13896 }
13897
13898 // pp feature config
13899 cam_pp_feature_config_t pp_config;
13900 memset(&pp_config, 0, sizeof(cam_pp_feature_config_t));
13901
13902 pp_config.feature_mask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
13903 if (gCamCapability[mCameraId]->qcom_supported_feature_mask
13904 & CAM_QCOM_FEATURE_DSDN) {
13905         //Use CPP CDS in case h/w supports it.
13906 pp_config.feature_mask &= ~CAM_QCOM_FEATURE_CDS;
13907 pp_config.feature_mask |= CAM_QCOM_FEATURE_DSDN;
13908 }
13909 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
13910 pp_config.feature_mask &= ~CAM_QCOM_FEATURE_ROTATION;
13911 }
13912
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013913 if (config.hdr_param.hdr_enable) {
13914 pp_config.feature_mask |= CAM_QCOM_FEATURE_HDR;
13915 pp_config.hdr_param = config.hdr_param;
13916 }
13917
13918 if (mForceHdrSnapshot) {
13919 pp_config.feature_mask |= CAM_QCOM_FEATURE_HDR;
13920 pp_config.hdr_param.hdr_enable = 1;
13921 pp_config.hdr_param.hdr_need_1x = 0;
13922 pp_config.hdr_param.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13923 }
13924
Thierry Strudel3d639192016-09-09 11:52:26 -070013925 rc = pChannel->addReprocStreamsFromSource(pp_config,
13926 config,
13927 IS_TYPE_NONE,
13928 mMetadataChannel);
13929
13930 if (rc != NO_ERROR) {
13931 delete pChannel;
13932 return NULL;
13933 }
13934 return pChannel;
13935}
13936
13937/*===========================================================================
13938 * FUNCTION : getMobicatMask
13939 *
13940 * DESCRIPTION: returns mobicat mask
13941 *
13942 * PARAMETERS : none
13943 *
13944 * RETURN : mobicat mask
13945 *
13946 *==========================================================================*/
13947uint8_t QCamera3HardwareInterface::getMobicatMask()
13948{
13949 return m_MobicatMask;
13950}
13951
13952/*===========================================================================
13953 * FUNCTION : setMobicat
13954 *
13955 * DESCRIPTION: set Mobicat on/off.
13956 *
13957 * PARAMETERS :
13958 * @params : none
13959 *
13960 * RETURN : int32_t type of status
13961 * NO_ERROR -- success
13962 * none-zero failure code
13963 *==========================================================================*/
13964int32_t QCamera3HardwareInterface::setMobicat()
13965{
Thierry Strudel3d639192016-09-09 11:52:26 -070013966 int32_t ret = NO_ERROR;
Thierry Strudel3d639192016-09-09 11:52:26 -070013967
Shuzhen Wangb57ec912017-07-31 13:24:27 -070013968 if (m_MobicatMask) {
Thierry Strudel3d639192016-09-09 11:52:26 -070013969 tune_cmd_t tune_cmd;
13970 tune_cmd.type = SET_RELOAD_CHROMATIX;
13971 tune_cmd.module = MODULE_ALL;
13972 tune_cmd.value = TRUE;
13973 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
13974 CAM_INTF_PARM_SET_VFE_COMMAND,
13975 tune_cmd);
13976
13977 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
13978 CAM_INTF_PARM_SET_PP_COMMAND,
13979 tune_cmd);
13980 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013981
13982 return ret;
13983}
13984
13985/*===========================================================================
13986* FUNCTION : getLogLevel
13987*
13988* DESCRIPTION: Reads the log level property into a variable
13989*
13990* PARAMETERS :
13991* None
13992*
13993* RETURN :
13994* None
13995*==========================================================================*/
13996void QCamera3HardwareInterface::getLogLevel()
13997{
13998 char prop[PROPERTY_VALUE_MAX];
13999 uint32_t globalLogLevel = 0;
14000
14001 property_get("persist.camera.hal.debug", prop, "0");
14002 int val = atoi(prop);
14003 if (0 <= val) {
14004 gCamHal3LogLevel = (uint32_t)val;
14005 }
14006
Thierry Strudel9ec39c62016-12-28 11:30:05 -080014007 property_get("persist.camera.kpi.debug", prop, "0");
Thierry Strudel3d639192016-09-09 11:52:26 -070014008 gKpiDebugLevel = atoi(prop);
14009
14010 property_get("persist.camera.global.debug", prop, "0");
14011 val = atoi(prop);
14012 if (0 <= val) {
14013 globalLogLevel = (uint32_t)val;
14014 }
14015
14016 /* Highest log level among hal.logs and global.logs is selected */
14017 if (gCamHal3LogLevel < globalLogLevel)
14018 gCamHal3LogLevel = globalLogLevel;
14019
14020 return;
14021}
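/*
 * Illustrative debugging example (property names taken from the code above):
 *
 *   adb shell setprop persist.camera.hal.debug 4
 *   adb shell setprop persist.camera.global.debug 2
 *   adb shell dumpsys media.camera      # dump() re-reads the properties
 *
 * The effective HAL log level is the higher of the two properties.
 */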
14022
14023/*===========================================================================
14024 * FUNCTION : validateStreamRotations
14025 *
14026 * DESCRIPTION: Check if the rotations requested are supported
14027 *
14028 * PARAMETERS :
14029 * @stream_list : streams to be configured
14030 *
14031 * RETURN : NO_ERROR on success
14032 * -EINVAL on failure
14033 *
14034 *==========================================================================*/
14035int QCamera3HardwareInterface::validateStreamRotations(
14036 camera3_stream_configuration_t *streamList)
14037{
14038 int rc = NO_ERROR;
14039
14040 /*
14041 * Loop through all streams requested in configuration
14042 * Check if unsupported rotations have been requested on any of them
14043 */
14044 for (size_t j = 0; j < streamList->num_streams; j++){
14045 camera3_stream_t *newStream = streamList->streams[j];
14046
Emilian Peev35ceeed2017-06-29 11:58:56 -070014047 switch(newStream->rotation) {
14048 case CAMERA3_STREAM_ROTATION_0:
14049 case CAMERA3_STREAM_ROTATION_90:
14050 case CAMERA3_STREAM_ROTATION_180:
14051 case CAMERA3_STREAM_ROTATION_270:
14052 //Expected values
14053 break;
14054 default:
14055 ALOGE("%s: Error: Unsupported rotation of %d requested for stream"
14056 "type:%d and stream format:%d", __func__,
14057                     " type:%d and stream format:%d", __func__,
14058 newStream->format);
14059 return -EINVAL;
14060 }
14061
Thierry Strudel3d639192016-09-09 11:52:26 -070014062 bool isRotated = (newStream->rotation != CAMERA3_STREAM_ROTATION_0);
14063 bool isImplDef = (newStream->format ==
14064 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED);
14065 bool isZsl = (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
14066 isImplDef);
14067
14068 if (isRotated && (!isImplDef || isZsl)) {
14069 LOGE("Error: Unsupported rotation of %d requested for stream"
14070                     " type:%d and stream format:%d",
14071 newStream->rotation, newStream->stream_type,
14072 newStream->format);
14073 rc = -EINVAL;
14074 break;
14075 }
14076 }
14077
14078 return rc;
14079}
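/*
 * Illustrative framework-side sketch of a stream this check accepts; field
 * values are examples only:
 *
 *   camera3_stream_t s = {};
 *   s.stream_type = CAMERA3_STREAM_OUTPUT;
 *   s.format      = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
 *   s.rotation    = CAMERA3_STREAM_ROTATION_90;  // rotation is accepted only for
 *                                                // impl-defined, non-ZSL streams
 */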
14080
14081/*===========================================================================
14082* FUNCTION : getFlashInfo
14083*
14084* DESCRIPTION: Retrieve information about whether the device has a flash.
14085*
14086* PARAMETERS :
14087* @cameraId : Camera id to query
14088* @hasFlash : Boolean indicating whether there is a flash device
14089* associated with given camera
14090* @flashNode : If a flash device exists, this will be its device node.
14091*
14092* RETURN :
14093* None
14094*==========================================================================*/
14095void QCamera3HardwareInterface::getFlashInfo(const int cameraId,
14096 bool& hasFlash,
14097 char (&flashNode)[QCAMERA_MAX_FILEPATH_LENGTH])
14098{
14099 cam_capability_t* camCapability = gCamCapability[cameraId];
14100 if (NULL == camCapability) {
14101 hasFlash = false;
14102 flashNode[0] = '\0';
14103 } else {
14104 hasFlash = camCapability->flash_available;
14105 strlcpy(flashNode,
14106 (char*)camCapability->flash_dev_name,
14107 QCAMERA_MAX_FILEPATH_LENGTH);
14108 }
14109}
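/*
 * Illustrative call site (hypothetical; real callers live in the flash/torch
 * glue code):
 *
 *   bool hasFlash = false;
 *   char node[QCAMERA_MAX_FILEPATH_LENGTH];
 *   getFlashInfo(cameraId, hasFlash, node);
 *   if (hasFlash) { ... }   // node holds the flash device path
 */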
14110
14111/*===========================================================================
14112* FUNCTION : getEepromVersionInfo
14113*
14114* DESCRIPTION: Retrieve version info of the sensor EEPROM data
14115*
14116* PARAMETERS : None
14117*
14118* RETURN : string describing EEPROM version
14119* "\0" if no such info available
14120*==========================================================================*/
14121const char *QCamera3HardwareInterface::getEepromVersionInfo()
14122{
14123 return (const char *)&gCamCapability[mCameraId]->eeprom_version_info[0];
14124}
14125
14126/*===========================================================================
14127* FUNCTION : getLdafCalib
14128*
14129* DESCRIPTION: Retrieve Laser AF calibration data
14130*
14131* PARAMETERS : None
14132*
14133* RETURN : Two uint32_t describing laser AF calibration data
14134* NULL if none is available.
14135*==========================================================================*/
14136const uint32_t *QCamera3HardwareInterface::getLdafCalib()
14137{
14138 if (mLdafCalibExist) {
14139 return &mLdafCalib[0];
14140 } else {
14141 return NULL;
14142 }
14143}
14144
14145/*===========================================================================
Arnd Geis082a4d72017-08-24 10:33:07 -070014146* FUNCTION : getEaselFwVersion
14147*
14148* DESCRIPTION: Retrieve Easel firmware version
14149*
14150* PARAMETERS : None
14151*
14152* RETURN : string describing Firmware version
Arnd Geis8cbfc182017-09-07 14:46:41 -070014153* NULL if the firmware has not been updated
Arnd Geis082a4d72017-08-24 10:33:07 -070014154*==========================================================================*/
14155const char *QCamera3HardwareInterface::getEaselFwVersion()
14156{
Arnd Geis8cbfc182017-09-07 14:46:41 -070014157 if (mEaselFwUpdated) {
14158 return (const char *)&mEaselFwVersion[0];
14159 } else {
14160 return NULL;
Arnd Geis082a4d72017-08-24 10:33:07 -070014161 }
Arnd Geis082a4d72017-08-24 10:33:07 -070014162}
14163
14164/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070014165 * FUNCTION : dynamicUpdateMetaStreamInfo
14166 *
14167 * DESCRIPTION: This function:
14168 * (1) stops all the channels
14169 * (2) returns error on pending requests and buffers
14170 * (3) sends metastream_info in setparams
14171 * (4) starts all channels
14172 * This is useful when sensor has to be restarted to apply any
14173 * settings such as frame rate from a different sensor mode
14174 *
14175 * PARAMETERS : None
14176 *
14177 * RETURN : NO_ERROR on success
14178 * Error codes on failure
14179 *
14180 *==========================================================================*/
14181int32_t QCamera3HardwareInterface::dynamicUpdateMetaStreamInfo()
14182{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014183 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_DYN_UPDATE_META_STRM_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -070014184 int rc = NO_ERROR;
14185
14186 LOGD("E");
14187
14188 rc = stopAllChannels();
14189 if (rc < 0) {
14190 LOGE("stopAllChannels failed");
14191 return rc;
14192 }
14193
14194 rc = notifyErrorForPendingRequests();
14195 if (rc < 0) {
14196 LOGE("notifyErrorForPendingRequests failed");
14197 return rc;
14198 }
14199
14200 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
14201         LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%x, "
14202 "Format:%d",
14203 mStreamConfigInfo.type[i],
14204 mStreamConfigInfo.stream_sizes[i].width,
14205 mStreamConfigInfo.stream_sizes[i].height,
14206 mStreamConfigInfo.postprocess_mask[i],
14207 mStreamConfigInfo.format[i]);
14208 }
14209
14210 /* Send meta stream info once again so that ISP can start */
14211 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
14212 CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
14213 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
14214 mParameters);
14215 if (rc < 0) {
14216 LOGE("set Metastreaminfo failed. Sensor mode does not change");
14217 }
14218
14219 rc = startAllChannels();
14220 if (rc < 0) {
14221 LOGE("startAllChannels failed");
14222 return rc;
14223 }
14224
14225 LOGD("X");
14226 return rc;
14227}
14228
14229/*===========================================================================
14230 * FUNCTION : stopAllChannels
14231 *
14232 * DESCRIPTION: This function stops (equivalent to stream-off) all channels
14233 *
14234 * PARAMETERS : None
14235 *
14236 * RETURN : NO_ERROR on success
14237 * Error codes on failure
14238 *
14239 *==========================================================================*/
14240int32_t QCamera3HardwareInterface::stopAllChannels()
14241{
14242 int32_t rc = NO_ERROR;
14243
14244 LOGD("Stopping all channels");
14245 // Stop the Streams/Channels
14246 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
14247 it != mStreamInfo.end(); it++) {
14248 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
14249 if (channel) {
14250 channel->stop();
14251 }
14252 (*it)->status = INVALID;
14253 }
14254
14255 if (mSupportChannel) {
14256 mSupportChannel->stop();
14257 }
14258 if (mAnalysisChannel) {
14259 mAnalysisChannel->stop();
14260 }
14261 if (mRawDumpChannel) {
14262 mRawDumpChannel->stop();
14263 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014264 if (mHdrPlusRawSrcChannel) {
14265 mHdrPlusRawSrcChannel->stop();
14266 }
Thierry Strudel3d639192016-09-09 11:52:26 -070014267 if (mMetadataChannel) {
14268        /* If mStreamInfo is not empty, the metadata stream exists */
14269 mMetadataChannel->stop();
14270 }
14271
14272 LOGD("All channels stopped");
14273 return rc;
14274}
14275
14276/*===========================================================================
14277 * FUNCTION : startAllChannels
14278 *
14279 * DESCRIPTION: This function starts (equivalent to stream-on) all channels
14280 *
14281 * PARAMETERS : None
14282 *
14283 * RETURN : NO_ERROR on success
14284 * Error codes on failure
14285 *
14286 *==========================================================================*/
14287int32_t QCamera3HardwareInterface::startAllChannels()
14288{
14289 int32_t rc = NO_ERROR;
14290
14291 LOGD("Start all channels ");
14292 // Start the Streams/Channels
14293 if (mMetadataChannel) {
14294        /* If mStreamInfo is not empty, the metadata stream exists */
14295 rc = mMetadataChannel->start();
14296 if (rc < 0) {
14297 LOGE("META channel start failed");
14298 return rc;
14299 }
14300 }
14301 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
14302 it != mStreamInfo.end(); it++) {
14303 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
14304 if (channel) {
14305 rc = channel->start();
14306 if (rc < 0) {
14307 LOGE("channel start failed");
14308 return rc;
14309 }
14310 }
14311 }
14312 if (mAnalysisChannel) {
14313 mAnalysisChannel->start();
14314 }
14315 if (mSupportChannel) {
14316 rc = mSupportChannel->start();
14317 if (rc < 0) {
14318 LOGE("Support channel start failed");
14319 return rc;
14320 }
14321 }
14322 if (mRawDumpChannel) {
14323 rc = mRawDumpChannel->start();
14324 if (rc < 0) {
14325 LOGE("RAW dump channel start failed");
14326 return rc;
14327 }
14328 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014329 if (mHdrPlusRawSrcChannel) {
14330 rc = mHdrPlusRawSrcChannel->start();
14331 if (rc < 0) {
14332 LOGE("HDR+ RAW channel start failed");
14333 return rc;
14334 }
14335 }
Thierry Strudel3d639192016-09-09 11:52:26 -070014336
14337 LOGD("All channels started");
14338 return rc;
14339}
14340
14341/*===========================================================================
14342 * FUNCTION : notifyErrorForPendingRequests
14343 *
14344 * DESCRIPTION: This function sends error for all the pending requests/buffers
14345 *
14346 * PARAMETERS : None
14347 *
14348 * RETURN : Error codes
14349 * NO_ERROR on success
14350 *
14351 *==========================================================================*/
14352int32_t QCamera3HardwareInterface::notifyErrorForPendingRequests()
14353{
Emilian Peev7650c122017-01-19 08:24:33 -080014354 notifyErrorFoPendingDepthData(mDepthChannel);
14355
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014356 auto pendingRequest = mPendingRequestsList.begin();
14357 auto pendingBuffer = mPendingBuffersMap.mPendingBuffersInRequest.begin();
Thierry Strudel3d639192016-09-09 11:52:26 -070014358
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014359 // Iterate through pending requests (for which result metadata isn't sent yet) and pending
14360 // buffers (for which buffers aren't sent yet).
14361 while (pendingRequest != mPendingRequestsList.end() ||
14362 pendingBuffer != mPendingBuffersMap.mPendingBuffersInRequest.end()) {
14363 if (pendingRequest == mPendingRequestsList.end() ||
14364 pendingBuffer->frame_number < pendingRequest->frame_number) {
14365            // If metadata for this frame was sent, notify about a buffer error and return buffers
14366 // with error.
14367 for (auto &info : pendingBuffer->mPendingBufferList) {
14368 // Send a buffer error for this frame number.
Thierry Strudel3d639192016-09-09 11:52:26 -070014369 camera3_notify_msg_t notify_msg;
14370 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
14371 notify_msg.type = CAMERA3_MSG_ERROR;
14372 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014373 notify_msg.message.error.error_stream = info.stream;
14374 notify_msg.message.error.frame_number = pendingBuffer->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014375 orchestrateNotify(&notify_msg);
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014376
14377 camera3_stream_buffer_t buffer = {};
14378 buffer.acquire_fence = -1;
14379 buffer.release_fence = -1;
14380 buffer.buffer = info.buffer;
14381 buffer.status = CAMERA3_BUFFER_STATUS_ERROR;
14382 buffer.stream = info.stream;
14383 mOutputBufferDispatcher.markBufferReady(pendingBuffer->frame_number, buffer);
Thierry Strudel3d639192016-09-09 11:52:26 -070014384 }
14385
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014386 pendingBuffer = mPendingBuffersMap.mPendingBuffersInRequest.erase(pendingBuffer);
14387 } else if (pendingBuffer == mPendingBuffersMap.mPendingBuffersInRequest.end() ||
14388 pendingBuffer->frame_number > pendingRequest->frame_number) {
14389 // If the buffers for this frame were sent already, notify about a result error.
Thierry Strudel3d639192016-09-09 11:52:26 -070014390 camera3_notify_msg_t notify_msg;
14391 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
14392 notify_msg.type = CAMERA3_MSG_ERROR;
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014393 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_RESULT;
14394 notify_msg.message.error.error_stream = nullptr;
14395 notify_msg.message.error.frame_number = pendingRequest->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014396 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -070014397
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014398 if (pendingRequest->input_buffer != nullptr) {
14399 camera3_capture_result result = {};
14400 result.frame_number = pendingRequest->frame_number;
14401 result.result = nullptr;
14402 result.input_buffer = pendingRequest->input_buffer;
14403 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -070014404 }
14405
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014406 mShutterDispatcher.clear(pendingRequest->frame_number);
14407 pendingRequest = mPendingRequestsList.erase(pendingRequest);
14408 } else {
14409 // If both buffers and result metadata weren't sent yet, notify about a request error
14410 // and return buffers with error.
14411 for (auto &info : pendingBuffer->mPendingBufferList) {
14412 camera3_notify_msg_t notify_msg;
14413 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
14414 notify_msg.type = CAMERA3_MSG_ERROR;
14415 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_REQUEST;
14416 notify_msg.message.error.error_stream = info.stream;
14417 notify_msg.message.error.frame_number = pendingBuffer->frame_number;
14418 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -070014419
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014420 camera3_stream_buffer_t buffer = {};
14421 buffer.acquire_fence = -1;
14422 buffer.release_fence = -1;
14423 buffer.buffer = info.buffer;
14424 buffer.status = CAMERA3_BUFFER_STATUS_ERROR;
14425 buffer.stream = info.stream;
14426 mOutputBufferDispatcher.markBufferReady(pendingBuffer->frame_number, buffer);
14427 }
14428
14429 if (pendingRequest->input_buffer != nullptr) {
14430 camera3_capture_result result = {};
14431 result.frame_number = pendingRequest->frame_number;
14432 result.result = nullptr;
14433 result.input_buffer = pendingRequest->input_buffer;
14434 orchestrateResult(&result);
14435 }
14436
14437 mShutterDispatcher.clear(pendingRequest->frame_number);
14438 pendingBuffer = mPendingBuffersMap.mPendingBuffersInRequest.erase(pendingBuffer);
14439 pendingRequest = mPendingRequestsList.erase(pendingRequest);
Thierry Strudel3d639192016-09-09 11:52:26 -070014440 }
14441 }
14442
14443 /* Reset pending frame Drop list and requests list */
14444 mPendingFrameDropList.clear();
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014445 mShutterDispatcher.clear();
14446 mOutputBufferDispatcher.clear(/*clearConfiguredStreams*/false);
Thierry Strudel3d639192016-09-09 11:52:26 -070014447 mPendingBuffersMap.mPendingBuffersInRequest.clear();
Emilian Peev30522a12017-08-03 14:36:33 +010014448 mExpectedFrameDuration = 0;
14449 mExpectedInflightDuration = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -070014450 LOGH("Cleared all the pending buffers ");
14451
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014452 return NO_ERROR;
Thierry Strudel3d639192016-09-09 11:52:26 -070014453}
14454
14455bool QCamera3HardwareInterface::isOnEncoder(
14456 const cam_dimension_t max_viewfinder_size,
14457 uint32_t width, uint32_t height)
14458{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014459 return ((width > (uint32_t)max_viewfinder_size.width) ||
14460 (height > (uint32_t)max_viewfinder_size.height) ||
14461 (width > (uint32_t)VIDEO_4K_WIDTH) ||
14462 (height > (uint32_t)VIDEO_4K_HEIGHT));
Thierry Strudel3d639192016-09-09 11:52:26 -070014463}
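
/*
 * Illustrative example only; not part of the HAL and excluded from the
 * build. The max viewfinder size below is a made-up value; the calls just
 * show how the predicate above classifies stream sizes.
 */
#if 0
static void isOnEncoderExample(QCamera3HardwareInterface *hw)
{
    cam_dimension_t maxViewfinder = {1920, 1080};               // hypothetical capability
    bool snapshot = hw->isOnEncoder(maxViewfinder, 4032, 3024); // true: exceeds viewfinder size
    bool preview  = hw->isOnEncoder(maxViewfinder, 1280, 720);  // false: fits viewfinder, below 4K
    (void)snapshot; (void)preview;
}
#endif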
14464
14465/*===========================================================================
14466 * FUNCTION : setBundleInfo
14467 *
14468 * DESCRIPTION: Set bundle info for all streams that are bundled.
14469 *
14470 * PARAMETERS : None
14471 *
14472 * RETURN : NO_ERROR on success
14473 * Error codes on failure
14474 *==========================================================================*/
14475int32_t QCamera3HardwareInterface::setBundleInfo()
14476{
14477 int32_t rc = NO_ERROR;
14478
14479 if (mChannelHandle) {
14480 cam_bundle_config_t bundleInfo;
14481 memset(&bundleInfo, 0, sizeof(bundleInfo));
14482 rc = mCameraHandle->ops->get_bundle_info(
14483 mCameraHandle->camera_handle, mChannelHandle, &bundleInfo);
14484 if (rc != NO_ERROR) {
14485 LOGE("get_bundle_info failed");
14486 return rc;
14487 }
14488 if (mAnalysisChannel) {
14489 mAnalysisChannel->setBundleInfo(bundleInfo);
14490 }
14491 if (mSupportChannel) {
14492 mSupportChannel->setBundleInfo(bundleInfo);
14493 }
14494 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
14495 it != mStreamInfo.end(); it++) {
14496 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
14497 channel->setBundleInfo(bundleInfo);
14498 }
14499 if (mRawDumpChannel) {
14500 mRawDumpChannel->setBundleInfo(bundleInfo);
14501 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014502 if (mHdrPlusRawSrcChannel) {
14503 mHdrPlusRawSrcChannel->setBundleInfo(bundleInfo);
14504 }
Thierry Strudel3d639192016-09-09 11:52:26 -070014505 }
14506
14507 return rc;
14508}
14509
14510/*===========================================================================
Thierry Strudel295a0ca2016-11-03 18:38:47 -070014511 * FUNCTION : setInstantAEC
14512 *
14513 * DESCRIPTION: Set Instant AEC related params.
14514 *
14515 * PARAMETERS :
14516 * @meta: CameraMetadata reference
14517 *
14518 * RETURN : NO_ERROR on success
14519 * Error codes on failure
14520 *==========================================================================*/
14521int32_t QCamera3HardwareInterface::setInstantAEC(const CameraMetadata &meta)
14522{
14523 int32_t rc = NO_ERROR;
14524 uint8_t val = 0;
14525 char prop[PROPERTY_VALUE_MAX];
14526
14527 // First try to configure instant AEC from framework metadata
14528 if (meta.exists(QCAMERA3_INSTANT_AEC_MODE)) {
14529 val = (uint8_t)meta.find(QCAMERA3_INSTANT_AEC_MODE).data.i32[0];
14530 }
14531
14532 // If framework did not set this value, try to read from set prop.
14533 if (val == 0) {
14534 memset(prop, 0, sizeof(prop));
14535 property_get("persist.camera.instant.aec", prop, "0");
14536 val = (uint8_t)atoi(prop);
14537 }
14538
14539 if ((val >= (uint8_t)CAM_AEC_NORMAL_CONVERGENCE) &&
14540 ( val < (uint8_t)CAM_AEC_CONVERGENCE_MAX)) {
14541 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_INSTANT_AEC, val);
14542 mInstantAEC = val;
14543 mInstantAECSettledFrameNumber = 0;
14544 mInstantAecFrameIdxCount = 0;
14545 LOGH("instantAEC value set %d",val);
14546 if (mInstantAEC) {
14547 memset(prop, 0, sizeof(prop));
14548 property_get("persist.camera.ae.instant.bound", prop, "10");
14549 int32_t aec_frame_skip_cnt = atoi(prop);
14550 if (aec_frame_skip_cnt >= 0) {
14551 mAecSkipDisplayFrameBound = (uint8_t)aec_frame_skip_cnt;
14552 } else {
14553 LOGE("Invalid prop for aec frame bound %d", aec_frame_skip_cnt);
14554 rc = BAD_VALUE;
14555 }
14556 }
14557 } else {
14558 LOGE("Bad instant aec value set %d", val);
14559 rc = BAD_VALUE;
14560 }
14561 return rc;
14562}
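
/*
 * Illustrative usage sketch only; not part of the HAL and excluded from the
 * build. Instant AEC can be requested either through the vendor tag read
 * above or, for bring-up, through the setprops the fallback path reads. The
 * exact convergence enum value is an assumption; setInstantAEC() validates
 * it against cam_aec_convergence_type.
 */
#if 0
static void requestInstantAecExample(CameraMetadata &settings)
{
    int32_t aecMode = 1;    // assumed non-zero convergence mode from cam_aec_convergence_type
    settings.update(QCAMERA3_INSTANT_AEC_MODE, &aecMode, 1);
}
// Equivalent bring-up overrides from a shell:
//   adb shell setprop persist.camera.instant.aec 1
//   adb shell setprop persist.camera.ae.instant.bound 10
#endif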
14563
14564/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070014565 * FUNCTION : get_num_overall_buffers
14566 *
14567 * DESCRIPTION: Estimate number of pending buffers across all requests.
14568 *
14569 * PARAMETERS : None
14570 *
14571 * RETURN : Number of overall pending buffers
14572 *
14573 *==========================================================================*/
14574uint32_t PendingBuffersMap::get_num_overall_buffers()
14575{
14576 uint32_t sum_buffers = 0;
14577 for (auto &req : mPendingBuffersInRequest) {
14578 sum_buffers += req.mPendingBufferList.size();
14579 }
14580 return sum_buffers;
14581}
14582
14583/*===========================================================================
14584 * FUNCTION : removeBuf
14585 *
14586 * DESCRIPTION: Remove a matching buffer from tracker.
14587 *
14588 * PARAMETERS : @buffer: image buffer for the callback
14589 *
14590 * RETURN : None
14591 *
14592 *==========================================================================*/
14593void PendingBuffersMap::removeBuf(buffer_handle_t *buffer)
14594{
14595 bool buffer_found = false;
14596 for (auto req = mPendingBuffersInRequest.begin();
14597 req != mPendingBuffersInRequest.end(); req++) {
14598 for (auto k = req->mPendingBufferList.begin();
14599 k != req->mPendingBufferList.end(); k++ ) {
14600 if (k->buffer == buffer) {
14601 LOGD("Frame %d: Found Frame buffer %p, take it out from mPendingBufferList",
14602 req->frame_number, buffer);
14603 k = req->mPendingBufferList.erase(k);
14604 if (req->mPendingBufferList.empty()) {
14605 // Remove this request from Map
14606 req = mPendingBuffersInRequest.erase(req);
14607 }
14608 buffer_found = true;
14609 break;
14610 }
14611 }
14612 if (buffer_found) {
14613 break;
14614 }
14615 }
14616 LOGD("mPendingBuffersMap.num_overall_buffers = %d",
14617 get_num_overall_buffers());
14618}
14619
14620/*===========================================================================
Thierry Strudelc2ee3302016-11-17 12:33:12 -080014621 * FUNCTION : getBufErrStatus
14622 *
14623 * DESCRIPTION: get buffer error status
14624 *
14625 * PARAMETERS : @buffer: buffer handle
14626 *
14627 * RETURN : Error status
14628 *
14629 *==========================================================================*/
14630int32_t PendingBuffersMap::getBufErrStatus(buffer_handle_t *buffer)
14631{
14632 for (auto& req : mPendingBuffersInRequest) {
14633 for (auto& k : req.mPendingBufferList) {
14634 if (k.buffer == buffer)
14635 return k.bufStatus;
14636 }
14637 }
14638 return CAMERA3_BUFFER_STATUS_OK;
14639}
14640
14641/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070014642 * FUNCTION : setPAAFSupport
14643 *
14644 * DESCRIPTION: Set the preview-assisted auto focus support bit in
14645 * feature mask according to stream type and filter
14646 * arrangement
14647 *
14648 * PARAMETERS : @feature_mask: current feature mask, which may be modified
14649 * @stream_type: stream type
14650 * @filter_arrangement: filter arrangement
14651 *
14652 * RETURN : None
14653 *==========================================================================*/
14654void QCamera3HardwareInterface::setPAAFSupport(
14655 cam_feature_mask_t& feature_mask,
14656 cam_stream_type_t stream_type,
14657 cam_color_filter_arrangement_t filter_arrangement)
14658{
Thierry Strudel3d639192016-09-09 11:52:26 -070014659 switch (filter_arrangement) {
14660 case CAM_FILTER_ARRANGEMENT_RGGB:
14661 case CAM_FILTER_ARRANGEMENT_GRBG:
14662 case CAM_FILTER_ARRANGEMENT_GBRG:
14663 case CAM_FILTER_ARRANGEMENT_BGGR:
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014664 if ((stream_type == CAM_STREAM_TYPE_PREVIEW) ||
14665 (stream_type == CAM_STREAM_TYPE_ANALYSIS) ||
Thierry Strudel3d639192016-09-09 11:52:26 -070014666 (stream_type == CAM_STREAM_TYPE_VIDEO)) {
Thierry Strudel2896d122017-02-23 19:18:03 -080014667 if (!(feature_mask & CAM_QTI_FEATURE_PPEISCORE))
14668 feature_mask |= CAM_QCOM_FEATURE_PAAF;
Thierry Strudel3d639192016-09-09 11:52:26 -070014669 }
14670 break;
14671 case CAM_FILTER_ARRANGEMENT_Y:
14672 if (stream_type == CAM_STREAM_TYPE_ANALYSIS) {
14673 feature_mask |= CAM_QCOM_FEATURE_PAAF;
14674 }
14675 break;
14676 default:
14677 break;
14678 }
Shuzhen Wang3b457d92016-08-03 08:46:59 -070014679 LOGD("feature_mask=0x%llx; stream_type=%d, filter_arrangement=%d",
14680 feature_mask, stream_type, filter_arrangement);
14681
14682
Thierry Strudel3d639192016-09-09 11:52:26 -070014683}
14684
14685/*===========================================================================
14686* FUNCTION : getSensorMountAngle
14687*
14688* DESCRIPTION: Retrieve sensor mount angle
14689*
14690* PARAMETERS : None
14691*
14692* RETURN : sensor mount angle in uint32_t
14693*==========================================================================*/
14694uint32_t QCamera3HardwareInterface::getSensorMountAngle()
14695{
14696 return gCamCapability[mCameraId]->sensor_mount_angle;
14697}
14698
14699/*===========================================================================
14700* FUNCTION : getRelatedCalibrationData
14701*
14702* DESCRIPTION: Retrieve related system calibration data
14703*
14704* PARAMETERS : None
14705*
14706* RETURN : Pointer of related system calibration data
14707*==========================================================================*/
14708const cam_related_system_calibration_data_t *QCamera3HardwareInterface::getRelatedCalibrationData()
14709{
14710 return (const cam_related_system_calibration_data_t *)
14711 &(gCamCapability[mCameraId]->related_cam_calibration);
14712}
Shuzhen Wangf6890e02016-08-12 14:28:54 -070014713
14714/*===========================================================================
14715 * FUNCTION : is60HzZone
14716 *
14717 * DESCRIPTION: Whether the phone is in a zone with 60Hz mains electricity frequency
14718 *
14719 * PARAMETERS : None
14720 *
14721 * RETURN : True if in 60Hz zone, False otherwise
14722 *==========================================================================*/
14723bool QCamera3HardwareInterface::is60HzZone()
14724{
14725 time_t t = time(NULL);
14726 struct tm lt;
14727
14728 struct tm* r = localtime_r(&t, &lt);
14729
14730 if (r == NULL || lt.tm_gmtoff <= -2*60*60 || lt.tm_gmtoff >= 8*60*60)
14731 return true;
14732 else
14733 return false;
14734}
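
/*
 * Illustrative note only; not part of the HAL and excluded from the build.
 * The heuristic above classifies UTC offsets of -2h and below, and +8h and
 * above, as 60Hz zones and everything in between as 50Hz; it also defaults
 * to 60Hz when localtime_r() fails. The helper below just restates that
 * check for a given tm_gmtoff value.
 */
#if 0
static bool is60HzForGmtOffset(long gmtoffSeconds)
{
    return (gmtoffSeconds <= -2 * 60 * 60) || (gmtoffSeconds >= 8 * 60 * 60);
}
// is60HzForGmtOffset(-5 * 3600) -> true  (e.g. UTC-5, US East Coast)
// is60HzForGmtOffset( 1 * 3600) -> false (e.g. UTC+1, Central Europe)
// is60HzForGmtOffset( 9 * 3600) -> true  (UTC+9)
#endif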
Shuzhen Wanga5da1022016-07-13 20:18:42 -070014735
14736/*===========================================================================
14737 * FUNCTION : adjustBlackLevelForCFA
14738 *
14739 * DESCRIPTION: Adjust the black level pattern in the order of RGGB to the order
14740 * of bayer CFA (Color Filter Array).
14741 *
14742 * PARAMETERS : @input: black level pattern in the order of RGGB
14743 * @output: black level pattern in the order of CFA
14744 * @color_arrangement: CFA color arrangement
14745 *
14746 * RETURN : None
14747 *==========================================================================*/
14748template<typename T>
14749void QCamera3HardwareInterface::adjustBlackLevelForCFA(
14750 T input[BLACK_LEVEL_PATTERN_CNT],
14751 T output[BLACK_LEVEL_PATTERN_CNT],
14752 cam_color_filter_arrangement_t color_arrangement)
14753{
14754 switch (color_arrangement) {
14755 case CAM_FILTER_ARRANGEMENT_GRBG:
14756 output[0] = input[1];
14757 output[1] = input[0];
14758 output[2] = input[3];
14759 output[3] = input[2];
14760 break;
14761 case CAM_FILTER_ARRANGEMENT_GBRG:
14762 output[0] = input[2];
14763 output[1] = input[3];
14764 output[2] = input[0];
14765 output[3] = input[1];
14766 break;
14767 case CAM_FILTER_ARRANGEMENT_BGGR:
14768 output[0] = input[3];
14769 output[1] = input[2];
14770 output[2] = input[1];
14771 output[3] = input[0];
14772 break;
14773 case CAM_FILTER_ARRANGEMENT_RGGB:
14774 output[0] = input[0];
14775 output[1] = input[1];
14776 output[2] = input[2];
14777 output[3] = input[3];
14778 break;
14779 default:
14780 LOGE("Invalid color arrangement to derive dynamic blacklevel");
14781 break;
14782 }
14783}
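
/*
 * Illustrative worked example only; not part of the HAL and excluded from
 * the build. For a GRBG sensor, an RGGB-ordered black level pattern
 * {R, Gr, Gb, B} is remapped by the switch above to CFA order
 * {Gr, R, B, Gb}. The numeric values are made up for illustration.
 */
#if 0
static void blackLevelReorderExample(QCamera3HardwareInterface *hw)
{
    float rggb[BLACK_LEVEL_PATTERN_CNT] = {64.0f, 65.0f, 66.0f, 67.0f}; // R, Gr, Gb, B
    float cfa[BLACK_LEVEL_PATTERN_CNT] = {};
    hw->adjustBlackLevelForCFA(rggb, cfa, CAM_FILTER_ARRANGEMENT_GRBG);
    // cfa is now {65.0f, 64.0f, 67.0f, 66.0f}, i.e. Gr, R, B, Gb.
}
#endif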
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014784
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014785void QCamera3HardwareInterface::updateHdrPlusResultMetadata(
14786 CameraMetadata &resultMetadata,
14787 std::shared_ptr<metadata_buffer_t> settings)
14788{
14789 if (settings == nullptr) {
14790 ALOGE("%s: settings is nullptr.", __FUNCTION__);
14791 return;
14792 }
14793
14794 IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, settings) {
14795 resultMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
14796 }
14797
14798 IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, settings) {
14799 String8 str((const char *)gps_methods);
14800 resultMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
14801 }
14802
14803 IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, settings) {
14804 resultMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
14805 }
14806
14807 IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, settings) {
14808 resultMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
14809 }
14810
14811 IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, settings) {
14812 uint8_t fwk_jpeg_quality = static_cast<uint8_t>(*jpeg_quality);
14813 resultMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
14814 }
14815
14816 IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, settings) {
14817 uint8_t fwk_thumb_quality = static_cast<uint8_t>(*thumb_quality);
14818 resultMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
14819 }
14820
14821 IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, settings) {
14822 int32_t fwk_thumb_size[2];
14823 fwk_thumb_size[0] = thumb_size->width;
14824 fwk_thumb_size[1] = thumb_size->height;
14825 resultMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
14826 }
14827
14828 IF_META_AVAILABLE(uint32_t, intent, CAM_INTF_META_CAPTURE_INTENT, settings) {
14829 uint8_t fwk_intent = intent[0];
14830 resultMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &fwk_intent, 1);
14831 }
14832}
14833
Chien-Yu Chenad9b6632017-08-22 19:09:23 -070014834bool QCamera3HardwareInterface::isRequestHdrPlusCompatible(
14835 const camera3_capture_request_t &request, const CameraMetadata &metadata) {
Chien-Yu Chenec328c82017-08-30 16:41:08 -070014836 if (metadata.exists(NEXUS_EXPERIMENTAL_2017_DISABLE_HDRPLUS) &&
14837 metadata.find(NEXUS_EXPERIMENTAL_2017_DISABLE_HDRPLUS).data.i32[0] == 1) {
14838 ALOGV("%s: NEXUS_EXPERIMENTAL_2017_DISABLE_HDRPLUS is 1", __FUNCTION__);
14839 return false;
14840 }
14841
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014842 if (!metadata.exists(ANDROID_NOISE_REDUCTION_MODE) ||
14843 metadata.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0] !=
14844 ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY) {
Chien-Yu Chenad9b6632017-08-22 19:09:23 -070014845 ALOGV("%s: ANDROID_NOISE_REDUCTION_MODE is not HQ: %d", __FUNCTION__,
Chien-Yu Chenee335912017-02-09 17:53:20 -080014846 metadata.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0]);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014847 return false;
14848 }
14849
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014850 if (!metadata.exists(ANDROID_EDGE_MODE) ||
Chien-Yu Chenad9b6632017-08-22 19:09:23 -070014851 metadata.find(ANDROID_EDGE_MODE).data.u8[0] != ANDROID_EDGE_MODE_HIGH_QUALITY) {
14852 ALOGV("%s: ANDROID_EDGE_MODE is not HQ.", __FUNCTION__);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014853 return false;
14854 }
14855
Chien-Yu Chenad9b6632017-08-22 19:09:23 -070014856 if (!metadata.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE) ||
14857 metadata.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0] !=
14858 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY) {
14859 ALOGV("%s: ANDROID_COLOR_CORRECTION_ABERRATION_MODE is not HQ.", __FUNCTION__);
14860 return false;
14861 }
14862
14863 if (!metadata.exists(ANDROID_CONTROL_AE_MODE) ||
14864 (metadata.find(ANDROID_CONTROL_AE_MODE).data.u8[0] != ANDROID_CONTROL_AE_MODE_ON &&
14865 metadata.find(ANDROID_CONTROL_AE_MODE).data.u8[0] !=
14866 ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH)) {
14867 ALOGV("%s: ANDROID_CONTROL_AE_MODE is not ON or ON_AUTO_FLASH.", __FUNCTION__);
14868 return false;
14869 }
14870
14871 if (!metadata.exists(ANDROID_CONTROL_AWB_MODE) ||
14872 metadata.find(ANDROID_CONTROL_AWB_MODE).data.u8[0] != ANDROID_CONTROL_AWB_MODE_AUTO) {
14873 ALOGV("%s: ANDROID_CONTROL_AWB_MODE is not AUTO.", __FUNCTION__);
14874 return false;
14875 }
14876
14877 if (!metadata.exists(ANDROID_CONTROL_EFFECT_MODE) ||
14878 metadata.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0] !=
14879 ANDROID_CONTROL_EFFECT_MODE_OFF) {
14880        ALOGV("%s: ANDROID_CONTROL_EFFECT_MODE is not OFF.", __FUNCTION__);
14881 return false;
14882 }
14883
14884 if (!metadata.exists(ANDROID_CONTROL_MODE) ||
14885 (metadata.find(ANDROID_CONTROL_MODE).data.u8[0] != ANDROID_CONTROL_MODE_AUTO &&
14886 metadata.find(ANDROID_CONTROL_MODE).data.u8[0] !=
14887 ANDROID_CONTROL_MODE_USE_SCENE_MODE)) {
14888 ALOGV("%s: ANDROID_CONTROL_MODE is not AUTO or USE_SCENE_MODE.", __FUNCTION__);
14889 return false;
14890 }
14891
14892 // TODO (b/32585046): support non-ZSL.
14893 if (!metadata.exists(ANDROID_CONTROL_ENABLE_ZSL) ||
14894 metadata.find(ANDROID_CONTROL_ENABLE_ZSL).data.u8[0] != ANDROID_CONTROL_ENABLE_ZSL_TRUE) {
14895 ALOGV("%s: ANDROID_CONTROL_ENABLE_ZSL is not true.", __FUNCTION__);
14896 return false;
14897 }
14898
14899 // TODO (b/32586081): support flash.
14900 if (!metadata.exists(ANDROID_FLASH_MODE) ||
14901 metadata.find(ANDROID_FLASH_MODE).data.u8[0] != ANDROID_FLASH_MODE_OFF) {
14902 ALOGV("%s: ANDROID_FLASH_MODE is not OFF.", __FUNCTION__);
14903 return false;
14904 }
14905
Chien-Yu Chenad9b6632017-08-22 19:09:23 -070014906 if (!metadata.exists(ANDROID_TONEMAP_MODE) ||
14907 metadata.find(ANDROID_TONEMAP_MODE).data.u8[0] != ANDROID_TONEMAP_MODE_HIGH_QUALITY) {
14908 ALOGV("%s: ANDROID_TONEMAP_MODE is not HQ.", __FUNCTION__);
14909 return false;
14910 }
14911
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070014912
Chien-Yu Chenad9b6632017-08-22 19:09:23 -070014913 // TODO (b/36693254, b/36690506): support other outputs.
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070014914 if (!gEnableMultipleHdrplusOutputs && request.num_output_buffers != 1) {
14915 ALOGV("%s: Only support 1 output: %d", __FUNCTION__, request.num_output_buffers);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014916 return false;
14917 }
14918
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070014919 switch (request.output_buffers[0].stream->format) {
14920 case HAL_PIXEL_FORMAT_BLOB:
14921 break;
14922 case HAL_PIXEL_FORMAT_YCbCr_420_888:
14923 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
14924 // TODO (b/36693254): Only support full size.
14925 if (!gEnableMultipleHdrplusOutputs) {
14926 if (static_cast<int>(request.output_buffers[0].stream->width) !=
14927 gCamCapability[mCameraId]->picture_sizes_tbl[0].width ||
14928 static_cast<int>(request.output_buffers[0].stream->height) !=
14929 gCamCapability[mCameraId]->picture_sizes_tbl[0].height) {
14930 ALOGV("%s: Only full size is supported.", __FUNCTION__);
14931 return false;
14932 }
14933 }
14934 break;
14935 default:
14936 ALOGV("%s: Not an HDR+ request: Only Jpeg and YUV output is supported.", __FUNCTION__);
14937 for (uint32_t i = 0; i < request.num_output_buffers; i++) {
14938 ALOGV("%s: output_buffers[%u]: %dx%d format %d", __FUNCTION__, i,
14939                    request.output_buffers[i].stream->width,
14940                    request.output_buffers[i].stream->height,
14941                    request.output_buffers[i].stream->format);
14942 }
14943 return false;
14944 }
14945
Chien-Yu Chenad9b6632017-08-22 19:09:23 -070014946 return true;
14947}
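
/*
 * Illustrative sketch only; not part of the HAL and excluded from the build.
 * It lists one set of framework settings that satisfies every metadata check
 * in isRequestHdrPlusCompatible() above (the request must also target a
 * single JPEG or full-size YUV output unless multiple HDR+ outputs are
 * enabled).
 */
#if 0
static void makeHdrPlusCompatibleSettings(CameraMetadata &settings)
{
    uint8_t nr    = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY;
    uint8_t edge  = ANDROID_EDGE_MODE_HIGH_QUALITY;
    uint8_t cac   = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY;
    uint8_t ae    = ANDROID_CONTROL_AE_MODE_ON;
    uint8_t awb   = ANDROID_CONTROL_AWB_MODE_AUTO;
    uint8_t fx    = ANDROID_CONTROL_EFFECT_MODE_OFF;
    uint8_t mode  = ANDROID_CONTROL_MODE_AUTO;
    uint8_t zsl   = ANDROID_CONTROL_ENABLE_ZSL_TRUE;
    uint8_t flash = ANDROID_FLASH_MODE_OFF;
    uint8_t tone  = ANDROID_TONEMAP_MODE_HIGH_QUALITY;
    settings.update(ANDROID_NOISE_REDUCTION_MODE, &nr, 1);
    settings.update(ANDROID_EDGE_MODE, &edge, 1);
    settings.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &cac, 1);
    settings.update(ANDROID_CONTROL_AE_MODE, &ae, 1);
    settings.update(ANDROID_CONTROL_AWB_MODE, &awb, 1);
    settings.update(ANDROID_CONTROL_EFFECT_MODE, &fx, 1);
    settings.update(ANDROID_CONTROL_MODE, &mode, 1);
    settings.update(ANDROID_CONTROL_ENABLE_ZSL, &zsl, 1);
    settings.update(ANDROID_FLASH_MODE, &flash, 1);
    settings.update(ANDROID_TONEMAP_MODE, &tone, 1);
}
#endif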
14948
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070014949void QCamera3HardwareInterface::abortPendingHdrplusRequest(HdrPlusPendingRequest *hdrPlusRequest) {
14950 if (hdrPlusRequest == nullptr) return;
14951
14952 for (auto & outputBufferIter : hdrPlusRequest->outputBuffers) {
14953 // Find the stream for this buffer.
14954 for (auto streamInfo : mStreamInfo) {
14955 if (streamInfo->id == outputBufferIter.first) {
14956 if (streamInfo->channel == mPictureChannel) {
14957 // For picture channel, this buffer is internally allocated so return this
14958 // buffer to picture channel.
14959 mPictureChannel->returnYuvBuffer(outputBufferIter.second.get());
14960 } else {
14961 // Unregister this buffer for other channels.
14962 streamInfo->channel->unregisterBuffer(outputBufferIter.second.get());
14963 }
14964 break;
14965 }
14966 }
14967 }
14968
14969 hdrPlusRequest->outputBuffers.clear();
14970 hdrPlusRequest->frameworkOutputBuffers.clear();
14971}
14972
Chien-Yu Chenad9b6632017-08-22 19:09:23 -070014973bool QCamera3HardwareInterface::trySubmittingHdrPlusRequestLocked(
14974 HdrPlusPendingRequest *hdrPlusRequest, const camera3_capture_request_t &request,
14975 const CameraMetadata &metadata)
14976{
14977 if (hdrPlusRequest == nullptr) return false;
14978 if (!isRequestHdrPlusCompatible(request, metadata)) return false;
14979
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070014980 status_t res = OK;
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014981 pbcamera::CaptureRequest pbRequest;
14982 pbRequest.id = request.frame_number;
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070014983 // Iterate through all requested output buffers and add them to an HDR+ request.
14984 for (uint32_t i = 0; i < request.num_output_buffers; i++) {
14985 // Find the index of the stream in mStreamInfo.
14986 uint32_t pbStreamId = 0;
14987 bool found = false;
14988 for (auto streamInfo : mStreamInfo) {
14989 if (streamInfo->stream == request.output_buffers[i].stream) {
14990 pbStreamId = streamInfo->id;
14991 found = true;
14992 break;
14993 }
14994 }
14995
14996 if (!found) {
14997 ALOGE("%s: requested stream was not configured.", __FUNCTION__);
14998 abortPendingHdrplusRequest(hdrPlusRequest);
14999 return false;
15000 }
15001 auto outBuffer = std::make_shared<mm_camera_buf_def_t>();
15002 switch (request.output_buffers[i].stream->format) {
15003 case HAL_PIXEL_FORMAT_BLOB:
15004 {
15005 // For jpeg output, get a YUV buffer from pic channel.
15006 QCamera3PicChannel *picChannel =
15007 (QCamera3PicChannel*)request.output_buffers[i].stream->priv;
15008 res = picChannel->getYuvBufferForRequest(outBuffer.get(), request.frame_number);
15009 if (res != OK) {
15010 ALOGE("%s: Getting an available YUV buffer from pic channel failed: %s (%d)",
15011 __FUNCTION__, strerror(-res), res);
15012 abortPendingHdrplusRequest(hdrPlusRequest);
15013 return false;
15014 }
15015 break;
15016 }
15017 case HAL_PIXEL_FORMAT_YCbCr_420_888:
15018 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
15019 {
15020 // For YUV output, register the buffer and get the buffer def from the channel.
15021 QCamera3ProcessingChannel *channel =
15022 (QCamera3ProcessingChannel*)request.output_buffers[i].stream->priv;
15023 res = channel->registerBufferAndGetBufDef(request.output_buffers[i].buffer,
15024 outBuffer.get());
15025 if (res != OK) {
15026 ALOGE("%s: Getting the buffer def failed: %s (%d)", __FUNCTION__,
15027 strerror(-res), res);
15028 abortPendingHdrplusRequest(hdrPlusRequest);
15029 return false;
15030 }
15031 break;
15032 }
15033 default:
15034 abortPendingHdrplusRequest(hdrPlusRequest);
15035 return false;
15036 }
15037
15038 pbcamera::StreamBuffer buffer;
15039 buffer.streamId = pbStreamId;
15040 buffer.dmaBufFd = outBuffer->fd;
15041 buffer.data = outBuffer->fd == -1 ? outBuffer->buffer : nullptr;
15042 buffer.dataSize = outBuffer->frame_len;
15043
15044 pbRequest.outputBuffers.push_back(buffer);
15045
15046 hdrPlusRequest->outputBuffers.emplace(pbStreamId, outBuffer);
15047 hdrPlusRequest->frameworkOutputBuffers.emplace(pbStreamId, request.output_buffers[i]);
15048 }
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080015049
15050 // Submit an HDR+ capture request to HDR+ service.
Chien-Yu Chen17cec362017-07-05 17:10:31 -070015051 res = gHdrPlusClient->submitCaptureRequest(&pbRequest, metadata);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080015052 if (res != OK) {
15053 ALOGE("%s: %d: Submitting a capture request failed: %s (%d)", __FUNCTION__, __LINE__,
15054 strerror(-res), res);
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070015055 abortPendingHdrplusRequest(hdrPlusRequest);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080015056 return false;
15057 }
15058
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080015059 return true;
15060}
15061
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070015062status_t QCamera3HardwareInterface::openHdrPlusClientAsyncLocked()
15063{
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015064 if (gHdrPlusClientOpening || gHdrPlusClient != nullptr) {
15065 return OK;
15066 }
15067
Chien-Yu Chend77a5462017-06-02 18:00:38 -070015068 status_t res = gEaselManagerClient->openHdrPlusClientAsync(this);
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015069 if (res != OK) {
15070 ALOGE("%s: Opening HDR+ client asynchronously failed: %s (%d)", __FUNCTION__,
15071 strerror(-res), res);
15072 return res;
15073 }
15074 gHdrPlusClientOpening = true;
15075
15076 return OK;
15077}
15078
Chien-Yu Chenee335912017-02-09 17:53:20 -080015079status_t QCamera3HardwareInterface::enableHdrPlusModeLocked()
15080{
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070015081 status_t res;
Chien-Yu Chenee335912017-02-09 17:53:20 -080015082
Chien-Yu Chena6c99062017-05-23 13:45:06 -070015083 if (mHdrPlusModeEnabled) {
15084 return OK;
15085 }
15086
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015087 // Check if gHdrPlusClient is opened or being opened.
15088 if (gHdrPlusClient == nullptr) {
15089 if (gHdrPlusClientOpening) {
15090 // HDR+ client is being opened. HDR+ mode will be enabled when it's opened.
15091 return OK;
15092 }
15093
15094 res = openHdrPlusClientAsyncLocked();
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070015095 if (res != OK) {
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015096 ALOGE("%s: Failed to open HDR+ client asynchronously: %s (%d).", __FUNCTION__,
15097 strerror(-res), res);
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070015098 return res;
15099 }
15100
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015101 // When opening HDR+ client completes, HDR+ mode will be enabled.
15102 return OK;
15103
Chien-Yu Chenee335912017-02-09 17:53:20 -080015104 }
15105
15106 // Configure stream for HDR+.
15107 res = configureHdrPlusStreamsLocked();
15108 if (res != OK) {
15109 LOGE("%s: Failed to configure HDR+ streams: %s (%d)", __FUNCTION__, strerror(-res), res);
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070015110 return res;
15111 }
15112
15113 // Enable HDR+ mode so Easel will start capturing ZSL raw buffers.
15114 res = gHdrPlusClient->setZslHdrPlusMode(true);
15115 if (res != OK) {
15116 LOGE("%s: Failed to enable HDR+ mode: %s (%d)", __FUNCTION__, strerror(-res), res);
Chien-Yu Chenee335912017-02-09 17:53:20 -080015117 return res;
15118 }
15119
15120 mHdrPlusModeEnabled = true;
15121 ALOGD("%s: HDR+ mode enabled", __FUNCTION__);
15122
15123 return OK;
15124}
15125
Chien-Yu Chen77ccd022017-06-23 12:00:36 -070015126void QCamera3HardwareInterface::finishHdrPlusClientOpeningLocked(std::unique_lock<std::mutex> &lock)
15127{
15128 if (gHdrPlusClientOpening) {
15129 gHdrPlusClientOpenCond.wait(lock, [&] { return !gHdrPlusClientOpening; });
15130 }
15131}
15132
Chien-Yu Chenee335912017-02-09 17:53:20 -080015133void QCamera3HardwareInterface::disableHdrPlusModeLocked()
15134{
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070015135 // Disable HDR+ mode.
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080015136 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070015137 status_t res = gHdrPlusClient->setZslHdrPlusMode(false);
15138 if (res != OK) {
15139 ALOGE("%s: Failed to disable HDR+ mode: %s (%d)", __FUNCTION__, strerror(-res), res);
15140 }
Chien-Yu Chen3b17c672017-04-24 12:49:52 -070015141
15142 // Close HDR+ client so Easel can enter low power mode.
Chien-Yu Chend77a5462017-06-02 18:00:38 -070015143 gEaselManagerClient->closeHdrPlusClient(std::move(gHdrPlusClient));
Chien-Yu Chen3b17c672017-04-24 12:49:52 -070015144 gHdrPlusClient = nullptr;
Chien-Yu Chenee335912017-02-09 17:53:20 -080015145 }
15146
15147 mHdrPlusModeEnabled = false;
15148 ALOGD("%s: HDR+ mode disabled", __FUNCTION__);
15149}
15150
Chien-Yu Chendeaebad2017-06-30 11:46:34 -070015151bool QCamera3HardwareInterface::isSessionHdrPlusModeCompatible()
15152{
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070015153 // Check that at least one YUV or one JPEG output is configured.
15154 // TODO: Support RAW (b/36690506)
15155 for (auto streamInfo : mStreamInfo) {
15156 if (streamInfo != nullptr && streamInfo->stream != nullptr) {
15157 if (streamInfo->stream->stream_type == CAMERA3_STREAM_OUTPUT &&
15158 (streamInfo->stream->format == HAL_PIXEL_FORMAT_BLOB ||
15159 streamInfo->stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888 ||
15160 streamInfo->stream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED)) {
15161 return true;
15162 }
15163 }
Chien-Yu Chendeaebad2017-06-30 11:46:34 -070015164 }
15165
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070015166 return false;
Chien-Yu Chendeaebad2017-06-30 11:46:34 -070015167}
15168
Chien-Yu Chenee335912017-02-09 17:53:20 -080015169status_t QCamera3HardwareInterface::configureHdrPlusStreamsLocked()
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080015170{
15171 pbcamera::InputConfiguration inputConfig;
15172 std::vector<pbcamera::StreamConfiguration> outputStreamConfigs;
15173 status_t res = OK;
15174
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070015175 // Sensor MIPI will send data to Easel.
15176 inputConfig.isSensorInput = true;
15177 inputConfig.sensorMode.cameraId = mCameraId;
15178 inputConfig.sensorMode.pixelArrayWidth = mSensorModeInfo.pixel_array_size.width;
15179 inputConfig.sensorMode.pixelArrayHeight = mSensorModeInfo.pixel_array_size.height;
15180 inputConfig.sensorMode.activeArrayWidth = mSensorModeInfo.active_array_size.width;
15181 inputConfig.sensorMode.activeArrayHeight = mSensorModeInfo.active_array_size.height;
15182 inputConfig.sensorMode.outputPixelClkHz = mSensorModeInfo.op_pixel_clk;
15183 inputConfig.sensorMode.timestampOffsetNs = mSensorModeInfo.timestamp_offset;
15184 if (mSensorModeInfo.num_raw_bits != 10) {
15185 ALOGE("%s: Only RAW10 is supported but this sensor mode has %d raw bits.", __FUNCTION__,
15186 mSensorModeInfo.num_raw_bits);
15187 return BAD_VALUE;
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080015188 }
15189
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070015190 inputConfig.sensorMode.format = HAL_PIXEL_FORMAT_RAW10;
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080015191
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070015192 // Iterate through configured output streams in HAL and configure those streams in HDR+
15193 // service.
15194 for (auto streamInfo : mStreamInfo) {
15195 pbcamera::StreamConfiguration outputConfig;
15196 if (streamInfo->stream->stream_type == CAMERA3_STREAM_OUTPUT) {
15197 switch (streamInfo->stream->format) {
15198 case HAL_PIXEL_FORMAT_BLOB:
15199 case HAL_PIXEL_FORMAT_YCbCr_420_888:
15200 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
15201 res = fillPbStreamConfig(&outputConfig, streamInfo->id,
15202 streamInfo->channel, /*stream index*/0);
15203 if (res != OK) {
15204 LOGE("%s: Failed to get fill stream config for YUV stream: %s (%d)",
15205                    LOGE("%s: Failed to fill stream config for YUV stream: %s (%d)",
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080015206
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070015207 return res;
15208 }
15209
15210 outputStreamConfigs.push_back(outputConfig);
15211 break;
15212 default:
15213 // TODO: handle RAW16 outputs if mRawChannel was created. (b/36690506)
15214 break;
15215 }
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080015216 }
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080015217 }
15218
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080015219 res = gHdrPlusClient->configureStreams(inputConfig, outputStreamConfigs);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080015220 if (res != OK) {
15221        LOGE("%s: Failed to configure streams with HDR+ client: %s (%d)", __FUNCTION__,
15222 strerror(-res), res);
15223 return res;
15224 }
15225
15226 return OK;
15227}
15228
Chien-Yu Chene80574b2017-09-08 19:05:20 -070015229void QCamera3HardwareInterface::handleEaselFatalError()
Chien-Yu Chen90f1fc12017-07-14 14:31:53 -070015230{
Chien-Yu Chen90f1fc12017-07-14 14:31:53 -070015231 pthread_mutex_lock(&mMutex);
15232 mState = ERROR;
15233 pthread_mutex_unlock(&mMutex);
15234
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -070015235 handleCameraDeviceError(/*stopChannelImmediately*/true);
Chien-Yu Chen90f1fc12017-07-14 14:31:53 -070015236}
15237
Chien-Yu Chene80574b2017-09-08 19:05:20 -070015238void QCamera3HardwareInterface::handleEaselFatalErrorAsync()
15239{
15240 if (mEaselErrorFuture.valid()) {
15241        // An error-handling future has already been launched.
15242 return;
15243 }
15244
15245 // Launch a future to handle the fatal error.
15246 mEaselErrorFuture = std::async(std::launch::async,
15247 &QCamera3HardwareInterface::handleEaselFatalError, this);
15248}
15249
15250void QCamera3HardwareInterface::onEaselFatalError(std::string errMsg)
15251{
15252 ALOGE("%s: Got an Easel fatal error: %s", __FUNCTION__, errMsg.c_str());
15253 handleEaselFatalErrorAsync();
15254}
15255
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070015256void QCamera3HardwareInterface::onOpened(std::unique_ptr<HdrPlusClient> client)
15257{
Arnd Geis8cbfc182017-09-07 14:46:41 -070015258 int rc = NO_ERROR;
15259
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015260 if (client == nullptr) {
15261 ALOGE("%s: Opened client is null.", __FUNCTION__);
15262 return;
15263 }
15264
Chien-Yu Chene96475e2017-04-11 11:53:26 -070015265 logEaselEvent("EASEL_STARTUP_LATENCY", "HDR+ client opened.");
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015266 ALOGI("%s: HDR+ client opened.", __FUNCTION__);
15267
Chien-Yu Chen77ccd022017-06-23 12:00:36 -070015268 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070015269 if (!gHdrPlusClientOpening) {
15270 ALOGW("%s: HDR+ is disabled while HDR+ client is being opened.", __FUNCTION__);
15271 return;
15272 }
15273
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015274 gHdrPlusClient = std::move(client);
15275 gHdrPlusClientOpening = false;
Chien-Yu Chen77ccd022017-06-23 12:00:36 -070015276 gHdrPlusClientOpenCond.notify_one();
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015277
15278 // Set static metadata.
15279 status_t res = gHdrPlusClient->setStaticMetadata(*gStaticMetadata[mCameraId]);
15280 if (res != OK) {
15281 LOGE("%s: Failed to set static metadata in HDR+ client: %s (%d). Closing HDR+ client.",
15282 __FUNCTION__, strerror(-res), res);
Chien-Yu Chend77a5462017-06-02 18:00:38 -070015283 gEaselManagerClient->closeHdrPlusClient(std::move(gHdrPlusClient));
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015284 gHdrPlusClient = nullptr;
15285 return;
15286 }
15287
15288 // Enable HDR+ mode.
15289 res = enableHdrPlusModeLocked();
15290 if (res != OK) {
15291 LOGE("%s: Failed to configure HDR+ streams.", __FUNCTION__);
15292 }
Arnd Geis8cbfc182017-09-07 14:46:41 -070015293
15294 // Get Easel firmware version
15295 if (EaselManagerClientOpened) {
15296 rc = gEaselManagerClient->getFwVersion(mEaselFwVersion);
15297 if (rc != OK) {
15298 ALOGD("%s: Failed to query Easel firmware version", __FUNCTION__);
15299 } else {
15300 mEaselFwUpdated = true;
15301 }
15302 }
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015303}
15304
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070015305void QCamera3HardwareInterface::onOpenFailed(status_t err)
15306{
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015307 ALOGE("%s: Opening HDR+ client failed: %s (%d)", __FUNCTION__, strerror(-err), err);
Chien-Yu Chen77ccd022017-06-23 12:00:36 -070015308 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015309 gHdrPlusClientOpening = false;
Chien-Yu Chen77ccd022017-06-23 12:00:36 -070015310 gHdrPlusClientOpenCond.notify_one();
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015311}
15312
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070015313void QCamera3HardwareInterface::onFatalError()
15314{
Chien-Yu Chene80574b2017-09-08 19:05:20 -070015315 ALOGE("%s: HDR+ client encountered a fatal error.", __FUNCTION__);
15316 handleEaselFatalErrorAsync();
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070015317}
15318
Chien-Yu Chen29fd1d72017-04-27 18:42:09 -070015319void QCamera3HardwareInterface::onShutter(uint32_t requestId, int64_t apSensorTimestampNs)
15320{
15321 ALOGV("%s: %d: Received a shutter for HDR+ request %d timestamp %" PRId64, __FUNCTION__,
15322 __LINE__, requestId, apSensorTimestampNs);
15323
15324 mShutterDispatcher.markShutterReady(requestId, apSensorTimestampNs);
15325}
15326
Chien-Yu Chendaf68892017-08-25 12:56:40 -070015327void QCamera3HardwareInterface::onNextCaptureReady(uint32_t requestId)
15328{
15329 pthread_mutex_lock(&mMutex);
15330
15331 // Find the pending request for this result metadata.
15332 auto requestIter = mPendingRequestsList.begin();
15333 while (requestIter != mPendingRequestsList.end() && requestIter->frame_number != requestId) {
15334 requestIter++;
15335 }
15336
15337 if (requestIter == mPendingRequestsList.end()) {
15338 ALOGE("%s: Cannot find a pending request for frame number %u.", __FUNCTION__, requestId);
15339 pthread_mutex_unlock(&mMutex);
15340 return;
15341 }
15342
15343 requestIter->partial_result_cnt++;
15344
15345 CameraMetadata metadata;
15346 uint8_t ready = true;
15347 metadata.update(NEXUS_EXPERIMENTAL_2017_NEXT_STILL_INTENT_REQUEST_READY, &ready, 1);
15348
15349 // Send it to framework.
15350 camera3_capture_result_t result = {};
15351
15352 result.result = metadata.getAndLock();
15353 // Populate metadata result
15354 result.frame_number = requestId;
15355 result.num_output_buffers = 0;
15356 result.output_buffers = NULL;
15357 result.partial_result = requestIter->partial_result_cnt;
15358
15359 orchestrateResult(&result);
15360 metadata.unlock(result.result);
15361
15362 pthread_mutex_unlock(&mMutex);
15363}
15364
Chien-Yu Chen0a921f92017-08-27 17:25:33 -070015365void QCamera3HardwareInterface::onPostview(uint32_t requestId,
15366 std::unique_ptr<std::vector<uint8_t>> postview, uint32_t width, uint32_t height,
15367 uint32_t stride, int32_t format)
15368{
15369 if (property_get_bool("persist.camera.hdrplus.dump_postview", false)) {
15370 ALOGI("%s: %d: Received a postview %dx%d for HDR+ request %d", __FUNCTION__,
15371 __LINE__, width, height, requestId);
15372 char buf[FILENAME_MAX] = {};
15373 snprintf(buf, sizeof(buf), QCAMERA_DUMP_FRM_LOCATION"postview_%d_%dx%d.ppm",
15374 requestId, width, height);
15375
15376 pbcamera::StreamConfiguration config = {};
15377 config.image.width = width;
15378 config.image.height = height;
15379 config.image.format = format;
15380
15381 pbcamera::PlaneConfiguration plane = {};
15382 plane.stride = stride;
15383 plane.scanline = height;
15384
15385 config.image.planes.push_back(plane);
15386
15387 pbcamera::StreamBuffer buffer = {};
15388 buffer.streamId = 0;
15389 buffer.dmaBufFd = -1;
15390 buffer.data = postview->data();
15391 buffer.dataSize = postview->size();
15392
15393 hdrplus_client_utils::writePpm(buf, config, buffer);
15394 }
15395
15396 pthread_mutex_lock(&mMutex);
15397
15398 // Find the pending request for this result metadata.
15399 auto requestIter = mPendingRequestsList.begin();
15400 while (requestIter != mPendingRequestsList.end() && requestIter->frame_number != requestId) {
15401 requestIter++;
15402 }
15403
15404 if (requestIter == mPendingRequestsList.end()) {
15405 ALOGE("%s: Cannot find a pending request for frame number %u.", __FUNCTION__, requestId);
15406 pthread_mutex_unlock(&mMutex);
15407 return;
15408 }
15409
15410 requestIter->partial_result_cnt++;
15411
15412 CameraMetadata metadata;
15413 int32_t config[3] = {static_cast<int32_t>(width), static_cast<int32_t>(height),
15414 static_cast<int32_t>(stride)};
15415 metadata.update(NEXUS_EXPERIMENTAL_2017_POSTVIEW_CONFIG, config, 3);
15416 metadata.update(NEXUS_EXPERIMENTAL_2017_POSTVIEW_DATA, postview->data(), postview->size());
15417
15418 // Send it to framework.
15419 camera3_capture_result_t result = {};
15420
15421 result.result = metadata.getAndLock();
15422 // Populate metadata result
15423 result.frame_number = requestId;
15424 result.num_output_buffers = 0;
15425 result.output_buffers = NULL;
15426 result.partial_result = requestIter->partial_result_cnt;
15427
15428 orchestrateResult(&result);
15429 metadata.unlock(result.result);
15430
15431 pthread_mutex_unlock(&mMutex);
15432}
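
/*
 * Illustrative sketch only; not part of the HAL and excluded from the build.
 * It shows how a client of this partial result could read back the postview
 * published above: the config tag holds {width, height, stride} and the data
 * tag holds the raw bytes. Interpreting those bytes as an image is up to the
 * client and depends on the format value passed to onPostview().
 */
#if 0
static void parsePostviewResultExample(const CameraMetadata &result)
{
    camera_metadata_ro_entry cfg  = result.find(NEXUS_EXPERIMENTAL_2017_POSTVIEW_CONFIG);
    camera_metadata_ro_entry data = result.find(NEXUS_EXPERIMENTAL_2017_POSTVIEW_DATA);
    if (cfg.count == 3 && data.count > 0) {
        int32_t width  = cfg.data.i32[0];
        int32_t height = cfg.data.i32[1];
        int32_t stride = cfg.data.i32[2];
        const uint8_t *bytes = data.data.u8;   // data.count bytes of postview payload
        (void)width; (void)height; (void)stride; (void)bytes;
    }
}
#endif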
15433
Chien-Yu Chene687bd02016-12-07 18:30:26 -080015434void QCamera3HardwareInterface::onCaptureResult(pbcamera::CaptureResult *result,
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070015435 const camera_metadata_t &resultMetadata)
15436{
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070015437 if (result == nullptr) {
15438 ALOGE("%s: result is nullptr.", __FUNCTION__);
15439 return;
15440 }
15441
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070015442 // Find the pending HDR+ request.
15443 HdrPlusPendingRequest pendingRequest;
15444 {
15445 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
15446 auto req = mHdrPlusPendingRequests.find(result->requestId);
15447 pendingRequest = req->second;
15448 }
15449
15450 // Update the result metadata with the settings of the HDR+ still capture request because
15451 // the result metadata belongs to a ZSL buffer.
15452 CameraMetadata metadata;
15453 metadata = &resultMetadata;
15454 updateHdrPlusResultMetadata(metadata, pendingRequest.settings);
15455 camera_metadata_t* updatedResultMetadata = metadata.release();
15456
15457 uint32_t halSnapshotStreamId = 0;
15458 if (mPictureChannel != nullptr) {
15459 halSnapshotStreamId = mPictureChannel->getStreamID(mPictureChannel->getStreamTypeMask());
15460 }
15461
15462 auto halMetadata = std::make_shared<metadata_buffer_t>();
15463 clear_metadata_buffer(halMetadata.get());
15464
15465 // Convert updated result metadata to HAL metadata.
15466 status_t res = translateFwkMetadataToHalMetadata(updatedResultMetadata, halMetadata.get(),
15467 halSnapshotStreamId, /*minFrameDuration*/0);
15468 if (res != 0) {
15469 ALOGE("%s: Translating metadata failed: %s (%d)", __FUNCTION__, strerror(-res), res);
15470 }
15471
15472 for (auto &outputBuffer : result->outputBuffers) {
15473 uint32_t streamId = outputBuffer.streamId;
15474
15475 // Find the framework output buffer in the pending request.
15476 auto frameworkOutputBufferIter = pendingRequest.frameworkOutputBuffers.find(streamId);
15477 if (frameworkOutputBufferIter == pendingRequest.frameworkOutputBuffers.end()) {
15478 ALOGE("%s: Couldn't find framework output buffers for stream id %u", __FUNCTION__,
15479 streamId);
15480 continue;
Chien-Yu Chene687bd02016-12-07 18:30:26 -080015481 }
15482
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070015483 camera3_stream_buffer_t *frameworkOutputBuffer = &frameworkOutputBufferIter->second;
15484
15485 // Find the channel for the output buffer.
15486 QCamera3ProcessingChannel *channel =
15487 (QCamera3ProcessingChannel*)frameworkOutputBuffer->stream->priv;
15488
15489 // Find the output buffer def.
15490 auto outputBufferIter = pendingRequest.outputBuffers.find(streamId);
15491 if (outputBufferIter == pendingRequest.outputBuffers.end()) {
15492 ALOGE("%s: Cannot find output buffer", __FUNCTION__);
15493 continue;
Chien-Yu Chene687bd02016-12-07 18:30:26 -080015494 }
15495
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070015496 std::shared_ptr<mm_camera_buf_def_t> outputBufferDef = outputBufferIter->second;
Chien-Yu Chendaf68892017-08-25 12:56:40 -070015497
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070015498 // Check whether to dump the buffer.
15499 if (frameworkOutputBuffer->stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888 ||
15500 frameworkOutputBuffer->stream->format == HAL_PIXEL_FORMAT_BLOB) {
15501 // If the stream format is YUV or jpeg, check if dumping HDR+ YUV output is enabled.
15502 char prop[PROPERTY_VALUE_MAX];
15503 property_get("persist.camera.hdrplus.dump_yuv", prop, "0");
15504 bool dumpYuvOutput = atoi(prop);
Chien-Yu Chene687bd02016-12-07 18:30:26 -080015505
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070015506 if (dumpYuvOutput) {
15507 // Dump yuv buffer to a ppm file.
15508 pbcamera::StreamConfiguration outputConfig;
15509 status_t rc = fillPbStreamConfig(&outputConfig, streamId,
15510 channel, /*stream index*/0);
15511 if (rc == OK) {
15512 char buf[FILENAME_MAX] = {};
15513 snprintf(buf, sizeof(buf), QCAMERA_DUMP_FRM_LOCATION"s_%d_%d_%dx%d.ppm",
15514 result->requestId, streamId,
15515 outputConfig.image.width, outputConfig.image.height);
Chien-Yu Chen92724a82017-01-06 11:50:30 -080015516
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070015517 hdrplus_client_utils::writePpm(buf, outputConfig, outputBuffer);
15518 } else {
15519 LOGW("%s: Couldn't dump YUV buffer because getting stream config failed: "
15520 "%s (%d).", __FUNCTION__, strerror(-rc), rc);
15521 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -080015522 }
15523 }
15524
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070015525 if (channel == mPictureChannel) {
Chien-Yu Chen92724a82017-01-06 11:50:30 -080015526 // Return the buffer to pic channel for encoding.
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070015527 mPictureChannel->returnYuvBufferAndEncode(outputBufferDef.get(),
15528 frameworkOutputBuffer->buffer, result->requestId,
Chien-Yu Chen92724a82017-01-06 11:50:30 -080015529 halMetadata);
15530 } else {
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070015531 // Return the buffer to camera framework.
Chien-Yu Chen92724a82017-01-06 11:50:30 -080015532 pthread_mutex_lock(&mMutex);
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070015533 handleBufferWithLock(frameworkOutputBuffer, result->requestId);
Chien-Yu Chen92724a82017-01-06 11:50:30 -080015534 pthread_mutex_unlock(&mMutex);
Chien-Yu Chene687bd02016-12-07 18:30:26 -080015535
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070015536 channel->unregisterBuffer(outputBufferDef.get());
Chien-Yu Chene687bd02016-12-07 18:30:26 -080015537 }
15538 }
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070015539
15540 // Send HDR+ metadata to framework.
15541 {
15542 pthread_mutex_lock(&mMutex);
15543
15544 // updatedResultMetadata will be freed in handlePendingResultMetadataWithLock.
15545 handlePendingResultMetadataWithLock(result->requestId, updatedResultMetadata);
15546 pthread_mutex_unlock(&mMutex);
15547 }
15548
15549 // Remove the HDR+ pending request.
15550 {
15551 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
15552 auto req = mHdrPlusPendingRequests.find(result->requestId);
15553 mHdrPlusPendingRequests.erase(req);
15554 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070015555}
15556
void QCamera3HardwareInterface::onFailedCaptureResult(pbcamera::CaptureResult *failedResult)
{
    if (failedResult == nullptr) {
        ALOGE("%s: Got an empty failed result.", __FUNCTION__);
        return;
    }

    ALOGE("%s: Got a failed HDR+ result for request %d", __FUNCTION__, failedResult->requestId);

    // Find the pending HDR+ request.
    HdrPlusPendingRequest pendingRequest;
    {
        Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
        auto req = mHdrPlusPendingRequests.find(failedResult->requestId);
        if (req == mHdrPlusPendingRequests.end()) {
            ALOGE("%s: Couldn't find pending request %d", __FUNCTION__, failedResult->requestId);
            return;
        }
        pendingRequest = req->second;
    }

    for (auto &outputBuffer : failedResult->outputBuffers) {
        uint32_t streamId = outputBuffer.streamId;

        // Find the framework output buffer in the pending request.
        auto frameworkOutputBufferIter = pendingRequest.frameworkOutputBuffers.find(streamId);
        if (frameworkOutputBufferIter == pendingRequest.frameworkOutputBuffers.end()) {
            ALOGE("%s: Couldn't find framework output buffers for stream id %u", __FUNCTION__,
                    streamId);
            continue;
        }

        camera3_stream_buffer_t *frameworkOutputBuffer = &frameworkOutputBufferIter->second;

        // Find the channel for the output buffer.
        QCamera3ProcessingChannel *channel =
                (QCamera3ProcessingChannel*)frameworkOutputBuffer->stream->priv;

        // Find the output buffer def.
        auto outputBufferIter = pendingRequest.outputBuffers.find(streamId);
        if (outputBufferIter == pendingRequest.outputBuffers.end()) {
            ALOGE("%s: Cannot find output buffer", __FUNCTION__);
            continue;
        }

        std::shared_ptr<mm_camera_buf_def_t> outputBufferDef = outputBufferIter->second;

        if (channel == mPictureChannel) {
            // Return the buffer to pic channel.
            mPictureChannel->returnYuvBuffer(outputBufferDef.get());
        } else {
            channel->unregisterBuffer(outputBufferDef.get());
        }
    }

    // Remove the HDR+ pending request.
    {
        Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
        auto req = mHdrPlusPendingRequests.find(failedResult->requestId);
        mHdrPlusPendingRequests.erase(req);
    }

    pthread_mutex_lock(&mMutex);

    // Find the pending buffers for this request.
    auto pendingBuffers = mPendingBuffersMap.mPendingBuffersInRequest.begin();
    while (pendingBuffers != mPendingBuffersMap.mPendingBuffersInRequest.end()) {
        if (pendingBuffers->frame_number == failedResult->requestId) {
            break;
        }
        pendingBuffers++;
    }

    // Send out buffer errors for the pending buffers.
    if (pendingBuffers != mPendingBuffersMap.mPendingBuffersInRequest.end()) {
        std::vector<camera3_stream_buffer_t> streamBuffers;
        for (auto &buffer : pendingBuffers->mPendingBufferList) {
            // Prepare a stream buffer.
            camera3_stream_buffer_t streamBuffer = {};
            streamBuffer.stream = buffer.stream;
            streamBuffer.buffer = buffer.buffer;
            streamBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
            streamBuffer.acquire_fence = -1;
            streamBuffer.release_fence = -1;

            streamBuffers.push_back(streamBuffer);

            // Send out an error buffer event.
            camera3_notify_msg_t notify_msg = {};
            notify_msg.type = CAMERA3_MSG_ERROR;
            notify_msg.message.error.frame_number = pendingBuffers->frame_number;
            notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
            notify_msg.message.error.error_stream = buffer.stream;

            orchestrateNotify(&notify_msg);
        }

        camera3_capture_result_t result = {};
        result.frame_number = pendingBuffers->frame_number;
        result.num_output_buffers = streamBuffers.size();
        result.output_buffers = &streamBuffers[0];

        // Send out the result with buffer errors.
        orchestrateResult(&result);

        // Remove the pending buffers.
        mPendingBuffersMap.mPendingBuffersInRequest.erase(pendingBuffers);
    }

    // Remove the pending HAL request.
    auto halRequest = mPendingRequestsList.begin();
    while (halRequest != mPendingRequestsList.end()) {
        if (halRequest->frame_number == failedResult->requestId) {
            mPendingRequestsList.erase(halRequest);
            break;
        }
        halRequest++;
    }

    pthread_mutex_unlock(&mMutex);
}

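// ShutterDispatcher dispatches shutter notifications to the framework in
// frame-number order, even when captures complete out of order. Regular and
// reprocess requests are tracked in separate queues.
//
// A rough usage sketch (names outside this class are hypothetical):
//
//   dispatcher.expectShutter(frameNumber, /*isReprocess*/ false);
//   // ...capture completes...
//   dispatcher.markShutterReady(frameNumber, timestampNs);
//
// markShutterReady() emits this frame's shutter plus any consecutive ready
// shutters queued before it; frames behind a not-yet-ready shutter are held.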
15681ShutterDispatcher::ShutterDispatcher(QCamera3HardwareInterface *parent) :
15682 mParent(parent) {}
15683
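// Registers an expected shutter for frameNumber so it can later be dispatched
// in order. Reprocess requests are tracked separately from regular requests.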
void ShutterDispatcher::expectShutter(uint32_t frameNumber, bool isReprocess)
{
    std::lock_guard<std::mutex> lock(mLock);

    if (isReprocess) {
        mReprocessShutters.emplace(frameNumber, Shutter());
    } else {
        mShutters.emplace(frameNumber, Shutter());
    }
}

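// Marks the shutter for frameNumber as ready and sends shutter notifications,
// in frame-number order, for every consecutive ready shutter. Shutters queued
// behind a frame that is not yet ready are held until that frame is marked.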
void ShutterDispatcher::markShutterReady(uint32_t frameNumber, uint64_t timestamp)
{
    std::lock_guard<std::mutex> lock(mLock);

    std::map<uint32_t, Shutter> *shutters = nullptr;

    // Find the shutter entry.
    auto shutter = mShutters.find(frameNumber);
    if (shutter == mShutters.end()) {
        shutter = mReprocessShutters.find(frameNumber);
        if (shutter == mReprocessShutters.end()) {
            // Shutter was already sent.
            return;
        }
        shutters = &mReprocessShutters;
    } else {
        shutters = &mShutters;
    }

    // Make this frame's shutter ready.
    shutter->second.ready = true;
    shutter->second.timestamp = timestamp;

    // Iterate through the shutters and send them out until reaching one that's not ready yet.
    shutter = shutters->begin();
    while (shutter != shutters->end()) {
        if (!shutter->second.ready) {
            // If this shutter is not ready, the following shutters can't be sent.
            break;
        }

        camera3_notify_msg_t msg = {};
        msg.type = CAMERA3_MSG_SHUTTER;
        msg.message.shutter.frame_number = shutter->first;
        msg.message.shutter.timestamp = shutter->second.timestamp;
        mParent->orchestrateNotify(&msg);

        shutter = shutters->erase(shutter);
    }
}

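// Drops any tracked shutter (regular or reprocess) for frameNumber without
// sending a notification.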
void ShutterDispatcher::clear(uint32_t frameNumber)
{
    std::lock_guard<std::mutex> lock(mLock);
    mShutters.erase(frameNumber);
    mReprocessShutters.erase(frameNumber);
}

void ShutterDispatcher::clear()
{
    std::lock_guard<std::mutex> lock(mLock);

    // Log errors for stale shutters.
    for (auto &shutter : mShutters) {
        ALOGE("%s: stale shutter: frame number %u, ready %d, timestamp %" PRId64,
                __FUNCTION__, shutter.first, shutter.second.ready,
                shutter.second.timestamp);
    }

    // Log errors for stale reprocess shutters.
    for (auto &shutter : mReprocessShutters) {
        ALOGE("%s: stale reprocess shutter: frame number %u, ready %d, timestamp %" PRId64,
                __FUNCTION__, shutter.first, shutter.second.ready,
                shutter.second.timestamp);
    }

    mShutters.clear();
    mReprocessShutters.clear();
}

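// OutputBufferDispatcher returns completed output buffers to the framework in
// frame-number order, tracked independently per stream.
//
// A rough usage sketch (names outside this class are hypothetical):
//
//   dispatcher.configureStreams(streamList);
//   dispatcher.expectBuffer(frameNumber, stream);
//   // ...buffer is filled...
//   dispatcher.markBufferReady(frameNumber, streamBuffer);
//
// markBufferReady() sends this frame's buffer plus any consecutive ready
// buffers queued before it on the same stream.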
OutputBufferDispatcher::OutputBufferDispatcher(QCamera3HardwareInterface *parent) :
        mParent(parent) {}

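// Resets per-stream buffer tracking for a new stream configuration. Any
// buffers tracked for a previous configuration are dropped.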
status_t OutputBufferDispatcher::configureStreams(camera3_stream_configuration_t *streamList)
{
    std::lock_guard<std::mutex> lock(mLock);
    mStreamBuffers.clear();
    if (!streamList) {
        ALOGE("%s: streamList is nullptr.", __FUNCTION__);
        return -EINVAL;
    }

    // Create a "frame-number -> buffer" map for each stream.
    for (uint32_t i = 0; i < streamList->num_streams; i++) {
        mStreamBuffers.emplace(streamList->streams[i], std::map<uint32_t, Buffer>());
    }

    return OK;
}

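// Registers an expected output buffer for frameNumber on the given stream. The
// stream must have been passed to configureStreams(); otherwise -EINVAL is
// returned.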
status_t OutputBufferDispatcher::expectBuffer(uint32_t frameNumber, camera3_stream_t *stream)
{
    std::lock_guard<std::mutex> lock(mLock);

    // Find the "frame-number -> buffer" map for the stream.
    auto buffers = mStreamBuffers.find(stream);
    if (buffers == mStreamBuffers.end()) {
        ALOGE("%s: Stream %p was not configured.", __FUNCTION__, stream);
        return -EINVAL;
    }

    // Create an unready buffer for this frame number.
    buffers->second.emplace(frameNumber, Buffer());
    return OK;
}

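// Marks the output buffer for frameNumber on buffer.stream as ready and sends
// results to the framework, in frame-number order, for every consecutive ready
// buffer on that stream. Buffers queued behind a frame that is not yet ready
// are held until that frame is marked.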
void OutputBufferDispatcher::markBufferReady(uint32_t frameNumber,
        const camera3_stream_buffer_t &buffer)
{
    std::lock_guard<std::mutex> lock(mLock);

    // Find the "frame-number -> buffer" map for the stream.
    auto buffers = mStreamBuffers.find(buffer.stream);
    if (buffers == mStreamBuffers.end()) {
        ALOGE("%s: Cannot find pending buffers for stream %p.", __FUNCTION__, buffer.stream);
        return;
    }

    // Find the unready buffer for this frame number and mark it ready.
    auto pendingBuffer = buffers->second.find(frameNumber);
    if (pendingBuffer == buffers->second.end()) {
        ALOGE("%s: Cannot find the pending buffer for frame number %u.", __FUNCTION__, frameNumber);
        return;
    }

    pendingBuffer->second.ready = true;
    pendingBuffer->second.buffer = buffer;

    // Iterate through the buffers and send them out until reaching one that's not ready yet.
    pendingBuffer = buffers->second.begin();
    while (pendingBuffer != buffers->second.end()) {
        if (!pendingBuffer->second.ready) {
            // If this buffer is not ready, the following buffers can't be sent.
            break;
        }

        camera3_capture_result_t result = {};
        result.frame_number = pendingBuffer->first;
        result.num_output_buffers = 1;
        result.output_buffers = &pendingBuffer->second.buffer;

        // Send out the result with this buffer.
        mParent->orchestrateResult(&result);

        pendingBuffer = buffers->second.erase(pendingBuffer);
    }
}

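// Drops all tracked buffers, logging any buffer that was never returned to the
// framework. When clearConfiguredStreams is true, the per-stream tracking maps
// are removed as well, so configureStreams() must be called again before use.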
void OutputBufferDispatcher::clear(bool clearConfiguredStreams)
{
    std::lock_guard<std::mutex> lock(mLock);

    // Log errors for stale buffers.
    for (auto &buffers : mStreamBuffers) {
        for (auto &buffer : buffers.second) {
            ALOGE("%s: stale buffer: stream %p, frame number %u, ready %d",
                    __FUNCTION__, buffers.first, buffer.first, buffer.second.ready);
        }
        buffers.second.clear();
    }

    if (clearConfiguredStreams) {
        mStreamBuffers.clear();
    }
}

}; //end namespace qcamera