/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*     * Redistributions of source code must retain the above copyright
*       notice, this list of conditions and the following disclaimer.
*     * Redistributions in binary form must reproduce the above
*       copyright notice, this list of conditions and the following
*       disclaimer in the documentation and/or other materials provided
*       with the distribution.
*     * Neither the name of The Linux Foundation nor the names of its
*       contributors may be used to endorse or promote products derived
*       from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
*/

#define LOG_TAG "QCamera3HWI"
//#define LOG_NDEBUG 0

#define __STDC_LIMIT_MACROS

// To remove
#include <cutils/properties.h>

// System dependencies
#include <dlfcn.h>
#include <fcntl.h>
#include <stdio.h>
#include <stdlib.h>
#include "utils/Timers.h"
#include "sys/ioctl.h"
#include <time.h>
#include <sync/sync.h>
#include "gralloc_priv.h"
#include <map>

// Display dependencies
#include "qdMetaData.h"

// Camera dependencies
#include "android/QCamera3External.h"
#include "util/QCameraFlash.h"
#include "QCamera3HWI.h"
#include "QCamera3VendorTags.h"
#include "QCameraTrace.h"

#include "HdrPlusClientUtils.h"

extern "C" {
#include "mm_camera_dbg.h"
}
#include "cam_cond.h"

using ::android::hardware::camera::common::V1_0::helper::CameraMetadata;
using namespace android;

namespace qcamera {

#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )

#define EMPTY_PIPELINE_DELAY 2
// mm_camera has 2 partial results: 3A, and final result.
// HDR+ requests have 3 partial results: postview, next request ready, and final result.
#define PARTIAL_RESULT_COUNT 3
#define FRAME_SKIP_DELAY 0

#define MAX_VALUE_8BIT ((1<<8)-1)
#define MAX_VALUE_10BIT ((1<<10)-1)
#define MAX_VALUE_12BIT ((1<<12)-1)

#define VIDEO_4K_WIDTH  3840
#define VIDEO_4K_HEIGHT 2160

#define MAX_EIS_WIDTH 3840
#define MAX_EIS_HEIGHT 2160

#define MAX_RAW_STREAMS        1
#define MAX_STALLING_STREAMS   1
#define MAX_PROCESSED_STREAMS  3
/* Batch mode is enabled only if FPS set is equal to or greater than this */
#define MIN_FPS_FOR_BATCH_MODE (120)
#define PREVIEW_FPS_FOR_HFR    (30)
#define DEFAULT_VIDEO_FPS      (30.0)
#define TEMPLATE_MAX_PREVIEW_FPS (30.0)
#define MAX_HFR_BATCH_SIZE     (8)
#define REGIONS_TUPLE_COUNT    5
// Threshold (in seconds) for detection of missing request buffers
#define MISSING_REQUEST_BUF_TIMEOUT 5
#define MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT 30
#define FLUSH_TIMEOUT 3
#define METADATA_MAP_SIZE(MAP) (sizeof(MAP)/sizeof(MAP[0]))
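// Illustrative usage (hypothetical, not taken from this file): the QCameraMap tables
// defined below are plain arrays, so code translating between framework and HAL enums
// typically scans them with this macro as the bound, e.g.
//   for (size_t i = 0; i < METADATA_MAP_SIZE(EFFECT_MODES_MAP); i++) { ... }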

#define CAM_QCOM_FEATURE_PP_SUPERSET_HAL3 ( CAM_QCOM_FEATURE_DENOISE2D |\
                                            CAM_QCOM_FEATURE_CROP |\
                                            CAM_QCOM_FEATURE_ROTATION |\
                                            CAM_QCOM_FEATURE_SHARPNESS |\
                                            CAM_QCOM_FEATURE_SCALE |\
                                            CAM_QCOM_FEATURE_CAC |\
                                            CAM_QCOM_FEATURE_CDS )
/* Per configuration size for static metadata length */
#define PER_CONFIGURATION_SIZE_3 (3)

#define TIMEOUT_NEVER -1

/* Face rect indices */
#define FACE_LEFT 0
#define FACE_TOP 1
#define FACE_RIGHT 2
#define FACE_BOTTOM 3
#define FACE_WEIGHT 4

/* Face landmarks indices */
#define LEFT_EYE_X 0
#define LEFT_EYE_Y 1
#define RIGHT_EYE_X 2
#define RIGHT_EYE_Y 3
#define MOUTH_X 4
#define MOUTH_Y 5
#define TOTAL_LANDMARK_INDICES 6

// Max preferred zoom
#define MAX_PREFERRED_ZOOM_RATIO 7.0

// Whether to check for the GPU stride padding, or use the default
//#define CHECK_GPU_PIXEL_ALIGNMENT

cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS];
extern pthread_mutex_t gCamLock;
volatile uint32_t gCamHal3LogLevel = 1;
extern uint8_t gNumCameraSessions;

// Note that this doesn't support concurrent front and back camera b/35960155.
// The following Easel related variables must be protected by gHdrPlusClientLock.
std::unique_ptr<EaselManagerClient> gEaselManagerClient;
bool EaselManagerClientOpened = false; // If gEaselManagerClient is opened.
std::unique_ptr<HdrPlusClient> gHdrPlusClient = nullptr;
bool gHdrPlusClientOpening = false; // If HDR+ client is being opened.
std::condition_variable gHdrPlusClientOpenCond; // Used to synchronize HDR+ client opening.
bool gEaselProfilingEnabled = false; // If Easel profiling is enabled.
bool gExposeEnableZslKey = false; // If HAL makes android.control.enableZsl available.
bool gEnableMultipleHdrplusOutputs = false; // Whether to enable multiple output from Easel HDR+.

// If Easel is in bypass only mode. If true, Easel HDR+ won't be enabled.
bool gEaselBypassOnly;

std::mutex gHdrPlusClientLock; // Protect above Easel related variables.


const QCamera3HardwareInterface::QCameraPropMap QCamera3HardwareInterface::CDS_MAP [] = {
    {"On",  CAM_CDS_MODE_ON},
    {"Off", CAM_CDS_MODE_OFF},
    {"Auto",CAM_CDS_MODE_AUTO}
};
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_video_hdr_mode_t,
        cam_video_hdr_mode_t> QCamera3HardwareInterface::VIDEO_HDR_MODES_MAP[] = {
    { QCAMERA3_VIDEO_HDR_MODE_OFF,  CAM_VIDEO_HDR_MODE_OFF },
    { QCAMERA3_VIDEO_HDR_MODE_ON,   CAM_VIDEO_HDR_MODE_ON }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_binning_correction_mode_t,
        cam_binning_correction_mode_t> QCamera3HardwareInterface::BINNING_CORRECTION_MODES_MAP[] = {
    { QCAMERA3_BINNING_CORRECTION_MODE_OFF,  CAM_BINNING_CORRECTION_MODE_OFF },
    { QCAMERA3_BINNING_CORRECTION_MODE_ON,   CAM_BINNING_CORRECTION_MODE_ON }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_ir_mode_t,
        cam_ir_mode_type_t> QCamera3HardwareInterface::IR_MODES_MAP [] = {
    {QCAMERA3_IR_MODE_OFF,  CAM_IR_MODE_OFF},
    {QCAMERA3_IR_MODE_ON,   CAM_IR_MODE_ON},
    {QCAMERA3_IR_MODE_AUTO, CAM_IR_MODE_AUTO}
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_effect_mode_t,
        cam_effect_mode_type> QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
    { ANDROID_CONTROL_EFFECT_MODE_OFF,        CAM_EFFECT_MODE_OFF },
    { ANDROID_CONTROL_EFFECT_MODE_MONO,       CAM_EFFECT_MODE_MONO },
    { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE,   CAM_EFFECT_MODE_NEGATIVE },
    { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE,   CAM_EFFECT_MODE_SOLARIZE },
    { ANDROID_CONTROL_EFFECT_MODE_SEPIA,      CAM_EFFECT_MODE_SEPIA },
    { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE,  CAM_EFFECT_MODE_POSTERIZE },
    { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_AQUA,       CAM_EFFECT_MODE_AQUA }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_awb_mode_t,
        cam_wb_mode_type> QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
    { ANDROID_CONTROL_AWB_MODE_OFF,             CAM_WB_MODE_OFF },
    { ANDROID_CONTROL_AWB_MODE_AUTO,            CAM_WB_MODE_AUTO },
    { ANDROID_CONTROL_AWB_MODE_INCANDESCENT,    CAM_WB_MODE_INCANDESCENT },
    { ANDROID_CONTROL_AWB_MODE_FLUORESCENT,     CAM_WB_MODE_FLUORESCENT },
    { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT,CAM_WB_MODE_WARM_FLUORESCENT},
    { ANDROID_CONTROL_AWB_MODE_DAYLIGHT,        CAM_WB_MODE_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_TWILIGHT,        CAM_WB_MODE_TWILIGHT },
    { ANDROID_CONTROL_AWB_MODE_SHADE,           CAM_WB_MODE_SHADE }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_scene_mode_t,
        cam_scene_mode_type> QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
    { ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY,  CAM_SCENE_MODE_FACE_PRIORITY },
    { ANDROID_CONTROL_SCENE_MODE_ACTION,         CAM_SCENE_MODE_ACTION },
    { ANDROID_CONTROL_SCENE_MODE_PORTRAIT,       CAM_SCENE_MODE_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE,      CAM_SCENE_MODE_LANDSCAPE },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT,          CAM_SCENE_MODE_NIGHT },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_THEATRE,        CAM_SCENE_MODE_THEATRE },
    { ANDROID_CONTROL_SCENE_MODE_BEACH,          CAM_SCENE_MODE_BEACH },
    { ANDROID_CONTROL_SCENE_MODE_SNOW,           CAM_SCENE_MODE_SNOW },
    { ANDROID_CONTROL_SCENE_MODE_SUNSET,         CAM_SCENE_MODE_SUNSET },
    { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO,    CAM_SCENE_MODE_ANTISHAKE },
    { ANDROID_CONTROL_SCENE_MODE_FIREWORKS ,     CAM_SCENE_MODE_FIREWORKS },
    { ANDROID_CONTROL_SCENE_MODE_SPORTS ,        CAM_SCENE_MODE_SPORTS },
    { ANDROID_CONTROL_SCENE_MODE_PARTY,          CAM_SCENE_MODE_PARTY },
    { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT,    CAM_SCENE_MODE_CANDLELIGHT },
    { ANDROID_CONTROL_SCENE_MODE_BARCODE,        CAM_SCENE_MODE_BARCODE},
    { ANDROID_CONTROL_SCENE_MODE_HDR,            CAM_SCENE_MODE_HDR}
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_af_mode_t,
        cam_focus_mode_type> QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_OFF },
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_FIXED },
    { ANDROID_CONTROL_AF_MODE_AUTO,               CAM_FOCUS_MODE_AUTO },
    { ANDROID_CONTROL_AF_MODE_MACRO,              CAM_FOCUS_MODE_MACRO },
    { ANDROID_CONTROL_AF_MODE_EDOF,               CAM_FOCUS_MODE_EDOF },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO,   CAM_FOCUS_MODE_CONTINOUS_VIDEO }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_color_correction_aberration_mode_t,
        cam_aberration_mode_t> QCamera3HardwareInterface::COLOR_ABERRATION_MAP[] = {
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
            CAM_COLOR_CORRECTION_ABERRATION_OFF },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
            CAM_COLOR_CORRECTION_ABERRATION_FAST },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY,
            CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY },
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_antibanding_mode_t,
        cam_antibanding_mode_type> QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,  CAM_ANTIBANDING_MODE_OFF },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = {
    { ANDROID_CONTROL_AE_MODE_OFF,                  CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON,                   CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH,        CAM_FLASH_MODE_AUTO},
    { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH,      CAM_FLASH_MODE_ON  },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO},
    { (camera_metadata_enum_android_control_ae_mode_t)
            NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH, CAM_FLASH_MODE_OFF }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_flash_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
    { ANDROID_FLASH_MODE_OFF,    CAM_FLASH_MODE_OFF  },
    { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_SINGLE },
    { ANDROID_FLASH_MODE_TORCH,  CAM_FLASH_MODE_TORCH }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_statistics_face_detect_mode_t,
        cam_face_detect_mode_t> QCamera3HardwareInterface::FACEDETECT_MODES_MAP[] = {
    { ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,    CAM_FACE_DETECT_MODE_OFF    },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE, CAM_FACE_DETECT_MODE_SIMPLE },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_FULL,   CAM_FACE_DETECT_MODE_FULL   }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_info_focus_distance_calibration_t,
        cam_focus_calibration_t> QCamera3HardwareInterface::FOCUS_CALIBRATION_MAP[] = {
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED,
            CAM_FOCUS_UNCALIBRATED },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_APPROXIMATE,
            CAM_FOCUS_APPROXIMATE },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED,
            CAM_FOCUS_CALIBRATED }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_state_t,
        cam_af_lens_state_t> QCamera3HardwareInterface::LENS_STATE_MAP[] = {
    { ANDROID_LENS_STATE_STATIONARY,    CAM_AF_LENS_STATE_STATIONARY},
    { ANDROID_LENS_STATE_MOVING,        CAM_AF_LENS_STATE_MOVING}
};

const int32_t available_thumbnail_sizes[] = {0, 0,
                                             176, 144,
                                             240, 144,
                                             256, 144,
                                             240, 160,
                                             256, 154,
                                             240, 240,
                                             320, 240};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_test_pattern_mode_t,
        cam_test_pattern_mode_t> QCamera3HardwareInterface::TEST_PATTERN_MAP[] = {
    { ANDROID_SENSOR_TEST_PATTERN_MODE_OFF,         CAM_TEST_PATTERN_OFF },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR, CAM_TEST_PATTERN_SOLID_COLOR },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS,  CAM_TEST_PATTERN_COLOR_BARS },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS_FADE_TO_GRAY, CAM_TEST_PATTERN_COLOR_BARS_FADE_TO_GRAY },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_PN9,         CAM_TEST_PATTERN_PN9 },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_CUSTOM1,     CAM_TEST_PATTERN_CUSTOM1},
};

/* Not every Android enum has a corresponding HAL value, so some Android enums are not listed.
 * The order of this list also matters: when mapping from HAL to Android, the lookup traverses
 * from lower to higher index, so for a HAL value that maps to multiple Android values, the
 * first entry found is the one selected.
 */
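/* For example, CAM_AWB_D50 appears below for D50, DAYLIGHT and FINE_WEATHER; a reverse
 * (HAL-to-Android) lookup of CAM_AWB_D50 therefore returns
 * ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D50, the lowest-index match.
 */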
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_reference_illuminant1_t,
        cam_illuminat_t> QCamera3HardwareInterface::REFERENCE_ILLUMINANT_MAP[] = {
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FLUORESCENT, CAM_AWB_WARM_FLO},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_COOL_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_A, CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D55, CAM_AWB_NOON },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D65, CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D75, CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D50, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_ISO_STUDIO_TUNGSTEN, CAM_AWB_CUSTOM_A},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_TUNGSTEN, CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FINE_WEATHER, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_CLOUDY_WEATHER, CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_SHADE, CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAY_WHITE_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO},
};

const QCamera3HardwareInterface::QCameraMap<
        int32_t, cam_hfr_mode_t> QCamera3HardwareInterface::HFR_MODE_MAP[] = {
    { 60, CAM_HFR_MODE_60FPS},
    { 90, CAM_HFR_MODE_90FPS},
    { 120, CAM_HFR_MODE_120FPS},
    { 150, CAM_HFR_MODE_150FPS},
    { 180, CAM_HFR_MODE_180FPS},
    { 210, CAM_HFR_MODE_210FPS},
    { 240, CAM_HFR_MODE_240FPS},
    { 480, CAM_HFR_MODE_480FPS},
};

const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_instant_aec_mode_t,
        cam_aec_convergence_type> QCamera3HardwareInterface::INSTANT_AEC_MODES_MAP[] = {
    { QCAMERA3_INSTANT_AEC_NORMAL_CONVERGENCE, CAM_AEC_NORMAL_CONVERGENCE},
    { QCAMERA3_INSTANT_AEC_AGGRESSIVE_CONVERGENCE, CAM_AEC_AGGRESSIVE_CONVERGENCE},
    { QCAMERA3_INSTANT_AEC_FAST_CONVERGENCE, CAM_AEC_FAST_CONVERGENCE},
};

const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_exposure_meter_mode_t,
        cam_auto_exposure_mode_type> QCamera3HardwareInterface::AEC_MODES_MAP[] = {
    { QCAMERA3_EXP_METER_MODE_FRAME_AVERAGE, CAM_AEC_MODE_FRAME_AVERAGE },
    { QCAMERA3_EXP_METER_MODE_CENTER_WEIGHTED, CAM_AEC_MODE_CENTER_WEIGHTED },
    { QCAMERA3_EXP_METER_MODE_SPOT_METERING, CAM_AEC_MODE_SPOT_METERING },
    { QCAMERA3_EXP_METER_MODE_SMART_METERING, CAM_AEC_MODE_SMART_METERING },
    { QCAMERA3_EXP_METER_MODE_USER_METERING, CAM_AEC_MODE_USER_METERING },
    { QCAMERA3_EXP_METER_MODE_SPOT_METERING_ADV, CAM_AEC_MODE_SPOT_METERING_ADV },
    { QCAMERA3_EXP_METER_MODE_CENTER_WEIGHTED_ADV, CAM_AEC_MODE_CENTER_WEIGHTED_ADV },
};

const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_iso_mode_t,
        cam_iso_mode_type> QCamera3HardwareInterface::ISO_MODES_MAP[] = {
    { QCAMERA3_ISO_MODE_AUTO, CAM_ISO_MODE_AUTO },
    { QCAMERA3_ISO_MODE_DEBLUR, CAM_ISO_MODE_DEBLUR },
    { QCAMERA3_ISO_MODE_100, CAM_ISO_MODE_100 },
    { QCAMERA3_ISO_MODE_200, CAM_ISO_MODE_200 },
    { QCAMERA3_ISO_MODE_400, CAM_ISO_MODE_400 },
    { QCAMERA3_ISO_MODE_800, CAM_ISO_MODE_800 },
    { QCAMERA3_ISO_MODE_1600, CAM_ISO_MODE_1600 },
    { QCAMERA3_ISO_MODE_3200, CAM_ISO_MODE_3200 },
};

camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
    .initialize                         = QCamera3HardwareInterface::initialize,
    .configure_streams                  = QCamera3HardwareInterface::configure_streams,
    .register_stream_buffers            = NULL,
    .construct_default_request_settings = QCamera3HardwareInterface::construct_default_request_settings,
    .process_capture_request            = QCamera3HardwareInterface::process_capture_request,
    .get_metadata_vendor_tag_ops        = NULL,
    .dump                               = QCamera3HardwareInterface::dump,
    .flush                              = QCamera3HardwareInterface::flush,
    .reserved                           = {0},
};

// initialise to some default value
uint32_t QCamera3HardwareInterface::sessionId[] = {0xDEADBEEF, 0xDEADBEEF, 0xDEADBEEF};

static inline void logEaselEvent(const char *tag, const char *event) {
    if (CC_UNLIKELY(gEaselProfilingEnabled)) {
        struct timespec ts = {};
        static int64_t kMsPerSec = 1000;
        static int64_t kNsPerMs = 1000000;
        status_t res = clock_gettime(CLOCK_BOOTTIME, &ts);
        if (res != OK) {
            ALOGE("[%s] Failed to get boot time for <%s>.", tag, event);
        } else {
            int64_t now = static_cast<int64_t>(ts.tv_sec) * kMsPerSec + ts.tv_nsec / kNsPerMs;
            ALOGI("[%s] %s at %" PRId64 " ms", tag, event, now);
        }
    }
}
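// Example call site (illustrative): openCamera() below logs the camera-open milestone with
//   logEaselEvent("EASEL_STARTUP_LATENCY", "Camera Open");
// Events are emitted only when the persist property enabling gEaselProfilingEnabled is set.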

/*===========================================================================
 * FUNCTION   : QCamera3HardwareInterface
 *
 * DESCRIPTION: constructor of QCamera3HardwareInterface
 *
 * PARAMETERS :
 *   @cameraId  : camera ID
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::QCamera3HardwareInterface(uint32_t cameraId,
        const camera_module_callbacks_t *callbacks)
    : mCameraId(cameraId),
      mCameraHandle(NULL),
      mCameraInitialized(false),
      mCallbackOps(NULL),
      mMetadataChannel(NULL),
      mPictureChannel(NULL),
      mRawChannel(NULL),
      mSupportChannel(NULL),
      mAnalysisChannel(NULL),
      mRawDumpChannel(NULL),
      mHdrPlusRawSrcChannel(NULL),
      mDummyBatchChannel(NULL),
      mDepthChannel(NULL),
      mDepthCloudMode(CAM_PD_DATA_SKIP),
      mPerfLockMgr(),
      mChannelHandle(0),
      mFirstConfiguration(true),
      mFlush(false),
      mFlushPerf(false),
      mParamHeap(NULL),
      mParameters(NULL),
      mPrevParameters(NULL),
      m_bIsVideo(false),
      m_bIs4KVideo(false),
      m_bEisSupportedSize(false),
      m_bEisEnable(false),
      m_bEis3PropertyEnabled(false),
      m_bAVTimerEnabled(false),
      m_MobicatMask(0),
      mShutterDispatcher(this),
      mOutputBufferDispatcher(this),
      mMinProcessedFrameDuration(0),
      mMinJpegFrameDuration(0),
      mMinRawFrameDuration(0),
      mExpectedFrameDuration(0),
      mExpectedInflightDuration(0),
      mMetaFrameCount(0U),
      mUpdateDebugLevel(false),
      mCallbacks(callbacks),
      mCaptureIntent(0),
      mCacMode(0),
      /* DevCamDebug metadata internal m control */
      mDevCamDebugMetaEnable(0),
      /* DevCamDebug metadata end */
      mBatchSize(0),
      mToBeQueuedVidBufs(0),
      mHFRVideoFps(DEFAULT_VIDEO_FPS),
      mOpMode(CAMERA3_STREAM_CONFIGURATION_NORMAL_MODE),
      mStreamConfig(false),
      mCommon(),
      mFirstFrameNumberInBatch(0),
      mNeedSensorRestart(false),
      mPreviewStarted(false),
      mMinInFlightRequests(MIN_INFLIGHT_REQUESTS),
      mMaxInFlightRequests(MAX_INFLIGHT_REQUESTS),
      mPDSupported(false),
      mPDIndex(0),
      mInstantAEC(false),
      mResetInstantAEC(false),
      mInstantAECSettledFrameNumber(0),
      mAecSkipDisplayFrameBound(0),
      mInstantAecFrameIdxCount(0),
      mLastRequestedLensShadingMapMode(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF),
      mCurrFeatureState(0),
      mLdafCalibExist(false),
      mLastCustIntentFrmNum(-1),
      mFirstMetadataCallback(true),
      mState(CLOSED),
      mIsDeviceLinked(false),
      mIsMainCamera(true),
      mLinkedCameraId(0),
      m_pDualCamCmdHeap(NULL),
      m_pDualCamCmdPtr(NULL),
      mHdrPlusModeEnabled(false),
      mZslEnabled(false),
      mEaselMipiStarted(false),
      mIsApInputUsedForHdrPlus(false),
      mFirstPreviewIntentSeen(false),
      m_bSensorHDREnabled(false),
      mAfTrigger(),
      mSceneDistance(-1)
{
    getLogLevel();
    mCommon.init(gCamCapability[cameraId]);
    mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
#ifndef USE_HAL_3_3
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_4;
#else
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_3;
#endif
    mCameraDevice.common.close = close_camera_device;
    mCameraDevice.ops = &mCameraOps;
    mCameraDevice.priv = this;
    gCamCapability[cameraId]->version = CAM_HAL_V3;
    // TODO: hardcode for now until mctl add support for min_num_pp_bufs
    //TBD - To see if this hardcoding is needed. Check by printing if this is filled by mctl to 3
    gCamCapability[cameraId]->min_num_pp_bufs = 3;

    PTHREAD_COND_INIT(&mBuffersCond);

    PTHREAD_COND_INIT(&mRequestCond);
    mPendingLiveRequest = 0;
    mCurrentRequestId = -1;
    pthread_mutex_init(&mMutex, NULL);

    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        mDefaultMetadata[i] = NULL;

    // Getting system props of different kinds
    char prop[PROPERTY_VALUE_MAX];
    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.raw.dump", prop, "0");
    mEnableRawDump = atoi(prop);
    property_get("persist.camera.hal3.force.hdr", prop, "0");
    mForceHdrSnapshot = atoi(prop);

    if (mEnableRawDump)
        LOGD("Raw dump from Camera HAL enabled");

    memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
    memset(mLdafCalib, 0, sizeof(mLdafCalib));

    memset(mEaselFwVersion, 0, sizeof(mEaselFwVersion));
    mEaselFwUpdated = false;

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.preview", prop, "0");
    m_bTnrPreview = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.swtnr.preview", prop, "1");
    m_bSwTnrPreview = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.video", prop, "1");
    m_bTnrVideo = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.avtimer.debug", prop, "0");
    m_debug_avtimer = (uint8_t)atoi(prop);
    LOGI("AV timer enabled: %d", m_debug_avtimer);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.cacmode.disable", prop, "0");
    m_cacModeDisabled = (uint8_t)atoi(prop);

    m_bForceInfinityAf = property_get_bool("persist.camera.af.infinity", 0);
    m_MobicatMask = (uint8_t)property_get_int32("persist.camera.mobicat", 0);

    //Load and read GPU library.
    lib_surface_utils = NULL;
    LINK_get_surface_pixel_alignment = NULL;
    mSurfaceStridePadding = CAM_PAD_TO_64;
#ifdef CHECK_GPU_PIXEL_ALIGNMENT
    lib_surface_utils = dlopen("libadreno_utils.so", RTLD_NOW);
    if (lib_surface_utils) {
        *(void **)&LINK_get_surface_pixel_alignment =
                dlsym(lib_surface_utils, "get_gpu_pixel_alignment");
        if (LINK_get_surface_pixel_alignment) {
            mSurfaceStridePadding = LINK_get_surface_pixel_alignment();
        }
        dlclose(lib_surface_utils);
    }
#endif
    mPDIndex = getPDStatIndex(gCamCapability[cameraId]);
    mPDSupported = (0 <= mPDIndex) ? true : false;

    m60HzZone = is60HzZone();
}

/*===========================================================================
 * FUNCTION   : ~QCamera3HardwareInterface
 *
 * DESCRIPTION: destructor of QCamera3HardwareInterface
 *
 * PARAMETERS : none
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::~QCamera3HardwareInterface()
{
    LOGD("E");

    int32_t rc = 0;

    // Disable power hint and enable the perf lock for close camera
    mPerfLockMgr.releasePerfLock(PERF_LOCK_POWERHINT_ENCODE);
    mPerfLockMgr.acquirePerfLock(PERF_LOCK_CLOSE_CAMERA);

    // Close HDR+ client first before destroying HAL.
    {
        std::unique_lock<std::mutex> l(gHdrPlusClientLock);
        finishHdrPlusClientOpeningLocked(l);
        if (gHdrPlusClient != nullptr) {
            // Disable HDR+ mode.
            disableHdrPlusModeLocked();
            // Disconnect Easel if it's connected.
            gEaselManagerClient->closeHdrPlusClient(std::move(gHdrPlusClient));
            gHdrPlusClient = nullptr;
        }
    }

    // unlink of dualcam during close camera
    if (mIsDeviceLinked) {
        cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
                &m_pDualCamCmdPtr->bundle_info;
        m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
        m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
        pthread_mutex_lock(&gCamLock);

        if (mIsMainCamera == 1) {
            m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
            m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
            m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
            // related session id should be session id of linked session
            m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
        } else {
            m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
            m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
            m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
            m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
        }
        m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
        pthread_mutex_unlock(&gCamLock);

        rc = mCameraHandle->ops->set_dual_cam_cmd(
                mCameraHandle->camera_handle);
        if (rc < 0) {
            LOGE("Dualcam: Unlink failed, but still proceed to close");
        }
    }

    /* We need to stop all streams before deleting any stream */
    if (mRawDumpChannel) {
        mRawDumpChannel->stop();
    }

    if (mHdrPlusRawSrcChannel) {
        mHdrPlusRawSrcChannel->stop();
    }

    // NOTE: 'camera3_stream_t *' objects are already freed at
    //        this stage by the framework
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel) {
            channel->stop();
        }
    }
    if (mSupportChannel)
        mSupportChannel->stop();

    if (mAnalysisChannel) {
        mAnalysisChannel->stop();
    }
    if (mMetadataChannel) {
        mMetadataChannel->stop();
    }
    if (mChannelHandle) {
        stopChannelLocked(/*stop_immediately*/false);
    }

    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel)
            delete channel;
        free (*it);
    }
    if (mSupportChannel) {
        delete mSupportChannel;
        mSupportChannel = NULL;
    }

    if (mAnalysisChannel) {
        delete mAnalysisChannel;
        mAnalysisChannel = NULL;
    }
    if (mRawDumpChannel) {
        delete mRawDumpChannel;
        mRawDumpChannel = NULL;
    }
    if (mHdrPlusRawSrcChannel) {
        delete mHdrPlusRawSrcChannel;
        mHdrPlusRawSrcChannel = NULL;
    }
    if (mDummyBatchChannel) {
        delete mDummyBatchChannel;
        mDummyBatchChannel = NULL;
    }

    mPictureChannel = NULL;
    mDepthChannel = NULL;

    if (mMetadataChannel) {
        delete mMetadataChannel;
        mMetadataChannel = NULL;
    }

    /* Clean up all channels */
    if (mCameraInitialized) {
        if(!mFirstConfiguration){
            //send the last unconfigure
            cam_stream_size_info_t stream_config_info;
            memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
            stream_config_info.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
            stream_config_info.buffer_info.max_buffers =
                    m_bIs4KVideo ? 0 :
                    m_bEis3PropertyEnabled && m_bIsVideo ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
            clear_metadata_buffer(mParameters);
            ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_INFO,
                    stream_config_info);
            int rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
            if (rc < 0) {
                LOGE("set_parms failed for unconfigure");
            }
        }
        deinitParameters();
    }

    if (mChannelHandle) {
        mCameraHandle->ops->delete_channel(mCameraHandle->camera_handle,
                mChannelHandle);
        LOGH("deleting channel %d", mChannelHandle);
        mChannelHandle = 0;
    }

    if (mState != CLOSED)
        closeCamera();

    for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
        req.mPendingBufferList.clear();
    }
    mPendingBuffersMap.mPendingBuffersInRequest.clear();
    for (pendingRequestIterator i = mPendingRequestsList.begin();
            i != mPendingRequestsList.end();) {
        i = erasePendingRequest(i);
    }
    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        if (mDefaultMetadata[i])
            free_camera_metadata(mDefaultMetadata[i]);

    mPerfLockMgr.releasePerfLock(PERF_LOCK_CLOSE_CAMERA);

    pthread_cond_destroy(&mRequestCond);

    pthread_cond_destroy(&mBuffersCond);

    pthread_mutex_destroy(&mMutex);
    LOGD("X");
}

/*===========================================================================
 * FUNCTION   : erasePendingRequest
 *
 * DESCRIPTION: function to erase a desired pending request after freeing any
 *              allocated memory
 *
 * PARAMETERS :
 *   @i       : iterator pointing to pending request to be erased
 *
 * RETURN     : iterator pointing to the next request
 *==========================================================================*/
QCamera3HardwareInterface::pendingRequestIterator
        QCamera3HardwareInterface::erasePendingRequest (pendingRequestIterator i)
{
    if (i->input_buffer != NULL) {
        free(i->input_buffer);
        i->input_buffer = NULL;
    }
    if (i->settings != NULL)
        free_camera_metadata((camera_metadata_t*)i->settings);

    mExpectedInflightDuration -= i->expectedFrameDuration;
    if (mExpectedInflightDuration < 0) {
        LOGE("Negative expected in-flight duration!");
        mExpectedInflightDuration = 0;
    }

    return mPendingRequestsList.erase(i);
}

/*===========================================================================
 * FUNCTION   : camEvtHandle
 *
 * DESCRIPTION: Function registered to mm-camera-interface to handle events
 *
 * PARAMETERS :
 *   @camera_handle : interface layer camera handle
 *   @evt           : ptr to event
 *   @user_data     : user data ptr
 *
 * RETURN     : none
 *==========================================================================*/
void QCamera3HardwareInterface::camEvtHandle(uint32_t /*camera_handle*/,
        mm_camera_event_t *evt,
        void *user_data)
{
    QCamera3HardwareInterface *obj = (QCamera3HardwareInterface *)user_data;
    if (obj && evt) {
        switch(evt->server_event_type) {
            case CAM_EVENT_TYPE_DAEMON_DIED:
                pthread_mutex_lock(&obj->mMutex);
                obj->mState = ERROR;
                pthread_mutex_unlock(&obj->mMutex);
                LOGE("Fatal, camera daemon died");
                break;

            case CAM_EVENT_TYPE_DAEMON_PULL_REQ:
                LOGD("HAL got request pull from Daemon");
                pthread_mutex_lock(&obj->mMutex);
                obj->mWokenUpByDaemon = true;
                obj->unblockRequestIfNecessary();
                pthread_mutex_unlock(&obj->mMutex);
                break;

            default:
                LOGW("Warning: Unhandled event %d",
                        evt->server_event_type);
                break;
        }
    } else {
        LOGE("NULL user_data/evt");
    }
}

/*===========================================================================
 * FUNCTION   : openCamera
 *
 * DESCRIPTION: open camera
 *
 * PARAMETERS :
 *   @hw_device  : double ptr for camera device struct
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              none-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device)
{
    int rc = 0;
    if (mState != CLOSED) {
        *hw_device = NULL;
        return PERMISSION_DENIED;
    }

    logEaselEvent("EASEL_STARTUP_LATENCY", "Camera Open");
    mPerfLockMgr.acquirePerfLock(PERF_LOCK_OPEN_CAMERA);
    LOGI("[KPI Perf]: E PROFILE_OPEN_CAMERA camera id %d",
            mCameraId);

    if (mCameraHandle) {
        LOGE("Failure: Camera already opened");
        return ALREADY_EXISTS;
    }

    {
        std::unique_lock<std::mutex> l(gHdrPlusClientLock);
        if (gEaselManagerClient != nullptr && gEaselManagerClient->isEaselPresentOnDevice()) {
            logEaselEvent("EASEL_STARTUP_LATENCY", "Resume");
            rc = gEaselManagerClient->resume(this);
            if (rc != 0) {
                ALOGE("%s: Resuming Easel failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
                return rc;
            }
            mEaselFwUpdated = false;
        }
    }

    rc = openCamera();
    if (rc == 0) {
        *hw_device = &mCameraDevice.common;
    } else {
        *hw_device = NULL;

        // Suspend Easel because opening camera failed.
        {
            std::unique_lock<std::mutex> l(gHdrPlusClientLock);
            if (gEaselManagerClient != nullptr && gEaselManagerClient->isEaselPresentOnDevice()) {
                status_t suspendErr = gEaselManagerClient->suspend();
                if (suspendErr != 0) {
                    ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__,
                            strerror(-suspendErr), suspendErr);
                }
            }
        }
    }

    LOGI("[KPI Perf]: X PROFILE_OPEN_CAMERA camera id %d, rc: %d",
            mCameraId, rc);

    if (rc == NO_ERROR) {
        mState = OPENED;
    }

    return rc;
}

/*===========================================================================
 * FUNCTION   : openCamera
 *
 * DESCRIPTION: open camera
 *
 * PARAMETERS : none
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              none-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::openCamera()
{
    int rc = 0;
    char value[PROPERTY_VALUE_MAX];

    KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_OPENCAMERA);

    rc = QCameraFlash::getInstance().reserveFlashForCamera(mCameraId);
    if (rc < 0) {
        LOGE("Failed to reserve flash for camera id: %d",
                mCameraId);
        return UNKNOWN_ERROR;
    }

    rc = camera_open((uint8_t)mCameraId, &mCameraHandle);
    if (rc) {
        LOGE("camera_open failed. rc = %d, mCameraHandle = %p", rc, mCameraHandle);
        return rc;
    }

    if (!mCameraHandle) {
        LOGE("camera_open failed. mCameraHandle = %p", mCameraHandle);
        return -ENODEV;
    }

    rc = mCameraHandle->ops->register_event_notify(mCameraHandle->camera_handle,
            camEvtHandle, (void *)this);

    if (rc < 0) {
        LOGE("Error, failed to register event callback");
        /* Not closing camera here since it is already handled in destructor */
        return FAILED_TRANSACTION;
    }

    mExifParams.debug_params =
            (mm_jpeg_debug_exif_params_t *) malloc (sizeof(mm_jpeg_debug_exif_params_t));
    if (mExifParams.debug_params) {
        memset(mExifParams.debug_params, 0, sizeof(mm_jpeg_debug_exif_params_t));
    } else {
        LOGE("Out of Memory. Allocation failed for 3A debug exif params");
        return NO_MEMORY;
    }
    mFirstConfiguration = true;

    //Notify display HAL that a camera session is active.
    //But avoid calling the same during bootup because camera service might open/close
    //cameras at boot time during its initialization and display service will also internally
    //wait for camera service to initialize first while calling this display API, resulting in a
    //deadlock situation. Since boot time camera open/close calls are made only to fetch
    //capabilities, there is no need for this display bandwidth optimization at boot.
    //Use "service.bootanim.exit" property to know boot status.
    property_get("service.bootanim.exit", value, "0");
    if (atoi(value) == 1) {
        pthread_mutex_lock(&gCamLock);
        if (gNumCameraSessions++ == 0) {
            setCameraLaunchStatus(true);
        }
        pthread_mutex_unlock(&gCamLock);
    }

    //fill the session id needed while linking dual cam
    pthread_mutex_lock(&gCamLock);
    rc = mCameraHandle->ops->get_session_id(mCameraHandle->camera_handle,
        &sessionId[mCameraId]);
    pthread_mutex_unlock(&gCamLock);

    if (rc < 0) {
1028 LOGE("Error, failed to get sessiion id");
        return UNKNOWN_ERROR;
    } else {
        //Allocate related cam sync buffer
        //this is needed for the payload that goes along with bundling cmd for related
        //camera use cases
        m_pDualCamCmdHeap = new QCamera3HeapMemory(1);
        rc = m_pDualCamCmdHeap->allocate(sizeof(cam_dual_camera_cmd_info_t));
        if(rc != OK) {
            rc = NO_MEMORY;
            LOGE("Dualcam: Failed to allocate Related cam sync Heap memory");
            return NO_MEMORY;
        }

        //Map memory for related cam sync buffer
        rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
                CAM_MAPPING_BUF_TYPE_DUAL_CAM_CMD_BUF,
                m_pDualCamCmdHeap->getFd(0),
                sizeof(cam_dual_camera_cmd_info_t),
                m_pDualCamCmdHeap->getPtr(0));
        if(rc < 0) {
            LOGE("Dualcam: failed to map Related cam sync buffer");
            rc = FAILED_TRANSACTION;
            return NO_MEMORY;
        }
        m_pDualCamCmdPtr =
                (cam_dual_camera_cmd_info_t*) DATA_PTR(m_pDualCamCmdHeap,0);
    }

    LOGH("mCameraId=%d",mCameraId);

    return NO_ERROR;
}

/*===========================================================================
 * FUNCTION   : closeCamera
 *
 * DESCRIPTION: close camera
 *
 * PARAMETERS : none
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              none-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::closeCamera()
{
    KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CLOSECAMERA);
    int rc = NO_ERROR;
    char value[PROPERTY_VALUE_MAX];

    LOGI("[KPI Perf]: E PROFILE_CLOSE_CAMERA camera id %d",
            mCameraId);

    // unmap memory for related cam sync buffer
    mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
            CAM_MAPPING_BUF_TYPE_DUAL_CAM_CMD_BUF);
    if (NULL != m_pDualCamCmdHeap) {
        m_pDualCamCmdHeap->deallocate();
        delete m_pDualCamCmdHeap;
        m_pDualCamCmdHeap = NULL;
        m_pDualCamCmdPtr = NULL;
    }

    rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
    mCameraHandle = NULL;

    //reset session id to some invalid id
    pthread_mutex_lock(&gCamLock);
    sessionId[mCameraId] = 0xDEADBEEF;
    pthread_mutex_unlock(&gCamLock);

    //Notify display HAL that there is no active camera session
    //but avoid calling the same during bootup. Refer to openCamera
    //for more details.
    property_get("service.bootanim.exit", value, "0");
    if (atoi(value) == 1) {
        pthread_mutex_lock(&gCamLock);
        if (--gNumCameraSessions == 0) {
            setCameraLaunchStatus(false);
        }
        pthread_mutex_unlock(&gCamLock);
    }

    if (mExifParams.debug_params) {
        free(mExifParams.debug_params);
        mExifParams.debug_params = NULL;
    }
    if (QCameraFlash::getInstance().releaseFlashFromCamera(mCameraId) != 0) {
        LOGW("Failed to release flash for camera id: %d",
                mCameraId);
    }
    mState = CLOSED;
    LOGI("[KPI Perf]: X PROFILE_CLOSE_CAMERA camera id %d, rc: %d",
            mCameraId, rc);

    {
        std::unique_lock<std::mutex> l(gHdrPlusClientLock);
        if (EaselManagerClientOpened) {
            rc = gEaselManagerClient->suspend();
            if (rc != 0) {
                ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
            }
        }
    }

    return rc;
}

/*===========================================================================
 * FUNCTION   : initialize
 *
 * DESCRIPTION: Initialize frameworks callback functions
 *
 * PARAMETERS :
 *   @callback_ops : callback function to frameworks
 *
 * RETURN     :
 *
 *==========================================================================*/
int QCamera3HardwareInterface::initialize(
        const struct camera3_callback_ops *callback_ops)
{
    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_INIT);
    int rc;

    LOGI("E :mCameraId = %d mState = %d", mCameraId, mState);
    pthread_mutex_lock(&mMutex);

    // Validate current state
    switch (mState) {
        case OPENED:
            /* valid state */
            break;
        default:
            LOGE("Invalid state %d", mState);
            rc = -ENODEV;
            goto err1;
    }

    rc = initParameters();
    if (rc < 0) {
1170 LOGE("initParamters failed %d", rc);
        goto err1;
    }
    mCallbackOps = callback_ops;

    mChannelHandle = mCameraHandle->ops->add_channel(
            mCameraHandle->camera_handle, NULL, NULL, this);
    if (mChannelHandle == 0) {
        LOGE("add_channel failed");
        rc = -ENOMEM;
        pthread_mutex_unlock(&mMutex);
        return rc;
    }

    pthread_mutex_unlock(&mMutex);
    mCameraInitialized = true;
    mState = INITIALIZED;
    LOGI("X");
    return 0;

err1:
    pthread_mutex_unlock(&mMutex);
    return rc;
}

/*===========================================================================
 * FUNCTION   : validateStreamDimensions
 *
 * DESCRIPTION: Check if the configuration requested are those advertised
 *
 * PARAMETERS :
 *   @stream_list : streams to be configured
 *
 * RETURN     :
 *
 *==========================================================================*/
int QCamera3HardwareInterface::validateStreamDimensions(
        camera3_stream_configuration_t *streamList)
{
    int rc = NO_ERROR;
    size_t count = 0;
    uint32_t depthWidth = 0;
    uint32_t depthHeight = 0;
    if (mPDSupported) {
        depthWidth = gCamCapability[mCameraId]->raw_meta_dim[mPDIndex].width;
        depthHeight = gCamCapability[mCameraId]->raw_meta_dim[mPDIndex].height;
    }

    camera3_stream_t *inputStream = NULL;
    /*
    * Loop through all streams to find input stream if it exists
    */
    for (size_t i = 0; i < streamList->num_streams; i++) {
        if (streamList->streams[i]->stream_type == CAMERA3_STREAM_INPUT) {
            if (inputStream != NULL) {
                LOGE("Error, Multiple input streams requested");
                return -EINVAL;
            }
            inputStream = streamList->streams[i];
        }
    }
    /*
    * Loop through all streams requested in configuration
    * Check if unsupported sizes have been requested on any of them
    */
    for (size_t j = 0; j < streamList->num_streams; j++) {
        bool sizeFound = false;
        camera3_stream_t *newStream = streamList->streams[j];

        uint32_t rotatedHeight = newStream->height;
        uint32_t rotatedWidth = newStream->width;
        if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
                (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
            rotatedHeight = newStream->width;
            rotatedWidth = newStream->height;
        }

        /*
        * Sizes are different for each type of stream format check against
        * appropriate table.
        */
        switch (newStream->format) {
        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
        case HAL_PIXEL_FORMAT_RAW10:
            if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
                    (ANDROID_SCALER_AVAILABLE_FORMATS_RAW16 == newStream->format) &&
                    mPDSupported) {
                if ((depthWidth == newStream->width) &&
                        (depthHeight == newStream->height)) {
                    sizeFound = true;
                }
                break;
            }
            count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
            for (size_t i = 0; i < count; i++) {
                if ((gCamCapability[mCameraId]->raw_dim[i].width == (int32_t)rotatedWidth) &&
                        (gCamCapability[mCameraId]->raw_dim[i].height == (int32_t)rotatedHeight)) {
                    sizeFound = true;
                    break;
                }
            }
            break;
        case HAL_PIXEL_FORMAT_BLOB:
            if ((newStream->data_space == HAL_DATASPACE_DEPTH) &&
                    mPDSupported) {
                //As per spec. depth cloud should be sample count / 16
                uint32_t depthSamplesCount = (depthWidth * depthHeight * 2) / 16;
                if ((depthSamplesCount == newStream->width) &&
                        (1 == newStream->height)) {
                    sizeFound = true;
                }
                break;
            }
            count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
            /* Verify set size against generated sizes table */
            for (size_t i = 0; i < count; i++) {
                if (((int32_t)rotatedWidth ==
                        gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
                        ((int32_t)rotatedHeight ==
                        gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
                    sizeFound = true;
                    break;
                }
            }
            break;
        case HAL_PIXEL_FORMAT_YCbCr_420_888:
        case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
        default:
            if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
                    || newStream->stream_type == CAMERA3_STREAM_INPUT
                    || IS_USAGE_ZSL(newStream->usage)) {
                if (((int32_t)rotatedWidth ==
                        gCamCapability[mCameraId]->active_array_size.width) &&
                        ((int32_t)rotatedHeight ==
                        gCamCapability[mCameraId]->active_array_size.height)) {
                    sizeFound = true;
                    break;
                }
1309 /* We could potentially break here to enforce that the ZSL stream
1310 * set by the framework is always the full active array size, but it
1311 * is not clear from the spec whether the framework will always follow
1312 * that. We also have logic to override to the full array size, so
1313 * keep the check lenient for now.
1314 */
1315 }
1316 count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt,
1317 MAX_SIZES_CNT);
1318 for (size_t i = 0; i < count; i++) {
1319 if (((int32_t)rotatedWidth ==
1320 gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
1321 ((int32_t)rotatedHeight ==
1322 gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
1323 sizeFound = true;
1324 break;
1325 }
1326 }
1327 break;
1328 } /* End of switch(newStream->format) */
1329
1330 /* We error out even if a single stream has unsupported size set */
1331 if (!sizeFound) {
1332 LOGE("Error: Unsupported size: %d x %d type: %d array size: %d x %d",
1333 rotatedWidth, rotatedHeight, newStream->format,
1334 gCamCapability[mCameraId]->active_array_size.width,
1335 gCamCapability[mCameraId]->active_array_size.height);
1336 rc = -EINVAL;
1337 break;
1338 }
1339 } /* End of for each stream */
1340 return rc;
1341}
1342
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001343/*===========================================================================
1344 * FUNCTION : validateUsageFlags
1345 *
1346 * DESCRIPTION: Check if the configuration usage flags map to the same internal format.
1347 *
1348 * PARAMETERS :
1349 * @stream_list : streams to be configured
1350 *
1351 * RETURN :
1352 * NO_ERROR if the usage flags are supported
1353 * error code if usage flags are not supported
1354 *
1355 *==========================================================================*/
1356int QCamera3HardwareInterface::validateUsageFlags(
1357 const camera3_stream_configuration_t* streamList)
1358{
1359 for (size_t j = 0; j < streamList->num_streams; j++) {
1360 const camera3_stream_t *newStream = streamList->streams[j];
1361
1362 if (newStream->format != HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
1363 (newStream->stream_type != CAMERA3_STREAM_OUTPUT &&
1364 newStream->stream_type != CAMERA3_STREAM_BIDIRECTIONAL)) {
1365 continue;
1366 }
1367
Jason Leec4cf5032017-05-24 18:31:41 -07001368 // Here we only care whether it's EIS3 or not
1369 char is_type_value[PROPERTY_VALUE_MAX];
1370 property_get("persist.camera.is_type", is_type_value, "4");
1371 cam_is_type_t isType = atoi(is_type_value) == IS_TYPE_EIS_3_0 ? IS_TYPE_EIS_3_0 : IS_TYPE_NONE;
1372 if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
1373 mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
1374 isType = IS_TYPE_NONE;
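        // Note: persist.camera.is_type is read here only to decide whether
        // EIS 3.0 is in play when picking the default stream formats below;
        // any other value is treated as IS_TYPE_NONE for this comparison.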
1375
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001376 bool isVideo = IS_USAGE_VIDEO(newStream->usage);
1377 bool isPreview = IS_USAGE_PREVIEW(newStream->usage);
1378 bool isZSL = IS_USAGE_ZSL(newStream->usage);
1379 bool forcePreviewUBWC = true;
1380 if (isVideo && !QCameraCommon::isVideoUBWCEnabled()) {
1381 forcePreviewUBWC = false;
1382 }
1383 cam_format_t videoFormat = QCamera3Channel::getStreamDefaultFormat(
Jason Leec4cf5032017-05-24 18:31:41 -07001384 CAM_STREAM_TYPE_VIDEO, newStream->width, newStream->height, forcePreviewUBWC, isType);
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001385 cam_format_t previewFormat = QCamera3Channel::getStreamDefaultFormat(
Jason Leec4cf5032017-05-24 18:31:41 -07001386 CAM_STREAM_TYPE_PREVIEW, newStream->width, newStream->height, forcePreviewUBWC, isType);
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001387 cam_format_t zslFormat = QCamera3Channel::getStreamDefaultFormat(
Jason Leec4cf5032017-05-24 18:31:41 -07001388 CAM_STREAM_TYPE_SNAPSHOT, newStream->width, newStream->height, forcePreviewUBWC, isType);
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001389
1390 // Color space for this camera device is guaranteed to be ITU_R_601_FR.
1391 // So color spaces will always match.
1392
1393 // Check whether underlying formats of shared streams match.
1394 if (isVideo && isPreview && videoFormat != previewFormat) {
1395 LOGE("Combined video and preview usage flag is not supported");
1396 return -EINVAL;
1397 }
1398 if (isPreview && isZSL && previewFormat != zslFormat) {
1399 LOGE("Combined preview and zsl usage flag is not supported");
1400 return -EINVAL;
1401 }
1402 if (isVideo && isZSL && videoFormat != zslFormat) {
1403 LOGE("Combined video and zsl usage flag is not supported");
1404 return -EINVAL;
1405 }
1406 }
1407 return NO_ERROR;
1408}
1409
1410/*===========================================================================
1411 * FUNCTION : validateUsageFlagsForEis
1412 *
1413 * DESCRIPTION: Check if the configuration usage flags conflict with EIS
1414 *
1415 * PARAMETERS :
1416 * @stream_list : streams to be configured
1417 *
1418 * RETURN :
1419 * NO_ERROR if the usage flags are supported
1420 * error code if usage flags are not supported
1421 *
1422 *==========================================================================*/
1423int QCamera3HardwareInterface::validateUsageFlagsForEis(
1424 const camera3_stream_configuration_t* streamList)
1425{
1426 for (size_t j = 0; j < streamList->num_streams; j++) {
1427 const camera3_stream_t *newStream = streamList->streams[j];
1428
1429 bool isVideo = IS_USAGE_VIDEO(newStream->usage);
1430 bool isPreview = IS_USAGE_PREVIEW(newStream->usage);
1431
1432 // Because EIS is "hard-coded" for certain use cases, and the current
1433 // implementation doesn't support shared preview and video on the same
1434 // stream, return failure if EIS is forced on.
1435 if (isPreview && isVideo && m_bEisEnable && m_bEisSupportedSize) {
1436 LOGE("Combined video and preview usage flag is not supported due to EIS");
1437 return -EINVAL;
1438 }
1439 }
1440 return NO_ERROR;
1441}
1442
Thierry Strudel3d639192016-09-09 11:52:26 -07001443/*==============================================================================
1444 * FUNCTION : isSupportChannelNeeded
1445 *
1446 * DESCRIPTION: Simple heuristic to determine whether a support channel is needed
1447 *
1448 * PARAMETERS :
1449 * @stream_list : streams to be configured
1450 * @stream_config_info : the config info for streams to be configured
1451 *
1452 * RETURN : Boolean true/false decision
1453 *
1454 *==========================================================================*/
1455bool QCamera3HardwareInterface::isSupportChannelNeeded(
1456 camera3_stream_configuration_t *streamList,
1457 cam_stream_size_info_t stream_config_info)
1458{
1459 uint32_t i;
1460 bool pprocRequested = false;
1461 /* Check for conditions where PProc pipeline does not have any streams*/
1462 for (i = 0; i < stream_config_info.num_streams; i++) {
1463 if (stream_config_info.type[i] != CAM_STREAM_TYPE_ANALYSIS &&
1464 stream_config_info.postprocess_mask[i] != CAM_QCOM_FEATURE_NONE) {
1465 pprocRequested = true;
1466 break;
1467 }
1468 }
1469
1470 if (pprocRequested == false )
1471 return true;
1472
1473 /* Dummy stream needed if only raw or jpeg streams present */
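    /* e.g. a configuration containing only RAW16 and/or JPEG (BLOB) outputs
     * falls through the loop below without hitting the default case, so the
     * caller knows a dummy support stream is needed to keep the processing
     * pipeline populated. */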
1474 for (i = 0; i < streamList->num_streams; i++) {
1475 switch(streamList->streams[i]->format) {
1476 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1477 case HAL_PIXEL_FORMAT_RAW10:
1478 case HAL_PIXEL_FORMAT_RAW16:
1479 case HAL_PIXEL_FORMAT_BLOB:
1480 break;
1481 default:
1482 return false;
1483 }
1484 }
1485 return true;
1486}
1487
1488/*==============================================================================
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001489 * FUNCTION : getSensorModeInfo
Thierry Strudel3d639192016-09-09 11:52:26 -07001490 *
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001491 * DESCRIPTION: Get sensor mode information based on the current stream configuration
Thierry Strudel3d639192016-09-09 11:52:26 -07001492 *
1493 * PARAMETERS :
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001494 * @sensor_mode_info : sensor mode information (output)
Thierry Strudel3d639192016-09-09 11:52:26 -07001495 *
1496 * RETURN : int32_t type of status
1497 * NO_ERROR -- success
1498 * non-zero failure code
1499 *
1500 *==========================================================================*/
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001501int32_t QCamera3HardwareInterface::getSensorModeInfo(cam_sensor_mode_info_t &sensorModeInfo)
Thierry Strudel3d639192016-09-09 11:52:26 -07001502{
1503 int32_t rc = NO_ERROR;
1504
1505 cam_dimension_t max_dim = {0, 0};
1506 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
1507 if (mStreamConfigInfo.stream_sizes[i].width > max_dim.width)
1508 max_dim.width = mStreamConfigInfo.stream_sizes[i].width;
1509 if (mStreamConfigInfo.stream_sizes[i].height > max_dim.height)
1510 max_dim.height = mStreamConfigInfo.stream_sizes[i].height;
1511 }
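    // Note: max_dim is the per-axis maximum across all configured streams, so
    // its width and height may come from two different streams; it is sent to
    // the backend below so a sensor mode large enough to cover every output
    // can be selected.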
1512
1513 clear_metadata_buffer(mParameters);
1514
1515 rc = ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_MAX_DIMENSION,
1516 max_dim);
1517 if (rc != NO_ERROR) {
1518 LOGE("Failed to update table for CAM_INTF_PARM_MAX_DIMENSION");
1519 return rc;
1520 }
1521
1522 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
1523 if (rc != NO_ERROR) {
1524 LOGE("Failed to set CAM_INTF_PARM_MAX_DIMENSION");
1525 return rc;
1526 }
1527
1528 clear_metadata_buffer(mParameters);
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001529 ADD_GET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_SENSOR_MODE_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -07001530
1531 rc = mCameraHandle->ops->get_parms(mCameraHandle->camera_handle,
1532 mParameters);
1533 if (rc != NO_ERROR) {
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001534 LOGE("Failed to get CAM_INTF_PARM_SENSOR_MODE_INFO");
Thierry Strudel3d639192016-09-09 11:52:26 -07001535 return rc;
1536 }
1537
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001538 READ_PARAM_ENTRY(mParameters, CAM_INTF_PARM_SENSOR_MODE_INFO, sensorModeInfo);
Chien-Yu Chenee335912017-02-09 17:53:20 -08001539 LOGH("%s: active array size %dx%d, pixel array size %dx%d, output pixel clock %u, "
1540 "raw bits: %d", __FUNCTION__, sensorModeInfo.active_array_size.width,
1541 sensorModeInfo.active_array_size.height, sensorModeInfo.pixel_array_size.width,
1542 sensorModeInfo.pixel_array_size.height, sensorModeInfo.op_pixel_clk,
1543 sensorModeInfo.num_raw_bits);
Thierry Strudel3d639192016-09-09 11:52:26 -07001544
1545 return rc;
1546}
1547
1548/*==============================================================================
Chien-Yu Chen605c3872017-06-14 11:09:23 -07001549 * FUNCTION : getCurrentSensorModeInfo
1550 *
1551 * DESCRIPTION: Get sensor mode information that is currently selected.
1552 *
1553 * PARAMETERS :
1554 * @sensorModeInfo : sensor mode information (output)
1555 *
1556 * RETURN : int32_t type of status
1557 * NO_ERROR -- success
1558 * non-zero failure code
1559 *
1560 *==========================================================================*/
1561int32_t QCamera3HardwareInterface::getCurrentSensorModeInfo(cam_sensor_mode_info_t &sensorModeInfo)
1562{
1563 int32_t rc = NO_ERROR;
1564
1565 clear_metadata_buffer(mParameters);
1566 ADD_GET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_CURRENT_SENSOR_MODE_INFO);
1567
1568 rc = mCameraHandle->ops->get_parms(mCameraHandle->camera_handle,
1569 mParameters);
1570 if (rc != NO_ERROR) {
1571 LOGE("Failed to get CAM_INTF_PARM_CURRENT_SENSOR_MODE_INFO");
1572 return rc;
1573 }
1574
1575 READ_PARAM_ENTRY(mParameters, CAM_INTF_PARM_CURRENT_SENSOR_MODE_INFO, sensorModeInfo);
1576 LOGH("%s: active array size %dx%d, pixel array size %dx%d, output pixel clock %u, "
1577 "raw bits: %d", __FUNCTION__, sensorModeInfo.active_array_size.width,
1578 sensorModeInfo.active_array_size.height, sensorModeInfo.pixel_array_size.width,
1579 sensorModeInfo.pixel_array_size.height, sensorModeInfo.op_pixel_clk,
1580 sensorModeInfo.num_raw_bits);
1581
1582 return rc;
1583}
1584
1585/*==============================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07001586 * FUNCTION : addToPPFeatureMask
1587 *
1588 * DESCRIPTION: add additional features to pp feature mask based on
1589 * stream type and usecase
1590 *
1591 * PARAMETERS :
1592 * @stream_format : stream type for feature mask
1593 * @stream_idx : stream idx within postprocess_mask list to change
1594 *
1595 * RETURN : NULL
1596 *
1597 *==========================================================================*/
1598void QCamera3HardwareInterface::addToPPFeatureMask(int stream_format,
1599 uint32_t stream_idx)
1600{
1601 char feature_mask_value[PROPERTY_VALUE_MAX];
1602 cam_feature_mask_t feature_mask;
1603 int args_converted;
1604 int property_len;
1605
1606 /* Get feature mask from property */
Thierry Strudel269c81a2016-10-12 12:13:59 -07001607#ifdef _LE_CAMERA_
1608 char swtnr_feature_mask_value[PROPERTY_VALUE_MAX];
1609 snprintf(swtnr_feature_mask_value, PROPERTY_VALUE_MAX, "%lld", CAM_QTI_FEATURE_SW_TNR);
1610 property_len = property_get("persist.camera.hal3.feature",
1611 feature_mask_value, swtnr_feature_mask_value);
1612#else
Thierry Strudel3d639192016-09-09 11:52:26 -07001613 property_len = property_get("persist.camera.hal3.feature",
1614 feature_mask_value, "0");
Thierry Strudel269c81a2016-10-12 12:13:59 -07001615#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07001616 if ((property_len > 2) && (feature_mask_value[0] == '0') &&
1617 (feature_mask_value[1] == 'x')) {
1618 args_converted = sscanf(feature_mask_value, "0x%llx", &feature_mask);
1619 } else {
1620 args_converted = sscanf(feature_mask_value, "%lld", &feature_mask);
1621 }
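    // e.g. setting persist.camera.hal3.feature to "0x4000" or to its decimal
    // form "16384" parses to the same mask value; which feature that bit
    // selects depends on this target's cam_feature_mask_t definitions.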
1622 if (1 != args_converted) {
1623 feature_mask = 0;
1624 LOGE("Wrong feature mask %s", feature_mask_value);
1625 return;
1626 }
1627
1628 switch (stream_format) {
1629 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED: {
1630 /* Add LLVD to pp feature mask only if video hint is enabled */
1631 if ((m_bIsVideo) && (feature_mask & CAM_QTI_FEATURE_SW_TNR)) {
1632 mStreamConfigInfo.postprocess_mask[stream_idx]
1633 |= CAM_QTI_FEATURE_SW_TNR;
1634 LOGH("Added SW TNR to pp feature mask");
1635 } else if ((m_bIsVideo) && (feature_mask & CAM_QCOM_FEATURE_LLVD)) {
1636 mStreamConfigInfo.postprocess_mask[stream_idx]
1637 |= CAM_QCOM_FEATURE_LLVD;
1638 LOGH("Added LLVD SeeMore to pp feature mask");
1639 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001640 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
1641 CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) {
1642 mStreamConfigInfo.postprocess_mask[stream_idx] |= CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
1643 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08001644 if ((m_bIsVideo) && (gCamCapability[mCameraId]->qcom_supported_feature_mask &
1645 CAM_QTI_FEATURE_BINNING_CORRECTION)) {
1646 mStreamConfigInfo.postprocess_mask[stream_idx] |=
1647 CAM_QTI_FEATURE_BINNING_CORRECTION;
1648 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001649 break;
1650 }
1651 default:
1652 break;
1653 }
1654 LOGD("PP feature mask %llx",
1655 mStreamConfigInfo.postprocess_mask[stream_idx]);
1656}
1657
1658/*==============================================================================
1659 * FUNCTION : updateFpsInPreviewBuffer
1660 *
1661 * DESCRIPTION: update FPS information in preview buffer.
1662 *
1663 * PARAMETERS :
1664 * @metadata : pointer to metadata buffer
1665 * @frame_number: frame_number to look for in pending buffer list
1666 *
1667 * RETURN : None
1668 *
1669 *==========================================================================*/
1670void QCamera3HardwareInterface::updateFpsInPreviewBuffer(metadata_buffer_t *metadata,
1671 uint32_t frame_number)
1672{
1673 // Mark all pending buffers for this particular request
1674 // with corresponding framerate information
1675 for (List<PendingBuffersInRequest>::iterator req =
1676 mPendingBuffersMap.mPendingBuffersInRequest.begin();
1677 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
1678 for(List<PendingBufferInfo>::iterator j =
1679 req->mPendingBufferList.begin();
1680 j != req->mPendingBufferList.end(); j++) {
1681 QCamera3Channel *channel = (QCamera3Channel *)j->stream->priv;
1682 if ((req->frame_number == frame_number) &&
1683 (channel->getStreamTypeMask() &
1684 (1U << CAM_STREAM_TYPE_PREVIEW))) {
1685 IF_META_AVAILABLE(cam_fps_range_t, float_range,
1686 CAM_INTF_PARM_FPS_RANGE, metadata) {
1687 typeof (MetaData_t::refreshrate) cameraFps = float_range->max_fps;
1688 struct private_handle_t *priv_handle =
1689 (struct private_handle_t *)(*(j->buffer));
1690 setMetaData(priv_handle, UPDATE_REFRESH_RATE, &cameraFps);
1691 }
1692 }
1693 }
1694 }
1695}
1696
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07001697/*==============================================================================
1698 * FUNCTION : updateTimeStampInPendingBuffers
1699 *
1700 * DESCRIPTION: update timestamp in display metadata for all pending buffers
1701 * of a frame number
1702 *
1703 * PARAMETERS :
1704 * @frame_number: frame_number. Timestamp will be set on pending buffers of this frame number
1705 * @timestamp : timestamp to be set
1706 *
1707 * RETURN : None
1708 *
1709 *==========================================================================*/
1710void QCamera3HardwareInterface::updateTimeStampInPendingBuffers(
1711 uint32_t frameNumber, nsecs_t timestamp)
1712{
1713 for (auto req = mPendingBuffersMap.mPendingBuffersInRequest.begin();
1714 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
Binhao Lin09245482017-08-31 18:25:29 -07001715 // WAR: save the av_timestamp to the next frame
1716 if(req->frame_number == frameNumber + 1) {
1717 req->av_timestamp = timestamp;
1718 }
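        // i.e. the AV timestamp produced while handling frame N is stashed on
        // the pending request of frame N + 1, per the WAR noted above.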
1719
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07001720 if (req->frame_number != frameNumber)
1721 continue;
1722
1723 for (auto k = req->mPendingBufferList.begin();
1724 k != req->mPendingBufferList.end(); k++ ) {
Binhao Lin09245482017-08-31 18:25:29 -07001725 // WAR: update timestamp when it's not VT usecase
1726 QCamera3Channel *channel = (QCamera3Channel *)k->stream->priv;
1727 if (!((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask() &&
1728 m_bAVTimerEnabled)) {
1729 struct private_handle_t *priv_handle =
1730 (struct private_handle_t *) (*(k->buffer));
1731 setMetaData(priv_handle, SET_VT_TIMESTAMP, &timestamp);
1732 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07001733 }
1734 }
1735 return;
1736}
1737
Thierry Strudel3d639192016-09-09 11:52:26 -07001738/*===========================================================================
1739 * FUNCTION : configureStreams
1740 *
1741 * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input
1742 * and output streams.
1743 *
1744 * PARAMETERS :
1745 * @stream_list : streams to be configured
1746 *
1747 * RETURN :
1748 *
1749 *==========================================================================*/
1750int QCamera3HardwareInterface::configureStreams(
1751 camera3_stream_configuration_t *streamList)
1752{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001753 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CFG_STRMS);
Thierry Strudel3d639192016-09-09 11:52:26 -07001754 int rc = 0;
1755
1756 // Acquire perfLock before configure streams
Thierry Strudelc2ee3302016-11-17 12:33:12 -08001757 mPerfLockMgr.acquirePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07001758 rc = configureStreamsPerfLocked(streamList);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08001759 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07001760
1761 return rc;
1762}
1763
1764/*===========================================================================
1765 * FUNCTION : configureStreamsPerfLocked
1766 *
1767 * DESCRIPTION: configureStreams while perfLock is held.
1768 *
1769 * PARAMETERS :
1770 * @stream_list : streams to be configured
1771 *
1772 * RETURN : int32_t type of status
1773 * NO_ERROR -- success
1774 * non-zero failure code
1775 *==========================================================================*/
1776int QCamera3HardwareInterface::configureStreamsPerfLocked(
1777 camera3_stream_configuration_t *streamList)
1778{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001779 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CFG_STRMS_PERF_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07001780 int rc = 0;
1781
1782 // Sanity check stream_list
1783 if (streamList == NULL) {
1784 LOGE("NULL stream configuration");
1785 return BAD_VALUE;
1786 }
1787 if (streamList->streams == NULL) {
1788 LOGE("NULL stream list");
1789 return BAD_VALUE;
1790 }
1791
1792 if (streamList->num_streams < 1) {
1793 LOGE("Bad number of streams requested: %d",
1794 streamList->num_streams);
1795 return BAD_VALUE;
1796 }
1797
1798 if (streamList->num_streams >= MAX_NUM_STREAMS) {
1799 LOGE("Maximum number of streams %d exceeded: %d",
1800 MAX_NUM_STREAMS, streamList->num_streams);
1801 return BAD_VALUE;
1802 }
1803
Jason Leec4cf5032017-05-24 18:31:41 -07001804 mOpMode = streamList->operation_mode;
1805 LOGD("mOpMode: %d", mOpMode);
1806
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001807 rc = validateUsageFlags(streamList);
1808 if (rc != NO_ERROR) {
1809 return rc;
1810 }
1811
Chien-Yu Chen11c8edc2017-09-11 20:54:24 -07001812 // Disable HDR+ if it's enabled;
Chien-Yu Chen153c5172017-09-08 11:33:19 -07001813 {
1814 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
1815 finishHdrPlusClientOpeningLocked(l);
1816 disableHdrPlusModeLocked();
1817 }
1818
Thierry Strudel3d639192016-09-09 11:52:26 -07001819 /* first invalidate all the streams in mStreamInfo;
1820 * if they appear again, they will be validated */
1821 for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
1822 it != mStreamInfo.end(); it++) {
1823 QCamera3ProcessingChannel *channel = (QCamera3ProcessingChannel*)(*it)->stream->priv;
1824 if (channel) {
1825 channel->stop();
1826 }
1827 (*it)->status = INVALID;
1828 }
1829
1830 if (mRawDumpChannel) {
1831 mRawDumpChannel->stop();
1832 delete mRawDumpChannel;
1833 mRawDumpChannel = NULL;
1834 }
1835
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001836 if (mHdrPlusRawSrcChannel) {
1837 mHdrPlusRawSrcChannel->stop();
1838 delete mHdrPlusRawSrcChannel;
1839 mHdrPlusRawSrcChannel = NULL;
1840 }
1841
Thierry Strudel3d639192016-09-09 11:52:26 -07001842 if (mSupportChannel)
1843 mSupportChannel->stop();
1844
1845 if (mAnalysisChannel) {
1846 mAnalysisChannel->stop();
1847 }
1848 if (mMetadataChannel) {
1849 /* If content of mStreamInfo is not 0, there is metadata stream */
1850 mMetadataChannel->stop();
1851 }
1852 if (mChannelHandle) {
Chien-Yu Chen153c5172017-09-08 11:33:19 -07001853 stopChannelLocked(/*stop_immediately*/false);
Thierry Strudel3d639192016-09-09 11:52:26 -07001854 }
1855
1856 pthread_mutex_lock(&mMutex);
1857
Chien-Yu Chendeaebad2017-06-30 11:46:34 -07001858 mPictureChannel = NULL;
1859
Thierry Strudel3d639192016-09-09 11:52:26 -07001860 // Check state
1861 switch (mState) {
1862 case INITIALIZED:
1863 case CONFIGURED:
1864 case STARTED:
1865 /* valid state */
1866 break;
1867 default:
1868 LOGE("Invalid state %d", mState);
1869 pthread_mutex_unlock(&mMutex);
1870 return -ENODEV;
1871 }
1872
1873 /* Check whether we have video stream */
1874 m_bIs4KVideo = false;
1875 m_bIsVideo = false;
1876 m_bEisSupportedSize = false;
1877 m_bTnrEnabled = false;
Mansoor Aftab93a66e52017-01-26 14:58:25 -08001878 m_bVideoHdrEnabled = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07001879 bool isZsl = false;
Emilian Peev7650c122017-01-19 08:24:33 -08001880 bool depthPresent = false;
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001881 bool isPreview = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07001882 uint32_t videoWidth = 0U;
1883 uint32_t videoHeight = 0U;
1884 size_t rawStreamCnt = 0;
1885 size_t stallStreamCnt = 0;
1886 size_t processedStreamCnt = 0;
1887 // Number of streams on ISP encoder path
1888 size_t numStreamsOnEncoder = 0;
1889 size_t numYuv888OnEncoder = 0;
1890 bool bYuv888OverrideJpeg = false;
1891 cam_dimension_t largeYuv888Size = {0, 0};
1892 cam_dimension_t maxViewfinderSize = {0, 0};
1893 bool bJpegExceeds4K = false;
1894 bool bJpegOnEncoder = false;
1895 bool bUseCommonFeatureMask = false;
1896 cam_feature_mask_t commonFeatureMask = 0;
1897 bool bSmallJpegSize = false;
1898 uint32_t width_ratio;
1899 uint32_t height_ratio;
1900 maxViewfinderSize = gCamCapability[mCameraId]->max_viewfinder_size;
1901 camera3_stream_t *inputStream = NULL;
1902 bool isJpeg = false;
1903 cam_dimension_t jpegSize = {0, 0};
Thierry Strudel9ec39c62016-12-28 11:30:05 -08001904 cam_dimension_t previewSize = {0, 0};
Emilian Peev0f3c3162017-03-15 12:57:46 +00001905 size_t pdStatCount = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07001906
1907 cam_padding_info_t padding_info = gCamCapability[mCameraId]->padding_info;
1908
1909 /*EIS configuration*/
Thierry Strudel3d639192016-09-09 11:52:26 -07001910 uint8_t eis_prop_set;
1911 uint32_t maxEisWidth = 0;
1912 uint32_t maxEisHeight = 0;
1913
Thierry Strudel295a0ca2016-11-03 18:38:47 -07001914 // Initialize all instant AEC related variables
1915 mInstantAEC = false;
1916 mResetInstantAEC = false;
1917 mInstantAECSettledFrameNumber = 0;
1918 mAecSkipDisplayFrameBound = 0;
1919 mInstantAecFrameIdxCount = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -08001920 mCurrFeatureState = 0;
1921 mStreamConfig = true;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07001922
Binhao Lin09245482017-08-31 18:25:29 -07001923 m_bAVTimerEnabled = false;
1924
Thierry Strudel3d639192016-09-09 11:52:26 -07001925 memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
1926
1927 size_t count = IS_TYPE_MAX;
1928 count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
1929 for (size_t i = 0; i < count; i++) {
1930 if ((gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001931 (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0)) {
1932 m_bEisSupported = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07001933 break;
1934 }
1935 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001936
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001937 if (m_bEisSupported) {
Thierry Strudel3d639192016-09-09 11:52:26 -07001938 maxEisWidth = MAX_EIS_WIDTH;
1939 maxEisHeight = MAX_EIS_HEIGHT;
1940 }
1941
1942 /* EIS setprop control */
1943 char eis_prop[PROPERTY_VALUE_MAX];
1944 memset(eis_prop, 0, sizeof(eis_prop));
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001945 property_get("persist.camera.eis.enable", eis_prop, "1");
Thierry Strudel3d639192016-09-09 11:52:26 -07001946 eis_prop_set = (uint8_t)atoi(eis_prop);
1947
Mansoor Aftabb9370df2017-03-15 17:09:34 -07001948 m_bEisEnable = eis_prop_set && m_bEisSupported &&
Thierry Strudel3d639192016-09-09 11:52:26 -07001949 (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE);
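    // EIS is therefore enabled only when the property is set, the sensor
    // advertises a supported IS type, and this is not a constrained high
    // speed session; it may still be turned off further below for front/aux
    // cameras or when no video stream is configured.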
1950
Mansoor Aftabb9370df2017-03-15 17:09:34 -07001951 LOGD("m_bEisEnable: %d, eis_prop_set: %d, m_bEisSupported: %d",
1952 m_bEisEnable, eis_prop_set, m_bEisSupported);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001953
Thierry Strudel3d639192016-09-09 11:52:26 -07001954 /* stream configurations */
1955 for (size_t i = 0; i < streamList->num_streams; i++) {
1956 camera3_stream_t *newStream = streamList->streams[i];
1957 LOGI("stream[%d] type = %d, format = %d, width = %d, "
1958 "height = %d, rotation = %d, usage = 0x%x",
1959 i, newStream->stream_type, newStream->format,
1960 newStream->width, newStream->height, newStream->rotation,
1961 newStream->usage);
1962 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1963 newStream->stream_type == CAMERA3_STREAM_INPUT){
1964 isZsl = true;
1965 }
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001966 if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
1967 IS_USAGE_PREVIEW(newStream->usage)) {
1968 isPreview = true;
1969 }
1970
Thierry Strudel3d639192016-09-09 11:52:26 -07001971 if (newStream->stream_type == CAMERA3_STREAM_INPUT){
1972 inputStream = newStream;
1973 }
1974
Emilian Peev7650c122017-01-19 08:24:33 -08001975 if ((newStream->format == HAL_PIXEL_FORMAT_BLOB) &&
1976 (newStream->data_space != HAL_DATASPACE_DEPTH)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07001977 isJpeg = true;
1978 jpegSize.width = newStream->width;
1979 jpegSize.height = newStream->height;
1980 if (newStream->width > VIDEO_4K_WIDTH ||
1981 newStream->height > VIDEO_4K_HEIGHT)
1982 bJpegExceeds4K = true;
1983 }
1984
1985 if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
1986 (newStream->usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER)) {
1987 m_bIsVideo = true;
Thierry Strudel2896d122017-02-23 19:18:03 -08001988 // In HAL3 we can have multiple different video streams.
1989 // The variables video width and height are used below as
1990 // dimensions of the biggest of them
1991 if (videoWidth < newStream->width ||
1992 videoHeight < newStream->height) {
1993 videoWidth = newStream->width;
1994 videoHeight = newStream->height;
1995 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001996 if ((VIDEO_4K_WIDTH <= newStream->width) &&
1997 (VIDEO_4K_HEIGHT <= newStream->height)) {
1998 m_bIs4KVideo = true;
1999 }
2000 m_bEisSupportedSize = (newStream->width <= maxEisWidth) &&
2001 (newStream->height <= maxEisHeight);
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002002
Thierry Strudel3d639192016-09-09 11:52:26 -07002003 }
2004 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
2005 newStream->stream_type == CAMERA3_STREAM_OUTPUT) {
2006 switch (newStream->format) {
2007 case HAL_PIXEL_FORMAT_BLOB:
Emilian Peev7650c122017-01-19 08:24:33 -08002008 if (newStream->data_space == HAL_DATASPACE_DEPTH) {
2009 depthPresent = true;
2010 break;
2011 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002012 stallStreamCnt++;
2013 if (isOnEncoder(maxViewfinderSize, newStream->width,
2014 newStream->height)) {
2015 numStreamsOnEncoder++;
2016 bJpegOnEncoder = true;
2017 }
2018 width_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.width,
2019 newStream->width);
2020 height_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.height,
2021 newStream->height);
2022 FATAL_IF(gCamCapability[mCameraId]->max_downscale_factor == 0,
2023 "FATAL: max_downscale_factor cannot be zero and so assert");
2024 if ( (width_ratio > gCamCapability[mCameraId]->max_downscale_factor) ||
2025 (height_ratio > gCamCapability[mCameraId]->max_downscale_factor)) {
2026 LOGH("Setting small jpeg size flag to true");
2027 bSmallJpegSize = true;
2028 }
2029 break;
2030 case HAL_PIXEL_FORMAT_RAW10:
2031 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2032 case HAL_PIXEL_FORMAT_RAW16:
2033 rawStreamCnt++;
Emilian Peev0f3c3162017-03-15 12:57:46 +00002034 if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
2035 (HAL_PIXEL_FORMAT_RAW16 == newStream->format)) {
2036 pdStatCount++;
2037 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002038 break;
2039 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
2040 processedStreamCnt++;
2041 if (isOnEncoder(maxViewfinderSize, newStream->width,
2042 newStream->height)) {
2043 if (newStream->stream_type != CAMERA3_STREAM_BIDIRECTIONAL &&
2044 !IS_USAGE_ZSL(newStream->usage)) {
2045 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2046 }
2047 numStreamsOnEncoder++;
2048 }
2049 break;
2050 case HAL_PIXEL_FORMAT_YCbCr_420_888:
2051 processedStreamCnt++;
2052 if (isOnEncoder(maxViewfinderSize, newStream->width,
2053 newStream->height)) {
2054 // If Yuv888 size is not greater than 4K, set feature mask
2055 // to SUPERSET so that it supports concurrent requests on
2056 // YUV and JPEG.
2057 if (newStream->width <= VIDEO_4K_WIDTH &&
2058 newStream->height <= VIDEO_4K_HEIGHT) {
2059 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2060 }
2061 numStreamsOnEncoder++;
2062 numYuv888OnEncoder++;
2063 largeYuv888Size.width = newStream->width;
2064 largeYuv888Size.height = newStream->height;
2065 }
2066 break;
2067 default:
2068 processedStreamCnt++;
2069 if (isOnEncoder(maxViewfinderSize, newStream->width,
2070 newStream->height)) {
2071 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2072 numStreamsOnEncoder++;
2073 }
2074 break;
2075 }
2076
2077 }
2078 }
2079
2080 if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
2081 gCamCapability[mCameraId]->position == CAM_POSITION_FRONT_AUX ||
2082 !m_bIsVideo) {
2083 m_bEisEnable = false;
2084 }
2085
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002086 if (validateUsageFlagsForEis(streamList) != NO_ERROR) {
2087 pthread_mutex_unlock(&mMutex);
2088 return -EINVAL;
2089 }
2090
Thierry Strudel54dc9782017-02-15 12:12:10 -08002091 uint8_t forceEnableTnr = 0;
2092 char tnr_prop[PROPERTY_VALUE_MAX];
2093 memset(tnr_prop, 0, sizeof(tnr_prop));
2094 property_get("debug.camera.tnr.forceenable", tnr_prop, "0");
2095 forceEnableTnr = (uint8_t)atoi(tnr_prop);
2096
Thierry Strudel3d639192016-09-09 11:52:26 -07002097 /* Logic to enable/disable TNR based on specific config size/etc.*/
2098 if ((m_bTnrPreview || m_bTnrVideo) && m_bIsVideo &&
Thierry Strudel3d639192016-09-09 11:52:26 -07002099 (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE))
2100 m_bTnrEnabled = true;
Thierry Strudel54dc9782017-02-15 12:12:10 -08002101 else if (forceEnableTnr)
2102 m_bTnrEnabled = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07002103
Mansoor Aftab93a66e52017-01-26 14:58:25 -08002104 char videoHdrProp[PROPERTY_VALUE_MAX];
2105 memset(videoHdrProp, 0, sizeof(videoHdrProp));
2106 property_get("persist.camera.hdr.video", videoHdrProp, "0");
2107 uint8_t hdr_mode_prop = (uint8_t)atoi(videoHdrProp);
2108
2109 if (hdr_mode_prop == 1 && m_bIsVideo &&
2110 mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2111 m_bVideoHdrEnabled = true;
2112 else
2113 m_bVideoHdrEnabled = false;
2114
2115
Thierry Strudel3d639192016-09-09 11:52:26 -07002116 /* Check if num_streams is sane */
2117 if (stallStreamCnt > MAX_STALLING_STREAMS ||
2118 rawStreamCnt > MAX_RAW_STREAMS ||
2119 processedStreamCnt > MAX_PROCESSED_STREAMS) {
2120 LOGE("Invalid stream configu: stall: %d, raw: %d, processed %d",
2121 stallStreamCnt, rawStreamCnt, processedStreamCnt);
2122 pthread_mutex_unlock(&mMutex);
2123 return -EINVAL;
2124 }
2125 /* Check whether we have zsl stream or 4k video case */
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002126 if (isZsl && m_bIs4KVideo) {
2127 LOGE("Currently invalid configuration ZSL & 4K Video!");
Thierry Strudel3d639192016-09-09 11:52:26 -07002128 pthread_mutex_unlock(&mMutex);
2129 return -EINVAL;
2130 }
2131 /* Check if stream sizes are sane */
2132 if (numStreamsOnEncoder > 2) {
2133 LOGE("Number of streams on ISP encoder path exceeds limits of 2");
2134 pthread_mutex_unlock(&mMutex);
2135 return -EINVAL;
2136 } else if (1 < numStreamsOnEncoder){
2137 bUseCommonFeatureMask = true;
2138 LOGH("Multiple streams above max viewfinder size, common mask needed");
2139 }
2140
2141 /* Check if BLOB size is greater than 4k in 4k recording case */
2142 if (m_bIs4KVideo && bJpegExceeds4K) {
2143 LOGE("HAL doesn't support Blob size greater than 4k in 4k recording");
2144 pthread_mutex_unlock(&mMutex);
2145 return -EINVAL;
2146 }
2147
Emilian Peev7650c122017-01-19 08:24:33 -08002148 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2149 depthPresent) {
2150 LOGE("HAL doesn't support depth streams in HFR mode!");
2151 pthread_mutex_unlock(&mMutex);
2152 return -EINVAL;
2153 }
2154
Thierry Strudel3d639192016-09-09 11:52:26 -07002155 // When JPEG and preview streams share VFE output, CPP will not apply CAC2
2156 // on JPEG stream. So disable such configurations to ensure CAC2 is applied.
2157 // Don't fail for reprocess configurations. Also don't fail if bJpegExceeds4K
2158 // is not true. Otherwise testMandatoryOutputCombinations will fail with following
2159 // configurations:
2160 // {[PRIV, PREVIEW] [PRIV, RECORD] [JPEG, RECORD]}
2161 // {[PRIV, PREVIEW] [YUV, RECORD] [JPEG, RECORD]}
2162 // (These two configurations will not have CAC2 enabled even in HQ modes.)
2163 if (!isZsl && bJpegOnEncoder && bJpegExceeds4K && bUseCommonFeatureMask) {
2164 ALOGE("%s: Blob size greater than 4k and multiple streams are on encoder output",
2165 __func__);
2166 pthread_mutex_unlock(&mMutex);
2167 return -EINVAL;
2168 }
2169
2170 // If jpeg stream is available, and a YUV 888 stream is on Encoder path, and
2171 // the YUV stream's size is greater or equal to the JPEG size, set common
2172 // postprocess mask to NONE, so that we can take advantage of postproc bypass.
2173 if (numYuv888OnEncoder && isOnEncoder(maxViewfinderSize,
2174 jpegSize.width, jpegSize.height) &&
2175 largeYuv888Size.width > jpegSize.width &&
2176 largeYuv888Size.height > jpegSize.height) {
2177 bYuv888OverrideJpeg = true;
2178 } else if (!isJpeg && numStreamsOnEncoder > 1) {
2179 commonFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2180 }
2181
2182 LOGH("max viewfinder width %d height %d isZsl %d bUseCommonFeature %x commonFeatureMask %llx",
2183 maxViewfinderSize.width, maxViewfinderSize.height, isZsl, bUseCommonFeatureMask,
2184 commonFeatureMask);
2185 LOGH("numStreamsOnEncoder %d, processedStreamCnt %d, stallcnt %d bSmallJpegSize %d",
2186 numStreamsOnEncoder, processedStreamCnt, stallStreamCnt, bSmallJpegSize);
2187
2188 rc = validateStreamDimensions(streamList);
2189 if (rc == NO_ERROR) {
2190 rc = validateStreamRotations(streamList);
2191 }
2192 if (rc != NO_ERROR) {
2193 LOGE("Invalid stream configuration requested!");
2194 pthread_mutex_unlock(&mMutex);
2195 return rc;
2196 }
2197
Emilian Peev0f3c3162017-03-15 12:57:46 +00002198 if (1 < pdStatCount) {
2199 LOGE("HAL doesn't support multiple PD streams");
2200 pthread_mutex_unlock(&mMutex);
2201 return -EINVAL;
2202 }
2203
2204 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2205 (1 == pdStatCount)) {
2206 LOGE("HAL doesn't support PD streams in HFR mode!");
2207 pthread_mutex_unlock(&mMutex);
2208 return -EINVAL;
2209 }
2210
Thierry Strudel3d639192016-09-09 11:52:26 -07002211 camera3_stream_t *zslStream = NULL; //Only use this for size and not actual handle!
2212 for (size_t i = 0; i < streamList->num_streams; i++) {
2213 camera3_stream_t *newStream = streamList->streams[i];
2214 LOGH("newStream type = %d, stream format = %d "
2215 "stream size : %d x %d, stream rotation = %d",
2216 newStream->stream_type, newStream->format,
2217 newStream->width, newStream->height, newStream->rotation);
2218 //if the stream is already in mStreamInfo, validate it
2219 bool stream_exists = false;
2220 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
2221 it != mStreamInfo.end(); it++) {
2222 if ((*it)->stream == newStream) {
2223 QCamera3ProcessingChannel *channel =
2224 (QCamera3ProcessingChannel*)(*it)->stream->priv;
2225 stream_exists = true;
2226 if (channel)
2227 delete channel;
2228 (*it)->status = VALID;
2229 (*it)->stream->priv = NULL;
2230 (*it)->channel = NULL;
2231 }
2232 }
2233 if (!stream_exists && newStream->stream_type != CAMERA3_STREAM_INPUT) {
2234 //new stream
2235 stream_info_t* stream_info;
2236 stream_info = (stream_info_t* )malloc(sizeof(stream_info_t));
2237 if (!stream_info) {
2238 LOGE("Could not allocate stream info");
2239 rc = -ENOMEM;
2240 pthread_mutex_unlock(&mMutex);
2241 return rc;
2242 }
2243 stream_info->stream = newStream;
2244 stream_info->status = VALID;
2245 stream_info->channel = NULL;
Chien-Yu Chen14d3e392017-07-10 18:27:05 -07002246 stream_info->id = i;
Thierry Strudel3d639192016-09-09 11:52:26 -07002247 mStreamInfo.push_back(stream_info);
2248 }
2249 /* Covers Opaque ZSL and API1 F/W ZSL */
2250 if (IS_USAGE_ZSL(newStream->usage)
2251 || newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ) {
2252 if (zslStream != NULL) {
2253 LOGE("Multiple input/reprocess streams requested!");
2254 pthread_mutex_unlock(&mMutex);
2255 return BAD_VALUE;
2256 }
2257 zslStream = newStream;
2258 }
2259 /* Covers YUV reprocess */
2260 if (inputStream != NULL) {
2261 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT
2262 && newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
2263 && inputStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
2264 && inputStream->width == newStream->width
2265 && inputStream->height == newStream->height) {
2266 if (zslStream != NULL) {
2267 /* This scenario indicates that multiple YUV streams with the same
2268 * size as the input stream have been requested. Since the zsl stream
2269 * handle is used solely to override the size of streams that share
2270 * h/w streams, we just make a guess here as to which of the streams
2271 * is the ZSL stream. This will be refactored once there is generic
2272 * logic for streams sharing encoder output.
2273 */
2274 LOGH("Warning, Multiple ip/reprocess streams requested!");
2275 }
2276 zslStream = newStream;
2277 }
2278 }
2279 }
2280
2281 /* If a zsl stream is set, we know that we have configured at least one input or
2282 bidirectional stream */
2283 if (NULL != zslStream) {
2284 mInputStreamInfo.dim.width = (int32_t)zslStream->width;
2285 mInputStreamInfo.dim.height = (int32_t)zslStream->height;
2286 mInputStreamInfo.format = zslStream->format;
2287 mInputStreamInfo.usage = zslStream->usage;
2288 LOGD("Input stream configured! %d x %d, format %d, usage %d",
2289 mInputStreamInfo.dim.width,
2290 mInputStreamInfo.dim.height,
2291 mInputStreamInfo.format, mInputStreamInfo.usage);
2292 }
2293
2294 cleanAndSortStreamInfo();
2295 if (mMetadataChannel) {
2296 delete mMetadataChannel;
2297 mMetadataChannel = NULL;
2298 }
2299 if (mSupportChannel) {
2300 delete mSupportChannel;
2301 mSupportChannel = NULL;
2302 }
2303
2304 if (mAnalysisChannel) {
2305 delete mAnalysisChannel;
2306 mAnalysisChannel = NULL;
2307 }
2308
2309 if (mDummyBatchChannel) {
2310 delete mDummyBatchChannel;
2311 mDummyBatchChannel = NULL;
2312 }
2313
Emilian Peev7650c122017-01-19 08:24:33 -08002314 if (mDepthChannel) {
2315 mDepthChannel = NULL;
2316 }
Emilian Peev656e4fa2017-06-02 16:47:04 +01002317 mDepthCloudMode = CAM_PD_DATA_SKIP;
Emilian Peev7650c122017-01-19 08:24:33 -08002318
Chien-Yu Chen3f303522017-05-19 15:21:45 -07002319 mShutterDispatcher.clear();
2320 mOutputBufferDispatcher.clear();
2321
Thierry Strudel2896d122017-02-23 19:18:03 -08002322 char is_type_value[PROPERTY_VALUE_MAX];
2323 property_get("persist.camera.is_type", is_type_value, "4");
2324 m_bEis3PropertyEnabled = (atoi(is_type_value) == IS_TYPE_EIS_3_0);
2325
Binhao Line406f062017-05-03 14:39:44 -07002326 char property_value[PROPERTY_VALUE_MAX];
2327 property_get("persist.camera.gzoom.at", property_value, "0");
2328 int goog_zoom_at = atoi(property_value);
Jason Leec4cf5032017-05-24 18:31:41 -07002329 bool is_goog_zoom_video_enabled = ((goog_zoom_at & 1) > 0) &&
2330 gCamCapability[mCameraId]->position == CAM_POSITION_BACK;
2331 bool is_goog_zoom_preview_enabled = ((goog_zoom_at & 2) > 0) &&
2332 gCamCapability[mCameraId]->position == CAM_POSITION_BACK;
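    // persist.camera.gzoom.at is treated as a bitmask on the back camera:
    // bit 0 enables Google zoom on the video stream, bit 1 on the preview
    // stream, so e.g. a value of 3 enables both.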
Binhao Line406f062017-05-03 14:39:44 -07002333
2334 property_get("persist.camera.gzoom.4k", property_value, "0");
2335 bool is_goog_zoom_4k_enabled = (atoi(property_value) > 0);
2336
Thierry Strudel3d639192016-09-09 11:52:26 -07002337 //Create metadata channel and initialize it
2338 cam_feature_mask_t metadataFeatureMask = CAM_QCOM_FEATURE_NONE;
2339 setPAAFSupport(metadataFeatureMask, CAM_STREAM_TYPE_METADATA,
2340 gCamCapability[mCameraId]->color_arrangement);
2341 mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
2342 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002343 setBufferErrorStatus, &padding_info, metadataFeatureMask, this);
Thierry Strudel3d639192016-09-09 11:52:26 -07002344 if (mMetadataChannel == NULL) {
2345 LOGE("failed to allocate metadata channel");
2346 rc = -ENOMEM;
2347 pthread_mutex_unlock(&mMutex);
2348 return rc;
2349 }
Emilian Peev662c05e2017-05-16 10:00:04 +01002350 mMetadataChannel->enableDepthData(depthPresent);
Thierry Strudel3d639192016-09-09 11:52:26 -07002351 rc = mMetadataChannel->initialize(IS_TYPE_NONE);
2352 if (rc < 0) {
2353 LOGE("metadata channel initialization failed");
2354 delete mMetadataChannel;
2355 mMetadataChannel = NULL;
2356 pthread_mutex_unlock(&mMutex);
2357 return rc;
2358 }
2359
Thierry Strudel2896d122017-02-23 19:18:03 -08002360 cam_feature_mask_t zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002361 bool isRawStreamRequested = false;
Thierry Strudel2896d122017-02-23 19:18:03 -08002362 bool onlyRaw = true;
Binhao Lincdb362a2017-04-20 13:31:54 -07002363 // Keep track of preview/video streams indices.
2364 // There could be more than one preview streams, but only one video stream.
2365 int32_t video_stream_idx = -1;
2366 int32_t preview_stream_idx[streamList->num_streams];
2367 size_t preview_stream_cnt = 0;
Jason Leea52b77e2017-06-27 16:16:17 -07002368 bool previewTnr[streamList->num_streams];
2369 memset(previewTnr, 0, sizeof(bool) * streamList->num_streams);
2370 bool isFront = gCamCapability[mCameraId]->position == CAM_POSITION_FRONT;
2371 // Loop through once to determine preview TNR conditions before creating channels.
2372 for (size_t i = 0; i < streamList->num_streams; i++) {
2373 camera3_stream_t *newStream = streamList->streams[i];
2374 uint32_t stream_usage = newStream->usage;
2375 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT &&
2376 newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {
2377 if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER)
2378 video_stream_idx = (int32_t)i;
2379 else
2380 preview_stream_idx[preview_stream_cnt++] = (int32_t)i;
2381 }
2382 }
2383 // By default, preview stream TNR is disabled.
2384 // Enable TNR to the preview stream if all conditions below are satisfied:
2385 // 1. preview resolution == video resolution.
2386 // 2. video stream TNR is enabled.
2387 // 3. EIS2.0 OR is front camera (which wouldn't use EIS3 even if it's set)
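    // e.g. a 1920x1080 preview configured alongside a 1920x1080 video stream
    // with TNR enabled for video, when persist.camera.is_type selects EIS 2.0
    // or the camera is front-facing, satisfies all three conditions and gets
    // preview TNR as well.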
2388 for (size_t i = 0; i < preview_stream_cnt && video_stream_idx != -1; i++) {
2389 camera3_stream_t *video_stream = streamList->streams[video_stream_idx];
2390 camera3_stream_t *preview_stream = streamList->streams[preview_stream_idx[i]];
2391 if (m_bTnrEnabled && m_bTnrVideo &&
2392 (isFront || (atoi(is_type_value) == IS_TYPE_EIS_2_0)) &&
2393 video_stream->width == preview_stream->width &&
2394 video_stream->height == preview_stream->height) {
2395 previewTnr[preview_stream_idx[i]] = true;
2396 }
2397 }
2398
Thierry Strudel3d639192016-09-09 11:52:26 -07002399 memset(&mStreamConfigInfo, 0, sizeof(cam_stream_size_info_t));
2400 /* Allocate channel objects for the requested streams */
2401 for (size_t i = 0; i < streamList->num_streams; i++) {
Binhao Line406f062017-05-03 14:39:44 -07002402
Thierry Strudel3d639192016-09-09 11:52:26 -07002403 camera3_stream_t *newStream = streamList->streams[i];
2404 uint32_t stream_usage = newStream->usage;
2405 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)newStream->width;
2406 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)newStream->height;
2407 struct camera_info *p_info = NULL;
2408 pthread_mutex_lock(&gCamLock);
2409 p_info = get_cam_info(mCameraId, &mStreamConfigInfo.sync_type);
2410 pthread_mutex_unlock(&gCamLock);
2411 if ((newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
2412 || IS_USAGE_ZSL(newStream->usage)) &&
2413 newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED){
Thierry Strudel2896d122017-02-23 19:18:03 -08002414 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002415 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
Thierry Strudel2896d122017-02-23 19:18:03 -08002416 if (isOnEncoder(maxViewfinderSize, newStream->width, newStream->height)) {
2417 if (bUseCommonFeatureMask)
2418 zsl_ppmask = commonFeatureMask;
2419 else
2420 zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002421 } else {
Thierry Strudel2896d122017-02-23 19:18:03 -08002422 if (numStreamsOnEncoder > 0)
2423 zsl_ppmask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2424 else
2425 zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002426 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002427 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = zsl_ppmask;
Thierry Strudel3d639192016-09-09 11:52:26 -07002428 } else if(newStream->stream_type == CAMERA3_STREAM_INPUT) {
Thierry Strudel2896d122017-02-23 19:18:03 -08002429 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002430 LOGH("Input stream configured, reprocess config");
2431 } else {
2432 //for non zsl streams find out the format
2433 switch (newStream->format) {
2434 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED :
2435 {
Thierry Strudel2896d122017-02-23 19:18:03 -08002436 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002437 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2438 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2439 /* add additional features to pp feature mask */
2440 addToPPFeatureMask(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
2441 mStreamConfigInfo.num_streams);
2442
2443 if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) {
2444 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2445 CAM_STREAM_TYPE_VIDEO;
2446 if (m_bTnrEnabled && m_bTnrVideo) {
2447 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2448 CAM_QCOM_FEATURE_CPP_TNR;
2449 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
2450 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2451 ~CAM_QCOM_FEATURE_CDS;
2452 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002453 if (m_bEis3PropertyEnabled /* hint for EIS 3 needed here */) {
2454 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2455 CAM_QTI_FEATURE_PPEISCORE;
2456 }
Binhao Line406f062017-05-03 14:39:44 -07002457 if (is_goog_zoom_video_enabled && (is_goog_zoom_4k_enabled || !m_bIs4KVideo)) {
2458 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2459 CAM_QCOM_FEATURE_GOOG_ZOOM;
2460 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002461 } else {
2462 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2463 CAM_STREAM_TYPE_PREVIEW;
Jason Leea52b77e2017-06-27 16:16:17 -07002464 if (m_bTnrEnabled && (previewTnr[i] || m_bTnrPreview)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002465 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2466 CAM_QCOM_FEATURE_CPP_TNR;
2467 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
2468 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2469 ~CAM_QCOM_FEATURE_CDS;
2470 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002471 if(!m_bSwTnrPreview) {
2472 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2473 ~CAM_QTI_FEATURE_SW_TNR;
2474 }
Binhao Line406f062017-05-03 14:39:44 -07002475 if (is_goog_zoom_preview_enabled) {
2476 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2477 CAM_QCOM_FEATURE_GOOG_ZOOM;
2478 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002479 padding_info.width_padding = mSurfaceStridePadding;
2480 padding_info.height_padding = CAM_PAD_TO_2;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002481 previewSize.width = (int32_t)newStream->width;
2482 previewSize.height = (int32_t)newStream->height;
Thierry Strudel3d639192016-09-09 11:52:26 -07002483 }
2484 if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
2485 (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
2486 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2487 newStream->height;
2488 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2489 newStream->width;
2490 }
2491 }
2492 break;
2493 case HAL_PIXEL_FORMAT_YCbCr_420_888:
Thierry Strudel2896d122017-02-23 19:18:03 -08002494 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002495 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_CALLBACK;
2496 if (isOnEncoder(maxViewfinderSize, newStream->width, newStream->height)) {
2497 if (bUseCommonFeatureMask)
2498 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2499 commonFeatureMask;
2500 else
2501 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2502 CAM_QCOM_FEATURE_NONE;
2503 } else {
2504 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2505 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2506 }
2507 break;
2508 case HAL_PIXEL_FORMAT_BLOB:
Thierry Strudel2896d122017-02-23 19:18:03 -08002509 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002510 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
2511 // No need to check bSmallJpegSize if ZSL is present since JPEG uses ZSL stream
2512 if ((m_bIs4KVideo && !isZsl) || (bSmallJpegSize && !isZsl)) {
2513 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2514 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel2896d122017-02-23 19:18:03 -08002515 /* Remove rotation if it is not supported
2516 for 4K LiveVideo snapshot case (online processing) */
2517 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask &
2518 CAM_QCOM_FEATURE_ROTATION)) {
2519 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams]
2520 &= ~CAM_QCOM_FEATURE_ROTATION;
2521 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002522 } else {
2523 if (bUseCommonFeatureMask &&
2524 isOnEncoder(maxViewfinderSize, newStream->width,
2525 newStream->height)) {
2526 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = commonFeatureMask;
2527 } else {
2528 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2529 }
2530 }
2531 if (isZsl) {
2532 if (zslStream) {
2533 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2534 (int32_t)zslStream->width;
2535 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2536 (int32_t)zslStream->height;
Thierry Strudel2896d122017-02-23 19:18:03 -08002537 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2538 zsl_ppmask;
Thierry Strudel3d639192016-09-09 11:52:26 -07002539 } else {
2540 LOGE("Error, No ZSL stream identified");
2541 pthread_mutex_unlock(&mMutex);
2542 return -EINVAL;
2543 }
2544 } else if (m_bIs4KVideo) {
2545 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)videoWidth;
2546 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)videoHeight;
2547 } else if (bYuv888OverrideJpeg) {
2548 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2549 (int32_t)largeYuv888Size.width;
2550 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2551 (int32_t)largeYuv888Size.height;
2552 }
2553 break;
2554 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2555 case HAL_PIXEL_FORMAT_RAW16:
2556 case HAL_PIXEL_FORMAT_RAW10:
2557 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
2558 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2559 isRawStreamRequested = true;
Emilian Peev0f3c3162017-03-15 12:57:46 +00002560 if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
2561 (HAL_PIXEL_FORMAT_RAW16 == newStream->format)) {
2562 mStreamConfigInfo.sub_format_type[mStreamConfigInfo.num_streams] =
2563 gCamCapability[mCameraId]->sub_fmt[mPDIndex];
2564 mStreamConfigInfo.format[mStreamConfigInfo.num_streams] =
2565 gCamCapability[mCameraId]->supported_meta_raw_fmts[mPDIndex];
2566 mStreamConfigInfo.dt[mStreamConfigInfo.num_streams] =
2567 gCamCapability[mCameraId]->dt[mPDIndex];
2568 mStreamConfigInfo.vc[mStreamConfigInfo.num_streams] =
2569 gCamCapability[mCameraId]->vc[mPDIndex];
2570 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002571 break;
2572 default:
Thierry Strudel2896d122017-02-23 19:18:03 -08002573 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002574 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_DEFAULT;
2575 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2576 break;
2577 }
2578 }
2579
2580 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2581 (cam_stream_type_t) mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2582 gCamCapability[mCameraId]->color_arrangement);
2583
2584 if (newStream->priv == NULL) {
2585 //New stream, construct channel
2586 switch (newStream->stream_type) {
2587 case CAMERA3_STREAM_INPUT:
2588 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ;
2589                newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;//WR for in-place algorithms
2590 break;
2591 case CAMERA3_STREAM_BIDIRECTIONAL:
2592 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ |
2593 GRALLOC_USAGE_HW_CAMERA_WRITE;
2594 break;
2595 case CAMERA3_STREAM_OUTPUT:
2596                /* For video encoding streams, set the read/write rarely
2597                 * flags so that the buffers may be allocated as un-cached */
2598 if (newStream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER)
2599 newStream->usage |=
2600 (GRALLOC_USAGE_SW_READ_RARELY |
2601 GRALLOC_USAGE_SW_WRITE_RARELY |
2602 GRALLOC_USAGE_HW_CAMERA_WRITE);
2603 else if (IS_USAGE_ZSL(newStream->usage))
2604 {
2605 LOGD("ZSL usage flag skipping");
2606 }
2607 else if (newStream == zslStream
2608 || newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
2609 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_ZSL;
2610 } else
2611 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;
2612 break;
2613 default:
2614 LOGE("Invalid stream_type %d", newStream->stream_type);
2615 break;
2616 }
2617
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002618 bool forcePreviewUBWC = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07002619 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT ||
2620 newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
2621 QCamera3ProcessingChannel *channel = NULL;
2622 switch (newStream->format) {
2623 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
2624 if ((newStream->usage &
2625 private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) &&
2626 (streamList->operation_mode ==
2627 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2628 ) {
2629 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2630 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002631 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002632 this,
2633 newStream,
2634 (cam_stream_type_t)
2635 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2636 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2637 mMetadataChannel,
2638 0); //heap buffers are not required for HFR video channel
2639 if (channel == NULL) {
2640 LOGE("allocation of channel failed");
2641 pthread_mutex_unlock(&mMutex);
2642 return -ENOMEM;
2643 }
2644 //channel->getNumBuffers() will return 0 here so use
2645                        //MAX_INFLIGHT_HFR_REQUESTS
2646 newStream->max_buffers = MAX_INFLIGHT_HFR_REQUESTS;
2647 newStream->priv = channel;
2648 LOGI("num video buffers in HFR mode: %d",
2649 MAX_INFLIGHT_HFR_REQUESTS);
2650 } else {
2651                        /* Copy stream contents in the HFR preview-only case to create
2652                         * a dummy batch channel so that sensor streaming is in
2653 * HFR mode */
2654 if (!m_bIsVideo && (streamList->operation_mode ==
2655 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)) {
2656 mDummyBatchStream = *newStream;
2657 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002658 int bufferCount = MAX_INFLIGHT_REQUESTS;
2659 if (mStreamConfigInfo.type[mStreamConfigInfo.num_streams] ==
2660 CAM_STREAM_TYPE_VIDEO) {
Zhijun He6cdf6372017-07-15 14:59:58 -07002661 if (m_bEis3PropertyEnabled /* hint for EIS 3 needed here */) {
2662 // WAR: 4K video can only run <=30fps, reduce the buffer count.
2663 bufferCount = m_bIs4KVideo ?
2664 MAX_30FPS_VIDEO_BUFFERS : MAX_VIDEO_BUFFERS;
2665 }
2666
Thierry Strudel2896d122017-02-23 19:18:03 -08002667 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002668 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2669 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002670 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002671 this,
2672 newStream,
2673 (cam_stream_type_t)
2674 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2675 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2676 mMetadataChannel,
Thierry Strudel2896d122017-02-23 19:18:03 -08002677 bufferCount);
Thierry Strudel3d639192016-09-09 11:52:26 -07002678 if (channel == NULL) {
2679 LOGE("allocation of channel failed");
2680 pthread_mutex_unlock(&mMutex);
2681 return -ENOMEM;
2682 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002683 /* disable UBWC for preview, though supported,
2684 * to take advantage of CPP duplication */
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002685 if (m_bIsVideo && (!QCameraCommon::isVideoUBWCEnabled()) &&
Thierry Strudel2896d122017-02-23 19:18:03 -08002686 (previewSize.width == (int32_t)videoWidth)&&
2687 (previewSize.height == (int32_t)videoHeight)){
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002688 forcePreviewUBWC = false;
Thierry Strudel2896d122017-02-23 19:18:03 -08002689 }
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002690 channel->setUBWCEnabled(forcePreviewUBWC);
Binhao Line406f062017-05-03 14:39:44 -07002691 /* When goog_zoom is linked to the preview or video stream,
2692 * disable ubwc to the linked stream */
2693 if ((mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &
2694 CAM_QCOM_FEATURE_GOOG_ZOOM) != 0) {
2695 channel->setUBWCEnabled(false);
2696 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002697 newStream->max_buffers = channel->getNumBuffers();
2698 newStream->priv = channel;
2699 }
2700 break;
2701 case HAL_PIXEL_FORMAT_YCbCr_420_888: {
2702 channel = new QCamera3YUVChannel(mCameraHandle->camera_handle,
2703 mChannelHandle,
2704 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002705 setBufferErrorStatus, &padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002706 this,
2707 newStream,
2708 (cam_stream_type_t)
2709 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2710 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2711 mMetadataChannel);
2712 if (channel == NULL) {
2713 LOGE("allocation of YUV channel failed");
2714 pthread_mutex_unlock(&mMutex);
2715 return -ENOMEM;
2716 }
2717 newStream->max_buffers = channel->getNumBuffers();
2718 newStream->priv = channel;
2719 break;
2720 }
2721 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2722 case HAL_PIXEL_FORMAT_RAW16:
Emilian Peev0f3c3162017-03-15 12:57:46 +00002723 case HAL_PIXEL_FORMAT_RAW10: {
2724 bool isRAW16 = ((newStream->format == HAL_PIXEL_FORMAT_RAW16) &&
2725 (HAL_DATASPACE_DEPTH != newStream->data_space))
2726 ? true : false;
Thierry Strudel3d639192016-09-09 11:52:26 -07002727 mRawChannel = new QCamera3RawChannel(
2728 mCameraHandle->camera_handle, mChannelHandle,
2729 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002730 setBufferErrorStatus, &padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002731 this, newStream,
2732 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
Emilian Peev0f3c3162017-03-15 12:57:46 +00002733 mMetadataChannel, isRAW16);
Thierry Strudel3d639192016-09-09 11:52:26 -07002734 if (mRawChannel == NULL) {
2735 LOGE("allocation of raw channel failed");
2736 pthread_mutex_unlock(&mMutex);
2737 return -ENOMEM;
2738 }
2739 newStream->max_buffers = mRawChannel->getNumBuffers();
2740 newStream->priv = (QCamera3ProcessingChannel*)mRawChannel;
2741 break;
Emilian Peev0f3c3162017-03-15 12:57:46 +00002742 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002743 case HAL_PIXEL_FORMAT_BLOB:
Emilian Peev7650c122017-01-19 08:24:33 -08002744 if (newStream->data_space == HAL_DATASPACE_DEPTH) {
2745 mDepthChannel = new QCamera3DepthChannel(
2746 mCameraHandle->camera_handle, mChannelHandle,
2747 mCameraHandle->ops, NULL, NULL, &padding_info,
2748 0, this, MAX_INFLIGHT_REQUESTS, newStream,
2749 mMetadataChannel);
2750 if (NULL == mDepthChannel) {
2751 LOGE("Allocation of depth channel failed");
2752 pthread_mutex_unlock(&mMutex);
2753 return NO_MEMORY;
2754 }
2755 newStream->priv = mDepthChannel;
2756 newStream->max_buffers = MAX_INFLIGHT_REQUESTS;
2757 } else {
2758 // Max live snapshot inflight buffer is 1. This is to mitigate
2759                    // frame drop issues for video snapshot. The more buffers that are
2760                    // allocated, the more frame drops occur.
2761 mPictureChannel = new QCamera3PicChannel(
2762 mCameraHandle->camera_handle, mChannelHandle,
2763 mCameraHandle->ops, captureResultCb,
2764 setBufferErrorStatus, &padding_info, this, newStream,
2765 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2766 m_bIs4KVideo, isZsl, mMetadataChannel,
2767 (m_bIsVideo ? 1 : MAX_INFLIGHT_BLOB));
2768 if (mPictureChannel == NULL) {
2769 LOGE("allocation of channel failed");
2770 pthread_mutex_unlock(&mMutex);
2771 return -ENOMEM;
2772 }
2773 newStream->priv = (QCamera3ProcessingChannel*)mPictureChannel;
2774 newStream->max_buffers = mPictureChannel->getNumBuffers();
2775 mPictureChannel->overrideYuvSize(
2776 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width,
2777 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height);
Thierry Strudel3d639192016-09-09 11:52:26 -07002778 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002779 break;
2780
2781 default:
2782 LOGE("not a supported format 0x%x", newStream->format);
Thierry Strudel73e91562017-05-15 09:16:18 -07002783 pthread_mutex_unlock(&mMutex);
2784 return -EINVAL;
Thierry Strudel3d639192016-09-09 11:52:26 -07002785 }
2786 } else if (newStream->stream_type == CAMERA3_STREAM_INPUT) {
2787 newStream->max_buffers = MAX_INFLIGHT_REPROCESS_REQUESTS;
2788 } else {
2789 LOGE("Error, Unknown stream type");
2790 pthread_mutex_unlock(&mMutex);
2791 return -EINVAL;
2792 }
2793
2794 QCamera3Channel *channel = (QCamera3Channel*) newStream->priv;
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002795 if (channel != NULL && QCamera3Channel::isUBWCEnabled()) {
Jason Leec4cf5032017-05-24 18:31:41 -07002796 // Here we only care whether it's EIS3 or not
2797 cam_is_type_t isType = m_bEis3PropertyEnabled ? IS_TYPE_EIS_3_0 : IS_TYPE_NONE;
2798 if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
2799 mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2800 isType = IS_TYPE_NONE;
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002801 cam_format_t fmt = QCamera3Channel::getStreamDefaultFormat(
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07002802 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
Jason Leec4cf5032017-05-24 18:31:41 -07002803 newStream->width, newStream->height, forcePreviewUBWC, isType);
Thierry Strudel3d639192016-09-09 11:52:26 -07002804 if(fmt == CAM_FORMAT_YUV_420_NV12_UBWC) {
2805 newStream->usage |= GRALLOC_USAGE_PRIVATE_ALLOC_UBWC;
2806 }
2807 }
2808
2809 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
2810 it != mStreamInfo.end(); it++) {
2811 if ((*it)->stream == newStream) {
2812 (*it)->channel = (QCamera3ProcessingChannel*) newStream->priv;
2813 break;
2814 }
2815 }
2816 } else {
2817 // Channel already exists for this stream
2818 // Do nothing for now
2819 }
2820 padding_info = gCamCapability[mCameraId]->padding_info;
2821
Emilian Peev7650c122017-01-19 08:24:33 -08002822        /* Do not add entries for input & depth streams in metastream info
Thierry Strudel3d639192016-09-09 11:52:26 -07002823         * since there is no real stream associated with them
2824 */
Emilian Peev7650c122017-01-19 08:24:33 -08002825 if ((newStream->stream_type != CAMERA3_STREAM_INPUT) &&
Emilian Peev0f3c3162017-03-15 12:57:46 +00002826 !((newStream->data_space == HAL_DATASPACE_DEPTH) &&
2827 (newStream->format == HAL_PIXEL_FORMAT_BLOB))) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002828 mStreamConfigInfo.num_streams++;
Emilian Peev7650c122017-01-19 08:24:33 -08002829 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002830 }
2831
Chien-Yu Chen3f303522017-05-19 15:21:45 -07002832 // Let buffer dispatcher know the configured streams.
2833 mOutputBufferDispatcher.configureStreams(streamList);
2834
Thierry Strudel2896d122017-02-23 19:18:03 -08002835 if (mOpMode != QCAMERA3_VENDOR_STREAM_CONFIGURATION_RAW_ONLY_MODE) {
2836 onlyRaw = false;
2837 }
2838
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002839 // Create analysis stream all the time, even when h/w support is not available
Thierry Strudel2896d122017-02-23 19:18:03 -08002840 if (!onlyRaw) {
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002841 cam_feature_mask_t analysisFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002842 cam_analysis_info_t analysisInfo;
2843 int32_t ret = NO_ERROR;
2844 ret = mCommon.getAnalysisInfo(
2845 FALSE,
2846 analysisFeatureMask,
2847 &analysisInfo);
2848 if (ret == NO_ERROR) {
Shuzhen Wang3b457d92016-08-03 08:46:59 -07002849 cam_color_filter_arrangement_t analysis_color_arrangement =
2850 (analysisInfo.analysis_format == CAM_FORMAT_Y_ONLY ?
2851 CAM_FILTER_ARRANGEMENT_Y :
2852 gCamCapability[mCameraId]->color_arrangement);
2853 setPAAFSupport(analysisFeatureMask, CAM_STREAM_TYPE_ANALYSIS,
2854 analysis_color_arrangement);
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002855 cam_dimension_t analysisDim;
2856 analysisDim = mCommon.getMatchingDimension(previewSize,
2857 analysisInfo.analysis_recommended_res);
2858
2859 mAnalysisChannel = new QCamera3SupportChannel(
2860 mCameraHandle->camera_handle,
2861 mChannelHandle,
2862 mCameraHandle->ops,
2863 &analysisInfo.analysis_padding_info,
2864 analysisFeatureMask,
2865 CAM_STREAM_TYPE_ANALYSIS,
2866 &analysisDim,
2867 (analysisInfo.analysis_format
2868 == CAM_FORMAT_Y_ONLY ? CAM_FORMAT_Y_ONLY
2869 : CAM_FORMAT_YUV_420_NV21),
2870 analysisInfo.hw_analysis_supported,
2871 gCamCapability[mCameraId]->color_arrangement,
2872 this,
2873 0); // force buffer count to 0
2874 } else {
2875 LOGW("getAnalysisInfo failed, ret = %d", ret);
2876 }
2877 if (!mAnalysisChannel) {
2878 LOGW("Analysis channel cannot be created");
2879 }
2880 }
2881
Thierry Strudel3d639192016-09-09 11:52:26 -07002882 //RAW DUMP channel
2883 if (mEnableRawDump && isRawStreamRequested == false){
2884 cam_dimension_t rawDumpSize;
2885 rawDumpSize = getMaxRawSize(mCameraId);
2886 cam_feature_mask_t rawDumpFeatureMask = CAM_QCOM_FEATURE_NONE;
2887 setPAAFSupport(rawDumpFeatureMask,
2888 CAM_STREAM_TYPE_RAW,
2889 gCamCapability[mCameraId]->color_arrangement);
2890 mRawDumpChannel = new QCamera3RawDumpChannel(mCameraHandle->camera_handle,
2891 mChannelHandle,
2892 mCameraHandle->ops,
2893 rawDumpSize,
2894 &padding_info,
2895 this, rawDumpFeatureMask);
2896 if (!mRawDumpChannel) {
2897 LOGE("Raw Dump channel cannot be created");
2898 pthread_mutex_unlock(&mMutex);
2899 return -ENOMEM;
2900 }
2901 }
2902
Thierry Strudel3d639192016-09-09 11:52:26 -07002903 if (mAnalysisChannel) {
2904 cam_analysis_info_t analysisInfo;
2905 memset(&analysisInfo, 0, sizeof(cam_analysis_info_t));
2906 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2907 CAM_STREAM_TYPE_ANALYSIS;
2908 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2909 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002910 rc = mCommon.getAnalysisInfo(FALSE,
Thierry Strudel3d639192016-09-09 11:52:26 -07002911 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2912 &analysisInfo);
2913 if (rc != NO_ERROR) {
2914 LOGE("getAnalysisInfo failed, ret = %d", rc);
2915 pthread_mutex_unlock(&mMutex);
2916 return rc;
2917 }
Shuzhen Wang3b457d92016-08-03 08:46:59 -07002918 cam_color_filter_arrangement_t analysis_color_arrangement =
2919 (analysisInfo.analysis_format == CAM_FORMAT_Y_ONLY ?
2920 CAM_FILTER_ARRANGEMENT_Y :
2921 gCamCapability[mCameraId]->color_arrangement);
2922 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2923 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2924 analysis_color_arrangement);
2925
Thierry Strudel3d639192016-09-09 11:52:26 -07002926 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002927 mCommon.getMatchingDimension(previewSize,
2928 analysisInfo.analysis_recommended_res);
Thierry Strudel3d639192016-09-09 11:52:26 -07002929 mStreamConfigInfo.num_streams++;
2930 }
2931
Thierry Strudel2896d122017-02-23 19:18:03 -08002932 if (!onlyRaw && isSupportChannelNeeded(streamList, mStreamConfigInfo)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002933 cam_analysis_info_t supportInfo;
2934 memset(&supportInfo, 0, sizeof(cam_analysis_info_t));
2935 cam_feature_mask_t callbackFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2936 setPAAFSupport(callbackFeatureMask,
2937 CAM_STREAM_TYPE_CALLBACK,
2938 gCamCapability[mCameraId]->color_arrangement);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002939 int32_t ret = NO_ERROR;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002940 ret = mCommon.getAnalysisInfo(FALSE, callbackFeatureMask, &supportInfo);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002941 if (ret != NO_ERROR) {
2942 /* Ignore the error for Mono camera
2943 * because the PAAF bit mask is only set
2944 * for CAM_STREAM_TYPE_ANALYSIS stream type
2945 */
2946 if (gCamCapability[mCameraId]->color_arrangement != CAM_FILTER_ARRANGEMENT_Y) {
2947 LOGW("getAnalysisInfo failed, ret = %d", ret);
2948 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002949 }
2950 mSupportChannel = new QCamera3SupportChannel(
2951 mCameraHandle->camera_handle,
2952 mChannelHandle,
2953 mCameraHandle->ops,
2954 &gCamCapability[mCameraId]->padding_info,
2955 callbackFeatureMask,
2956 CAM_STREAM_TYPE_CALLBACK,
2957 &QCamera3SupportChannel::kDim,
2958 CAM_FORMAT_YUV_420_NV21,
2959 supportInfo.hw_analysis_supported,
2960 gCamCapability[mCameraId]->color_arrangement,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002961 this, 0);
Thierry Strudel3d639192016-09-09 11:52:26 -07002962 if (!mSupportChannel) {
2963 LOGE("dummy channel cannot be created");
2964 pthread_mutex_unlock(&mMutex);
2965 return -ENOMEM;
2966 }
2967 }
2968
2969 if (mSupportChannel) {
2970 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2971 QCamera3SupportChannel::kDim;
2972 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2973 CAM_STREAM_TYPE_CALLBACK;
2974 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2975 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2976 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2977 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2978 gCamCapability[mCameraId]->color_arrangement);
2979 mStreamConfigInfo.num_streams++;
2980 }
2981
2982 if (mRawDumpChannel) {
2983 cam_dimension_t rawSize;
2984 rawSize = getMaxRawSize(mCameraId);
2985 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2986 rawSize;
2987 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2988 CAM_STREAM_TYPE_RAW;
2989 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2990 CAM_QCOM_FEATURE_NONE;
2991 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2992 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2993 gCamCapability[mCameraId]->color_arrangement);
2994 mStreamConfigInfo.num_streams++;
2995 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07002996
2997 if (mHdrPlusRawSrcChannel) {
2998 cam_dimension_t rawSize;
2999 rawSize = getMaxRawSize(mCameraId);
3000 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] = rawSize;
3001 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
3002 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
3003 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
3004 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
3005 gCamCapability[mCameraId]->color_arrangement);
3006 mStreamConfigInfo.num_streams++;
3007 }
3008
Thierry Strudel3d639192016-09-09 11:52:26 -07003009    /* In HFR mode, if a video stream is not added, create a dummy channel so that
3010     * the ISP can use batch mode even for the preview-only case. This channel is
3011 * never 'start'ed (no stream-on), it is only 'initialized' */
3012 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
3013 !m_bIsVideo) {
3014 cam_feature_mask_t dummyFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
3015 setPAAFSupport(dummyFeatureMask,
3016 CAM_STREAM_TYPE_VIDEO,
3017 gCamCapability[mCameraId]->color_arrangement);
3018 mDummyBatchChannel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
3019 mChannelHandle,
3020 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003021 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07003022 this,
3023 &mDummyBatchStream,
3024 CAM_STREAM_TYPE_VIDEO,
3025 dummyFeatureMask,
3026 mMetadataChannel);
3027 if (NULL == mDummyBatchChannel) {
3028            LOGE("creation of mDummyBatchChannel failed. "
3029 "Preview will use non-hfr sensor mode ");
3030 }
3031 }
3032 if (mDummyBatchChannel) {
3033 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
3034 mDummyBatchStream.width;
3035 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
3036 mDummyBatchStream.height;
3037 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
3038 CAM_STREAM_TYPE_VIDEO;
3039 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
3040 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
3041 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
3042 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
3043 gCamCapability[mCameraId]->color_arrangement);
3044 mStreamConfigInfo.num_streams++;
3045 }
3046
3047 mStreamConfigInfo.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
3048 mStreamConfigInfo.buffer_info.max_buffers =
Thierry Strudel2896d122017-02-23 19:18:03 -08003049 m_bIs4KVideo ? 0 :
Jason Leea46ad5e2017-07-07 15:20:56 -07003050 m_bEis3PropertyEnabled && m_bIsVideo ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07003051
3052    /* Initialize mPendingRequestsList and mPendingBuffersMap */
3053 for (pendingRequestIterator i = mPendingRequestsList.begin();
3054 i != mPendingRequestsList.end();) {
3055 i = erasePendingRequest(i);
3056 }
3057 mPendingFrameDropList.clear();
3058 // Initialize/Reset the pending buffers list
3059 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
3060 req.mPendingBufferList.clear();
3061 }
3062 mPendingBuffersMap.mPendingBuffersInRequest.clear();
Emilian Peev30522a12017-08-03 14:36:33 +01003063 mExpectedInflightDuration = 0;
3064 mExpectedFrameDuration = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07003065
Thierry Strudel3d639192016-09-09 11:52:26 -07003066 mCurJpegMeta.clear();
3067    //Get min frame duration for this stream configuration
3068 deriveMinFrameDuration();
3069
Chien-Yu Chenee335912017-02-09 17:53:20 -08003070 mFirstPreviewIntentSeen = false;
3071
Thierry Strudel3d639192016-09-09 11:52:26 -07003072 // Update state
3073 mState = CONFIGURED;
3074
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003075 mFirstMetadataCallback = true;
3076
Thierry Strudel3d639192016-09-09 11:52:26 -07003077 pthread_mutex_unlock(&mMutex);
3078
3079 return rc;
3080}
3081
3082/*===========================================================================
3083 * FUNCTION : validateCaptureRequest
3084 *
3085 * DESCRIPTION: validate a capture request from camera service
3086 *
3087 * PARAMETERS :
3088 * @request : request from framework to process
3089 *
3090 * RETURN :
3091 *
3092 *==========================================================================*/
3093int QCamera3HardwareInterface::validateCaptureRequest(
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003094 camera3_capture_request_t *request,
3095 List<InternalRequest> &internallyRequestedStreams)
Thierry Strudel3d639192016-09-09 11:52:26 -07003096{
3097 ssize_t idx = 0;
3098 const camera3_stream_buffer_t *b;
3099 CameraMetadata meta;
3100
3101 /* Sanity check the request */
3102 if (request == NULL) {
3103 LOGE("NULL capture request");
3104 return BAD_VALUE;
3105 }
3106
3107 if ((request->settings == NULL) && (mState == CONFIGURED)) {
3108 /*settings cannot be null for the first request*/
3109 return BAD_VALUE;
3110 }
3111
3112 uint32_t frameNumber = request->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003113 if ((request->num_output_buffers < 1 || request->output_buffers == NULL)
3114 && (internallyRequestedStreams.size() == 0)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003115 LOGE("Request %d: No output buffers provided!",
3116                frameNumber);
3117 return BAD_VALUE;
3118 }
3119 if (request->num_output_buffers >= MAX_NUM_STREAMS) {
3120        LOGE("Number of buffers %d equals or is greater than maximum number of streams %d!",
3121 request->num_output_buffers, MAX_NUM_STREAMS);
3122 return BAD_VALUE;
3123 }
3124 if (request->input_buffer != NULL) {
3125 b = request->input_buffer;
3126 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
3127 LOGE("Request %d: Buffer %ld: Status not OK!",
3128 frameNumber, (long)idx);
3129 return BAD_VALUE;
3130 }
3131 if (b->release_fence != -1) {
3132 LOGE("Request %d: Buffer %ld: Has a release fence!",
3133 frameNumber, (long)idx);
3134 return BAD_VALUE;
3135 }
3136 if (b->buffer == NULL) {
3137 LOGE("Request %d: Buffer %ld: NULL buffer handle!",
3138 frameNumber, (long)idx);
3139 return BAD_VALUE;
3140 }
3141 }
3142
3143 // Validate all buffers
3144 b = request->output_buffers;
Thierry Strudel54dc9782017-02-15 12:12:10 -08003145 if (b == NULL) {
3146 return BAD_VALUE;
3147 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003148 while (idx < (ssize_t)request->num_output_buffers) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003149 QCamera3ProcessingChannel *channel =
3150 static_cast<QCamera3ProcessingChannel*>(b->stream->priv);
3151 if (channel == NULL) {
3152 LOGE("Request %d: Buffer %ld: Unconfigured stream!",
3153 frameNumber, (long)idx);
3154 return BAD_VALUE;
3155 }
3156 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
3157 LOGE("Request %d: Buffer %ld: Status not OK!",
3158 frameNumber, (long)idx);
3159 return BAD_VALUE;
3160 }
3161 if (b->release_fence != -1) {
3162 LOGE("Request %d: Buffer %ld: Has a release fence!",
3163 frameNumber, (long)idx);
3164 return BAD_VALUE;
3165 }
3166 if (b->buffer == NULL) {
3167 LOGE("Request %d: Buffer %ld: NULL buffer handle!",
3168 frameNumber, (long)idx);
3169 return BAD_VALUE;
3170 }
3171 if (*(b->buffer) == NULL) {
3172 LOGE("Request %d: Buffer %ld: NULL private handle!",
3173 frameNumber, (long)idx);
3174 return BAD_VALUE;
3175 }
3176 idx++;
3177 b = request->output_buffers + idx;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003178 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003179 return NO_ERROR;
3180}
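// --------------------------------------------------------------------------
// Illustrative sketch (not part of the original source): the minimal shape of
// a request that passes the checks in validateCaptureRequest() above. The
// stream, buffer handle and settings pointers are hypothetical placeholders
// for objects the framework would normally supply.
//
//     camera3_stream_buffer_t outBuf = {};
//     outBuf.stream = configuredPreviewStream;   // must map to a configured channel
//     outBuf.buffer = &grallocHandle;            // non-NULL buffer handle
//     outBuf.status = CAMERA3_BUFFER_STATUS_OK;  // any other status is rejected
//     outBuf.acquire_fence = -1;
//     outBuf.release_fence = -1;                 // a release fence is rejected
//
//     camera3_capture_request_t req = {};
//     req.frame_number = 1;
//     req.settings = defaultSettings;            // must be non-NULL for the first request
//     req.input_buffer = NULL;
//     req.num_output_buffers = 1;                // >= 1 and < MAX_NUM_STREAMS
//     req.output_buffers = &outBuf;
// --------------------------------------------------------------------------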
3181
3182/*===========================================================================
3183 * FUNCTION : deriveMinFrameDuration
3184 *
3185 * DESCRIPTION: derive minimum processed, jpeg, and raw frame durations based
3186 * on currently configured streams.
3187 *
3188 * PARAMETERS : NONE
3189 *
3190 * RETURN : NONE
3191 *
3192 *==========================================================================*/
3193void QCamera3HardwareInterface::deriveMinFrameDuration()
3194{
3195 int32_t maxJpegDim, maxProcessedDim, maxRawDim;
Jason Lee2d0ab112017-06-21 18:03:05 -07003196 bool hasRaw = false;
3197
3198 mMinRawFrameDuration = 0;
3199 mMinJpegFrameDuration = 0;
3200 mMinProcessedFrameDuration = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07003201
3202 maxJpegDim = 0;
3203 maxProcessedDim = 0;
3204 maxRawDim = 0;
3205
3206 // Figure out maximum jpeg, processed, and raw dimensions
3207 for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
3208 it != mStreamInfo.end(); it++) {
3209
3210 // Input stream doesn't have valid stream_type
3211 if ((*it)->stream->stream_type == CAMERA3_STREAM_INPUT)
3212 continue;
3213
3214 int32_t dimension = (int32_t)((*it)->stream->width * (*it)->stream->height);
3215 if ((*it)->stream->format == HAL_PIXEL_FORMAT_BLOB) {
3216 if (dimension > maxJpegDim)
3217 maxJpegDim = dimension;
3218 } else if ((*it)->stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
3219 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW10 ||
3220 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW16) {
Jason Lee2d0ab112017-06-21 18:03:05 -07003221 hasRaw = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07003222 if (dimension > maxRawDim)
3223 maxRawDim = dimension;
3224 } else {
3225 if (dimension > maxProcessedDim)
3226 maxProcessedDim = dimension;
3227 }
3228 }
3229
3230 size_t count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt,
3231 MAX_SIZES_CNT);
3232
3233 //Assume all jpeg dimensions are in processed dimensions.
3234 if (maxJpegDim > maxProcessedDim)
3235 maxProcessedDim = maxJpegDim;
3236    //Find the smallest raw dimension that is greater than or equal to the jpeg dimension
Jason Lee2d0ab112017-06-21 18:03:05 -07003237 if (hasRaw && maxProcessedDim > maxRawDim) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003238 maxRawDim = INT32_MAX;
3239
3240 for (size_t i = 0; i < count; i++) {
3241 int32_t dimension = gCamCapability[mCameraId]->raw_dim[i].width *
3242 gCamCapability[mCameraId]->raw_dim[i].height;
3243 if (dimension >= maxProcessedDim && dimension < maxRawDim)
3244 maxRawDim = dimension;
3245 }
3246 }
3247
3248 //Find minimum durations for processed, jpeg, and raw
3249 for (size_t i = 0; i < count; i++) {
3250 if (maxRawDim == gCamCapability[mCameraId]->raw_dim[i].width *
3251 gCamCapability[mCameraId]->raw_dim[i].height) {
3252 mMinRawFrameDuration = gCamCapability[mCameraId]->raw_min_duration[i];
3253 break;
3254 }
3255 }
3256 count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
3257 for (size_t i = 0; i < count; i++) {
3258 if (maxProcessedDim ==
3259 gCamCapability[mCameraId]->picture_sizes_tbl[i].width *
3260 gCamCapability[mCameraId]->picture_sizes_tbl[i].height) {
3261 mMinProcessedFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
3262 mMinJpegFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
3263 break;
3264 }
3265 }
3266}
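// --------------------------------------------------------------------------
// Worked example (illustrative, hypothetical stream sizes): with a 4032x3024
// BLOB (JPEG) stream, a 1920x1080 preview stream and no RAW stream,
// maxJpegDim = 12,192,768 and maxProcessedDim = 2,073,600; since the JPEG
// dimension is larger, maxProcessedDim is promoted to 12,192,768. hasRaw is
// false, so mMinRawFrameDuration stays 0, while mMinProcessedFrameDuration and
// mMinJpegFrameDuration are both taken from the picture_min_duration entry
// whose picture_sizes_tbl dimension matches 12,192,768.
// --------------------------------------------------------------------------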
3267
3268/*===========================================================================
3269 * FUNCTION : getMinFrameDuration
3270 *
3271 * DESCRIPTION: get minimum frame duration based on the per-format minimum frame durations
3272 * and current request configuration.
3273 *
3274 * PARAMETERS : @request: request sent by the framework
3275 *
3276 * RETURN     : min frame duration for a particular request
3277 *
3278 *==========================================================================*/
3279int64_t QCamera3HardwareInterface::getMinFrameDuration(const camera3_capture_request_t *request)
3280{
3281 bool hasJpegStream = false;
3282 bool hasRawStream = false;
3283 for (uint32_t i = 0; i < request->num_output_buffers; i ++) {
3284 const camera3_stream_t *stream = request->output_buffers[i].stream;
3285 if (stream->format == HAL_PIXEL_FORMAT_BLOB)
3286 hasJpegStream = true;
3287 else if (stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
3288 stream->format == HAL_PIXEL_FORMAT_RAW10 ||
3289 stream->format == HAL_PIXEL_FORMAT_RAW16)
3290 hasRawStream = true;
3291 }
3292
3293 if (!hasJpegStream)
3294 return MAX(mMinRawFrameDuration, mMinProcessedFrameDuration);
3295 else
3296 return MAX(MAX(mMinRawFrameDuration, mMinProcessedFrameDuration), mMinJpegFrameDuration);
3297}
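// --------------------------------------------------------------------------
// Worked example (illustrative, hypothetical durations): for a request with
// only a preview buffer, the function returns
// MAX(mMinRawFrameDuration, mMinProcessedFrameDuration); once a BLOB (JPEG)
// buffer is included, mMinJpegFrameDuration is folded in as well. With
// mMinProcessedFrameDuration = 33,333,333 ns (~30 fps) and
// mMinJpegFrameDuration = 50,000,000 ns (~20 fps), a JPEG request is limited
// to roughly 20 fps while a preview-only request can still run at ~30 fps.
// --------------------------------------------------------------------------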
3298
3299/*===========================================================================
3300 * FUNCTION : handleBuffersDuringFlushLock
3301 *
3302 * DESCRIPTION: Account for buffers returned from back-end during flush
3303 * This function is executed while mMutex is held by the caller.
3304 *
3305 * PARAMETERS :
3306 * @buffer: image buffer for the callback
3307 *
3308 * RETURN :
3309 *==========================================================================*/
3310void QCamera3HardwareInterface::handleBuffersDuringFlushLock(camera3_stream_buffer_t *buffer)
3311{
3312 bool buffer_found = false;
3313 for (List<PendingBuffersInRequest>::iterator req =
3314 mPendingBuffersMap.mPendingBuffersInRequest.begin();
3315 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
3316 for (List<PendingBufferInfo>::iterator i =
3317 req->mPendingBufferList.begin();
3318 i != req->mPendingBufferList.end(); i++) {
3319 if (i->buffer == buffer->buffer) {
3320 mPendingBuffersMap.numPendingBufsAtFlush--;
3321 LOGD("Found buffer %p for Frame %d, numPendingBufsAtFlush = %d",
3322 buffer->buffer, req->frame_number,
3323 mPendingBuffersMap.numPendingBufsAtFlush);
3324 buffer_found = true;
3325 break;
3326 }
3327 }
3328 if (buffer_found) {
3329 break;
3330 }
3331 }
3332 if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
3333 //signal the flush()
3334 LOGD("All buffers returned to HAL. Continue flush");
3335 pthread_cond_signal(&mBuffersCond);
3336 }
3337}
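// --------------------------------------------------------------------------
// Note (illustrative, not in the original source): numPendingBufsAtFlush is
// presumably snapshotted by flush() before it waits on mBuffersCond. Each
// buffer the back-end returns during flush decrements the count here, and the
// decrement that reaches zero signals mBuffersCond so flush() can continue
// with the remaining teardown.
// --------------------------------------------------------------------------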
3338
Thierry Strudel3d639192016-09-09 11:52:26 -07003339/*===========================================================================
3340 * FUNCTION : handleBatchMetadata
3341 *
3342 * DESCRIPTION: Handles metadata buffer callback in batch mode
3343 *
3344 * PARAMETERS : @metadata_buf: metadata buffer
3345 * @free_and_bufdone_meta_buf: Buf done on the meta buf and free
3346 * the meta buf in this method
3347 *
3348 * RETURN :
3349 *
3350 *==========================================================================*/
3351void QCamera3HardwareInterface::handleBatchMetadata(
3352 mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf)
3353{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003354 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_BATCH_METADATA);
Thierry Strudel3d639192016-09-09 11:52:26 -07003355
3356 if (NULL == metadata_buf) {
3357 LOGE("metadata_buf is NULL");
3358 return;
3359 }
3360    /* In batch mode, the metadata will contain the frame number and timestamp of
3361 * the last frame in the batch. Eg: a batch containing buffers from request
3362 * 5,6,7 and 8 will have frame number and timestamp corresponding to 8.
3363     * multiple process_capture_requests => 1 set_param => 1 handleBatchMetadata =>
3364 * multiple process_capture_results */
3365 metadata_buffer_t *metadata =
3366 (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3367 int32_t frame_number_valid = 0, urgent_frame_number_valid = 0;
3368 uint32_t last_frame_number = 0, last_urgent_frame_number = 0;
3369 uint32_t first_frame_number = 0, first_urgent_frame_number = 0;
3370 uint32_t frame_number = 0, urgent_frame_number = 0;
3371 int64_t last_frame_capture_time = 0, first_frame_capture_time, capture_time;
3372 bool invalid_metadata = false;
3373 size_t urgentFrameNumDiff = 0, frameNumDiff = 0;
3374 size_t loopCount = 1;
Thierry Strudel54dc9782017-02-15 12:12:10 -08003375 bool is_metabuf_queued = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07003376
3377 int32_t *p_frame_number_valid =
3378 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3379 uint32_t *p_frame_number =
3380 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3381 int64_t *p_capture_time =
3382 POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
3383 int32_t *p_urgent_frame_number_valid =
3384 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
3385 uint32_t *p_urgent_frame_number =
3386 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
3387
3388 if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) ||
3389 (NULL == p_capture_time) || (NULL == p_urgent_frame_number_valid) ||
3390 (NULL == p_urgent_frame_number)) {
3391 LOGE("Invalid metadata");
3392 invalid_metadata = true;
3393 } else {
3394 frame_number_valid = *p_frame_number_valid;
3395 last_frame_number = *p_frame_number;
3396 last_frame_capture_time = *p_capture_time;
3397 urgent_frame_number_valid = *p_urgent_frame_number_valid;
3398 last_urgent_frame_number = *p_urgent_frame_number;
3399 }
3400
3401    /* In batch mode, when no video buffers are requested, set_parms are sent
3402 * for every capture_request. The difference between consecutive urgent
3403 * frame numbers and frame numbers should be used to interpolate the
3404 * corresponding frame numbers and time stamps */
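    /* Worked example (illustrative, hypothetical batch): with a batch of 4 where
     * last_frame_number = 8 and first_frame_number = 5, frameNumDiff = 4, so the
     * loop below emits inferred frame numbers 5, 6, 7 and 8. Timestamps are spaced
     * NSEC_PER_SEC / mHFRVideoFps apart counting back from the last frame's capture
     * time, i.e. roughly 8.33 ms apart at 120 fps HFR. */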
3405 pthread_mutex_lock(&mMutex);
3406 if (urgent_frame_number_valid) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003407 ssize_t idx = mPendingBatchMap.indexOfKey(last_urgent_frame_number);
3408 if(idx < 0) {
3409 LOGE("Invalid urgent frame number received: %d. Irrecoverable error",
3410 last_urgent_frame_number);
3411 mState = ERROR;
3412 pthread_mutex_unlock(&mMutex);
3413 return;
3414 }
3415 first_urgent_frame_number = mPendingBatchMap.valueAt(idx);
Thierry Strudel3d639192016-09-09 11:52:26 -07003416 urgentFrameNumDiff = last_urgent_frame_number + 1 -
3417 first_urgent_frame_number;
3418
3419 LOGD("urgent_frm: valid: %d frm_num: %d - %d",
3420 urgent_frame_number_valid,
3421 first_urgent_frame_number, last_urgent_frame_number);
3422 }
3423
3424 if (frame_number_valid) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003425 ssize_t idx = mPendingBatchMap.indexOfKey(last_frame_number);
3426 if(idx < 0) {
3427 LOGE("Invalid frame number received: %d. Irrecoverable error",
3428 last_frame_number);
3429 mState = ERROR;
3430 pthread_mutex_unlock(&mMutex);
3431 return;
3432 }
3433 first_frame_number = mPendingBatchMap.valueAt(idx);
Thierry Strudel3d639192016-09-09 11:52:26 -07003434 frameNumDiff = last_frame_number + 1 -
3435 first_frame_number;
3436 mPendingBatchMap.removeItem(last_frame_number);
3437
3438 LOGD("frm: valid: %d frm_num: %d - %d",
3439 frame_number_valid,
3440 first_frame_number, last_frame_number);
3441
3442 }
3443 pthread_mutex_unlock(&mMutex);
3444
3445 if (urgent_frame_number_valid || frame_number_valid) {
3446 loopCount = MAX(urgentFrameNumDiff, frameNumDiff);
3447 if (urgentFrameNumDiff > MAX_HFR_BATCH_SIZE)
3448 LOGE("urgentFrameNumDiff: %d urgentFrameNum: %d",
3449 urgentFrameNumDiff, last_urgent_frame_number);
3450 if (frameNumDiff > MAX_HFR_BATCH_SIZE)
3451 LOGE("frameNumDiff: %d frameNum: %d",
3452 frameNumDiff, last_frame_number);
3453 }
3454
3455 for (size_t i = 0; i < loopCount; i++) {
3456 /* handleMetadataWithLock is called even for invalid_metadata for
3457 * pipeline depth calculation */
3458 if (!invalid_metadata) {
3459 /* Infer frame number. Batch metadata contains frame number of the
3460 * last frame */
3461 if (urgent_frame_number_valid) {
3462 if (i < urgentFrameNumDiff) {
3463 urgent_frame_number =
3464 first_urgent_frame_number + i;
3465 LOGD("inferred urgent frame_number: %d",
3466 urgent_frame_number);
3467 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3468 CAM_INTF_META_URGENT_FRAME_NUMBER, urgent_frame_number);
3469 } else {
3470 /* This is to handle when urgentFrameNumDiff < frameNumDiff */
3471 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3472 CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, 0);
3473 }
3474 }
3475
3476 /* Infer frame number. Batch metadata contains frame number of the
3477 * last frame */
3478 if (frame_number_valid) {
3479 if (i < frameNumDiff) {
3480 frame_number = first_frame_number + i;
3481 LOGD("inferred frame_number: %d", frame_number);
3482 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3483 CAM_INTF_META_FRAME_NUMBER, frame_number);
3484 } else {
3485 /* This is to handle when urgentFrameNumDiff > frameNumDiff */
3486 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3487 CAM_INTF_META_FRAME_NUMBER_VALID, 0);
3488 }
3489 }
3490
3491 if (last_frame_capture_time) {
3492 //Infer timestamp
3493 first_frame_capture_time = last_frame_capture_time -
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003494 (((loopCount - 1) * NSEC_PER_SEC) / (double) mHFRVideoFps);
Thierry Strudel3d639192016-09-09 11:52:26 -07003495 capture_time =
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003496 first_frame_capture_time + (i * NSEC_PER_SEC / (double) mHFRVideoFps);
Thierry Strudel3d639192016-09-09 11:52:26 -07003497 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3498 CAM_INTF_META_SENSOR_TIMESTAMP, capture_time);
3499 LOGD("batch capture_time: %lld, capture_time: %lld",
3500 last_frame_capture_time, capture_time);
3501 }
3502 }
3503 pthread_mutex_lock(&mMutex);
3504 handleMetadataWithLock(metadata_buf,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003505 false /* free_and_bufdone_meta_buf */,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003506 (i == urgentFrameNumDiff-1), /* last urgent metadata in the batch */
3507 (i == frameNumDiff-1), /* last metadata in the batch metadata */
Thierry Strudel54dc9782017-02-15 12:12:10 -08003508                &is_metabuf_queued /* whether the meta buf is queued or not */);
Thierry Strudel3d639192016-09-09 11:52:26 -07003509 pthread_mutex_unlock(&mMutex);
3510 }
3511
3512 /* BufDone metadata buffer */
Thierry Strudel54dc9782017-02-15 12:12:10 -08003513 if (free_and_bufdone_meta_buf && !is_metabuf_queued) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003514 mMetadataChannel->bufDone(metadata_buf);
3515 free(metadata_buf);
Thierry Strudel54dc9782017-02-15 12:12:10 -08003516 metadata_buf = NULL;
Thierry Strudel3d639192016-09-09 11:52:26 -07003517 }
3518}
3519
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003520void QCamera3HardwareInterface::notifyError(uint32_t frameNumber,
3521 camera3_error_msg_code_t errorCode)
3522{
3523 camera3_notify_msg_t notify_msg;
3524 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
3525 notify_msg.type = CAMERA3_MSG_ERROR;
3526 notify_msg.message.error.error_code = errorCode;
3527 notify_msg.message.error.error_stream = NULL;
3528 notify_msg.message.error.frame_number = frameNumber;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003529 orchestrateNotify(&notify_msg);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003530
3531 return;
3532}
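// --------------------------------------------------------------------------
// Usage sketch (illustrative): a request-level failure could be reported as
//     notifyError(frame_number, CAMERA3_MSG_ERROR_REQUEST);
// and a fatal device error as CAMERA3_MSG_ERROR_DEVICE. Buffer-level drops are
// not sent through this helper because it leaves error_stream NULL; they are
// raised directly with CAMERA3_MSG_ERROR_BUFFER where the stream is known
// (see handleMetadataWithLock below).
// --------------------------------------------------------------------------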
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003533
3534/*===========================================================================
3535 * FUNCTION : sendPartialMetadataWithLock
3536 *
3537 * DESCRIPTION: Send partial capture result callback with mMutex lock held.
3538 *
3539 * PARAMETERS : @metadata: metadata buffer
3540 * @requestIter: The iterator for the pending capture request for
3541 * which the partial result is being sen
3542 *                   which the partial result is being sent
3543 * last urgent metadata in a batch. Always true for non-batch mode
Shuzhen Wang485e2442017-08-02 12:21:08 -07003544 * @isJumpstartMetadata: Whether this is a partial metadata for
3545 * jumpstart, i.e. even though it doesn't map to a valid partial
3546 * frame number, its metadata entries should be kept.
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003547 *
3548 * RETURN :
3549 *
3550 *==========================================================================*/
3551
3552void QCamera3HardwareInterface::sendPartialMetadataWithLock(
3553 metadata_buffer_t *metadata,
3554 const pendingRequestIterator requestIter,
Shuzhen Wang485e2442017-08-02 12:21:08 -07003555 bool lastUrgentMetadataInBatch,
3556 bool isJumpstartMetadata)
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003557{
3558 camera3_capture_result_t result;
3559 memset(&result, 0, sizeof(camera3_capture_result_t));
3560
3561 requestIter->partial_result_cnt++;
3562
3563 // Extract 3A metadata
3564 result.result = translateCbUrgentMetadataToResultMetadata(
Shuzhen Wang485e2442017-08-02 12:21:08 -07003565 metadata, lastUrgentMetadataInBatch, requestIter->frame_number,
3566 isJumpstartMetadata);
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003567 // Populate metadata result
3568 result.frame_number = requestIter->frame_number;
3569 result.num_output_buffers = 0;
3570 result.output_buffers = NULL;
3571 result.partial_result = requestIter->partial_result_cnt;
3572
3573 {
Chien-Yu Chen77ccd022017-06-23 12:00:36 -07003574 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003575 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
3576 // Notify HDR+ client about the partial metadata.
3577 gHdrPlusClient->notifyFrameMetadata(result.frame_number, *result.result,
3578 result.partial_result == PARTIAL_RESULT_COUNT);
3579 }
3580 }
3581
3582 orchestrateResult(&result);
3583 LOGD("urgent frame_number = %u", result.frame_number);
3584 free_camera_metadata((camera_metadata_t *)result.result);
3585}
3586
Thierry Strudel3d639192016-09-09 11:52:26 -07003587/*===========================================================================
3588 * FUNCTION : handleMetadataWithLock
3589 *
3590 * DESCRIPTION: Handles metadata buffer callback with mMutex lock held.
3591 *
3592 * PARAMETERS : @metadata_buf: metadata buffer
3593 * @free_and_bufdone_meta_buf: Buf done on the meta buf and free
3594 * the meta buf in this method
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003595 * @lastUrgentMetadataInBatch: Boolean to indicate whether this is the
3596 * last urgent metadata in a batch. Always true for non-batch mode
3597 * @lastMetadataInBatch: Boolean to indicate whether this is the
3598 * last metadata in a batch. Always true for non-batch mode
Thierry Strudel54dc9782017-02-15 12:12:10 -08003599 * @p_is_metabuf_queued: Pointer to Boolean to check if metadata
3600 * buffer is enqueued or not.
Thierry Strudel3d639192016-09-09 11:52:26 -07003601 *
3602 * RETURN :
3603 *
3604 *==========================================================================*/
3605void QCamera3HardwareInterface::handleMetadataWithLock(
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003606 mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003607 bool lastUrgentMetadataInBatch, bool lastMetadataInBatch,
3608 bool *p_is_metabuf_queued)
Thierry Strudel3d639192016-09-09 11:52:26 -07003609{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003610 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_METADATA_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07003611 if ((mFlushPerf) || (ERROR == mState) || (DEINIT == mState)) {
3612 //during flush do not send metadata from this thread
3613 LOGD("not sending metadata during flush or when mState is error");
3614 if (free_and_bufdone_meta_buf) {
3615 mMetadataChannel->bufDone(metadata_buf);
3616 free(metadata_buf);
3617 }
3618 return;
3619 }
3620
3621 //not in flush
3622 metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3623 int32_t frame_number_valid, urgent_frame_number_valid;
3624 uint32_t frame_number, urgent_frame_number;
Jason Lee603176d2017-05-31 11:43:27 -07003625 int64_t capture_time, capture_time_av;
Thierry Strudel3d639192016-09-09 11:52:26 -07003626 nsecs_t currentSysTime;
3627
3628 int32_t *p_frame_number_valid =
3629 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3630 uint32_t *p_frame_number = POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3631 int64_t *p_capture_time = POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
Jason Lee603176d2017-05-31 11:43:27 -07003632 int64_t *p_capture_time_av = POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP_AV, metadata);
Thierry Strudel3d639192016-09-09 11:52:26 -07003633 int32_t *p_urgent_frame_number_valid =
3634 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
3635 uint32_t *p_urgent_frame_number =
3636 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
3637 IF_META_AVAILABLE(cam_stream_ID_t, p_cam_frame_drop, CAM_INTF_META_FRAME_DROPPED,
3638 metadata) {
3639 LOGD("Dropped frame info for frame_number_valid %d, frame_number %d",
3640 *p_frame_number_valid, *p_frame_number);
3641 }
3642
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003643 camera_metadata_t *resultMetadata = nullptr;
3644
Thierry Strudel3d639192016-09-09 11:52:26 -07003645 if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) || (NULL == p_capture_time) ||
3646 (NULL == p_urgent_frame_number_valid) || (NULL == p_urgent_frame_number)) {
3647 LOGE("Invalid metadata");
3648 if (free_and_bufdone_meta_buf) {
3649 mMetadataChannel->bufDone(metadata_buf);
3650 free(metadata_buf);
3651 }
3652 goto done_metadata;
3653 }
3654 frame_number_valid = *p_frame_number_valid;
3655 frame_number = *p_frame_number;
3656 capture_time = *p_capture_time;
Jason Lee603176d2017-05-31 11:43:27 -07003657 capture_time_av = *p_capture_time_av;
Thierry Strudel3d639192016-09-09 11:52:26 -07003658 urgent_frame_number_valid = *p_urgent_frame_number_valid;
3659 urgent_frame_number = *p_urgent_frame_number;
3660 currentSysTime = systemTime(CLOCK_MONOTONIC);
3661
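    // Descriptive note (added for clarity, not in the original source): when the
    // sensor timestamp is not already calibrated, the block below estimates the
    // CLOCK_BOOTTIME - CLOCK_MONOTONIC offset by bracketing a boottime read between
    // two monotonic reads, keeping the sample with the smallest bracket gap over a
    // few tries, and then shifts capture_time by that measured offset.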
Jason Lee603176d2017-05-31 11:43:27 -07003662 if (!gCamCapability[mCameraId]->timestamp_calibrated) {
3663 const int tries = 3;
3664 nsecs_t bestGap, measured;
3665 for (int i = 0; i < tries; ++i) {
3666 const nsecs_t tmono = systemTime(SYSTEM_TIME_MONOTONIC);
3667 const nsecs_t tbase = systemTime(SYSTEM_TIME_BOOTTIME);
3668 const nsecs_t tmono2 = systemTime(SYSTEM_TIME_MONOTONIC);
3669 const nsecs_t gap = tmono2 - tmono;
3670 if (i == 0 || gap < bestGap) {
3671 bestGap = gap;
3672 measured = tbase - ((tmono + tmono2) >> 1);
3673 }
3674 }
3675 capture_time -= measured;
3676 }
3677
Thierry Strudel3d639192016-09-09 11:52:26 -07003678 // Detect if buffers from any requests are overdue
3679 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003680 int64_t timeout;
3681 {
3682 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
3683 // If there is a pending HDR+ request, the following requests may be blocked until the
3684 // HDR+ request is done. So allow a longer timeout.
3685 timeout = (mHdrPlusPendingRequests.size() > 0) ?
3686 MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT : MISSING_REQUEST_BUF_TIMEOUT;
Emilian Peev30522a12017-08-03 14:36:33 +01003687 if (timeout < mExpectedInflightDuration) {
3688 timeout = mExpectedInflightDuration;
3689 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003690 }
3691
3692 if ( (currentSysTime - req.timestamp) > s2ns(timeout) ) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003693 for (auto &missed : req.mPendingBufferList) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003694 assert(missed.stream->priv);
3695 if (missed.stream->priv) {
3696 QCamera3Channel *ch = (QCamera3Channel *)(missed.stream->priv);
3697 assert(ch->mStreams[0]);
3698 if (ch->mStreams[0]) {
3699 LOGE("Cancel missing frame = %d, buffer = %p,"
3700 "stream type = %d, stream format = %d",
3701 req.frame_number, missed.buffer,
3702 ch->mStreams[0]->getMyType(), missed.stream->format);
3703 ch->timeoutFrame(req.frame_number);
3704 }
3705 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003706 }
3707 }
3708 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003709    //For the very first metadata callback, regardless of whether it contains a valid
3710 //frame number, send the partial metadata for the jumpstarting requests.
3711 //Note that this has to be done even if the metadata doesn't contain valid
3712 //urgent frame number, because in the case only 1 request is ever submitted
3713 //to HAL, there won't be subsequent valid urgent frame number.
3714 if (mFirstMetadataCallback) {
3715 for (pendingRequestIterator i =
3716 mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
3717 if (i->bUseFirstPartial) {
Shuzhen Wang485e2442017-08-02 12:21:08 -07003718 sendPartialMetadataWithLock(metadata, i, lastUrgentMetadataInBatch,
3719 true /*isJumpstartMetadata*/);
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003720 }
3721 }
3722 mFirstMetadataCallback = false;
3723 }
3724
Thierry Strudel3d639192016-09-09 11:52:26 -07003725 //Partial result on process_capture_result for timestamp
3726 if (urgent_frame_number_valid) {
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003727 LOGD("valid urgent frame_number = %u", urgent_frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003728
3729        //Received an urgent frame number, handle it
3730 //using partial results
3731 for (pendingRequestIterator i =
3732 mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
3733 LOGD("Iterator Frame = %d urgent frame = %d",
3734 i->frame_number, urgent_frame_number);
3735
Chien-Yu Chen29fd1d72017-04-27 18:42:09 -07003736 if ((!i->input_buffer) && (!i->hdrplus) && (i->frame_number < urgent_frame_number) &&
Thierry Strudel3d639192016-09-09 11:52:26 -07003737 (i->partial_result_cnt == 0)) {
3738 LOGE("Error: HAL missed urgent metadata for frame number %d",
3739 i->frame_number);
Shuzhen Wang1ee712a2017-03-22 17:51:26 -07003740 i->partial_result_cnt++;
Thierry Strudel3d639192016-09-09 11:52:26 -07003741 }
3742
3743 if (i->frame_number == urgent_frame_number &&
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003744 i->partial_result_cnt == 0) {
Shuzhen Wang485e2442017-08-02 12:21:08 -07003745 sendPartialMetadataWithLock(metadata, i, lastUrgentMetadataInBatch,
3746 false /*isJumpstartMetadata*/);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003747 if (mResetInstantAEC && mInstantAECSettledFrameNumber == 0) {
3748 // Instant AEC settled for this frame.
3749 LOGH("instant AEC settled for frame number %d", urgent_frame_number);
3750 mInstantAECSettledFrameNumber = urgent_frame_number;
3751 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003752 break;
3753 }
3754 }
3755 }
3756
3757 if (!frame_number_valid) {
3758 LOGD("Not a valid normal frame number, used as SOF only");
3759 if (free_and_bufdone_meta_buf) {
3760 mMetadataChannel->bufDone(metadata_buf);
3761 free(metadata_buf);
3762 }
3763 goto done_metadata;
3764 }
3765 LOGH("valid frame_number = %u, capture_time = %lld",
3766 frame_number, capture_time);
3767
Emilian Peev4e0fe952017-06-30 12:40:09 -07003768 handleDepthDataLocked(metadata->depth_data, frame_number,
3769 metadata->is_depth_data_valid);
Emilian Peev7650c122017-01-19 08:24:33 -08003770
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003771 // Check whether any stream buffer corresponding to this is dropped or not
3772 // If dropped, then send the ERROR_BUFFER for the corresponding stream
3773    // OR check if instant AEC is enabled, then drop frames until AEC is settled.
3774 for (auto & pendingRequest : mPendingRequestsList) {
3775 if (p_cam_frame_drop || (mInstantAEC || pendingRequest.frame_number <
3776 mInstantAECSettledFrameNumber)) {
3777 camera3_notify_msg_t notify_msg = {};
3778 for (auto & buffer : pendingRequest.buffers) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003779 bool dropFrame = false;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003780 QCamera3ProcessingChannel *channel =
3781 (QCamera3ProcessingChannel *)buffer.stream->priv;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003782 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003783 if (p_cam_frame_drop) {
3784 for (uint32_t k = 0; k < p_cam_frame_drop->num_streams; k++) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003785 if (streamID == p_cam_frame_drop->stream_request[k].streamID) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003786 // Got the stream ID for drop frame.
3787 dropFrame = true;
3788 break;
3789 }
3790 }
3791 } else {
3792 // This is instant AEC case.
3793                    // For instant AEC, drop the stream until AEC is settled.
3794 dropFrame = true;
3795 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003796
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003797 if (dropFrame) {
3798 // Send Error notify to frameworks with CAMERA3_MSG_ERROR_BUFFER
3799 if (p_cam_frame_drop) {
3800 // Treat msg as error for system buffer drops
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003801 LOGE("Start of reporting error frame#=%u, streamID=%u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003802 pendingRequest.frame_number, streamID);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003803 } else {
3804 // For instant AEC, inform frame drop and frame number
3805 LOGH("Start of reporting error frame#=%u for instant AEC, streamID=%u, "
3806 "AEC settled frame number = %u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003807 pendingRequest.frame_number, streamID,
3808 mInstantAECSettledFrameNumber);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003809 }
3810 notify_msg.type = CAMERA3_MSG_ERROR;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003811 notify_msg.message.error.frame_number = pendingRequest.frame_number;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003812 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER ;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003813 notify_msg.message.error.error_stream = buffer.stream;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003814 orchestrateNotify(&notify_msg);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003815 if (p_cam_frame_drop) {
3816 // Treat msg as error for system buffer drops
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003817 LOGE("End of reporting error frame#=%u, streamID=%u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003818 pendingRequest.frame_number, streamID);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003819 } else {
3820 // For instant AEC, inform frame drop and frame number
3821 LOGH("End of reporting error frame#=%u for instant AEC, streamID=%u, "
3822 "AEC settled frame number = %u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003823 pendingRequest.frame_number, streamID,
3824 mInstantAECSettledFrameNumber);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003825 }
3826 PendingFrameDropInfo PendingFrameDrop;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003827 PendingFrameDrop.frame_number = pendingRequest.frame_number;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003828 PendingFrameDrop.stream_ID = streamID;
3829 // Add the Frame drop info to mPendingFrameDropList
3830 mPendingFrameDropList.push_back(PendingFrameDrop);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003831 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003832 }
3833 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003834 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003835
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003836 for (auto & pendingRequest : mPendingRequestsList) {
3837 // Find the pending request with the frame number.
3838 if (pendingRequest.frame_number == frame_number) {
3839 // Update the sensor timestamp.
3840 pendingRequest.timestamp = capture_time;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003841
Thierry Strudel3d639192016-09-09 11:52:26 -07003842
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07003843 /* Set the timestamp in display metadata so that clients aware of
3844 private_handle, such as VT, can use these unmodified timestamps.
3845 The camera framework is unaware of this timestamp and cannot change it. */
Jason Lee603176d2017-05-31 11:43:27 -07003846 updateTimeStampInPendingBuffers(pendingRequest.frame_number, capture_time_av);
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07003847
Thierry Strudel3d639192016-09-09 11:52:26 -07003848 // Find the channel requiring metadata, meaning internal offline postprocessing
3849 // is needed.
3850 //TODO: for now, we don't support two streams requiring metadata at the same time
3851 // (because we are not making copies, and the metadata buffer is not reference counted).
3852 bool internalPproc = false;
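// If a buffer needs internal reprocessing, the metadata super-buffer is handed
// to that channel via queueReprocMetadata(); ownership then moves to the
// reprocess path, so the buffer is not returned to the metadata channel below.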
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003853 for (pendingBufferIterator iter = pendingRequest.buffers.begin();
3854 iter != pendingRequest.buffers.end(); iter++) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003855 if (iter->need_metadata) {
3856 internalPproc = true;
3857 QCamera3ProcessingChannel *channel =
3858 (QCamera3ProcessingChannel *)iter->stream->priv;
3859 channel->queueReprocMetadata(metadata_buf);
Thierry Strudel54dc9782017-02-15 12:12:10 -08003860 if(p_is_metabuf_queued != NULL) {
3861 *p_is_metabuf_queued = true;
3862 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003863 break;
3864 }
3865 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003866 for (auto itr = pendingRequest.internalRequestList.begin();
3867 itr != pendingRequest.internalRequestList.end(); itr++) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003868 if (itr->need_metadata) {
3869 internalPproc = true;
3870 QCamera3ProcessingChannel *channel =
3871 (QCamera3ProcessingChannel *)itr->stream->priv;
3872 channel->queueReprocMetadata(metadata_buf);
3873 break;
3874 }
3875 }
3876
Thierry Strudel54dc9782017-02-15 12:12:10 -08003877 saveExifParams(metadata);
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07003878
3879 bool *enableZsl = nullptr;
3880 if (gExposeEnableZslKey) {
3881 enableZsl = &pendingRequest.enableZsl;
3882 }
3883
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003884 resultMetadata = translateFromHalMetadata(metadata,
Shuzhen Wang181c57b2017-07-21 11:39:44 -07003885 pendingRequest, internalPproc,
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07003886 lastMetadataInBatch, enableZsl);
Thierry Strudel3d639192016-09-09 11:52:26 -07003887
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003888 updateFpsInPreviewBuffer(metadata, pendingRequest.frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003889
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003890 if (pendingRequest.blob_request) {
3891 //Dump tuning metadata if enabled and available
3892 char prop[PROPERTY_VALUE_MAX];
3893 memset(prop, 0, sizeof(prop));
3894 property_get("persist.camera.dumpmetadata", prop, "0");
3895 int32_t enabled = atoi(prop);
3896 if (enabled && metadata->is_tuning_params_valid) {
3897 dumpMetadataToFile(metadata->tuning_params,
3898 mMetaFrameCount,
3899 enabled,
3900 "Snapshot",
3901 frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003902 }
3903 }
3904
3905 if (!internalPproc) {
3906 LOGD("couldn't find need_metadata for this metadata");
3907 // Return metadata buffer
3908 if (free_and_bufdone_meta_buf) {
3909 mMetadataChannel->bufDone(metadata_buf);
3910 free(metadata_buf);
3911 }
3912 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003913
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003914 break;
Thierry Strudel3d639192016-09-09 11:52:26 -07003915 }
3916 }
3917
Chien-Yu Chen3f303522017-05-19 15:21:45 -07003918 mShutterDispatcher.markShutterReady(frame_number, capture_time);
3919
3920 // Try to send out capture result metadata.
3921 handlePendingResultMetadataWithLock(frame_number, resultMetadata);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003922 return;
3923
Thierry Strudel3d639192016-09-09 11:52:26 -07003924done_metadata:
3925 for (pendingRequestIterator i = mPendingRequestsList.begin();
3926 i != mPendingRequestsList.end() ;i++) {
3927 i->pipeline_depth++;
3928 }
3929 LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
3930 unblockRequestIfNecessary();
3931}
3932
3933/*===========================================================================
Emilian Peev7650c122017-01-19 08:24:33 -08003934 * FUNCTION : handleDepthDataLocked
3935 *
3936 * DESCRIPTION: Handles incoming depth data
3937 *
3938 * PARAMETERS : @depthData : Depth data
3939 * @frameNumber: Frame number of the incoming depth data
Emilian Peev4e0fe952017-06-30 12:40:09 -07003940 * @valid : Valid flag for the incoming data
Emilian Peev7650c122017-01-19 08:24:33 -08003941 *
3942 * RETURN :
3943 *
3944 *==========================================================================*/
3945void QCamera3HardwareInterface::handleDepthDataLocked(
Emilian Peev4e0fe952017-06-30 12:40:09 -07003946 const cam_depth_data_t &depthData, uint32_t frameNumber, uint8_t valid) {
Emilian Peev7650c122017-01-19 08:24:33 -08003947 uint32_t currentFrameNumber;
3948 buffer_handle_t *depthBuffer;
3949
3950 if (nullptr == mDepthChannel) {
Emilian Peev7650c122017-01-19 08:24:33 -08003951 return;
3952 }
3953
3954 camera3_stream_buffer_t resultBuffer =
3955 {.acquire_fence = -1,
3956 .release_fence = -1,
3957 .status = CAMERA3_BUFFER_STATUS_OK,
3958 .buffer = nullptr,
3959 .stream = mDepthChannel->getStream()};
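// Drain depth buffers in frame-number order: buffers older than frameNumber
// whose depth data never arrived are returned with an error status (plus an
// ERROR_BUFFER notify), the buffer matching frameNumber is populated with the
// incoming data (or marked as an error if the data is invalid), and the loop
// stops as soon as a newer buffer is found.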
Emilian Peev7650c122017-01-19 08:24:33 -08003960 do {
3961 depthBuffer = mDepthChannel->getOldestFrame(currentFrameNumber);
3962 if (nullptr == depthBuffer) {
3963 break;
3964 }
3965
Emilian Peev7650c122017-01-19 08:24:33 -08003966 resultBuffer.buffer = depthBuffer;
3967 if (currentFrameNumber == frameNumber) {
Emilian Peev4e0fe952017-06-30 12:40:09 -07003968 if (valid) {
3969 int32_t rc = mDepthChannel->populateDepthData(depthData,
3970 frameNumber);
3971 if (NO_ERROR != rc) {
3972 resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
3973 } else {
3974 resultBuffer.status = CAMERA3_BUFFER_STATUS_OK;
3975 }
Emilian Peev7650c122017-01-19 08:24:33 -08003976 } else {
Emilian Peev4e0fe952017-06-30 12:40:09 -07003977 resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
Emilian Peev7650c122017-01-19 08:24:33 -08003978 }
3979 } else if (currentFrameNumber > frameNumber) {
3980 break;
3981 } else {
3982 camera3_notify_msg_t notify_msg = {.type = CAMERA3_MSG_ERROR,
3983 {{currentFrameNumber, mDepthChannel->getStream(),
3984 CAMERA3_MSG_ERROR_BUFFER}}};
3985 orchestrateNotify(&notify_msg);
3986
3987 LOGE("Depth buffer for frame number: %d is missing "
3988 "returning back!", currentFrameNumber);
3989 resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
3990 }
3991 mDepthChannel->unmapBuffer(currentFrameNumber);
Chien-Yu Chen3f303522017-05-19 15:21:45 -07003992 mOutputBufferDispatcher.markBufferReady(currentFrameNumber, resultBuffer);
Emilian Peev7650c122017-01-19 08:24:33 -08003993 } while (currentFrameNumber < frameNumber);
3994}
3995
3996/*===========================================================================
3997 * FUNCTION : notifyErrorFoPendingDepthData
3998 *
3999 * DESCRIPTION: Returns error for any pending depth buffers
4000 *
4001 * PARAMETERS : depthCh - depth channel that needs to get flushed
4002 *
4003 * RETURN :
4004 *
4005 *==========================================================================*/
4006void QCamera3HardwareInterface::notifyErrorFoPendingDepthData(
4007 QCamera3DepthChannel *depthCh) {
4008 uint32_t currentFrameNumber;
4009 buffer_handle_t *depthBuffer;
4010
4011 if (nullptr == depthCh) {
4012 return;
4013 }
4014
4015 camera3_notify_msg_t notify_msg =
4016 {.type = CAMERA3_MSG_ERROR,
4017 {{0, depthCh->getStream(), CAMERA3_MSG_ERROR_BUFFER}}};
4018 camera3_stream_buffer_t resultBuffer =
4019 {.acquire_fence = -1,
4020 .release_fence = -1,
4021 .buffer = nullptr,
4022 .stream = depthCh->getStream(),
4023 .status = CAMERA3_BUFFER_STATUS_ERROR};
Emilian Peev7650c122017-01-19 08:24:33 -08004024
4025 while (nullptr !=
4026 (depthBuffer = depthCh->getOldestFrame(currentFrameNumber))) {
4027 depthCh->unmapBuffer(currentFrameNumber);
4028
4029 notify_msg.message.error.frame_number = currentFrameNumber;
4030 orchestrateNotify(&notify_msg);
4031
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004032 mOutputBufferDispatcher.markBufferReady(currentFrameNumber, resultBuffer);
Emilian Peev7650c122017-01-19 08:24:33 -08004033 };
4034}
4035
4036/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07004037 * FUNCTION : hdrPlusPerfLock
4038 *
4039 * DESCRIPTION: perf lock for HDR+ using custom intent
4040 *
4041 * PARAMETERS : @metadata_buf: Metadata super_buf pointer
4042 *
4043 * RETURN : None
4044 *
4045 *==========================================================================*/
4046void QCamera3HardwareInterface::hdrPlusPerfLock(
4047 mm_camera_super_buf_t *metadata_buf)
4048{
4049 if (NULL == metadata_buf) {
4050 LOGE("metadata_buf is NULL");
4051 return;
4052 }
4053 metadata_buffer_t *metadata =
4054 (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
4055 int32_t *p_frame_number_valid =
4056 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
4057 uint32_t *p_frame_number =
4058 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
4059
4060 if (p_frame_number_valid == NULL || p_frame_number == NULL) {
4061 LOGE("%s: Invalid metadata", __func__);
4062 return;
4063 }
4064
Wei Wang01385482017-08-03 10:49:34 -07004065 //acquire perf lock for 2 secs after the last HDR frame is captured
4066 constexpr uint32_t HDR_PLUS_PERF_TIME_OUT = 2000;
Thierry Strudel3d639192016-09-09 11:52:26 -07004067 if ((p_frame_number_valid != NULL) && *p_frame_number_valid) {
4068 if ((p_frame_number != NULL) &&
4069 (mLastCustIntentFrmNum == (int32_t)*p_frame_number)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004070 mPerfLockMgr.acquirePerfLock(PERF_LOCK_TAKE_SNAPSHOT, HDR_PLUS_PERF_TIME_OUT);
Thierry Strudel3d639192016-09-09 11:52:26 -07004071 }
4072 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004073}
4074
4075/*===========================================================================
4076 * FUNCTION : handleInputBufferWithLock
4077 *
4078 * DESCRIPTION: Handles input buffer and shutter callback with mMutex lock held.
4079 *
4080 * PARAMETERS : @frame_number: frame number of the input buffer
4081 *
4082 * RETURN :
4083 *
4084 *==========================================================================*/
4085void QCamera3HardwareInterface::handleInputBufferWithLock(uint32_t frame_number)
4086{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004087 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_IN_BUF_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07004088 pendingRequestIterator i = mPendingRequestsList.begin();
4089 while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
4090 i++;
4091 }
4092 if (i != mPendingRequestsList.end() && i->input_buffer) {
4093 //found the right request
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004094 CameraMetadata settings;
4095 nsecs_t capture_time = systemTime(CLOCK_MONOTONIC);
4096 if(i->settings) {
4097 settings = i->settings;
4098 if (settings.exists(ANDROID_SENSOR_TIMESTAMP)) {
4099 capture_time = settings.find(ANDROID_SENSOR_TIMESTAMP).data.i64[0];
Thierry Strudel3d639192016-09-09 11:52:26 -07004100 } else {
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004101 LOGE("No timestamp in input settings! Using current one.");
Thierry Strudel3d639192016-09-09 11:52:26 -07004102 }
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004103 } else {
4104 LOGE("Input settings missing!");
Thierry Strudel3d639192016-09-09 11:52:26 -07004105 }
4106
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004107 mShutterDispatcher.markShutterReady(frame_number, capture_time);
4108 LOGD("Input request metadata notify frame_number = %u, capture_time = %llu",
4109 i->frame_number, capture_time);
Thierry Strudel3d639192016-09-09 11:52:26 -07004110
4111 camera3_capture_result result;
4112 memset(&result, 0, sizeof(camera3_capture_result));
4113 result.frame_number = frame_number;
4114 result.result = i->settings;
4115 result.input_buffer = i->input_buffer;
4116 result.partial_result = PARTIAL_RESULT_COUNT;
4117
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004118 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -07004119 LOGD("Input request metadata and input buffer frame_number = %u",
4120 i->frame_number);
4121 i = erasePendingRequest(i);
Chien-Yu Chen588cc852017-06-23 18:39:51 -07004122
4123 // Dispatch result metadata that may be just unblocked by this reprocess result.
4124 dispatchResultMetadataWithLock(frame_number, /*isLiveRequest*/false);
Thierry Strudel3d639192016-09-09 11:52:26 -07004125 } else {
4126 LOGE("Could not find input request for frame number %d", frame_number);
4127 }
4128}
4129
4130/*===========================================================================
4131 * FUNCTION : handleBufferWithLock
4132 *
4133 * DESCRIPTION: Handles image buffer callback with mMutex lock held.
4134 *
4135 * PARAMETERS : @buffer: image buffer for the callback
4136 * @frame_number: frame number of the image buffer
4137 *
4138 * RETURN :
4139 *
4140 *==========================================================================*/
4141void QCamera3HardwareInterface::handleBufferWithLock(
4142 camera3_stream_buffer_t *buffer, uint32_t frame_number)
4143{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004144 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_BUF_LKD);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004145
4146 if (buffer->stream->format == HAL_PIXEL_FORMAT_BLOB) {
4147 mPerfLockMgr.releasePerfLock(PERF_LOCK_TAKE_SNAPSHOT);
4148 }
4149
Thierry Strudel3d639192016-09-09 11:52:26 -07004150 /* Nothing to be done during error state */
4151 if ((ERROR == mState) || (DEINIT == mState)) {
4152 return;
4153 }
4154 if (mFlushPerf) {
4155 handleBuffersDuringFlushLock(buffer);
4156 return;
4157 }
4158 //not in flush
4159 // If the frame number doesn't exist in the pending request list,
4160 // directly send the buffer to the framework and update the pending buffers map.
4161 // Otherwise, book-keep the buffer.
4162 pendingRequestIterator i = mPendingRequestsList.begin();
4163 while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
4164 i++;
4165 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004166
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004167 if (i != mPendingRequestsList.end()) {
Thierry Strudel3d639192016-09-09 11:52:26 -07004168 if (i->input_buffer) {
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004169 // For a reprocessing request, try to send out result metadata.
4170 handlePendingResultMetadataWithLock(frame_number, nullptr);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004171 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004172 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004173
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004174 // Check if this frame was dropped.
4175 for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
4176 m != mPendingFrameDropList.end(); m++) {
4177 QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
4178 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
4179 if((m->stream_ID == streamID) && (m->frame_number==frame_number) ) {
4180 buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
4181 LOGD("Stream STATUS_ERROR frame_number=%d, streamID=%d",
4182 frame_number, streamID);
4183 m = mPendingFrameDropList.erase(m);
4184 break;
4185 }
4186 }
4187
Binhao Lin09245482017-08-31 18:25:29 -07004188 // WAR for encoder avtimer timestamp issue
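// When the AV timer is enabled for a video stream, propagate the AV timestamp
// recorded for this request into the buffer's private handle (SET_VT_TIMESTAMP)
// so clients such as VT can read the unmodified timestamp; if no AV timestamp
// was recorded, the buffer is flagged with an error status instead.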
4189 QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
4190 if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask() &&
4191 m_bAVTimerEnabled) {
4192 for (auto req = mPendingBuffersMap.mPendingBuffersInRequest.begin();
4193 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
4194 if (req->frame_number != frame_number)
4195 continue;
4196 if(req->av_timestamp == 0) {
4197 buffer->status |= CAMERA3_BUFFER_STATUS_ERROR;
4198 }
4199 else {
4200 struct private_handle_t *priv_handle =
4201 (struct private_handle_t *) (*(buffer->buffer));
4202 setMetaData(priv_handle, SET_VT_TIMESTAMP, &(req->av_timestamp));
4203 }
4204 }
4205 }
4206
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004207 buffer->status |= mPendingBuffersMap.getBufErrStatus(buffer->buffer);
4208 LOGH("result frame_number = %d, buffer = %p",
4209 frame_number, buffer->buffer);
4210
4211 mPendingBuffersMap.removeBuf(buffer->buffer);
4212 mOutputBufferDispatcher.markBufferReady(frame_number, *buffer);
4213
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004214 if (mPreviewStarted == false) {
4215 QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
4216 if ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask()) {
Chien-Yu Chen509314b2017-04-07 15:27:55 -07004217 logEaselEvent("EASEL_STARTUP_LATENCY", "Preview Started");
4218
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004219 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
4220 mPerfLockMgr.releasePerfLock(PERF_LOCK_OPEN_CAMERA);
4221 mPreviewStarted = true;
4222
4223 // Set power hint for preview
4224 mPerfLockMgr.acquirePerfLock(PERF_LOCK_POWERHINT_ENCODE, 0);
4225 }
4226 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004227}
4228
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004229void QCamera3HardwareInterface::handlePendingResultMetadataWithLock(uint32_t frameNumber,
Chien-Yu Chenbc730232017-07-12 14:49:55 -07004230 camera_metadata_t *resultMetadata)
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004231{
4232 // Find the pending request for this result metadata.
4233 auto requestIter = mPendingRequestsList.begin();
4234 while (requestIter != mPendingRequestsList.end() && requestIter->frame_number != frameNumber) {
4235 requestIter++;
4236 }
4237
4238 if (requestIter == mPendingRequestsList.end()) {
4239 ALOGE("%s: Cannot find a pending request for frame number %u.", __FUNCTION__, frameNumber);
4240 return;
4241 }
4242
4243 // Update the result metadata
4244 requestIter->resultMetadata = resultMetadata;
4245
4246 // Check what type of request this is.
4247 bool liveRequest = false;
4248 if (requestIter->hdrplus) {
Chien-Yu Chen9264fe92017-04-29 03:28:46 +00004249 // HDR+ request doesn't have partial results.
4250 requestIter->partial_result_cnt = PARTIAL_RESULT_COUNT;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004251 } else if (requestIter->input_buffer != nullptr) {
4252 // Reprocessing request result is the same as settings.
4253 requestIter->resultMetadata = requestIter->settings;
4254 // Reprocessing request doesn't have partial results.
4255 requestIter->partial_result_cnt = PARTIAL_RESULT_COUNT;
4256 } else {
4257 liveRequest = true;
Chien-Yu Chen0a921f92017-08-27 17:25:33 -07004258 requestIter->partial_result_cnt = PARTIAL_RESULT_COUNT;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004259 mPendingLiveRequest--;
4260
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07004261 {
Chien-Yu Chen77ccd022017-06-23 12:00:36 -07004262 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07004263 // For a live request, send the metadata to HDR+ client.
4264 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
4265 gHdrPlusClient->notifyFrameMetadata(frameNumber, *resultMetadata,
4266 requestIter->partial_result_cnt == PARTIAL_RESULT_COUNT);
4267 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004268 }
4269 }
4270
Chien-Yu Chenbc730232017-07-12 14:49:55 -07004271 // Remove the lens shading map if it was not requested.
4272 if (requestIter->requestedLensShadingMapMode == ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF) {
4273 CameraMetadata metadata;
4274 metadata.acquire(resultMetadata);
4275 metadata.erase(ANDROID_STATISTICS_LENS_SHADING_MAP);
4276 metadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE,
4277 &requestIter->requestedLensShadingMapMode, 1);
4278
4279 requestIter->resultMetadata = metadata.release();
4280 }
4281
Chien-Yu Chen588cc852017-06-23 18:39:51 -07004282 dispatchResultMetadataWithLock(frameNumber, liveRequest);
4283}
4284
4285void QCamera3HardwareInterface::dispatchResultMetadataWithLock(uint32_t frameNumber,
4286 bool isLiveRequest) {
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004287 // The pending requests are ordered by increasing frame numbers. The result metadata are ready
4288 // to be sent if all previous pending requests are ready to be sent.
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004289 bool readyToSend = true;
4290
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004291 // Iterate through the pending requests to send out result metadata that are ready. Also if
4292 // this result metadata belongs to a live request, notify errors for previous live requests
4293 // that don't have result metadata yet.
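// Results are dispatched strictly in increasing frame-number order: a pending
// request without result metadata blocks every request behind it, and older
// live requests that still have no metadata are completed with ERROR_RESULT
// when a newer live result is dispatched.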
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004294 auto iter = mPendingRequestsList.begin();
4295 while (iter != mPendingRequestsList.end()) {
4296 // Check if current pending request is ready. If it's not ready, the following pending
4297 // requests are also not ready.
4298 if (readyToSend && iter->resultMetadata == nullptr) {
4299 readyToSend = false;
4300 }
4301
4302 bool thisLiveRequest = iter->hdrplus == false && iter->input_buffer == nullptr;
4303
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004304 camera3_capture_result_t result = {};
4305 result.frame_number = iter->frame_number;
4306 result.result = iter->resultMetadata;
4307 result.partial_result = iter->partial_result_cnt;
4308
4309 // If this pending buffer has result metadata, we may be able to send out shutter callback
4310 // and result metadata.
4311 if (iter->resultMetadata != nullptr) {
4312 if (!readyToSend) {
4313 // If any of the previous pending request is not ready, this pending request is
4314 // also not ready to send in order to keep shutter callbacks and result metadata
4315 // in order.
4316 iter++;
4317 continue;
4318 }
Chien-Yu Chen588cc852017-06-23 18:39:51 -07004319 } else if (iter->frame_number < frameNumber && isLiveRequest && thisLiveRequest) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004320 // If the result metadata belongs to a live request, notify errors for previous pending
4321 // live requests.
4322 mPendingLiveRequest--;
4323
4324 CameraMetadata dummyMetadata;
4325 dummyMetadata.update(ANDROID_REQUEST_ID, &(iter->request_id), 1);
4326 result.result = dummyMetadata.release();
4327
4328 notifyError(iter->frame_number, CAMERA3_MSG_ERROR_RESULT);
Shuzhen Wang1ee712a2017-03-22 17:51:26 -07004329
4330 // partial_result should be PARTIAL_RESULT_COUNT in case of
4331 // ERROR_RESULT.
4332 iter->partial_result_cnt = PARTIAL_RESULT_COUNT;
4333 result.partial_result = PARTIAL_RESULT_COUNT;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004334 } else {
4335 iter++;
4336 continue;
4337 }
4338
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004339 result.output_buffers = nullptr;
4340 result.num_output_buffers = 0;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004341 orchestrateResult(&result);
4342
4343 // For reprocessing, result metadata is the same as settings so do not free it here to
4344 // avoid double free.
4345 if (result.result != iter->settings) {
4346 free_camera_metadata((camera_metadata_t *)result.result);
4347 }
4348 iter->resultMetadata = nullptr;
4349 iter = erasePendingRequest(iter);
4350 }
4351
Chien-Yu Chen588cc852017-06-23 18:39:51 -07004352 if (isLiveRequest) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004353 for (auto &iter : mPendingRequestsList) {
4354 // Increment pipeline depth for the following pending requests.
4355 if (iter.frame_number > frameNumber) {
4356 iter.pipeline_depth++;
4357 }
4358 }
4359 }
4360
4361 unblockRequestIfNecessary();
4362}
4363
Thierry Strudel3d639192016-09-09 11:52:26 -07004364/*===========================================================================
4365 * FUNCTION : unblockRequestIfNecessary
4366 *
4367 * DESCRIPTION: Unblock capture_request if max_buffer hasn't been reached. Note
4368 * that mMutex is held when this function is called.
4369 *
4370 * PARAMETERS :
4371 *
4372 * RETURN :
4373 *
4374 *==========================================================================*/
4375void QCamera3HardwareInterface::unblockRequestIfNecessary()
4376{
4377 // Unblock process_capture_request
4378 pthread_cond_signal(&mRequestCond);
4379}
4380
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004381/*===========================================================================
4382 * FUNCTION : isHdrSnapshotRequest
4383 *
4384 * DESCRIPTION: Function to determine if the request is for an HDR snapshot
4385 *
4386 * PARAMETERS : camera3 request structure
4387 *
4388 * RETURN : boolean decision variable
4389 *
4390 *==========================================================================*/
4391bool QCamera3HardwareInterface::isHdrSnapshotRequest(camera3_capture_request *request)
4392{
4393 if (request == NULL) {
4394 LOGE("Invalid request handle");
4395 assert(0);
4396 return false;
4397 }
4398
4399 if (!mForceHdrSnapshot) {
4400 CameraMetadata frame_settings;
4401 frame_settings = request->settings;
4402
4403 if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
4404 uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
4405 if (metaMode != ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
4406 return false;
4407 }
4408 } else {
4409 return false;
4410 }
4411
4412 if (frame_settings.exists(ANDROID_CONTROL_SCENE_MODE)) {
4413 uint8_t fwk_sceneMode = frame_settings.find(ANDROID_CONTROL_SCENE_MODE).data.u8[0];
4414 if (fwk_sceneMode != ANDROID_CONTROL_SCENE_MODE_HDR) {
4415 return false;
4416 }
4417 } else {
4418 return false;
4419 }
4420 }
4421
4422 for (uint32_t i = 0; i < request->num_output_buffers; i++) {
4423 if (request->output_buffers[i].stream->format
4424 == HAL_PIXEL_FORMAT_BLOB) {
4425 return true;
4426 }
4427 }
4428
4429 return false;
4430}
4431/*===========================================================================
4432 * FUNCTION : orchestrateRequest
4433 *
4434 * DESCRIPTION: Orchestrates a capture request from camera service
4435 *
4436 * PARAMETERS :
4437 * @request : request from framework to process
4438 *
4439 * RETURN : Error status codes
4440 *
4441 *==========================================================================*/
4442int32_t QCamera3HardwareInterface::orchestrateRequest(
4443 camera3_capture_request_t *request)
4444{
4445
4446 uint32_t originalFrameNumber = request->frame_number;
4447 uint32_t originalOutputCount = request->num_output_buffers;
4448 const camera_metadata_t *original_settings = request->settings;
4449 List<InternalRequest> internallyRequestedStreams;
4450 List<InternalRequest> emptyInternalList;
4451
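// For an HDR snapshot, the single framework request is expanded into a
// bracketed sequence of internal requests (AE settling/metering passes and
// captures at the EV steps noted in the comments below), each carrying its
// own internal frame number. Only the request mapped back to the original
// framework frame number via allocStoreInternalFrameNumber() delivers a
// result; the purely internal requests are dropped in orchestrateResult()
// and orchestrateNotify().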
4452 if (isHdrSnapshotRequest(request) && request->input_buffer == NULL) {
4453 LOGD("Framework requested:%d buffers in HDR snapshot", request->num_output_buffers);
4454 uint32_t internalFrameNumber;
4455 CameraMetadata modified_meta;
4456
4457
4458 /* Add Blob channel to list of internally requested streams */
4459 for (uint32_t i = 0; i < request->num_output_buffers; i++) {
4460 if (request->output_buffers[i].stream->format
4461 == HAL_PIXEL_FORMAT_BLOB) {
4462 InternalRequest streamRequested;
4463 streamRequested.meteringOnly = 1;
4464 streamRequested.need_metadata = 0;
4465 streamRequested.stream = request->output_buffers[i].stream;
4466 internallyRequestedStreams.push_back(streamRequested);
4467 }
4468 }
4469 request->num_output_buffers = 0;
4470 auto itr = internallyRequestedStreams.begin();
4471
4472 /* Modify setting to set compensation */
4473 modified_meta = request->settings;
4474 int32_t expCompensation = GB_HDR_HALF_STEP_EV;
4475 uint8_t aeLock = 1;
4476 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4477 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4478 camera_metadata_t *modified_settings = modified_meta.release();
4479 request->settings = modified_settings;
4480
4481 /* Capture Settling & -2x frame */
4482 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4483 request->frame_number = internalFrameNumber;
4484 processCaptureRequest(request, internallyRequestedStreams);
4485
4486 request->num_output_buffers = originalOutputCount;
4487 _orchestrationDb.allocStoreInternalFrameNumber(originalFrameNumber, internalFrameNumber);
4488 request->frame_number = internalFrameNumber;
4489 processCaptureRequest(request, emptyInternalList);
4490 request->num_output_buffers = 0;
4491
4492 modified_meta = modified_settings;
4493 expCompensation = 0;
4494 aeLock = 1;
4495 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4496 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4497 modified_settings = modified_meta.release();
4498 request->settings = modified_settings;
4499
4500 /* Capture Settling & 0X frame */
4501
4502 itr = internallyRequestedStreams.begin();
4503 if (itr == internallyRequestedStreams.end()) {
4504 LOGE("Error Internally Requested Stream list is empty");
4505 assert(0);
4506 } else {
4507 itr->need_metadata = 0;
4508 itr->meteringOnly = 1;
4509 }
4510
4511 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4512 request->frame_number = internalFrameNumber;
4513 processCaptureRequest(request, internallyRequestedStreams);
4514
4515 itr = internallyRequestedStreams.begin();
4516 if (itr == internallyRequestedStreams.end()) {
4517 ALOGE("Error Internally Requested Stream list is empty");
4518 assert(0);
4519 } else {
4520 itr->need_metadata = 1;
4521 itr->meteringOnly = 0;
4522 }
4523
4524 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4525 request->frame_number = internalFrameNumber;
4526 processCaptureRequest(request, internallyRequestedStreams);
4527
4528 /* Capture 2X frame*/
4529 modified_meta = modified_settings;
4530 expCompensation = GB_HDR_2X_STEP_EV;
4531 aeLock = 1;
4532 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4533 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4534 modified_settings = modified_meta.release();
4535 request->settings = modified_settings;
4536
4537 itr = internallyRequestedStreams.begin();
4538 if (itr == internallyRequestedStreams.end()) {
4539 ALOGE("Error Internally Requested Stream list is empty");
4540 assert(0);
4541 } else {
4542 itr->need_metadata = 0;
4543 itr->meteringOnly = 1;
4544 }
4545 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4546 request->frame_number = internalFrameNumber;
4547 processCaptureRequest(request, internallyRequestedStreams);
4548
4549 itr = internallyRequestedStreams.begin();
4550 if (itr == internallyRequestedStreams.end()) {
4551 ALOGE("Error Internally Requested Stream list is empty");
4552 assert(0);
4553 } else {
4554 itr->need_metadata = 1;
4555 itr->meteringOnly = 0;
4556 }
4557
4558 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4559 request->frame_number = internalFrameNumber;
4560 processCaptureRequest(request, internallyRequestedStreams);
4561
4562
4563 /* Capture 2X on original streaming config*/
4564 internallyRequestedStreams.clear();
4565
4566 /* Restore original settings pointer */
4567 request->settings = original_settings;
4568 } else {
4569 uint32_t internalFrameNumber;
4570 _orchestrationDb.allocStoreInternalFrameNumber(request->frame_number, internalFrameNumber);
4571 request->frame_number = internalFrameNumber;
4572 return processCaptureRequest(request, internallyRequestedStreams);
4573 }
4574
4575 return NO_ERROR;
4576}
4577
4578/*===========================================================================
4579 * FUNCTION : orchestrateResult
4580 *
4581 * DESCRIPTION: Orchestrates a capture result to camera service
4582 *
4583 * PARAMETERS :
4584 * @result : capture result to forward to the framework
4585 *
4586 * RETURN :
4587 *
4588 *==========================================================================*/
4589void QCamera3HardwareInterface::orchestrateResult(
4590 camera3_capture_result_t *result)
4591{
4592 uint32_t frameworkFrameNumber;
4593 int32_t rc = _orchestrationDb.getFrameworkFrameNumber(result->frame_number,
4594 frameworkFrameNumber);
4595 if (rc != NO_ERROR) {
4596 LOGE("Cannot find translated frameworkFrameNumber");
4597 assert(0);
4598 } else {
4599 if (frameworkFrameNumber == EMPTY_FRAMEWORK_FRAME_NUMBER) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08004600 LOGD("Internal Request drop the result");
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004601 } else {
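// Rewrite ANDROID_SYNC_FRAME_NUMBER, if present, from the internal frame
// number domain back to the framework frame number before forwarding the
// result to the framework.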
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004602 if (result->result != NULL) {
Binhao Lin299ffc92017-04-27 11:22:47 -07004603 camera_metadata_t *metadata = const_cast<camera_metadata_t*>(result->result);
4604 camera_metadata_entry_t entry;
4605 int ret = find_camera_metadata_entry(metadata, ANDROID_SYNC_FRAME_NUMBER, &entry);
4606 if (ret == OK) {
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004607 int64_t sync_frame_number = frameworkFrameNumber;
Binhao Lin299ffc92017-04-27 11:22:47 -07004608 ret = update_camera_metadata_entry(metadata, entry.index, &sync_frame_number, 1, &entry);
4609 if (ret != OK)
4610 LOGE("Update ANDROID_SYNC_FRAME_NUMBER Error!");
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004611 }
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004612 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004613 result->frame_number = frameworkFrameNumber;
4614 mCallbackOps->process_capture_result(mCallbackOps, result);
4615 }
4616 }
4617}
4618
4619/*===========================================================================
4620 * FUNCTION : orchestrateNotify
4621 *
4622 * DESCRIPTION: Orchestrates a notify to camera service
4623 *
4624 * PARAMETERS :
4625 * @notify_msg : notify message to forward to the framework
4626 *
4627 * RETURN :
4628 *
4629 *==========================================================================*/
4630void QCamera3HardwareInterface::orchestrateNotify(camera3_notify_msg_t *notify_msg)
4631{
4632 uint32_t frameworkFrameNumber;
4633 uint32_t internalFrameNumber = notify_msg->message.shutter.frame_number;
Thierry Strudel2896d122017-02-23 19:18:03 -08004634 int32_t rc = NO_ERROR;
4635
4636 rc = _orchestrationDb.getFrameworkFrameNumber(internalFrameNumber,
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004637 frameworkFrameNumber);
Thierry Strudel2896d122017-02-23 19:18:03 -08004638
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004639 if (rc != NO_ERROR) {
Thierry Strudel2896d122017-02-23 19:18:03 -08004640 if (notify_msg->message.error.error_code == CAMERA3_MSG_ERROR_DEVICE) {
4641 LOGD("Sending CAMERA3_MSG_ERROR_DEVICE to framework");
4642 frameworkFrameNumber = 0;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004643 } else {
Thierry Strudel2896d122017-02-23 19:18:03 -08004644 LOGE("Cannot find translated frameworkFrameNumber");
4645 assert(0);
4646 return;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004647 }
4648 }
Thierry Strudel2896d122017-02-23 19:18:03 -08004649
4650 if (frameworkFrameNumber == EMPTY_FRAMEWORK_FRAME_NUMBER) {
4651 LOGD("Internal Request drop the notifyCb");
4652 } else {
4653 notify_msg->message.shutter.frame_number = frameworkFrameNumber;
4654 mCallbackOps->notify(mCallbackOps, notify_msg);
4655 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004656}
4657
4658/*===========================================================================
4659 * FUNCTION : FrameNumberRegistry
4660 *
4661 * DESCRIPTION: Constructor
4662 *
4663 * PARAMETERS :
4664 *
4665 * RETURN :
4666 *
4667 *==========================================================================*/
4668FrameNumberRegistry::FrameNumberRegistry()
4669{
4670 _nextFreeInternalNumber = INTERNAL_FRAME_STARTING_NUMBER;
4671}
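// The registry maps internally generated frame numbers back to the framework
// frame numbers they were created for. Entries created via
// generateStoreInternalFrameNumber() map to EMPTY_FRAMEWORK_FRAME_NUMBER, so
// their results and notifications are silently dropped, and the map is kept
// bounded by an LRU-style purge (FRAME_REGISTER_LRU_SIZE).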
4672
4673/*===========================================================================
4674 * FUNCTION : ~FrameNumberRegistry
4675 *
4676 * DESCRIPTION: Destructor
4677 *
4678 * PARAMETERS :
4679 *
4680 * RETURN :
4681 *
4682 *==========================================================================*/
4683FrameNumberRegistry::~FrameNumberRegistry()
4684{
4685}
4686
4687/*===========================================================================
4688 * FUNCTION : purgeOldEntriesLocked
4689 *
4690 * DESCRIPTION: Maintenance function to trigger the LRU cleanup mechanism
4691 *
4692 * PARAMETERS :
4693 *
4694 * RETURN : NONE
4695 *
4696 *==========================================================================*/
4697void FrameNumberRegistry::purgeOldEntriesLocked()
4698{
4699 while (_register.begin() != _register.end()) {
4700 auto itr = _register.begin();
4701 if (itr->first < (_nextFreeInternalNumber - FRAME_REGISTER_LRU_SIZE)) {
4702 _register.erase(itr);
4703 } else {
4704 return;
4705 }
4706 }
4707}
4708
4709/*===========================================================================
4710 * FUNCTION : allocStoreInternalFrameNumber
4711 *
4712 * DESCRIPTION: Method to record a framework request and associate a newly
4713 * generated internal frame number with it
4714 *
4715 * PARAMETERS :
4716 * @frameworkFrameNumber: Identifier given by the framework
4717 * @internalFrameNumber : Output parameter that receives the newly generated
4718 * internal frame number
4719 *
4720 * RETURN : Error code
4721 *
4722 *==========================================================================*/
4723int32_t FrameNumberRegistry::allocStoreInternalFrameNumber(uint32_t frameworkFrameNumber,
4724 uint32_t &internalFrameNumber)
4725{
4726 Mutex::Autolock lock(mRegistryLock);
4727 internalFrameNumber = _nextFreeInternalNumber++;
4728 LOGD("Storing ff#:%d, with internal:%d", frameworkFrameNumber, internalFrameNumber);
4729 _register.insert(std::pair<uint32_t,uint32_t>(internalFrameNumber, frameworkFrameNumber));
4730 purgeOldEntriesLocked();
4731 return NO_ERROR;
4732}
4733
4734/*===========================================================================
4735 * FUNCTION : generateStoreInternalFrameNumber
4736 *
4737 * DESCRIPTION: Method to generate a new internal frame number that is not
4738 * associated with any framework request
4739 *
4740 * PARAMETERS :
4741 * @internalFrameNumber: Output parameter that receives the newly generated
4742 * internal frame number
4742 *
4743 *
4744 * RETURN : Error code
4745 *
4746 *==========================================================================*/
4747int32_t FrameNumberRegistry::generateStoreInternalFrameNumber(uint32_t &internalFrameNumber)
4748{
4749 Mutex::Autolock lock(mRegistryLock);
4750 internalFrameNumber = _nextFreeInternalNumber++;
4751 LOGD("Generated internal framenumber:%d", internalFrameNumber);
4752 _register.insert(std::pair<uint32_t,uint32_t>(internalFrameNumber, EMPTY_FRAMEWORK_FRAME_NUMBER));
4753 purgeOldEntriesLocked();
4754 return NO_ERROR;
4755}
4756
4757/*===========================================================================
4758 * FUNCTION : getFrameworkFrameNumber
4759 *
4760 * DESCRIPTION: Method to query the framework frame number given an internal one
4761 *
4762 * PARAMETERS :
4763 * @internalFrameNumber : Internal frame number to look up
4764 * @frameworkFrameNumber: Output parameter holding the framework frame number
4765 *
4766 * RETURN : Error code
4767 *
4768 *==========================================================================*/
4769int32_t FrameNumberRegistry::getFrameworkFrameNumber(uint32_t internalFrameNumber,
4770 uint32_t &frameworkFrameNumber)
4771{
4772 Mutex::Autolock lock(mRegistryLock);
4773 auto itr = _register.find(internalFrameNumber);
4774 if (itr == _register.end()) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08004775 LOGE("Cannot find internal#: %d", internalFrameNumber);
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004776 return -ENOENT;
4777 }
4778
4779 frameworkFrameNumber = itr->second;
4780 purgeOldEntriesLocked();
4781 return NO_ERROR;
4782}
Thierry Strudel3d639192016-09-09 11:52:26 -07004783
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004784status_t QCamera3HardwareInterface::fillPbStreamConfig(
Chien-Yu Chen14d3e392017-07-10 18:27:05 -07004785 pbcamera::StreamConfiguration *config, uint32_t pbStreamId, QCamera3Channel *channel,
4786 uint32_t streamIndex) {
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004787 if (config == nullptr) {
4788 LOGE("%s: config is null", __FUNCTION__);
4789 return BAD_VALUE;
4790 }
4791
4792 if (channel == nullptr) {
4793 LOGE("%s: channel is null", __FUNCTION__);
4794 return BAD_VALUE;
4795 }
4796
4797 QCamera3Stream *stream = channel->getStreamByIndex(streamIndex);
4798 if (stream == nullptr) {
4799 LOGE("%s: Failed to get stream %d in channel.", __FUNCTION__, streamIndex);
4800 return NAME_NOT_FOUND;
4801 }
4802
4803 const cam_stream_info_t* streamInfo = stream->getStreamInfo();
4804 if (streamInfo == nullptr) {
4805 LOGE("%s: Failed to get stream info for stream %d in channel.", __FUNCTION__, streamIndex);
4806 return NAME_NOT_FOUND;
4807 }
4808
4809 config->id = pbStreamId;
4810 config->image.width = streamInfo->dim.width;
4811 config->image.height = streamInfo->dim.height;
4812 config->image.padding = 0;
Chien-Yu Chen14d3e392017-07-10 18:27:05 -07004813
4814 int bytesPerPixel = 0;
4815
4816 switch (streamInfo->fmt) {
4817 case CAM_FORMAT_YUV_420_NV21:
4818 config->image.format = HAL_PIXEL_FORMAT_YCrCb_420_SP;
4819 bytesPerPixel = 1;
4820 break;
4821 case CAM_FORMAT_YUV_420_NV12:
4822 case CAM_FORMAT_YUV_420_NV12_VENUS:
4823 config->image.format = HAL_PIXEL_FORMAT_YCbCr_420_SP;
4824 bytesPerPixel = 1;
4825 break;
4826 default:
4827 ALOGE("%s: Stream format %d not supported.", __FUNCTION__, streamInfo->fmt);
4828 return BAD_VALUE;
4829 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004830
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004831 uint32_t totalPlaneSize = 0;
4832
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004833 // Fill plane information.
4834 for (uint32_t i = 0; i < streamInfo->buf_planes.plane_info.num_planes; i++) {
4835 pbcamera::PlaneConfiguration plane;
Chien-Yu Chen14d3e392017-07-10 18:27:05 -07004836 plane.stride = streamInfo->buf_planes.plane_info.mp[i].stride * bytesPerPixel;
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004837 plane.scanline = streamInfo->buf_planes.plane_info.mp[i].scanline;
4838 config->image.planes.push_back(plane);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004839
4840 totalPlaneSize += (plane.stride * plane.scanline);
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004841 }
4842
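// Any allocation beyond the visible planes (stride x scanline of each plane)
// is reported as padding so the HDR+ service sees the full buffer layout.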
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004843 config->image.padding = streamInfo->buf_planes.plane_info.frame_len - totalPlaneSize;
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004844 return OK;
4845}
4846
Thierry Strudel3d639192016-09-09 11:52:26 -07004847/*===========================================================================
4848 * FUNCTION : processCaptureRequest
4849 *
4850 * DESCRIPTION: process a capture request from camera service
4851 *
4852 * PARAMETERS :
4853 * @request : request from framework to process
4854 *
4855 * RETURN :
4856 *
4857 *==========================================================================*/
4858int QCamera3HardwareInterface::processCaptureRequest(
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004859 camera3_capture_request_t *request,
4860 List<InternalRequest> &internallyRequestedStreams)
Thierry Strudel3d639192016-09-09 11:52:26 -07004861{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004862 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_PROC_CAP_REQ);
Thierry Strudel3d639192016-09-09 11:52:26 -07004863 int rc = NO_ERROR;
4864 int32_t request_id;
4865 CameraMetadata meta;
Thierry Strudel3d639192016-09-09 11:52:26 -07004866 bool isVidBufRequested = false;
4867 camera3_stream_buffer_t *pInputBuffer = NULL;
Thierry Strudel54dc9782017-02-15 12:12:10 -08004868 char prop[PROPERTY_VALUE_MAX];
Thierry Strudel3d639192016-09-09 11:52:26 -07004869
4870 pthread_mutex_lock(&mMutex);
4871
4872 // Validate current state
4873 switch (mState) {
4874 case CONFIGURED:
4875 case STARTED:
4876 /* valid state */
4877 break;
4878
4879 case ERROR:
4880 pthread_mutex_unlock(&mMutex);
4881 handleCameraDeviceError();
4882 return -ENODEV;
4883
4884 default:
4885 LOGE("Invalid state %d", mState);
4886 pthread_mutex_unlock(&mMutex);
4887 return -ENODEV;
4888 }
4889
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004890 rc = validateCaptureRequest(request, internallyRequestedStreams);
Thierry Strudel3d639192016-09-09 11:52:26 -07004891 if (rc != NO_ERROR) {
4892 LOGE("incoming request is not valid");
4893 pthread_mutex_unlock(&mMutex);
4894 return rc;
4895 }
4896
4897 meta = request->settings;
4898
4899 // For first capture request, send capture intent, and
4900 // stream on all streams
4901 if (mState == CONFIGURED) {
Chien-Yu Chene96475e2017-04-11 11:53:26 -07004902 logEaselEvent("EASEL_STARTUP_LATENCY", "First request");
Thierry Strudel3d639192016-09-09 11:52:26 -07004903 // send an unconfigure to the backend so that the isp
4904 // resources are deallocated
4905 if (!mFirstConfiguration) {
4906 cam_stream_size_info_t stream_config_info;
4907 int32_t hal_version = CAM_HAL_V3;
4908 memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
4909 stream_config_info.buffer_info.min_buffers =
4910 MIN_INFLIGHT_REQUESTS;
4911 stream_config_info.buffer_info.max_buffers =
Thierry Strudel2896d122017-02-23 19:18:03 -08004912 m_bIs4KVideo ? 0 :
Jason Leea46ad5e2017-07-07 15:20:56 -07004913 m_bEis3PropertyEnabled && m_bIsVideo ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07004914 clear_metadata_buffer(mParameters);
4915 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4916 CAM_INTF_PARM_HAL_VERSION, hal_version);
4917 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4918 CAM_INTF_META_STREAM_INFO, stream_config_info);
4919 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
4920 mParameters);
4921 if (rc < 0) {
4922 LOGE("set_parms for unconfigure failed");
4923 pthread_mutex_unlock(&mMutex);
4924 return rc;
4925 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07004926
Thierry Strudel3d639192016-09-09 11:52:26 -07004927 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004928 mPerfLockMgr.acquirePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07004929 /* get eis information for stream configuration */
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004930 cam_is_type_t isTypeVideo, isTypePreview, is_type=IS_TYPE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07004931 char is_type_value[PROPERTY_VALUE_MAX];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004932 property_get("persist.camera.is_type", is_type_value, "4");
4933 isTypeVideo = static_cast<cam_is_type_t>(atoi(is_type_value));
4934 // Make default value for preview IS_TYPE as IS_TYPE_EIS_2_0
4935 property_get("persist.camera.is_type_preview", is_type_value, "4");
4936 isTypePreview = static_cast<cam_is_type_t>(atoi(is_type_value));
4937 LOGD("isTypeVideo: %d isTypePreview: %d", isTypeVideo, isTypePreview);
Thierry Strudel3d639192016-09-09 11:52:26 -07004938
4939 if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
4940 int32_t hal_version = CAM_HAL_V3;
4941 uint8_t captureIntent =
4942 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
4943 mCaptureIntent = captureIntent;
4944 clear_metadata_buffer(mParameters);
4945 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version);
4946 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_CAPTURE_INTENT, captureIntent);
4947 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07004948 if (mFirstConfiguration) {
4949 // configure instant AEC
4950 // Instant AEC is a session based parameter and it is needed only
4951 // once per complete session after open camera.
4952 // i.e. This is set only once for the first capture request, after open camera.
4953 setInstantAEC(meta);
4954 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004955 uint8_t fwkVideoStabMode=0;
4956 if (meta.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
4957 fwkVideoStabMode = meta.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
4958 }
4959
Xue Tuecac74e2017-04-17 13:58:15 -07004960 // If the EIS setprop is enabled, turn EIS on only for video/preview streams
4961 bool setEis = m_bEisEnable && m_bEisSupportedSize &&
Jason Lee603176d2017-05-31 11:43:27 -07004962 (isTypeVideo >= IS_TYPE_EIS_2_0) && !meta.exists(QCAMERA3_USE_AV_TIMER);
Thierry Strudel3d639192016-09-09 11:52:26 -07004963 int32_t vsMode;
4964 vsMode = (setEis)? DIS_ENABLE: DIS_DISABLE;
4965 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_DIS_ENABLE, vsMode)) {
4966 rc = BAD_VALUE;
4967 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004968 LOGD("setEis %d", setEis);
4969 bool eis3Supported = false;
4970 size_t count = IS_TYPE_MAX;
4971 count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
4972 for (size_t i = 0; i < count; i++) {
4973 if (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0) {
4974 eis3Supported = true;
4975 break;
4976 }
4977 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004978
4979 //IS type will be 0 unless EIS is supported. If EIS is supported
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004980 //it could either be 4 or 5 depending on the stream and video size
Thierry Strudel3d639192016-09-09 11:52:26 -07004981 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
4982 if (setEis) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004983 if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_PREVIEW) {
4984 is_type = isTypePreview;
4985 } else if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_VIDEO ) {
4986 if ( (isTypeVideo == IS_TYPE_EIS_3_0) && (eis3Supported == FALSE) ) {
4987 LOGW(" EIS_3.0 is not supported and so setting EIS_2.0");
Thierry Strudel3d639192016-09-09 11:52:26 -07004988 is_type = IS_TYPE_EIS_2_0;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004989 } else {
4990 is_type = isTypeVideo;
Thierry Strudel3d639192016-09-09 11:52:26 -07004991 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004992 } else {
4993 is_type = IS_TYPE_NONE;
4994 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004995 mStreamConfigInfo.is_type[i] = is_type;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004996 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07004997 mStreamConfigInfo.is_type[i] = IS_TYPE_NONE;
4998 }
4999 }
5000
5001 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5002 CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
5003
Thierry Strudel54dc9782017-02-15 12:12:10 -08005004 //Disable tintless only if the property is set to 0
5005 memset(prop, 0, sizeof(prop));
5006 property_get("persist.camera.tintless.enable", prop, "1");
5007 int32_t tintless_value = atoi(prop);
5008
Thierry Strudel3d639192016-09-09 11:52:26 -07005009 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5010 CAM_INTF_PARM_TINTLESS, tintless_value);
Thierry Strudel54dc9782017-02-15 12:12:10 -08005011
Thierry Strudel3d639192016-09-09 11:52:26 -07005012 //Disable CDS for HFR mode or if DIS/EIS is on.
5013 //CDS is a session parameter in the backend/ISP, so it needs to be set/reset
5014 //after every configure_stream.
5015 if ((CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) ||
5016 (m_bIsVideo)) {
5017 int32_t cds = CAM_CDS_MODE_OFF;
5018 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5019 CAM_INTF_PARM_CDS_MODE, cds))
5020 LOGE("Failed to disable CDS for HFR mode");
5021
5022 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005023
5024 if (m_debug_avtimer || meta.exists(QCAMERA3_USE_AV_TIMER)) {
5025 uint8_t* use_av_timer = NULL;
5026
5027 if (m_debug_avtimer){
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005028 LOGI(" Enabling AV timer through setprop");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005029 use_av_timer = &m_debug_avtimer;
Binhao Lin09245482017-08-31 18:25:29 -07005030 m_bAVTimerEnabled = true;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005031 }
5032 else{
5033 use_av_timer =
5034 meta.find(QCAMERA3_USE_AV_TIMER).data.u8;
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005035 if (use_av_timer) {
Binhao Lin09245482017-08-31 18:25:29 -07005036 m_bAVTimerEnabled = true;
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005037 LOGI("Enabling AV timer through Metadata: use_av_timer: %d", *use_av_timer);
5038 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005039 }
5040
5041 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_USE_AV_TIMER, *use_av_timer)) {
5042 rc = BAD_VALUE;
5043 }
5044 }
5045
Thierry Strudel3d639192016-09-09 11:52:26 -07005046 setMobicat();
5047
Emilian Peev49c4c6b2017-04-24 10:21:34 +01005048 uint8_t nrMode = 0;
5049 if (meta.exists(ANDROID_NOISE_REDUCTION_MODE)) {
5050 nrMode = meta.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
5051 }
5052
Thierry Strudel3d639192016-09-09 11:52:26 -07005053 /* Set fps and hfr mode while sending meta stream info so that sensor
5054 * can configure appropriate streaming mode */
5055 mHFRVideoFps = DEFAULT_VIDEO_FPS;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005056 mMinInFlightRequests = MIN_INFLIGHT_REQUESTS;
5057 mMaxInFlightRequests = MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07005058 if (meta.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
5059 rc = setHalFpsRange(meta, mParameters);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005060 if (rc == NO_ERROR) {
5061 int32_t max_fps =
5062 (int32_t) meta.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
Zhijun He21b864a2016-06-24 13:41:19 -07005063 if (max_fps == 60 || mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005064 mMinInFlightRequests = MIN_INFLIGHT_60FPS_REQUESTS;
5065 }
5066 /* For HFR, more buffers are dequeued upfront to improve the performance */
5067 if (mBatchSize) {
5068 mMinInFlightRequests = MIN_INFLIGHT_HFR_REQUESTS;
5069 mMaxInFlightRequests = MAX_INFLIGHT_HFR_REQUESTS;
5070 }
5071 }
5072 else {
Thierry Strudel3d639192016-09-09 11:52:26 -07005073 LOGE("setHalFpsRange failed");
5074 }
5075 }
5076 if (meta.exists(ANDROID_CONTROL_MODE)) {
5077 uint8_t metaMode = meta.find(ANDROID_CONTROL_MODE).data.u8[0];
5078 rc = extractSceneMode(meta, metaMode, mParameters);
5079 if (rc != NO_ERROR) {
5080 LOGE("extractSceneMode failed");
5081 }
5082 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005083 memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07005084
Thierry Strudel04e026f2016-10-10 11:27:36 -07005085 if (meta.exists(QCAMERA3_VIDEO_HDR_MODE)) {
5086 cam_video_hdr_mode_t vhdr = (cam_video_hdr_mode_t)
5087 meta.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
5088 rc = setVideoHdrMode(mParameters, vhdr);
5089 if (rc != NO_ERROR) {
5090 LOGE("setVideoHDR is failed");
5091 }
5092 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005093
Ranjith Kagathi Anandae5df3752017-04-28 11:22:51 -07005094 if (meta.exists(TANGO_MODE_DATA_SENSOR_FULLFOV)) {
Ranjith Kagathi Ananda0533b682017-03-24 17:52:46 -07005095 uint8_t sensorModeFullFov =
Ranjith Kagathi Anandae5df3752017-04-28 11:22:51 -07005096 meta.find(TANGO_MODE_DATA_SENSOR_FULLFOV).data.u8[0];
Ranjith Kagathi Ananda0533b682017-03-24 17:52:46 -07005097 LOGD("SENSOR_MODE_FULLFOV %d" , sensorModeFullFov);
5098 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_SENSOR_MODE_FULLFOV,
5099 sensorModeFullFov)) {
5100 rc = BAD_VALUE;
5101 }
5102 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005103 //TODO: validate the arguments; HSV scene mode should have only the
5104 //advertised fps ranges.
5105
5106 /* Set the capture intent, HAL version, tintless, stream info,
5107 * and DIS enable parameters in the backend. */
5108 LOGD("set_parms META_STREAM_INFO " );
5109 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
Thierry Strudel2896d122017-02-23 19:18:03 -08005110 LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%" PRIx64
5111 ", Format:%d is_type: %d",
Thierry Strudel3d639192016-09-09 11:52:26 -07005112 mStreamConfigInfo.type[i],
5113 mStreamConfigInfo.stream_sizes[i].width,
5114 mStreamConfigInfo.stream_sizes[i].height,
5115 mStreamConfigInfo.postprocess_mask[i],
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005116 mStreamConfigInfo.format[i],
5117 mStreamConfigInfo.is_type[i]);
Thierry Strudel3d639192016-09-09 11:52:26 -07005118 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005119
Thierry Strudel3d639192016-09-09 11:52:26 -07005120 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
5121 mParameters);
5122 if (rc < 0) {
5123 LOGE("set_parms failed for hal version, stream info");
5124 }
5125
Chien-Yu Chen605c3872017-06-14 11:09:23 -07005126 cam_sensor_mode_info_t sensorModeInfo = {};
5127 rc = getSensorModeInfo(sensorModeInfo);
Thierry Strudel3d639192016-09-09 11:52:26 -07005128 if (rc != NO_ERROR) {
5129 LOGE("Failed to get sensor output size");
5130 pthread_mutex_unlock(&mMutex);
5131 goto error_exit;
5132 }
5133
5134 mCropRegionMapper.update(gCamCapability[mCameraId]->active_array_size.width,
5135 gCamCapability[mCameraId]->active_array_size.height,
Chien-Yu Chen605c3872017-06-14 11:09:23 -07005136 sensorModeInfo.active_array_size.width,
5137 sensorModeInfo.active_array_size.height);
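        /* Update the crop region mapper with both the full active array size and
         * the active array size of the selected sensor mode, so that crop regions
         * can be translated between the two coordinate spaces. */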
Thierry Strudel3d639192016-09-09 11:52:26 -07005138
5139 /* Set batchmode before initializing channel. Since registerBuffer
5140     * internally initializes some of the channels, it is better to set batch mode
5141     * even before the first registerBuffer call */
5142 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5143 it != mStreamInfo.end(); it++) {
5144 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
5145 if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
5146 && mBatchSize) {
5147 rc = channel->setBatchSize(mBatchSize);
5148 //Disable per frame map unmap for HFR/batchmode case
5149 rc |= channel->setPerFrameMapUnmap(false);
5150 if (NO_ERROR != rc) {
5151 LOGE("Channel init failed %d", rc);
5152 pthread_mutex_unlock(&mMutex);
5153 goto error_exit;
5154 }
5155 }
5156 }
5157
5158 //First initialize all streams
5159 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5160 it != mStreamInfo.end(); it++) {
5161 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
Emilian Peev49c4c6b2017-04-24 10:21:34 +01005162
5163 /* Initial value of NR mode is needed before stream on */
5164 channel->setNRMode(nrMode);
Thierry Strudel3d639192016-09-09 11:52:26 -07005165 if ((((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) ||
5166 ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask())) &&
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005167 setEis) {
5168 for (size_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
5169 if ( (1U << mStreamConfigInfo.type[i]) == channel->getStreamTypeMask() ) {
5170 is_type = mStreamConfigInfo.is_type[i];
5171 break;
5172 }
5173 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005174 rc = channel->initialize(is_type);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005175 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07005176 rc = channel->initialize(IS_TYPE_NONE);
5177 }
5178 if (NO_ERROR != rc) {
5179 LOGE("Channel initialization failed %d", rc);
5180 pthread_mutex_unlock(&mMutex);
5181 goto error_exit;
5182 }
5183 }
5184
5185 if (mRawDumpChannel) {
5186 rc = mRawDumpChannel->initialize(IS_TYPE_NONE);
5187 if (rc != NO_ERROR) {
5188 LOGE("Error: Raw Dump Channel init failed");
5189 pthread_mutex_unlock(&mMutex);
5190 goto error_exit;
5191 }
5192 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07005193 if (mHdrPlusRawSrcChannel) {
5194 rc = mHdrPlusRawSrcChannel->initialize(IS_TYPE_NONE);
5195 if (rc != NO_ERROR) {
5196 LOGE("Error: HDR+ RAW Source Channel init failed");
5197 pthread_mutex_unlock(&mMutex);
5198 goto error_exit;
5199 }
5200 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005201 if (mSupportChannel) {
5202 rc = mSupportChannel->initialize(IS_TYPE_NONE);
5203 if (rc < 0) {
5204 LOGE("Support channel initialization failed");
5205 pthread_mutex_unlock(&mMutex);
5206 goto error_exit;
5207 }
5208 }
5209 if (mAnalysisChannel) {
5210 rc = mAnalysisChannel->initialize(IS_TYPE_NONE);
5211 if (rc < 0) {
5212 LOGE("Analysis channel initialization failed");
5213 pthread_mutex_unlock(&mMutex);
5214 goto error_exit;
5215 }
5216 }
5217 if (mDummyBatchChannel) {
5218 rc = mDummyBatchChannel->setBatchSize(mBatchSize);
5219 if (rc < 0) {
5220 LOGE("mDummyBatchChannel setBatchSize failed");
5221 pthread_mutex_unlock(&mMutex);
5222 goto error_exit;
5223 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005224 rc = mDummyBatchChannel->initialize(IS_TYPE_NONE);
Thierry Strudel3d639192016-09-09 11:52:26 -07005225 if (rc < 0) {
5226 LOGE("mDummyBatchChannel initialization failed");
5227 pthread_mutex_unlock(&mMutex);
5228 goto error_exit;
5229 }
5230 }
5231
5232 // Set bundle info
5233 rc = setBundleInfo();
5234 if (rc < 0) {
5235 LOGE("setBundleInfo failed %d", rc);
5236 pthread_mutex_unlock(&mMutex);
5237 goto error_exit;
5238 }
5239
5240 //update settings from app here
5241 if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
5242 mIsDeviceLinked = meta.find(QCAMERA3_DUALCAM_LINK_ENABLE).data.u8[0];
5243 LOGH("Dualcam: setting On=%d id =%d", mIsDeviceLinked, mCameraId);
5244 }
5245 if (meta.exists(QCAMERA3_DUALCAM_LINK_IS_MAIN)) {
5246 mIsMainCamera = meta.find(QCAMERA3_DUALCAM_LINK_IS_MAIN).data.u8[0];
5247 LOGH("Dualcam: Is this main camera = %d id =%d", mIsMainCamera, mCameraId);
5248 }
5249 if (meta.exists(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID)) {
5250 mLinkedCameraId = meta.find(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID).data.u8[0];
5251 LOGH("Dualcam: Linked camera Id %d id =%d", mLinkedCameraId, mCameraId);
5252
5253 if ( (mLinkedCameraId >= MM_CAMERA_MAX_NUM_SENSORS) &&
5254 (mLinkedCameraId != mCameraId) ) {
5255 LOGE("Dualcam: mLinkedCameraId %d is invalid, current cam id = %d",
5256 mLinkedCameraId, mCameraId);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005257 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005258 goto error_exit;
5259 }
5260 }
5261
5262 // add bundle related cameras
5263 LOGH("%s: Dualcam: id =%d, mIsDeviceLinked=%d", __func__,mCameraId, mIsDeviceLinked);
5264 if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005265 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
5266 &m_pDualCamCmdPtr->bundle_info;
5267 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
Thierry Strudel3d639192016-09-09 11:52:26 -07005268 if (mIsDeviceLinked)
5269 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_ON;
5270 else
5271 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
5272
5273 pthread_mutex_lock(&gCamLock);
5274
5275 if (sessionId[mLinkedCameraId] == 0xDEADBEEF) {
5276 LOGE("Dualcam: Invalid Session Id ");
5277 pthread_mutex_unlock(&gCamLock);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005278 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005279 goto error_exit;
5280 }
5281
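            /* The main camera is configured as PRIMARY with the bayer role and the
             * linked camera as SECONDARY with the mono role; both sides use
             * CAM_3A_SYNC_FOLLOW and reference the peer's session id. */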
5282 if (mIsMainCamera == 1) {
5283 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
5284 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
Thierry Strudel269c81a2016-10-12 12:13:59 -07005285 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005286 m_pRelCamSyncBuf->cam_role = CAM_ROLE_BAYER;
Thierry Strudel3d639192016-09-09 11:52:26 -07005287 // related session id should be session id of linked session
5288 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5289 } else {
5290 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
5291 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
Thierry Strudel269c81a2016-10-12 12:13:59 -07005292 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005293 m_pRelCamSyncBuf->cam_role = CAM_ROLE_MONO;
Thierry Strudel3d639192016-09-09 11:52:26 -07005294 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5295 }
Thierry Strudel2896d122017-02-23 19:18:03 -08005296 m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
Thierry Strudel3d639192016-09-09 11:52:26 -07005297 pthread_mutex_unlock(&gCamLock);
5298
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005299 rc = mCameraHandle->ops->set_dual_cam_cmd(
5300 mCameraHandle->camera_handle);
Thierry Strudel3d639192016-09-09 11:52:26 -07005301 if (rc < 0) {
5302 LOGE("Dualcam: link failed");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005303 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005304 goto error_exit;
5305 }
5306 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005307 goto no_error;
5308error_exit:
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005309 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07005310 return rc;
5311no_error:
Thierry Strudel3d639192016-09-09 11:52:26 -07005312 mWokenUpByDaemon = false;
5313 mPendingLiveRequest = 0;
5314 mFirstConfiguration = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07005315 }
5316
5317 uint32_t frameNumber = request->frame_number;
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005318 cam_stream_ID_t streamsArray;
Thierry Strudel3d639192016-09-09 11:52:26 -07005319
5320 if (mFlushPerf) {
5321 //we cannot accept any requests during flush
5322 LOGE("process_capture_request cannot proceed during flush");
5323 pthread_mutex_unlock(&mMutex);
5324 return NO_ERROR; //should return an error
5325 }
5326
5327 if (meta.exists(ANDROID_REQUEST_ID)) {
5328 request_id = meta.find(ANDROID_REQUEST_ID).data.i32[0];
5329 mCurrentRequestId = request_id;
5330 LOGD("Received request with id: %d", request_id);
5331 } else if (mState == CONFIGURED || mCurrentRequestId == -1){
5332 LOGE("Unable to find request id field, \
5333 & no previous id available");
5334 pthread_mutex_unlock(&mMutex);
5335 return NAME_NOT_FOUND;
5336 } else {
5337 LOGD("Re-using old request id");
5338 request_id = mCurrentRequestId;
5339 }
5340
5341 LOGH("num_output_buffers = %d input_buffer = %p frame_number = %d",
5342 request->num_output_buffers,
5343 request->input_buffer,
5344 frameNumber);
5345 // Acquire all request buffers first
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005346 streamsArray.num_streams = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07005347 int blob_request = 0;
Emilian Peev7650c122017-01-19 08:24:33 -08005348 bool depthRequestPresent = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07005349 uint32_t snapshotStreamId = 0;
5350 for (size_t i = 0; i < request->num_output_buffers; i++) {
5351 const camera3_stream_buffer_t& output = request->output_buffers[i];
5352 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
5353
Emilian Peev7650c122017-01-19 08:24:33 -08005354 if ((output.stream->format == HAL_PIXEL_FORMAT_BLOB) &&
5355 (output.stream->data_space != HAL_DATASPACE_DEPTH)) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005356            //FIXME??: Call function to store a local copy of jpeg data for encode params.
Thierry Strudel3d639192016-09-09 11:52:26 -07005357 blob_request = 1;
5358 snapshotStreamId = channel->getStreamID(channel->getStreamTypeMask());
5359 }
5360
5361 if (output.acquire_fence != -1) {
5362 rc = sync_wait(output.acquire_fence, TIMEOUT_NEVER);
5363 close(output.acquire_fence);
5364 if (rc != OK) {
5365 LOGE("sync wait failed %d", rc);
5366 pthread_mutex_unlock(&mMutex);
5367 return rc;
5368 }
5369 }
5370
Emilian Peev0f3c3162017-03-15 12:57:46 +00005371 if ((output.stream->format == HAL_PIXEL_FORMAT_BLOB) &&
5372 (output.stream->data_space == HAL_DATASPACE_DEPTH)) {
Emilian Peev7650c122017-01-19 08:24:33 -08005373 depthRequestPresent = true;
5374 continue;
5375 }
5376
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005377 streamsArray.stream_request[streamsArray.num_streams++].streamID =
Thierry Strudel3d639192016-09-09 11:52:26 -07005378 channel->getStreamID(channel->getStreamTypeMask());
Thierry Strudel3d639192016-09-09 11:52:26 -07005379
5380 if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
5381 isVidBufRequested = true;
5382 }
5383 }
5384
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005385    //FIXME: Add checks to ensure no dups in validateCaptureRequest
5386 for (auto itr = internallyRequestedStreams.begin(); itr != internallyRequestedStreams.end();
5387 itr++) {
5388 QCamera3Channel *channel = (QCamera3Channel *)(*itr).stream->priv;
5389 streamsArray.stream_request[streamsArray.num_streams++].streamID =
5390 channel->getStreamID(channel->getStreamTypeMask());
5391
5392 if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
5393 isVidBufRequested = true;
5394 }
5395 }
5396
Thierry Strudel3d639192016-09-09 11:52:26 -07005397 if (blob_request) {
Shuzhen Wang850a7c22017-05-02 14:48:23 -07005398 ATRACE_ASYNC_BEGIN("SNAPSHOT", frameNumber);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005399 mPerfLockMgr.acquirePerfLock(PERF_LOCK_TAKE_SNAPSHOT);
Thierry Strudel3d639192016-09-09 11:52:26 -07005400 }
5401 if (blob_request && mRawDumpChannel) {
5402 LOGD("Trigger Raw based on blob request if Raw dump is enabled");
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005403 streamsArray.stream_request[streamsArray.num_streams].streamID =
Thierry Strudel3d639192016-09-09 11:52:26 -07005404 mRawDumpChannel->getStreamID(mRawDumpChannel->getStreamTypeMask());
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005405 streamsArray.stream_request[streamsArray.num_streams++].buf_index = CAM_FREERUN_IDX;
Thierry Strudel3d639192016-09-09 11:52:26 -07005406 }
5407
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005408 {
5409 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5410 // Request a RAW buffer if
5411 // 1. mHdrPlusRawSrcChannel is valid.
5412 // 2. frameNumber is multiples of kHdrPlusRawPeriod (in order to limit RAW capture rate.)
5413        // 2. frameNumber is a multiple of kHdrPlusRawPeriod (in order to limit RAW capture rate).
5414 if (mHdrPlusRawSrcChannel && frameNumber % kHdrPlusRawPeriod == 0 &&
5415 mHdrPlusPendingRequests.size() == 0) {
5416 streamsArray.stream_request[streamsArray.num_streams].streamID =
5417 mHdrPlusRawSrcChannel->getStreamID(mHdrPlusRawSrcChannel->getStreamTypeMask());
5418 streamsArray.stream_request[streamsArray.num_streams++].buf_index = CAM_FREERUN_IDX;
5419 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07005420 }
5421
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005422 //extract capture intent
5423 if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
5424 mCaptureIntent =
5425 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
5426 }
5427
5428 if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
5429 mCacMode =
5430 meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
5431 }
5432
Chien-Yu Chenbc730232017-07-12 14:49:55 -07005433 uint8_t requestedLensShadingMapMode;
5434 // Get the shading map mode.
5435 if (meta.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
5436 mLastRequestedLensShadingMapMode = requestedLensShadingMapMode =
5437 meta.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
5438 } else {
5439 requestedLensShadingMapMode = mLastRequestedLensShadingMapMode;
5440 }
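    /* Requests that omit ANDROID_STATISTICS_LENS_SHADING_MAP_MODE inherit the
     * most recently requested mode via mLastRequestedLensShadingMapMode. */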
5441
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005442 bool hdrPlusRequest = false;
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005443 HdrPlusPendingRequest pendingHdrPlusRequest = {};
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005444
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07005445 {
Chien-Yu Chen77ccd022017-06-23 12:00:36 -07005446 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07005447 // If this request has a still capture intent, try to submit an HDR+ request.
5448 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled &&
5449 mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE) {
5450 hdrPlusRequest = trySubmittingHdrPlusRequestLocked(&pendingHdrPlusRequest, *request, meta);
5451 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005452 }
5453
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005454 if (hdrPlusRequest) {
5455 // For a HDR+ request, just set the frame parameters.
5456 rc = setFrameParameters(request, streamsArray, blob_request, snapshotStreamId);
5457 if (rc < 0) {
5458 LOGE("fail to set frame parameters");
5459 pthread_mutex_unlock(&mMutex);
5460 return rc;
5461 }
5462 } else if(request->input_buffer == NULL) {
Thierry Strudel3d639192016-09-09 11:52:26 -07005463 /* Parse the settings:
5464 * - For every request in NORMAL MODE
5465 * - For every request in HFR mode during preview only case
5466 * - For first request of every batch in HFR mode during video
5467         * recording. In batchmode the same settings except frame number are
5468 * repeated in each request of the batch.
5469 */
5470 if (!mBatchSize ||
5471 (mBatchSize && !isVidBufRequested) ||
5472 (mBatchSize && isVidBufRequested && !mToBeQueuedVidBufs)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005473 rc = setFrameParameters(request, streamsArray, blob_request, snapshotStreamId);
Thierry Strudel3d639192016-09-09 11:52:26 -07005474 if (rc < 0) {
5475 LOGE("fail to set frame parameters");
5476 pthread_mutex_unlock(&mMutex);
5477 return rc;
5478 }
Chien-Yu Chenbc730232017-07-12 14:49:55 -07005479
5480 {
5481 // If HDR+ mode is enabled, override lens shading mode to ON so lens shading map
5482 // will be reported in result metadata.
5483 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
5484 if (mHdrPlusModeEnabled) {
5485 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_LENS_SHADING_MAP_MODE,
5486 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON);
5487 }
5488 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005489 }
5490 /* For batchMode HFR, setFrameParameters is not called for every
5491 * request. But only frame number of the latest request is parsed.
5492 * Keep track of first and last frame numbers in a batch so that
5493         * metadata for the frame numbers of the batch can be duplicated in
5494         * handleBatchMetadata */
5495 if (mBatchSize) {
5496 if (!mToBeQueuedVidBufs) {
5497 //start of the batch
5498 mFirstFrameNumberInBatch = request->frame_number;
5499 }
5500 if(ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5501 CAM_INTF_META_FRAME_NUMBER, request->frame_number)) {
5502 LOGE("Failed to set the frame number in the parameters");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005503 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005504 return BAD_VALUE;
5505 }
5506 }
5507 if (mNeedSensorRestart) {
5508 /* Unlock the mutex as restartSensor waits on the channels to be
5509 * stopped, which in turn calls stream callback functions -
5510 * handleBufferWithLock and handleMetadataWithLock */
5511 pthread_mutex_unlock(&mMutex);
5512 rc = dynamicUpdateMetaStreamInfo();
5513 if (rc != NO_ERROR) {
5514 LOGE("Restarting the sensor failed");
5515 return BAD_VALUE;
5516 }
5517 mNeedSensorRestart = false;
5518 pthread_mutex_lock(&mMutex);
5519 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005520 if(mResetInstantAEC) {
5521 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5522 CAM_INTF_PARM_INSTANT_AEC, (uint8_t)CAM_AEC_NORMAL_CONVERGENCE);
5523 mResetInstantAEC = false;
5524 }
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005525 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07005526 if (request->input_buffer->acquire_fence != -1) {
5527 rc = sync_wait(request->input_buffer->acquire_fence, TIMEOUT_NEVER);
5528 close(request->input_buffer->acquire_fence);
5529 if (rc != OK) {
5530 LOGE("input buffer sync wait failed %d", rc);
5531 pthread_mutex_unlock(&mMutex);
5532 return rc;
5533 }
5534 }
5535 }
5536
5537 if (mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM) {
5538 mLastCustIntentFrmNum = frameNumber;
5539 }
5540 /* Update pending request list and pending buffers map */
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005541 PendingRequestInfo pendingRequest = {};
Thierry Strudel3d639192016-09-09 11:52:26 -07005542 pendingRequestIterator latestRequest;
5543 pendingRequest.frame_number = frameNumber;
Emilian Peev7650c122017-01-19 08:24:33 -08005544 pendingRequest.num_buffers = depthRequestPresent ?
5545 (request->num_output_buffers - 1 ) : request->num_output_buffers;
Thierry Strudel3d639192016-09-09 11:52:26 -07005546 pendingRequest.request_id = request_id;
5547 pendingRequest.blob_request = blob_request;
5548 pendingRequest.timestamp = 0;
Chien-Yu Chenbc730232017-07-12 14:49:55 -07005549 pendingRequest.requestedLensShadingMapMode = requestedLensShadingMapMode;
Thierry Strudel3d639192016-09-09 11:52:26 -07005550 if (request->input_buffer) {
5551 pendingRequest.input_buffer =
5552 (camera3_stream_buffer_t*)malloc(sizeof(camera3_stream_buffer_t));
5553 *(pendingRequest.input_buffer) = *(request->input_buffer);
5554 pInputBuffer = pendingRequest.input_buffer;
5555 } else {
5556 pendingRequest.input_buffer = NULL;
5557 pInputBuffer = NULL;
5558 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005559 pendingRequest.bUseFirstPartial = (mState == CONFIGURED && !request->input_buffer);
Thierry Strudel3d639192016-09-09 11:52:26 -07005560
5561 pendingRequest.pipeline_depth = 0;
5562 pendingRequest.partial_result_cnt = 0;
5563 extractJpegMetadata(mCurJpegMeta, request);
5564 pendingRequest.jpegMetadata = mCurJpegMeta;
5565 pendingRequest.settings = saveRequestSettings(mCurJpegMeta, request);
Thierry Strudel3d639192016-09-09 11:52:26 -07005566 pendingRequest.capture_intent = mCaptureIntent;
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07005567 if (meta.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
Shuzhen Wang77b049a2017-08-30 12:24:36 -07005568 pendingRequest.hybrid_ae_enable =
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07005569 meta.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8[0];
5570 }
Chien-Yu Chen98b126c2017-03-14 14:55:32 -07005571
Samuel Ha68ba5172016-12-15 18:41:12 -08005572 /* DevCamDebug metadata processCaptureRequest */
5573 if (meta.exists(DEVCAMDEBUG_META_ENABLE)) {
5574 mDevCamDebugMetaEnable =
5575 meta.find(DEVCAMDEBUG_META_ENABLE).data.u8[0];
5576 }
5577 pendingRequest.DevCamDebug_meta_enable = mDevCamDebugMetaEnable;
5578 /* DevCamDebug metadata end */
Thierry Strudel3d639192016-09-09 11:52:26 -07005579
5580 //extract CAC info
5581 if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
5582 mCacMode =
5583 meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
5584 }
5585 pendingRequest.fwkCacMode = mCacMode;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005586 pendingRequest.hdrplus = hdrPlusRequest;
Emilian Peev30522a12017-08-03 14:36:33 +01005587 pendingRequest.expectedFrameDuration = mExpectedFrameDuration;
5588 mExpectedInflightDuration += mExpectedFrameDuration;
Thierry Strudel3d639192016-09-09 11:52:26 -07005589
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07005590 // extract enableZsl info
5591 if (gExposeEnableZslKey) {
5592 if (meta.exists(ANDROID_CONTROL_ENABLE_ZSL)) {
5593 pendingRequest.enableZsl = meta.find(ANDROID_CONTROL_ENABLE_ZSL).data.u8[0];
5594 mZslEnabled = pendingRequest.enableZsl;
5595 } else {
5596 pendingRequest.enableZsl = mZslEnabled;
5597 }
5598 }
5599
Thierry Strudel3d639192016-09-09 11:52:26 -07005600 PendingBuffersInRequest bufsForCurRequest;
5601 bufsForCurRequest.frame_number = frameNumber;
5602 // Mark current timestamp for the new request
5603 bufsForCurRequest.timestamp = systemTime(CLOCK_MONOTONIC);
Binhao Lin09245482017-08-31 18:25:29 -07005604 bufsForCurRequest.av_timestamp = 0;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005605 bufsForCurRequest.hdrplus = hdrPlusRequest;
Thierry Strudel3d639192016-09-09 11:52:26 -07005606
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005607 if (hdrPlusRequest) {
5608 // Save settings for this request.
5609 pendingHdrPlusRequest.settings = std::make_shared<metadata_buffer_t>();
5610 memcpy(pendingHdrPlusRequest.settings.get(), mParameters, sizeof(metadata_buffer_t));
5611
5612 // Add to pending HDR+ request queue.
5613 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5614 mHdrPlusPendingRequests.emplace(frameNumber, pendingHdrPlusRequest);
5615
5616 ALOGD("%s: frame number %u is an HDR+ request.", __FUNCTION__, frameNumber);
5617 }
5618
Thierry Strudel3d639192016-09-09 11:52:26 -07005619 for (size_t i = 0; i < request->num_output_buffers; i++) {
Emilian Peev0f3c3162017-03-15 12:57:46 +00005620 if ((request->output_buffers[i].stream->data_space ==
5621 HAL_DATASPACE_DEPTH) &&
5622 (HAL_PIXEL_FORMAT_BLOB ==
5623 request->output_buffers[i].stream->format)) {
Emilian Peev7650c122017-01-19 08:24:33 -08005624 continue;
5625 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005626 RequestedBufferInfo requestedBuf;
5627 memset(&requestedBuf, 0, sizeof(requestedBuf));
5628 requestedBuf.stream = request->output_buffers[i].stream;
5629 requestedBuf.buffer = NULL;
5630 pendingRequest.buffers.push_back(requestedBuf);
5631
5632 // Add to buffer handle the pending buffers list
5633 PendingBufferInfo bufferInfo;
5634 bufferInfo.buffer = request->output_buffers[i].buffer;
5635 bufferInfo.stream = request->output_buffers[i].stream;
5636 bufsForCurRequest.mPendingBufferList.push_back(bufferInfo);
5637 QCamera3Channel *channel = (QCamera3Channel *)bufferInfo.stream->priv;
5638 LOGD("frame = %d, buffer = %p, streamTypeMask = %d, stream format = %d",
5639 frameNumber, bufferInfo.buffer,
5640 channel->getStreamTypeMask(), bufferInfo.stream->format);
5641 }
5642 // Add this request packet into mPendingBuffersMap
5643 mPendingBuffersMap.mPendingBuffersInRequest.push_back(bufsForCurRequest);
5644 LOGD("mPendingBuffersMap.num_overall_buffers = %d",
5645 mPendingBuffersMap.get_num_overall_buffers());
5646
5647 latestRequest = mPendingRequestsList.insert(
5648 mPendingRequestsList.end(), pendingRequest);
Chien-Yu Chen3f303522017-05-19 15:21:45 -07005649
5650 // Let shutter dispatcher and buffer dispatcher know shutter and output buffers are expected
5651 // for the frame number.
Chien-Yu Chena7f98612017-06-20 16:54:10 -07005652 mShutterDispatcher.expectShutter(frameNumber, request->input_buffer != nullptr);
Chien-Yu Chen3f303522017-05-19 15:21:45 -07005653 for (size_t i = 0; i < request->num_output_buffers; i++) {
5654 mOutputBufferDispatcher.expectBuffer(frameNumber, request->output_buffers[i].stream);
5655 }
5656
Thierry Strudel3d639192016-09-09 11:52:26 -07005657 if(mFlush) {
5658 LOGI("mFlush is true");
5659 pthread_mutex_unlock(&mMutex);
5660 return NO_ERROR;
5661 }
5662
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005663 // If this is not an HDR+ request, send the request to metadata and each output buffer's
5664 // channel.
5665 if (!hdrPlusRequest) {
5666 int indexUsed;
5667 // Notify metadata channel we receive a request
5668 mMetadataChannel->request(NULL, frameNumber, indexUsed);
Thierry Strudel3d639192016-09-09 11:52:26 -07005669
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005670 if(request->input_buffer != NULL){
5671 LOGD("Input request, frame_number %d", frameNumber);
5672 rc = setReprocParameters(request, &mReprocMeta, snapshotStreamId);
5673 if (NO_ERROR != rc) {
5674 LOGE("fail to set reproc parameters");
5675 pthread_mutex_unlock(&mMutex);
5676 return rc;
5677 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005678 }
5679
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005680 // Call request on other streams
5681 uint32_t streams_need_metadata = 0;
5682 pendingBufferIterator pendingBufferIter = latestRequest->buffers.begin();
5683 for (size_t i = 0; i < request->num_output_buffers; i++) {
5684 const camera3_stream_buffer_t& output = request->output_buffers[i];
5685 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
5686
5687 if (channel == NULL) {
5688 LOGW("invalid channel pointer for stream");
5689 continue;
5690 }
5691
5692 if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
5693 LOGD("snapshot request with output buffer %p, input buffer %p, frame_number %d",
5694 output.buffer, request->input_buffer, frameNumber);
5695 if(request->input_buffer != NULL){
Thierry Strudel3d639192016-09-09 11:52:26 -07005696 rc = channel->request(output.buffer, frameNumber,
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005697 pInputBuffer, &mReprocMeta, indexUsed, false, false);
5698 if (rc < 0) {
5699 LOGE("Fail to request on picture channel");
5700 pthread_mutex_unlock(&mMutex);
5701 return rc;
5702 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005703 } else {
Emilian Peev7650c122017-01-19 08:24:33 -08005704 if (HAL_DATASPACE_DEPTH == output.stream->data_space) {
5705 assert(NULL != mDepthChannel);
5706 assert(mDepthChannel == output.stream->priv);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005707
Emilian Peev7650c122017-01-19 08:24:33 -08005708 rc = mDepthChannel->mapBuffer(output.buffer, request->frame_number);
5709 if (rc < 0) {
5710 LOGE("Fail to map on depth buffer");
5711 pthread_mutex_unlock(&mMutex);
5712 return rc;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005713 }
Emilian Peev4e0fe952017-06-30 12:40:09 -07005714 continue;
Emilian Peev7650c122017-01-19 08:24:33 -08005715 } else {
5716 LOGD("snapshot request with buffer %p, frame_number %d",
5717 output.buffer, frameNumber);
5718 if (!request->settings) {
5719 rc = channel->request(output.buffer, frameNumber,
5720 NULL, mPrevParameters, indexUsed);
5721 } else {
5722 rc = channel->request(output.buffer, frameNumber,
5723 NULL, mParameters, indexUsed);
5724 }
5725 if (rc < 0) {
5726 LOGE("Fail to request on picture channel");
5727 pthread_mutex_unlock(&mMutex);
5728 return rc;
5729 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005730
Emilian Peev7650c122017-01-19 08:24:33 -08005731 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5732 uint32_t j = 0;
5733 for (j = 0; j < streamsArray.num_streams; j++) {
5734 if (streamsArray.stream_request[j].streamID == streamId) {
5735 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5736 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5737 else
5738 streamsArray.stream_request[j].buf_index = indexUsed;
5739 break;
5740 }
5741 }
5742 if (j == streamsArray.num_streams) {
5743 LOGE("Did not find matching stream to update index");
5744 assert(0);
5745 }
5746
5747 pendingBufferIter->need_metadata = true;
5748 streams_need_metadata++;
5749 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005750 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005751 } else if (output.stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
5752 bool needMetadata = false;
5753 QCamera3YUVChannel *yuvChannel = (QCamera3YUVChannel *)channel;
5754 rc = yuvChannel->request(output.buffer, frameNumber,
5755 pInputBuffer, (pInputBuffer ? &mReprocMeta : mParameters),
5756 needMetadata, indexUsed, false, false);
Thierry Strudel3d639192016-09-09 11:52:26 -07005757 if (rc < 0) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005758 LOGE("Fail to request on YUV channel");
Thierry Strudel3d639192016-09-09 11:52:26 -07005759 pthread_mutex_unlock(&mMutex);
5760 return rc;
5761 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005762
5763 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5764 uint32_t j = 0;
5765 for (j = 0; j < streamsArray.num_streams; j++) {
5766 if (streamsArray.stream_request[j].streamID == streamId) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005767 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5768 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5769 else
5770 streamsArray.stream_request[j].buf_index = indexUsed;
5771 break;
5772 }
5773 }
5774 if (j == streamsArray.num_streams) {
5775 LOGE("Did not find matching stream to update index");
5776 assert(0);
5777 }
5778
5779 pendingBufferIter->need_metadata = needMetadata;
5780 if (needMetadata)
5781 streams_need_metadata += 1;
5782 LOGD("calling YUV channel request, need_metadata is %d",
5783 needMetadata);
5784 } else {
5785 LOGD("request with buffer %p, frame_number %d",
5786 output.buffer, frameNumber);
5787
5788 rc = channel->request(output.buffer, frameNumber, indexUsed);
5789
5790 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5791 uint32_t j = 0;
5792 for (j = 0; j < streamsArray.num_streams; j++) {
5793 if (streamsArray.stream_request[j].streamID == streamId) {
5794 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5795 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5796 else
5797 streamsArray.stream_request[j].buf_index = indexUsed;
5798 break;
5799 }
5800 }
5801 if (j == streamsArray.num_streams) {
5802 LOGE("Did not find matching stream to update index");
5803 assert(0);
5804 }
5805
5806 if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
5807 && mBatchSize) {
5808 mToBeQueuedVidBufs++;
5809 if (mToBeQueuedVidBufs == mBatchSize) {
5810 channel->queueBatchBuf();
5811 }
5812 }
5813 if (rc < 0) {
5814 LOGE("request failed");
5815 pthread_mutex_unlock(&mMutex);
5816 return rc;
5817 }
5818 }
5819 pendingBufferIter++;
5820 }
5821
5822 for (auto itr = internallyRequestedStreams.begin(); itr != internallyRequestedStreams.end();
5823 itr++) {
5824 QCamera3Channel *channel = (QCamera3Channel *)(*itr).stream->priv;
5825
5826 if (channel == NULL) {
5827 LOGE("invalid channel pointer for stream");
5828 assert(0);
Shuzhen Wang3a1b92d2017-08-09 13:39:47 -07005829 pthread_mutex_unlock(&mMutex);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005830 return BAD_VALUE;
5831 }
5832
5833 InternalRequest requestedStream;
5834 requestedStream = (*itr);
5835
5836
5837 if ((*itr).stream->format == HAL_PIXEL_FORMAT_BLOB) {
5838 LOGD("snapshot request internally input buffer %p, frame_number %d",
5839 request->input_buffer, frameNumber);
5840 if(request->input_buffer != NULL){
5841 rc = channel->request(NULL, frameNumber,
5842 pInputBuffer, &mReprocMeta, indexUsed, true,
5843 requestedStream.meteringOnly);
5844 if (rc < 0) {
5845 LOGE("Fail to request on picture channel");
5846 pthread_mutex_unlock(&mMutex);
5847 return rc;
5848 }
5849 } else {
5850 LOGD("snapshot request with frame_number %d", frameNumber);
5851 if (!request->settings) {
5852 rc = channel->request(NULL, frameNumber,
5853 NULL, mPrevParameters, indexUsed, true,
5854 requestedStream.meteringOnly);
5855 } else {
5856 rc = channel->request(NULL, frameNumber,
5857 NULL, mParameters, indexUsed, true, requestedStream.meteringOnly);
5858 }
5859 if (rc < 0) {
5860 LOGE("Fail to request on picture channel");
5861 pthread_mutex_unlock(&mMutex);
5862 return rc;
5863 }
5864
5865 if ((*itr).meteringOnly != 1) {
5866 requestedStream.need_metadata = 1;
5867 streams_need_metadata++;
5868 }
5869 }
5870
5871 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5872 uint32_t j = 0;
5873 for (j = 0; j < streamsArray.num_streams; j++) {
5874 if (streamsArray.stream_request[j].streamID == streamId) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005875 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5876 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5877 else
5878 streamsArray.stream_request[j].buf_index = indexUsed;
5879 break;
5880 }
5881 }
5882 if (j == streamsArray.num_streams) {
5883 LOGE("Did not find matching stream to update index");
5884 assert(0);
5885 }
5886
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005887 } else {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005888 LOGE("Internal requests not supported on this stream type");
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005889 assert(0);
Shuzhen Wang3a1b92d2017-08-09 13:39:47 -07005890 pthread_mutex_unlock(&mMutex);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005891 return INVALID_OPERATION;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005892 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005893 latestRequest->internalRequestList.push_back(requestedStream);
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005894 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005895
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005896 //If 2 streams have need_metadata set to true, fail the request, unless
5897 //we copy/reference count the metadata buffer
5898 if (streams_need_metadata > 1) {
5899        LOGE("not supporting request in which two streams require"
5900 " 2 HAL metadata for reprocessing");
5901 pthread_mutex_unlock(&mMutex);
5902 return -EINVAL;
5903 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005904
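    /* When a depth channel exists, PD data defaults to SKIP and is fully
     * enabled only if the request (or an earlier one, cached in
     * mDepthCloudMode) explicitly asked for it. */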
Emilian Peev656e4fa2017-06-02 16:47:04 +01005905 cam_sensor_pd_data_t pdafEnable = (nullptr != mDepthChannel) ?
5906 CAM_PD_DATA_SKIP : CAM_PD_DATA_DISABLED;
5907 if (depthRequestPresent && mDepthChannel) {
5908 if (request->settings) {
5909 camera_metadata_ro_entry entry;
5910 if (find_camera_metadata_ro_entry(request->settings,
5911 NEXUS_EXPERIMENTAL_2017_PD_DATA_ENABLE, &entry) == 0) {
5912 if (entry.data.u8[0]) {
5913 pdafEnable = CAM_PD_DATA_ENABLED;
5914 } else {
5915 pdafEnable = CAM_PD_DATA_SKIP;
5916 }
5917 mDepthCloudMode = pdafEnable;
5918 } else {
5919 pdafEnable = mDepthCloudMode;
5920 }
5921 } else {
5922 pdafEnable = mDepthCloudMode;
5923 }
5924 }
5925
Emilian Peev7650c122017-01-19 08:24:33 -08005926 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5927 CAM_INTF_META_PDAF_DATA_ENABLE, pdafEnable)) {
5928 LOGE("%s: Failed to enable PDAF data in parameters!", __func__);
5929 pthread_mutex_unlock(&mMutex);
5930 return BAD_VALUE;
5931 }
Emilian Peev656e4fa2017-06-02 16:47:04 +01005932
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005933 if (request->input_buffer == NULL) {
5934 /* Set the parameters to backend:
5935 * - For every request in NORMAL MODE
5936 * - For every request in HFR mode during preview only case
5937 * - Once every batch in HFR mode during video recording
5938 */
5939 if (!mBatchSize ||
5940 (mBatchSize && !isVidBufRequested) ||
5941 (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize))) {
5942 LOGD("set_parms batchSz: %d IsVidBufReq: %d vidBufTobeQd: %d ",
5943 mBatchSize, isVidBufRequested,
5944 mToBeQueuedVidBufs);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005945
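            /* Last request of a batch: merge the stream IDs accumulated over the
             * batch into the array sent to the backend so that every stream
             * requested during the batch is covered by this set_parms call. */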
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005946 if(mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize)) {
5947 for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
5948 uint32_t m = 0;
5949 for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
5950 if (streamsArray.stream_request[k].streamID ==
5951 mBatchedStreamsArray.stream_request[m].streamID)
5952 break;
5953 }
5954 if (m == mBatchedStreamsArray.num_streams) {
5955 mBatchedStreamsArray.stream_request\
5956 [mBatchedStreamsArray.num_streams].streamID =
5957 streamsArray.stream_request[k].streamID;
5958 mBatchedStreamsArray.stream_request\
5959 [mBatchedStreamsArray.num_streams].buf_index =
5960 streamsArray.stream_request[k].buf_index;
5961 mBatchedStreamsArray.num_streams =
5962 mBatchedStreamsArray.num_streams + 1;
5963 }
5964 }
5965 streamsArray = mBatchedStreamsArray;
5966 }
5967 /* Update stream id of all the requested buffers */
5968 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID,
5969 streamsArray)) {
5970 LOGE("Failed to set stream type mask in the parameters");
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005971 pthread_mutex_unlock(&mMutex);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005972 return BAD_VALUE;
5973 }
5974
5975 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
5976 mParameters);
5977 if (rc < 0) {
5978 LOGE("set_parms failed");
5979 }
5980            /* reset to zero because the batch is queued */
5981 mToBeQueuedVidBufs = 0;
5982 mPendingBatchMap.add(frameNumber, mFirstFrameNumberInBatch);
5983 memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
5984 } else if (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs != mBatchSize)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005985 for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
5986 uint32_t m = 0;
5987 for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
5988 if (streamsArray.stream_request[k].streamID ==
5989 mBatchedStreamsArray.stream_request[m].streamID)
5990 break;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005991 }
5992 if (m == mBatchedStreamsArray.num_streams) {
5993 mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].
5994 streamID = streamsArray.stream_request[k].streamID;
5995 mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].
5996 buf_index = streamsArray.stream_request[k].buf_index;
5997 mBatchedStreamsArray.num_streams = mBatchedStreamsArray.num_streams + 1;
5998 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005999 }
6000 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08006001 mPendingLiveRequest++;
Shuzhen Wang3c077d72017-04-20 22:48:59 -07006002
6003 // Start all streams after the first setting is sent, so that the
6004 // setting can be applied sooner: (0 + apply_delay)th frame.
6005 if (mState == CONFIGURED && mChannelHandle) {
6006 //Then start them.
6007 LOGH("Start META Channel");
6008 rc = mMetadataChannel->start();
6009 if (rc < 0) {
6010 LOGE("META channel start failed");
6011 pthread_mutex_unlock(&mMutex);
6012 return rc;
6013 }
6014
6015 if (mAnalysisChannel) {
6016 rc = mAnalysisChannel->start();
6017 if (rc < 0) {
6018 LOGE("Analysis channel start failed");
6019 mMetadataChannel->stop();
6020 pthread_mutex_unlock(&mMutex);
6021 return rc;
6022 }
6023 }
6024
6025 if (mSupportChannel) {
6026 rc = mSupportChannel->start();
6027 if (rc < 0) {
6028 LOGE("Support channel start failed");
6029 mMetadataChannel->stop();
6030 /* Although support and analysis are mutually exclusive today
6031                        adding it in any case for future proofing */
6032 if (mAnalysisChannel) {
6033 mAnalysisChannel->stop();
6034 }
6035 pthread_mutex_unlock(&mMutex);
6036 return rc;
6037 }
6038 }
6039 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6040 it != mStreamInfo.end(); it++) {
6041 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
6042 LOGH("Start Processing Channel mask=%d",
6043 channel->getStreamTypeMask());
6044 rc = channel->start();
6045 if (rc < 0) {
6046 LOGE("channel start failed");
6047 pthread_mutex_unlock(&mMutex);
6048 return rc;
6049 }
6050 }
6051
6052 if (mRawDumpChannel) {
6053 LOGD("Starting raw dump stream");
6054 rc = mRawDumpChannel->start();
6055 if (rc != NO_ERROR) {
6056 LOGE("Error Starting Raw Dump Channel");
6057 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6058 it != mStreamInfo.end(); it++) {
6059 QCamera3Channel *channel =
6060 (QCamera3Channel *)(*it)->stream->priv;
6061 LOGH("Stopping Processing Channel mask=%d",
6062 channel->getStreamTypeMask());
6063 channel->stop();
6064 }
6065 if (mSupportChannel)
6066 mSupportChannel->stop();
6067 if (mAnalysisChannel) {
6068 mAnalysisChannel->stop();
6069 }
6070 mMetadataChannel->stop();
6071 pthread_mutex_unlock(&mMutex);
6072 return rc;
6073 }
6074 }
6075
Chien-Yu Chence5b8662017-05-09 17:17:17 -07006076 // Configure modules for stream on.
Chien-Yu Chen153c5172017-09-08 11:33:19 -07006077 rc = startChannelLocked();
Shuzhen Wang3c077d72017-04-20 22:48:59 -07006078 if (rc != NO_ERROR) {
Chien-Yu Chen153c5172017-09-08 11:33:19 -07006079 LOGE("startChannelLocked failed %d", rc);
Chien-Yu Chence5b8662017-05-09 17:17:17 -07006080 pthread_mutex_unlock(&mMutex);
6081 return rc;
6082 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07006083 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006084 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006085 }
6086
Chien-Yu Chenfe023bc2017-05-12 17:19:26 -07006087 // Enable HDR+ mode for the first PREVIEW_INTENT request.
Chenjie Luo4a761802017-06-13 17:35:54 +00006088 {
Chien-Yu Chen77ccd022017-06-23 12:00:36 -07006089 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
Chien-Yu Chend77a5462017-06-02 18:00:38 -07006090 if (gEaselManagerClient != nullptr && gEaselManagerClient->isEaselPresentOnDevice() &&
Chien-Yu Chenfe023bc2017-05-12 17:19:26 -07006091 !gEaselBypassOnly && !mFirstPreviewIntentSeen &&
6092 meta.exists(ANDROID_CONTROL_CAPTURE_INTENT) &&
6093 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0] ==
6094 ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW) {
Chien-Yu Chendeaebad2017-06-30 11:46:34 -07006095
6096 if (isSessionHdrPlusModeCompatible()) {
6097 rc = enableHdrPlusModeLocked();
6098 if (rc != OK) {
6099 LOGE("%s: Failed to open HDR+ asynchronously", __FUNCTION__);
6100 pthread_mutex_unlock(&mMutex);
6101 return rc;
6102 }
Chien-Yu Chenfe023bc2017-05-12 17:19:26 -07006103 }
6104
6105 mFirstPreviewIntentSeen = true;
6106 }
6107 }
6108
Thierry Strudel3d639192016-09-09 11:52:26 -07006109 LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
6110
6111 mState = STARTED;
6112 // Added a timed condition wait
6113 struct timespec ts;
6114 uint8_t isValidTimeout = 1;
Shuzhen Wangfb961e52016-11-28 11:48:02 -08006115 rc = clock_gettime(CLOCK_MONOTONIC, &ts);
Thierry Strudel3d639192016-09-09 11:52:26 -07006116 if (rc < 0) {
6117 isValidTimeout = 0;
6118 LOGE("Error reading the real time clock!!");
6119 }
6120 else {
6121 // Make timeout as 5 sec for request to be honored
Chien-Yu Chene687bd02016-12-07 18:30:26 -08006122 int64_t timeout = 5;
6123 {
6124 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
6125 // If there is a pending HDR+ request, the following requests may be blocked until the
6126 // HDR+ request is done. So allow a longer timeout.
6127 if (mHdrPlusPendingRequests.size() > 0) {
6128 timeout = MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT;
6129 }
6130 }
6131 ts.tv_sec += timeout;
Thierry Strudel3d639192016-09-09 11:52:26 -07006132 }
6133 //Block on conditional variable
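    /* Throttle the caller: wait until the number of in-flight requests drops
     * below mMinInFlightRequests, or until the backend wakes us and we are
     * below mMaxInFlightRequests, bounded by the timeout computed above. */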
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006134 while ((mPendingLiveRequest >= mMinInFlightRequests) && !pInputBuffer &&
Thierry Strudel3d639192016-09-09 11:52:26 -07006135 (mState != ERROR) && (mState != DEINIT)) {
6136 if (!isValidTimeout) {
6137 LOGD("Blocking on conditional wait");
6138 pthread_cond_wait(&mRequestCond, &mMutex);
6139 }
6140 else {
6141 LOGD("Blocking on timed conditional wait");
6142 rc = pthread_cond_timedwait(&mRequestCond, &mMutex, &ts);
6143 if (rc == ETIMEDOUT) {
6144 rc = -ENODEV;
6145 LOGE("Unblocked on timeout!!!!");
6146 break;
6147 }
6148 }
6149 LOGD("Unblocked");
6150 if (mWokenUpByDaemon) {
6151 mWokenUpByDaemon = false;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006152 if (mPendingLiveRequest < mMaxInFlightRequests)
Thierry Strudel3d639192016-09-09 11:52:26 -07006153 break;
6154 }
6155 }
6156 pthread_mutex_unlock(&mMutex);
6157
6158 return rc;
6159}
6160
Chien-Yu Chen153c5172017-09-08 11:33:19 -07006161int32_t QCamera3HardwareInterface::startChannelLocked()
6162{
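    /* Channel start is done in two phases: start_channel() configures the
     * pipeline without streaming, Easel MIPI is brought up in between, and
     * start_sensor_streaming() finally turns on the sensor output. */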
6163 // Configure modules for stream on.
6164 int32_t rc = mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
6165 mChannelHandle, /*start_sensor_streaming*/false);
6166 if (rc != NO_ERROR) {
6167 LOGE("start_channel failed %d", rc);
6168 return rc;
6169 }
6170
6171 {
6172 // Configure Easel for stream on.
6173 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
6174
6175 // Now that sensor mode should have been selected, get the selected sensor mode
6176 // info.
6177 memset(&mSensorModeInfo, 0, sizeof(mSensorModeInfo));
6178 getCurrentSensorModeInfo(mSensorModeInfo);
6179
6180 if (EaselManagerClientOpened) {
6181 logEaselEvent("EASEL_STARTUP_LATENCY", "Starting MIPI");
6182 rc = gEaselManagerClient->startMipi(mCameraId, mSensorModeInfo.op_pixel_clk,
6183 /*enableCapture*/true);
6184 if (rc != OK) {
6185 ALOGE("%s: Failed to start MIPI rate for camera %u to %u", __FUNCTION__,
6186 mCameraId, mSensorModeInfo.op_pixel_clk);
6187 return rc;
6188 }
6189 logEaselEvent("EASEL_STARTUP_LATENCY", "Starting MIPI done");
6190 mEaselMipiStarted = true;
6191 }
6192 }
6193
6194 // Start sensor streaming.
6195 rc = mCameraHandle->ops->start_sensor_streaming(mCameraHandle->camera_handle,
6196 mChannelHandle);
6197 if (rc != NO_ERROR) {
6198        LOGE("start_sensor_streaming failed %d", rc);
6199 return rc;
6200 }
6201
6202 return 0;
6203}
6204
6205void QCamera3HardwareInterface::stopChannelLocked(bool stopChannelImmediately)
6206{
6207 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
6208 mChannelHandle, stopChannelImmediately);
6209
6210 {
6211 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
6212 if (EaselManagerClientOpened && mEaselMipiStarted) {
6213 int32_t rc = gEaselManagerClient->stopMipi(mCameraId);
6214 if (rc != 0) {
6215 ALOGE("%s: Stopping MIPI failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
6216 }
6217 mEaselMipiStarted = false;
6218 }
6219 }
6220}
6221
Thierry Strudel3d639192016-09-09 11:52:26 -07006222/*===========================================================================
6223 * FUNCTION : dump
6224 *
6225 * DESCRIPTION:
6226 *
6227 * PARAMETERS :
6228 *
6229 *
6230 * RETURN :
6231 *==========================================================================*/
6232void QCamera3HardwareInterface::dump(int fd)
6233{
6234 pthread_mutex_lock(&mMutex);
6235 dprintf(fd, "\n Camera HAL3 information Begin \n");
6236
6237 dprintf(fd, "\nNumber of pending requests: %zu \n",
6238 mPendingRequestsList.size());
6239 dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
6240 dprintf(fd, " Frame | Number of Buffers | Req Id: | Blob Req | Input buffer present\n");
6241 dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
6242 for(pendingRequestIterator i = mPendingRequestsList.begin();
6243 i != mPendingRequestsList.end(); i++) {
6244 dprintf(fd, " %5d | %17d | %11d | %8d | %p \n",
6245 i->frame_number, i->num_buffers, i->request_id, i->blob_request,
6246 i->input_buffer);
6247 }
6248 dprintf(fd, "\nPending buffer map: Number of buffers: %u\n",
6249 mPendingBuffersMap.get_num_overall_buffers());
6250 dprintf(fd, "-------+------------------\n");
6251 dprintf(fd, " Frame | Stream type mask \n");
6252 dprintf(fd, "-------+------------------\n");
6253 for(auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
6254 for(auto &j : req.mPendingBufferList) {
6255 QCamera3Channel *channel = (QCamera3Channel *)(j.stream->priv);
6256 dprintf(fd, " %5d | %11d \n",
6257 req.frame_number, channel->getStreamTypeMask());
6258 }
6259 }
6260 dprintf(fd, "-------+------------------\n");
6261
6262 dprintf(fd, "\nPending frame drop list: %zu\n",
6263 mPendingFrameDropList.size());
6264 dprintf(fd, "-------+-----------\n");
6265 dprintf(fd, " Frame | Stream ID \n");
6266 dprintf(fd, "-------+-----------\n");
6267 for(List<PendingFrameDropInfo>::iterator i = mPendingFrameDropList.begin();
6268 i != mPendingFrameDropList.end(); i++) {
6269 dprintf(fd, " %5d | %9d \n",
6270 i->frame_number, i->stream_ID);
6271 }
6272 dprintf(fd, "-------+-----------\n");
6273
6274 dprintf(fd, "\n Camera HAL3 information End \n");
6275
6276 /* use dumpsys media.camera as trigger to send update debug level event */
6277 mUpdateDebugLevel = true;
6278 pthread_mutex_unlock(&mMutex);
6279 return;
6280}
6281
6282/*===========================================================================
6283 * FUNCTION : flush
6284 *
6285 * DESCRIPTION: Calls stopAllChannels, notifyErrorForPendingRequests and
6286 * conditionally restarts channels
6287 *
6288 * PARAMETERS :
6289 * @ restartChannels: re-start all channels
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -07006290 * @ stopChannelImmediately: stop the channel immediately. This should be used
6291 * when device encountered an error and MIPI may has
6292 * been stopped.
Thierry Strudel3d639192016-09-09 11:52:26 -07006293 *
6294 * RETURN :
6295 * 0 on success
6296 * Error code on failure
6297 *==========================================================================*/
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -07006298int QCamera3HardwareInterface::flush(bool restartChannels, bool stopChannelImmediately)
Thierry Strudel3d639192016-09-09 11:52:26 -07006299{
Thierry Strudel9ec39c62016-12-28 11:30:05 -08006300 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_STOP_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07006301 int32_t rc = NO_ERROR;
6302
6303 LOGD("Unblocking Process Capture Request");
6304 pthread_mutex_lock(&mMutex);
6305 mFlush = true;
6306 pthread_mutex_unlock(&mMutex);
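    /* processCaptureRequest checks mFlush and returns incoming requests early
     * while this flush is in progress. */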
6307
Chien-Yu Chen11c8edc2017-09-11 20:54:24 -07006308    // Disable HDR+ if it's enabled.
6309 {
6310 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
6311 finishHdrPlusClientOpeningLocked(l);
6312 disableHdrPlusModeLocked();
6313 }
6314
Thierry Strudel3d639192016-09-09 11:52:26 -07006315 rc = stopAllChannels();
6316 // unlink of dualcam
6317 if (mIsDeviceLinked) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006318 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
6319 &m_pDualCamCmdPtr->bundle_info;
6320 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
Thierry Strudel3d639192016-09-09 11:52:26 -07006321 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
6322 pthread_mutex_lock(&gCamLock);
6323
6324 if (mIsMainCamera == 1) {
6325 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
6326 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
Thierry Strudel269c81a2016-10-12 12:13:59 -07006327 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel3d639192016-09-09 11:52:26 -07006328 // related session id should be session id of linked session
6329 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
6330 } else {
6331 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
6332 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
Thierry Strudel269c81a2016-10-12 12:13:59 -07006333 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel3d639192016-09-09 11:52:26 -07006334 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
6335 }
Thierry Strudel2896d122017-02-23 19:18:03 -08006336 m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
Thierry Strudel3d639192016-09-09 11:52:26 -07006337 pthread_mutex_unlock(&gCamLock);
6338
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006339 rc = mCameraHandle->ops->set_dual_cam_cmd(
6340 mCameraHandle->camera_handle);
Thierry Strudel3d639192016-09-09 11:52:26 -07006341 if (rc < 0) {
6342 LOGE("Dualcam: Unlink failed, but still proceed to close");
6343 }
6344 }
6345
6346 if (rc < 0) {
6347 LOGE("stopAllChannels failed");
6348 return rc;
6349 }
6350 if (mChannelHandle) {
Chien-Yu Chen153c5172017-09-08 11:33:19 -07006351 stopChannelLocked(stopChannelImmediately);
Thierry Strudel3d639192016-09-09 11:52:26 -07006352 }
6353
6354 // Reset bundle info
6355 rc = setBundleInfo();
6356 if (rc < 0) {
6357 LOGE("setBundleInfo failed %d", rc);
6358 return rc;
6359 }
6360
6361 // Mutex Lock
6362 pthread_mutex_lock(&mMutex);
6363
6364 // Unblock process_capture_request
6365 mPendingLiveRequest = 0;
6366 pthread_cond_signal(&mRequestCond);
6367
6368 rc = notifyErrorForPendingRequests();
6369 if (rc < 0) {
6370 LOGE("notifyErrorForPendingRequests failed");
6371 pthread_mutex_unlock(&mMutex);
6372 return rc;
6373 }
6374
6375 mFlush = false;
6376
6377 // Start the Streams/Channels
6378 if (restartChannels) {
6379 rc = startAllChannels();
6380 if (rc < 0) {
6381 LOGE("startAllChannels failed");
6382 pthread_mutex_unlock(&mMutex);
6383 return rc;
6384 }
Thierry Strudel2896d122017-02-23 19:18:03 -08006385 if (mChannelHandle) {
Chien-Yu Chen153c5172017-09-08 11:33:19 -07006386 // Configure modules for stream on.
6387 rc = startChannelLocked();
Thierry Strudel2896d122017-02-23 19:18:03 -08006388 if (rc < 0) {
Chien-Yu Chen153c5172017-09-08 11:33:19 -07006389 LOGE("startChannelLocked failed");
Thierry Strudel2896d122017-02-23 19:18:03 -08006390 pthread_mutex_unlock(&mMutex);
6391 return rc;
6392 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006393 }
6394 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006395 pthread_mutex_unlock(&mMutex);
6396
6397 return 0;
6398}
6399
6400/*===========================================================================
6401 * FUNCTION : flushPerf
6402 *
6403 * DESCRIPTION: This is the performance optimization version of flush that does
6404 * not use stream off, rather flushes the system
6405 *
6406 * PARAMETERS :
6407 *
6408 *
6409 * RETURN : 0 : success
6410 * -EINVAL: input is malformed (device is not valid)
6411 * -ENODEV: if the device has encountered a serious error
6412 *==========================================================================*/
6413int QCamera3HardwareInterface::flushPerf()
6414{
Thierry Strudel9ec39c62016-12-28 11:30:05 -08006415 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_STOP_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07006416 int32_t rc = 0;
6417 struct timespec timeout;
6418 bool timed_wait = false;
6419
6420 pthread_mutex_lock(&mMutex);
6421 mFlushPerf = true;
6422 mPendingBuffersMap.numPendingBufsAtFlush =
6423 mPendingBuffersMap.get_num_overall_buffers();
6424 LOGD("Calling flush. Wait for %d buffers to return",
6425 mPendingBuffersMap.numPendingBufsAtFlush);
6426
6427 /* send the flush event to the backend */
6428 rc = mCameraHandle->ops->flush(mCameraHandle->camera_handle);
6429 if (rc < 0) {
6430 LOGE("Error in flush: IOCTL failure");
6431 mFlushPerf = false;
6432 pthread_mutex_unlock(&mMutex);
6433 return -ENODEV;
6434 }
6435
6436 if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
6437 LOGD("No pending buffers in HAL, return flush");
6438 mFlushPerf = false;
6439 pthread_mutex_unlock(&mMutex);
6440 return rc;
6441 }
6442
6443 /* wait on a signal that buffers were received */
Shuzhen Wangfb961e52016-11-28 11:48:02 -08006444 rc = clock_gettime(CLOCK_MONOTONIC, &timeout);
Thierry Strudel3d639192016-09-09 11:52:26 -07006445 if (rc < 0) {
6446 LOGE("Error reading the real time clock, cannot use timed wait");
6447 } else {
6448 timeout.tv_sec += FLUSH_TIMEOUT;
6449 timed_wait = true;
6450 }
6451
    //Block on the condition variable
6453 while (mPendingBuffersMap.numPendingBufsAtFlush != 0) {
6454 LOGD("Waiting on mBuffersCond");
6455 if (!timed_wait) {
6456 rc = pthread_cond_wait(&mBuffersCond, &mMutex);
6457 if (rc != 0) {
6458 LOGE("pthread_cond_wait failed due to rc = %s",
6459 strerror(rc));
6460 break;
6461 }
6462 } else {
6463 rc = pthread_cond_timedwait(&mBuffersCond, &mMutex, &timeout);
6464 if (rc != 0) {
6465 LOGE("pthread_cond_timedwait failed due to rc = %s",
6466 strerror(rc));
6467 break;
6468 }
6469 }
6470 }
6471 if (rc != 0) {
6472 mFlushPerf = false;
6473 pthread_mutex_unlock(&mMutex);
6474 return -ENODEV;
6475 }
6476
6477 LOGD("Received buffers, now safe to return them");
6478
6479 //make sure the channels handle flush
6480 //currently only required for the picture channel to release snapshot resources
6481 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6482 it != mStreamInfo.end(); it++) {
6483 QCamera3Channel *channel = (*it)->channel;
6484 if (channel) {
6485 rc = channel->flush();
6486 if (rc) {
6487 LOGE("Flushing the channels failed with error %d", rc);
6488 // even though the channel flush failed we need to continue and
6489 // return the buffers we have to the framework, however the return
6490 // value will be an error
6491 rc = -ENODEV;
6492 }
6493 }
6494 }
6495
6496 /* notify the frameworks and send errored results */
6497 rc = notifyErrorForPendingRequests();
6498 if (rc < 0) {
6499 LOGE("notifyErrorForPendingRequests failed");
6500 pthread_mutex_unlock(&mMutex);
6501 return rc;
6502 }
6503
6504 //unblock process_capture_request
6505 mPendingLiveRequest = 0;
6506 unblockRequestIfNecessary();
6507
6508 mFlushPerf = false;
6509 pthread_mutex_unlock(&mMutex);
6510 LOGD ("Flush Operation complete. rc = %d", rc);
6511 return rc;
6512}
6513
6514/*===========================================================================
6515 * FUNCTION : handleCameraDeviceError
6516 *
 * DESCRIPTION: This function performs an internal flush, notifies the error
 *              to the framework, and updates the state variable.
6519 *
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -07006520 * PARAMETERS :
6521 * @stopChannelImmediately : stop channels immediately without waiting for
6522 * frame boundary.
Thierry Strudel3d639192016-09-09 11:52:26 -07006523 *
6524 * RETURN : NO_ERROR on Success
6525 * Error code on failure
6526 *==========================================================================*/
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -07006527int32_t QCamera3HardwareInterface::handleCameraDeviceError(bool stopChannelImmediately)
Thierry Strudel3d639192016-09-09 11:52:26 -07006528{
6529 int32_t rc = NO_ERROR;
6530
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006531 {
6532 Mutex::Autolock lock(mFlushLock);
6533 pthread_mutex_lock(&mMutex);
6534 if (mState != ERROR) {
6535 //if mState != ERROR, nothing to be done
6536 pthread_mutex_unlock(&mMutex);
6537 return NO_ERROR;
6538 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006539 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07006540
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -07006541 rc = flush(false /* restart channels */, stopChannelImmediately);
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006542 if (NO_ERROR != rc) {
6543 LOGE("internal flush to handle mState = ERROR failed");
6544 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006545
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006546 pthread_mutex_lock(&mMutex);
6547 mState = DEINIT;
6548 pthread_mutex_unlock(&mMutex);
6549 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006550
6551 camera3_notify_msg_t notify_msg;
6552 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
6553 notify_msg.type = CAMERA3_MSG_ERROR;
6554 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_DEVICE;
6555 notify_msg.message.error.error_stream = NULL;
6556 notify_msg.message.error.frame_number = 0;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006557 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -07006558
6559 return rc;
6560}
6561
6562/*===========================================================================
6563 * FUNCTION : captureResultCb
6564 *
 * DESCRIPTION: Callback handler for all capture results
 *              (streams as well as metadata)
 *
 * PARAMETERS :
 * @metadata_buf : metadata information
 * @buffer       : actual gralloc buffer to be returned to the framework.
 *                 NULL if metadata.
 * @frame_number : frame number of the corresponding request
 * @isInputBuffer: true if the buffer is the input buffer of the request
6572 *
6573 * RETURN : NONE
6574 *==========================================================================*/
6575void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf,
6576 camera3_stream_buffer_t *buffer, uint32_t frame_number, bool isInputBuffer)
6577{
6578 if (metadata_buf) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006579 pthread_mutex_lock(&mMutex);
6580 uint8_t batchSize = mBatchSize;
6581 pthread_mutex_unlock(&mMutex);
6582 if (batchSize) {
Thierry Strudel3d639192016-09-09 11:52:26 -07006583 handleBatchMetadata(metadata_buf,
6584 true /* free_and_bufdone_meta_buf */);
6585 } else { /* mBatchSize = 0 */
6586 hdrPlusPerfLock(metadata_buf);
6587 pthread_mutex_lock(&mMutex);
6588 handleMetadataWithLock(metadata_buf,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006589 true /* free_and_bufdone_meta_buf */,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006590 true /* last urgent frame of batch metadata */,
6591 true /* last frame of batch metadata */,
Thierry Strudel54dc9782017-02-15 12:12:10 -08006592 NULL);
Thierry Strudel3d639192016-09-09 11:52:26 -07006593 pthread_mutex_unlock(&mMutex);
6594 }
6595 } else if (isInputBuffer) {
6596 pthread_mutex_lock(&mMutex);
6597 handleInputBufferWithLock(frame_number);
6598 pthread_mutex_unlock(&mMutex);
6599 } else {
6600 pthread_mutex_lock(&mMutex);
6601 handleBufferWithLock(buffer, frame_number);
6602 pthread_mutex_unlock(&mMutex);
6603 }
6604 return;
6605}
6606
6607/*===========================================================================
6608 * FUNCTION : getReprocessibleOutputStreamId
6609 *
 * DESCRIPTION: Get the source output stream id for the input reprocess
 *              stream, i.e. the output or bidirectional stream whose size
 *              and format match the configured input stream, if one exists.
6613 *
6614 * PARAMETERS :
6615 * @id : return the stream id if found
6616 *
6617 * RETURN : int32_t type of status
6618 * NO_ERROR -- success
6619 * none-zero failure code
6620 *==========================================================================*/
6621int32_t QCamera3HardwareInterface::getReprocessibleOutputStreamId(uint32_t &id)
6622{
    /* Check if there is any output or bidirectional stream with the same size
       and format as the input stream, and return that stream */
6625 if ((mInputStreamInfo.dim.width > 0) &&
6626 (mInputStreamInfo.dim.height > 0)) {
6627 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6628 it != mStreamInfo.end(); it++) {
6629
6630 camera3_stream_t *stream = (*it)->stream;
6631 if ((stream->width == (uint32_t)mInputStreamInfo.dim.width) &&
6632 (stream->height == (uint32_t)mInputStreamInfo.dim.height) &&
6633 (stream->format == mInputStreamInfo.format)) {
6634 // Usage flag for an input stream and the source output stream
6635 // may be different.
6636 LOGD("Found reprocessible output stream! %p", *it);
6637 LOGD("input stream usage 0x%x, current stream usage 0x%x",
6638 stream->usage, mInputStreamInfo.usage);
6639
6640 QCamera3Channel *channel = (QCamera3Channel *)stream->priv;
6641 if (channel != NULL && channel->mStreams[0]) {
6642 id = channel->mStreams[0]->getMyServerID();
6643 return NO_ERROR;
6644 }
6645 }
6646 }
6647 } else {
6648 LOGD("No input stream, so no reprocessible output stream");
6649 }
6650 return NAME_NOT_FOUND;
6651}
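
// Illustrative caller sketch (hypothetical): the source stream id is returned
// through the out-parameter, and NAME_NOT_FOUND means no input stream is
// configured.
//
//     uint32_t srcStreamId = 0;
//     if (getReprocessibleOutputStreamId(srcStreamId) == NO_ERROR) {
//         // use srcStreamId to set up the offline reprocess path
//     } else {
//         // no reprocessible output stream; skip reprocess setup
//     }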
6652
6653/*===========================================================================
6654 * FUNCTION : lookupFwkName
6655 *
 * DESCRIPTION: In case the enum is not the same in the framework and the
 *              backend, make sure the parameter is correctly propagated
6658 *
6659 * PARAMETERS :
6660 * @arr : map between the two enums
6661 * @len : len of the map
6662 * @hal_name : name of the hal_parm to map
6663 *
6664 * RETURN : int type of status
6665 * fwk_name -- success
 * non-zero failure code
6667 *==========================================================================*/
6668template <typename halType, class mapType> int lookupFwkName(const mapType *arr,
6669 size_t len, halType hal_name)
6670{
6671
6672 for (size_t i = 0; i < len; i++) {
6673 if (arr[i].hal_name == hal_name) {
6674 return arr[i].fwk_name;
6675 }
6676 }
6677
6678 /* Not able to find matching framework type is not necessarily
6679 * an error case. This happens when mm-camera supports more attributes
6680 * than the frameworks do */
6681 LOGH("Cannot find matching framework type");
6682 return NAME_NOT_FOUND;
6683}
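
// Illustrative use of the HAL -> framework direction (the same pattern is
// used later in this file for scene modes; halSceneMode is a placeholder):
//
//     int val = lookupFwkName(SCENE_MODES_MAP,
//             METADATA_MAP_SIZE(SCENE_MODES_MAP), halSceneMode);
//     if (NAME_NOT_FOUND != val) {
//         uint8_t fwkSceneMode = (uint8_t)val;
//         // report fwkSceneMode in the result metadata
//     }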
6684
6685/*===========================================================================
6686 * FUNCTION : lookupHalName
6687 *
 * DESCRIPTION: In case the enum is not the same in the framework and the
 *              backend, make sure the parameter is correctly propagated
6690 *
6691 * PARAMETERS :
6692 * @arr : map between the two enums
6693 * @len : len of the map
 * @fwk_name : name of the fwk_parm to map
6695 *
6696 * RETURN : int32_t type of status
6697 * hal_name -- success
 * non-zero failure code
6699 *==========================================================================*/
6700template <typename fwkType, class mapType> int lookupHalName(const mapType *arr,
6701 size_t len, fwkType fwk_name)
6702{
6703 for (size_t i = 0; i < len; i++) {
6704 if (arr[i].fwk_name == fwk_name) {
6705 return arr[i].hal_name;
6706 }
6707 }
6708
6709 LOGE("Cannot find matching hal type fwk_name=%d", fwk_name);
6710 return NAME_NOT_FOUND;
6711}
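
// Illustrative use of the framework -> HAL direction (fwkFlashMode is a
// placeholder for a value received from the framework):
//
//     int halVal = lookupHalName(FLASH_MODES_MAP,
//             METADATA_MAP_SIZE(FLASH_MODES_MAP), fwkFlashMode);
//     if (NAME_NOT_FOUND != halVal) {
//         // program the backend with halVal
//     }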
6712
6713/*===========================================================================
6714 * FUNCTION : lookupProp
6715 *
6716 * DESCRIPTION: lookup a value by its name
6717 *
6718 * PARAMETERS :
6719 * @arr : map between the two enums
6720 * @len : size of the map
6721 * @name : name to be looked up
6722 *
6723 * RETURN : Value if found
6724 * CAM_CDS_MODE_MAX if not found
6725 *==========================================================================*/
6726template <class mapType> cam_cds_mode_type_t lookupProp(const mapType *arr,
6727 size_t len, const char *name)
6728{
6729 if (name) {
6730 for (size_t i = 0; i < len; i++) {
6731 if (!strcmp(arr[i].desc, name)) {
6732 return arr[i].val;
6733 }
6734 }
6735 }
6736 return CAM_CDS_MODE_MAX;
6737}
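
// Illustrative sketch (CDS_MAP and propValue are placeholders for an actual
// descriptor map and a string read via property_get()):
//
//     cam_cds_mode_type_t cds = lookupProp(CDS_MAP,
//             METADATA_MAP_SIZE(CDS_MAP), propValue);
//     if (CAM_CDS_MODE_MAX == cds) {
//         // unrecognized name; fall back to the default CDS mode
//     }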
6738
6739/*===========================================================================
 * FUNCTION   : translateFromHalMetadata
 *
 * DESCRIPTION: Translate metadata received from the HAL/backend into the
 *              framework's camera_metadata_t format.
 *
 * PARAMETERS :
 * @metadata : metadata information from callback
 * @pendingRequest: pending request for this metadata
 * @pprocDone: whether internal offline postprocessing is done
 * @lastMetadataInBatch: Boolean to indicate whether this is the last metadata
 *                       in a batch. Always true for non-batch mode.
 * @enableZsl: pointer to the ZSL-enable flag for this result (may be NULL)
Thierry Strudel3d639192016-09-09 11:52:26 -07006749 *
6750 * RETURN : camera_metadata_t*
6751 * metadata in a format specified by fwk
6752 *==========================================================================*/
6753camera_metadata_t*
6754QCamera3HardwareInterface::translateFromHalMetadata(
6755 metadata_buffer_t *metadata,
Shuzhen Wang181c57b2017-07-21 11:39:44 -07006756 const PendingRequestInfo& pendingRequest,
Thierry Strudel3d639192016-09-09 11:52:26 -07006757 bool pprocDone,
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07006758 bool lastMetadataInBatch,
6759 const bool *enableZsl)
Thierry Strudel3d639192016-09-09 11:52:26 -07006760{
6761 CameraMetadata camMetadata;
6762 camera_metadata_t *resultMetadata;
6763
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006764 if (!lastMetadataInBatch) {
        /* In batch mode, only populate SENSOR_TIMESTAMP if this is not the last in batch.
         * Timestamp is needed because it's used for shutter notify calculation.
         */
Shuzhen Wang181c57b2017-07-21 11:39:44 -07006768 camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &pendingRequest.timestamp, 1);
Shuzhen Wang8fe62792017-03-20 16:10:25 -07006769 resultMetadata = camMetadata.release();
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006770 return resultMetadata;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006771 }
6772
Shuzhen Wang181c57b2017-07-21 11:39:44 -07006773 if (pendingRequest.jpegMetadata.entryCount())
6774 camMetadata.append(pendingRequest.jpegMetadata);
Thierry Strudel3d639192016-09-09 11:52:26 -07006775
Shuzhen Wang181c57b2017-07-21 11:39:44 -07006776 camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &pendingRequest.timestamp, 1);
6777 camMetadata.update(ANDROID_REQUEST_ID, &pendingRequest.request_id, 1);
6778 camMetadata.update(ANDROID_REQUEST_PIPELINE_DEPTH, &pendingRequest.pipeline_depth, 1);
6779 camMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &pendingRequest.capture_intent, 1);
6780 camMetadata.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &pendingRequest.hybrid_ae_enable, 1);
Samuel Ha68ba5172016-12-15 18:41:12 -08006781 if (mBatchSize == 0) {
6782 // DevCamDebug metadata translateFromHalMetadata. Only update this one for non-HFR mode
Shuzhen Wang181c57b2017-07-21 11:39:44 -07006783 camMetadata.update(DEVCAMDEBUG_META_ENABLE, &pendingRequest.DevCamDebug_meta_enable, 1);
Samuel Ha68ba5172016-12-15 18:41:12 -08006784 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006785
Samuel Ha68ba5172016-12-15 18:41:12 -08006786 // atrace_begin(ATRACE_TAG_ALWAYS, "DevCamDebugInfo");
    // Only update DevCamDebug metadata conditionally: non-HFR mode and when it is enabled.
Shuzhen Wang181c57b2017-07-21 11:39:44 -07006788 if (mBatchSize == 0 && pendingRequest.DevCamDebug_meta_enable != 0) {
Samuel Ha68ba5172016-12-15 18:41:12 -08006789 // DevCamDebug metadata translateFromHalMetadata AF
6790 IF_META_AVAILABLE(int32_t, DevCamDebug_af_lens_position,
6791 CAM_INTF_META_DEV_CAM_AF_LENS_POSITION, metadata) {
6792 int32_t fwk_DevCamDebug_af_lens_position = *DevCamDebug_af_lens_position;
6793 camMetadata.update(DEVCAMDEBUG_AF_LENS_POSITION, &fwk_DevCamDebug_af_lens_position, 1);
6794 }
6795 IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_confidence,
Shuzhen Wang3569d4a2017-09-04 19:10:28 -07006796 CAM_INTF_META_AF_TOF_CONFIDENCE, metadata) {
Samuel Ha68ba5172016-12-15 18:41:12 -08006797 int32_t fwk_DevCamDebug_af_tof_confidence = *DevCamDebug_af_tof_confidence;
6798 camMetadata.update(DEVCAMDEBUG_AF_TOF_CONFIDENCE, &fwk_DevCamDebug_af_tof_confidence, 1);
6799 }
6800 IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_distance,
Shuzhen Wang3569d4a2017-09-04 19:10:28 -07006801 CAM_INTF_META_AF_TOF_DISTANCE, metadata) {
Samuel Ha68ba5172016-12-15 18:41:12 -08006802 int32_t fwk_DevCamDebug_af_tof_distance = *DevCamDebug_af_tof_distance;
6803 camMetadata.update(DEVCAMDEBUG_AF_TOF_DISTANCE, &fwk_DevCamDebug_af_tof_distance, 1);
6804 }
6805 IF_META_AVAILABLE(int32_t, DevCamDebug_af_luma,
6806 CAM_INTF_META_DEV_CAM_AF_LUMA, metadata) {
6807 int32_t fwk_DevCamDebug_af_luma = *DevCamDebug_af_luma;
6808 camMetadata.update(DEVCAMDEBUG_AF_LUMA, &fwk_DevCamDebug_af_luma, 1);
6809 }
6810 IF_META_AVAILABLE(int32_t, DevCamDebug_af_haf_state,
6811 CAM_INTF_META_DEV_CAM_AF_HAF_STATE, metadata) {
6812 int32_t fwk_DevCamDebug_af_haf_state = *DevCamDebug_af_haf_state;
6813 camMetadata.update(DEVCAMDEBUG_AF_HAF_STATE, &fwk_DevCamDebug_af_haf_state, 1);
6814 }
6815 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_target_pos,
6816 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_TARGET_POS, metadata) {
6817 int32_t fwk_DevCamDebug_af_monitor_pdaf_target_pos =
6818 *DevCamDebug_af_monitor_pdaf_target_pos;
6819 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
6820 &fwk_DevCamDebug_af_monitor_pdaf_target_pos, 1);
6821 }
6822 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_confidence,
6823 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_CONFIDENCE, metadata) {
6824 int32_t fwk_DevCamDebug_af_monitor_pdaf_confidence =
6825 *DevCamDebug_af_monitor_pdaf_confidence;
6826 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
6827 &fwk_DevCamDebug_af_monitor_pdaf_confidence, 1);
6828 }
6829 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_refocus,
6830 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_REFOCUS, metadata) {
6831 int32_t fwk_DevCamDebug_af_monitor_pdaf_refocus = *DevCamDebug_af_monitor_pdaf_refocus;
6832 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
6833 &fwk_DevCamDebug_af_monitor_pdaf_refocus, 1);
6834 }
6835 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_target_pos,
6836 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_TARGET_POS, metadata) {
6837 int32_t fwk_DevCamDebug_af_monitor_tof_target_pos =
6838 *DevCamDebug_af_monitor_tof_target_pos;
6839 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
6840 &fwk_DevCamDebug_af_monitor_tof_target_pos, 1);
6841 }
6842 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_confidence,
6843 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_CONFIDENCE, metadata) {
6844 int32_t fwk_DevCamDebug_af_monitor_tof_confidence =
6845 *DevCamDebug_af_monitor_tof_confidence;
6846 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
6847 &fwk_DevCamDebug_af_monitor_tof_confidence, 1);
6848 }
6849 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_refocus,
6850 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_REFOCUS, metadata) {
6851 int32_t fwk_DevCamDebug_af_monitor_tof_refocus = *DevCamDebug_af_monitor_tof_refocus;
6852 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
6853 &fwk_DevCamDebug_af_monitor_tof_refocus, 1);
6854 }
6855 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_type_select,
6856 CAM_INTF_META_DEV_CAM_AF_MONITOR_TYPE_SELECT, metadata) {
6857 int32_t fwk_DevCamDebug_af_monitor_type_select = *DevCamDebug_af_monitor_type_select;
6858 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
6859 &fwk_DevCamDebug_af_monitor_type_select, 1);
6860 }
6861 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_refocus,
6862 CAM_INTF_META_DEV_CAM_AF_MONITOR_REFOCUS, metadata) {
6863 int32_t fwk_DevCamDebug_af_monitor_refocus = *DevCamDebug_af_monitor_refocus;
6864 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_REFOCUS,
6865 &fwk_DevCamDebug_af_monitor_refocus, 1);
6866 }
6867 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_target_pos,
6868 CAM_INTF_META_DEV_CAM_AF_MONITOR_TARGET_POS, metadata) {
6869 int32_t fwk_DevCamDebug_af_monitor_target_pos = *DevCamDebug_af_monitor_target_pos;
6870 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
6871 &fwk_DevCamDebug_af_monitor_target_pos, 1);
6872 }
6873 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_target_pos,
6874 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_TARGET_POS, metadata) {
6875 int32_t fwk_DevCamDebug_af_search_pdaf_target_pos =
6876 *DevCamDebug_af_search_pdaf_target_pos;
6877 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
6878 &fwk_DevCamDebug_af_search_pdaf_target_pos, 1);
6879 }
6880 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_next_pos,
6881 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEXT_POS, metadata) {
6882 int32_t fwk_DevCamDebug_af_search_pdaf_next_pos = *DevCamDebug_af_search_pdaf_next_pos;
6883 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
6884 &fwk_DevCamDebug_af_search_pdaf_next_pos, 1);
6885 }
6886 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_near_pos,
6887 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEAR_POS, metadata) {
6888 int32_t fwk_DevCamDebug_af_search_pdaf_near_pos = *DevCamDebug_af_search_pdaf_near_pos;
6889 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
6890 &fwk_DevCamDebug_af_search_pdaf_near_pos, 1);
6891 }
6892 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_far_pos,
6893 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_FAR_POS, metadata) {
6894 int32_t fwk_DevCamDebug_af_search_pdaf_far_pos = *DevCamDebug_af_search_pdaf_far_pos;
6895 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
6896 &fwk_DevCamDebug_af_search_pdaf_far_pos, 1);
6897 }
6898 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_confidence,
6899 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_CONFIDENCE, metadata) {
6900 int32_t fwk_DevCamDebug_af_search_pdaf_confidence = *DevCamDebug_af_search_pdaf_confidence;
6901 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
6902 &fwk_DevCamDebug_af_search_pdaf_confidence, 1);
6903 }
6904 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_target_pos,
6905 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_TARGET_POS, metadata) {
6906 int32_t fwk_DevCamDebug_af_search_tof_target_pos =
6907 *DevCamDebug_af_search_tof_target_pos;
6908 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
6909 &fwk_DevCamDebug_af_search_tof_target_pos, 1);
6910 }
6911 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_next_pos,
6912 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEXT_POS, metadata) {
6913 int32_t fwk_DevCamDebug_af_search_tof_next_pos = *DevCamDebug_af_search_tof_next_pos;
6914 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
6915 &fwk_DevCamDebug_af_search_tof_next_pos, 1);
6916 }
6917 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_near_pos,
6918 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEAR_POS, metadata) {
6919 int32_t fwk_DevCamDebug_af_search_tof_near_pos = *DevCamDebug_af_search_tof_near_pos;
6920 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
6921 &fwk_DevCamDebug_af_search_tof_near_pos, 1);
6922 }
6923 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_far_pos,
6924 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_FAR_POS, metadata) {
6925 int32_t fwk_DevCamDebug_af_search_tof_far_pos = *DevCamDebug_af_search_tof_far_pos;
6926 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
6927 &fwk_DevCamDebug_af_search_tof_far_pos, 1);
6928 }
6929 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_confidence,
6930 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_CONFIDENCE, metadata) {
6931 int32_t fwk_DevCamDebug_af_search_tof_confidence = *DevCamDebug_af_search_tof_confidence;
6932 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
6933 &fwk_DevCamDebug_af_search_tof_confidence, 1);
6934 }
6935 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_type_select,
6936 CAM_INTF_META_DEV_CAM_AF_SEARCH_TYPE_SELECT, metadata) {
6937 int32_t fwk_DevCamDebug_af_search_type_select = *DevCamDebug_af_search_type_select;
6938 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
6939 &fwk_DevCamDebug_af_search_type_select, 1);
6940 }
6941 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_next_pos,
6942 CAM_INTF_META_DEV_CAM_AF_SEARCH_NEXT_POS, metadata) {
6943 int32_t fwk_DevCamDebug_af_search_next_pos = *DevCamDebug_af_search_next_pos;
6944 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
6945 &fwk_DevCamDebug_af_search_next_pos, 1);
6946 }
6947 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_target_pos,
6948 CAM_INTF_META_DEV_CAM_AF_SEARCH_TARGET_POS, metadata) {
6949 int32_t fwk_DevCamDebug_af_search_target_pos = *DevCamDebug_af_search_target_pos;
6950 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
6951 &fwk_DevCamDebug_af_search_target_pos, 1);
6952 }
6953 // DevCamDebug metadata translateFromHalMetadata AEC
6954 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_target_luma,
6955 CAM_INTF_META_DEV_CAM_AEC_TARGET_LUMA, metadata) {
6956 int32_t fwk_DevCamDebug_aec_target_luma = *DevCamDebug_aec_target_luma;
6957 camMetadata.update(DEVCAMDEBUG_AEC_TARGET_LUMA, &fwk_DevCamDebug_aec_target_luma, 1);
6958 }
6959 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_comp_luma,
6960 CAM_INTF_META_DEV_CAM_AEC_COMP_LUMA, metadata) {
6961 int32_t fwk_DevCamDebug_aec_comp_luma = *DevCamDebug_aec_comp_luma;
6962 camMetadata.update(DEVCAMDEBUG_AEC_COMP_LUMA, &fwk_DevCamDebug_aec_comp_luma, 1);
6963 }
6964 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_avg_luma,
6965 CAM_INTF_META_DEV_CAM_AEC_AVG_LUMA, metadata) {
6966 int32_t fwk_DevCamDebug_aec_avg_luma = *DevCamDebug_aec_avg_luma;
6967 camMetadata.update(DEVCAMDEBUG_AEC_AVG_LUMA, &fwk_DevCamDebug_aec_avg_luma, 1);
6968 }
6969 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_cur_luma,
6970 CAM_INTF_META_DEV_CAM_AEC_CUR_LUMA, metadata) {
6971 int32_t fwk_DevCamDebug_aec_cur_luma = *DevCamDebug_aec_cur_luma;
6972 camMetadata.update(DEVCAMDEBUG_AEC_CUR_LUMA, &fwk_DevCamDebug_aec_cur_luma, 1);
6973 }
6974 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_linecount,
6975 CAM_INTF_META_DEV_CAM_AEC_LINECOUNT, metadata) {
6976 int32_t fwk_DevCamDebug_aec_linecount = *DevCamDebug_aec_linecount;
6977 camMetadata.update(DEVCAMDEBUG_AEC_LINECOUNT, &fwk_DevCamDebug_aec_linecount, 1);
6978 }
6979 IF_META_AVAILABLE(float, DevCamDebug_aec_real_gain,
6980 CAM_INTF_META_DEV_CAM_AEC_REAL_GAIN, metadata) {
6981 float fwk_DevCamDebug_aec_real_gain = *DevCamDebug_aec_real_gain;
6982 camMetadata.update(DEVCAMDEBUG_AEC_REAL_GAIN, &fwk_DevCamDebug_aec_real_gain, 1);
6983 }
6984 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_exp_index,
6985 CAM_INTF_META_DEV_CAM_AEC_EXP_INDEX, metadata) {
6986 int32_t fwk_DevCamDebug_aec_exp_index = *DevCamDebug_aec_exp_index;
6987 camMetadata.update(DEVCAMDEBUG_AEC_EXP_INDEX, &fwk_DevCamDebug_aec_exp_index, 1);
6988 }
6989 IF_META_AVAILABLE(float, DevCamDebug_aec_lux_idx,
6990 CAM_INTF_META_DEV_CAM_AEC_LUX_IDX, metadata) {
6991 float fwk_DevCamDebug_aec_lux_idx = *DevCamDebug_aec_lux_idx;
6992 camMetadata.update(DEVCAMDEBUG_AEC_LUX_IDX, &fwk_DevCamDebug_aec_lux_idx, 1);
6993 }
Samuel Ha34229982017-02-17 13:51:11 -08006994 // DevCamDebug metadata translateFromHalMetadata zzHDR
6995 IF_META_AVAILABLE(float, DevCamDebug_aec_l_real_gain,
6996 CAM_INTF_META_DEV_CAM_AEC_L_REAL_GAIN, metadata) {
6997 float fwk_DevCamDebug_aec_l_real_gain = *DevCamDebug_aec_l_real_gain;
6998 camMetadata.update(DEVCAMDEBUG_AEC_L_REAL_GAIN, &fwk_DevCamDebug_aec_l_real_gain, 1);
6999 }
7000 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_l_linecount,
7001 CAM_INTF_META_DEV_CAM_AEC_L_LINECOUNT, metadata) {
Samuel Hae80fbd52017-03-29 16:14:46 -07007002 int32_t fwk_DevCamDebug_aec_l_linecount = *DevCamDebug_aec_l_linecount;
Samuel Ha34229982017-02-17 13:51:11 -08007003 camMetadata.update(DEVCAMDEBUG_AEC_L_LINECOUNT, &fwk_DevCamDebug_aec_l_linecount, 1);
7004 }
7005 IF_META_AVAILABLE(float, DevCamDebug_aec_s_real_gain,
7006 CAM_INTF_META_DEV_CAM_AEC_S_REAL_GAIN, metadata) {
7007 float fwk_DevCamDebug_aec_s_real_gain = *DevCamDebug_aec_s_real_gain;
7008 camMetadata.update(DEVCAMDEBUG_AEC_S_REAL_GAIN, &fwk_DevCamDebug_aec_s_real_gain, 1);
7009 }
7010 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_s_linecount,
7011 CAM_INTF_META_DEV_CAM_AEC_S_LINECOUNT, metadata) {
Samuel Hae80fbd52017-03-29 16:14:46 -07007012 int32_t fwk_DevCamDebug_aec_s_linecount = *DevCamDebug_aec_s_linecount;
Samuel Ha34229982017-02-17 13:51:11 -08007013 camMetadata.update(DEVCAMDEBUG_AEC_S_LINECOUNT, &fwk_DevCamDebug_aec_s_linecount, 1);
7014 }
7015 IF_META_AVAILABLE(float, DevCamDebug_aec_hdr_sensitivity_ratio,
7016 CAM_INTF_META_DEV_CAM_AEC_HDR_SENSITIVITY_RATIO, metadata) {
7017 float fwk_DevCamDebug_aec_hdr_sensitivity_ratio =
7018 *DevCamDebug_aec_hdr_sensitivity_ratio;
7019 camMetadata.update(DEVCAMDEBUG_AEC_HDR_SENSITIVITY_RATIO,
7020 &fwk_DevCamDebug_aec_hdr_sensitivity_ratio, 1);
7021 }
7022 IF_META_AVAILABLE(float, DevCamDebug_aec_hdr_exp_time_ratio,
7023 CAM_INTF_META_DEV_CAM_AEC_HDR_EXP_TIME_RATIO, metadata) {
7024 float fwk_DevCamDebug_aec_hdr_exp_time_ratio = *DevCamDebug_aec_hdr_exp_time_ratio;
7025 camMetadata.update(DEVCAMDEBUG_AEC_HDR_EXP_TIME_RATIO,
7026 &fwk_DevCamDebug_aec_hdr_exp_time_ratio, 1);
7027 }
7028 // DevCamDebug metadata translateFromHalMetadata ADRC
7029 IF_META_AVAILABLE(float, DevCamDebug_aec_total_drc_gain,
7030 CAM_INTF_META_DEV_CAM_AEC_TOTAL_DRC_GAIN, metadata) {
7031 float fwk_DevCamDebug_aec_total_drc_gain = *DevCamDebug_aec_total_drc_gain;
7032 camMetadata.update(DEVCAMDEBUG_AEC_TOTAL_DRC_GAIN,
7033 &fwk_DevCamDebug_aec_total_drc_gain, 1);
7034 }
7035 IF_META_AVAILABLE(float, DevCamDebug_aec_color_drc_gain,
7036 CAM_INTF_META_DEV_CAM_AEC_COLOR_DRC_GAIN, metadata) {
7037 float fwk_DevCamDebug_aec_color_drc_gain = *DevCamDebug_aec_color_drc_gain;
7038 camMetadata.update(DEVCAMDEBUG_AEC_COLOR_DRC_GAIN,
7039 &fwk_DevCamDebug_aec_color_drc_gain, 1);
7040 }
7041 IF_META_AVAILABLE(float, DevCamDebug_aec_gtm_ratio,
7042 CAM_INTF_META_DEV_CAM_AEC_GTM_RATIO, metadata) {
7043 float fwk_DevCamDebug_aec_gtm_ratio = *DevCamDebug_aec_gtm_ratio;
7044 camMetadata.update(DEVCAMDEBUG_AEC_GTM_RATIO, &fwk_DevCamDebug_aec_gtm_ratio, 1);
7045 }
7046 IF_META_AVAILABLE(float, DevCamDebug_aec_ltm_ratio,
7047 CAM_INTF_META_DEV_CAM_AEC_LTM_RATIO, metadata) {
7048 float fwk_DevCamDebug_aec_ltm_ratio = *DevCamDebug_aec_ltm_ratio;
7049 camMetadata.update(DEVCAMDEBUG_AEC_LTM_RATIO, &fwk_DevCamDebug_aec_ltm_ratio, 1);
7050 }
7051 IF_META_AVAILABLE(float, DevCamDebug_aec_la_ratio,
7052 CAM_INTF_META_DEV_CAM_AEC_LA_RATIO, metadata) {
7053 float fwk_DevCamDebug_aec_la_ratio = *DevCamDebug_aec_la_ratio;
7054 camMetadata.update(DEVCAMDEBUG_AEC_LA_RATIO, &fwk_DevCamDebug_aec_la_ratio, 1);
7055 }
7056 IF_META_AVAILABLE(float, DevCamDebug_aec_gamma_ratio,
7057 CAM_INTF_META_DEV_CAM_AEC_GAMMA_RATIO, metadata) {
7058 float fwk_DevCamDebug_aec_gamma_ratio = *DevCamDebug_aec_gamma_ratio;
7059 camMetadata.update(DEVCAMDEBUG_AEC_GAMMA_RATIO, &fwk_DevCamDebug_aec_gamma_ratio, 1);
7060 }
Samuel Habdf4fac2017-07-28 17:21:18 -07007061 // DevCamDebug metadata translateFromHalMetadata AEC MOTION
7062 IF_META_AVAILABLE(float, DevCamDebug_aec_camera_motion_dx,
7063 CAM_INTF_META_DEV_CAM_AEC_CAMERA_MOTION_DX, metadata) {
7064 float fwk_DevCamDebug_aec_camera_motion_dx = *DevCamDebug_aec_camera_motion_dx;
7065 camMetadata.update(DEVCAMDEBUG_AEC_CAMERA_MOTION_DX,
7066 &fwk_DevCamDebug_aec_camera_motion_dx, 1);
7067 }
7068 IF_META_AVAILABLE(float, DevCamDebug_aec_camera_motion_dy,
7069 CAM_INTF_META_DEV_CAM_AEC_CAMERA_MOTION_DY, metadata) {
7070 float fwk_DevCamDebug_aec_camera_motion_dy = *DevCamDebug_aec_camera_motion_dy;
7071 camMetadata.update(DEVCAMDEBUG_AEC_CAMERA_MOTION_DY,
7072 &fwk_DevCamDebug_aec_camera_motion_dy, 1);
7073 }
7074 IF_META_AVAILABLE(float, DevCamDebug_aec_subject_motion,
7075 CAM_INTF_META_DEV_CAM_AEC_SUBJECT_MOTION, metadata) {
7076 float fwk_DevCamDebug_aec_subject_motion = *DevCamDebug_aec_subject_motion;
7077 camMetadata.update(DEVCAMDEBUG_AEC_SUBJECT_MOTION,
7078 &fwk_DevCamDebug_aec_subject_motion, 1);
7079 }
Samuel Ha68ba5172016-12-15 18:41:12 -08007080 // DevCamDebug metadata translateFromHalMetadata AWB
7081 IF_META_AVAILABLE(float, DevCamDebug_awb_r_gain,
7082 CAM_INTF_META_DEV_CAM_AWB_R_GAIN, metadata) {
7083 float fwk_DevCamDebug_awb_r_gain = *DevCamDebug_awb_r_gain;
7084 camMetadata.update(DEVCAMDEBUG_AWB_R_GAIN, &fwk_DevCamDebug_awb_r_gain, 1);
7085 }
7086 IF_META_AVAILABLE(float, DevCamDebug_awb_g_gain,
7087 CAM_INTF_META_DEV_CAM_AWB_G_GAIN, metadata) {
7088 float fwk_DevCamDebug_awb_g_gain = *DevCamDebug_awb_g_gain;
7089 camMetadata.update(DEVCAMDEBUG_AWB_G_GAIN, &fwk_DevCamDebug_awb_g_gain, 1);
7090 }
7091 IF_META_AVAILABLE(float, DevCamDebug_awb_b_gain,
7092 CAM_INTF_META_DEV_CAM_AWB_B_GAIN, metadata) {
7093 float fwk_DevCamDebug_awb_b_gain = *DevCamDebug_awb_b_gain;
7094 camMetadata.update(DEVCAMDEBUG_AWB_B_GAIN, &fwk_DevCamDebug_awb_b_gain, 1);
7095 }
7096 IF_META_AVAILABLE(int32_t, DevCamDebug_awb_cct,
7097 CAM_INTF_META_DEV_CAM_AWB_CCT, metadata) {
7098 int32_t fwk_DevCamDebug_awb_cct = *DevCamDebug_awb_cct;
7099 camMetadata.update(DEVCAMDEBUG_AWB_CCT, &fwk_DevCamDebug_awb_cct, 1);
7100 }
7101 IF_META_AVAILABLE(int32_t, DevCamDebug_awb_decision,
7102 CAM_INTF_META_DEV_CAM_AWB_DECISION, metadata) {
7103 int32_t fwk_DevCamDebug_awb_decision = *DevCamDebug_awb_decision;
7104 camMetadata.update(DEVCAMDEBUG_AWB_DECISION, &fwk_DevCamDebug_awb_decision, 1);
7105 }
7106 }
7107 // atrace_end(ATRACE_TAG_ALWAYS);
7108
Thierry Strudel3d639192016-09-09 11:52:26 -07007109 IF_META_AVAILABLE(uint32_t, frame_number, CAM_INTF_META_FRAME_NUMBER, metadata) {
7110 int64_t fwk_frame_number = *frame_number;
7111 camMetadata.update(ANDROID_SYNC_FRAME_NUMBER, &fwk_frame_number, 1);
7112 }
7113
7114 IF_META_AVAILABLE(cam_fps_range_t, float_range, CAM_INTF_PARM_FPS_RANGE, metadata) {
7115 int32_t fps_range[2];
7116 fps_range[0] = (int32_t)float_range->min_fps;
7117 fps_range[1] = (int32_t)float_range->max_fps;
7118 camMetadata.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
7119 fps_range, 2);
7120 LOGD("urgent Metadata : ANDROID_CONTROL_AE_TARGET_FPS_RANGE [%d, %d]",
7121 fps_range[0], fps_range[1]);
7122 }
7123
7124 IF_META_AVAILABLE(int32_t, expCompensation, CAM_INTF_PARM_EXPOSURE_COMPENSATION, metadata) {
7125 camMetadata.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, expCompensation, 1);
7126 }
7127
7128 IF_META_AVAILABLE(uint32_t, sceneMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
7129 int val = (uint8_t)lookupFwkName(SCENE_MODES_MAP,
7130 METADATA_MAP_SIZE(SCENE_MODES_MAP),
7131 *sceneMode);
7132 if (NAME_NOT_FOUND != val) {
7133 uint8_t fwkSceneMode = (uint8_t)val;
7134 camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkSceneMode, 1);
7135 LOGD("urgent Metadata : ANDROID_CONTROL_SCENE_MODE: %d",
7136 fwkSceneMode);
7137 }
7138 }
7139
7140 IF_META_AVAILABLE(uint32_t, ae_lock, CAM_INTF_PARM_AEC_LOCK, metadata) {
7141 uint8_t fwk_ae_lock = (uint8_t) *ae_lock;
7142 camMetadata.update(ANDROID_CONTROL_AE_LOCK, &fwk_ae_lock, 1);
7143 }
7144
7145 IF_META_AVAILABLE(uint32_t, awb_lock, CAM_INTF_PARM_AWB_LOCK, metadata) {
7146 uint8_t fwk_awb_lock = (uint8_t) *awb_lock;
7147 camMetadata.update(ANDROID_CONTROL_AWB_LOCK, &fwk_awb_lock, 1);
7148 }
7149
7150 IF_META_AVAILABLE(uint32_t, color_correct_mode, CAM_INTF_META_COLOR_CORRECT_MODE, metadata) {
7151 uint8_t fwk_color_correct_mode = (uint8_t) *color_correct_mode;
7152 camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, &fwk_color_correct_mode, 1);
7153 }
7154
7155 IF_META_AVAILABLE(cam_edge_application_t, edgeApplication,
7156 CAM_INTF_META_EDGE_MODE, metadata) {
7157 camMetadata.update(ANDROID_EDGE_MODE, &(edgeApplication->edge_mode), 1);
7158 }
7159
7160 IF_META_AVAILABLE(uint32_t, flashPower, CAM_INTF_META_FLASH_POWER, metadata) {
7161 uint8_t fwk_flashPower = (uint8_t) *flashPower;
7162 camMetadata.update(ANDROID_FLASH_FIRING_POWER, &fwk_flashPower, 1);
7163 }
7164
7165 IF_META_AVAILABLE(int64_t, flashFiringTime, CAM_INTF_META_FLASH_FIRING_TIME, metadata) {
7166 camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1);
7167 }
7168
7169 IF_META_AVAILABLE(int32_t, flashState, CAM_INTF_META_FLASH_STATE, metadata) {
7170 if (0 <= *flashState) {
7171 uint8_t fwk_flashState = (uint8_t) *flashState;
7172 if (!gCamCapability[mCameraId]->flash_available) {
7173 fwk_flashState = ANDROID_FLASH_STATE_UNAVAILABLE;
7174 }
7175 camMetadata.update(ANDROID_FLASH_STATE, &fwk_flashState, 1);
7176 }
7177 }
7178
7179 IF_META_AVAILABLE(uint32_t, flashMode, CAM_INTF_META_FLASH_MODE, metadata) {
7180 int val = lookupFwkName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP), *flashMode);
7181 if (NAME_NOT_FOUND != val) {
7182 uint8_t fwk_flashMode = (uint8_t)val;
7183 camMetadata.update(ANDROID_FLASH_MODE, &fwk_flashMode, 1);
7184 }
7185 }
7186
7187 IF_META_AVAILABLE(uint32_t, hotPixelMode, CAM_INTF_META_HOTPIXEL_MODE, metadata) {
7188 uint8_t fwk_hotPixelMode = (uint8_t) *hotPixelMode;
7189 camMetadata.update(ANDROID_HOT_PIXEL_MODE, &fwk_hotPixelMode, 1);
7190 }
7191
7192 IF_META_AVAILABLE(float, lensAperture, CAM_INTF_META_LENS_APERTURE, metadata) {
7193 camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1);
7194 }
7195
7196 IF_META_AVAILABLE(float, filterDensity, CAM_INTF_META_LENS_FILTERDENSITY, metadata) {
7197 camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1);
7198 }
7199
7200 IF_META_AVAILABLE(float, focalLength, CAM_INTF_META_LENS_FOCAL_LENGTH, metadata) {
7201 camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1);
7202 }
7203
7204 IF_META_AVAILABLE(uint32_t, opticalStab, CAM_INTF_META_LENS_OPT_STAB_MODE, metadata) {
7205 uint8_t fwk_opticalStab = (uint8_t) *opticalStab;
7206 camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &fwk_opticalStab, 1);
7207 }
7208
7209 IF_META_AVAILABLE(uint32_t, videoStab, CAM_INTF_META_VIDEO_STAB_MODE, metadata) {
7210 uint8_t fwk_videoStab = (uint8_t) *videoStab;
7211 LOGD("fwk_videoStab = %d", fwk_videoStab);
7212 camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwk_videoStab, 1);
7213 } else {
        // Regardless of whether video stabilization is supported, CTS expects the EIS result
        // to be non-NULL, so hardcode the video stabilization result to OFF mode.
7216 uint8_t fwkVideoStabMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
7217 camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwkVideoStabMode, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007218 LOGD("EIS result default to OFF mode");
Thierry Strudel3d639192016-09-09 11:52:26 -07007219 }
7220
7221 IF_META_AVAILABLE(uint32_t, noiseRedMode, CAM_INTF_META_NOISE_REDUCTION_MODE, metadata) {
7222 uint8_t fwk_noiseRedMode = (uint8_t) *noiseRedMode;
7223 camMetadata.update(ANDROID_NOISE_REDUCTION_MODE, &fwk_noiseRedMode, 1);
7224 }
7225
7226 IF_META_AVAILABLE(float, effectiveExposureFactor, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR, metadata) {
7227 camMetadata.update(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR, effectiveExposureFactor, 1);
7228 }
7229
Thierry Strudel3d639192016-09-09 11:52:26 -07007230 IF_META_AVAILABLE(cam_black_level_metadata_t, blackLevelAppliedPattern,
7231 CAM_INTF_META_BLACK_LEVEL_APPLIED_PATTERN, metadata) {
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007232 float fwk_blackLevelInd[BLACK_LEVEL_PATTERN_CNT];
Thierry Strudel3d639192016-09-09 11:52:26 -07007233
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007234 adjustBlackLevelForCFA(blackLevelAppliedPattern->cam_black_level, fwk_blackLevelInd,
7235 gCamCapability[mCameraId]->color_arrangement);
Thierry Strudel3d639192016-09-09 11:52:26 -07007236
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007237 LOGD("applied dynamicblackLevel in RGGB order = %f %f %f %f",
Thierry Strudel3d639192016-09-09 11:52:26 -07007238 blackLevelAppliedPattern->cam_black_level[0],
7239 blackLevelAppliedPattern->cam_black_level[1],
7240 blackLevelAppliedPattern->cam_black_level[2],
7241 blackLevelAppliedPattern->cam_black_level[3]);
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007242 camMetadata.update(QCAMERA3_SENSOR_DYNAMIC_BLACK_LEVEL_PATTERN, fwk_blackLevelInd,
7243 BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007244
7245#ifndef USE_HAL_3_3
        // Update ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL.
        // Convert from the internal 14-bit depth to the sensor's 10-bit raw depth
        // space, i.e. divide by 2^(14 - 10) = 16.
Jason Lee4f3d96e2017-02-28 19:24:14 +05307249 fwk_blackLevelInd[0] /= 16.0;
7250 fwk_blackLevelInd[1] /= 16.0;
7251 fwk_blackLevelInd[2] /= 16.0;
7252 fwk_blackLevelInd[3] /= 16.0;
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007253 camMetadata.update(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL, fwk_blackLevelInd,
7254 BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007255#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07007256 }
7257
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007258#ifndef USE_HAL_3_3
7259 // Fixed whitelevel is used by ISP/Sensor
7260 camMetadata.update(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL,
7261 &gCamCapability[mCameraId]->white_level, 1);
7262#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07007263
7264 IF_META_AVAILABLE(cam_crop_region_t, hScalerCropRegion,
7265 CAM_INTF_META_SCALER_CROP_REGION, metadata) {
7266 int32_t scalerCropRegion[4];
7267 scalerCropRegion[0] = hScalerCropRegion->left;
7268 scalerCropRegion[1] = hScalerCropRegion->top;
7269 scalerCropRegion[2] = hScalerCropRegion->width;
7270 scalerCropRegion[3] = hScalerCropRegion->height;
7271
7272 // Adjust crop region from sensor output coordinate system to active
7273 // array coordinate system.
7274 mCropRegionMapper.toActiveArray(scalerCropRegion[0], scalerCropRegion[1],
7275 scalerCropRegion[2], scalerCropRegion[3]);
7276
7277 camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 4);
7278 }
7279
7280 IF_META_AVAILABLE(int64_t, sensorExpTime, CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata) {
7281 LOGD("sensorExpTime = %lld", *sensorExpTime);
7282 camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1);
7283 }
7284
Shuzhen Wang6a1dd612017-08-05 15:03:53 -07007285 IF_META_AVAILABLE(float, expTimeBoost, CAM_INTF_META_EXP_TIME_BOOST, metadata) {
7286 LOGD("expTimeBoost = %f", *expTimeBoost);
7287 camMetadata.update(NEXUS_EXPERIMENTAL_2017_EXP_TIME_BOOST, expTimeBoost, 1);
7288 }
7289
Thierry Strudel3d639192016-09-09 11:52:26 -07007290 IF_META_AVAILABLE(int64_t, sensorFameDuration,
7291 CAM_INTF_META_SENSOR_FRAME_DURATION, metadata) {
7292 LOGD("sensorFameDuration = %lld", *sensorFameDuration);
7293 camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFameDuration, 1);
7294 }
7295
7296 IF_META_AVAILABLE(int64_t, sensorRollingShutterSkew,
7297 CAM_INTF_META_SENSOR_ROLLING_SHUTTER_SKEW, metadata) {
7298 LOGD("sensorRollingShutterSkew = %lld", *sensorRollingShutterSkew);
7299 camMetadata.update(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW,
7300 sensorRollingShutterSkew, 1);
7301 }
7302
7303 IF_META_AVAILABLE(int32_t, sensorSensitivity, CAM_INTF_META_SENSOR_SENSITIVITY, metadata) {
7304 LOGD("sensorSensitivity = %d", *sensorSensitivity);
7305 camMetadata.update(ANDROID_SENSOR_SENSITIVITY, sensorSensitivity, 1);
7306
7307 //calculate the noise profile based on sensitivity
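        // ANDROID_SENSOR_NOISE_PROFILE describes a linear noise model
        // (variance ~= S * signal + O); one (S, O) pair is reported per color
        // channel, interleaved as [S, O, S, O, ...] below.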
7308 double noise_profile_S = computeNoiseModelEntryS(*sensorSensitivity);
7309 double noise_profile_O = computeNoiseModelEntryO(*sensorSensitivity);
7310 double noise_profile[2 * gCamCapability[mCameraId]->num_color_channels];
7311 for (int i = 0; i < 2 * gCamCapability[mCameraId]->num_color_channels; i += 2) {
7312 noise_profile[i] = noise_profile_S;
7313 noise_profile[i+1] = noise_profile_O;
7314 }
7315 LOGD("noise model entry (S, O) is (%f, %f)",
7316 noise_profile_S, noise_profile_O);
7317 camMetadata.update(ANDROID_SENSOR_NOISE_PROFILE, noise_profile,
7318 (size_t) (2 * gCamCapability[mCameraId]->num_color_channels));
7319 }
7320
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007321#ifndef USE_HAL_3_3
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07007322 int32_t fwk_ispSensitivity = 100;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007323 IF_META_AVAILABLE(int32_t, ispSensitivity, CAM_INTF_META_ISP_SENSITIVITY, metadata) {
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07007324 fwk_ispSensitivity = (int32_t) *ispSensitivity;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007325 }
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07007326 IF_META_AVAILABLE(float, postStatsSensitivity, CAM_INTF_META_ISP_POST_STATS_SENSITIVITY, metadata) {
7327 fwk_ispSensitivity = (int32_t) (*postStatsSensitivity * fwk_ispSensitivity);
7328 }
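    // POST_RAW_SENSITIVITY_BOOST is reported in ISO arithmetic units, where 100
    // means no boost; the ISP base sensitivity is scaled by any post-stats
    // sensitivity factor before being reported.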
7329 camMetadata.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &fwk_ispSensitivity, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007330#endif
7331
Thierry Strudel3d639192016-09-09 11:52:26 -07007332 IF_META_AVAILABLE(uint32_t, shadingMode, CAM_INTF_META_SHADING_MODE, metadata) {
7333 uint8_t fwk_shadingMode = (uint8_t) *shadingMode;
7334 camMetadata.update(ANDROID_SHADING_MODE, &fwk_shadingMode, 1);
7335 }
7336
7337 IF_META_AVAILABLE(uint32_t, faceDetectMode, CAM_INTF_META_STATS_FACEDETECT_MODE, metadata) {
7338 int val = lookupFwkName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
7339 *faceDetectMode);
7340 if (NAME_NOT_FOUND != val) {
7341 uint8_t fwk_faceDetectMode = (uint8_t)val;
7342 camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &fwk_faceDetectMode, 1);
7343
7344 if (fwk_faceDetectMode != ANDROID_STATISTICS_FACE_DETECT_MODE_OFF) {
7345 IF_META_AVAILABLE(cam_face_detection_data_t, faceDetectionInfo,
7346 CAM_INTF_META_FACE_DETECTION, metadata) {
7347 uint8_t numFaces = MIN(
7348 faceDetectionInfo->num_faces_detected, MAX_ROI);
7349 int32_t faceIds[MAX_ROI];
7350 uint8_t faceScores[MAX_ROI];
7351 int32_t faceRectangles[MAX_ROI * 4];
7352 int32_t faceLandmarks[MAX_ROI * 6];
7353 size_t j = 0, k = 0;
7354
7355 for (size_t i = 0; i < numFaces; i++) {
7356 faceScores[i] = (uint8_t)faceDetectionInfo->faces[i].score;
7357 // Adjust crop region from sensor output coordinate system to active
7358 // array coordinate system.
Shuzhen Wang91c14e82017-08-21 17:56:57 -07007359 cam_rect_t rect = faceDetectionInfo->faces[i].face_boundary;
Thierry Strudel3d639192016-09-09 11:52:26 -07007360 mCropRegionMapper.toActiveArray(rect.left, rect.top,
7361 rect.width, rect.height);
7362
Shuzhen Wang91c14e82017-08-21 17:56:57 -07007363 convertToRegions(rect, faceRectangles+j, -1);
Thierry Strudel3d639192016-09-09 11:52:26 -07007364
Jason Lee8ce36fa2017-04-19 19:40:37 -07007365 LOGL("FD_DEBUG : Frame[%d] Face[%d] : top-left (%d, %d), "
7366 "bottom-right (%d, %d)",
7367 faceDetectionInfo->frame_id, i,
7368 faceRectangles[j + FACE_LEFT], faceRectangles[j + FACE_TOP],
7369 faceRectangles[j + FACE_RIGHT], faceRectangles[j + FACE_BOTTOM]);
7370
Thierry Strudel3d639192016-09-09 11:52:26 -07007371 j+= 4;
7372 }
7373 if (numFaces <= 0) {
7374 memset(faceIds, 0, sizeof(int32_t) * MAX_ROI);
7375 memset(faceScores, 0, sizeof(uint8_t) * MAX_ROI);
7376 memset(faceRectangles, 0, sizeof(int32_t) * MAX_ROI * 4);
7377 memset(faceLandmarks, 0, sizeof(int32_t) * MAX_ROI * 6);
7378 }
7379
7380 camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores,
7381 numFaces);
7382 camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES,
7383 faceRectangles, numFaces * 4U);
7384 if (fwk_faceDetectMode ==
7385 ANDROID_STATISTICS_FACE_DETECT_MODE_FULL) {
7386 IF_META_AVAILABLE(cam_face_landmarks_data_t, landmarks,
7387 CAM_INTF_META_FACE_LANDMARK, metadata) {
7388
7389 for (size_t i = 0; i < numFaces; i++) {
Shuzhen Wang91c14e82017-08-21 17:56:57 -07007390 cam_face_landmarks_info_t face_landmarks = landmarks->face_landmarks[i];
Thierry Strudel3d639192016-09-09 11:52:26 -07007391 // Map the co-ordinate sensor output coordinate system to active
7392 // array coordinate system.
7393 mCropRegionMapper.toActiveArray(
Shuzhen Wang91c14e82017-08-21 17:56:57 -07007394 face_landmarks.left_eye_center.x,
7395 face_landmarks.left_eye_center.y);
Thierry Strudel3d639192016-09-09 11:52:26 -07007396 mCropRegionMapper.toActiveArray(
Shuzhen Wang91c14e82017-08-21 17:56:57 -07007397 face_landmarks.right_eye_center.x,
7398 face_landmarks.right_eye_center.y);
Thierry Strudel3d639192016-09-09 11:52:26 -07007399 mCropRegionMapper.toActiveArray(
Shuzhen Wang91c14e82017-08-21 17:56:57 -07007400 face_landmarks.mouth_center.x,
7401 face_landmarks.mouth_center.y);
Thierry Strudel3d639192016-09-09 11:52:26 -07007402
Shuzhen Wang91c14e82017-08-21 17:56:57 -07007403 convertLandmarks(face_landmarks, faceLandmarks+k);
Jason Lee8ce36fa2017-04-19 19:40:37 -07007404
7405 LOGL("FD_DEBUG LANDMARK : Frame[%d] Face[%d] : "
7406 "left-eye (%d, %d), right-eye (%d, %d), mouth (%d, %d)",
7407 faceDetectionInfo->frame_id, i,
7408 faceLandmarks[k + LEFT_EYE_X],
7409 faceLandmarks[k + LEFT_EYE_Y],
7410 faceLandmarks[k + RIGHT_EYE_X],
7411 faceLandmarks[k + RIGHT_EYE_Y],
7412 faceLandmarks[k + MOUTH_X],
7413 faceLandmarks[k + MOUTH_Y]);
7414
Thierry Strudel04e026f2016-10-10 11:27:36 -07007415 k+= TOTAL_LANDMARK_INDICES;
7416 }
7417 } else {
7418 for (size_t i = 0; i < numFaces; i++) {
7419 setInvalidLandmarks(faceLandmarks+k);
7420 k+= TOTAL_LANDMARK_INDICES;
Thierry Strudel3d639192016-09-09 11:52:26 -07007421 }
7422 }
7423
Jason Lee49619db2017-04-13 12:07:22 -07007424 for (size_t i = 0; i < numFaces; i++) {
7425 faceIds[i] = faceDetectionInfo->faces[i].face_id;
7426
7427 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : faceIds=%d",
7428 faceDetectionInfo->frame_id, i, faceIds[i]);
7429 }
7430
Thierry Strudel3d639192016-09-09 11:52:26 -07007431 camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces);
7432 camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS,
7433 faceLandmarks, numFaces * 6U);
Jason Lee49619db2017-04-13 12:07:22 -07007434 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08007435 IF_META_AVAILABLE(cam_face_blink_data_t, blinks,
7436 CAM_INTF_META_FACE_BLINK, metadata) {
7437 uint8_t detected[MAX_ROI];
7438 uint8_t degree[MAX_ROI * 2];
7439 for (size_t i = 0; i < numFaces; i++) {
7440 detected[i] = blinks->blink[i].blink_detected;
7441 degree[2 * i] = blinks->blink[i].left_blink;
7442 degree[2 * i + 1] = blinks->blink[i].right_blink;
Jason Lee8ce36fa2017-04-19 19:40:37 -07007443
Jason Lee49619db2017-04-13 12:07:22 -07007444 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : "
7445 "blink_detected=%d, leye_blink=%d, reye_blink=%d",
7446 faceDetectionInfo->frame_id, i, detected[i], degree[2 * i],
7447 degree[2 * i + 1]);
Thierry Strudel54dc9782017-02-15 12:12:10 -08007448 }
7449 camMetadata.update(QCAMERA3_STATS_BLINK_DETECTED,
7450 detected, numFaces);
7451 camMetadata.update(QCAMERA3_STATS_BLINK_DEGREE,
7452 degree, numFaces * 2);
7453 }
7454 IF_META_AVAILABLE(cam_face_smile_data_t, smiles,
7455 CAM_INTF_META_FACE_SMILE, metadata) {
7456 uint8_t degree[MAX_ROI];
7457 uint8_t confidence[MAX_ROI];
7458 for (size_t i = 0; i < numFaces; i++) {
7459 degree[i] = smiles->smile[i].smile_degree;
7460 confidence[i] = smiles->smile[i].smile_confidence;
Jason Lee8ce36fa2017-04-19 19:40:37 -07007461
Jason Lee49619db2017-04-13 12:07:22 -07007462 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : "
7463 "smile_degree=%d, smile_score=%d",
7464 faceDetectionInfo->frame_id, i, degree[i], confidence[i]);
Thierry Strudel54dc9782017-02-15 12:12:10 -08007465 }
7466 camMetadata.update(QCAMERA3_STATS_SMILE_DEGREE,
7467 degree, numFaces);
7468 camMetadata.update(QCAMERA3_STATS_SMILE_CONFIDENCE,
7469 confidence, numFaces);
7470 }
7471 IF_META_AVAILABLE(cam_face_gaze_data_t, gazes,
7472 CAM_INTF_META_FACE_GAZE, metadata) {
7473 int8_t angle[MAX_ROI];
7474 int32_t direction[MAX_ROI * 3];
7475 int8_t degree[MAX_ROI * 2];
7476 for (size_t i = 0; i < numFaces; i++) {
7477 angle[i] = gazes->gaze[i].gaze_angle;
7478 direction[3 * i] = gazes->gaze[i].updown_dir;
7479 direction[3 * i + 1] = gazes->gaze[i].leftright_dir;
7480 direction[3 * i + 2] = gazes->gaze[i].roll_dir;
7481 degree[2 * i] = gazes->gaze[i].left_right_gaze;
7482 degree[2 * i + 1] = gazes->gaze[i].top_bottom_gaze;
Jason Lee8ce36fa2017-04-19 19:40:37 -07007483
7484 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : gaze_angle=%d, "
7485 "updown_dir=%d, leftright_dir=%d,, roll_dir=%d, "
7486 "left_right_gaze=%d, top_bottom_gaze=%d",
7487 faceDetectionInfo->frame_id, i, angle[i],
7488 direction[3 * i], direction[3 * i + 1],
7489 direction[3 * i + 2],
7490 degree[2 * i], degree[2 * i + 1]);
Thierry Strudel54dc9782017-02-15 12:12:10 -08007491 }
7492 camMetadata.update(QCAMERA3_STATS_GAZE_ANGLE,
7493 (uint8_t *)angle, numFaces);
7494 camMetadata.update(QCAMERA3_STATS_GAZE_DIRECTION,
7495 direction, numFaces * 3);
7496 camMetadata.update(QCAMERA3_STATS_GAZE_DEGREE,
7497 (uint8_t *)degree, numFaces * 2);
7498 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007499 }
7500 }
7501 }
7502 }
7503
7504 IF_META_AVAILABLE(uint32_t, histogramMode, CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata) {
7505 uint8_t fwk_histogramMode = (uint8_t) *histogramMode;
Shuzhen Wang14415f52016-11-16 18:26:18 -08007506 int32_t histogramBins = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007507 camMetadata.update(QCAMERA3_HISTOGRAM_MODE, &fwk_histogramMode, 1);
Shuzhen Wang14415f52016-11-16 18:26:18 -08007508 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE, &fwk_histogramMode, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007509
Shuzhen Wang14415f52016-11-16 18:26:18 -08007510 IF_META_AVAILABLE(int32_t, histBins, CAM_INTF_META_STATS_HISTOGRAM_BINS, metadata) {
7511 histogramBins = *histBins;
7512 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS, &histogramBins, 1);
7513 }
7514
7515 if (fwk_histogramMode == QCAMERA3_HISTOGRAM_MODE_ON && histogramBins > 0) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007516 IF_META_AVAILABLE(cam_hist_stats_t, stats_data, CAM_INTF_META_HISTOGRAM, metadata) {
7517 // process histogram statistics info
Shuzhen Wang14415f52016-11-16 18:26:18 -08007518 int32_t* histogramData = NULL;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007519
7520 switch (stats_data->type) {
7521 case CAM_HISTOGRAM_TYPE_BAYER:
7522 switch (stats_data->bayer_stats.data_type) {
7523 case CAM_STATS_CHANNEL_GR:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007524 histogramData = (int32_t *)stats_data->bayer_stats.gr_stats.hist_buf;
7525 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007526 case CAM_STATS_CHANNEL_GB:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007527 histogramData = (int32_t *)stats_data->bayer_stats.gb_stats.hist_buf;
7528 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007529 case CAM_STATS_CHANNEL_B:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007530 histogramData = (int32_t *)stats_data->bayer_stats.b_stats.hist_buf;
7531 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007532 case CAM_STATS_CHANNEL_Y:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007533 case CAM_STATS_CHANNEL_ALL:
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007534 case CAM_STATS_CHANNEL_R:
7535 default:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007536 histogramData = (int32_t *)stats_data->bayer_stats.r_stats.hist_buf;
7537 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007538 }
7539 break;
7540 case CAM_HISTOGRAM_TYPE_YUV:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007541 histogramData = (int32_t *)stats_data->yuv_stats.hist_buf;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007542 break;
7543 }
7544
Shuzhen Wang14415f52016-11-16 18:26:18 -08007545 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM, histogramData, histogramBins);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007546 }
7547 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007548 }
7549
7550 IF_META_AVAILABLE(uint32_t, sharpnessMapMode,
7551 CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata) {
7552 uint8_t fwk_sharpnessMapMode = (uint8_t) *sharpnessMapMode;
7553 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &fwk_sharpnessMapMode, 1);
7554 }
7555
7556 IF_META_AVAILABLE(cam_sharpness_map_t, sharpnessMap,
7557 CAM_INTF_META_STATS_SHARPNESS_MAP, metadata) {
7558 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP, (int32_t *)sharpnessMap->sharpness,
7559 CAM_MAX_MAP_WIDTH * CAM_MAX_MAP_HEIGHT * 3);
7560 }
7561
7562 IF_META_AVAILABLE(cam_lens_shading_map_t, lensShadingMap,
7563 CAM_INTF_META_LENS_SHADING_MAP, metadata) {
7564 size_t map_height = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.height,
7565 CAM_MAX_SHADING_MAP_HEIGHT);
7566 size_t map_width = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.width,
7567 CAM_MAX_SHADING_MAP_WIDTH);
7568 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP,
7569 lensShadingMap->lens_shading, 4U * map_width * map_height);
7570 }
7571
7572 IF_META_AVAILABLE(uint32_t, toneMapMode, CAM_INTF_META_TONEMAP_MODE, metadata) {
7573 uint8_t fwk_toneMapMode = (uint8_t) *toneMapMode;
7574 camMetadata.update(ANDROID_TONEMAP_MODE, &fwk_toneMapMode, 1);
7575 }
7576
7577 IF_META_AVAILABLE(cam_rgb_tonemap_curves, tonemap, CAM_INTF_META_TONEMAP_CURVES, metadata) {
7578 //Populate CAM_INTF_META_TONEMAP_CURVES
7579 /* ch0 = G, ch 1 = B, ch 2 = R*/
7580 if (tonemap->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
7581 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
7582 tonemap->tonemap_points_cnt,
7583 CAM_MAX_TONEMAP_CURVE_SIZE);
7584 tonemap->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
7585 }
7586
7587 camMetadata.update(ANDROID_TONEMAP_CURVE_GREEN,
7588 &tonemap->curves[0].tonemap_points[0][0],
7589 tonemap->tonemap_points_cnt * 2);
7590
7591 camMetadata.update(ANDROID_TONEMAP_CURVE_BLUE,
7592 &tonemap->curves[1].tonemap_points[0][0],
7593 tonemap->tonemap_points_cnt * 2);
7594
7595 camMetadata.update(ANDROID_TONEMAP_CURVE_RED,
7596 &tonemap->curves[2].tonemap_points[0][0],
7597 tonemap->tonemap_points_cnt * 2);
7598 }
7599
7600 IF_META_AVAILABLE(cam_color_correct_gains_t, colorCorrectionGains,
7601 CAM_INTF_META_COLOR_CORRECT_GAINS, metadata) {
7602 camMetadata.update(ANDROID_COLOR_CORRECTION_GAINS, colorCorrectionGains->gains,
7603 CC_GAIN_MAX);
7604 }
7605
7606 IF_META_AVAILABLE(cam_color_correct_matrix_t, colorCorrectionMatrix,
7607 CAM_INTF_META_COLOR_CORRECT_TRANSFORM, metadata) {
7608 camMetadata.update(ANDROID_COLOR_CORRECTION_TRANSFORM,
7609 (camera_metadata_rational_t *)(void *)colorCorrectionMatrix->transform_matrix,
7610 CC_MATRIX_COLS * CC_MATRIX_ROWS);
7611 }
7612
7613 IF_META_AVAILABLE(cam_profile_tone_curve, toneCurve,
7614 CAM_INTF_META_PROFILE_TONE_CURVE, metadata) {
7615 if (toneCurve->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
7616 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
7617 toneCurve->tonemap_points_cnt,
7618 CAM_MAX_TONEMAP_CURVE_SIZE);
7619 toneCurve->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
7620 }
7621 camMetadata.update(ANDROID_SENSOR_PROFILE_TONE_CURVE,
7622 (float*)toneCurve->curve.tonemap_points,
7623 toneCurve->tonemap_points_cnt * 2);
7624 }
7625
7626 IF_META_AVAILABLE(cam_color_correct_gains_t, predColorCorrectionGains,
7627 CAM_INTF_META_PRED_COLOR_CORRECT_GAINS, metadata) {
7628 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_GAINS,
7629 predColorCorrectionGains->gains, 4);
7630 }
7631
7632 IF_META_AVAILABLE(cam_color_correct_matrix_t, predColorCorrectionMatrix,
7633 CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM, metadata) {
7634 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
7635 (camera_metadata_rational_t *)(void *)predColorCorrectionMatrix->transform_matrix,
7636 CC_MATRIX_ROWS * CC_MATRIX_COLS);
7637 }
7638
7639 IF_META_AVAILABLE(float, otpWbGrGb, CAM_INTF_META_OTP_WB_GRGB, metadata) {
7640 camMetadata.update(ANDROID_SENSOR_GREEN_SPLIT, otpWbGrGb, 1);
7641 }
7642
7643 IF_META_AVAILABLE(uint32_t, blackLevelLock, CAM_INTF_META_BLACK_LEVEL_LOCK, metadata) {
7644 uint8_t fwk_blackLevelLock = (uint8_t) *blackLevelLock;
7645 camMetadata.update(ANDROID_BLACK_LEVEL_LOCK, &fwk_blackLevelLock, 1);
7646 }
7647
7648 IF_META_AVAILABLE(uint32_t, sceneFlicker, CAM_INTF_META_SCENE_FLICKER, metadata) {
7649 uint8_t fwk_sceneFlicker = (uint8_t) *sceneFlicker;
7650 camMetadata.update(ANDROID_STATISTICS_SCENE_FLICKER, &fwk_sceneFlicker, 1);
7651 }
7652
7653 IF_META_AVAILABLE(uint32_t, effectMode, CAM_INTF_PARM_EFFECT, metadata) {
7654 int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
7655 *effectMode);
7656 if (NAME_NOT_FOUND != val) {
7657 uint8_t fwk_effectMode = (uint8_t)val;
7658 camMetadata.update(ANDROID_CONTROL_EFFECT_MODE, &fwk_effectMode, 1);
7659 }
7660 }
7661
7662 IF_META_AVAILABLE(cam_test_pattern_data_t, testPatternData,
7663 CAM_INTF_META_TEST_PATTERN_DATA, metadata) {
7664 int32_t fwk_testPatternMode = lookupFwkName(TEST_PATTERN_MAP,
7665 METADATA_MAP_SIZE(TEST_PATTERN_MAP), testPatternData->mode);
7666 if (NAME_NOT_FOUND != fwk_testPatternMode) {
7667 camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &fwk_testPatternMode, 1);
7668 }
7669 int32_t fwk_testPatternData[4];
7670 fwk_testPatternData[0] = testPatternData->r;
7671 fwk_testPatternData[3] = testPatternData->b;
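        // The two green channel values map to different framework slots
        // depending on the sensor's color filter arrangement.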
7672 switch (gCamCapability[mCameraId]->color_arrangement) {
7673 case CAM_FILTER_ARRANGEMENT_RGGB:
7674 case CAM_FILTER_ARRANGEMENT_GRBG:
7675 fwk_testPatternData[1] = testPatternData->gr;
7676 fwk_testPatternData[2] = testPatternData->gb;
7677 break;
7678 case CAM_FILTER_ARRANGEMENT_GBRG:
7679 case CAM_FILTER_ARRANGEMENT_BGGR:
7680 fwk_testPatternData[2] = testPatternData->gr;
7681 fwk_testPatternData[1] = testPatternData->gb;
7682 break;
7683 default:
7684 LOGE("color arrangement %d is not supported",
7685 gCamCapability[mCameraId]->color_arrangement);
7686 break;
7687 }
7688 camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_DATA, fwk_testPatternData, 4);
7689 }
7690
7691 IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, metadata) {
7692 camMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
7693 }
7694
7695 IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, metadata) {
7696 String8 str((const char *)gps_methods);
7697 camMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
7698 }
7699
7700 IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, metadata) {
7701 camMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
7702 }
7703
7704 IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, metadata) {
7705 camMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
7706 }
7707
7708 IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, metadata) {
7709 uint8_t fwk_jpeg_quality = (uint8_t) *jpeg_quality;
7710 camMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
7711 }
7712
7713 IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, metadata) {
7714 uint8_t fwk_thumb_quality = (uint8_t) *thumb_quality;
7715 camMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
7716 }
7717
7718 IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, metadata) {
7719 int32_t fwk_thumb_size[2];
7720 fwk_thumb_size[0] = thumb_size->width;
7721 fwk_thumb_size[1] = thumb_size->height;
7722 camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
7723 }
7724
Shuzhen Wang2fea89e2017-05-08 17:02:15 -07007725 // Skip reprocess metadata if there is no input stream.
7726 if (mInputStreamInfo.dim.width > 0 && mInputStreamInfo.dim.height > 0) {
7727 IF_META_AVAILABLE(int32_t, privateData, CAM_INTF_META_PRIVATE_DATA, metadata) {
7728 camMetadata.update(QCAMERA3_PRIVATEDATA_REPROCESS,
7729 privateData,
7730 MAX_METADATA_PRIVATE_PAYLOAD_SIZE_IN_BYTES / sizeof(int32_t));
7731 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007732 }
7733
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007734 IF_META_AVAILABLE(int32_t, meteringMode, CAM_INTF_PARM_AEC_ALGO_TYPE, metadata) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08007735 camMetadata.update(QCAMERA3_EXPOSURE_METER,
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007736 meteringMode, 1);
7737 }
7738
Thierry Strudel54dc9782017-02-15 12:12:10 -08007739 IF_META_AVAILABLE(cam_asd_hdr_scene_data_t, hdr_scene_data,
7740 CAM_INTF_META_ASD_HDR_SCENE_DATA, metadata) {
7741 LOGD("hdr_scene_data: %d %f\n",
7742 hdr_scene_data->is_hdr_scene, hdr_scene_data->hdr_confidence);
7743 uint8_t isHdr = hdr_scene_data->is_hdr_scene;
7744 float isHdrConfidence = hdr_scene_data->hdr_confidence;
7745 camMetadata.update(QCAMERA3_STATS_IS_HDR_SCENE,
7746 &isHdr, 1);
7747 camMetadata.update(QCAMERA3_STATS_IS_HDR_SCENE_CONFIDENCE,
7748 &isHdrConfidence, 1);
7749 }
7750
7751
7752
Thierry Strudel3d639192016-09-09 11:52:26 -07007753 if (metadata->is_tuning_params_valid) {
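        // Serialize the tuning blob: a fixed header of six uint32_t fields
        // (data version followed by sensor/VFE/CPP/CAC/mod3 payload sizes),
        // then the sensor, VFE, CPP and CAC payloads copied back to back.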
7754 uint8_t tuning_meta_data_blob[sizeof(tuning_params_t)];
7755 uint8_t *data = (uint8_t *)&tuning_meta_data_blob[0];
7756 metadata->tuning_params.tuning_data_version = TUNING_DATA_VERSION;
7757
7758
7759 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_data_version),
7760 sizeof(uint32_t));
7761 data += sizeof(uint32_t);
7762
7763 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_sensor_data_size),
7764 sizeof(uint32_t));
7765 LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
7766 data += sizeof(uint32_t);
7767
7768 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_vfe_data_size),
7769 sizeof(uint32_t));
7770 LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
7771 data += sizeof(uint32_t);
7772
7773 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cpp_data_size),
7774 sizeof(uint32_t));
7775 LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
7776 data += sizeof(uint32_t);
7777
7778 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cac_data_size),
7779 sizeof(uint32_t));
7780 LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
7781 data += sizeof(uint32_t);
7782
7783 metadata->tuning_params.tuning_mod3_data_size = 0;
7784 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_mod3_data_size),
7785 sizeof(uint32_t));
7786 LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
7787 data += sizeof(uint32_t);
7788
7789 size_t count = MIN(metadata->tuning_params.tuning_sensor_data_size,
7790 TUNING_SENSOR_DATA_MAX);
7791 memcpy(data, ((uint8_t *)&metadata->tuning_params.data),
7792 count);
7793 data += count;
7794
7795 count = MIN(metadata->tuning_params.tuning_vfe_data_size,
7796 TUNING_VFE_DATA_MAX);
7797 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_VFE_DATA_OFFSET]),
7798 count);
7799 data += count;
7800
7801 count = MIN(metadata->tuning_params.tuning_cpp_data_size,
7802 TUNING_CPP_DATA_MAX);
7803 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CPP_DATA_OFFSET]),
7804 count);
7805 data += count;
7806
7807 count = MIN(metadata->tuning_params.tuning_cac_data_size,
7808 TUNING_CAC_DATA_MAX);
7809 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CAC_DATA_OFFSET]),
7810 count);
7811 data += count;
7812
7813 camMetadata.update(QCAMERA3_TUNING_META_DATA_BLOB,
7814 (int32_t *)(void *)tuning_meta_data_blob,
7815 (size_t)(data-tuning_meta_data_blob) / sizeof(uint32_t));
7816 }
7817
7818 IF_META_AVAILABLE(cam_neutral_col_point_t, neuColPoint,
7819 CAM_INTF_META_NEUTRAL_COL_POINT, metadata) {
7820 camMetadata.update(ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
7821 (camera_metadata_rational_t *)(void *)neuColPoint->neutral_col_point,
7822 NEUTRAL_COL_POINTS);
7823 }
7824
7825 IF_META_AVAILABLE(uint32_t, shadingMapMode, CAM_INTF_META_LENS_SHADING_MAP_MODE, metadata) {
7826 uint8_t fwk_shadingMapMode = (uint8_t) *shadingMapMode;
7827 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &fwk_shadingMapMode, 1);
7828 }
7829
7830 IF_META_AVAILABLE(cam_area_t, hAeRegions, CAM_INTF_META_AEC_ROI, metadata) {
7831 int32_t aeRegions[REGIONS_TUPLE_COUNT];
7832 // Adjust crop region from sensor output coordinate system to active
7833 // array coordinate system.
Shuzhen Wang91c14e82017-08-21 17:56:57 -07007834 cam_rect_t hAeRect = hAeRegions->rect;
7835 mCropRegionMapper.toActiveArray(hAeRect.left, hAeRect.top,
7836 hAeRect.width, hAeRect.height);
Thierry Strudel3d639192016-09-09 11:52:26 -07007837
Shuzhen Wang91c14e82017-08-21 17:56:57 -07007838 convertToRegions(hAeRect, aeRegions, hAeRegions->weight);
Thierry Strudel3d639192016-09-09 11:52:26 -07007839 camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions,
7840 REGIONS_TUPLE_COUNT);
7841 LOGD("Metadata : ANDROID_CONTROL_AE_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
7842 aeRegions[0], aeRegions[1], aeRegions[2], aeRegions[3],
Shuzhen Wang91c14e82017-08-21 17:56:57 -07007843 hAeRect.left, hAeRect.top, hAeRect.width,
7844 hAeRect.height);
Thierry Strudel3d639192016-09-09 11:52:26 -07007845 }
7846
Shuzhen Wang181c57b2017-07-21 11:39:44 -07007847 if (!pendingRequest.focusStateSent) {
7848 if (pendingRequest.focusStateValid) {
7849 camMetadata.update(ANDROID_CONTROL_AF_STATE, &pendingRequest.focusState, 1);
7850 LOGD("Metadata : ANDROID_CONTROL_AF_STATE %u", pendingRequest.focusState);
Shuzhen Wang0cb8cdf2016-07-14 11:56:49 -07007851 } else {
Shuzhen Wang181c57b2017-07-21 11:39:44 -07007852 IF_META_AVAILABLE(uint32_t, afState, CAM_INTF_META_AF_STATE, metadata) {
7853 uint8_t fwk_afState = (uint8_t) *afState;
7854 camMetadata.update(ANDROID_CONTROL_AF_STATE, &fwk_afState, 1);
7855 LOGD("Metadata : ANDROID_CONTROL_AF_STATE %u", *afState);
7856 }
Shuzhen Wang0cb8cdf2016-07-14 11:56:49 -07007857 }
7858 }
7859
Thierry Strudel3d639192016-09-09 11:52:26 -07007860 IF_META_AVAILABLE(float, focusDistance, CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata) {
7861 camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1);
7862 }
7863
7864 IF_META_AVAILABLE(float, focusRange, CAM_INTF_META_LENS_FOCUS_RANGE, metadata) {
7865 camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 2);
7866 }
7867
7868 IF_META_AVAILABLE(cam_af_lens_state_t, lensState, CAM_INTF_META_LENS_STATE, metadata) {
7869 uint8_t fwk_lensState = *lensState;
7870 camMetadata.update(ANDROID_LENS_STATE , &fwk_lensState, 1);
7871 }
7872
Thierry Strudel3d639192016-09-09 11:52:26 -07007873 IF_META_AVAILABLE(uint32_t, hal_ab_mode, CAM_INTF_PARM_ANTIBANDING, metadata) {
Shuzhen Wangf6890e02016-08-12 14:28:54 -07007874 uint32_t ab_mode = *hal_ab_mode;
7875 if (ab_mode == CAM_ANTIBANDING_MODE_AUTO_60HZ ||
7876 ab_mode == CAM_ANTIBANDING_MODE_AUTO_50HZ) {
7877 ab_mode = CAM_ANTIBANDING_MODE_AUTO;
7878 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007879 int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
Shuzhen Wangf6890e02016-08-12 14:28:54 -07007880 ab_mode);
Thierry Strudel3d639192016-09-09 11:52:26 -07007881 if (NAME_NOT_FOUND != val) {
7882 uint8_t fwk_ab_mode = (uint8_t)val;
7883 camMetadata.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &fwk_ab_mode, 1);
7884 }
7885 }
7886
7887 IF_META_AVAILABLE(uint32_t, bestshotMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
7888 int val = lookupFwkName(SCENE_MODES_MAP,
7889 METADATA_MAP_SIZE(SCENE_MODES_MAP), *bestshotMode);
7890 if (NAME_NOT_FOUND != val) {
7891 uint8_t fwkBestshotMode = (uint8_t)val;
7892 camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkBestshotMode, 1);
7893 LOGD("Metadata : ANDROID_CONTROL_SCENE_MODE");
7894 } else {
7895 LOGH("Metadata not found : ANDROID_CONTROL_SCENE_MODE");
7896 }
7897 }
7898
7899 IF_META_AVAILABLE(uint32_t, mode, CAM_INTF_META_MODE, metadata) {
7900 uint8_t fwk_mode = (uint8_t) *mode;
7901 camMetadata.update(ANDROID_CONTROL_MODE, &fwk_mode, 1);
7902 }
7903
7904 /* Constant metadata values to be updated */
7905 uint8_t hotPixelModeFast = ANDROID_HOT_PIXEL_MODE_FAST;
7906 camMetadata.update(ANDROID_HOT_PIXEL_MODE, &hotPixelModeFast, 1);
7907
7908 uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
7909 camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
7910
7911 int32_t hotPixelMap[2];
7912 camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP, &hotPixelMap[0], 0);
7913
7914 // CDS
7915 IF_META_AVAILABLE(int32_t, cds, CAM_INTF_PARM_CDS_MODE, metadata) {
7916 camMetadata.update(QCAMERA3_CDS_MODE, cds, 1);
7917 }
7918
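    // The blocks below also track feature on/off transitions in mCurrFeatureState
    // so that toggles (video HDR, IR, TNR) can be logged for profiling.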
Thierry Strudel04e026f2016-10-10 11:27:36 -07007919 IF_META_AVAILABLE(cam_sensor_hdr_type_t, vhdr, CAM_INTF_PARM_SENSOR_HDR, metadata) {
7920 int32_t fwk_hdr;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007921 int8_t curr_hdr_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) != 0);
Thierry Strudel04e026f2016-10-10 11:27:36 -07007922 if(*vhdr == CAM_SENSOR_HDR_OFF) {
7923 fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_OFF;
7924 } else {
7925 fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_ON;
7926 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08007927
7928 if(fwk_hdr != curr_hdr_state) {
7929 LOGH("PROFILE_META_HDR_TOGGLED value=%d", fwk_hdr);
7930 if(fwk_hdr)
7931 mCurrFeatureState |= CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
7932 else
7933 mCurrFeatureState &= ~CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
7934 }
Thierry Strudel04e026f2016-10-10 11:27:36 -07007935 camMetadata.update(QCAMERA3_VIDEO_HDR_MODE, &fwk_hdr, 1);
7936 }
7937
Thierry Strudel54dc9782017-02-15 12:12:10 -08007938 //binning correction
7939 IF_META_AVAILABLE(cam_binning_correction_mode_t, bin_correction,
7940 CAM_INTF_META_BINNING_CORRECTION_MODE, metadata) {
7941 int32_t fwk_bin_mode = (int32_t) *bin_correction;
7942 camMetadata.update(QCAMERA3_BINNING_CORRECTION_MODE, &fwk_bin_mode, 1);
7943 }
7944
Thierry Strudel04e026f2016-10-10 11:27:36 -07007945 IF_META_AVAILABLE(cam_ir_mode_type_t, ir, CAM_INTF_META_IR_MODE, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007946 int32_t fwk_ir = (int32_t) *ir;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007947 int8_t curr_ir_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_IR ) != 0);
7948 int8_t is_ir_on = 0;
7949
7950 (fwk_ir > 0) ? (is_ir_on = 1) : (is_ir_on = 0) ;
7951 if(is_ir_on != curr_ir_state) {
7952 LOGH("PROFILE_META_IR_TOGGLED value=%d", fwk_ir);
7953 if(is_ir_on)
7954 mCurrFeatureState |= CAM_QCOM_FEATURE_IR;
7955 else
7956 mCurrFeatureState &= ~CAM_QCOM_FEATURE_IR;
7957 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007958 camMetadata.update(QCAMERA3_IR_MODE, &fwk_ir, 1);
Thierry Strudel04e026f2016-10-10 11:27:36 -07007959 }
7960
Thierry Strudel269c81a2016-10-12 12:13:59 -07007961 // AEC SPEED
7962 IF_META_AVAILABLE(float, aec, CAM_INTF_META_AEC_CONVERGENCE_SPEED, metadata) {
7963 camMetadata.update(QCAMERA3_AEC_CONVERGENCE_SPEED, aec, 1);
7964 }
7965
7966 // AWB SPEED
7967 IF_META_AVAILABLE(float, awb, CAM_INTF_META_AWB_CONVERGENCE_SPEED, metadata) {
7968 camMetadata.update(QCAMERA3_AWB_CONVERGENCE_SPEED, awb, 1);
7969 }
7970
Thierry Strudel3d639192016-09-09 11:52:26 -07007971 // TNR
7972 IF_META_AVAILABLE(cam_denoise_param_t, tnr, CAM_INTF_PARM_TEMPORAL_DENOISE, metadata) {
7973 uint8_t tnr_enable = tnr->denoise_enable;
7974 int32_t tnr_process_type = (int32_t)tnr->process_plates;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007975 int8_t curr_tnr_state = ((mCurrFeatureState & CAM_QTI_FEATURE_SW_TNR) != 0) ;
7976 int8_t is_tnr_on = 0;
7977
7978 (tnr_enable > 0) ? (is_tnr_on = 1) : (is_tnr_on = 0);
7979 if(is_tnr_on != curr_tnr_state) {
7980 LOGH("PROFILE_META_TNR_TOGGLED value=%d", tnr_enable);
7981 if(is_tnr_on)
7982 mCurrFeatureState |= CAM_QTI_FEATURE_SW_TNR;
7983 else
7984 mCurrFeatureState &= ~CAM_QTI_FEATURE_SW_TNR;
7985 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007986
7987 camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
7988 camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
7989 }
7990
7991 // Reprocess crop data
7992 IF_META_AVAILABLE(cam_crop_data_t, crop_data, CAM_INTF_META_CROP_DATA, metadata) {
7993 uint8_t cnt = crop_data->num_of_streams;
7994 if ( (0 >= cnt) || (cnt > MAX_NUM_STREAMS)) {
7995 // mm-qcamera-daemon only posts crop_data for streams
7996 // not linked to pproc, so the absence of valid crop metadata is not
7997 // necessarily an error case.
7998 LOGD("No valid crop metadata entries");
7999 } else {
8000 uint32_t reproc_stream_id;
8001 if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
8002 LOGD("No reprocessible stream found, ignore crop data");
8003 } else {
8004 int rc = NO_ERROR;
8005 Vector<int32_t> roi_map;
8006 int32_t *crop = new int32_t[cnt*4];
8007 if (NULL == crop) {
8008 rc = NO_MEMORY;
8009 }
8010 if (NO_ERROR == rc) {
8011 int32_t streams_found = 0;
8012 for (size_t i = 0; i < cnt; i++) {
8013 if (crop_data->crop_info[i].stream_id == reproc_stream_id) {
8014 if (pprocDone) {
8015 // HAL already does internal reprocessing,
8016 // either via reprocessing before JPEG encoding,
8017 // or offline postprocessing for pproc bypass case.
8018 crop[0] = 0;
8019 crop[1] = 0;
8020 crop[2] = mInputStreamInfo.dim.width;
8021 crop[3] = mInputStreamInfo.dim.height;
8022 } else {
8023 crop[0] = crop_data->crop_info[i].crop.left;
8024 crop[1] = crop_data->crop_info[i].crop.top;
8025 crop[2] = crop_data->crop_info[i].crop.width;
8026 crop[3] = crop_data->crop_info[i].crop.height;
8027 }
8028 roi_map.add(crop_data->crop_info[i].roi_map.left);
8029 roi_map.add(crop_data->crop_info[i].roi_map.top);
8030 roi_map.add(crop_data->crop_info[i].roi_map.width);
8031 roi_map.add(crop_data->crop_info[i].roi_map.height);
8032 streams_found++;
8033 LOGD("Adding reprocess crop data for stream %dx%d, %dx%d",
8034 crop[0], crop[1], crop[2], crop[3]);
8035 LOGD("Adding reprocess crop roi map for stream %dx%d, %dx%d",
8036 crop_data->crop_info[i].roi_map.left,
8037 crop_data->crop_info[i].roi_map.top,
8038 crop_data->crop_info[i].roi_map.width,
8039 crop_data->crop_info[i].roi_map.height);
8040 break;
8041
8042 }
8043 }
8044 camMetadata.update(QCAMERA3_CROP_COUNT_REPROCESS,
8045 &streams_found, 1);
8046 camMetadata.update(QCAMERA3_CROP_REPROCESS,
8047 crop, (size_t)(streams_found * 4));
8048 if (roi_map.array()) {
8049 camMetadata.update(QCAMERA3_CROP_ROI_MAP_REPROCESS,
8050 roi_map.array(), roi_map.size());
8051 }
8052 }
8053 if (crop) {
8054 delete [] crop;
8055 }
8056 }
8057 }
8058 }
8059
8060 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
8061 // Regardless of whether CAC is supported, CTS expects the CAC result to be non-NULL,
8062 // so hardcode the CAC result to OFF mode.
8063 uint8_t fwkCacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
8064 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &fwkCacMode, 1);
8065 } else {
8066 IF_META_AVAILABLE(cam_aberration_mode_t, cacMode, CAM_INTF_PARM_CAC, metadata) {
8067 int val = lookupFwkName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
8068 *cacMode);
8069 if (NAME_NOT_FOUND != val) {
8070 uint8_t resultCacMode = (uint8_t)val;
8071 // Check whether the CAC result from the callback matches the framework-set CAC mode.
8072 // If not, report the CAC mode that came in the corresponding request.
Shuzhen Wang181c57b2017-07-21 11:39:44 -07008073 if (pendingRequest.fwkCacMode != resultCacMode) {
8074 resultCacMode = pendingRequest.fwkCacMode;
Thierry Strudel3d639192016-09-09 11:52:26 -07008075 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08008076 //Check if CAC is disabled by property
8077 if (m_cacModeDisabled) {
8078 resultCacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
8079 }
8080
Shuzhen Wang181c57b2017-07-21 11:39:44 -07008081 LOGD("fwk_cacMode=%d resultCacMode=%d", pendingRequest.fwkCacMode, resultCacMode);
Thierry Strudel3d639192016-09-09 11:52:26 -07008082 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &resultCacMode, 1);
8083 } else {
8084 LOGE("Invalid CAC camera parameter: %d", *cacMode);
8085 }
8086 }
8087 }
8088
8089 // Post blob of cam_cds_data through vendor tag.
8090 IF_META_AVAILABLE(cam_cds_data_t, cdsInfo, CAM_INTF_META_CDS_DATA, metadata) {
8091 uint8_t cnt = cdsInfo->num_of_streams;
8092 cam_cds_data_t cdsDataOverride;
8093 memset(&cdsDataOverride, 0, sizeof(cdsDataOverride));
8094 cdsDataOverride.session_cds_enable = cdsInfo->session_cds_enable;
8095 cdsDataOverride.num_of_streams = 1;
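        // Only the CDS setting of the reprocessible output stream is forwarded
        // in the override blob; entries for other streams are ignored.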
8096 if ((0 < cnt) && (cnt <= MAX_NUM_STREAMS)) {
8097 uint32_t reproc_stream_id;
8098 if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
8099 LOGD("No reprocessible stream found, ignore cds data");
8100 } else {
8101 for (size_t i = 0; i < cnt; i++) {
8102 if (cdsInfo->cds_info[i].stream_id ==
8103 reproc_stream_id) {
8104 cdsDataOverride.cds_info[0].cds_enable =
8105 cdsInfo->cds_info[i].cds_enable;
8106 break;
8107 }
8108 }
8109 }
8110 } else {
8111 LOGD("Invalid stream count %d in CDS_DATA", cnt);
8112 }
8113 camMetadata.update(QCAMERA3_CDS_INFO,
8114 (uint8_t *)&cdsDataOverride,
8115 sizeof(cam_cds_data_t));
8116 }
8117
8118 // Ldaf calibration data
8119 if (!mLdafCalibExist) {
8120 IF_META_AVAILABLE(uint32_t, ldafCalib,
8121 CAM_INTF_META_LDAF_EXIF, metadata) {
8122 mLdafCalibExist = true;
8123 mLdafCalib[0] = ldafCalib[0];
8124 mLdafCalib[1] = ldafCalib[1];
8125 LOGD("ldafCalib[0] is %d, ldafCalib[1] is %d",
8126 ldafCalib[0], ldafCalib[1]);
8127 }
8128 }
8129
Thierry Strudel54dc9782017-02-15 12:12:10 -08008130 // EXIF debug data through vendor tag
8131 /*
8132 * Mobicat Mask can assume 3 values:
8133 * 1 refers to Mobicat data,
8134 * 2 refers to Stats Debug and Exif Debug Data
8135 * 3 refers to Mobicat and Stats Debug Data
8136 * We want to make sure that we are sending Exif debug data
8137 * only when Mobicat Mask is 2.
8138 */
8139 if ((mExifParams.debug_params != NULL) && (getMobicatMask() == 2)) {
8140 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB,
8141 (uint8_t *)(void *)mExifParams.debug_params,
8142 sizeof(mm_jpeg_debug_exif_params_t));
8143 }
8144
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008145 // Reprocess and DDM debug data through vendor tag
8146 cam_reprocess_info_t repro_info;
8147 memset(&repro_info, 0, sizeof(cam_reprocess_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008148 IF_META_AVAILABLE(cam_stream_crop_info_t, sensorCropInfo,
8149 CAM_INTF_META_SNAP_CROP_INFO_SENSOR, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008150 memcpy(&(repro_info.sensor_crop_info), sensorCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008151 }
8152 IF_META_AVAILABLE(cam_stream_crop_info_t, camifCropInfo,
8153 CAM_INTF_META_SNAP_CROP_INFO_CAMIF, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008154 memcpy(&(repro_info.camif_crop_info), camifCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008155 }
8156 IF_META_AVAILABLE(cam_stream_crop_info_t, ispCropInfo,
8157 CAM_INTF_META_SNAP_CROP_INFO_ISP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008158 memcpy(&(repro_info.isp_crop_info), ispCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008159 }
8160 IF_META_AVAILABLE(cam_stream_crop_info_t, cppCropInfo,
8161 CAM_INTF_META_SNAP_CROP_INFO_CPP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008162 memcpy(&(repro_info.cpp_crop_info), cppCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008163 }
8164 IF_META_AVAILABLE(cam_focal_length_ratio_t, ratio,
8165 CAM_INTF_META_AF_FOCAL_LENGTH_RATIO, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008166 memcpy(&(repro_info.af_focal_length_ratio), ratio, sizeof(cam_focal_length_ratio_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008167 }
8168 IF_META_AVAILABLE(int32_t, flip, CAM_INTF_PARM_FLIP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008169 memcpy(&(repro_info.pipeline_flip), flip, sizeof(int32_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008170 }
8171 IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
8172 CAM_INTF_PARM_ROTATION, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008173 memcpy(&(repro_info.rotation_info), rotationInfo, sizeof(cam_rotation_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008174 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008175 IF_META_AVAILABLE(cam_area_t, afRoi, CAM_INTF_META_AF_ROI, metadata) {
8176 memcpy(&(repro_info.af_roi), afRoi, sizeof(cam_area_t));
8177 }
8178 IF_META_AVAILABLE(cam_dyn_img_data_t, dynMask, CAM_INTF_META_IMG_DYN_FEAT, metadata) {
8179 memcpy(&(repro_info.dyn_mask), dynMask, sizeof(cam_dyn_img_data_t));
8180 }
8181 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB,
8182 (uint8_t *)&repro_info, sizeof(cam_reprocess_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008183
Thierry Strudel295a0ca2016-11-03 18:38:47 -07008184 // INSTANT AEC MODE
8185 IF_META_AVAILABLE(uint8_t, instant_aec_mode,
8186 CAM_INTF_PARM_INSTANT_AEC, metadata) {
8187 camMetadata.update(QCAMERA3_INSTANT_AEC_MODE, instant_aec_mode, 1);
8188 }
8189
Shuzhen Wange763e802016-03-31 10:24:29 -07008190 // AF scene change
8191 IF_META_AVAILABLE(uint8_t, afSceneChange, CAM_INTF_META_AF_SCENE_CHANGE, metadata) {
8192 camMetadata.update(NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE, afSceneChange, 1);
8193 }
8194
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07008195 // Enable ZSL
8196 if (enableZsl != nullptr) {
8197 uint8_t value = *enableZsl ?
8198 ANDROID_CONTROL_ENABLE_ZSL_TRUE : ANDROID_CONTROL_ENABLE_ZSL_FALSE;
8199 camMetadata.update(ANDROID_CONTROL_ENABLE_ZSL, &value, 1);
8200 }
8201
Xu Han821ea9c2017-05-23 09:00:40 -07008202 // OIS Data
8203 IF_META_AVAILABLE(cam_frame_ois_info_t, frame_ois_data, CAM_INTF_META_FRAME_OIS_DATA, metadata) {
8204 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_FRAME_TIMESTAMP_VSYNC,
8205 &(frame_ois_data->frame_sof_timestamp_vsync), 1);
8206 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_FRAME_TIMESTAMP_BOOTTIME,
8207 &(frame_ois_data->frame_sof_timestamp_boottime), 1);
8208 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_TIMESTAMPS_BOOTTIME,
8209 frame_ois_data->ois_sample_timestamp_boottime, frame_ois_data->num_ois_sample);
8210 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_SHIFT_X,
8211 frame_ois_data->ois_sample_shift_x, frame_ois_data->num_ois_sample);
8212 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_SHIFT_Y,
8213 frame_ois_data->ois_sample_shift_y, frame_ois_data->num_ois_sample);
Xue Tu2c3e9142017-08-18 16:23:52 -07008214 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_SHIFT_PIXEL_X,
8215 frame_ois_data->ois_sample_shift_pixel_x, frame_ois_data->num_ois_sample);
8216 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_SHIFT_PIXEL_Y,
8217 frame_ois_data->ois_sample_shift_pixel_y, frame_ois_data->num_ois_sample);
Xu Han821ea9c2017-05-23 09:00:40 -07008218 }
8219
Thierry Strudel3d639192016-09-09 11:52:26 -07008220 resultMetadata = camMetadata.release();
8221 return resultMetadata;
8222}
8223
8224/*===========================================================================
8225 * FUNCTION : saveExifParams
8226 *
8227 * DESCRIPTION: Save EXIF debug parameters from the metadata callback into mExifParams
8228 *
8229 * PARAMETERS :
8230 * @metadata : metadata information from callback
8231 *
8232 * RETURN : none
8233 *
8234 *==========================================================================*/
8235void QCamera3HardwareInterface::saveExifParams(metadata_buffer_t *metadata)
8236{
8237 IF_META_AVAILABLE(cam_ae_exif_debug_t, ae_exif_debug_params,
8238 CAM_INTF_META_EXIF_DEBUG_AE, metadata) {
8239 if (mExifParams.debug_params) {
8240 mExifParams.debug_params->ae_debug_params = *ae_exif_debug_params;
8241 mExifParams.debug_params->ae_debug_params_valid = TRUE;
8242 }
8243 }
8244 IF_META_AVAILABLE(cam_awb_exif_debug_t,awb_exif_debug_params,
8245 CAM_INTF_META_EXIF_DEBUG_AWB, metadata) {
8246 if (mExifParams.debug_params) {
8247 mExifParams.debug_params->awb_debug_params = *awb_exif_debug_params;
8248 mExifParams.debug_params->awb_debug_params_valid = TRUE;
8249 }
8250 }
8251 IF_META_AVAILABLE(cam_af_exif_debug_t,af_exif_debug_params,
8252 CAM_INTF_META_EXIF_DEBUG_AF, metadata) {
8253 if (mExifParams.debug_params) {
8254 mExifParams.debug_params->af_debug_params = *af_exif_debug_params;
8255 mExifParams.debug_params->af_debug_params_valid = TRUE;
8256 }
8257 }
8258 IF_META_AVAILABLE(cam_asd_exif_debug_t, asd_exif_debug_params,
8259 CAM_INTF_META_EXIF_DEBUG_ASD, metadata) {
8260 if (mExifParams.debug_params) {
8261 mExifParams.debug_params->asd_debug_params = *asd_exif_debug_params;
8262 mExifParams.debug_params->asd_debug_params_valid = TRUE;
8263 }
8264 }
8265 IF_META_AVAILABLE(cam_stats_buffer_exif_debug_t,stats_exif_debug_params,
8266 CAM_INTF_META_EXIF_DEBUG_STATS, metadata) {
8267 if (mExifParams.debug_params) {
8268 mExifParams.debug_params->stats_debug_params = *stats_exif_debug_params;
8269 mExifParams.debug_params->stats_debug_params_valid = TRUE;
8270 }
8271 }
8272 IF_META_AVAILABLE(cam_bestats_buffer_exif_debug_t,bestats_exif_debug_params,
8273 CAM_INTF_META_EXIF_DEBUG_BESTATS, metadata) {
8274 if (mExifParams.debug_params) {
8275 mExifParams.debug_params->bestats_debug_params = *bestats_exif_debug_params;
8276 mExifParams.debug_params->bestats_debug_params_valid = TRUE;
8277 }
8278 }
8279 IF_META_AVAILABLE(cam_bhist_buffer_exif_debug_t, bhist_exif_debug_params,
8280 CAM_INTF_META_EXIF_DEBUG_BHIST, metadata) {
8281 if (mExifParams.debug_params) {
8282 mExifParams.debug_params->bhist_debug_params = *bhist_exif_debug_params;
8283 mExifParams.debug_params->bhist_debug_params_valid = TRUE;
8284 }
8285 }
8286 IF_META_AVAILABLE(cam_q3a_tuning_info_t, q3a_tuning_exif_debug_params,
8287 CAM_INTF_META_EXIF_DEBUG_3A_TUNING, metadata) {
8288 if (mExifParams.debug_params) {
8289 mExifParams.debug_params->q3a_tuning_debug_params = *q3a_tuning_exif_debug_params;
8290 mExifParams.debug_params->q3a_tuning_debug_params_valid = TRUE;
8291 }
8292 }
8293}
8294
8295/*===========================================================================
8296 * FUNCTION : get3AExifParams
8297 *
8298 * DESCRIPTION: Return the cached EXIF parameters, including 3A debug data
8299 *
8300 * PARAMETERS : none
8301 *
8302 *
8303 * RETURN : mm_jpeg_exif_params_t
8304 *
8305 *==========================================================================*/
8306mm_jpeg_exif_params_t QCamera3HardwareInterface::get3AExifParams()
8307{
8308 return mExifParams;
8309}
8310
8311/*===========================================================================
8312 * FUNCTION : translateCbUrgentMetadataToResultMetadata
8313 *
8314 * DESCRIPTION:
8315 *
8316 * PARAMETERS :
8317 * @metadata : metadata information from callback
Shuzhen Wang94ddf072017-03-12 19:47:23 -07008318 * @lastUrgentMetadataInBatch: Boolean to indicate whether this is the last
8319 * urgent metadata in a batch. Always true for
8320 * non-batch mode.
Shuzhen Wang181c57b2017-07-21 11:39:44 -07008321 * @frame_number : frame number for this urgent metadata
Shuzhen Wang485e2442017-08-02 12:21:08 -07008322 * @isJumpstartMetadata: Whether this is a partial metadata for jumpstart,
8323 * i.e. even though it doesn't map to a valid partial
8324 * frame number, its metadata entries should be kept.
Thierry Strudel3d639192016-09-09 11:52:26 -07008325 * RETURN : camera_metadata_t*
8326 * metadata in a format specified by fwk
8327 *==========================================================================*/
8328camera_metadata_t*
8329QCamera3HardwareInterface::translateCbUrgentMetadataToResultMetadata
Shuzhen Wang181c57b2017-07-21 11:39:44 -07008330 (metadata_buffer_t *metadata, bool lastUrgentMetadataInBatch,
Shuzhen Wang485e2442017-08-02 12:21:08 -07008331 uint32_t frame_number, bool isJumpstartMetadata)
Thierry Strudel3d639192016-09-09 11:52:26 -07008332{
8333 CameraMetadata camMetadata;
8334 camera_metadata_t *resultMetadata;
8335
Shuzhen Wang485e2442017-08-02 12:21:08 -07008336 if (!lastUrgentMetadataInBatch && !isJumpstartMetadata) {
Shuzhen Wang94ddf072017-03-12 19:47:23 -07008337 /* In batch mode, use empty metadata if this is not the last in batch
8338 */
8339 resultMetadata = allocate_camera_metadata(0, 0);
8340 return resultMetadata;
8341 }
Thierry Strudel3d639192016-09-09 11:52:26 -07008342
8343 IF_META_AVAILABLE(uint32_t, whiteBalanceState, CAM_INTF_META_AWB_STATE, metadata) {
8344 uint8_t fwk_whiteBalanceState = (uint8_t) *whiteBalanceState;
8345 camMetadata.update(ANDROID_CONTROL_AWB_STATE, &fwk_whiteBalanceState, 1);
8346 LOGD("urgent Metadata : ANDROID_CONTROL_AWB_STATE %u", *whiteBalanceState);
8347 }
8348
8349 IF_META_AVAILABLE(cam_trigger_t, aecTrigger, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER, metadata) {
8350 camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
8351 &aecTrigger->trigger, 1);
8352 camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID,
8353 &aecTrigger->trigger_id, 1);
8354 LOGD("urgent Metadata : CAM_INTF_META_AEC_PRECAPTURE_TRIGGER: %d",
8355 aecTrigger->trigger);
8356 LOGD("urgent Metadata : ANDROID_CONTROL_AE_PRECAPTURE_ID: %d",
8357 aecTrigger->trigger_id);
8358 }
8359
8360 IF_META_AVAILABLE(uint32_t, ae_state, CAM_INTF_META_AEC_STATE, metadata) {
8361 uint8_t fwk_ae_state = (uint8_t) *ae_state;
8362 camMetadata.update(ANDROID_CONTROL_AE_STATE, &fwk_ae_state, 1);
8363 LOGD("urgent Metadata : ANDROID_CONTROL_AE_STATE %u", *ae_state);
8364 }
8365
Shuzhen Wang181c57b2017-07-21 11:39:44 -07008366 IF_META_AVAILABLE(uint32_t, focusMode, CAM_INTF_PARM_FOCUS_MODE, metadata) {
8367 int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP), *focusMode);
8368 if (NAME_NOT_FOUND != val) {
8369 uint8_t fwkAfMode = (uint8_t)val;
8370 camMetadata.update(ANDROID_CONTROL_AF_MODE, &fwkAfMode, 1);
8371 LOGD("urgent Metadata : ANDROID_CONTROL_AF_MODE %d", val);
8372 } else {
8373 LOGH("urgent Metadata not found : ANDROID_CONTROL_AF_MODE %d",
8374 val);
8375 }
Thierry Strudel3d639192016-09-09 11:52:26 -07008376 }
8377
Shuzhen Wang181c57b2017-07-21 11:39:44 -07008378 IF_META_AVAILABLE(cam_trigger_t, af_trigger, CAM_INTF_META_AF_TRIGGER, metadata) {
8379 LOGD("urgent Metadata : CAM_INTF_META_AF_TRIGGER = %d",
8380 af_trigger->trigger);
8381 LOGD("urgent Metadata : ANDROID_CONTROL_AF_TRIGGER_ID = %d",
8382 af_trigger->trigger_id);
8383
8384 IF_META_AVAILABLE(uint32_t, afState, CAM_INTF_META_AF_STATE, metadata) {
8385 mAfTrigger = *af_trigger;
8386 uint32_t fwk_AfState = (uint32_t) *afState;
8387
8388 // If this is the result for a new trigger, check if there is new early
8389 // af state. If there is, use the last af state for all results
8390 // preceding current partial frame number.
8391 for (auto & pendingRequest : mPendingRequestsList) {
8392 if (pendingRequest.frame_number < frame_number) {
8393 pendingRequest.focusStateValid = true;
8394 pendingRequest.focusState = fwk_AfState;
8395 } else if (pendingRequest.frame_number == frame_number) {
8396 IF_META_AVAILABLE(uint32_t, earlyAfState, CAM_INTF_META_EARLY_AF_STATE, metadata) {
8397 // Check if early AF state for trigger exists. If yes, send AF state as
8398 // partial result for better latency.
8399 uint8_t fwkEarlyAfState = (uint8_t) *earlyAfState;
8400 pendingRequest.focusStateSent = true;
8401 camMetadata.update(ANDROID_CONTROL_AF_STATE, &fwkEarlyAfState, 1);
8402 LOGD("urgent Metadata(%d) : ANDROID_CONTROL_AF_STATE %u",
8403 frame_number, fwkEarlyAfState);
8404 }
8405 }
8406 }
8407 }
8408 }
8409 camMetadata.update(ANDROID_CONTROL_AF_TRIGGER,
8410 &mAfTrigger.trigger, 1);
8411 camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, &mAfTrigger.trigger_id, 1);
8412
Shuzhen Wang6ce35e62017-03-27 18:00:41 -07008413 IF_META_AVAILABLE(cam_area_t, hAfRegions, CAM_INTF_META_AF_ROI, metadata) {
8414 /*af regions*/
Shuzhen Wang91c14e82017-08-21 17:56:57 -07008415 cam_rect_t hAfRect = hAfRegions->rect;
Shuzhen Wang6ce35e62017-03-27 18:00:41 -07008416 int32_t afRegions[REGIONS_TUPLE_COUNT];
8417 // Adjust crop region from sensor output coordinate system to active
8418 // array coordinate system.
Shuzhen Wang91c14e82017-08-21 17:56:57 -07008419 mCropRegionMapper.toActiveArray(hAfRect.left, hAfRect.top,
8420 hAfRect.width, hAfRect.height);
Shuzhen Wang6ce35e62017-03-27 18:00:41 -07008421
Shuzhen Wang91c14e82017-08-21 17:56:57 -07008422 convertToRegions(hAfRect, afRegions, hAfRegions->weight);
Shuzhen Wang6ce35e62017-03-27 18:00:41 -07008423 camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions,
8424 REGIONS_TUPLE_COUNT);
8425 LOGD("Metadata : ANDROID_CONTROL_AF_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
8426 afRegions[0], afRegions[1], afRegions[2], afRegions[3],
Shuzhen Wang91c14e82017-08-21 17:56:57 -07008427 hAfRect.left, hAfRect.top, hAfRect.width,
8428 hAfRect.height);
Shuzhen Wang6ce35e62017-03-27 18:00:41 -07008429 }
8430
Shuzhen Wangcc386c52017-03-29 09:28:08 -07008431 // AF region confidence
8432 IF_META_AVAILABLE(int32_t, afRegionConfidence, CAM_INTF_META_AF_REGIONS_CONFIDENCE, metadata) {
8433 camMetadata.update(NEXUS_EXPERIMENTAL_2017_AF_REGIONS_CONFIDENCE, afRegionConfidence, 1);
8434 }
8435
Thierry Strudel3d639192016-09-09 11:52:26 -07008436 IF_META_AVAILABLE(int32_t, whiteBalance, CAM_INTF_PARM_WHITE_BALANCE, metadata) {
8437 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
8438 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP), *whiteBalance);
8439 if (NAME_NOT_FOUND != val) {
8440 uint8_t fwkWhiteBalanceMode = (uint8_t)val;
8441 camMetadata.update(ANDROID_CONTROL_AWB_MODE, &fwkWhiteBalanceMode, 1);
8442 LOGD("urgent Metadata : ANDROID_CONTROL_AWB_MODE %d", val);
8443 } else {
8444 LOGH("urgent Metadata not found : ANDROID_CONTROL_AWB_MODE");
8445 }
8446 }
8447
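    // Deduce ANDROID_CONTROL_AE_MODE from red-eye reduction, flash mode and AE
    // mode, in that order of precedence: red-eye overrides flash, which overrides
    // the plain AE on/off/external-flash modes.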
8448 uint8_t fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
8449 uint32_t aeMode = CAM_AE_MODE_MAX;
8450 int32_t flashMode = CAM_FLASH_MODE_MAX;
8451 int32_t redeye = -1;
8452 IF_META_AVAILABLE(uint32_t, pAeMode, CAM_INTF_META_AEC_MODE, metadata) {
8453 aeMode = *pAeMode;
8454 }
8455 IF_META_AVAILABLE(int32_t, pFlashMode, CAM_INTF_PARM_LED_MODE, metadata) {
8456 flashMode = *pFlashMode;
8457 }
8458 IF_META_AVAILABLE(int32_t, pRedeye, CAM_INTF_PARM_REDEYE_REDUCTION, metadata) {
8459 redeye = *pRedeye;
8460 }
8461
8462 if (1 == redeye) {
8463 fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
8464 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8465 } else if ((CAM_FLASH_MODE_AUTO == flashMode) || (CAM_FLASH_MODE_ON == flashMode)) {
8466 int val = lookupFwkName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
8467 flashMode);
8468 if (NAME_NOT_FOUND != val) {
8469 fwk_aeMode = (uint8_t)val;
8470 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8471 } else {
8472 LOGE("Unsupported flash mode %d", flashMode);
8473 }
8474 } else if (aeMode == CAM_AE_MODE_ON) {
8475 fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON;
8476 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8477 } else if (aeMode == CAM_AE_MODE_OFF) {
8478 fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
8479 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -08008480 } else if (aeMode == CAM_AE_MODE_ON_EXTERNAL_FLASH) {
8481 fwk_aeMode = NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH;
8482 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -07008483 } else {
8484 LOGE("Not enough info to deduce ANDROID_CONTROL_AE_MODE redeye:%d, "
8485 "flashMode:%d, aeMode:%u!!!",
8486 redeye, flashMode, aeMode);
8487 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07008488 if (mInstantAEC) {
8489 // Increment frame index count until a bound is reached for instant AEC.
8490 mInstantAecFrameIdxCount++;
8491 IF_META_AVAILABLE(cam_3a_params_t, ae_params,
8492 CAM_INTF_META_AEC_INFO, metadata) {
8493 LOGH("ae_params->settled = %d",ae_params->settled);
8494 // If AEC settled, or if number of frames reached bound value,
8495 // should reset instant AEC.
8496 if (ae_params->settled ||
8497 (mInstantAecFrameIdxCount > mAecSkipDisplayFrameBound)) {
8498 LOGH("AEC settled or Frames reached instantAEC bound, resetting instantAEC");
8499 mInstantAEC = false;
8500 mResetInstantAEC = true;
8501 mInstantAecFrameIdxCount = 0;
8502 }
8503 }
8504 }
Shuzhen Wang3569d4a2017-09-04 19:10:28 -07008505
8506 IF_META_AVAILABLE(int32_t, af_tof_confidence,
8507 CAM_INTF_META_AF_TOF_CONFIDENCE, metadata) {
8508 IF_META_AVAILABLE(int32_t, af_tof_distance,
8509 CAM_INTF_META_AF_TOF_DISTANCE, metadata) {
8510 int32_t fwk_af_tof_confidence = *af_tof_confidence;
8511 int32_t fwk_af_tof_distance = *af_tof_distance;
8512 if (fwk_af_tof_confidence == 1) {
8513 mSceneDistance = fwk_af_tof_distance;
8514 } else {
8515 mSceneDistance = -1;
8516 }
8517 LOGD("tof_distance %d, tof_confidence %d, mSceneDistance %d",
8518 fwk_af_tof_distance, fwk_af_tof_confidence, mSceneDistance);
8519 }
8520 }
8521 camMetadata.update(NEXUS_EXPERIMENTAL_2017_SCENE_DISTANCE, &mSceneDistance, 1);
8522
Thierry Strudel3d639192016-09-09 11:52:26 -07008523 resultMetadata = camMetadata.release();
8524 return resultMetadata;
8525}
8526
8527/*===========================================================================
8528 * FUNCTION : dumpMetadataToFile
8529 *
8530 * DESCRIPTION: Dumps tuning metadata to file system
8531 *
8532 * PARAMETERS :
8533 * @meta : tuning metadata
8534 * @dumpFrameCount : current dump frame count
8535 * @enabled : whether tuning metadata dumping is enabled
 * @type : type string used in the dump file name
 * @frameNumber : frame number of the metadata being dumped
8536 *
8537 *==========================================================================*/
8538void QCamera3HardwareInterface::dumpMetadataToFile(tuning_params_t &meta,
8539 uint32_t &dumpFrameCount,
8540 bool enabled,
8541 const char *type,
8542 uint32_t frameNumber)
8543{
8544 //Some sanity checks
8545 if (meta.tuning_sensor_data_size > TUNING_SENSOR_DATA_MAX) {
8546 LOGE("Tuning sensor data size bigger than expected %d: %d",
8547 meta.tuning_sensor_data_size,
8548 TUNING_SENSOR_DATA_MAX);
8549 return;
8550 }
8551
8552 if (meta.tuning_vfe_data_size > TUNING_VFE_DATA_MAX) {
8553 LOGE("Tuning VFE data size bigger than expected %d: %d",
8554 meta.tuning_vfe_data_size,
8555 TUNING_VFE_DATA_MAX);
8556 return;
8557 }
8558
8559 if (meta.tuning_cpp_data_size > TUNING_CPP_DATA_MAX) {
8560 LOGE("Tuning CPP data size bigger than expected %d: %d",
8561 meta.tuning_cpp_data_size,
8562 TUNING_CPP_DATA_MAX);
8563 return;
8564 }
8565
8566 if (meta.tuning_cac_data_size > TUNING_CAC_DATA_MAX) {
8567 LOGE("Tuning CAC data size bigger than expected %d: %d",
8568 meta.tuning_cac_data_size,
8569 TUNING_CAC_DATA_MAX);
8570 return;
8571 }
8572 //
8573
8574 if(enabled){
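        // Dump file path: QCAMERA_DUMP_FRM_LOCATION + timestamp +
        // "<dumpFrameCount>m_<type>_<frameNumber>.bin"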
8575 char timeBuf[FILENAME_MAX];
8576 char buf[FILENAME_MAX];
8577 memset(buf, 0, sizeof(buf));
8578 memset(timeBuf, 0, sizeof(timeBuf));
8579 time_t current_time;
8580 struct tm * timeinfo;
8581 time (&current_time);
8582 timeinfo = localtime (&current_time);
8583 if (timeinfo != NULL) {
8584 strftime (timeBuf, sizeof(timeBuf),
8585 QCAMERA_DUMP_FRM_LOCATION"%Y%m%d%H%M%S", timeinfo);
8586 }
8587 String8 filePath(timeBuf);
8588 snprintf(buf,
8589 sizeof(buf),
8590 "%dm_%s_%d.bin",
8591 dumpFrameCount,
8592 type,
8593 frameNumber);
8594 filePath.append(buf);
8595 int file_fd = open(filePath.string(), O_RDWR | O_CREAT, 0777);
8596 if (file_fd >= 0) {
8597 ssize_t written_len = 0;
8598 meta.tuning_data_version = TUNING_DATA_VERSION;
8599 void *data = (void *)((uint8_t *)&meta.tuning_data_version);
8600 written_len += write(file_fd, data, sizeof(uint32_t));
8601 data = (void *)((uint8_t *)&meta.tuning_sensor_data_size);
8602 LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
8603 written_len += write(file_fd, data, sizeof(uint32_t));
8604 data = (void *)((uint8_t *)&meta.tuning_vfe_data_size);
8605 LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
8606 written_len += write(file_fd, data, sizeof(uint32_t));
8607 data = (void *)((uint8_t *)&meta.tuning_cpp_data_size);
8608 LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
8609 written_len += write(file_fd, data, sizeof(uint32_t));
8610 data = (void *)((uint8_t *)&meta.tuning_cac_data_size);
8611 LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
8612 written_len += write(file_fd, data, sizeof(uint32_t));
8613 meta.tuning_mod3_data_size = 0;
8614 data = (void *)((uint8_t *)&meta.tuning_mod3_data_size);
8615 LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
8616 written_len += write(file_fd, data, sizeof(uint32_t));
8617 size_t total_size = meta.tuning_sensor_data_size;
8618 data = (void *)((uint8_t *)&meta.data);
8619 written_len += write(file_fd, data, total_size);
8620 total_size = meta.tuning_vfe_data_size;
8621 data = (void *)((uint8_t *)&meta.data[TUNING_VFE_DATA_OFFSET]);
8622 written_len += write(file_fd, data, total_size);
8623 total_size = meta.tuning_cpp_data_size;
8624 data = (void *)((uint8_t *)&meta.data[TUNING_CPP_DATA_OFFSET]);
8625 written_len += write(file_fd, data, total_size);
8626 total_size = meta.tuning_cac_data_size;
8627 data = (void *)((uint8_t *)&meta.data[TUNING_CAC_DATA_OFFSET]);
8628 written_len += write(file_fd, data, total_size);
8629 close(file_fd);
8630 }else {
8631 LOGE("fail to open file for metadata dumping");
8632 }
8633 }
8634}
8635
8636/*===========================================================================
8637 * FUNCTION : cleanAndSortStreamInfo
8638 *
8639 * DESCRIPTION: helper method to clean up invalid streams in stream_info,
8640 * and sort them such that the raw stream is at the end of the list.
8641 * This is a workaround for a camera daemon constraint.
8642 *
8643 * PARAMETERS : None
8644 *
8645 *==========================================================================*/
8646void QCamera3HardwareInterface::cleanAndSortStreamInfo()
8647{
8648 List<stream_info_t *> newStreamInfo;
8649
8650 /*clean up invalid streams*/
8651 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
8652 it != mStreamInfo.end();) {
8653 if(((*it)->status) == INVALID){
8654 QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
8655 delete channel;
8656 free(*it);
8657 it = mStreamInfo.erase(it);
8658 } else {
8659 it++;
8660 }
8661 }
8662
8663 // Move preview/video/callback/snapshot streams into newList
8664 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
8665 it != mStreamInfo.end();) {
8666 if ((*it)->stream->format != HAL_PIXEL_FORMAT_RAW_OPAQUE &&
8667 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW10 &&
8668 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW16) {
8669 newStreamInfo.push_back(*it);
8670 it = mStreamInfo.erase(it);
8671 } else
8672 it++;
8673 }
8674 // Move raw streams into newList
8675 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
8676 it != mStreamInfo.end();) {
8677 newStreamInfo.push_back(*it);
8678 it = mStreamInfo.erase(it);
8679 }
8680
8681 mStreamInfo = newStreamInfo;
8682}
8683
8684/*===========================================================================
8685 * FUNCTION : extractJpegMetadata
8686 *
8687 * DESCRIPTION: helper method to extract Jpeg metadata from capture request.
8688 * JPEG metadata is cached in HAL, and return as part of capture
8689 * result when metadata is returned from camera daemon.
8690 *
8691 * PARAMETERS : @jpegMetadata: jpeg metadata to be extracted
8692 * @request: capture request
8693 *
8694 *==========================================================================*/
8695void QCamera3HardwareInterface::extractJpegMetadata(
8696 CameraMetadata& jpegMetadata,
8697 const camera3_capture_request_t *request)
8698{
8699 CameraMetadata frame_settings;
8700 frame_settings = request->settings;
8701
8702 if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES))
8703 jpegMetadata.update(ANDROID_JPEG_GPS_COORDINATES,
8704 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d,
8705 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).count);
8706
8707 if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD))
8708 jpegMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD,
8709 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8,
8710 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).count);
8711
8712 if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP))
8713 jpegMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP,
8714 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64,
8715 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).count);
8716
8717 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION))
8718 jpegMetadata.update(ANDROID_JPEG_ORIENTATION,
8719 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32,
8720 frame_settings.find(ANDROID_JPEG_ORIENTATION).count);
8721
8722 if (frame_settings.exists(ANDROID_JPEG_QUALITY))
8723 jpegMetadata.update(ANDROID_JPEG_QUALITY,
8724 frame_settings.find(ANDROID_JPEG_QUALITY).data.u8,
8725 frame_settings.find(ANDROID_JPEG_QUALITY).count);
8726
8727 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY))
8728 jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY,
8729 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8,
8730 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).count);
8731
8732 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
8733 int32_t thumbnail_size[2];
8734 thumbnail_size[0] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
8735 thumbnail_size[1] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
8736 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
8737 int32_t orientation =
8738 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008739 if ((!needJpegExifRotation()) && ((orientation == 90) || (orientation == 270))) {
Thierry Strudel3d639192016-09-09 11:52:26 -07008740 //swap thumbnail dimensions for rotations 90 and 270 in jpeg metadata.
8741 int32_t temp;
8742 temp = thumbnail_size[0];
8743 thumbnail_size[0] = thumbnail_size[1];
8744 thumbnail_size[1] = temp;
8745 }
8746 }
8747 jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE,
8748 thumbnail_size,
8749 frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
8750 }
8751
8752}
8753
8754/*===========================================================================
8755 * FUNCTION : convertToRegions
8756 *
8757 * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array
8758 *
8759 * PARAMETERS :
8760 * @rect : cam_rect_t struct to convert
8761 * @region : int32_t destination array
8762 * @weight : if we are converting from cam_area_t, weight is valid
8763 * else weight = -1
8764 *
8765 *==========================================================================*/
8766void QCamera3HardwareInterface::convertToRegions(cam_rect_t rect,
8767 int32_t *region, int weight)
8768{
Jason Lee8ce36fa2017-04-19 19:40:37 -07008769 region[FACE_LEFT] = rect.left;
8770 region[FACE_TOP] = rect.top;
8771 region[FACE_RIGHT] = rect.left + rect.width;
8772 region[FACE_BOTTOM] = rect.top + rect.height;
Thierry Strudel3d639192016-09-09 11:52:26 -07008773 if (weight > -1) {
Jason Lee8ce36fa2017-04-19 19:40:37 -07008774 region[FACE_WEIGHT] = weight;
Thierry Strudel3d639192016-09-09 11:52:26 -07008775 }
8776}
8777
8778/*===========================================================================
8779 * FUNCTION : convertFromRegions
8780 *
8781 * DESCRIPTION: helper method to convert a framework region array into cam_area_t
8782 * PARAMETERS :
8783 * @roi : cam_area_t destination struct
8784 * @frame_settings : capture request settings containing the region tag
8785 * @tag : metadata tag whose data is laid out as
8786 * [x_min, y_min, x_max, y_max, weight]
8787 * else weight = -1
8788 *
8789 *==========================================================================*/
8790void QCamera3HardwareInterface::convertFromRegions(cam_area_t &roi,
Chien-Yu Chen92724a82017-01-06 11:50:30 -08008791 const CameraMetadata &frame_settings, uint32_t tag)
Thierry Strudel3d639192016-09-09 11:52:26 -07008792{
Thierry Strudel3d639192016-09-09 11:52:26 -07008793 int32_t x_min = frame_settings.find(tag).data.i32[0];
8794 int32_t y_min = frame_settings.find(tag).data.i32[1];
8795 int32_t x_max = frame_settings.find(tag).data.i32[2];
8796 int32_t y_max = frame_settings.find(tag).data.i32[3];
8797 roi.weight = frame_settings.find(tag).data.i32[4];
8798 roi.rect.left = x_min;
8799 roi.rect.top = y_min;
8800 roi.rect.width = x_max - x_min;
8801 roi.rect.height = y_max - y_min;
8802}
8803
8804/*===========================================================================
8805 * FUNCTION : resetIfNeededROI
8806 *
8807 * DESCRIPTION: helper method to reset the roi if it is greater than scaler
8808 * crop region
8809 *
8810 * PARAMETERS :
8811 * @roi : cam_area_t struct to resize
8812 * @scalerCropRegion : cam_crop_region_t region to compare against
8813 *
8814 *
8815 *==========================================================================*/
8816bool QCamera3HardwareInterface::resetIfNeededROI(cam_area_t* roi,
8817 const cam_crop_region_t* scalerCropRegion)
8818{
8819 int32_t roi_x_max = roi->rect.width + roi->rect.left;
8820 int32_t roi_y_max = roi->rect.height + roi->rect.top;
8821 int32_t crop_x_max = scalerCropRegion->width + scalerCropRegion->left;
8822 int32_t crop_y_max = scalerCropRegion->height + scalerCropRegion->top;
8823
8824 /* According to the spec, weight = 0 is used to indicate that the roi should be disabled.
8825 * Without this check, the calculations below that validate whether the roi
8826 * is inside the scaler crop region would fail, resulting in the roi not being
8827 * reset and the algorithm continuing to use a stale roi window
8828 */
8829 if (roi->weight == 0) {
8830 return true;
8831 }
8832
8833 if ((roi_x_max < scalerCropRegion->left) ||
8834 // right edge of roi window is left of scaler crop's left edge
8835 (roi_y_max < scalerCropRegion->top) ||
8836 // bottom edge of roi window is above scaler crop's top edge
8837 (roi->rect.left > crop_x_max) ||
8838 // left edge of roi window is beyond (right of) scaler crop's right edge
8839 (roi->rect.top > crop_y_max)){
8840 // top edge of roi window is below scaler crop's bottom edge
8841 return false;
8842 }
8843 if (roi->rect.left < scalerCropRegion->left) {
8844 roi->rect.left = scalerCropRegion->left;
8845 }
8846 if (roi->rect.top < scalerCropRegion->top) {
8847 roi->rect.top = scalerCropRegion->top;
8848 }
8849 if (roi_x_max > crop_x_max) {
8850 roi_x_max = crop_x_max;
8851 }
8852 if (roi_y_max > crop_y_max) {
8853 roi_y_max = crop_y_max;
8854 }
8855 roi->rect.width = roi_x_max - roi->rect.left;
8856 roi->rect.height = roi_y_max - roi->rect.top;
8857 return true;
8858}
8859
8860/*===========================================================================
8861 * FUNCTION : convertLandmarks
8862 *
8863 * DESCRIPTION: helper method to extract the landmarks from face detection info
8864 *
8865 * PARAMETERS :
8866 * @landmark_data : input landmark data to be converted
8867 * @landmarks : int32_t destination array
8868 *
8869 *
8870 *==========================================================================*/
8871void QCamera3HardwareInterface::convertLandmarks(
8872 cam_face_landmarks_info_t landmark_data,
8873 int32_t *landmarks)
8874{
Thierry Strudel04e026f2016-10-10 11:27:36 -07008875 if (landmark_data.is_left_eye_valid) {
8876 landmarks[LEFT_EYE_X] = (int32_t)landmark_data.left_eye_center.x;
8877 landmarks[LEFT_EYE_Y] = (int32_t)landmark_data.left_eye_center.y;
8878 } else {
8879 landmarks[LEFT_EYE_X] = FACE_INVALID_POINT;
8880 landmarks[LEFT_EYE_Y] = FACE_INVALID_POINT;
8881 }
8882
8883 if (landmark_data.is_right_eye_valid) {
8884 landmarks[RIGHT_EYE_X] = (int32_t)landmark_data.right_eye_center.x;
8885 landmarks[RIGHT_EYE_Y] = (int32_t)landmark_data.right_eye_center.y;
8886 } else {
8887 landmarks[RIGHT_EYE_X] = FACE_INVALID_POINT;
8888 landmarks[RIGHT_EYE_Y] = FACE_INVALID_POINT;
8889 }
8890
8891 if (landmark_data.is_mouth_valid) {
8892 landmarks[MOUTH_X] = (int32_t)landmark_data.mouth_center.x;
8893 landmarks[MOUTH_Y] = (int32_t)landmark_data.mouth_center.y;
8894 } else {
8895 landmarks[MOUTH_X] = FACE_INVALID_POINT;
8896 landmarks[MOUTH_Y] = FACE_INVALID_POINT;
8897 }
8898}
8899
8900/*===========================================================================
8901 * FUNCTION : setInvalidLandmarks
8902 *
8903 * DESCRIPTION: helper method to set invalid landmarks
8904 *
8905 * PARAMETERS :
8906 * @landmarks : int32_t destination array
8907 *
8908 *
8909 *==========================================================================*/
8910void QCamera3HardwareInterface::setInvalidLandmarks(
8911 int32_t *landmarks)
8912{
8913 landmarks[LEFT_EYE_X] = FACE_INVALID_POINT;
8914 landmarks[LEFT_EYE_Y] = FACE_INVALID_POINT;
8915 landmarks[RIGHT_EYE_X] = FACE_INVALID_POINT;
8916 landmarks[RIGHT_EYE_Y] = FACE_INVALID_POINT;
8917 landmarks[MOUTH_X] = FACE_INVALID_POINT;
8918 landmarks[MOUTH_Y] = FACE_INVALID_POINT;
Thierry Strudel3d639192016-09-09 11:52:26 -07008919}
8920
8921#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008922
8923/*===========================================================================
8924 * FUNCTION : getCapabilities
8925 *
8926 * DESCRIPTION: query camera capability from back-end
8927 *
8928 * PARAMETERS :
8929 * @ops : mm-interface ops structure
8930 * @cam_handle : camera handle for which we need capability
8931 *
8932 * RETURN : ptr type of capability structure
8933 * capability for success
8934 * NULL for failure
8935 *==========================================================================*/
8936cam_capability_t *QCamera3HardwareInterface::getCapabilities(mm_camera_ops_t *ops,
8937 uint32_t cam_handle)
8938{
8939 int rc = NO_ERROR;
8940 QCamera3HeapMemory *capabilityHeap = NULL;
8941 cam_capability_t *cap_ptr = NULL;
8942
8943 if (ops == NULL) {
8944 LOGE("Invalid arguments");
8945 return NULL;
8946 }
8947
8948 capabilityHeap = new QCamera3HeapMemory(1);
8949 if (capabilityHeap == NULL) {
8950 LOGE("creation of capabilityHeap failed");
8951 return NULL;
8952 }
8953
8954 /* Allocate memory for capability buffer */
8955 rc = capabilityHeap->allocate(sizeof(cam_capability_t));
8956 if(rc != OK) {
8957 LOGE("No memory for cappability");
8958 goto allocate_failed;
8959 }
8960
8961 /* Map memory for capability buffer */
8962 memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t));
8963
8964 rc = ops->map_buf(cam_handle,
8965 CAM_MAPPING_BUF_TYPE_CAPABILITY, capabilityHeap->getFd(0),
8966 sizeof(cam_capability_t), capabilityHeap->getPtr(0));
8967 if(rc < 0) {
8968 LOGE("failed to map capability buffer");
8969 rc = FAILED_TRANSACTION;
8970 goto map_failed;
8971 }
8972
8973 /* Query Capability */
8974 rc = ops->query_capability(cam_handle);
8975 if(rc < 0) {
8976 LOGE("failed to query capability");
8977 rc = FAILED_TRANSACTION;
8978 goto query_failed;
8979 }
8980
8981 cap_ptr = (cam_capability_t *)malloc(sizeof(cam_capability_t));
8982 if (cap_ptr == NULL) {
8983 LOGE("out of memory");
8984 rc = NO_MEMORY;
8985 goto query_failed;
8986 }
8987
8988 memset(cap_ptr, 0, sizeof(cam_capability_t));
8989 memcpy(cap_ptr, DATA_PTR(capabilityHeap, 0), sizeof(cam_capability_t));
8990
8991 int index;
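    // Clear the analysis-stream padding offsets in the local capability copy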
8992 for (index = 0; index < CAM_ANALYSIS_INFO_MAX; index++) {
8993 cam_analysis_info_t *p_analysis_info = &cap_ptr->analysis_info[index];
8994 p_analysis_info->analysis_padding_info.offset_info.offset_x = 0;
8995 p_analysis_info->analysis_padding_info.offset_info.offset_y = 0;
8996 }
8997
8998query_failed:
8999 ops->unmap_buf(cam_handle, CAM_MAPPING_BUF_TYPE_CAPABILITY);
9000map_failed:
9001 capabilityHeap->deallocate();
9002allocate_failed:
9003 delete capabilityHeap;
9004
9005 if (rc != NO_ERROR) {
9006 return NULL;
9007 } else {
9008 return cap_ptr;
9009 }
9010}
9011
Thierry Strudel3d639192016-09-09 11:52:26 -07009012/*===========================================================================
9013 * FUNCTION : initCapabilities
9014 *
9015 * DESCRIPTION: initialize camera capabilities in static data struct
9016 *
9017 * PARAMETERS :
9018 * @cameraId : camera Id
9019 *
9020 * RETURN : int32_t type of status
9021 * NO_ERROR -- success
9022 * none-zero failure code
9023 *==========================================================================*/
9024int QCamera3HardwareInterface::initCapabilities(uint32_t cameraId)
9025{
9026 int rc = 0;
9027 mm_camera_vtbl_t *cameraHandle = NULL;
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07009028 uint32_t handle = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07009029
9030 rc = camera_open((uint8_t)cameraId, &cameraHandle);
9031 if (rc) {
9032 LOGE("camera_open failed. rc = %d", rc);
9033 goto open_failed;
9034 }
9035 if (!cameraHandle) {
9036 LOGE("camera_open failed. cameraHandle = %p", cameraHandle);
9037 goto open_failed;
9038 }
9039
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07009040 handle = get_main_camera_handle(cameraHandle->camera_handle);
9041 gCamCapability[cameraId] = getCapabilities(cameraHandle->ops, handle);
9042 if (gCamCapability[cameraId] == NULL) {
9043 rc = FAILED_TRANSACTION;
9044 goto failed_op;
Thierry Strudel3d639192016-09-09 11:52:26 -07009045 }
9046
Thierry Strudel295a0ca2016-11-03 18:38:47 -07009047 gCamCapability[cameraId]->camera_index = cameraId;
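    // For dual cameras, also query the aux camera capability and keep a copy of the main camera capability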
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07009048 if (is_dual_camera_by_idx(cameraId)) {
9049 handle = get_aux_camera_handle(cameraHandle->camera_handle);
9050 gCamCapability[cameraId]->aux_cam_cap =
9051 getCapabilities(cameraHandle->ops, handle);
9052 if (gCamCapability[cameraId]->aux_cam_cap == NULL) {
9053 rc = FAILED_TRANSACTION;
9054 free(gCamCapability[cameraId]);
9055 goto failed_op;
9056 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08009057
9058 // Copy the main camera capability to main_cam_cap struct
9059 gCamCapability[cameraId]->main_cam_cap =
9060 (cam_capability_t *)malloc(sizeof(cam_capability_t));
9061 if (gCamCapability[cameraId]->main_cam_cap == NULL) {
9062 LOGE("out of memory");
9063 rc = NO_MEMORY;
9064 goto failed_op;
9065 }
9066 memcpy(gCamCapability[cameraId]->main_cam_cap, gCamCapability[cameraId],
9067 sizeof(cam_capability_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07009068 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07009069failed_op:
Thierry Strudel3d639192016-09-09 11:52:26 -07009070 cameraHandle->ops->close_camera(cameraHandle->camera_handle);
9071 cameraHandle = NULL;
9072open_failed:
9073 return rc;
9074}
9075
9076/*==========================================================================
9077 * FUNCTION   : get3AVersion
9078 *
9079 * DESCRIPTION: get the Q3A S/W version
9080 *
9081 * PARAMETERS :
9082 * @sw_version: Reference of Q3A structure which will hold version info upon
9083 * return
9084 *
9085 * RETURN : None
9086 *
9087 *==========================================================================*/
9088void QCamera3HardwareInterface::get3AVersion(cam_q3a_version_t &sw_version)
9089{
9090 if(gCamCapability[mCameraId])
9091 sw_version = gCamCapability[mCameraId]->q3a_version;
9092 else
9093 LOGE("Capability structure NULL!");
9094}
9095
9096
9097/*===========================================================================
9098 * FUNCTION : initParameters
9099 *
9100 * DESCRIPTION: initialize camera parameters
9101 *
9102 * PARAMETERS :
9103 *
9104 * RETURN : int32_t type of status
9105 * NO_ERROR -- success
9106 * none-zero failure code
9107 *==========================================================================*/
9108int QCamera3HardwareInterface::initParameters()
9109{
9110 int rc = 0;
9111
9112 //Allocate Set Param Buffer
9113 mParamHeap = new QCamera3HeapMemory(1);
9114 rc = mParamHeap->allocate(sizeof(metadata_buffer_t));
9115 if(rc != OK) {
9116 rc = NO_MEMORY;
9117 LOGE("Failed to allocate SETPARM Heap memory");
9118 delete mParamHeap;
9119 mParamHeap = NULL;
9120 return rc;
9121 }
9122
9123 //Map memory for parameters buffer
9124 rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
9125 CAM_MAPPING_BUF_TYPE_PARM_BUF,
9126 mParamHeap->getFd(0),
9127 sizeof(metadata_buffer_t),
9128 (metadata_buffer_t *) DATA_PTR(mParamHeap,0));
9129 if(rc < 0) {
9130 LOGE("failed to map SETPARM buffer");
9131 rc = FAILED_TRANSACTION;
9132 mParamHeap->deallocate();
9133 delete mParamHeap;
9134 mParamHeap = NULL;
9135 return rc;
9136 }
9137
9138 mParameters = (metadata_buffer_t *) DATA_PTR(mParamHeap,0);
9139
9140 mPrevParameters = (metadata_buffer_t *)malloc(sizeof(metadata_buffer_t));
9141 return rc;
9142}
9143
9144/*===========================================================================
9145 * FUNCTION : deinitParameters
9146 *
9147 * DESCRIPTION: de-initialize camera parameters
9148 *
9149 * PARAMETERS :
9150 *
9151 * RETURN : NONE
9152 *==========================================================================*/
9153void QCamera3HardwareInterface::deinitParameters()
9154{
9155 mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
9156 CAM_MAPPING_BUF_TYPE_PARM_BUF);
9157
9158 mParamHeap->deallocate();
9159 delete mParamHeap;
9160 mParamHeap = NULL;
9161
9162 mParameters = NULL;
9163
9164 free(mPrevParameters);
9165 mPrevParameters = NULL;
9166}
9167
9168/*===========================================================================
9169 * FUNCTION : calcMaxJpegSize
9170 *
9171 * DESCRIPTION: Calculates maximum jpeg size supported by the cameraId
9172 *
9173 * PARAMETERS :
9174 *
9175 * RETURN : max_jpeg_size
9176 *==========================================================================*/
9177size_t QCamera3HardwareInterface::calcMaxJpegSize(uint32_t camera_id)
9178{
9179 size_t max_jpeg_size = 0;
9180 size_t temp_width, temp_height;
9181 size_t count = MIN(gCamCapability[camera_id]->picture_sizes_tbl_cnt,
9182 MAX_SIZES_CNT);
9183 for (size_t i = 0; i < count; i++) {
9184 temp_width = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].width;
9185 temp_height = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].height;
9186 if (temp_width * temp_height > max_jpeg_size ) {
9187 max_jpeg_size = temp_width * temp_height;
9188 }
9189 }
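    // Rough worst-case estimate: ~1.5 bytes per pixel of the largest picture size, plus space for the camera3_jpeg_blob_t structure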
9190 max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
9191 return max_jpeg_size;
9192}
9193
9194/*===========================================================================
9195 * FUNCTION : getMaxRawSize
9196 *
9197 * DESCRIPTION: Fetches maximum raw size supported by the cameraId
9198 *
9199 * PARAMETERS :
9200 *
9201 * RETURN : Largest supported Raw Dimension
9202 *==========================================================================*/
9203cam_dimension_t QCamera3HardwareInterface::getMaxRawSize(uint32_t camera_id)
9204{
9205 int max_width = 0;
9206 cam_dimension_t maxRawSize;
9207
9208 memset(&maxRawSize, 0, sizeof(cam_dimension_t));
9209 for (size_t i = 0; i < gCamCapability[camera_id]->supported_raw_dim_cnt; i++) {
9210 if (max_width < gCamCapability[camera_id]->raw_dim[i].width) {
9211 max_width = gCamCapability[camera_id]->raw_dim[i].width;
9212 maxRawSize = gCamCapability[camera_id]->raw_dim[i];
9213 }
9214 }
9215 return maxRawSize;
9216}
9217
9218
9219/*===========================================================================
9220 * FUNCTION : calcMaxJpegDim
9221 *
9222 * DESCRIPTION: Calculates maximum jpeg dimension supported by the cameraId
9223 *
9224 * PARAMETERS :
9225 *
9226 * RETURN : max_jpeg_dim
9227 *==========================================================================*/
9228cam_dimension_t QCamera3HardwareInterface::calcMaxJpegDim()
9229{
9230 cam_dimension_t max_jpeg_dim;
9231 cam_dimension_t curr_jpeg_dim;
9232 max_jpeg_dim.width = 0;
9233 max_jpeg_dim.height = 0;
9234 curr_jpeg_dim.width = 0;
9235 curr_jpeg_dim.height = 0;
9236 for (size_t i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
9237 curr_jpeg_dim.width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width;
9238 curr_jpeg_dim.height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height;
9239 if (curr_jpeg_dim.width * curr_jpeg_dim.height >
9240 max_jpeg_dim.width * max_jpeg_dim.height ) {
9241 max_jpeg_dim.width = curr_jpeg_dim.width;
9242 max_jpeg_dim.height = curr_jpeg_dim.height;
9243 }
9244 }
9245 return max_jpeg_dim;
9246}
9247
9248/*===========================================================================
9249 * FUNCTION : addStreamConfig
9250 *
9251 * DESCRIPTION: adds the stream configuration to the array
9252 *
9253 * PARAMETERS :
9254 * @available_stream_configs : pointer to stream configuration array
9255 * @scalar_format : scalar format
9256 * @dim : configuration dimension
9257 * @config_type : input or output configuration type
9258 *
9259 * RETURN : NONE
9260 *==========================================================================*/
9261void QCamera3HardwareInterface::addStreamConfig(Vector<int32_t> &available_stream_configs,
9262 int32_t scalar_format, const cam_dimension_t &dim, int32_t config_type)
9263{
9264 available_stream_configs.add(scalar_format);
9265 available_stream_configs.add(dim.width);
9266 available_stream_configs.add(dim.height);
9267 available_stream_configs.add(config_type);
9268}
9269
9270/*===========================================================================
9271 * FUNCTION   : supportBurstCapture
9272 *
9273 * DESCRIPTION: Whether a particular camera supports BURST_CAPTURE
9274 *
9275 * PARAMETERS :
9276 * @cameraId : camera Id
9277 *
9278 * RETURN : true if camera supports BURST_CAPTURE
9279 * false otherwise
9280 *==========================================================================*/
9281bool QCamera3HardwareInterface::supportBurstCapture(uint32_t cameraId)
9282{
9283 const int64_t highResDurationBound = 50000000; // 50 ms, 20 fps
9284 const int64_t fullResDurationBound = 100000000; // 100 ms, 10 fps
9285 const int32_t highResWidth = 3264;
9286 const int32_t highResHeight = 2448;
9287
9288 if (gCamCapability[cameraId]->picture_min_duration[0] > fullResDurationBound) {
9289 // Maximum resolution images cannot be captured at >= 10fps
9290 // -> not supporting BURST_CAPTURE
9291 return false;
9292 }
9293
9294 if (gCamCapability[cameraId]->picture_min_duration[0] <= highResDurationBound) {
9295 // Maximum resolution images can be captured at >= 20fps
9296 // --> supporting BURST_CAPTURE
9297 return true;
9298 }
9299
9300 // Find the smallest highRes resolution, or largest resolution if there is none
9301 size_t totalCnt = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt,
9302 MAX_SIZES_CNT);
9303 size_t highRes = 0;
9304 while ((highRes + 1 < totalCnt) &&
9305 (gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].width *
9306 gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].height >=
9307 highResWidth * highResHeight)) {
9308 highRes++;
9309 }
9310 if (gCamCapability[cameraId]->picture_min_duration[highRes] <= highResDurationBound) {
9311 return true;
9312 } else {
9313 return false;
9314 }
9315}
9316
9317/*===========================================================================
Emilian Peev0f3c3162017-03-15 12:57:46 +00009318 * FUNCTION : getPDStatIndex
9319 *
9320 * DESCRIPTION: Return the meta raw phase detection statistics index if present
9321 *
9322 * PARAMETERS :
9323 * @caps : camera capabilities
9324 *
9325 * RETURN : int32_t type
9326 * non-negative - on success
9327 * -1 - on failure
9328 *==========================================================================*/
9329int32_t QCamera3HardwareInterface::getPDStatIndex(cam_capability_t *caps) {
9330 if (nullptr == caps) {
9331 return -1;
9332 }
9333
9334 uint32_t metaRawCount = caps->meta_raw_channel_count;
9335 int32_t ret = -1;
9336 for (size_t i = 0; i < metaRawCount; i++) {
9337 if (CAM_FORMAT_SUBTYPE_PDAF_STATS == caps->sub_fmt[i]) {
9338 ret = i;
9339 break;
9340 }
9341 }
9342
9343 return ret;
9344}
9345
9346/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07009347 * FUNCTION : initStaticMetadata
9348 *
9349 * DESCRIPTION: initialize the static metadata
9350 *
9351 * PARAMETERS :
9352 * @cameraId : camera Id
9353 *
9354 * RETURN : int32_t type of status
9355 * 0 -- success
9356 * non-zero failure code
9357 *==========================================================================*/
9358int QCamera3HardwareInterface::initStaticMetadata(uint32_t cameraId)
9359{
9360 int rc = 0;
9361 CameraMetadata staticInfo;
9362 size_t count = 0;
9363 bool limitedDevice = false;
9364 char prop[PROPERTY_VALUE_MAX];
9365 bool supportBurst = false;
9366
9367 supportBurst = supportBurstCapture(cameraId);
9368
9369    /* If the sensor is a YUV sensor (no raw support), if per-frame control is not
9370     * guaranteed, or if the min fps at max resolution is less than 20 fps, the
9371     * device is advertised as a LIMITED device */
9372 limitedDevice = gCamCapability[cameraId]->no_per_frame_control_support ||
9373 (CAM_SENSOR_YUV == gCamCapability[cameraId]->sensor_type.sens_type) ||
9374 (CAM_SENSOR_MONO == gCamCapability[cameraId]->sensor_type.sens_type) ||
9375 !supportBurst;
9376
9377 uint8_t supportedHwLvl = limitedDevice ?
9378 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED :
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009379#ifndef USE_HAL_3_3
9380 // LEVEL_3 - This device will support level 3.
9381 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_3;
9382#else
Thierry Strudel3d639192016-09-09 11:52:26 -07009383 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009384#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07009385
9386 staticInfo.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
9387 &supportedHwLvl, 1);
9388
9389 bool facingBack = false;
9390 if ((gCamCapability[cameraId]->position == CAM_POSITION_BACK) ||
9391 (gCamCapability[cameraId]->position == CAM_POSITION_BACK_AUX)) {
9392 facingBack = true;
9393 }
9394 /*HAL 3 only*/
9395 staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
9396 &gCamCapability[cameraId]->min_focus_distance, 1);
9397
9398 staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
9399 &gCamCapability[cameraId]->hyper_focal_distance, 1);
9400
9401 /*should be using focal lengths but sensor doesn't provide that info now*/
9402 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
9403 &gCamCapability[cameraId]->focal_length,
9404 1);
9405
9406 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
9407 gCamCapability[cameraId]->apertures,
9408 MIN(CAM_APERTURES_MAX, gCamCapability[cameraId]->apertures_count));
9409
9410 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
9411 gCamCapability[cameraId]->filter_densities,
9412 MIN(CAM_FILTER_DENSITIES_MAX, gCamCapability[cameraId]->filter_densities_count));
9413
9414
Zhijun Hea6ea1d32017-03-10 13:30:00 -08009415 uint8_t available_opt_stab_modes[CAM_OPT_STAB_MAX];
9416 size_t mode_count =
9417 MIN((size_t)CAM_OPT_STAB_MAX, gCamCapability[cameraId]->optical_stab_modes_count);
9418 for (size_t i = 0; i < mode_count; i++) {
9419 available_opt_stab_modes[i] = gCamCapability[cameraId]->optical_stab_modes[i];
9420 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009421 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
Zhijun Hea6ea1d32017-03-10 13:30:00 -08009422 available_opt_stab_modes, mode_count);
Thierry Strudel3d639192016-09-09 11:52:26 -07009423
9424 int32_t lens_shading_map_size[] = {
9425 MIN(CAM_MAX_SHADING_MAP_WIDTH, gCamCapability[cameraId]->lens_shading_map_size.width),
9426 MIN(CAM_MAX_SHADING_MAP_HEIGHT, gCamCapability[cameraId]->lens_shading_map_size.height)};
9427 staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE,
9428 lens_shading_map_size,
9429 sizeof(lens_shading_map_size)/sizeof(int32_t));
9430
9431 staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
9432 gCamCapability[cameraId]->sensor_physical_size, SENSOR_PHYSICAL_SIZE_CNT);
9433
9434 staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
9435 gCamCapability[cameraId]->exposure_time_range, EXPOSURE_TIME_RANGE_CNT);
9436
9437 staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
9438 &gCamCapability[cameraId]->max_frame_duration, 1);
9439
9440 camera_metadata_rational baseGainFactor = {
9441 gCamCapability[cameraId]->base_gain_factor.numerator,
9442 gCamCapability[cameraId]->base_gain_factor.denominator};
9443 staticInfo.update(ANDROID_SENSOR_BASE_GAIN_FACTOR,
9444 &baseGainFactor, 1);
9445
9446 staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
9447 (uint8_t *)&gCamCapability[cameraId]->color_arrangement, 1);
9448
9449 int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width,
9450 gCamCapability[cameraId]->pixel_array_size.height};
9451 staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
9452 pixel_array_size, sizeof(pixel_array_size)/sizeof(pixel_array_size[0]));
9453
9454 int32_t active_array_size[] = {gCamCapability[cameraId]->active_array_size.left,
9455 gCamCapability[cameraId]->active_array_size.top,
9456 gCamCapability[cameraId]->active_array_size.width,
9457 gCamCapability[cameraId]->active_array_size.height};
9458 staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
9459 active_array_size, sizeof(active_array_size)/sizeof(active_array_size[0]));
9460
9461 staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
9462 &gCamCapability[cameraId]->white_level, 1);
9463
Shuzhen Wanga5da1022016-07-13 20:18:42 -07009464 int32_t adjusted_bl_per_cfa[BLACK_LEVEL_PATTERN_CNT];
9465 adjustBlackLevelForCFA(gCamCapability[cameraId]->black_level_pattern, adjusted_bl_per_cfa,
9466 gCamCapability[cameraId]->color_arrangement);
Thierry Strudel3d639192016-09-09 11:52:26 -07009467 staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
Shuzhen Wanga5da1022016-07-13 20:18:42 -07009468 adjusted_bl_per_cfa, BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel3d639192016-09-09 11:52:26 -07009469
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009470#ifndef USE_HAL_3_3
9471 bool hasBlackRegions = false;
9472 if (gCamCapability[cameraId]->optical_black_region_count > MAX_OPTICAL_BLACK_REGIONS) {
9473 LOGW("black_region_count: %d is bounded to %d",
9474 gCamCapability[cameraId]->optical_black_region_count, MAX_OPTICAL_BLACK_REGIONS);
9475 gCamCapability[cameraId]->optical_black_region_count = MAX_OPTICAL_BLACK_REGIONS;
9476 }
9477 if (gCamCapability[cameraId]->optical_black_region_count != 0) {
9478 int32_t opticalBlackRegions[MAX_OPTICAL_BLACK_REGIONS * 4];
9479 for (size_t i = 0; i < gCamCapability[cameraId]->optical_black_region_count * 4; i++) {
9480 opticalBlackRegions[i] = gCamCapability[cameraId]->optical_black_regions[i];
9481 }
9482 staticInfo.update(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS,
9483 opticalBlackRegions, gCamCapability[cameraId]->optical_black_region_count * 4);
9484 hasBlackRegions = true;
9485 }
9486#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07009487 staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION,
9488 &gCamCapability[cameraId]->flash_charge_duration, 1);
9489
9490 staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS,
9491 &gCamCapability[cameraId]->max_tone_map_curve_points, 1);
9492
Shuzhen Wang98d5efb2016-09-07 18:08:22 -07009493 uint8_t timestampSource = (gCamCapability[cameraId]->timestamp_calibrated ?
9494 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME :
9495 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_UNKNOWN);
Thierry Strudel3d639192016-09-09 11:52:26 -07009496 staticInfo.update(ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
9497 &timestampSource, 1);
9498
Thierry Strudel54dc9782017-02-15 12:12:10 -08009499 //update histogram vendor data
9500 staticInfo.update(QCAMERA3_HISTOGRAM_BUCKETS,
Thierry Strudel3d639192016-09-09 11:52:26 -07009501 &gCamCapability[cameraId]->histogram_size, 1);
9502
Thierry Strudel54dc9782017-02-15 12:12:10 -08009503 staticInfo.update(QCAMERA3_HISTOGRAM_MAX_COUNT,
Thierry Strudel3d639192016-09-09 11:52:26 -07009504 &gCamCapability[cameraId]->max_histogram_count, 1);
9505
Shuzhen Wang14415f52016-11-16 18:26:18 -08009506 //Set supported bins to be {max_bins, max_bins/2, max_bins/4, ...}
9507    //so that the app can request fewer bins than the maximum supported.
9508 std::vector<int32_t> histBins;
9509 int32_t maxHistBins = gCamCapability[cameraId]->max_histogram_count;
9510 histBins.push_back(maxHistBins);
9511 while ((maxHistBins >> 1) >= MIN_CAM_HISTOGRAM_STATS_SIZE &&
9512 (maxHistBins & 0x1) == 0) {
9513 histBins.push_back(maxHistBins >> 1);
9514 maxHistBins >>= 1;
9515 }
9516 staticInfo.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_SUPPORTED_BINS,
9517 histBins.data(), histBins.size());
9518
Thierry Strudel3d639192016-09-09 11:52:26 -07009519 int32_t sharpness_map_size[] = {
9520 gCamCapability[cameraId]->sharpness_map_size.width,
9521 gCamCapability[cameraId]->sharpness_map_size.height};
9522
9523 staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
9524 sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t));
9525
9526 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
9527 &gCamCapability[cameraId]->max_sharpness_map_value, 1);
9528
Emilian Peev0f3c3162017-03-15 12:57:46 +00009529 int32_t indexPD = getPDStatIndex(gCamCapability[cameraId]);
9530 if (0 <= indexPD) {
9531 // Advertise PD stats data as part of the Depth capabilities
9532 int32_t depthWidth =
9533 gCamCapability[cameraId]->raw_meta_dim[indexPD].width;
9534 int32_t depthHeight =
9535 gCamCapability[cameraId]->raw_meta_dim[indexPD].height;
Emilian Peev656e4fa2017-06-02 16:47:04 +01009536 int32_t depthStride =
9537 gCamCapability[cameraId]->raw_meta_dim[indexPD].width * 2;
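        // Derive the advertised PD sample count from the raw meta dimensions (2 bytes per pixel; the divide-by-16 packing factor is assumed from the PD stats layout)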
Emilian Peev0f3c3162017-03-15 12:57:46 +00009538 int32_t depthSamplesCount = (depthWidth * depthHeight * 2) / 16;
9539 assert(0 < depthSamplesCount);
9540 staticInfo.update(ANDROID_DEPTH_MAX_DEPTH_SAMPLES,
9541 &depthSamplesCount, 1);
9542
9543 int32_t depthConfigs[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9544 depthHeight,
9545 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
9546 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1,
9547 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT};
9548 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
9549 depthConfigs, sizeof(depthConfigs)/sizeof(depthConfigs[0]));
9550
9551 int64_t depthMinDuration[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9552 depthHeight, 33333333,
9553 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1, 33333333};
9554 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS,
9555 depthMinDuration,
9556 sizeof(depthMinDuration) / sizeof(depthMinDuration[0]));
9557
9558 int64_t depthStallDuration[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9559 depthHeight, 0,
9560 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1, 0};
9561 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS,
9562 depthStallDuration,
9563 sizeof(depthStallDuration) / sizeof(depthStallDuration[0]));
9564
9565 uint8_t depthExclusive = ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE_FALSE;
9566 staticInfo.update(ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE, &depthExclusive, 1);
Emilian Peev656e4fa2017-06-02 16:47:04 +01009567
9568 int32_t pd_dimensions [] = {depthWidth, depthHeight, depthStride};
9569 staticInfo.update(NEXUS_EXPERIMENTAL_2017_PD_DATA_DIMENSIONS,
9570 pd_dimensions, sizeof(pd_dimensions) / sizeof(pd_dimensions[0]));
Emilian Peev0f3c3162017-03-15 12:57:46 +00009571 }
9572
Thierry Strudel3d639192016-09-09 11:52:26 -07009573 int32_t scalar_formats[] = {
9574 ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE,
9575 ANDROID_SCALER_AVAILABLE_FORMATS_RAW16,
9576 ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888,
9577 ANDROID_SCALER_AVAILABLE_FORMATS_BLOB,
9578 HAL_PIXEL_FORMAT_RAW10,
9579 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED};
Emilian Peev0f3c3162017-03-15 12:57:46 +00009580 size_t scalar_formats_count = sizeof(scalar_formats) / sizeof(scalar_formats[0]);
9581 staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS, scalar_formats,
9582 scalar_formats_count);
Thierry Strudel3d639192016-09-09 11:52:26 -07009583
9584 int32_t available_processed_sizes[MAX_SIZES_CNT * 2];
9585 count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
9586 makeTable(gCamCapability[cameraId]->picture_sizes_tbl,
9587 count, MAX_SIZES_CNT, available_processed_sizes);
9588 staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
9589 available_processed_sizes, count * 2);
9590
9591 int32_t available_raw_sizes[MAX_SIZES_CNT * 2];
9592 count = MIN(gCamCapability[cameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
9593 makeTable(gCamCapability[cameraId]->raw_dim,
9594 count, MAX_SIZES_CNT, available_raw_sizes);
9595 staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_SIZES,
9596 available_raw_sizes, count * 2);
9597
9598 int32_t available_fps_ranges[MAX_SIZES_CNT * 2];
9599 count = MIN(gCamCapability[cameraId]->fps_ranges_tbl_cnt, MAX_SIZES_CNT);
9600 makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl,
9601 count, MAX_SIZES_CNT, available_fps_ranges);
9602 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
9603 available_fps_ranges, count * 2);
9604
9605 camera_metadata_rational exposureCompensationStep = {
9606 gCamCapability[cameraId]->exp_compensation_step.numerator,
9607 gCamCapability[cameraId]->exp_compensation_step.denominator};
9608 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
9609 &exposureCompensationStep, 1);
9610
9611 Vector<uint8_t> availableVstabModes;
9612 availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF);
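    // EIS-backed video stabilization is advertised only for back-facing cameras when the persist.camera.eis.enable property is set and the sensor supports EIS 2.0/3.0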
9613 char eis_prop[PROPERTY_VALUE_MAX];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009614 bool eisSupported = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07009615 memset(eis_prop, 0, sizeof(eis_prop));
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009616 property_get("persist.camera.eis.enable", eis_prop, "1");
Thierry Strudel3d639192016-09-09 11:52:26 -07009617 uint8_t eis_prop_set = (uint8_t)atoi(eis_prop);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009618 count = IS_TYPE_MAX;
9619 count = MIN(gCamCapability[cameraId]->supported_is_types_cnt, count);
9620 for (size_t i = 0; i < count; i++) {
9621 if ((gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
9622 (gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0)) {
9623 eisSupported = true;
9624 break;
9625 }
9626 }
9627 if (facingBack && eis_prop_set && eisSupported) {
Thierry Strudel3d639192016-09-09 11:52:26 -07009628 availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON);
9629 }
9630 staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
9631 availableVstabModes.array(), availableVstabModes.size());
9632
9633 /*HAL 1 and HAL 3 common*/
9634 uint32_t zoomSteps = gCamCapability[cameraId]->zoom_ratio_tbl_cnt;
9635 uint32_t maxZoomStep = gCamCapability[cameraId]->zoom_ratio_tbl[zoomSteps - 1];
9636 uint32_t minZoomStep = 100; //as per HAL1/API1 spec
Zhijun He2a5df222017-04-04 18:20:38 -07009637 // Cap the max zoom to the max preferred value
9638 float maxZoom = MIN(maxZoomStep/minZoomStep, MAX_PREFERRED_ZOOM_RATIO);
Thierry Strudel3d639192016-09-09 11:52:26 -07009639 staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
9640 &maxZoom, 1);
9641
9642 uint8_t croppingType = ANDROID_SCALER_CROPPING_TYPE_CENTER_ONLY;
9643 staticInfo.update(ANDROID_SCALER_CROPPING_TYPE, &croppingType, 1);
9644
9645 int32_t max3aRegions[3] = {/*AE*/1,/*AWB*/ 0,/*AF*/ 1};
9646 if (gCamCapability[cameraId]->supported_focus_modes_cnt == 1)
9647 max3aRegions[2] = 0; /* AF not supported */
9648 staticInfo.update(ANDROID_CONTROL_MAX_REGIONS,
9649 max3aRegions, 3);
9650
9651 /* 0: OFF, 1: OFF+SIMPLE, 2: OFF+FULL, 3: OFF+SIMPLE+FULL */
9652 memset(prop, 0, sizeof(prop));
9653 property_get("persist.camera.facedetect", prop, "1");
9654 uint8_t supportedFaceDetectMode = (uint8_t)atoi(prop);
9655 LOGD("Support face detection mode: %d",
9656 supportedFaceDetectMode);
9657
9658 int32_t maxFaces = gCamCapability[cameraId]->max_num_roi;
Thierry Strudel04e026f2016-10-10 11:27:36 -07009659    /* support mode should be OFF if the max number of faces is 0 */
9660 if (maxFaces <= 0) {
9661 supportedFaceDetectMode = 0;
9662 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009663 Vector<uint8_t> availableFaceDetectModes;
9664 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_OFF);
9665 if (supportedFaceDetectMode == 1) {
9666 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
9667 } else if (supportedFaceDetectMode == 2) {
9668 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
9669 } else if (supportedFaceDetectMode == 3) {
9670 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
9671 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
9672 } else {
9673 maxFaces = 0;
9674 }
9675 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
9676 availableFaceDetectModes.array(),
9677 availableFaceDetectModes.size());
9678 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
9679 (int32_t *)&maxFaces, 1);
Thierry Strudel54dc9782017-02-15 12:12:10 -08009680 uint8_t face_bsgc = gCamCapability[cameraId]->face_bsgc;
9681 staticInfo.update(QCAMERA3_STATS_BSGC_AVAILABLE,
9682 &face_bsgc, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -07009683
9684 int32_t exposureCompensationRange[] = {
9685 gCamCapability[cameraId]->exposure_compensation_min,
9686 gCamCapability[cameraId]->exposure_compensation_max};
9687 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
9688 exposureCompensationRange,
9689 sizeof(exposureCompensationRange)/sizeof(int32_t));
9690
9691 uint8_t lensFacing = (facingBack) ?
9692 ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
9693 staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1);
9694
9695 staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
9696 available_thumbnail_sizes,
9697 sizeof(available_thumbnail_sizes)/sizeof(int32_t));
9698
9699 /*all sizes will be clubbed into this tag*/
9700 count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
9701 /*android.scaler.availableStreamConfigurations*/
9702 Vector<int32_t> available_stream_configs;
9703 cam_dimension_t active_array_dim;
9704 active_array_dim.width = gCamCapability[cameraId]->active_array_size.width;
9705 active_array_dim.height = gCamCapability[cameraId]->active_array_size.height;
Thierry Strudel2896d122017-02-23 19:18:03 -08009706
9707    /*advertise the list of supported input dimensions based on the property below.
9708      By default all sizes up to 5MP will be advertised.
9709      Note that the setprop resolution format should be WxH.
9710      e.g.: adb shell setprop persist.camera.input.minsize 1280x720
9711      To list all supported sizes, the setprop needs to be set to "0x0" */
9712 cam_dimension_t minInputSize = {2592,1944}; //5MP
9713 memset(prop, 0, sizeof(prop));
9714 property_get("persist.camera.input.minsize", prop, "2592x1944");
9715 if (strlen(prop) > 0) {
9716 char *saveptr = NULL;
9717 char *token = strtok_r(prop, "x", &saveptr);
9718 if (token != NULL) {
9719 minInputSize.width = atoi(token);
9720 }
9721 token = strtok_r(NULL, "x", &saveptr);
9722 if (token != NULL) {
9723 minInputSize.height = atoi(token);
9724 }
9725 }
9726
Thierry Strudel3d639192016-09-09 11:52:26 -07009727 /* Add input/output stream configurations for each scalar formats*/
9728 for (size_t j = 0; j < scalar_formats_count; j++) {
9729 switch (scalar_formats[j]) {
9730 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
9731 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
9732 case HAL_PIXEL_FORMAT_RAW10:
9733 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9734 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
9735 addStreamConfig(available_stream_configs, scalar_formats[j],
9736 gCamCapability[cameraId]->raw_dim[i],
9737 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
9738 }
9739 break;
9740 case HAL_PIXEL_FORMAT_BLOB:
9741 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9742 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9743 addStreamConfig(available_stream_configs, scalar_formats[j],
9744 gCamCapability[cameraId]->picture_sizes_tbl[i],
9745 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
9746 }
9747 break;
9748 case HAL_PIXEL_FORMAT_YCbCr_420_888:
9749 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
9750 default:
9751 cam_dimension_t largest_picture_size;
9752 memset(&largest_picture_size, 0, sizeof(cam_dimension_t));
9753 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9754 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9755 addStreamConfig(available_stream_configs, scalar_formats[j],
9756 gCamCapability[cameraId]->picture_sizes_tbl[i],
9757 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
Thierry Strudel2896d122017-02-23 19:18:03 -08009758                /*For the two formats below we also support input (reprocessing) streams; advertise those*/
Zhijun Hee0cc0ae2017-05-19 22:19:27 -07009759 if ((scalar_formats[j] == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
9760 scalar_formats[j] == HAL_PIXEL_FORMAT_YCbCr_420_888) && i == 0) {
Thierry Strudel2896d122017-02-23 19:18:03 -08009761 if ((gCamCapability[cameraId]->picture_sizes_tbl[i].width
9762 >= minInputSize.width) || (gCamCapability[cameraId]->
9763 picture_sizes_tbl[i].height >= minInputSize.height)) {
9764 addStreamConfig(available_stream_configs, scalar_formats[j],
9765 gCamCapability[cameraId]->picture_sizes_tbl[i],
9766 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_INPUT);
9767 }
9768 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009769 }
Thierry Strudel2896d122017-02-23 19:18:03 -08009770
Thierry Strudel3d639192016-09-09 11:52:26 -07009771 break;
9772 }
9773 }
9774
9775 staticInfo.update(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
9776 available_stream_configs.array(), available_stream_configs.size());
9777 static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
9778 staticInfo.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
9779
9780 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
9781 staticInfo.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
9782
9783 /* android.scaler.availableMinFrameDurations */
9784 Vector<int64_t> available_min_durations;
9785 for (size_t j = 0; j < scalar_formats_count; j++) {
9786 switch (scalar_formats[j]) {
9787 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
9788 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
9789 case HAL_PIXEL_FORMAT_RAW10:
9790 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9791 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
9792 available_min_durations.add(scalar_formats[j]);
9793 available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
9794 available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
9795 available_min_durations.add(gCamCapability[cameraId]->raw_min_duration[i]);
9796 }
9797 break;
9798 default:
9799 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9800 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9801 available_min_durations.add(scalar_formats[j]);
9802 available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
9803 available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
9804 available_min_durations.add(gCamCapability[cameraId]->picture_min_duration[i]);
9805 }
9806 break;
9807 }
9808 }
9809 staticInfo.update(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
9810 available_min_durations.array(), available_min_durations.size());
9811
9812 Vector<int32_t> available_hfr_configs;
9813 for (size_t i = 0; i < gCamCapability[cameraId]->hfr_tbl_cnt; i++) {
9814 int32_t fps = 0;
9815 switch (gCamCapability[cameraId]->hfr_tbl[i].mode) {
9816 case CAM_HFR_MODE_60FPS:
9817 fps = 60;
9818 break;
9819 case CAM_HFR_MODE_90FPS:
9820 fps = 90;
9821 break;
9822 case CAM_HFR_MODE_120FPS:
9823 fps = 120;
9824 break;
9825 case CAM_HFR_MODE_150FPS:
9826 fps = 150;
9827 break;
9828 case CAM_HFR_MODE_180FPS:
9829 fps = 180;
9830 break;
9831 case CAM_HFR_MODE_210FPS:
9832 fps = 210;
9833 break;
9834 case CAM_HFR_MODE_240FPS:
9835 fps = 240;
9836 break;
9837 case CAM_HFR_MODE_480FPS:
9838 fps = 480;
9839 break;
9840 case CAM_HFR_MODE_OFF:
9841 case CAM_HFR_MODE_MAX:
9842 default:
9843 break;
9844 }
9845
9846 /* Advertise only MIN_FPS_FOR_BATCH_MODE or above as HIGH_SPEED_CONFIGS */
9847 if (fps >= MIN_FPS_FOR_BATCH_MODE) {
9848 /* For each HFR frame rate, need to advertise one variable fps range
9849 * and one fixed fps range per dimension. Eg: for 120 FPS, advertise [30, 120]
9850 * and [120, 120]. While camcorder preview alone is running [30, 120] is
9851 * set by the app. When video recording is started, [120, 120] is
9852 * set. This way sensor configuration does not change when recording
9853 * is started */
9854
9855 /* (width, height, fps_min, fps_max, batch_size_max) */
9856 for (size_t j = 0; j < gCamCapability[cameraId]->hfr_tbl[i].dim_cnt &&
9857 j < MAX_SIZES_CNT; j++) {
9858 available_hfr_configs.add(
9859 gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
9860 available_hfr_configs.add(
9861 gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
9862 available_hfr_configs.add(PREVIEW_FPS_FOR_HFR);
9863 available_hfr_configs.add(fps);
9864 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
9865
9866 /* (width, height, fps_min, fps_max, batch_size_max) */
9867 available_hfr_configs.add(
9868 gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
9869 available_hfr_configs.add(
9870 gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
9871 available_hfr_configs.add(fps);
9872 available_hfr_configs.add(fps);
9873 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
9874 }
9875 }
9876 }
9877 //Advertise HFR capability only if the property is set
9878 memset(prop, 0, sizeof(prop));
9879 property_get("persist.camera.hal3hfr.enable", prop, "1");
9880 uint8_t hfrEnable = (uint8_t)atoi(prop);
9881
9882 if(hfrEnable && available_hfr_configs.array()) {
9883 staticInfo.update(
9884 ANDROID_CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS,
9885 available_hfr_configs.array(), available_hfr_configs.size());
9886 }
9887
9888 int32_t max_jpeg_size = (int32_t)calcMaxJpegSize(cameraId);
9889 staticInfo.update(ANDROID_JPEG_MAX_SIZE,
9890 &max_jpeg_size, 1);
9891
9892 uint8_t avail_effects[CAM_EFFECT_MODE_MAX];
9893 size_t size = 0;
9894 count = CAM_EFFECT_MODE_MAX;
9895 count = MIN(gCamCapability[cameraId]->supported_effects_cnt, count);
9896 for (size_t i = 0; i < count; i++) {
9897 int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
9898 gCamCapability[cameraId]->supported_effects[i]);
9899 if (NAME_NOT_FOUND != val) {
9900 avail_effects[size] = (uint8_t)val;
9901 size++;
9902 }
9903 }
9904 staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
9905 avail_effects,
9906 size);
9907
9908 uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX];
9909 uint8_t supported_indexes[CAM_SCENE_MODE_MAX];
9910 size_t supported_scene_modes_cnt = 0;
9911 count = CAM_SCENE_MODE_MAX;
9912 count = MIN(gCamCapability[cameraId]->supported_scene_modes_cnt, count);
9913 for (size_t i = 0; i < count; i++) {
9914 if (gCamCapability[cameraId]->supported_scene_modes[i] !=
9915 CAM_SCENE_MODE_OFF) {
9916 int val = lookupFwkName(SCENE_MODES_MAP,
9917 METADATA_MAP_SIZE(SCENE_MODES_MAP),
9918 gCamCapability[cameraId]->supported_scene_modes[i]);
Mansoor Aftab58465fa2017-01-26 15:02:44 -08009919
Thierry Strudel3d639192016-09-09 11:52:26 -07009920 if (NAME_NOT_FOUND != val) {
9921 avail_scene_modes[supported_scene_modes_cnt] = (uint8_t)val;
9922 supported_indexes[supported_scene_modes_cnt] = (uint8_t)i;
9923 supported_scene_modes_cnt++;
9924 }
9925 }
9926 }
9927 staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
9928 avail_scene_modes,
9929 supported_scene_modes_cnt);
9930
9931 uint8_t scene_mode_overrides[CAM_SCENE_MODE_MAX * 3];
9932 makeOverridesList(gCamCapability[cameraId]->scene_mode_overrides,
9933 supported_scene_modes_cnt,
9934 CAM_SCENE_MODE_MAX,
9935 scene_mode_overrides,
9936 supported_indexes,
9937 cameraId);
9938
9939 if (supported_scene_modes_cnt == 0) {
9940 supported_scene_modes_cnt = 1;
9941 avail_scene_modes[0] = ANDROID_CONTROL_SCENE_MODE_DISABLED;
9942 }
9943
9944 staticInfo.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
9945 scene_mode_overrides, supported_scene_modes_cnt * 3);
9946
9947 uint8_t available_control_modes[] = {ANDROID_CONTROL_MODE_OFF,
9948 ANDROID_CONTROL_MODE_AUTO,
9949 ANDROID_CONTROL_MODE_USE_SCENE_MODE};
9950 staticInfo.update(ANDROID_CONTROL_AVAILABLE_MODES,
9951 available_control_modes,
9952 3);
9953
9954 uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX];
9955 size = 0;
9956 count = CAM_ANTIBANDING_MODE_MAX;
9957 count = MIN(gCamCapability[cameraId]->supported_antibandings_cnt, count);
9958 for (size_t i = 0; i < count; i++) {
9959 int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
9960 gCamCapability[cameraId]->supported_antibandings[i]);
9961 if (NAME_NOT_FOUND != val) {
9962 avail_antibanding_modes[size] = (uint8_t)val;
9963 size++;
9964 }
9965
9966 }
9967 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
9968 avail_antibanding_modes,
9969 size);
9970
9971 uint8_t avail_abberation_modes[] = {
9972 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
9973 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
9974 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY};
9975 count = CAM_COLOR_CORRECTION_ABERRATION_MAX;
9976 count = MIN(gCamCapability[cameraId]->aberration_modes_count, count);
9977 if (0 == count) {
9978        // If no aberration correction modes are available for a device, advertise only the OFF mode
9979 size = 1;
9980 } else {
9981        // If count is not zero then at least one of FAST or HIGH_QUALITY is supported.
9982        // So, advertise all 3 modes if at least one mode is supported, as per the
9983        // new M requirement
9984 size = 3;
9985 }
9986 staticInfo.update(ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
9987 avail_abberation_modes,
9988 size);
9989
9990 uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX];
9991 size = 0;
9992 count = CAM_FOCUS_MODE_MAX;
9993 count = MIN(gCamCapability[cameraId]->supported_focus_modes_cnt, count);
9994 for (size_t i = 0; i < count; i++) {
9995 int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
9996 gCamCapability[cameraId]->supported_focus_modes[i]);
9997 if (NAME_NOT_FOUND != val) {
9998 avail_af_modes[size] = (uint8_t)val;
9999 size++;
10000 }
10001 }
10002 staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
10003 avail_af_modes,
10004 size);
10005
10006 uint8_t avail_awb_modes[CAM_WB_MODE_MAX];
10007 size = 0;
10008 count = CAM_WB_MODE_MAX;
10009 count = MIN(gCamCapability[cameraId]->supported_white_balances_cnt, count);
10010 for (size_t i = 0; i < count; i++) {
10011 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
10012 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
10013 gCamCapability[cameraId]->supported_white_balances[i]);
10014 if (NAME_NOT_FOUND != val) {
10015 avail_awb_modes[size] = (uint8_t)val;
10016 size++;
10017 }
10018 }
10019 staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
10020 avail_awb_modes,
10021 size);
10022
10023 uint8_t available_flash_levels[CAM_FLASH_FIRING_LEVEL_MAX];
10024 count = CAM_FLASH_FIRING_LEVEL_MAX;
10025 count = MIN(gCamCapability[cameraId]->supported_flash_firing_level_cnt,
10026 count);
10027 for (size_t i = 0; i < count; i++) {
10028 available_flash_levels[i] =
10029 gCamCapability[cameraId]->supported_firing_levels[i];
10030 }
10031 staticInfo.update(ANDROID_FLASH_FIRING_POWER,
10032 available_flash_levels, count);
10033
10034 uint8_t flashAvailable;
10035 if (gCamCapability[cameraId]->flash_available)
10036 flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_TRUE;
10037 else
10038 flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_FALSE;
10039 staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE,
10040 &flashAvailable, 1);
10041
10042 Vector<uint8_t> avail_ae_modes;
10043 count = CAM_AE_MODE_MAX;
10044 count = MIN(gCamCapability[cameraId]->supported_ae_modes_cnt, count);
10045 for (size_t i = 0; i < count; i++) {
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -080010046 uint8_t aeMode = gCamCapability[cameraId]->supported_ae_modes[i];
10047 if (aeMode == CAM_AE_MODE_ON_EXTERNAL_FLASH) {
10048 aeMode = NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH;
10049 }
10050 avail_ae_modes.add(aeMode);
Thierry Strudel3d639192016-09-09 11:52:26 -070010051 }
10052 if (flashAvailable) {
10053 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH);
10054 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH);
10055 }
10056 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
10057 avail_ae_modes.array(),
10058 avail_ae_modes.size());
10059
10060 int32_t sensitivity_range[2];
10061 sensitivity_range[0] = gCamCapability[cameraId]->sensitivity_range.min_sensitivity;
10062 sensitivity_range[1] = gCamCapability[cameraId]->sensitivity_range.max_sensitivity;
10063 staticInfo.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
10064 sensitivity_range,
10065 sizeof(sensitivity_range) / sizeof(int32_t));
10066
10067 staticInfo.update(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
10068 &gCamCapability[cameraId]->max_analog_sensitivity,
10069 1);
10070
10071 int32_t sensor_orientation = (int32_t)gCamCapability[cameraId]->sensor_mount_angle;
10072 staticInfo.update(ANDROID_SENSOR_ORIENTATION,
10073 &sensor_orientation,
10074 1);
10075
10076 int32_t max_output_streams[] = {
10077 MAX_STALLING_STREAMS,
10078 MAX_PROCESSED_STREAMS,
10079 MAX_RAW_STREAMS};
10080 staticInfo.update(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
10081 max_output_streams,
10082 sizeof(max_output_streams)/sizeof(max_output_streams[0]));
10083
10084 uint8_t avail_leds = 0;
10085 staticInfo.update(ANDROID_LED_AVAILABLE_LEDS,
10086 &avail_leds, 0);
10087
10088 uint8_t focus_dist_calibrated;
10089 int val = lookupFwkName(FOCUS_CALIBRATION_MAP, METADATA_MAP_SIZE(FOCUS_CALIBRATION_MAP),
10090 gCamCapability[cameraId]->focus_dist_calibrated);
10091 if (NAME_NOT_FOUND != val) {
10092 focus_dist_calibrated = (uint8_t)val;
10093 staticInfo.update(ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
10094 &focus_dist_calibrated, 1);
10095 }
10096
10097 int32_t avail_testpattern_modes[MAX_TEST_PATTERN_CNT];
10098 size = 0;
10099 count = MIN(gCamCapability[cameraId]->supported_test_pattern_modes_cnt,
10100 MAX_TEST_PATTERN_CNT);
10101 for (size_t i = 0; i < count; i++) {
10102 int testpatternMode = lookupFwkName(TEST_PATTERN_MAP, METADATA_MAP_SIZE(TEST_PATTERN_MAP),
10103 gCamCapability[cameraId]->supported_test_pattern_modes[i]);
10104 if (NAME_NOT_FOUND != testpatternMode) {
10105 avail_testpattern_modes[size] = testpatternMode;
10106 size++;
10107 }
10108 }
10109 staticInfo.update(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
10110 avail_testpattern_modes,
10111 size);
10112
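    // The advertised pipeline depth covers the maximum in-flight requests plus the empty-pipeline and frame-skip delays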
10113 uint8_t max_pipeline_depth = (uint8_t)(MAX_INFLIGHT_REQUESTS + EMPTY_PIPELINE_DELAY + FRAME_SKIP_DELAY);
10114 staticInfo.update(ANDROID_REQUEST_PIPELINE_MAX_DEPTH,
10115 &max_pipeline_depth,
10116 1);
10117
10118 int32_t partial_result_count = PARTIAL_RESULT_COUNT;
10119 staticInfo.update(ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
10120 &partial_result_count,
10121 1);
10122
10123 int32_t max_stall_duration = MAX_REPROCESS_STALL;
10124 staticInfo.update(ANDROID_REPROCESS_MAX_CAPTURE_STALL, &max_stall_duration, 1);
10125
10126 Vector<uint8_t> available_capabilities;
10127 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE);
10128 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR);
10129 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_POST_PROCESSING);
10130 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS);
10131 if (supportBurst) {
10132 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE);
10133 }
10134 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING);
10135 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING);
10136 if (hfrEnable && available_hfr_configs.array()) {
10137 available_capabilities.add(
10138 ANDROID_REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO);
10139 }
10140
10141 if (CAM_SENSOR_YUV != gCamCapability[cameraId]->sensor_type.sens_type) {
10142 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_RAW);
10143 }
10144 staticInfo.update(ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
10145 available_capabilities.array(),
10146 available_capabilities.size());
10147
10148    //aeLockAvailable is set to true if the capabilities include MANUAL_SENSOR or BURST_CAPTURE.
10149    //Assumption is that all Bayer cameras support MANUAL_SENSOR.
10150 uint8_t aeLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
10151 ANDROID_CONTROL_AE_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AE_LOCK_AVAILABLE_FALSE;
10152
10153 staticInfo.update(ANDROID_CONTROL_AE_LOCK_AVAILABLE,
10154 &aeLockAvailable, 1);
10155
10156    //awbLockAvailable is set to true if the capabilities include MANUAL_POST_PROCESSING or
10157    //BURST_CAPTURE. Assumption is that all Bayer cameras support MANUAL_POST_PROCESSING.
10158 uint8_t awbLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
10159 ANDROID_CONTROL_AWB_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AWB_LOCK_AVAILABLE_FALSE;
10160
10161 staticInfo.update(ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
10162 &awbLockAvailable, 1);
10163
10164 int32_t max_input_streams = 1;
10165 staticInfo.update(ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
10166 &max_input_streams,
10167 1);
10168
10169 /* format of the map is : input format, num_output_formats, outputFormat1,..,outputFormatN */
10170 int32_t io_format_map[] = {HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 2,
10171 HAL_PIXEL_FORMAT_BLOB, HAL_PIXEL_FORMAT_YCbCr_420_888,
10172 HAL_PIXEL_FORMAT_YCbCr_420_888, 2, HAL_PIXEL_FORMAT_BLOB,
10173 HAL_PIXEL_FORMAT_YCbCr_420_888};
10174 staticInfo.update(ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
10175 io_format_map, sizeof(io_format_map)/sizeof(io_format_map[0]));
10176
10177 int32_t max_latency = ANDROID_SYNC_MAX_LATENCY_PER_FRAME_CONTROL;
10178 staticInfo.update(ANDROID_SYNC_MAX_LATENCY,
10179 &max_latency,
10180 1);
10181
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010182#ifndef USE_HAL_3_3
10183 int32_t isp_sensitivity_range[2];
10184 isp_sensitivity_range[0] =
10185 gCamCapability[cameraId]->isp_sensitivity_range.min_sensitivity;
10186 isp_sensitivity_range[1] =
10187 gCamCapability[cameraId]->isp_sensitivity_range.max_sensitivity;
10188 staticInfo.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
10189 isp_sensitivity_range,
10190 sizeof(isp_sensitivity_range) / sizeof(isp_sensitivity_range[0]));
10191#endif
10192
Thierry Strudel3d639192016-09-09 11:52:26 -070010193 uint8_t available_hot_pixel_modes[] = {ANDROID_HOT_PIXEL_MODE_FAST,
10194 ANDROID_HOT_PIXEL_MODE_HIGH_QUALITY};
10195 staticInfo.update(ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
10196 available_hot_pixel_modes,
10197 sizeof(available_hot_pixel_modes)/sizeof(available_hot_pixel_modes[0]));
10198
10199 uint8_t available_shading_modes[] = {ANDROID_SHADING_MODE_OFF,
10200 ANDROID_SHADING_MODE_FAST,
10201 ANDROID_SHADING_MODE_HIGH_QUALITY};
10202 staticInfo.update(ANDROID_SHADING_AVAILABLE_MODES,
10203 available_shading_modes,
10204 3);
10205
10206 uint8_t available_lens_shading_map_modes[] = {ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF,
10207 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON};
10208 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
10209 available_lens_shading_map_modes,
10210 2);
10211
10212 uint8_t available_edge_modes[] = {ANDROID_EDGE_MODE_OFF,
10213 ANDROID_EDGE_MODE_FAST,
10214 ANDROID_EDGE_MODE_HIGH_QUALITY,
10215 ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG};
10216 staticInfo.update(ANDROID_EDGE_AVAILABLE_EDGE_MODES,
10217 available_edge_modes,
10218 sizeof(available_edge_modes)/sizeof(available_edge_modes[0]));
10219
10220 uint8_t available_noise_red_modes[] = {ANDROID_NOISE_REDUCTION_MODE_OFF,
10221 ANDROID_NOISE_REDUCTION_MODE_FAST,
10222 ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY,
10223 ANDROID_NOISE_REDUCTION_MODE_MINIMAL,
10224 ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG};
10225 staticInfo.update(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
10226 available_noise_red_modes,
10227 sizeof(available_noise_red_modes)/sizeof(available_noise_red_modes[0]));
10228
10229 uint8_t available_tonemap_modes[] = {ANDROID_TONEMAP_MODE_CONTRAST_CURVE,
10230 ANDROID_TONEMAP_MODE_FAST,
10231 ANDROID_TONEMAP_MODE_HIGH_QUALITY};
10232 staticInfo.update(ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
10233 available_tonemap_modes,
10234 sizeof(available_tonemap_modes)/sizeof(available_tonemap_modes[0]));
10235
10236 uint8_t available_hot_pixel_map_modes[] = {ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF};
10237 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
10238 available_hot_pixel_map_modes,
10239 sizeof(available_hot_pixel_map_modes)/sizeof(available_hot_pixel_map_modes[0]));
10240
10241 val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
10242 gCamCapability[cameraId]->reference_illuminant1);
10243 if (NAME_NOT_FOUND != val) {
10244 uint8_t fwkReferenceIlluminant = (uint8_t)val;
10245 staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT1, &fwkReferenceIlluminant, 1);
10246 }
10247
10248 val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
10249 gCamCapability[cameraId]->reference_illuminant2);
10250 if (NAME_NOT_FOUND != val) {
10251 uint8_t fwkReferenceIlluminant = (uint8_t)val;
10252 staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT2, &fwkReferenceIlluminant, 1);
10253 }
10254
10255 staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX1, (camera_metadata_rational_t *)
10256 (void *)gCamCapability[cameraId]->forward_matrix1,
10257 FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
10258
10259 staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX2, (camera_metadata_rational_t *)
10260 (void *)gCamCapability[cameraId]->forward_matrix2,
10261 FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
10262
10263 staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM1, (camera_metadata_rational_t *)
10264 (void *)gCamCapability[cameraId]->color_transform1,
10265 COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
10266
10267 staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM2, (camera_metadata_rational_t *)
10268 (void *)gCamCapability[cameraId]->color_transform2,
10269 COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
10270
10271 staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM1, (camera_metadata_rational_t *)
10272 (void *)gCamCapability[cameraId]->calibration_transform1,
10273 CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
10274
10275 staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM2, (camera_metadata_rational_t *)
10276 (void *)gCamCapability[cameraId]->calibration_transform2,
10277 CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
10278
10279 int32_t request_keys_basic[] = {ANDROID_COLOR_CORRECTION_MODE,
10280 ANDROID_COLOR_CORRECTION_TRANSFORM, ANDROID_COLOR_CORRECTION_GAINS,
10281 ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
10282 ANDROID_CONTROL_AE_ANTIBANDING_MODE, ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
10283 ANDROID_CONTROL_AE_LOCK, ANDROID_CONTROL_AE_MODE,
10284 ANDROID_CONTROL_AE_REGIONS, ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
10285 ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, ANDROID_CONTROL_AF_MODE,
10286 ANDROID_CONTROL_AF_TRIGGER, ANDROID_CONTROL_AWB_LOCK,
10287 ANDROID_CONTROL_AWB_MODE, ANDROID_CONTROL_CAPTURE_INTENT,
10288 ANDROID_CONTROL_EFFECT_MODE, ANDROID_CONTROL_MODE,
10289 ANDROID_CONTROL_SCENE_MODE, ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
10290 ANDROID_DEMOSAIC_MODE, ANDROID_EDGE_MODE,
10291 ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
10292 ANDROID_JPEG_GPS_COORDINATES,
10293 ANDROID_JPEG_GPS_PROCESSING_METHOD, ANDROID_JPEG_GPS_TIMESTAMP,
10294 ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY, ANDROID_JPEG_THUMBNAIL_QUALITY,
10295 ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE, ANDROID_LENS_FILTER_DENSITY,
10296 ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
10297 ANDROID_LENS_OPTICAL_STABILIZATION_MODE, ANDROID_NOISE_REDUCTION_MODE,
10298 ANDROID_REQUEST_ID, ANDROID_REQUEST_TYPE,
10299 ANDROID_SCALER_CROP_REGION, ANDROID_SENSOR_EXPOSURE_TIME,
10300 ANDROID_SENSOR_FRAME_DURATION, ANDROID_HOT_PIXEL_MODE,
10301 ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE,
10302 ANDROID_SENSOR_SENSITIVITY, ANDROID_SHADING_MODE,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010303#ifndef USE_HAL_3_3
10304 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
10305#endif
Thierry Strudel3d639192016-09-09 11:52:26 -070010306 ANDROID_STATISTICS_FACE_DETECT_MODE,
Thierry Strudel54dc9782017-02-15 12:12:10 -080010307 ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
Thierry Strudel3d639192016-09-09 11:52:26 -070010308 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, ANDROID_TONEMAP_CURVE_BLUE,
10309 ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
Shuzhen Wang2abea3d2016-03-31 11:09:27 -070010310 ANDROID_BLACK_LEVEL_LOCK, NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE,
Samuel Ha68ba5172016-12-15 18:41:12 -080010311 /* DevCamDebug metadata request_keys_basic */
10312 DEVCAMDEBUG_META_ENABLE,
10313 /* DevCamDebug metadata end */
Shuzhen Wang14415f52016-11-16 18:26:18 -080010314 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE,
Ranjith Kagathi Ananda0533b682017-03-24 17:52:46 -070010315 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS,
Ranjith Kagathi Anandae5df3752017-04-28 11:22:51 -070010316 TANGO_MODE_DATA_SENSOR_FULLFOV,
Shuzhen Wangf1b4ddc2017-04-10 18:22:11 -070010317 NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER,
Emilian Peev656e4fa2017-06-02 16:47:04 +010010318 NEXUS_EXPERIMENTAL_2017_PD_DATA_ENABLE,
Samuel Ha68ba5172016-12-15 18:41:12 -080010319 };
Thierry Strudel3d639192016-09-09 11:52:26 -070010320
10321 size_t request_keys_cnt =
10322 sizeof(request_keys_basic)/sizeof(request_keys_basic[0]);
10323 Vector<int32_t> available_request_keys;
10324 available_request_keys.appendArray(request_keys_basic, request_keys_cnt);
10325 if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
10326 available_request_keys.add(ANDROID_CONTROL_AF_REGIONS);
10327 }
10328
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010329 if (gExposeEnableZslKey) {
Chenjie Luo4a761802017-06-13 17:35:54 +000010330 available_request_keys.add(ANDROID_CONTROL_ENABLE_ZSL);
Chien-Yu Chen0a921f92017-08-27 17:25:33 -070010331 available_request_keys.add(NEXUS_EXPERIMENTAL_2017_POSTVIEW);
Chien-Yu Chenb0981e32017-08-28 19:27:35 -070010332 available_request_keys.add(NEXUS_EXPERIMENTAL_2017_CONTINUOUS_ZSL_CAPTURE);
Chien-Yu Chenec328c82017-08-30 16:41:08 -070010333 available_request_keys.add(NEXUS_EXPERIMENTAL_2017_DISABLE_HDRPLUS);
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010334 }
10335
Thierry Strudel3d639192016-09-09 11:52:26 -070010336 staticInfo.update(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS,
10337 available_request_keys.array(), available_request_keys.size());
10338
10339 int32_t result_keys_basic[] = {ANDROID_COLOR_CORRECTION_TRANSFORM,
10340 ANDROID_COLOR_CORRECTION_GAINS, ANDROID_CONTROL_AE_MODE, ANDROID_CONTROL_AE_REGIONS,
10341 ANDROID_CONTROL_AE_STATE, ANDROID_CONTROL_AF_MODE,
10342 ANDROID_CONTROL_AF_STATE, ANDROID_CONTROL_AWB_MODE,
10343 ANDROID_CONTROL_AWB_STATE, ANDROID_CONTROL_MODE, ANDROID_EDGE_MODE,
10344 ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
10345 ANDROID_FLASH_STATE, ANDROID_JPEG_GPS_COORDINATES, ANDROID_JPEG_GPS_PROCESSING_METHOD,
10346 ANDROID_JPEG_GPS_TIMESTAMP, ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY,
10347 ANDROID_JPEG_THUMBNAIL_QUALITY, ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE,
10348 ANDROID_LENS_FILTER_DENSITY, ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
10349 ANDROID_LENS_FOCUS_RANGE, ANDROID_LENS_STATE, ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
10350 ANDROID_NOISE_REDUCTION_MODE, ANDROID_REQUEST_ID,
10351 ANDROID_SCALER_CROP_REGION, ANDROID_SHADING_MODE, ANDROID_SENSOR_EXPOSURE_TIME,
10352 ANDROID_SENSOR_FRAME_DURATION, ANDROID_SENSOR_SENSITIVITY,
10353 ANDROID_SENSOR_TIMESTAMP, ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
10354 ANDROID_SENSOR_PROFILE_TONE_CURVE, ANDROID_BLACK_LEVEL_LOCK, ANDROID_TONEMAP_CURVE_BLUE,
10355 ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
Thierry Strudel54dc9782017-02-15 12:12:10 -080010356 ANDROID_STATISTICS_FACE_DETECT_MODE,
Thierry Strudel3d639192016-09-09 11:52:26 -070010357 ANDROID_STATISTICS_SHARPNESS_MAP, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
10358 ANDROID_STATISTICS_PREDICTED_COLOR_GAINS, ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
10359 ANDROID_STATISTICS_SCENE_FLICKER, ANDROID_STATISTICS_FACE_RECTANGLES,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010360 ANDROID_STATISTICS_FACE_SCORES,
10361#ifndef USE_HAL_3_3
10362 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
10363#endif
Shuzhen Wang2abea3d2016-03-31 11:09:27 -070010364 NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE,
Shuzhen Wange763e802016-03-31 10:24:29 -070010365 NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE,
Samuel Ha68ba5172016-12-15 18:41:12 -080010366 // DevCamDebug metadata result_keys_basic
10367 DEVCAMDEBUG_META_ENABLE,
10368 // DevCamDebug metadata result_keys AF
10369 DEVCAMDEBUG_AF_LENS_POSITION,
10370 DEVCAMDEBUG_AF_TOF_CONFIDENCE,
10371 DEVCAMDEBUG_AF_TOF_DISTANCE,
10372 DEVCAMDEBUG_AF_LUMA,
10373 DEVCAMDEBUG_AF_HAF_STATE,
10374 DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
10375 DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
10376 DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
10377 DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
10378 DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
10379 DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
10380 DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
10381 DEVCAMDEBUG_AF_MONITOR_REFOCUS,
10382 DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
10383 DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
10384 DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
10385 DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
10386 DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
10387 DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
10388 DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
10389 DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
10390 DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
10391 DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
10392 DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
10393 DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
10394 DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
10395 DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
10396 // DevCamDebug metadata result_keys AEC
10397 DEVCAMDEBUG_AEC_TARGET_LUMA,
10398 DEVCAMDEBUG_AEC_COMP_LUMA,
10399 DEVCAMDEBUG_AEC_AVG_LUMA,
10400 DEVCAMDEBUG_AEC_CUR_LUMA,
10401 DEVCAMDEBUG_AEC_LINECOUNT,
10402 DEVCAMDEBUG_AEC_REAL_GAIN,
10403 DEVCAMDEBUG_AEC_EXP_INDEX,
10404 DEVCAMDEBUG_AEC_LUX_IDX,
Samuel Ha34229982017-02-17 13:51:11 -080010405 // DevCamDebug metadata result_keys zzHDR
10406 DEVCAMDEBUG_AEC_L_REAL_GAIN,
10407 DEVCAMDEBUG_AEC_L_LINECOUNT,
10408 DEVCAMDEBUG_AEC_S_REAL_GAIN,
10409 DEVCAMDEBUG_AEC_S_LINECOUNT,
10410 DEVCAMDEBUG_AEC_HDR_SENSITIVITY_RATIO,
10411 DEVCAMDEBUG_AEC_HDR_EXP_TIME_RATIO,
10412 // DevCamDebug metadata result_keys ADRC
10413 DEVCAMDEBUG_AEC_TOTAL_DRC_GAIN,
10414 DEVCAMDEBUG_AEC_COLOR_DRC_GAIN,
10415 DEVCAMDEBUG_AEC_GTM_RATIO,
10416 DEVCAMDEBUG_AEC_LTM_RATIO,
10417 DEVCAMDEBUG_AEC_LA_RATIO,
10418 DEVCAMDEBUG_AEC_GAMMA_RATIO,
Samuel Habdf4fac2017-07-28 17:21:18 -070010419 // DevCamDebug metadata result_keys AEC MOTION
10420 DEVCAMDEBUG_AEC_CAMERA_MOTION_DX,
10421 DEVCAMDEBUG_AEC_CAMERA_MOTION_DY,
10422 DEVCAMDEBUG_AEC_SUBJECT_MOTION,
Samuel Ha68ba5172016-12-15 18:41:12 -080010423 // DevCamDebug metadata result_keys AWB
10424 DEVCAMDEBUG_AWB_R_GAIN,
10425 DEVCAMDEBUG_AWB_G_GAIN,
10426 DEVCAMDEBUG_AWB_B_GAIN,
10427 DEVCAMDEBUG_AWB_CCT,
10428 DEVCAMDEBUG_AWB_DECISION,
10429 /* DevCamDebug metadata end */
Shuzhen Wang14415f52016-11-16 18:26:18 -080010430 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE,
10431 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS,
10432 NEXUS_EXPERIMENTAL_2017_HISTOGRAM,
Shuzhen Wangf1b4ddc2017-04-10 18:22:11 -070010433 NEXUS_EXPERIMENTAL_2017_AF_REGIONS_CONFIDENCE,
Shuzhen Wangc89c77e2017-08-07 15:50:12 -070010434 NEXUS_EXPERIMENTAL_2017_EXP_TIME_BOOST,
Shuzhen Wang3569d4a2017-09-04 19:10:28 -070010435 NEXUS_EXPERIMENTAL_2017_SCENE_DISTANCE,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010436 };
10437
Thierry Strudel3d639192016-09-09 11:52:26 -070010438 size_t result_keys_cnt =
10439 sizeof(result_keys_basic)/sizeof(result_keys_basic[0]);
10440
10441 Vector<int32_t> available_result_keys;
10442 available_result_keys.appendArray(result_keys_basic, result_keys_cnt);
10443 if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
10444 available_result_keys.add(ANDROID_CONTROL_AF_REGIONS);
10445 }
10446 if (CAM_SENSOR_RAW == gCamCapability[cameraId]->sensor_type.sens_type) {
10447 available_result_keys.add(ANDROID_SENSOR_NOISE_PROFILE);
10448 available_result_keys.add(ANDROID_SENSOR_GREEN_SPLIT);
10449 }
10450 if (supportedFaceDetectMode == 1) {
10451 available_result_keys.add(ANDROID_STATISTICS_FACE_RECTANGLES);
10452 available_result_keys.add(ANDROID_STATISTICS_FACE_SCORES);
10453 } else if ((supportedFaceDetectMode == 2) ||
10454 (supportedFaceDetectMode == 3)) {
10455 available_result_keys.add(ANDROID_STATISTICS_FACE_IDS);
10456 available_result_keys.add(ANDROID_STATISTICS_FACE_LANDMARKS);
10457 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010458#ifndef USE_HAL_3_3
Shuzhen Wanga1bc9de2017-09-14 16:54:02 -070010459 {
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010460 available_result_keys.add(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL);
10461 available_result_keys.add(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL);
10462 }
10463#endif
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010464
10465 if (gExposeEnableZslKey) {
10466 available_result_keys.add(ANDROID_CONTROL_ENABLE_ZSL);
Chien-Yu Chendaf68892017-08-25 12:56:40 -070010467 available_result_keys.add(NEXUS_EXPERIMENTAL_2017_NEXT_STILL_INTENT_REQUEST_READY);
Chien-Yu Chen0a921f92017-08-27 17:25:33 -070010468 available_result_keys.add(NEXUS_EXPERIMENTAL_2017_POSTVIEW_CONFIG);
10469 available_result_keys.add(NEXUS_EXPERIMENTAL_2017_POSTVIEW_DATA);
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010470 }
10471
Thierry Strudel3d639192016-09-09 11:52:26 -070010472 staticInfo.update(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
10473 available_result_keys.array(), available_result_keys.size());
10474
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010475 int32_t characteristics_keys_basic[] = {ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
Thierry Strudel3d639192016-09-09 11:52:26 -070010476 ANDROID_CONTROL_AE_AVAILABLE_MODES, ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
10477 ANDROID_CONTROL_AE_COMPENSATION_RANGE, ANDROID_CONTROL_AE_COMPENSATION_STEP,
10478 ANDROID_CONTROL_AF_AVAILABLE_MODES, ANDROID_CONTROL_AVAILABLE_EFFECTS,
10479 ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
10480 ANDROID_SCALER_CROPPING_TYPE,
10481 ANDROID_SYNC_MAX_LATENCY,
10482 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
10483 ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
10484 ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
10485 ANDROID_CONTROL_AWB_AVAILABLE_MODES, ANDROID_CONTROL_MAX_REGIONS,
10486 ANDROID_CONTROL_SCENE_MODE_OVERRIDES,ANDROID_FLASH_INFO_AVAILABLE,
10487 ANDROID_FLASH_INFO_CHARGE_DURATION, ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
10488 ANDROID_JPEG_MAX_SIZE, ANDROID_LENS_INFO_AVAILABLE_APERTURES,
10489 ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
10490 ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
10491 ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
10492 ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE, ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
10493 ANDROID_LENS_INFO_SHADING_MAP_SIZE, ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
10494 ANDROID_LENS_FACING,
10495 ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS, ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
10496 ANDROID_REQUEST_PIPELINE_MAX_DEPTH, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
10497 ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
10498 ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
10499 ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
10500 ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
10501 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
10502 /*ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,*/
10503 ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS, ANDROID_SENSOR_FORWARD_MATRIX1,
10504 ANDROID_SENSOR_REFERENCE_ILLUMINANT1, ANDROID_SENSOR_REFERENCE_ILLUMINANT2,
10505 ANDROID_SENSOR_FORWARD_MATRIX2, ANDROID_SENSOR_COLOR_TRANSFORM1,
10506 ANDROID_SENSOR_COLOR_TRANSFORM2, ANDROID_SENSOR_CALIBRATION_TRANSFORM1,
10507 ANDROID_SENSOR_CALIBRATION_TRANSFORM2, ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
10508 ANDROID_SENSOR_INFO_SENSITIVITY_RANGE, ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
10509 ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE, ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
10510 ANDROID_SENSOR_INFO_PHYSICAL_SIZE, ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
10511 ANDROID_SENSOR_INFO_WHITE_LEVEL, ANDROID_SENSOR_BASE_GAIN_FACTOR,
10512 ANDROID_SENSOR_BLACK_LEVEL_PATTERN, ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
10513 ANDROID_SENSOR_ORIENTATION, ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
10514 ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
Thierry Strudel54dc9782017-02-15 12:12:10 -080010515 ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
Thierry Strudel3d639192016-09-09 11:52:26 -070010516 ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
10517 ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE, ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
10518 ANDROID_EDGE_AVAILABLE_EDGE_MODES,
10519 ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
10520 ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
10521 ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
10522 ANDROID_TONEMAP_MAX_CURVE_POINTS,
10523 ANDROID_CONTROL_AVAILABLE_MODES,
10524 ANDROID_CONTROL_AE_LOCK_AVAILABLE,
10525 ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
10526 ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
10527 ANDROID_SHADING_AVAILABLE_MODES,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010528 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
10529#ifndef USE_HAL_3_3
10530 ANDROID_SENSOR_OPAQUE_RAW_SIZE,
10531 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
10532#endif
10533 };
10534
10535 Vector<int32_t> available_characteristics_keys;
10536 available_characteristics_keys.appendArray(characteristics_keys_basic,
10537 sizeof(characteristics_keys_basic)/sizeof(int32_t));
10538#ifndef USE_HAL_3_3
10539 if (hasBlackRegions) {
10540 available_characteristics_keys.add(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS);
10541 }
10542#endif
Emilian Peev0f3c3162017-03-15 12:57:46 +000010543
10544 if (0 <= indexPD) {
10545 int32_t depthKeys[] = {
10546 ANDROID_DEPTH_MAX_DEPTH_SAMPLES,
10547 ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
10548 ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS,
10549 ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS,
10550 ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE
10551 };
10552 available_characteristics_keys.appendArray(depthKeys,
10553 sizeof(depthKeys) / sizeof(depthKeys[0]));
10554 }
10555
Thierry Strudel3d639192016-09-09 11:52:26 -070010556 staticInfo.update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010557 available_characteristics_keys.array(),
10558 available_characteristics_keys.size());
Thierry Strudel3d639192016-09-09 11:52:26 -070010559
10560 /*available stall durations depend on the hw + sw and will be different for different devices */
10561 /*have to add for raw after implementation*/
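    // Each entry appended below is a (format, width, height, stall duration in ns)
    // quadruple: BLOB (JPEG) sizes use the per-size JPEG stall table and RAW16 sizes
    // use the RAW16 stall table from the capability structure.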
10562 int32_t stall_formats[] = {HAL_PIXEL_FORMAT_BLOB, ANDROID_SCALER_AVAILABLE_FORMATS_RAW16};
10563 size_t stall_formats_count = sizeof(stall_formats)/sizeof(int32_t);
10564
10565 Vector<int64_t> available_stall_durations;
10566 for (uint32_t j = 0; j < stall_formats_count; j++) {
10567 if (stall_formats[j] == HAL_PIXEL_FORMAT_BLOB) {
10568 for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
10569 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
10570 available_stall_durations.add(stall_formats[j]);
10571 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
10572 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
10573 available_stall_durations.add(gCamCapability[cameraId]->jpeg_stall_durations[i]);
10574 }
10575 } else {
10576 for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
10577 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10578 available_stall_durations.add(stall_formats[j]);
10579 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
10580 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
10581 available_stall_durations.add(gCamCapability[cameraId]->raw16_stall_durations[i]);
10582 }
10583 }
10584 }
10585 staticInfo.update(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,
10586 available_stall_durations.array(),
10587 available_stall_durations.size());
10588
10589 //QCAMERA3_OPAQUE_RAW
10590 uint8_t raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
10591 cam_format_t fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
10592 switch (gCamCapability[cameraId]->opaque_raw_fmt) {
10593 case LEGACY_RAW:
10594 if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
10595 fmt = CAM_FORMAT_BAYER_QCOM_RAW_8BPP_GBRG;
10596 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
10597 fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
10598 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
10599 fmt = CAM_FORMAT_BAYER_QCOM_RAW_12BPP_GBRG;
10600 raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
10601 break;
10602 case MIPI_RAW:
10603 if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
10604 fmt = CAM_FORMAT_BAYER_MIPI_RAW_8BPP_GBRG;
10605 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
10606 fmt = CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG;
10607 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
10608 fmt = CAM_FORMAT_BAYER_MIPI_RAW_12BPP_GBRG;
10609 raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_MIPI;
10610 break;
10611 default:
10612 LOGE("unknown opaque_raw_format %d",
10613 gCamCapability[cameraId]->opaque_raw_fmt);
10614 break;
10615 }
10616 staticInfo.update(QCAMERA3_OPAQUE_RAW_FORMAT, &raw_format, 1);
10617
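    // Each stride entry below is a (width, height, stride) triple for a supported
    // RAW dimension, with the stride taken from the plane layout computed by
    // mm_stream_calc_offset_raw() for the opaque RAW format selected above.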
10618 Vector<int32_t> strides;
10619 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
10620 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10621 cam_stream_buf_plane_info_t buf_planes;
10622 strides.add(gCamCapability[cameraId]->raw_dim[i].width);
10623 strides.add(gCamCapability[cameraId]->raw_dim[i].height);
10624 mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
10625 &gCamCapability[cameraId]->padding_info, &buf_planes);
10626 strides.add(buf_planes.plane_info.mp[0].stride);
10627 }
10628 staticInfo.update(QCAMERA3_OPAQUE_RAW_STRIDES, strides.array(),
10629 strides.size());
10630
Mansoor Aftab58465fa2017-01-26 15:02:44 -080010631 //TBD: remove the following line once backend advertises zzHDR in feature mask
10632 gCamCapability[cameraId]->qcom_supported_feature_mask |= CAM_QCOM_FEATURE_ZIGZAG_HDR;
Thierry Strudel04e026f2016-10-10 11:27:36 -070010633 //Video HDR default
10634 if ((gCamCapability[cameraId]->qcom_supported_feature_mask) &
10635 (CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR |
Mansoor Aftab58465fa2017-01-26 15:02:44 -080010636 CAM_QCOM_FEATURE_ZIGZAG_HDR | CAM_QCOM_FEATURE_SENSOR_HDR)) {
Thierry Strudel04e026f2016-10-10 11:27:36 -070010637 int32_t vhdr_mode[] = {
10638 QCAMERA3_VIDEO_HDR_MODE_OFF,
10639 QCAMERA3_VIDEO_HDR_MODE_ON};
10640
10641 size_t vhdr_mode_count = sizeof(vhdr_mode) / sizeof(int32_t);
10642 staticInfo.update(QCAMERA3_AVAILABLE_VIDEO_HDR_MODES,
10643 vhdr_mode, vhdr_mode_count);
10644 }
10645
Thierry Strudel3d639192016-09-09 11:52:26 -070010646 staticInfo.update(QCAMERA3_DUALCAM_CALIB_META_DATA_BLOB,
10647 (const uint8_t*)&gCamCapability[cameraId]->related_cam_calibration,
10648 sizeof(gCamCapability[cameraId]->related_cam_calibration));
10649
10650 uint8_t isMonoOnly =
10651 (gCamCapability[cameraId]->color_arrangement == CAM_FILTER_ARRANGEMENT_Y);
10652 staticInfo.update(QCAMERA3_SENSOR_IS_MONO_ONLY,
10653 &isMonoOnly, 1);
10654
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010655#ifndef USE_HAL_3_3
10656 Vector<int32_t> opaque_size;
10657 for (size_t j = 0; j < scalar_formats_count; j++) {
10658 if (scalar_formats[j] == ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE) {
10659 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
10660 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10661 cam_stream_buf_plane_info_t buf_planes;
10662
10663 rc = mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
10664 &gCamCapability[cameraId]->padding_info, &buf_planes);
10665
10666 if (rc == 0) {
10667 opaque_size.add(gCamCapability[cameraId]->raw_dim[i].width);
10668 opaque_size.add(gCamCapability[cameraId]->raw_dim[i].height);
10669 opaque_size.add(buf_planes.plane_info.frame_len);
10670                } else {
10671 LOGE("raw frame calculation failed!");
10672 }
10673 }
10674 }
10675 }
10676
10677 if ((opaque_size.size() > 0) &&
10678 (opaque_size.size() % PER_CONFIGURATION_SIZE_3 == 0))
10679 staticInfo.update(ANDROID_SENSOR_OPAQUE_RAW_SIZE, opaque_size.array(), opaque_size.size());
10680 else
10681        LOGW("Warning: ANDROID_SENSOR_OPAQUE_RAW_SIZE is using rough estimation (2 bytes/pixel)");
10682#endif
10683
Thierry Strudel04e026f2016-10-10 11:27:36 -070010684 if (gCamCapability[cameraId]->supported_ir_mode_cnt > 0) {
10685 int32_t avail_ir_modes[CAM_IR_MODE_MAX];
10686 size = 0;
10687 count = CAM_IR_MODE_MAX;
10688 count = MIN(gCamCapability[cameraId]->supported_ir_mode_cnt, count);
10689 for (size_t i = 0; i < count; i++) {
10690 int val = lookupFwkName(IR_MODES_MAP, METADATA_MAP_SIZE(IR_MODES_MAP),
10691 gCamCapability[cameraId]->supported_ir_modes[i]);
10692 if (NAME_NOT_FOUND != val) {
10693 avail_ir_modes[size] = (int32_t)val;
10694 size++;
10695 }
10696 }
10697 staticInfo.update(QCAMERA3_IR_AVAILABLE_MODES,
10698 avail_ir_modes, size);
10699 }
10700
Thierry Strudel295a0ca2016-11-03 18:38:47 -070010701 if (gCamCapability[cameraId]->supported_instant_aec_modes_cnt > 0) {
10702 int32_t available_instant_aec_modes[CAM_AEC_CONVERGENCE_MAX];
10703 size = 0;
10704 count = CAM_AEC_CONVERGENCE_MAX;
10705 count = MIN(gCamCapability[cameraId]->supported_instant_aec_modes_cnt, count);
10706 for (size_t i = 0; i < count; i++) {
10707 int val = lookupFwkName(INSTANT_AEC_MODES_MAP, METADATA_MAP_SIZE(INSTANT_AEC_MODES_MAP),
10708 gCamCapability[cameraId]->supported_instant_aec_modes[i]);
10709 if (NAME_NOT_FOUND != val) {
10710 available_instant_aec_modes[size] = (int32_t)val;
10711 size++;
10712 }
10713 }
10714 staticInfo.update(QCAMERA3_INSTANT_AEC_AVAILABLE_MODES,
10715 available_instant_aec_modes, size);
10716 }
10717
Thierry Strudel54dc9782017-02-15 12:12:10 -080010718 int32_t sharpness_range[] = {
10719 gCamCapability[cameraId]->sharpness_ctrl.min_value,
10720 gCamCapability[cameraId]->sharpness_ctrl.max_value};
10721 staticInfo.update(QCAMERA3_SHARPNESS_RANGE, sharpness_range, 2);
10722
10723 if (gCamCapability[cameraId]->supported_binning_correction_mode_cnt > 0) {
10724 int32_t avail_binning_modes[CAM_BINNING_CORRECTION_MODE_MAX];
10725 size = 0;
10726 count = CAM_BINNING_CORRECTION_MODE_MAX;
10727 count = MIN(gCamCapability[cameraId]->supported_binning_correction_mode_cnt, count);
10728 for (size_t i = 0; i < count; i++) {
10729 int val = lookupFwkName(BINNING_CORRECTION_MODES_MAP,
10730 METADATA_MAP_SIZE(BINNING_CORRECTION_MODES_MAP),
10731 gCamCapability[cameraId]->supported_binning_modes[i]);
10732 if (NAME_NOT_FOUND != val) {
10733 avail_binning_modes[size] = (int32_t)val;
10734 size++;
10735 }
10736 }
10737 staticInfo.update(QCAMERA3_AVAILABLE_BINNING_CORRECTION_MODES,
10738 avail_binning_modes, size);
10739 }
10740
10741 if (gCamCapability[cameraId]->supported_aec_modes_cnt > 0) {
10742 int32_t available_aec_modes[CAM_AEC_MODE_MAX];
10743 size = 0;
10744 count = MIN(gCamCapability[cameraId]->supported_aec_modes_cnt, CAM_AEC_MODE_MAX);
10745 for (size_t i = 0; i < count; i++) {
10746 int32_t val = lookupFwkName(AEC_MODES_MAP, METADATA_MAP_SIZE(AEC_MODES_MAP),
10747 gCamCapability[cameraId]->supported_aec_modes[i]);
10748 if (NAME_NOT_FOUND != val)
10749 available_aec_modes[size++] = val;
10750 }
10751 staticInfo.update(QCAMERA3_EXPOSURE_METER_AVAILABLE_MODES,
10752 available_aec_modes, size);
10753 }
10754
10755 if (gCamCapability[cameraId]->supported_iso_modes_cnt > 0) {
10756 int32_t available_iso_modes[CAM_ISO_MODE_MAX];
10757 size = 0;
10758 count = MIN(gCamCapability[cameraId]->supported_iso_modes_cnt, CAM_ISO_MODE_MAX);
10759 for (size_t i = 0; i < count; i++) {
10760 int32_t val = lookupFwkName(ISO_MODES_MAP, METADATA_MAP_SIZE(ISO_MODES_MAP),
10761 gCamCapability[cameraId]->supported_iso_modes[i]);
10762 if (NAME_NOT_FOUND != val)
10763 available_iso_modes[size++] = val;
10764 }
10765 staticInfo.update(QCAMERA3_ISO_AVAILABLE_MODES,
10766 available_iso_modes, size);
10767 }
10768
10769 int64_t available_exp_time_range[EXPOSURE_TIME_RANGE_CNT];
Jason Lee805955a2017-05-04 10:29:14 -070010770 for (size_t i = 0; i < EXPOSURE_TIME_RANGE_CNT; i++)
Thierry Strudel54dc9782017-02-15 12:12:10 -080010771 available_exp_time_range[i] = gCamCapability[cameraId]->exposure_time_range[i];
10772 staticInfo.update(QCAMERA3_EXP_TIME_RANGE,
10773 available_exp_time_range, EXPOSURE_TIME_RANGE_CNT);
10774
10775 int32_t available_saturation_range[4];
10776 available_saturation_range[0] = gCamCapability[cameraId]->saturation_ctrl.min_value;
10777 available_saturation_range[1] = gCamCapability[cameraId]->saturation_ctrl.max_value;
10778 available_saturation_range[2] = gCamCapability[cameraId]->saturation_ctrl.def_value;
10779 available_saturation_range[3] = gCamCapability[cameraId]->saturation_ctrl.step;
10780 staticInfo.update(QCAMERA3_SATURATION_RANGE,
10781 available_saturation_range, 4);
10782
10783 uint8_t is_hdr_values[2];
10784 is_hdr_values[0] = 0;
10785 is_hdr_values[1] = 1;
10786 staticInfo.update(QCAMERA3_STATS_IS_HDR_SCENE_VALUES,
10787 is_hdr_values, 2);
10788
10789 float is_hdr_confidence_range[2];
10790 is_hdr_confidence_range[0] = 0.0;
10791 is_hdr_confidence_range[1] = 1.0;
10792 staticInfo.update(QCAMERA3_STATS_IS_HDR_SCENE_CONFIDENCE_RANGE,
10793 is_hdr_confidence_range, 2);
10794
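    // Append an Easel marker to the EEPROM version string reported via
    // NEXUS_EXPERIMENTAL_2017_EEPROM_VERSION_INFO: ",E-ver" when an Easel device
    // is present, ",E:N" otherwise.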
Emilian Peev0a972ef2017-03-16 10:25:53 +000010795 size_t eepromLength = strnlen(
10796 reinterpret_cast<const char *>(
10797 gCamCapability[cameraId]->eeprom_version_info),
10798 sizeof(gCamCapability[cameraId]->eeprom_version_info));
10799 if (0 < eepromLength) {
Zhijun Hea557c4c2017-03-16 18:37:53 -070010800 char easelInfo[] = ",E:N";
10801 char *eepromInfo = reinterpret_cast<char *>(gCamCapability[cameraId]->eeprom_version_info);
10802 if (eepromLength + sizeof(easelInfo) < MAX_EEPROM_VERSION_INFO_LEN) {
10803 eepromLength += sizeof(easelInfo);
Chien-Yu Chend77a5462017-06-02 18:00:38 -070010804 strlcat(eepromInfo, ((gEaselManagerClient != nullptr &&
Arnd Geis082a4d72017-08-24 10:33:07 -070010805 gEaselManagerClient->isEaselPresentOnDevice()) ? ",E-ver" : ",E:N"),
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010806 MAX_EEPROM_VERSION_INFO_LEN);
Zhijun Hea557c4c2017-03-16 18:37:53 -070010807 }
Emilian Peev0a972ef2017-03-16 10:25:53 +000010808 staticInfo.update(NEXUS_EXPERIMENTAL_2017_EEPROM_VERSION_INFO,
10809 gCamCapability[cameraId]->eeprom_version_info, eepromLength);
10810 }
10811
Thierry Strudel3d639192016-09-09 11:52:26 -070010812 gStaticMetadata[cameraId] = staticInfo.release();
10813 return rc;
10814}
10815
10816/*===========================================================================
10817 * FUNCTION : makeTable
10818 *
10819 * DESCRIPTION: make a table of sizes
10820 *
10821 * PARAMETERS :
10822 *   @dimTable : input table of dimensions; @size : number of valid entries;
10823 *   @max_size : maximum entries to copy; @sizeTable : output array of (width, height) pairs
10824 *==========================================================================*/
10825void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, size_t size,
10826 size_t max_size, int32_t *sizeTable)
10827{
10828 size_t j = 0;
10829 if (size > max_size) {
10830 size = max_size;
10831 }
10832 for (size_t i = 0; i < size; i++) {
10833 sizeTable[j] = dimTable[i].width;
10834 sizeTable[j+1] = dimTable[i].height;
10835 j+=2;
10836 }
10837}
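
// Illustrative example (not compiled): a dimTable of {4032x3024, 1920x1080} is
// flattened into sizeTable = {4032, 3024, 1920, 1080}, the (width, height) pair
// layout used by the static size tags built from this table.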
10838
10839/*===========================================================================
10840 * FUNCTION : makeFPSTable
10841 *
10842 * DESCRIPTION: make a table of fps ranges
10843 *
10844 * PARAMETERS : @fpsTable : input table of fps ranges; @size : number of valid entries;
10845 *              @max_size : maximum entries to copy; @fpsRangesTable : output array of (min, max) fps pairs
10846 *==========================================================================*/
10847void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, size_t size,
10848 size_t max_size, int32_t *fpsRangesTable)
10849{
10850 size_t j = 0;
10851 if (size > max_size) {
10852 size = max_size;
10853 }
10854 for (size_t i = 0; i < size; i++) {
10855 fpsRangesTable[j] = (int32_t)fpsTable[i].min_fps;
10856 fpsRangesTable[j+1] = (int32_t)fpsTable[i].max_fps;
10857 j+=2;
10858 }
10859}
10860
10861/*===========================================================================
10862 * FUNCTION : makeOverridesList
10863 *
10864 * DESCRIPTION: make a list of scene mode overrides
10865 *
10866 * PARAMETERS : @overridesTable : scene mode override table from the backend;
10867 *   @size : number of entries; @max_size : maximum entries to process; @overridesList : output list;
10868 *   @supported_indexes : backend indexes of the framework-supported scene modes; @camera_id : camera id
10869 *==========================================================================*/
10870void QCamera3HardwareInterface::makeOverridesList(
10871 cam_scene_mode_overrides_t* overridesTable, size_t size, size_t max_size,
10872 uint8_t *overridesList, uint8_t *supported_indexes, uint32_t camera_id)
10873{
10874    /* The daemon provides a list of overrides for all scene modes.
10875       However, only the overrides for the scene modes supported by the
10876       framework should be sent to the framework. */
10877 size_t j = 0;
10878 if (size > max_size) {
10879 size = max_size;
10880 }
10881 size_t focus_count = CAM_FOCUS_MODE_MAX;
10882 focus_count = MIN(gCamCapability[camera_id]->supported_focus_modes_cnt,
10883 focus_count);
10884 for (size_t i = 0; i < size; i++) {
10885 bool supt = false;
10886 size_t index = supported_indexes[i];
10887 overridesList[j] = gCamCapability[camera_id]->flash_available ?
10888 ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH : ANDROID_CONTROL_AE_MODE_ON;
10889 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
10890 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
10891 overridesTable[index].awb_mode);
10892 if (NAME_NOT_FOUND != val) {
10893 overridesList[j+1] = (uint8_t)val;
10894 }
10895 uint8_t focus_override = overridesTable[index].af_mode;
10896 for (size_t k = 0; k < focus_count; k++) {
10897 if (gCamCapability[camera_id]->supported_focus_modes[k] == focus_override) {
10898 supt = true;
10899 break;
10900 }
10901 }
10902 if (supt) {
10903 val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
10904 focus_override);
10905 if (NAME_NOT_FOUND != val) {
10906 overridesList[j+2] = (uint8_t)val;
10907 }
10908 } else {
10909 overridesList[j+2] = ANDROID_CONTROL_AF_MODE_OFF;
10910 }
10911 j+=3;
10912 }
10913}
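
// The list built above packs three bytes per supported scene mode, in the order
// (aeMode, awbMode, afMode), which is the per-entry layout expected by
// ANDROID_CONTROL_SCENE_MODE_OVERRIDES.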
10914
10915/*===========================================================================
10916 * FUNCTION : filterJpegSizes
10917 *
10918 * DESCRIPTION: Returns the supported JPEG sizes, keeping only the processed sizes
10919 * that are at least as large as the active array size divided by the downscale factor
10920 *
10921 * PARAMETERS : @jpegSizes : output array of JPEG sizes; @processedSizes : input processed sizes;
10922 *   @processedSizesCnt : input count; @maxCount : output capacity; @active_array_size, @downscale_factor : filter criteria
10923 * RETURN : length of jpegSizes array
10924 *==========================================================================*/
10925
10926size_t QCamera3HardwareInterface::filterJpegSizes(int32_t *jpegSizes, int32_t *processedSizes,
10927 size_t processedSizesCnt, size_t maxCount, cam_rect_t active_array_size,
10928 uint8_t downscale_factor)
10929{
10930 if (0 == downscale_factor) {
10931 downscale_factor = 1;
10932 }
10933
10934 int32_t min_width = active_array_size.width / downscale_factor;
10935 int32_t min_height = active_array_size.height / downscale_factor;
10936 size_t jpegSizesCnt = 0;
10937 if (processedSizesCnt > maxCount) {
10938 processedSizesCnt = maxCount;
10939 }
10940 for (size_t i = 0; i < processedSizesCnt; i+=2) {
10941 if (processedSizes[i] >= min_width && processedSizes[i+1] >= min_height) {
10942 jpegSizes[jpegSizesCnt] = processedSizes[i];
10943 jpegSizes[jpegSizesCnt+1] = processedSizes[i+1];
10944 jpegSizesCnt += 2;
10945 }
10946 }
10947 return jpegSizesCnt;
10948}
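
// Worked example (hypothetical numbers): with a 4032x3024 active array and a
// downscale factor of 4, only processed sizes of at least 1008x756 are copied
// into the JPEG size list; smaller entries are filtered out.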
10949
10950/*===========================================================================
10951 * FUNCTION : computeNoiseModelEntryS
10952 *
10953 * DESCRIPTION: function to map a given sensitivity to the S noise
10954 * model parameters in the DNG noise model.
10955 *
10956 * PARAMETERS : sens : the sensor sensitivity
10957 *
10958 * RETURN : S (sensor amplification) noise
10959 *
10960 *==========================================================================*/
10961double QCamera3HardwareInterface::computeNoiseModelEntryS(int32_t sens) {
10962 double s = gCamCapability[mCameraId]->gradient_S * sens +
10963 gCamCapability[mCameraId]->offset_S;
10964 return ((s < 0.0) ? 0.0 : s);
10965}
10966
10967/*===========================================================================
10968 * FUNCTION : computeNoiseModelEntryO
10969 *
10970 * DESCRIPTION: function to map a given sensitivity to the O noise
10971 * model parameters in the DNG noise model.
10972 *
10973 * PARAMETERS : sens : the sensor sensitivity
10974 *
10975 * RETURN : O (sensor readout) noise
10976 *
10977 *==========================================================================*/
10978double QCamera3HardwareInterface::computeNoiseModelEntryO(int32_t sens) {
10979 int32_t max_analog_sens = gCamCapability[mCameraId]->max_analog_sensitivity;
10980 double digital_gain = (1.0 * sens / max_analog_sens) < 1.0 ?
10981 1.0 : (1.0 * sens / max_analog_sens);
10982 double o = gCamCapability[mCameraId]->gradient_O * sens * sens +
10983 gCamCapability[mCameraId]->offset_O * digital_gain * digital_gain;
10984 return ((o < 0.0) ? 0.0 : o);
10985}
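
// Illustrative sketch (hypothetical helper, not part of the HAL): the S and O values
// computed above feed the (S, O) pairs reported in ANDROID_SENSOR_NOISE_PROFILE,
// where the DNG noise model approximates the noise standard deviation at a
// normalized signal level x as sqrt(S * x + O), e.g.:
//     double noiseStdDev(double x, double S, double O) { return sqrt(S * x + O); }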
10986
10987/*===========================================================================
10988 * FUNCTION : getSensorSensitivity
10989 *
10990 * DESCRIPTION: convert iso_mode to an integer value
10991 *
10992 * PARAMETERS : iso_mode : the iso_mode supported by sensor
10993 *
10994 * RETURN : sensitivity supported by sensor
10995 *
10996 *==========================================================================*/
10997int32_t QCamera3HardwareInterface::getSensorSensitivity(int32_t iso_mode)
10998{
10999 int32_t sensitivity;
11000
11001 switch (iso_mode) {
11002 case CAM_ISO_MODE_100:
11003 sensitivity = 100;
11004 break;
11005 case CAM_ISO_MODE_200:
11006 sensitivity = 200;
11007 break;
11008 case CAM_ISO_MODE_400:
11009 sensitivity = 400;
11010 break;
11011 case CAM_ISO_MODE_800:
11012 sensitivity = 800;
11013 break;
11014 case CAM_ISO_MODE_1600:
11015 sensitivity = 1600;
11016 break;
11017 default:
11018 sensitivity = -1;
11019 break;
11020 }
11021 return sensitivity;
11022}
11023
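/*===========================================================================
 * FUNCTION   : initHdrPlusClientLocked
 *
 * DESCRIPTION: create and open the Easel manager client when an Easel device
 *              is present, suspend it immediately, and read the HDR+ related
 *              system properties; expected to be called with gHdrPlusClientLock
 *              held, as the "Locked" suffix suggests
 *
 * RETURN     : OK on success, negative error code otherwise
 *==========================================================================*/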
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080011024int QCamera3HardwareInterface::initHdrPlusClientLocked() {
Chien-Yu Chend77a5462017-06-02 18:00:38 -070011025 if (gEaselManagerClient == nullptr) {
11026 gEaselManagerClient = EaselManagerClient::create();
11027 if (gEaselManagerClient == nullptr) {
11028 ALOGE("%s: Failed to create Easel manager client.", __FUNCTION__);
11029 return -ENODEV;
11030 }
11031 }
11032
11033 if (!EaselManagerClientOpened && gEaselManagerClient->isEaselPresentOnDevice()) {
Chien-Yu Chen08309b32017-03-13 17:41:32 -070011034 // Check if HAL should not power on Easel even if it's present. This is to allow HDR+ tests
11035 // to connect to Easel.
11036 bool doNotpowerOnEasel =
11037 property_get_bool("camera.hdrplus.donotpoweroneasel", false);
11038
11039 if (doNotpowerOnEasel) {
Chien-Yu Chen08309b32017-03-13 17:41:32 -070011040 ALOGI("%s: Easel is present but not powered on.", __FUNCTION__);
11041 return OK;
11042 }
11043
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080011044 // If Easel is present, power on Easel and suspend it immediately.
Chien-Yu Chend77a5462017-06-02 18:00:38 -070011045 status_t res = gEaselManagerClient->open();
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080011046 if (res != OK) {
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070011047 ALOGE("%s: Opening Easel manager client failed: %s (%d)", __FUNCTION__, strerror(-res),
11048 res);
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080011049 return res;
11050 }
11051
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070011052 EaselManagerClientOpened = true;
11053
Chien-Yu Chend77a5462017-06-02 18:00:38 -070011054 res = gEaselManagerClient->suspend();
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080011055 if (res != OK) {
11056 ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__, strerror(-res), res);
11057 }
11058
Chien-Yu Chen4d752e32017-06-07 12:13:24 -070011059 gEaselBypassOnly = !property_get_bool("persist.camera.hdrplus.enable", true);
Chien-Yu Chen509314b2017-04-07 15:27:55 -070011060 gEaselProfilingEnabled = property_get_bool("persist.camera.hdrplus.profiling", false);
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070011061 gEnableMultipleHdrplusOutputs =
11062 property_get_bool("persist.camera.hdrplus.multiple_outputs", false);
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070011063
11064 // Expose enableZsl key only when HDR+ mode is enabled.
11065 gExposeEnableZslKey = !gEaselBypassOnly;
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080011066 }
11067
11068 return OK;
11069}
11070
Thierry Strudel3d639192016-09-09 11:52:26 -070011071/*===========================================================================
11072 * FUNCTION : getCamInfo
11073 *
11074 * DESCRIPTION: query camera capabilities
11075 *
11076 * PARAMETERS :
11077 * @cameraId : camera Id
11078 * @info : camera info struct to be filled in with camera capabilities
11079 *
11080 * RETURN : int type of status
11081 * NO_ERROR -- success
11082 * none-zero failure code
11083 *==========================================================================*/
11084int QCamera3HardwareInterface::getCamInfo(uint32_t cameraId,
11085 struct camera_info *info)
11086{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080011087 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_GET_CAM_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -070011088 int rc = 0;
11089
11090 pthread_mutex_lock(&gCamLock);
Zhijun Hea557c4c2017-03-16 18:37:53 -070011091
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070011092 {
Chien-Yu Chen77ccd022017-06-23 12:00:36 -070011093 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070011094 rc = initHdrPlusClientLocked();
11095 if (rc != OK) {
11096 ALOGE("%s: initHdrPlusClientLocked failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
11097 pthread_mutex_unlock(&gCamLock);
11098 return rc;
11099 }
Zhijun Hea557c4c2017-03-16 18:37:53 -070011100 }
11101
Thierry Strudel3d639192016-09-09 11:52:26 -070011102 if (NULL == gCamCapability[cameraId]) {
11103 rc = initCapabilities(cameraId);
11104 if (rc < 0) {
11105 pthread_mutex_unlock(&gCamLock);
11106 return rc;
11107 }
11108 }
11109
11110 if (NULL == gStaticMetadata[cameraId]) {
11111 rc = initStaticMetadata(cameraId);
11112 if (rc < 0) {
11113 pthread_mutex_unlock(&gCamLock);
11114 return rc;
11115 }
11116 }
11117
11118 switch(gCamCapability[cameraId]->position) {
11119 case CAM_POSITION_BACK:
11120 case CAM_POSITION_BACK_AUX:
11121 info->facing = CAMERA_FACING_BACK;
11122 break;
11123
11124 case CAM_POSITION_FRONT:
11125 case CAM_POSITION_FRONT_AUX:
11126 info->facing = CAMERA_FACING_FRONT;
11127 break;
11128
11129 default:
11130 LOGE("Unknown position type %d for camera id:%d",
11131 gCamCapability[cameraId]->position, cameraId);
11132 rc = -1;
11133 break;
11134 }
11135
11136
11137 info->orientation = (int)gCamCapability[cameraId]->sensor_mount_angle;
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011138#ifndef USE_HAL_3_3
11139 info->device_version = CAMERA_DEVICE_API_VERSION_3_4;
11140#else
Thierry Strudel3d639192016-09-09 11:52:26 -070011141 info->device_version = CAMERA_DEVICE_API_VERSION_3_3;
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011142#endif
Thierry Strudel3d639192016-09-09 11:52:26 -070011143 info->static_camera_characteristics = gStaticMetadata[cameraId];
11144
11145 //For now assume both cameras can operate independently.
11146 info->conflicting_devices = NULL;
11147 info->conflicting_devices_length = 0;
11148
11149 //resource cost is 100 * MIN(1.0, m/M),
11150 //where m is throughput requirement with maximum stream configuration
11151 //and M is CPP maximum throughput.
11152 float max_fps = 0.0;
11153 for (uint32_t i = 0;
11154 i < gCamCapability[cameraId]->fps_ranges_tbl_cnt; i++) {
11155 if (max_fps < gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps)
11156 max_fps = gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps;
11157 }
11158 float ratio = 1.0 * MAX_PROCESSED_STREAMS *
11159 gCamCapability[cameraId]->active_array_size.width *
11160 gCamCapability[cameraId]->active_array_size.height * max_fps /
11161 gCamCapability[cameraId]->max_pixel_bandwidth;
11162 info->resource_cost = 100 * MIN(1.0, ratio);
11163 LOGI("camera %d resource cost is %d", cameraId,
11164 info->resource_cost);
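    // Illustrative example with made-up numbers: for a 4000x3000 active array,
    // a 30 fps maximum and MAX_PROCESSED_STREAMS of 3, the required throughput is
    // m = 3 * 4000 * 3000 * 30 = 1.08e9 pixels/s; if the CPP bandwidth were
    // M = 1.2e9 pixels/s, the reported resource cost would be 100 * 0.9 = 90.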
11165
11166 pthread_mutex_unlock(&gCamLock);
11167 return rc;
11168}
11169
11170/*===========================================================================
11171 * FUNCTION : translateCapabilityToMetadata
11172 *
11173 * DESCRIPTION: translate the capability into camera_metadata_t
11174 *
11175 * PARAMETERS : type of the request
11176 *
11177 *
11178 * RETURN : success: camera_metadata_t*
11179 * failure: NULL
11180 *
11181 *==========================================================================*/
11182camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type)
11183{
11184 if (mDefaultMetadata[type] != NULL) {
11185 return mDefaultMetadata[type];
11186 }
11187 //first time we are handling this request
11188 //fill up the metadata structure using the wrapper class
11189 CameraMetadata settings;
11190 //translate from cam_capability_t to camera_metadata_tag_t
11191 static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
11192 settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
11193 int32_t defaultRequestID = 0;
11194 settings.update(ANDROID_REQUEST_ID, &defaultRequestID, 1);
11195
11196 /* OIS disable */
11197 char ois_prop[PROPERTY_VALUE_MAX];
11198 memset(ois_prop, 0, sizeof(ois_prop));
11199 property_get("persist.camera.ois.disable", ois_prop, "0");
11200 uint8_t ois_disable = (uint8_t)atoi(ois_prop);
11201
11202 /* Force video to use OIS */
11203 char videoOisProp[PROPERTY_VALUE_MAX];
11204 memset(videoOisProp, 0, sizeof(videoOisProp));
11205 property_get("persist.camera.ois.video", videoOisProp, "1");
11206 uint8_t forceVideoOis = (uint8_t)atoi(videoOisProp);
Shuzhen Wang19463d72016-03-08 11:09:52 -080011207
11208 // Hybrid AE enable/disable
11209 char hybrid_ae_prop[PROPERTY_VALUE_MAX];
11210 memset(hybrid_ae_prop, 0, sizeof(hybrid_ae_prop));
11211 property_get("persist.camera.hybrid_ae.enable", hybrid_ae_prop, "0");
Shuzhen Wang77b049a2017-08-30 12:24:36 -070011212 uint8_t hybrid_ae = (uint8_t)atoi(hybrid_ae_prop);
Shuzhen Wang19463d72016-03-08 11:09:52 -080011213
Thierry Strudel3d639192016-09-09 11:52:26 -070011214 uint8_t controlIntent = 0;
11215 uint8_t focusMode;
11216 uint8_t vsMode;
11217 uint8_t optStabMode;
11218 uint8_t cacMode;
11219 uint8_t edge_mode;
11220 uint8_t noise_red_mode;
11221 uint8_t tonemap_mode;
11222 bool highQualityModeEntryAvailable = FALSE;
11223 bool fastModeEntryAvailable = FALSE;
Shuzhen Wang14415f52016-11-16 18:26:18 -080011224 uint8_t histogramEnable = false;
Thierry Strudel3d639192016-09-09 11:52:26 -070011225 vsMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
11226 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Shuzhen Wang8f66c042016-08-17 14:50:26 -070011227 uint8_t shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
Shuzhen Wangcc386c52017-03-29 09:28:08 -070011228 uint8_t trackingAfTrigger = NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER_IDLE;
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070011229 uint8_t enableZsl = ANDROID_CONTROL_ENABLE_ZSL_FALSE;
Mansoor Aftabea39eba2017-01-26 14:58:25 -080011230
Thierry Strudel3d639192016-09-09 11:52:26 -070011231 switch (type) {
11232 case CAMERA3_TEMPLATE_PREVIEW:
11233 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
11234 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
11235 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11236 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11237 edge_mode = ANDROID_EDGE_MODE_FAST;
11238 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11239 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11240 break;
11241 case CAMERA3_TEMPLATE_STILL_CAPTURE:
11242 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
11243 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
11244 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11245 edge_mode = ANDROID_EDGE_MODE_HIGH_QUALITY;
11246 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY;
11247 tonemap_mode = ANDROID_TONEMAP_MODE_HIGH_QUALITY;
11248 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
11249 // Order of priority for default CAC is HIGH Quality -> FAST -> OFF
11250 for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
11251 if (gCamCapability[mCameraId]->aberration_modes[i] ==
11252 CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
11253 highQualityModeEntryAvailable = TRUE;
11254 } else if (gCamCapability[mCameraId]->aberration_modes[i] ==
11255 CAM_COLOR_CORRECTION_ABERRATION_FAST) {
11256 fastModeEntryAvailable = TRUE;
11257 }
11258 }
11259 if (highQualityModeEntryAvailable) {
11260 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY;
11261 } else if (fastModeEntryAvailable) {
11262 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11263 }
Shuzhen Wang8f66c042016-08-17 14:50:26 -070011264 if (CAM_SENSOR_RAW == gCamCapability[mCameraId]->sensor_type.sens_type) {
11265 shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON;
11266 }
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070011267 enableZsl = ANDROID_CONTROL_ENABLE_ZSL_TRUE;
Thierry Strudel3d639192016-09-09 11:52:26 -070011268 break;
11269 case CAMERA3_TEMPLATE_VIDEO_RECORD:
11270 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
11271 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
11272 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Thierry Strudel3d639192016-09-09 11:52:26 -070011273 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11274 edge_mode = ANDROID_EDGE_MODE_FAST;
11275 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11276 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11277 if (forceVideoOis)
11278 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11279 break;
11280 case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
11281 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
11282 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
11283 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Thierry Strudel3d639192016-09-09 11:52:26 -070011284 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11285 edge_mode = ANDROID_EDGE_MODE_FAST;
11286 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11287 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11288 if (forceVideoOis)
11289 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11290 break;
11291 case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
11292 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
11293 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
11294 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11295 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11296 edge_mode = ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG;
11297 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG;
11298 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11299 break;
11300 case CAMERA3_TEMPLATE_MANUAL:
11301 edge_mode = ANDROID_EDGE_MODE_FAST;
11302 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11303 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11304 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11305 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_MANUAL;
11306 focusMode = ANDROID_CONTROL_AF_MODE_OFF;
11307 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
11308 break;
11309 default:
11310 edge_mode = ANDROID_EDGE_MODE_FAST;
11311 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11312 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11313 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11314 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
11315 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
11316 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
11317 break;
11318 }
Thierry Strudel04e026f2016-10-10 11:27:36 -070011319    // Set CAC to OFF if the underlying device doesn't support it
11320 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
11321 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
11322 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011323 settings.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &cacMode, 1);
11324 settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
11325 settings.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vsMode, 1);
11326 if (gCamCapability[mCameraId]->supported_focus_modes_cnt == 1) {
11327 focusMode = ANDROID_CONTROL_AF_MODE_OFF;
11328 }
11329 settings.update(ANDROID_CONTROL_AF_MODE, &focusMode, 1);
Shuzhen Wang14415f52016-11-16 18:26:18 -080011330 settings.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE, &histogramEnable, 1);
Shuzhen Wangcc386c52017-03-29 09:28:08 -070011331 settings.update(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER, &trackingAfTrigger, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011332
11333 if (gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
11334 gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_ON)
11335 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11336 else if ((gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
11337 gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_OFF)
11338 || ois_disable)
11339 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
11340 settings.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &optStabMode, 1);
Shuzhen Wang8f66c042016-08-17 14:50:26 -070011341 settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &shadingmap_mode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011342
11343 settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
11344 &gCamCapability[mCameraId]->exposure_compensation_default, 1);
11345
11346 static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
11347 settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
11348
11349 static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
11350 settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
11351
11352 static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
11353 settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
11354
11355 static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
11356 settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
11357
11358 static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
11359 settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
11360
11361 static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;
11362 settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
11363
11364 static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
11365 settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
11366
11367 /*flash*/
11368 static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
11369 settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
11370
11371 static const uint8_t flashFiringLevel = CAM_FLASH_FIRING_LEVEL_4;
11372 settings.update(ANDROID_FLASH_FIRING_POWER,
11373 &flashFiringLevel, 1);
11374
11375 /* lens */
11376 float default_aperture = gCamCapability[mCameraId]->apertures[0];
11377 settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1);
11378
11379 if (gCamCapability[mCameraId]->filter_densities_count) {
11380 float default_filter_density = gCamCapability[mCameraId]->filter_densities[0];
11381 settings.update(ANDROID_LENS_FILTER_DENSITY, &default_filter_density,
11382 gCamCapability[mCameraId]->filter_densities_count);
11383 }
11384
11385 float default_focal_length = gCamCapability[mCameraId]->focal_length;
11386 settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1);
11387
Thierry Strudel3d639192016-09-09 11:52:26 -070011388 static const uint8_t demosaicMode = ANDROID_DEMOSAIC_MODE_FAST;
11389 settings.update(ANDROID_DEMOSAIC_MODE, &demosaicMode, 1);
11390
11391 static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
11392 settings.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
11393
11394 static const int32_t testpatternMode = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;
11395 settings.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &testpatternMode, 1);
11396
11397 /* face detection (default to OFF) */
11398 static const uint8_t faceDetectMode = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
11399 settings.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &faceDetectMode, 1);
11400
Thierry Strudel54dc9782017-02-15 12:12:10 -080011401 static const uint8_t histogramMode = QCAMERA3_HISTOGRAM_MODE_OFF;
11402 settings.update(QCAMERA3_HISTOGRAM_MODE, &histogramMode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011403
11404 static const uint8_t sharpnessMapMode = ANDROID_STATISTICS_SHARPNESS_MAP_MODE_OFF;
11405 settings.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &sharpnessMapMode, 1);
11406
11407 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
11408 settings.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
11409
Thierry Strudel3d639192016-09-09 11:52:26 -070011410
11411 static const uint8_t blackLevelLock = ANDROID_BLACK_LEVEL_LOCK_OFF;
11412 settings.update(ANDROID_BLACK_LEVEL_LOCK, &blackLevelLock, 1);
11413
11414    /* Exposure time (default to the minimum of the supported exposure time range) */
11415 int64_t default_exposure_time = gCamCapability[mCameraId]->exposure_time_range[0];
11416 settings.update(ANDROID_SENSOR_EXPOSURE_TIME, &default_exposure_time, 1);
11417
11418 /* frame duration */
11419 static const int64_t default_frame_duration = NSEC_PER_33MSEC;
11420 settings.update(ANDROID_SENSOR_FRAME_DURATION, &default_frame_duration, 1);
11421
11422 /* sensitivity */
11423 static const int32_t default_sensitivity = 100;
11424 settings.update(ANDROID_SENSOR_SENSITIVITY, &default_sensitivity, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011425#ifndef USE_HAL_3_3
11426 static const int32_t default_isp_sensitivity =
11427 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
11428 settings.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &default_isp_sensitivity, 1);
11429#endif
Thierry Strudel3d639192016-09-09 11:52:26 -070011430
11431 /*edge mode*/
11432 settings.update(ANDROID_EDGE_MODE, &edge_mode, 1);
11433
11434 /*noise reduction mode*/
11435 settings.update(ANDROID_NOISE_REDUCTION_MODE, &noise_red_mode, 1);
11436
11437 /*color correction mode*/
11438 static const uint8_t color_correct_mode = ANDROID_COLOR_CORRECTION_MODE_FAST;
11439 settings.update(ANDROID_COLOR_CORRECTION_MODE, &color_correct_mode, 1);
11440
11441     /*tonemap mode*/
11442 settings.update(ANDROID_TONEMAP_MODE, &tonemap_mode, 1);
11443
11444 int32_t scaler_crop_region[4];
11445 scaler_crop_region[0] = 0;
11446 scaler_crop_region[1] = 0;
11447 scaler_crop_region[2] = gCamCapability[mCameraId]->active_array_size.width;
11448 scaler_crop_region[3] = gCamCapability[mCameraId]->active_array_size.height;
11449 settings.update(ANDROID_SCALER_CROP_REGION, scaler_crop_region, 4);
11450
11451 static const uint8_t antibanding_mode = ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
11452 settings.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &antibanding_mode, 1);
11453
11454 /*focus distance*/
11455 float focus_distance = 0.0;
11456 settings.update(ANDROID_LENS_FOCUS_DISTANCE, &focus_distance, 1);
11457
11458 /*target fps range: use maximum range for picture, and maximum fixed range for video*/
Thierry Strudele80ad7c2016-12-06 10:16:27 -080011459 /* Restrict template max_fps to 30 */
Thierry Strudel3d639192016-09-09 11:52:26 -070011460 float max_range = 0.0;
11461 float max_fixed_fps = 0.0;
11462 int32_t fps_range[2] = {0, 0};
11463 for (uint32_t i = 0; i < gCamCapability[mCameraId]->fps_ranges_tbl_cnt;
11464 i++) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -080011465 if (gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps >
11466 TEMPLATE_MAX_PREVIEW_FPS) {
11467 continue;
11468 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011469 float range = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps -
11470 gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
11471 if (type == CAMERA3_TEMPLATE_PREVIEW ||
11472 type == CAMERA3_TEMPLATE_STILL_CAPTURE ||
11473 type == CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG) {
11474 if (range > max_range) {
11475 fps_range[0] =
11476 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
11477 fps_range[1] =
11478 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
11479 max_range = range;
11480 }
11481 } else {
11482 if (range < 0.01 && max_fixed_fps <
11483 gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps) {
11484 fps_range[0] =
11485 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
11486 fps_range[1] =
11487 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
11488 max_fixed_fps = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
11489 }
11490 }
11491 }
11492 settings.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, fps_range, 2);
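    /* Worked example (hypothetical capability table, not taken from any real sensor):
     * with fps_ranges_tbl = {[15,30], [30,30], [7.5,60]} and TEMPLATE_MAX_PREVIEW_FPS = 30,
     * the [7.5,60] entry is skipped because its max exceeds the template limit. PREVIEW,
     * STILL_CAPTURE and ZSL templates then pick the widest remaining range, [15,30];
     * other templates (e.g. VIDEO_RECORD) pick the highest fixed range, [30,30]. */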
11493
11494 /*precapture trigger*/
11495 uint8_t precapture_trigger = ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE;
11496 settings.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, &precapture_trigger, 1);
11497
11498 /*af trigger*/
11499 uint8_t af_trigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
11500 settings.update(ANDROID_CONTROL_AF_TRIGGER, &af_trigger, 1);
11501
11502 /* ae & af regions */
11503 int32_t active_region[] = {
11504 gCamCapability[mCameraId]->active_array_size.left,
11505 gCamCapability[mCameraId]->active_array_size.top,
11506 gCamCapability[mCameraId]->active_array_size.left +
11507 gCamCapability[mCameraId]->active_array_size.width,
11508 gCamCapability[mCameraId]->active_array_size.top +
11509 gCamCapability[mCameraId]->active_array_size.height,
11510 0};
11511 settings.update(ANDROID_CONTROL_AE_REGIONS, active_region,
11512 sizeof(active_region) / sizeof(active_region[0]));
11513 settings.update(ANDROID_CONTROL_AF_REGIONS, active_region,
11514 sizeof(active_region) / sizeof(active_region[0]));
11515
11516 /* black level lock */
11517 uint8_t blacklevel_lock = ANDROID_BLACK_LEVEL_LOCK_OFF;
11518 settings.update(ANDROID_BLACK_LEVEL_LOCK, &blacklevel_lock, 1);
11519
Thierry Strudel3d639192016-09-09 11:52:26 -070011520 //special defaults for manual template
11521 if (type == CAMERA3_TEMPLATE_MANUAL) {
11522 static const uint8_t manualControlMode = ANDROID_CONTROL_MODE_OFF;
11523 settings.update(ANDROID_CONTROL_MODE, &manualControlMode, 1);
11524
11525 static const uint8_t manualFocusMode = ANDROID_CONTROL_AF_MODE_OFF;
11526 settings.update(ANDROID_CONTROL_AF_MODE, &manualFocusMode, 1);
11527
11528 static const uint8_t manualAeMode = ANDROID_CONTROL_AE_MODE_OFF;
11529 settings.update(ANDROID_CONTROL_AE_MODE, &manualAeMode, 1);
11530
11531 static const uint8_t manualAwbMode = ANDROID_CONTROL_AWB_MODE_OFF;
11532 settings.update(ANDROID_CONTROL_AWB_MODE, &manualAwbMode, 1);
11533
11534 static const uint8_t manualTonemapMode = ANDROID_TONEMAP_MODE_FAST;
11535 settings.update(ANDROID_TONEMAP_MODE, &manualTonemapMode, 1);
11536
11537 static const uint8_t manualColorCorrectMode = ANDROID_COLOR_CORRECTION_MODE_TRANSFORM_MATRIX;
11538 settings.update(ANDROID_COLOR_CORRECTION_MODE, &manualColorCorrectMode, 1);
11539 }
11540
11541
11542     /* TNR
11543      * This is where we decide for which templates TNR will be enabled.
11544      * TNR is enabled if either the preview or the video stream requires it.
11545      * This is not to be confused with per-stream linking; that decision is
11546      * still made per session and is handled as part of stream configuration.
11547      */
11548 uint8_t tnr_enable = 0;
11549
11550 if (m_bTnrPreview || m_bTnrVideo) {
11551
11552 switch (type) {
11553 case CAMERA3_TEMPLATE_VIDEO_RECORD:
11554 tnr_enable = 1;
11555 break;
11556
11557 default:
11558 tnr_enable = 0;
11559 break;
11560 }
11561
11562 int32_t tnr_process_type = (int32_t)getTemporalDenoiseProcessPlate();
11563 settings.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
11564 settings.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
11565
11566 LOGD("TNR:%d with process plate %d for template:%d",
11567 tnr_enable, tnr_process_type, type);
11568 }
11569
11570 //Update Link tags to default
Shuzhen Wang920ea402017-05-03 08:49:39 -070011571 uint8_t sync_type = CAM_TYPE_STANDALONE;
Thierry Strudel3d639192016-09-09 11:52:26 -070011572 settings.update(QCAMERA3_DUALCAM_LINK_ENABLE, &sync_type, 1);
11573
Chien-Yu Chena3bbdc02017-05-05 11:31:47 -070011574 uint8_t is_main = 1;
Thierry Strudel3d639192016-09-09 11:52:26 -070011575 settings.update(QCAMERA3_DUALCAM_LINK_IS_MAIN, &is_main, 1);
11576
Shuzhen Wang920ea402017-05-03 08:49:39 -070011577 uint8_t related_camera_id = mCameraId;
11578 settings.update(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID, &related_camera_id, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011579
11580 /* CDS default */
11581 char prop[PROPERTY_VALUE_MAX];
11582 memset(prop, 0, sizeof(prop));
11583 property_get("persist.camera.CDS", prop, "Auto");
11584 cam_cds_mode_type_t cds_mode = CAM_CDS_MODE_AUTO;
11585 cds_mode = lookupProp(CDS_MAP, METADATA_MAP_SIZE(CDS_MAP), prop);
11586 if (CAM_CDS_MODE_MAX == cds_mode) {
11587 cds_mode = CAM_CDS_MODE_AUTO;
11588 }
11589
11590 /* Disabling CDS in templates which have TNR enabled*/
11591 if (tnr_enable)
11592 cds_mode = CAM_CDS_MODE_OFF;
11593
11594 int32_t mode = cds_mode;
11595 settings.update(QCAMERA3_CDS_MODE, &mode, 1);
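    /* Example (assuming the standard Android property tooling; the value must match an
     * entry in CDS_MAP, with "Auto" being the default used above):
     *   adb shell setprop persist.camera.CDS OFF
     * would make QCAMERA3_CDS_MODE default to CAM_CDS_MODE_OFF in new templates, except
     * that templates with TNR enabled force CDS off regardless of the property. */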
Thierry Strudel04e026f2016-10-10 11:27:36 -070011596
Thierry Strudel269c81a2016-10-12 12:13:59 -070011597 /* Manual Convergence AEC Speed is disabled by default*/
11598 float default_aec_speed = 0;
11599 settings.update(QCAMERA3_AEC_CONVERGENCE_SPEED, &default_aec_speed, 1);
11600
11601 /* Manual Convergence AWB Speed is disabled by default*/
11602 float default_awb_speed = 0;
11603 settings.update(QCAMERA3_AWB_CONVERGENCE_SPEED, &default_awb_speed, 1);
11604
Thierry Strudel295a0ca2016-11-03 18:38:47 -070011605 // Set instant AEC to normal convergence by default
11606 int32_t instant_aec_mode = (int32_t)QCAMERA3_INSTANT_AEC_NORMAL_CONVERGENCE;
11607 settings.update(QCAMERA3_INSTANT_AEC_MODE, &instant_aec_mode, 1);
11608
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070011609 if (gExposeEnableZslKey) {
11610 settings.update(ANDROID_CONTROL_ENABLE_ZSL, &enableZsl, 1);
Chien-Yu Chen0a921f92017-08-27 17:25:33 -070011611 int32_t postview = 0;
11612 settings.update(NEXUS_EXPERIMENTAL_2017_POSTVIEW, &postview, 1);
Chien-Yu Chenb0981e32017-08-28 19:27:35 -070011613 int32_t continuousZslCapture = 0;
11614 settings.update(NEXUS_EXPERIMENTAL_2017_CONTINUOUS_ZSL_CAPTURE, &continuousZslCapture, 1);
Chien-Yu Chenec328c82017-08-30 16:41:08 -070011615 // Disable HDR+ for templates other than CAMERA3_TEMPLATE_STILL_CAPTURE.
11616 int32_t disableHdrplus = (type == CAMERA3_TEMPLATE_STILL_CAPTURE) ? 0 : 1;
11617 settings.update(NEXUS_EXPERIMENTAL_2017_DISABLE_HDRPLUS, &disableHdrplus, 1);
11618
Shuzhen Wang77b049a2017-08-30 12:24:36 -070011619 // Set hybrid_ae tag in PREVIEW and STILL_CAPTURE templates to 1 so that
11620 // hybrid ae is enabled for 3rd party app HDR+.
11621 if (type == CAMERA3_TEMPLATE_PREVIEW ||
11622 type == CAMERA3_TEMPLATE_STILL_CAPTURE) {
11623 hybrid_ae = 1;
11624 }
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070011625 }
Shuzhen Wang77b049a2017-08-30 12:24:36 -070011626 /* hybrid ae */
11627 settings.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &hybrid_ae, 1);
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070011628
Thierry Strudel3d639192016-09-09 11:52:26 -070011629 mDefaultMetadata[type] = settings.release();
11630
11631 return mDefaultMetadata[type];
11632}
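
/*
 * Usage sketch (illustrative only; "defaults" stands for the metadata returned by this
 * function through the framework's default-request path): the fps range selected above
 * can be read back with the same metadata helper used elsewhere in this file.
 *
 *   camera_metadata_ro_entry_t entry = camera_metadata_ro_entry_t();
 *   find_camera_metadata_ro_entry(defaults, ANDROID_CONTROL_AE_TARGET_FPS_RANGE, &entry);
 *   if (entry.count == 2) {
 *       // entry.data.i32[0] / entry.data.i32[1] hold the default min / max fps.
 *   }
 */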
11633
11634/*===========================================================================
Emilian Peev30522a12017-08-03 14:36:33 +010011635 * FUNCTION : getExpectedFrameDuration
11636 *
11637 * DESCRIPTION: Extract the maximum frame duration from either exposure or frame
11638 * duration
11639 *
11640 * PARAMETERS :
11641 * @request : request settings
11642 * @frameDuration : The maximum frame duration in nanoseconds
11643 *
11644 * RETURN : None
11645 *==========================================================================*/
11646void QCamera3HardwareInterface::getExpectedFrameDuration(
11647 const camera_metadata_t *request, nsecs_t *frameDuration /*out*/) {
11648 if (nullptr == frameDuration) {
11649 return;
11650 }
11651
11652 camera_metadata_ro_entry_t e = camera_metadata_ro_entry_t();
11653 find_camera_metadata_ro_entry(request,
11654 ANDROID_SENSOR_EXPOSURE_TIME,
11655 &e);
11656 if (e.count > 0) {
11657 *frameDuration = e.data.i64[0];
11658 }
11659 find_camera_metadata_ro_entry(request,
11660 ANDROID_SENSOR_FRAME_DURATION,
11661 &e);
11662 if (e.count > 0) {
11663 *frameDuration = std::max(e.data.i64[0], *frameDuration);
11664 }
11665}
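
/*
 * Example: a manual request with ANDROID_SENSOR_EXPOSURE_TIME = 50000000 ns (50 ms) and
 * ANDROID_SENSOR_FRAME_DURATION = 33333333 ns leaves *frameDuration at 50000000 ns, the
 * larger of the two. If neither tag is present, the caller's initial value is preserved.
 */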
11666
11667/*===========================================================================
11668 * FUNCTION : calculateMaxExpectedDuration
11669 *
11670 * DESCRIPTION: Calculate the expected frame duration in nanoseconds given the
11671 * current camera settings.
11672 *
11673 * PARAMETERS :
11674 * @request : request settings
11675 *
11676 * RETURN : Expected frame duration in nanoseconds.
11677 *==========================================================================*/
11678nsecs_t QCamera3HardwareInterface::calculateMaxExpectedDuration(
11679 const camera_metadata_t *request) {
11680 nsecs_t maxExpectedDuration = kDefaultExpectedDuration;
11681 camera_metadata_ro_entry_t e = camera_metadata_ro_entry_t();
11682 find_camera_metadata_ro_entry(request, ANDROID_CONTROL_MODE, &e);
11683 if (e.count == 0) {
11684 return maxExpectedDuration;
11685 }
11686
11687 if (e.data.u8[0] == ANDROID_CONTROL_MODE_OFF) {
11688 getExpectedFrameDuration(request, &maxExpectedDuration /*out*/);
11689 }
11690
11691 if (e.data.u8[0] != ANDROID_CONTROL_MODE_AUTO) {
11692 return maxExpectedDuration;
11693 }
11694
11695 find_camera_metadata_ro_entry(request, ANDROID_CONTROL_AE_MODE, &e);
11696 if (e.count == 0) {
11697 return maxExpectedDuration;
11698 }
11699
11700 switch (e.data.u8[0]) {
11701 case ANDROID_CONTROL_AE_MODE_OFF:
11702 getExpectedFrameDuration(request, &maxExpectedDuration /*out*/);
11703 break;
11704 default:
11705 find_camera_metadata_ro_entry(request,
11706 ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
11707 &e);
11708 if (e.count > 1) {
11709                 maxExpectedDuration = 1e9 / e.data.i32[0];  // fps range is int32[2]; min fps bounds the longest duration
11710 }
11711 break;
11712 }
11713
11714 return maxExpectedDuration;
11715}
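
/*
 * Example: a request with ANDROID_CONTROL_MODE_AUTO, ANDROID_CONTROL_AE_MODE_ON and an AE
 * target fps range of [15, 30] yields roughly 1e9 / 15 ns, i.e. about 66.7 ms. A fully
 * manual request (control mode OFF, or AE mode OFF) instead takes the larger of the
 * requested exposure time and frame duration via getExpectedFrameDuration().
 */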
11716
11717/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070011718 * FUNCTION : setFrameParameters
11719 *
11720 * DESCRIPTION: set parameters per frame as requested in the metadata from
11721 * framework
11722 *
11723 * PARAMETERS :
11724 * @request : request that needs to be serviced
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011725 * @streamsArray : Stream ID of all the requested streams
Thierry Strudel3d639192016-09-09 11:52:26 -070011726 * @blob_request: Whether this request is a blob request or not
11727 *
11728 * RETURN : success: NO_ERROR
11729 * failure:
11730 *==========================================================================*/
11731int QCamera3HardwareInterface::setFrameParameters(
11732 camera3_capture_request_t *request,
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011733 cam_stream_ID_t streamsArray,
Thierry Strudel3d639192016-09-09 11:52:26 -070011734 int blob_request,
11735 uint32_t snapshotStreamId)
11736{
11737 /*translate from camera_metadata_t type to parm_type_t*/
11738 int rc = 0;
11739 int32_t hal_version = CAM_HAL_V3;
11740
11741 clear_metadata_buffer(mParameters);
11742 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version)) {
11743 LOGE("Failed to set hal version in the parameters");
11744 return BAD_VALUE;
11745 }
11746
11747 /*we need to update the frame number in the parameters*/
11748 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_FRAME_NUMBER,
11749 request->frame_number)) {
11750 LOGE("Failed to set the frame number in the parameters");
11751 return BAD_VALUE;
11752 }
11753
11754 /* Update stream id of all the requested buffers */
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011755 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID, streamsArray)) {
Thierry Strudel3d639192016-09-09 11:52:26 -070011756         LOGE("Failed to set the stream IDs in the parameters");
11757 return BAD_VALUE;
11758 }
11759
11760 if (mUpdateDebugLevel) {
11761 uint32_t dummyDebugLevel = 0;
11762         /* The value of dummyDebugLevel is irrelevant. On
11763          * CAM_INTF_PARM_UPDATE_DEBUG_LEVEL, the debug property is re-read */
11764 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_UPDATE_DEBUG_LEVEL,
11765 dummyDebugLevel)) {
11766 LOGE("Failed to set UPDATE_DEBUG_LEVEL");
11767 return BAD_VALUE;
11768 }
11769 mUpdateDebugLevel = false;
11770 }
11771
11772 if(request->settings != NULL){
Emilian Peev30522a12017-08-03 14:36:33 +010011773 mExpectedFrameDuration = calculateMaxExpectedDuration(request->settings);
Thierry Strudel3d639192016-09-09 11:52:26 -070011774 rc = translateToHalMetadata(request, mParameters, snapshotStreamId);
11775 if (blob_request)
11776 memcpy(mPrevParameters, mParameters, sizeof(metadata_buffer_t));
11777 }
11778
11779 return rc;
11780}
11781
11782/*===========================================================================
11783 * FUNCTION : setReprocParameters
11784 *
11785 * DESCRIPTION: Translate frameworks metadata to HAL metadata structure, and
11786 * return it.
11787 *
11788 * PARAMETERS :
11789 * @request : request that needs to be serviced
11790 *
11791 * RETURN : success: NO_ERROR
11792 * failure:
11793 *==========================================================================*/
11794int32_t QCamera3HardwareInterface::setReprocParameters(
11795 camera3_capture_request_t *request, metadata_buffer_t *reprocParam,
11796 uint32_t snapshotStreamId)
11797{
11798 /*translate from camera_metadata_t type to parm_type_t*/
11799 int rc = 0;
11800
11801 if (NULL == request->settings){
11802 LOGE("Reprocess settings cannot be NULL");
11803 return BAD_VALUE;
11804 }
11805
11806 if (NULL == reprocParam) {
11807 LOGE("Invalid reprocessing metadata buffer");
11808 return BAD_VALUE;
11809 }
11810 clear_metadata_buffer(reprocParam);
11811
11812 /*we need to update the frame number in the parameters*/
11813 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FRAME_NUMBER,
11814 request->frame_number)) {
11815 LOGE("Failed to set the frame number in the parameters");
11816 return BAD_VALUE;
11817 }
11818
11819 rc = translateToHalMetadata(request, reprocParam, snapshotStreamId);
11820 if (rc < 0) {
11821 LOGE("Failed to translate reproc request");
11822 return rc;
11823 }
11824
11825 CameraMetadata frame_settings;
11826 frame_settings = request->settings;
11827 if (frame_settings.exists(QCAMERA3_CROP_COUNT_REPROCESS) &&
11828 frame_settings.exists(QCAMERA3_CROP_REPROCESS)) {
11829 int32_t *crop_count =
11830 frame_settings.find(QCAMERA3_CROP_COUNT_REPROCESS).data.i32;
11831 int32_t *crop_data =
11832 frame_settings.find(QCAMERA3_CROP_REPROCESS).data.i32;
11833 int32_t *roi_map =
11834 frame_settings.find(QCAMERA3_CROP_ROI_MAP_REPROCESS).data.i32;
11835 if ((0 < *crop_count) && (*crop_count < MAX_NUM_STREAMS)) {
11836 cam_crop_data_t crop_meta;
11837 memset(&crop_meta, 0, sizeof(cam_crop_data_t));
11838 crop_meta.num_of_streams = 1;
11839 crop_meta.crop_info[0].crop.left = crop_data[0];
11840 crop_meta.crop_info[0].crop.top = crop_data[1];
11841 crop_meta.crop_info[0].crop.width = crop_data[2];
11842 crop_meta.crop_info[0].crop.height = crop_data[3];
11843
11844 crop_meta.crop_info[0].roi_map.left =
11845 roi_map[0];
11846 crop_meta.crop_info[0].roi_map.top =
11847 roi_map[1];
11848 crop_meta.crop_info[0].roi_map.width =
11849 roi_map[2];
11850 crop_meta.crop_info[0].roi_map.height =
11851 roi_map[3];
11852
11853 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_CROP_DATA, crop_meta)) {
11854 rc = BAD_VALUE;
11855 }
11856 LOGD("Found reprocess crop data for stream %p %dx%d, %dx%d",
11857 request->input_buffer->stream,
11858 crop_meta.crop_info[0].crop.left,
11859 crop_meta.crop_info[0].crop.top,
11860 crop_meta.crop_info[0].crop.width,
11861 crop_meta.crop_info[0].crop.height);
11862 LOGD("Found reprocess roi map data for stream %p %dx%d, %dx%d",
11863 request->input_buffer->stream,
11864 crop_meta.crop_info[0].roi_map.left,
11865 crop_meta.crop_info[0].roi_map.top,
11866 crop_meta.crop_info[0].roi_map.width,
11867 crop_meta.crop_info[0].roi_map.height);
11868 } else {
11869 LOGE("Invalid reprocess crop count %d!", *crop_count);
11870 }
11871 } else {
11872 LOGE("No crop data from matching output stream");
11873 }
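    /* Illustrative values (made up, following the {left, top, width, height} layout the
     * code above reads): QCAMERA3_CROP_COUNT_REPROCESS = 1,
     * QCAMERA3_CROP_REPROCESS = {200, 100, 3840, 2160} and
     * QCAMERA3_CROP_ROI_MAP_REPROCESS = {0, 0, 3840, 2160} populate crop_meta with a
     * 3840x2160 crop at (200, 100) plus the matching ROI map for the reprocess stream. */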
11874
11875 /* These settings are not needed for regular requests so handle them specially for
11876 reprocess requests; information needed for EXIF tags */
11877 if (frame_settings.exists(ANDROID_FLASH_MODE)) {
11878 int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
11879 (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
11880 if (NAME_NOT_FOUND != val) {
11881 uint32_t flashMode = (uint32_t)val;
11882 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_MODE, flashMode)) {
11883 rc = BAD_VALUE;
11884 }
11885 } else {
11886 LOGE("Could not map fwk flash mode %d to correct hal flash mode",
11887 frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
11888 }
11889 } else {
11890 LOGH("No flash mode in reprocess settings");
11891 }
11892
11893 if (frame_settings.exists(ANDROID_FLASH_STATE)) {
11894 int32_t flashState = (int32_t)frame_settings.find(ANDROID_FLASH_STATE).data.u8[0];
11895 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_STATE, flashState)) {
11896 rc = BAD_VALUE;
11897 }
11898 } else {
11899 LOGH("No flash state in reprocess settings");
11900 }
11901
11902 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS)) {
11903 uint8_t *reprocessFlags =
11904 frame_settings.find(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS).data.u8;
11905 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_REPROCESS_FLAGS,
11906 *reprocessFlags)) {
11907 rc = BAD_VALUE;
11908 }
11909 }
11910
Thierry Strudel54dc9782017-02-15 12:12:10 -080011911 // Add exif debug data to internal metadata
11912 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB)) {
11913 mm_jpeg_debug_exif_params_t *debug_params =
11914 (mm_jpeg_debug_exif_params_t *)frame_settings.find
11915 (QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB).data.u8;
11916 // AE
11917 if (debug_params->ae_debug_params_valid == TRUE) {
11918 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AE,
11919 debug_params->ae_debug_params);
11920 }
11921 // AWB
11922 if (debug_params->awb_debug_params_valid == TRUE) {
11923 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AWB,
11924 debug_params->awb_debug_params);
11925 }
11926 // AF
11927 if (debug_params->af_debug_params_valid == TRUE) {
11928 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AF,
11929 debug_params->af_debug_params);
11930 }
11931 // ASD
11932 if (debug_params->asd_debug_params_valid == TRUE) {
11933 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_ASD,
11934 debug_params->asd_debug_params);
11935 }
11936 // Stats
11937 if (debug_params->stats_debug_params_valid == TRUE) {
11938 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_STATS,
11939 debug_params->stats_debug_params);
11940 }
11941 // BE Stats
11942 if (debug_params->bestats_debug_params_valid == TRUE) {
11943 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_BESTATS,
11944 debug_params->bestats_debug_params);
11945 }
11946 // BHIST
11947 if (debug_params->bhist_debug_params_valid == TRUE) {
11948 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_BHIST,
11949 debug_params->bhist_debug_params);
11950 }
11951 // 3A Tuning
11952 if (debug_params->q3a_tuning_debug_params_valid == TRUE) {
11953 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_3A_TUNING,
11954 debug_params->q3a_tuning_debug_params);
11955 }
11956 }
11957
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011958 // Add metadata which reprocess needs
11959 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB)) {
11960 cam_reprocess_info_t *repro_info =
11961 (cam_reprocess_info_t *)frame_settings.find
11962 (QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB).data.u8;
Thierry Strudel3d639192016-09-09 11:52:26 -070011963 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_SENSOR,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011964 repro_info->sensor_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011965 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CAMIF,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011966 repro_info->camif_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011967 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_ISP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011968 repro_info->isp_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011969 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CPP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011970 repro_info->cpp_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011971 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_AF_FOCAL_LENGTH_RATIO,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011972 repro_info->af_focal_length_ratio);
Thierry Strudel3d639192016-09-09 11:52:26 -070011973 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_FLIP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011974 repro_info->pipeline_flip);
11975 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_AF_ROI,
11976 repro_info->af_roi);
11977 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_IMG_DYN_FEAT,
11978 repro_info->dyn_mask);
Thierry Strudel3d639192016-09-09 11:52:26 -070011979         /* If ANDROID_JPEG_ORIENTATION is present in the frame settings,
11980            CAM_INTF_PARM_ROTATION metadata has already been added in
11981            translateToHalMetadata and HAL needs to keep that new rotation
11982            metadata. Otherwise, the old rotation info saved in the vendor tag
11983            is used */
11984 IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
11985 CAM_INTF_PARM_ROTATION, reprocParam) {
11986 LOGD("CAM_INTF_PARM_ROTATION metadata is added in translateToHalMetadata");
11987 } else {
11988 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_ROTATION,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011989 repro_info->rotation_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011990 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011991 }
11992
11993     /* Add additional JPEG cropping information. The app adds QCAMERA3_JPEG_ENCODE_CROP_RECT
11994        to ask for cropping and uses the ROI for downscale/upscale during HW JPEG encoding.
11995        roi.width and roi.height become the final JPEG size.
11996        For now, HAL only checks this for reprocess requests */
11997 if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ENABLE) &&
11998 frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_RECT)) {
11999 uint8_t *enable =
12000 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ENABLE).data.u8;
12001 if (*enable == TRUE) {
12002 int32_t *crop_data =
12003 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_RECT).data.i32;
12004 cam_stream_crop_info_t crop_meta;
12005 memset(&crop_meta, 0, sizeof(cam_stream_crop_info_t));
12006 crop_meta.stream_id = 0;
12007 crop_meta.crop.left = crop_data[0];
12008 crop_meta.crop.top = crop_data[1];
12009 crop_meta.crop.width = crop_data[2];
12010 crop_meta.crop.height = crop_data[3];
Thierry Strudel9e74aae2016-09-22 17:10:18 -070012011 // The JPEG crop roi should match cpp output size
12012 IF_META_AVAILABLE(cam_stream_crop_info_t, cpp_crop,
12013 CAM_INTF_META_SNAP_CROP_INFO_CPP, reprocParam) {
12014 crop_meta.roi_map.left = 0;
12015 crop_meta.roi_map.top = 0;
12016 crop_meta.roi_map.width = cpp_crop->crop.width;
12017 crop_meta.roi_map.height = cpp_crop->crop.height;
Thierry Strudel3d639192016-09-09 11:52:26 -070012018 }
12019 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_JPEG_ENCODE_CROP,
12020 crop_meta);
Thierry Strudel9e74aae2016-09-22 17:10:18 -070012021 LOGH("Add JPEG encode crop left %d, top %d, width %d, height %d, mCameraId %d",
Thierry Strudel3d639192016-09-09 11:52:26 -070012022 crop_meta.crop.left, crop_meta.crop.top,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070012023 crop_meta.crop.width, crop_meta.crop.height, mCameraId);
12024 LOGH("Add JPEG encode crop ROI left %d, top %d, width %d, height %d, mCameraId %d",
Thierry Strudel3d639192016-09-09 11:52:26 -070012025 crop_meta.roi_map.left, crop_meta.roi_map.top,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070012026 crop_meta.roi_map.width, crop_meta.roi_map.height, mCameraId);
12027
12028 // Add JPEG scale information
12029 cam_dimension_t scale_dim;
12030 memset(&scale_dim, 0, sizeof(cam_dimension_t));
12031 if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ROI)) {
12032 int32_t *roi =
12033 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ROI).data.i32;
12034 scale_dim.width = roi[2];
12035 scale_dim.height = roi[3];
12036 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_JPEG_SCALE_DIMENSION,
12037 scale_dim);
12038 LOGH("Add JPEG encode scale width %d, height %d, mCameraId %d",
12039 scale_dim.width, scale_dim.height, mCameraId);
12040 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012041 }
12042 }
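    /* Worked example (values are made up): QCAMERA3_JPEG_ENCODE_CROP_ENABLE = TRUE,
     * QCAMERA3_JPEG_ENCODE_CROP_RECT = {0, 0, 3000, 2000} and
     * QCAMERA3_JPEG_ENCODE_CROP_ROI = {0, 0, 1920, 1080} crop a 3000x2000 window at the
     * origin and scale it during HW JPEG encoding so the final JPEG is 1920x1080. */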
12043
12044 return rc;
12045}
12046
12047/*===========================================================================
12048 * FUNCTION : saveRequestSettings
12049 *
12050 * DESCRIPTION: Add any settings that might have changed to the request settings
12051 * and save the settings to be applied on the frame
12052 *
12053 * PARAMETERS :
12054 * @jpegMetadata : the extracted and/or modified jpeg metadata
12055 * @request : request with initial settings
12056 *
12057 * RETURN :
12058 * camera_metadata_t* : pointer to the saved request settings
12059 *==========================================================================*/
12060camera_metadata_t* QCamera3HardwareInterface::saveRequestSettings(
12061 const CameraMetadata &jpegMetadata,
12062 camera3_capture_request_t *request)
12063{
12064 camera_metadata_t *resultMetadata;
12065 CameraMetadata camMetadata;
12066 camMetadata = request->settings;
12067
12068 if (jpegMetadata.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
12069 int32_t thumbnail_size[2];
12070 thumbnail_size[0] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
12071 thumbnail_size[1] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
12072 camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, thumbnail_size,
12073 jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
12074 }
12075
12076 if (request->input_buffer != NULL) {
12077 uint8_t reprocessFlags = 1;
12078 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS,
12079 (uint8_t*)&reprocessFlags,
12080 sizeof(reprocessFlags));
12081 }
12082
12083 resultMetadata = camMetadata.release();
12084 return resultMetadata;
12085}
12086
12087/*===========================================================================
12088 * FUNCTION : setHalFpsRange
12089 *
12090 * DESCRIPTION: set FPS range parameter
12091 *
12092 *
12093 * PARAMETERS :
12094 * @settings : Metadata from framework
12095 * @hal_metadata: Metadata buffer
12096 *
12097 *
12098 * RETURN : success: NO_ERROR
12099 * failure:
12100 *==========================================================================*/
12101int32_t QCamera3HardwareInterface::setHalFpsRange(const CameraMetadata &settings,
12102 metadata_buffer_t *hal_metadata)
12103{
12104 int32_t rc = NO_ERROR;
12105 cam_fps_range_t fps_range;
12106 fps_range.min_fps = (float)
12107 settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0];
12108 fps_range.max_fps = (float)
12109 settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
12110 fps_range.video_min_fps = fps_range.min_fps;
12111 fps_range.video_max_fps = fps_range.max_fps;
12112
12113 LOGD("aeTargetFpsRange fps: [%f %f]",
12114 fps_range.min_fps, fps_range.max_fps);
12115 /* In CONSTRAINED_HFR_MODE, sensor_fps is derived from aeTargetFpsRange as
12116 * follows:
12117 * ---------------------------------------------------------------|
12118 * Video stream is absent in configure_streams |
12119      * (Camcorder preview before the first video record) |
12120 * ---------------------------------------------------------------|
12121 * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
12122 * | | | vid_min/max_fps|
12123 * ---------------------------------------------------------------|
12124 * NO | [ 30, 240] | 240 | [240, 240] |
12125 * |-------------|-------------|----------------|
12126 * | [240, 240] | 240 | [240, 240] |
12127 * ---------------------------------------------------------------|
12128 * Video stream is present in configure_streams |
12129 * ---------------------------------------------------------------|
12130 * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
12131 * | | | vid_min/max_fps|
12132 * ---------------------------------------------------------------|
12133 * NO | [ 30, 240] | 240 | [240, 240] |
12134 * (camcorder prev |-------------|-------------|----------------|
12135 * after video rec | [240, 240] | 240 | [240, 240] |
12136 * is stopped) | | | |
12137 * ---------------------------------------------------------------|
12138 * YES | [ 30, 240] | 240 | [240, 240] |
12139 * |-------------|-------------|----------------|
12140 * | [240, 240] | 240 | [240, 240] |
12141 * ---------------------------------------------------------------|
12142 * When Video stream is absent in configure_streams,
12143 * preview fps = sensor_fps / batchsize
12144 * Eg: for 240fps at batchSize 4, preview = 60fps
12145 * for 120fps at batchSize 4, preview = 30fps
12146 *
12147 * When video stream is present in configure_streams, preview fps is as per
12148 * the ratio of preview buffers to video buffers requested in process
12149 * capture request
12150 */
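    /* Example: a constrained high-speed session requesting [240, 240] runs the sensor at
     * 240 fps; mBatchSize below becomes 240 / PREVIEW_FPS_FOR_HFR, capped at
     * MAX_HFR_BATCH_SIZE. A batch size of 4 then gives a 60 fps preview (240 / 4),
     * matching the table above. */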
12151 mBatchSize = 0;
12152 if (CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) {
12153 fps_range.min_fps = fps_range.video_max_fps;
12154 fps_range.video_min_fps = fps_range.video_max_fps;
12155 int val = lookupHalName(HFR_MODE_MAP, METADATA_MAP_SIZE(HFR_MODE_MAP),
12156 fps_range.max_fps);
12157 if (NAME_NOT_FOUND != val) {
12158 cam_hfr_mode_t hfrMode = (cam_hfr_mode_t)val;
12159 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
12160 return BAD_VALUE;
12161 }
12162
12163 if (fps_range.max_fps >= MIN_FPS_FOR_BATCH_MODE) {
12164 /* If batchmode is currently in progress and the fps changes,
12165 * set the flag to restart the sensor */
12166 if((mHFRVideoFps >= MIN_FPS_FOR_BATCH_MODE) &&
12167 (mHFRVideoFps != fps_range.max_fps)) {
12168 mNeedSensorRestart = true;
12169 }
12170 mHFRVideoFps = fps_range.max_fps;
12171 mBatchSize = mHFRVideoFps / PREVIEW_FPS_FOR_HFR;
12172 if (mBatchSize > MAX_HFR_BATCH_SIZE) {
12173 mBatchSize = MAX_HFR_BATCH_SIZE;
12174 }
12175 }
12176 LOGD("hfrMode: %d batchSize: %d", hfrMode, mBatchSize);
12177
12178 }
12179 } else {
12180         /* HFR mode is a session parameter in the backend/ISP. It should be reset
12181          * when not in HFR mode */
12182 cam_hfr_mode_t hfrMode = CAM_HFR_MODE_OFF;
12183 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
12184 return BAD_VALUE;
12185 }
12186 }
12187 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FPS_RANGE, fps_range)) {
12188 return BAD_VALUE;
12189 }
12190 LOGD("fps: [%f %f] vid_fps: [%f %f]", fps_range.min_fps,
12191 fps_range.max_fps, fps_range.video_min_fps, fps_range.video_max_fps);
12192 return rc;
12193}
12194
12195/*===========================================================================
12196 * FUNCTION : translateToHalMetadata
12197 *
12198 * DESCRIPTION: read from the camera_metadata_t and change to parm_type_t
12199 *
12200 *
12201 * PARAMETERS :
12202 * @request : request sent from framework
12203 *
12204 *
12205 * RETURN : success: NO_ERROR
12206 * failure:
12207 *==========================================================================*/
12208int QCamera3HardwareInterface::translateToHalMetadata
12209 (const camera3_capture_request_t *request,
12210 metadata_buffer_t *hal_metadata,
Chien-Yu Chen92724a82017-01-06 11:50:30 -080012211 uint32_t snapshotStreamId) {
12212 if (request == nullptr || hal_metadata == nullptr) {
12213 return BAD_VALUE;
12214 }
12215
12216 int64_t minFrameDuration = getMinFrameDuration(request);
12217
12218 return translateFwkMetadataToHalMetadata(request->settings, hal_metadata, snapshotStreamId,
12219 minFrameDuration);
12220}
12221
12222int QCamera3HardwareInterface::translateFwkMetadataToHalMetadata(
12223 const camera_metadata_t *frameworkMetadata, metadata_buffer_t *hal_metadata,
12224 uint32_t snapshotStreamId, int64_t minFrameDuration) {
12225
Thierry Strudel3d639192016-09-09 11:52:26 -070012226 int rc = 0;
12227 CameraMetadata frame_settings;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080012228 frame_settings = frameworkMetadata;
Thierry Strudel3d639192016-09-09 11:52:26 -070012229
12230 /* Do not change the order of the following list unless you know what you are
12231 * doing.
12232 * The order is laid out in such a way that parameters in the front of the table
12233 * may be used to override the parameters later in the table. Examples are:
12234 * 1. META_MODE should precede AEC/AWB/AF MODE
12235      * 2. AEC MODE should precede EXPOSURE_TIME/SENSITIVITY/FRAME_DURATION
12236      * 3. AWB_MODE should precede COLOR_CORRECTION_MODE
12237      * 4. Any mode should precede its corresponding settings
12238 */
12239 if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
12240 uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
12241 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_MODE, metaMode)) {
12242 rc = BAD_VALUE;
12243 }
12244 rc = extractSceneMode(frame_settings, metaMode, hal_metadata);
12245 if (rc != NO_ERROR) {
12246 LOGE("extractSceneMode failed");
12247 }
12248 }
12249
12250 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
12251 uint8_t fwk_aeMode =
12252 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
12253 uint8_t aeMode;
12254 int32_t redeye;
12255
12256 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_OFF ) {
12257 aeMode = CAM_AE_MODE_OFF;
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -080012258 } else if (fwk_aeMode == NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH) {
12259 aeMode = CAM_AE_MODE_ON_EXTERNAL_FLASH;
Thierry Strudel3d639192016-09-09 11:52:26 -070012260 } else {
12261 aeMode = CAM_AE_MODE_ON;
12262 }
12263 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
12264 redeye = 1;
12265 } else {
12266 redeye = 0;
12267 }
12268
12269 int val = lookupHalName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
12270 fwk_aeMode);
12271 if (NAME_NOT_FOUND != val) {
12272 int32_t flashMode = (int32_t)val;
12273 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode);
12274 }
12275
12276 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_MODE, aeMode);
12277 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_REDEYE_REDUCTION, redeye)) {
12278 rc = BAD_VALUE;
12279 }
12280 }
12281
12282 if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) {
12283 uint8_t fwk_whiteLevel = frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0];
12284 int val = lookupHalName(WHITE_BALANCE_MODES_MAP, METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
12285 fwk_whiteLevel);
12286 if (NAME_NOT_FOUND != val) {
12287 uint8_t whiteLevel = (uint8_t)val;
12288 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_WHITE_BALANCE, whiteLevel)) {
12289 rc = BAD_VALUE;
12290 }
12291 }
12292 }
12293
12294 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
12295 uint8_t fwk_cacMode =
12296 frame_settings.find(
12297 ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
12298 int val = lookupHalName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
12299 fwk_cacMode);
12300 if (NAME_NOT_FOUND != val) {
12301 cam_aberration_mode_t cacMode = (cam_aberration_mode_t) val;
12302 bool entryAvailable = FALSE;
12303             // Check whether the framework-requested CAC mode is supported by the device
12304 for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
12305 if (gCamCapability[mCameraId]->aberration_modes[i] == cacMode) {
12306 entryAvailable = TRUE;
12307 break;
12308 }
12309 }
12310 LOGD("FrameworksCacMode=%d entryAvailable=%d", cacMode, entryAvailable);
12311             // If the entry is not found, set a device-supported mode instead of the framework mode, i.e.,
12312 // Only HW ISP CAC + NO SW CAC : Advertise all 3 with High doing same as fast by ISP
12313 // NO HW ISP CAC + Only SW CAC : Advertise all 3 with Fast doing the same as OFF
12314 if (entryAvailable == FALSE) {
12315 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
12316 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
12317 } else {
12318 if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
12319                         // High is not supported, so set FAST; the spec says the underlying
12320                         // device implementation can be the same for both modes.
12321 cacMode = CAM_COLOR_CORRECTION_ABERRATION_FAST;
12322 } else if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_FAST) {
12323                         // Fast is not supported, so we cannot set HIGH or FAST; choose OFF
12324                         // to avoid the fps drop that high quality would cause
12325 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
12326 } else {
12327 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
12328 }
12329 }
12330 }
12331 LOGD("Final cacMode is %d", cacMode);
12332 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_CAC, cacMode)) {
12333 rc = BAD_VALUE;
12334 }
12335 } else {
12336 LOGE("Invalid framework CAC mode: %d", fwk_cacMode);
12337 }
12338 }
12339
Jason Lee84ae9972017-02-24 13:24:24 -080012340 uint8_t fwk_focusMode = 0;
Shuzhen Wangb57ec912017-07-31 13:24:27 -070012341 if (m_bForceInfinityAf == 0) {
Thierry Strudel2896d122017-02-23 19:18:03 -080012342 if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) {
Jason Lee84ae9972017-02-24 13:24:24 -080012343 fwk_focusMode = frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0];
Thierry Strudel2896d122017-02-23 19:18:03 -080012344 int val = lookupHalName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
12345 fwk_focusMode);
12346 if (NAME_NOT_FOUND != val) {
12347 uint8_t focusMode = (uint8_t)val;
12348 LOGD("set focus mode %d", focusMode);
12349 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12350 CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
12351 rc = BAD_VALUE;
12352 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012353 }
12354 }
Thierry Strudel2896d122017-02-23 19:18:03 -080012355 } else {
12356 uint8_t focusMode = (uint8_t)CAM_FOCUS_MODE_INFINITY;
12357 LOGE("Focus forced to infinity %d", focusMode);
12358 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
12359 rc = BAD_VALUE;
12360 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012361 }
12362
Jason Lee84ae9972017-02-24 13:24:24 -080012363 if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE) &&
12364 fwk_focusMode == ANDROID_CONTROL_AF_MODE_OFF) {
Thierry Strudel3d639192016-09-09 11:52:26 -070012365 float focalDistance = frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0];
12366 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCUS_DISTANCE,
12367 focalDistance)) {
12368 rc = BAD_VALUE;
12369 }
12370 }
12371
12372 if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) {
12373 uint8_t fwk_antibandingMode =
12374 frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.u8[0];
12375 int val = lookupHalName(ANTIBANDING_MODES_MAP,
12376 METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP), fwk_antibandingMode);
12377 if (NAME_NOT_FOUND != val) {
12378 uint32_t hal_antibandingMode = (uint32_t)val;
Shuzhen Wangf6890e02016-08-12 14:28:54 -070012379 if (hal_antibandingMode == CAM_ANTIBANDING_MODE_AUTO) {
12380 if (m60HzZone) {
12381 hal_antibandingMode = CAM_ANTIBANDING_MODE_AUTO_60HZ;
12382 } else {
12383 hal_antibandingMode = CAM_ANTIBANDING_MODE_AUTO_50HZ;
12384 }
12385 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012386 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ANTIBANDING,
12387 hal_antibandingMode)) {
12388 rc = BAD_VALUE;
12389 }
12390 }
12391 }
12392
12393 if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
12394 int32_t expCompensation = frame_settings.find(
12395 ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
12396 if (expCompensation < gCamCapability[mCameraId]->exposure_compensation_min)
12397 expCompensation = gCamCapability[mCameraId]->exposure_compensation_min;
12398 if (expCompensation > gCamCapability[mCameraId]->exposure_compensation_max)
12399 expCompensation = gCamCapability[mCameraId]->exposure_compensation_max;
Thierry Strudel54dc9782017-02-15 12:12:10 -080012400 LOGD("Setting compensation:%d", expCompensation);
Thierry Strudel3d639192016-09-09 11:52:26 -070012401 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_COMPENSATION,
12402 expCompensation)) {
12403 rc = BAD_VALUE;
12404 }
12405 }
12406
12407 if (frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) {
12408 uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0];
12409 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_LOCK, aeLock)) {
12410 rc = BAD_VALUE;
12411 }
12412 }
12413 if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
12414 rc = setHalFpsRange(frame_settings, hal_metadata);
12415 if (rc != NO_ERROR) {
12416 LOGE("setHalFpsRange failed");
12417 }
12418 }
12419
12420 if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) {
12421 uint8_t awbLock = frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0];
12422 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AWB_LOCK, awbLock)) {
12423 rc = BAD_VALUE;
12424 }
12425 }
12426
12427 if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) {
12428 uint8_t fwk_effectMode = frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0];
12429 int val = lookupHalName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
12430 fwk_effectMode);
12431 if (NAME_NOT_FOUND != val) {
12432 uint8_t effectMode = (uint8_t)val;
12433 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EFFECT, effectMode)) {
12434 rc = BAD_VALUE;
12435 }
12436 }
12437 }
12438
12439 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) {
12440 uint8_t colorCorrectMode = frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0];
12441 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_MODE,
12442 colorCorrectMode)) {
12443 rc = BAD_VALUE;
12444 }
12445 }
12446
12447 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_GAINS)) {
12448 cam_color_correct_gains_t colorCorrectGains;
12449 for (size_t i = 0; i < CC_GAIN_MAX; i++) {
12450 colorCorrectGains.gains[i] =
12451 frame_settings.find(ANDROID_COLOR_CORRECTION_GAINS).data.f[i];
12452 }
12453 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_GAINS,
12454 colorCorrectGains)) {
12455 rc = BAD_VALUE;
12456 }
12457 }
12458
12459 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_TRANSFORM)) {
12460 cam_color_correct_matrix_t colorCorrectTransform;
12461 cam_rational_type_t transform_elem;
12462 size_t num = 0;
12463 for (size_t i = 0; i < CC_MATRIX_ROWS; i++) {
12464 for (size_t j = 0; j < CC_MATRIX_COLS; j++) {
12465 transform_elem.numerator =
12466 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].numerator;
12467 transform_elem.denominator =
12468 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].denominator;
12469 colorCorrectTransform.transform_matrix[i][j] = transform_elem;
12470 num++;
12471 }
12472 }
12473 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_TRANSFORM,
12474 colorCorrectTransform)) {
12475 rc = BAD_VALUE;
12476 }
12477 }
12478
12479 cam_trigger_t aecTrigger;
12480 aecTrigger.trigger = CAM_AEC_TRIGGER_IDLE;
12481 aecTrigger.trigger_id = -1;
12482 if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)&&
12483 frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_ID)) {
12484 aecTrigger.trigger =
12485 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0];
12486 aecTrigger.trigger_id =
12487 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID).data.i32[0];
12488 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER,
12489 aecTrigger)) {
12490 rc = BAD_VALUE;
12491 }
12492 LOGD("precaptureTrigger: %d precaptureTriggerID: %d",
12493 aecTrigger.trigger, aecTrigger.trigger_id);
12494 }
12495
12496 /*af_trigger must come with a trigger id*/
12497 if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER) &&
12498 frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER_ID)) {
12499 cam_trigger_t af_trigger;
12500 af_trigger.trigger =
12501 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0];
12502 af_trigger.trigger_id =
12503 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER_ID).data.i32[0];
12504 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_TRIGGER, af_trigger)) {
12505 rc = BAD_VALUE;
12506 }
12507 LOGD("AfTrigger: %d AfTriggerID: %d",
12508 af_trigger.trigger, af_trigger.trigger_id);
12509 }
12510
12511 if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) {
12512 int32_t demosaic = frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0];
12513 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_DEMOSAIC, demosaic)) {
12514 rc = BAD_VALUE;
12515 }
12516 }
12517 if (frame_settings.exists(ANDROID_EDGE_MODE)) {
12518 cam_edge_application_t edge_application;
12519 edge_application.edge_mode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0];
Thierry Strudel54dc9782017-02-15 12:12:10 -080012520
Thierry Strudel3d639192016-09-09 11:52:26 -070012521 if (edge_application.edge_mode == CAM_EDGE_MODE_OFF) {
12522 edge_application.sharpness = 0;
12523 } else {
Thierry Strudel54dc9782017-02-15 12:12:10 -080012524 edge_application.sharpness =
12525 gCamCapability[mCameraId]->sharpness_ctrl.def_value; //default
12526 if (frame_settings.exists(QCAMERA3_SHARPNESS_STRENGTH)) {
12527 int32_t sharpness =
12528 frame_settings.find(QCAMERA3_SHARPNESS_STRENGTH).data.i32[0];
12529 if (sharpness >= gCamCapability[mCameraId]->sharpness_ctrl.min_value &&
12530 sharpness <= gCamCapability[mCameraId]->sharpness_ctrl.max_value) {
12531 LOGD("Setting edge mode sharpness %d", sharpness);
12532 edge_application.sharpness = sharpness;
12533 }
12534 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012535 }
12536 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EDGE_MODE, edge_application)) {
12537 rc = BAD_VALUE;
12538 }
12539 }
12540
12541 if (frame_settings.exists(ANDROID_FLASH_MODE)) {
12542 int32_t respectFlashMode = 1;
12543 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
12544 uint8_t fwk_aeMode =
12545 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -080012546 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH ||
12547 fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH ||
12548 fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
Thierry Strudel3d639192016-09-09 11:52:26 -070012549 respectFlashMode = 0;
12550 LOGH("AE Mode controls flash, ignore android.flash.mode");
12551 }
12552 }
12553 if (respectFlashMode) {
12554 int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
12555 (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
12556 LOGH("flash mode after mapping %d", val);
12557 // To check: CAM_INTF_META_FLASH_MODE usage
12558 if (NAME_NOT_FOUND != val) {
12559 uint8_t flashMode = (uint8_t)val;
12560 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode)) {
12561 rc = BAD_VALUE;
12562 }
12563 }
12564 }
12565 }
12566
12567 if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) {
12568 uint8_t flashPower = frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0];
12569 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_POWER, flashPower)) {
12570 rc = BAD_VALUE;
12571 }
12572 }
12573
12574 if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) {
12575 int64_t flashFiringTime = frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0];
12576 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_FIRING_TIME,
12577 flashFiringTime)) {
12578 rc = BAD_VALUE;
12579 }
12580 }
12581
12582 if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) {
12583 uint8_t hotPixelMode = frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0];
12584 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_HOTPIXEL_MODE,
12585 hotPixelMode)) {
12586 rc = BAD_VALUE;
12587 }
12588 }
12589
12590 if (frame_settings.exists(ANDROID_LENS_APERTURE)) {
12591 float lensAperture = frame_settings.find( ANDROID_LENS_APERTURE).data.f[0];
12592 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_APERTURE,
12593 lensAperture)) {
12594 rc = BAD_VALUE;
12595 }
12596 }
12597
12598 if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) {
12599 float filterDensity = frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0];
12600 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FILTERDENSITY,
12601 filterDensity)) {
12602 rc = BAD_VALUE;
12603 }
12604 }
12605
12606 if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
12607 float focalLength = frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
12608 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCAL_LENGTH,
12609 focalLength)) {
12610 rc = BAD_VALUE;
12611 }
12612 }
12613
12614 if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) {
12615 uint8_t optStabMode =
12616 frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0];
12617 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_OPT_STAB_MODE,
12618 optStabMode)) {
12619 rc = BAD_VALUE;
12620 }
12621 }
12622
12623 if (frame_settings.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
12624 uint8_t videoStabMode =
12625 frame_settings.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
12626 LOGD("videoStabMode from APP = %d", videoStabMode);
12627 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_VIDEO_STAB_MODE,
12628 videoStabMode)) {
12629 rc = BAD_VALUE;
12630 }
12631 }
12632
12633
12634 if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) {
12635 uint8_t noiseRedMode = frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
12636 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_NOISE_REDUCTION_MODE,
12637 noiseRedMode)) {
12638 rc = BAD_VALUE;
12639 }
12640 }
12641
12642 if (frame_settings.exists(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR)) {
12643 float reprocessEffectiveExposureFactor =
12644 frame_settings.find(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR).data.f[0];
12645 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR,
12646 reprocessEffectiveExposureFactor)) {
12647 rc = BAD_VALUE;
12648 }
12649 }
12650
12651 cam_crop_region_t scalerCropRegion;
12652 bool scalerCropSet = false;
12653 if (frame_settings.exists(ANDROID_SCALER_CROP_REGION)) {
12654 scalerCropRegion.left = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0];
12655 scalerCropRegion.top = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1];
12656 scalerCropRegion.width = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2];
12657 scalerCropRegion.height = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[3];
12658
12659 // Map coordinate system from active array to sensor output.
12660 mCropRegionMapper.toSensor(scalerCropRegion.left, scalerCropRegion.top,
12661 scalerCropRegion.width, scalerCropRegion.height);
12662
12663 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SCALER_CROP_REGION,
12664 scalerCropRegion)) {
12665 rc = BAD_VALUE;
12666 }
12667 scalerCropSet = true;
12668 }
12669
12670 if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
12671 int64_t sensorExpTime =
12672 frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
12673 LOGD("setting sensorExpTime %lld", sensorExpTime);
12674 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_EXPOSURE_TIME,
12675 sensorExpTime)) {
12676 rc = BAD_VALUE;
12677 }
12678 }
12679
12680 if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) {
12681 int64_t sensorFrameDuration =
12682 frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
Thierry Strudel3d639192016-09-09 11:52:26 -070012683 sensorFrameDuration = MAX(sensorFrameDuration, minFrameDuration);
12684 if (sensorFrameDuration > gCamCapability[mCameraId]->max_frame_duration)
12685 sensorFrameDuration = gCamCapability[mCameraId]->max_frame_duration;
12686 LOGD("clamp sensorFrameDuration to %lld", sensorFrameDuration);
12687 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_FRAME_DURATION,
12688 sensorFrameDuration)) {
12689 rc = BAD_VALUE;
12690 }
12691 }
12692
12693 if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
12694 int32_t sensorSensitivity = frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
12695 if (sensorSensitivity < gCamCapability[mCameraId]->sensitivity_range.min_sensitivity)
12696 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.min_sensitivity;
12697 if (sensorSensitivity > gCamCapability[mCameraId]->sensitivity_range.max_sensitivity)
12698 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.max_sensitivity;
12699 LOGD("clamp sensorSensitivity to %d", sensorSensitivity);
12700 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_SENSITIVITY,
12701 sensorSensitivity)) {
12702 rc = BAD_VALUE;
12703 }
12704 }
12705
Thierry Strudel9e74aae2016-09-22 17:10:18 -070012706#ifndef USE_HAL_3_3
12707 if (frame_settings.exists(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST)) {
12708 int32_t ispSensitivity =
12709 frame_settings.find(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST).data.i32[0];
12710 if (ispSensitivity <
12711 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity) {
12712 ispSensitivity =
12713 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
12714 LOGD("clamp ispSensitivity to %d", ispSensitivity);
12715 }
12716 if (ispSensitivity >
12717 gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity) {
12718 ispSensitivity =
12719 gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity;
12720 LOGD("clamp ispSensitivity to %d", ispSensitivity);
12721 }
12722 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_ISP_SENSITIVITY,
12723 ispSensitivity)) {
12724 rc = BAD_VALUE;
12725 }
12726 }
12727#endif
12728
Thierry Strudel3d639192016-09-09 11:52:26 -070012729 if (frame_settings.exists(ANDROID_SHADING_MODE)) {
12730 uint8_t shadingMode = frame_settings.find(ANDROID_SHADING_MODE).data.u8[0];
12731 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SHADING_MODE, shadingMode)) {
12732 rc = BAD_VALUE;
12733 }
12734 }
12735
12736 if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
12737 uint8_t fwk_facedetectMode =
12738 frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];
12739
12740 int val = lookupHalName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
12741 fwk_facedetectMode);
12742
12743 if (NAME_NOT_FOUND != val) {
12744 uint8_t facedetectMode = (uint8_t)val;
12745 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_FACEDETECT_MODE,
12746 facedetectMode)) {
12747 rc = BAD_VALUE;
12748 }
12749 }
12750 }
12751
Thierry Strudel54dc9782017-02-15 12:12:10 -080012752 if (frame_settings.exists(QCAMERA3_HISTOGRAM_MODE)) {
Thierry Strudel3d639192016-09-09 11:52:26 -070012753 uint8_t histogramMode =
Thierry Strudel54dc9782017-02-15 12:12:10 -080012754 frame_settings.find(QCAMERA3_HISTOGRAM_MODE).data.u8[0];
Thierry Strudel3d639192016-09-09 11:52:26 -070012755 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
12756 histogramMode)) {
12757 rc = BAD_VALUE;
12758 }
12759 }
12760
12761 if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) {
12762 uint8_t sharpnessMapMode =
12763 frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0];
12764 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,
12765 sharpnessMapMode)) {
12766 rc = BAD_VALUE;
12767 }
12768 }
12769
12770 if (frame_settings.exists(ANDROID_TONEMAP_MODE)) {
12771 uint8_t tonemapMode =
12772 frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0];
12773 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_MODE, tonemapMode)) {
12774 rc = BAD_VALUE;
12775 }
12776 }
12777 /* Tonemap curve channels ch0 = G, ch 1 = B, ch 2 = R */
12778 /*All tonemap channels will have the same number of points*/
12779 if (frame_settings.exists(ANDROID_TONEMAP_CURVE_GREEN) &&
12780 frame_settings.exists(ANDROID_TONEMAP_CURVE_BLUE) &&
12781 frame_settings.exists(ANDROID_TONEMAP_CURVE_RED)) {
12782 cam_rgb_tonemap_curves tonemapCurves;
12783 tonemapCurves.tonemap_points_cnt = frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).count/2;
12784 if (tonemapCurves.tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
12785 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
12786 tonemapCurves.tonemap_points_cnt,
12787 CAM_MAX_TONEMAP_CURVE_SIZE);
12788 tonemapCurves.tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
12789 }
12790
12791 /* ch0 = G*/
12792 size_t point = 0;
12793 cam_tonemap_curve_t tonemapCurveGreen;
12794 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12795 for (size_t j = 0; j < 2; j++) {
12796 tonemapCurveGreen.tonemap_points[i][j] =
12797 frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).data.f[point];
12798 point++;
12799 }
12800 }
12801 tonemapCurves.curves[0] = tonemapCurveGreen;
12802
12803 /* ch 1 = B */
12804 point = 0;
12805 cam_tonemap_curve_t tonemapCurveBlue;
12806 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12807 for (size_t j = 0; j < 2; j++) {
12808 tonemapCurveBlue.tonemap_points[i][j] =
12809 frame_settings.find(ANDROID_TONEMAP_CURVE_BLUE).data.f[point];
12810 point++;
12811 }
12812 }
12813 tonemapCurves.curves[1] = tonemapCurveBlue;
12814
12815 /* ch 2 = R */
12816 point = 0;
12817 cam_tonemap_curve_t tonemapCurveRed;
12818 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12819 for (size_t j = 0; j < 2; j++) {
12820 tonemapCurveRed.tonemap_points[i][j] =
12821 frame_settings.find(ANDROID_TONEMAP_CURVE_RED).data.f[point];
12822 point++;
12823 }
12824 }
12825 tonemapCurves.curves[2] = tonemapCurveRed;
12826
12827 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_CURVES,
12828 tonemapCurves)) {
12829 rc = BAD_VALUE;
12830 }
12831 }
12832
12833 if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
12834 uint8_t captureIntent = frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
12835 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_CAPTURE_INTENT,
12836 captureIntent)) {
12837 rc = BAD_VALUE;
12838 }
12839 }
12840
12841 if (frame_settings.exists(ANDROID_BLACK_LEVEL_LOCK)) {
12842 uint8_t blackLevelLock = frame_settings.find(ANDROID_BLACK_LEVEL_LOCK).data.u8[0];
12843 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_BLACK_LEVEL_LOCK,
12844 blackLevelLock)) {
12845 rc = BAD_VALUE;
12846 }
12847 }
12848
12849 if (frame_settings.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
12850 uint8_t lensShadingMapMode =
12851 frame_settings.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
12852 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_SHADING_MAP_MODE,
12853 lensShadingMapMode)) {
12854 rc = BAD_VALUE;
12855 }
12856 }
12857
12858 if (frame_settings.exists(ANDROID_CONTROL_AE_REGIONS)) {
12859 cam_area_t roi;
12860 bool reset = true;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080012861 convertFromRegions(roi, frame_settings, ANDROID_CONTROL_AE_REGIONS);
Thierry Strudel3d639192016-09-09 11:52:26 -070012862
12863 // Map coordinate system from active array to sensor output.
12864 mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
12865 roi.rect.height);
12866
12867 if (scalerCropSet) {
12868 reset = resetIfNeededROI(&roi, &scalerCropRegion);
12869 }
12870 if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_ROI, roi)) {
12871 rc = BAD_VALUE;
12872 }
12873 }
12874
12875 if (frame_settings.exists(ANDROID_CONTROL_AF_REGIONS)) {
12876 cam_area_t roi;
12877 bool reset = true;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080012878 convertFromRegions(roi, frame_settings, ANDROID_CONTROL_AF_REGIONS);
Thierry Strudel3d639192016-09-09 11:52:26 -070012879
12880 // Map coordinate system from active array to sensor output.
12881 mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
12882 roi.rect.height);
12883
12884 if (scalerCropSet) {
12885 reset = resetIfNeededROI(&roi, &scalerCropRegion);
12886 }
12887 if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_ROI, roi)) {
12888 rc = BAD_VALUE;
12889 }
12890 }
12891
12892 // CDS for non-HFR non-video mode
12893 if ((mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
12894 !(m_bIsVideo) && frame_settings.exists(QCAMERA3_CDS_MODE)) {
12895 int32_t *fwk_cds = frame_settings.find(QCAMERA3_CDS_MODE).data.i32;
12896 if ((CAM_CDS_MODE_MAX <= *fwk_cds) || (0 > *fwk_cds)) {
12897 LOGE("Invalid CDS mode %d!", *fwk_cds);
12898 } else {
12899 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12900 CAM_INTF_PARM_CDS_MODE, *fwk_cds)) {
12901 rc = BAD_VALUE;
12902 }
12903 }
12904 }
12905
Thierry Strudel04e026f2016-10-10 11:27:36 -070012906 // Video HDR
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012907 cam_video_hdr_mode_t vhdr = CAM_VIDEO_HDR_MODE_OFF;
Thierry Strudel04e026f2016-10-10 11:27:36 -070012908 if (frame_settings.exists(QCAMERA3_VIDEO_HDR_MODE)) {
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012909 vhdr = (cam_video_hdr_mode_t) frame_settings.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
12910 }
12911 if (m_bVideoHdrEnabled)
12912 vhdr = CAM_VIDEO_HDR_MODE_ON;
12913
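// Emit the HDR profiling log only when the requested video HDR state differs
// from the currently active staggered video HDR feature state.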
Thierry Strudel54dc9782017-02-15 12:12:10 -080012914 int8_t curr_hdr_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) != 0);
12915
12916 if(vhdr != curr_hdr_state)
12917 LOGH("PROFILE_SET_HDR_MODE %d" ,vhdr);
12918
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012919 rc = setVideoHdrMode(mParameters, vhdr);
12920 if (rc != NO_ERROR) {
12921 LOGE("setVideoHDR is failed");
Thierry Strudel04e026f2016-10-10 11:27:36 -070012922 }
12923
12924 //IR
12925 if(frame_settings.exists(QCAMERA3_IR_MODE)) {
12926 cam_ir_mode_type_t fwk_ir = (cam_ir_mode_type_t)
12927 frame_settings.find(QCAMERA3_IR_MODE).data.i32[0];
Thierry Strudel54dc9782017-02-15 12:12:10 -080012928 uint8_t curr_ir_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_IR) != 0);
12929 uint8_t isIRon = 0;
12930
12931 isIRon = (fwk_ir > 0) ? 1 : 0;
Thierry Strudel04e026f2016-10-10 11:27:36 -070012932 if ((CAM_IR_MODE_MAX <= fwk_ir) || (0 > fwk_ir)) {
12933 LOGE("Invalid IR mode %d!", fwk_ir);
12934 } else {
Thierry Strudel54dc9782017-02-15 12:12:10 -080012935 if(isIRon != curr_ir_state )
12936 LOGH("PROFILE_SET_IR_MODE %d" ,isIRon);
12937
Thierry Strudel04e026f2016-10-10 11:27:36 -070012938 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12939 CAM_INTF_META_IR_MODE, fwk_ir)) {
12940 rc = BAD_VALUE;
12941 }
12942 }
12943 }
12944
Thierry Strudel54dc9782017-02-15 12:12:10 -080012945 //Binning Correction Mode
12946 if(frame_settings.exists(QCAMERA3_BINNING_CORRECTION_MODE)) {
12947 cam_binning_correction_mode_t fwk_binning_correction = (cam_binning_correction_mode_t)
12948 frame_settings.find(QCAMERA3_BINNING_CORRECTION_MODE).data.i32[0];
12949 if ((CAM_BINNING_CORRECTION_MODE_MAX <= fwk_binning_correction)
12950 || (0 > fwk_binning_correction)) {
12951 LOGE("Invalid binning correction mode %d!", fwk_binning_correction);
12952 } else {
12953 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12954 CAM_INTF_META_BINNING_CORRECTION_MODE, fwk_binning_correction)) {
12955 rc = BAD_VALUE;
12956 }
12957 }
12958 }
12959
Thierry Strudel269c81a2016-10-12 12:13:59 -070012960 if (frame_settings.exists(QCAMERA3_AEC_CONVERGENCE_SPEED)) {
12961 float aec_speed;
12962 aec_speed = frame_settings.find(QCAMERA3_AEC_CONVERGENCE_SPEED).data.f[0];
12963 LOGD("AEC Speed :%f", aec_speed);
12964 if ( aec_speed < 0 ) {
12965 LOGE("Invalid AEC mode %f!", aec_speed);
12966 } else {
12967 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_CONVERGENCE_SPEED,
12968 aec_speed)) {
12969 rc = BAD_VALUE;
12970 }
12971 }
12972 }
12973
12974 if (frame_settings.exists(QCAMERA3_AWB_CONVERGENCE_SPEED)) {
12975 float awb_speed;
12976 awb_speed = frame_settings.find(QCAMERA3_AWB_CONVERGENCE_SPEED).data.f[0];
12977 LOGD("AWB Speed :%f", awb_speed);
12978 if ( awb_speed < 0 ) {
12979 LOGE("Invalid AWB mode %f!", awb_speed);
12980 } else {
12981 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AWB_CONVERGENCE_SPEED,
12982 awb_speed)) {
12983 rc = BAD_VALUE;
12984 }
12985 }
12986 }
12987
Thierry Strudel3d639192016-09-09 11:52:26 -070012988 // TNR
12989 if (frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_ENABLE) &&
12990 frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE)) {
12991 uint8_t b_TnrRequested = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -080012992 uint8_t curr_tnr_state = ((mCurrFeatureState & CAM_QTI_FEATURE_SW_TNR) != 0);
Thierry Strudel3d639192016-09-09 11:52:26 -070012993 cam_denoise_param_t tnr;
12994 tnr.denoise_enable = frame_settings.find(QCAMERA3_TEMPORAL_DENOISE_ENABLE).data.u8[0];
12995 tnr.process_plates =
12996 (cam_denoise_process_type_t)frame_settings.find(
12997 QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE).data.i32[0];
12998 b_TnrRequested = tnr.denoise_enable;
Thierry Strudel54dc9782017-02-15 12:12:10 -080012999
13000 if(b_TnrRequested != curr_tnr_state)
13001 LOGH("PROFILE_SET_TNR_MODE %d" ,b_TnrRequested);
13002
Thierry Strudel3d639192016-09-09 11:52:26 -070013003 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_TEMPORAL_DENOISE, tnr)) {
13004 rc = BAD_VALUE;
13005 }
13006 }
13007
Thierry Strudel54dc9782017-02-15 12:12:10 -080013008 if (frame_settings.exists(QCAMERA3_EXPOSURE_METER)) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -070013009 int32_t* exposure_metering_mode =
Thierry Strudel54dc9782017-02-15 12:12:10 -080013010 frame_settings.find(QCAMERA3_EXPOSURE_METER).data.i32;
Thierry Strudel295a0ca2016-11-03 18:38:47 -070013011 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_ALGO_TYPE,
13012 *exposure_metering_mode)) {
13013 rc = BAD_VALUE;
13014 }
13015 }
13016
Thierry Strudel3d639192016-09-09 11:52:26 -070013017 if (frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_MODE)) {
13018 int32_t fwk_testPatternMode =
13019 frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_MODE).data.i32[0];
13020 int testPatternMode = lookupHalName(TEST_PATTERN_MAP,
13021 METADATA_MAP_SIZE(TEST_PATTERN_MAP), fwk_testPatternMode);
13022
13023 if (NAME_NOT_FOUND != testPatternMode) {
13024 cam_test_pattern_data_t testPatternData;
13025 memset(&testPatternData, 0, sizeof(testPatternData));
13026 testPatternData.mode = (cam_test_pattern_mode_t)testPatternMode;
13027 if (testPatternMode == CAM_TEST_PATTERN_SOLID_COLOR &&
13028 frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_DATA)) {
13029 int32_t *fwk_testPatternData =
13030 frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_DATA).data.i32;
13031 testPatternData.r = fwk_testPatternData[0];
13032 testPatternData.b = fwk_testPatternData[3];
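// The ordering of the two green test pattern samples depends on the sensor's
// Bayer color filter arrangement.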
13033 switch (gCamCapability[mCameraId]->color_arrangement) {
13034 case CAM_FILTER_ARRANGEMENT_RGGB:
13035 case CAM_FILTER_ARRANGEMENT_GRBG:
13036 testPatternData.gr = fwk_testPatternData[1];
13037 testPatternData.gb = fwk_testPatternData[2];
13038 break;
13039 case CAM_FILTER_ARRANGEMENT_GBRG:
13040 case CAM_FILTER_ARRANGEMENT_BGGR:
13041 testPatternData.gr = fwk_testPatternData[2];
13042 testPatternData.gb = fwk_testPatternData[1];
13043 break;
13044 default:
13045 LOGE("color arrangement %d is not supported",
13046 gCamCapability[mCameraId]->color_arrangement);
13047 break;
13048 }
13049 }
13050 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TEST_PATTERN_DATA,
13051 testPatternData)) {
13052 rc = BAD_VALUE;
13053 }
13054 } else {
13055 LOGE("Invalid framework sensor test pattern mode %d",
13056 fwk_testPatternMode);
13057 }
13058 }
13059
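// JPEG/EXIF related settings: GPS tags, orientation, quality and thumbnail size.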
13060 if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
13061 size_t count = 0;
13062 camera_metadata_entry_t gps_coords = frame_settings.find(ANDROID_JPEG_GPS_COORDINATES);
13063 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_COORDINATES,
13064 gps_coords.data.d, gps_coords.count, count);
13065 if (gps_coords.count != count) {
13066 rc = BAD_VALUE;
13067 }
13068 }
13069
13070 if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
13071 char gps_methods[GPS_PROCESSING_METHOD_SIZE];
13072 size_t count = 0;
13073 const char *gps_methods_src = (const char *)
13074 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8;
13075 memset(gps_methods, '\0', sizeof(gps_methods));
13076 strlcpy(gps_methods, gps_methods_src, sizeof(gps_methods));
13077 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_PROC_METHODS,
13078 gps_methods, GPS_PROCESSING_METHOD_SIZE, count);
13079 if (GPS_PROCESSING_METHOD_SIZE != count) {
13080 rc = BAD_VALUE;
13081 }
13082 }
13083
13084 if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
13085 int64_t gps_timestamp = frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
13086 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_TIMESTAMP,
13087 gps_timestamp)) {
13088 rc = BAD_VALUE;
13089 }
13090 }
13091
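// Translate the JPEG orientation in degrees into the HAL rotation enum and tag it
// with the snapshot stream id before adding both to the HAL parameter batch.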
13092 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
13093 int32_t orientation = frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
13094 cam_rotation_info_t rotation_info;
13095 if (orientation == 0) {
13096 rotation_info.rotation = ROTATE_0;
13097 } else if (orientation == 90) {
13098 rotation_info.rotation = ROTATE_90;
13099 } else if (orientation == 180) {
13100 rotation_info.rotation = ROTATE_180;
13101 } else if (orientation == 270) {
13102 rotation_info.rotation = ROTATE_270;
13103 }
Shuzhen Wang6ec8eac2016-07-28 23:09:23 -070013104 rotation_info.device_rotation = ROTATE_0;
Thierry Strudel3d639192016-09-09 11:52:26 -070013105 rotation_info.streamId = snapshotStreamId;
13106 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_ORIENTATION, orientation);
13107 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ROTATION, rotation_info)) {
13108 rc = BAD_VALUE;
13109 }
13110 }
13111
13112 if (frame_settings.exists(ANDROID_JPEG_QUALITY)) {
13113 uint32_t quality = (uint32_t) frame_settings.find(ANDROID_JPEG_QUALITY).data.u8[0];
13114 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_QUALITY, quality)) {
13115 rc = BAD_VALUE;
13116 }
13117 }
13118
13119 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY)) {
13120 uint32_t thumb_quality = (uint32_t)
13121 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8[0];
13122 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_QUALITY,
13123 thumb_quality)) {
13124 rc = BAD_VALUE;
13125 }
13126 }
13127
13128 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
13129 cam_dimension_t dim;
13130 dim.width = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
13131 dim.height = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
13132 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_SIZE, dim)) {
13133 rc = BAD_VALUE;
13134 }
13135 }
13136
13137 // Internal metadata
13138 if (frame_settings.exists(QCAMERA3_PRIVATEDATA_REPROCESS)) {
13139 size_t count = 0;
13140 camera_metadata_entry_t privatedata = frame_settings.find(QCAMERA3_PRIVATEDATA_REPROCESS);
13141 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_PRIVATE_DATA,
13142 privatedata.data.i32, privatedata.count, count);
13143 if (privatedata.count != count) {
13144 rc = BAD_VALUE;
13145 }
13146 }
13147
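// Manual ISO / exposure-time priority: route the requested value to the matching HAL
// parameter and enable ZSL mode; otherwise keep ZSL mode disabled.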
Thierry Strudel295a0ca2016-11-03 18:38:47 -070013148 // ISO/Exposure Priority
13149 if (frame_settings.exists(QCAMERA3_USE_ISO_EXP_PRIORITY) &&
13150 frame_settings.exists(QCAMERA3_SELECT_PRIORITY)) {
13151 cam_priority_mode_t mode =
13152 (cam_priority_mode_t)frame_settings.find(QCAMERA3_SELECT_PRIORITY).data.i32[0];
13153 if((CAM_ISO_PRIORITY == mode) || (CAM_EXP_PRIORITY == mode)) {
13154 cam_intf_parm_manual_3a_t use_iso_exp_pty;
13155 use_iso_exp_pty.previewOnly = FALSE;
13156 uint64_t* ptr = (uint64_t*)frame_settings.find(QCAMERA3_USE_ISO_EXP_PRIORITY).data.i64;
13157 use_iso_exp_pty.value = *ptr;
13158
13159 if(CAM_ISO_PRIORITY == mode) {
13160 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ISO,
13161 use_iso_exp_pty)) {
13162 rc = BAD_VALUE;
13163 }
13164 }
13165 else {
13166 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_TIME,
13167 use_iso_exp_pty)) {
13168 rc = BAD_VALUE;
13169 }
13170 }
Thierry Strudel54dc9782017-02-15 12:12:10 -080013171
13172 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ZSL_MODE, 1)) {
13173 rc = BAD_VALUE;
13174 }
13175 }
13176 } else {
13177 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ZSL_MODE, 0)) {
13178 rc = BAD_VALUE;
Thierry Strudel295a0ca2016-11-03 18:38:47 -070013179 }
13180 }
13181
13182 // Saturation
13183 if (frame_settings.exists(QCAMERA3_USE_SATURATION)) {
13184 int32_t* use_saturation =
13185 frame_settings.find(QCAMERA3_USE_SATURATION).data.i32;
13186 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_SATURATION, *use_saturation)) {
13187 rc = BAD_VALUE;
13188 }
13189 }
13190
Thierry Strudel3d639192016-09-09 11:52:26 -070013191 // EV step
13192 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EV_STEP,
13193 gCamCapability[mCameraId]->exp_compensation_step)) {
13194 rc = BAD_VALUE;
13195 }
13196
13197 // CDS info
13198 if (frame_settings.exists(QCAMERA3_CDS_INFO)) {
13199 cam_cds_data_t *cdsData = (cam_cds_data_t *)
13200 frame_settings.find(QCAMERA3_CDS_INFO).data.u8;
13201
13202 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13203 CAM_INTF_META_CDS_DATA, *cdsData)) {
13204 rc = BAD_VALUE;
13205 }
13206 }
13207
Shuzhen Wang19463d72016-03-08 11:09:52 -080013208 // Hybrid AE
13209 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
13210 uint8_t *hybrid_ae = (uint8_t *)
13211 frame_settings.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8;
Shuzhen Wang77b049a2017-08-30 12:24:36 -070013212 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_HYBRID_AE, *hybrid_ae)) {
13213 rc = BAD_VALUE;
13214 }
Shuzhen Wang19463d72016-03-08 11:09:52 -080013215 }
13216
Shuzhen Wang14415f52016-11-16 18:26:18 -080013217 // Histogram
13218 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE)) {
13219 uint8_t histogramMode =
13220 frame_settings.find(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE).data.u8[0];
13221 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
13222 histogramMode)) {
13223 rc = BAD_VALUE;
13224 }
13225 }
13226
13227 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS)) {
13228 int32_t histogramBins =
13229 frame_settings.find(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS).data.i32[0];
13230 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_BINS,
13231 histogramBins)) {
13232 rc = BAD_VALUE;
13233 }
13234 }
13235
Shuzhen Wangcc386c52017-03-29 09:28:08 -070013236 // Tracking AF
13237 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER)) {
13238 uint8_t trackingAfTrigger =
13239 frame_settings.find(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER).data.u8[0];
13240 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TRACKING_AF_TRIGGER,
13241 trackingAfTrigger)) {
13242 rc = BAD_VALUE;
13243 }
13244 }
13245
Chien-Yu Chendbd619b2017-08-04 17:50:11 -070013246 // Makernote
13247 camera_metadata_entry entry = frame_settings.find(NEXUS_EXPERIMENTAL_2017_EXIF_MAKERNOTE);
13248 if (entry.count != 0) {
13249 if (entry.count <= MAX_MAKERNOTE_LENGTH) {
13250 cam_makernote_t makernote;
13251 makernote.length = entry.count;
13252 memcpy(makernote.data, entry.data.u8, makernote.length);
13253 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_MAKERNOTE, makernote)) {
13254 rc = BAD_VALUE;
13255 }
13256 } else {
13257 ALOGE("%s: Makernote length %u is larger than %d", __FUNCTION__, entry.count,
13258 MAX_MAKERNOTE_LENGTH);
13259 rc = BAD_VALUE;
13260 }
13261 }
13262
Thierry Strudel3d639192016-09-09 11:52:26 -070013263 return rc;
13264}
13265
13266/*===========================================================================
13267 * FUNCTION : captureResultCb
13268 *
13269 * DESCRIPTION: Callback handler for all channels (streams, as well as metadata)
13270 *
13271 * PARAMETERS :
13272 * @frame : frame information from mm-camera-interface
13273 * @buffer : actual gralloc buffer to be returned to frameworks. NULL if metadata.
13274 * @userdata: userdata
13275 *
13276 * RETURN : NONE
13277 *==========================================================================*/
13278void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata,
13279 camera3_stream_buffer_t *buffer,
13280 uint32_t frame_number, bool isInputBuffer, void *userdata)
13281{
13282 QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
13283 if (hw == NULL) {
13284 LOGE("Invalid hw %p", hw);
13285 return;
13286 }
13287
13288 hw->captureResultCb(metadata, buffer, frame_number, isInputBuffer);
13289 return;
13290}
13291
Thierry Strudelc2ee3302016-11-17 12:33:12 -080013292/*===========================================================================
13293 * FUNCTION : setBufferErrorStatus
13294 *
13295 * DESCRIPTION: Callback handler for channels to report any buffer errors
13296 *
13297 * PARAMETERS :
13298 * @ch : Channel on which buffer error is reported from
13299 * @frame_number : frame number on which buffer error is reported on
13300 * @buffer_status : buffer error status
13301 * @userdata: userdata
13302 *
13303 * RETURN : NONE
13304 *==========================================================================*/
13305void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
13306 uint32_t frame_number, camera3_buffer_status_t err, void *userdata)
13307{
13308 QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
13309 if (hw == NULL) {
13310 LOGE("Invalid hw %p", hw);
13311 return;
13312 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013313
Thierry Strudelc2ee3302016-11-17 12:33:12 -080013314 hw->setBufferErrorStatus(ch, frame_number, err);
13315 return;
13316}
13317
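// Member implementation: flag every pending buffer of the given frame number that
// belongs to the reporting channel with CAMERA3_BUFFER_STATUS_ERROR.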
13318void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
13319 uint32_t frameNumber, camera3_buffer_status_t err)
13320{
13321 LOGD("channel: %p, frame# %d, buf err: %d", ch, frameNumber, err);
13322 pthread_mutex_lock(&mMutex);
13323
13324 for (auto& req : mPendingBuffersMap.mPendingBuffersInRequest) {
13325 if (req.frame_number != frameNumber)
13326 continue;
13327 for (auto& k : req.mPendingBufferList) {
13328 if(k.stream->priv == ch) {
13329 k.bufStatus = CAMERA3_BUFFER_STATUS_ERROR;
13330 }
13331 }
13332 }
13333
13334 pthread_mutex_unlock(&mMutex);
13335 return;
13336}
Thierry Strudel3d639192016-09-09 11:52:26 -070013337/*===========================================================================
13338 * FUNCTION : initialize
13339 *
13340 * DESCRIPTION: Pass framework callback pointers to HAL
13341 *
13342 * PARAMETERS :
13343 *
13344 *
13345 * RETURN : Success : 0
13346 * Failure: -ENODEV
13347 *==========================================================================*/
13348
13349int QCamera3HardwareInterface::initialize(const struct camera3_device *device,
13350 const camera3_callback_ops_t *callback_ops)
13351{
13352 LOGD("E");
13353 QCamera3HardwareInterface *hw =
13354 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13355 if (!hw) {
13356 LOGE("NULL camera device");
13357 return -ENODEV;
13358 }
13359
13360 int rc = hw->initialize(callback_ops);
13361 LOGD("X");
13362 return rc;
13363}
13364
13365/*===========================================================================
13366 * FUNCTION : configure_streams
13367 *
13368 * DESCRIPTION: Configure output streams requested by the camera framework
13369 *
13370 * PARAMETERS :
13371 *
13372 *
13373 * RETURN : Success: 0
13374 * Failure: -EINVAL (if stream configuration is invalid)
13375 * -ENODEV (fatal error)
13376 *==========================================================================*/
13377
13378int QCamera3HardwareInterface::configure_streams(
13379 const struct camera3_device *device,
13380 camera3_stream_configuration_t *stream_list)
13381{
13382 LOGD("E");
13383 QCamera3HardwareInterface *hw =
13384 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13385 if (!hw) {
13386 LOGE("NULL camera device");
13387 return -ENODEV;
13388 }
13389 int rc = hw->configureStreams(stream_list);
13390 LOGD("X");
13391 return rc;
13392}
13393
13394/*===========================================================================
13395 * FUNCTION : construct_default_request_settings
13396 *
13397 * DESCRIPTION: Configure a settings buffer to meet the required use case
13398 *
13399 * PARAMETERS :
13400 *
13401 *
13402 * RETURN : Success: Return valid metadata
13403 * Failure: Return NULL
13404 *==========================================================================*/
13405const camera_metadata_t* QCamera3HardwareInterface::
13406 construct_default_request_settings(const struct camera3_device *device,
13407 int type)
13408{
13409
13410 LOGD("E");
13411 camera_metadata_t* fwk_metadata = NULL;
13412 QCamera3HardwareInterface *hw =
13413 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13414 if (!hw) {
13415 LOGE("NULL camera device");
13416 return NULL;
13417 }
13418
13419 fwk_metadata = hw->translateCapabilityToMetadata(type);
13420
13421 LOGD("X");
13422 return fwk_metadata;
13423}
13424
13425/*===========================================================================
13426 * FUNCTION : process_capture_request
13427 *
13428 * DESCRIPTION: Accept a capture request from the framework and queue it for processing
13429 *
13430 * PARAMETERS :
13431 *
13432 *
13433 * RETURN : 0 on success, negative error code on failure
13434 *==========================================================================*/
13435int QCamera3HardwareInterface::process_capture_request(
13436 const struct camera3_device *device,
13437 camera3_capture_request_t *request)
13438{
13439 LOGD("E");
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013440 CAMSCOPE_UPDATE_FLAGS(CAMSCOPE_SECTION_HAL, kpi_camscope_flags);
Thierry Strudel3d639192016-09-09 11:52:26 -070013441 QCamera3HardwareInterface *hw =
13442 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13443 if (!hw) {
13444 LOGE("NULL camera device");
13445 return -EINVAL;
13446 }
13447
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013448 int rc = hw->orchestrateRequest(request);
Thierry Strudel3d639192016-09-09 11:52:26 -070013449 LOGD("X");
13450 return rc;
13451}
13452
13453/*===========================================================================
13454 * FUNCTION : dump
13455 *
13456 * DESCRIPTION: Dump HAL state and debug info to the given file descriptor
13457 *
13458 * PARAMETERS :
13459 *
13460 *
13461 * RETURN : None
13462 *==========================================================================*/
13463
13464void QCamera3HardwareInterface::dump(
13465 const struct camera3_device *device, int fd)
13466{
13467 /* Log level property is read when "adb shell dumpsys media.camera" is
13468 called so that the log level can be controlled without restarting
13469 the media server */
13470 getLogLevel();
13471
13472 LOGD("E");
13473 QCamera3HardwareInterface *hw =
13474 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13475 if (!hw) {
13476 LOGE("NULL camera device");
13477 return;
13478 }
13479
13480 hw->dump(fd);
13481 LOGD("X");
13482 return;
13483}
13484
13485/*===========================================================================
13486 * FUNCTION : flush
13487 *
13488 * DESCRIPTION: Flush all in-flight requests and return pending buffers to the framework
13489 *
13490 * PARAMETERS :
13491 *
13492 *
13493 * RETURN : 0 on success, -EINVAL or -ENODEV on failure
13494 *==========================================================================*/
13495
13496int QCamera3HardwareInterface::flush(
13497 const struct camera3_device *device)
13498{
13499 int rc;
13500 LOGD("E");
13501 QCamera3HardwareInterface *hw =
13502 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13503 if (!hw) {
13504 LOGE("NULL camera device");
13505 return -EINVAL;
13506 }
13507
13508 pthread_mutex_lock(&hw->mMutex);
13509 // Validate current state
13510 switch (hw->mState) {
13511 case STARTED:
13512 /* valid state */
13513 break;
13514
13515 case ERROR:
13516 pthread_mutex_unlock(&hw->mMutex);
13517 hw->handleCameraDeviceError();
13518 return -ENODEV;
13519
13520 default:
13521 LOGI("Flush returned during state %d", hw->mState);
13522 pthread_mutex_unlock(&hw->mMutex);
13523 return 0;
13524 }
13525 pthread_mutex_unlock(&hw->mMutex);
13526
13527 rc = hw->flush(true /* restart channels */ );
13528 LOGD("X");
13529 return rc;
13530}
13531
13532/*===========================================================================
13533 * FUNCTION : close_camera_device
13534 *
13535 * DESCRIPTION: Close the camera device and release the HAL instance
13536 *
13537 * PARAMETERS :
13538 *
13539 *
13540 * RETURN : NO_ERROR on success, BAD_VALUE if the device is NULL
13541 *==========================================================================*/
13542int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device)
13543{
13544 int ret = NO_ERROR;
13545 QCamera3HardwareInterface *hw =
13546 reinterpret_cast<QCamera3HardwareInterface *>(
13547 reinterpret_cast<camera3_device_t *>(device)->priv);
13548 if (!hw) {
13549 LOGE("NULL camera device");
13550 return BAD_VALUE;
13551 }
13552
13553 LOGI("[KPI Perf]: E camera id %d", hw->mCameraId);
13554 delete hw;
13555 LOGI("[KPI Perf]: X");
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013556 CAMSCOPE_DESTROY(CAMSCOPE_SECTION_HAL);
Thierry Strudel3d639192016-09-09 11:52:26 -070013557 return ret;
13558}
13559
13560/*===========================================================================
13561 * FUNCTION : getWaveletDenoiseProcessPlate
13562 *
13563 * DESCRIPTION: query wavelet denoise process plate
13564 *
13565 * PARAMETERS : None
13566 *
13567 * RETURN : WNR process plate value
13568 *==========================================================================*/
13569cam_denoise_process_type_t QCamera3HardwareInterface::getWaveletDenoiseProcessPlate()
13570{
13571 char prop[PROPERTY_VALUE_MAX];
13572 memset(prop, 0, sizeof(prop));
13573 property_get("persist.denoise.process.plates", prop, "0");
13574 int processPlate = atoi(prop);
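// Property value mapping: 0 = YCbCr plane, 1 = CbCr only,
// 2 = streamlined YCbCr (default), 3 = streamlined CbCr.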
13575 switch(processPlate) {
13576 case 0:
13577 return CAM_WAVELET_DENOISE_YCBCR_PLANE;
13578 case 1:
13579 return CAM_WAVELET_DENOISE_CBCR_ONLY;
13580 case 2:
13581 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13582 case 3:
13583 return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
13584 default:
13585 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13586 }
13587}
13588
13589
13590/*===========================================================================
13591 * FUNCTION : getTemporalDenoiseProcessPlate
13592 *
13593 * DESCRIPTION: query temporal denoise process plate
13594 *
13595 * PARAMETERS : None
13596 *
13597 * RETURN : TNR process plate value
13598 *==========================================================================*/
13599cam_denoise_process_type_t QCamera3HardwareInterface::getTemporalDenoiseProcessPlate()
13600{
13601 char prop[PROPERTY_VALUE_MAX];
13602 memset(prop, 0, sizeof(prop));
13603 property_get("persist.tnr.process.plates", prop, "0");
13604 int processPlate = atoi(prop);
13605 switch(processPlate) {
13606 case 0:
13607 return CAM_WAVELET_DENOISE_YCBCR_PLANE;
13608 case 1:
13609 return CAM_WAVELET_DENOISE_CBCR_ONLY;
13610 case 2:
13611 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13612 case 3:
13613 return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
13614 default:
13615 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13616 }
13617}
13618
13619
13620/*===========================================================================
13621 * FUNCTION : extractSceneMode
13622 *
13623 * DESCRIPTION: Extract scene mode from framework settings metadata
13624 *
13625 * PARAMETERS :
13626 * @frame_settings: CameraMetadata reference
13627 * @metaMode: ANDROID_CONTROL_MODE
13628 * @hal_metadata: hal metadata structure
13629 *
13630 * RETURN : int32_t type of status, NO_ERROR on success
13631 *==========================================================================*/
13632int32_t QCamera3HardwareInterface::extractSceneMode(
13633 const CameraMetadata &frame_settings, uint8_t metaMode,
13634 metadata_buffer_t *hal_metadata)
13635{
13636 int32_t rc = NO_ERROR;
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013637 uint8_t sceneMode = CAM_SCENE_MODE_OFF;
13638
13639 if (ANDROID_CONTROL_MODE_OFF_KEEP_STATE == metaMode) {
13640 LOGD("Ignoring control mode OFF_KEEP_STATE");
13641 return NO_ERROR;
13642 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013643
13644 if (metaMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
13645 camera_metadata_ro_entry entry =
13646 frame_settings.find(ANDROID_CONTROL_SCENE_MODE);
13647 if (0 == entry.count)
13648 return rc;
13649
13650 uint8_t fwk_sceneMode = entry.data.u8[0];
13651
13652 int val = lookupHalName(SCENE_MODES_MAP,
13653 sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
13654 fwk_sceneMode);
13655 if (NAME_NOT_FOUND != val) {
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013656 sceneMode = (uint8_t)val;
Thierry Strudel3d639192016-09-09 11:52:26 -070013657 LOGD("sceneMode: %d", sceneMode);
Thierry Strudel3d639192016-09-09 11:52:26 -070013658 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013659 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013660
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013661 if ((sceneMode == CAM_SCENE_MODE_HDR) || m_bSensorHDREnabled) {
13662 rc = setSensorHDR(hal_metadata, (sceneMode == CAM_SCENE_MODE_HDR));
13663 }
13664
13665 if ((rc == NO_ERROR) && !m_bSensorHDREnabled) {
13666 if (sceneMode == ANDROID_CONTROL_SCENE_MODE_HDR) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013667 cam_hdr_param_t hdr_params;
13668 hdr_params.hdr_enable = 1;
13669 hdr_params.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13670 hdr_params.hdr_need_1x = false;
13671 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13672 CAM_INTF_PARM_HAL_BRACKETING_HDR, hdr_params)) {
13673 rc = BAD_VALUE;
13674 }
13675 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013676
Thierry Strudel3d639192016-09-09 11:52:26 -070013677 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13678 CAM_INTF_PARM_BESTSHOT_MODE, sceneMode)) {
13679 rc = BAD_VALUE;
13680 }
13681 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013682
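// A forced HDR snapshot always requests multi-frame HDR bracketing, independent of
// the scene mode supplied by the framework.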
13683 if (mForceHdrSnapshot) {
13684 cam_hdr_param_t hdr_params;
13685 hdr_params.hdr_enable = 1;
13686 hdr_params.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13687 hdr_params.hdr_need_1x = false;
13688 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13689 CAM_INTF_PARM_HAL_BRACKETING_HDR, hdr_params)) {
13690 rc = BAD_VALUE;
13691 }
13692 }
13693
Thierry Strudel3d639192016-09-09 11:52:26 -070013694 return rc;
13695}
13696
13697/*===========================================================================
Thierry Strudel04e026f2016-10-10 11:27:36 -070013698 * FUNCTION : setVideoHdrMode
13699 *
13700 * DESCRIPTION: Set video HDR mode from framework settings metadata
13701 *
13702 * PARAMETERS :
13703 * @hal_metadata: hal metadata structure
13704 * @vhdr : requested video HDR mode (QCAMERA3_VIDEO_HDR_MODE)
13705 *
13706 * RETURN : int32_t type of status, NO_ERROR on success
13707 *==========================================================================*/
13708int32_t QCamera3HardwareInterface::setVideoHdrMode(
13709 metadata_buffer_t *hal_metadata, cam_video_hdr_mode_t vhdr)
13710{
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013711 if ( (vhdr >= CAM_VIDEO_HDR_MODE_OFF) && (vhdr < CAM_VIDEO_HDR_MODE_MAX)) {
13712 return setSensorHDR(hal_metadata, (vhdr == CAM_VIDEO_HDR_MODE_ON), true);
13713 }
13714
13715 LOGE("Invalid Video HDR mode %d!", vhdr);
13716 return BAD_VALUE;
13717}
13718
13719/*===========================================================================
13720 * FUNCTION : setSensorHDR
13721 *
13722 * DESCRIPTION: Enable/disable sensor HDR.
13723 *
13724 * PARAMETERS :
13725 * @hal_metadata: hal metadata structure
13726 * @enable: boolean whether to enable/disable sensor HDR
13727 *
13728 * RETURN : int32_t type of status, NO_ERROR on success
13729 *==========================================================================*/
13730int32_t QCamera3HardwareInterface::setSensorHDR(
13731 metadata_buffer_t *hal_metadata, bool enable, bool isVideoHdrEnable)
13732{
Thierry Strudel04e026f2016-10-10 11:27:36 -070013733 int32_t rc = NO_ERROR;
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013734 cam_sensor_hdr_type_t sensor_hdr = CAM_SENSOR_HDR_OFF;
13735
13736 if (enable) {
13737 char sensor_hdr_prop[PROPERTY_VALUE_MAX];
13738 memset(sensor_hdr_prop, 0, sizeof(sensor_hdr_prop));
13739 #ifdef _LE_CAMERA_
13740 //Default to staggered HDR for IOT
13741 property_get("persist.camera.sensor.hdr", sensor_hdr_prop, "3");
13742 #else
13743 property_get("persist.camera.sensor.hdr", sensor_hdr_prop, "0");
13744 #endif
13745 sensor_hdr = (cam_sensor_hdr_type_t) atoi(sensor_hdr_prop);
13746 }
13747
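// Accept the requested sensor HDR type only if the matching feature bit is
// advertised in this camera's supported feature mask.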
13748 bool isSupported = false;
13749 switch (sensor_hdr) {
13750 case CAM_SENSOR_HDR_IN_SENSOR:
13751 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13752 CAM_QCOM_FEATURE_SENSOR_HDR) {
13753 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070013754 LOGD("Setting HDR mode In Sensor");
Thierry Strudel04e026f2016-10-10 11:27:36 -070013755 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013756 break;
13757 case CAM_SENSOR_HDR_ZIGZAG:
13758 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13759 CAM_QCOM_FEATURE_ZIGZAG_HDR) {
13760 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070013761 LOGD("Setting HDR mode Zigzag");
Thierry Strudel04e026f2016-10-10 11:27:36 -070013762 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013763 break;
13764 case CAM_SENSOR_HDR_STAGGERED:
13765 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13766 CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) {
13767 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070013768 LOGD("Setting HDR mode Staggered");
Thierry Strudel04e026f2016-10-10 11:27:36 -070013769 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013770 break;
13771 case CAM_SENSOR_HDR_OFF:
13772 isSupported = true;
13773 LOGD("Turning off sensor HDR");
13774 break;
13775 default:
13776 LOGE("HDR mode %d not supported", sensor_hdr);
13777 rc = BAD_VALUE;
13778 break;
13779 }
13780
13781 if(isSupported) {
13782 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13783 CAM_INTF_PARM_SENSOR_HDR, sensor_hdr)) {
13784 rc = BAD_VALUE;
13785 } else {
13786 if(!isVideoHdrEnable)
13787 m_bSensorHDREnabled = (sensor_hdr != CAM_SENSOR_HDR_OFF);
Thierry Strudel04e026f2016-10-10 11:27:36 -070013788 }
13789 }
13790 return rc;
13791}
13792
13793/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070013794 * FUNCTION : needRotationReprocess
13795 *
13796 * DESCRIPTION: if rotation needs to be done by reprocess in pp
13797 *
13798 * PARAMETERS : none
13799 *
13800 * RETURN : true: needed
13801 * false: no need
13802 *==========================================================================*/
13803bool QCamera3HardwareInterface::needRotationReprocess()
13804{
13805 if ((gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION) > 0) {
13806 // current rotation is not zero, and pp has the capability to process rotation
13807 LOGH("need do reprocess for rotation");
13808 return true;
13809 }
13810
13811 return false;
13812}
13813
13814/*===========================================================================
13815 * FUNCTION : needReprocess
13816 *
13817 * DESCRIPTION: if reprocess is needed
13818 *
13819 * PARAMETERS : none
13820 *
13821 * RETURN : true: needed
13822 * false: no need
13823 *==========================================================================*/
13824bool QCamera3HardwareInterface::needReprocess(cam_feature_mask_t postprocess_mask)
13825{
13826 if (gCamCapability[mCameraId]->qcom_supported_feature_mask > 0) {
13827 // TODO: add for ZSL HDR later
13828 // pp module has min requirement for zsl reprocess, or WNR in ZSL mode
13829 if(postprocess_mask == CAM_QCOM_FEATURE_NONE){
13830 LOGH("need do reprocess for ZSL WNR or min PP reprocess");
13831 return true;
13832 } else {
13833 LOGH("already post processed frame");
13834 return false;
13835 }
13836 }
13837 return needRotationReprocess();
13838}
13839
13840/*===========================================================================
13841 * FUNCTION : needJpegExifRotation
13842 *
13843 * DESCRIPTION: if JPEG EXIF rotation is needed
13844 *
13845 * PARAMETERS : none
13846 *
13847 * RETURN : true: needed
13848 * false: no need
13849 *==========================================================================*/
13850bool QCamera3HardwareInterface::needJpegExifRotation()
13851{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013852 /*If the pp does not have the ability to do rotation, enable jpeg rotation*/
Thierry Strudel3d639192016-09-09 11:52:26 -070013853 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
13854 LOGD("Need use Jpeg EXIF Rotation");
13855 return true;
13856 }
13857 return false;
13858}
13859
13860/*===========================================================================
13861 * FUNCTION : addOfflineReprocChannel
13862 *
13863 * DESCRIPTION: add a reprocess channel that will do reprocess on frames
13864 * coming from input channel
13865 *
13866 * PARAMETERS :
13867 * @config : reprocess configuration
13868 * @inputChHandle : pointer to the input (source) channel
13869 *
13870 *
13871 * RETURN : Ptr to the newly created channel obj. NULL if failed.
13872 *==========================================================================*/
13873QCamera3ReprocessChannel *QCamera3HardwareInterface::addOfflineReprocChannel(
13874 const reprocess_config_t &config, QCamera3ProcessingChannel *inputChHandle)
13875{
13876 int32_t rc = NO_ERROR;
13877 QCamera3ReprocessChannel *pChannel = NULL;
13878
13879 pChannel = new QCamera3ReprocessChannel(mCameraHandle->camera_handle,
Thierry Strudelc2ee3302016-11-17 12:33:12 -080013880 mChannelHandle, mCameraHandle->ops, captureResultCb, setBufferErrorStatus,
13881 config.padding, CAM_QCOM_FEATURE_NONE, this, inputChHandle);
Thierry Strudel3d639192016-09-09 11:52:26 -070013882 if (NULL == pChannel) {
13883 LOGE("no mem for reprocess channel");
13884 return NULL;
13885 }
13886
13887 rc = pChannel->initialize(IS_TYPE_NONE);
13888 if (rc != NO_ERROR) {
13889 LOGE("init reprocess channel failed, ret = %d", rc);
13890 delete pChannel;
13891 return NULL;
13892 }
13893
13894 // pp feature config
13895 cam_pp_feature_config_t pp_config;
13896 memset(&pp_config, 0, sizeof(cam_pp_feature_config_t));
13897
13898 pp_config.feature_mask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
13899 if (gCamCapability[mCameraId]->qcom_supported_feature_mask
13900 & CAM_QCOM_FEATURE_DSDN) {
13901 //Use CPP CDS incase h/w supports it.
13902 pp_config.feature_mask &= ~CAM_QCOM_FEATURE_CDS;
13903 pp_config.feature_mask |= CAM_QCOM_FEATURE_DSDN;
13904 }
13905 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
13906 pp_config.feature_mask &= ~CAM_QCOM_FEATURE_ROTATION;
13907 }
13908
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013909 if (config.hdr_param.hdr_enable) {
13910 pp_config.feature_mask |= CAM_QCOM_FEATURE_HDR;
13911 pp_config.hdr_param = config.hdr_param;
13912 }
13913
13914 if (mForceHdrSnapshot) {
13915 pp_config.feature_mask |= CAM_QCOM_FEATURE_HDR;
13916 pp_config.hdr_param.hdr_enable = 1;
13917 pp_config.hdr_param.hdr_need_1x = 0;
13918 pp_config.hdr_param.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13919 }
13920
Thierry Strudel3d639192016-09-09 11:52:26 -070013921 rc = pChannel->addReprocStreamsFromSource(pp_config,
13922 config,
13923 IS_TYPE_NONE,
13924 mMetadataChannel);
13925
13926 if (rc != NO_ERROR) {
13927 delete pChannel;
13928 return NULL;
13929 }
13930 return pChannel;
13931}
13932
13933/*===========================================================================
13934 * FUNCTION : getMobicatMask
13935 *
13936 * DESCRIPTION: returns mobicat mask
13937 *
13938 * PARAMETERS : none
13939 *
13940 * RETURN : mobicat mask
13941 *
13942 *==========================================================================*/
13943uint8_t QCamera3HardwareInterface::getMobicatMask()
13944{
13945 return m_MobicatMask;
13946}
13947
13948/*===========================================================================
13949 * FUNCTION : setMobicat
13950 *
13951 * DESCRIPTION: set Mobicat on/off.
13952 *
13953 * PARAMETERS :
13954 * @params : none
13955 *
13956 * RETURN : int32_t type of status
13957 * NO_ERROR -- success
13958 * non-zero failure code
13959 *==========================================================================*/
13960int32_t QCamera3HardwareInterface::setMobicat()
13961{
Thierry Strudel3d639192016-09-09 11:52:26 -070013962 int32_t ret = NO_ERROR;
Thierry Strudel3d639192016-09-09 11:52:26 -070013963
Shuzhen Wangb57ec912017-07-31 13:24:27 -070013964 if (m_MobicatMask) {
Thierry Strudel3d639192016-09-09 11:52:26 -070013965 tune_cmd_t tune_cmd;
13966 tune_cmd.type = SET_RELOAD_CHROMATIX;
13967 tune_cmd.module = MODULE_ALL;
13968 tune_cmd.value = TRUE;
13969 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
13970 CAM_INTF_PARM_SET_VFE_COMMAND,
13971 tune_cmd);
13972
13973 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
13974 CAM_INTF_PARM_SET_PP_COMMAND,
13975 tune_cmd);
13976 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013977
13978 return ret;
13979}
13980
13981/*===========================================================================
13982* FUNCTION : getLogLevel
13983*
13984* DESCRIPTION: Reads the log level property into a variable
13985*
13986* PARAMETERS :
13987* None
13988*
13989* RETURN :
13990* None
13991*==========================================================================*/
13992void QCamera3HardwareInterface::getLogLevel()
13993{
13994 char prop[PROPERTY_VALUE_MAX];
13995 uint32_t globalLogLevel = 0;
13996
13997 property_get("persist.camera.hal.debug", prop, "0");
13998 int val = atoi(prop);
13999 if (0 <= val) {
14000 gCamHal3LogLevel = (uint32_t)val;
14001 }
14002
Thierry Strudel9ec39c62016-12-28 11:30:05 -080014003 property_get("persist.camera.kpi.debug", prop, "0");
Thierry Strudel3d639192016-09-09 11:52:26 -070014004 gKpiDebugLevel = atoi(prop);
14005
14006 property_get("persist.camera.global.debug", prop, "0");
14007 val = atoi(prop);
14008 if (0 <= val) {
14009 globalLogLevel = (uint32_t)val;
14010 }
14011
14012 /* Highest log level among hal.logs and global.logs is selected */
14013 if (gCamHal3LogLevel < globalLogLevel)
14014 gCamHal3LogLevel = globalLogLevel;
14015
14016 return;
14017}
14018
14019/*===========================================================================
14020 * FUNCTION : validateStreamRotations
14021 *
14022 * DESCRIPTION: Check if the rotations requested are supported
14023 *
14024 * PARAMETERS :
14025 * @stream_list : streams to be configured
14026 *
14027 * RETURN : NO_ERROR on success
14028 * -EINVAL on failure
14029 *
14030 *==========================================================================*/
14031int QCamera3HardwareInterface::validateStreamRotations(
14032 camera3_stream_configuration_t *streamList)
14033{
14034 int rc = NO_ERROR;
14035
14036 /*
14037 * Loop through all streams requested in configuration
14038 * Check if unsupported rotations have been requested on any of them
14039 */
14040 for (size_t j = 0; j < streamList->num_streams; j++){
14041 camera3_stream_t *newStream = streamList->streams[j];
14042
Emilian Peev35ceeed2017-06-29 11:58:56 -070014043 switch(newStream->rotation) {
14044 case CAMERA3_STREAM_ROTATION_0:
14045 case CAMERA3_STREAM_ROTATION_90:
14046 case CAMERA3_STREAM_ROTATION_180:
14047 case CAMERA3_STREAM_ROTATION_270:
14048 //Expected values
14049 break;
14050 default:
14051 ALOGE("%s: Error: Unsupported rotation of %d requested for stream"
14052 "type:%d and stream format:%d", __func__,
14053 newStream->rotation, newStream->stream_type,
14054 newStream->format);
14055 return -EINVAL;
14056 }
14057
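// Non-zero rotation is only allowed on implementation-defined output streams;
// ZSL (bidirectional) streams must not request rotation.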
Thierry Strudel3d639192016-09-09 11:52:26 -070014058 bool isRotated = (newStream->rotation != CAMERA3_STREAM_ROTATION_0);
14059 bool isImplDef = (newStream->format ==
14060 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED);
14061 bool isZsl = (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
14062 isImplDef);
14063
14064 if (isRotated && (!isImplDef || isZsl)) {
14065 LOGE("Error: Unsupported rotation of %d requested for stream"
14066 "type:%d and stream format:%d",
14067 newStream->rotation, newStream->stream_type,
14068 newStream->format);
14069 rc = -EINVAL;
14070 break;
14071 }
14072 }
14073
14074 return rc;
14075}
14076
14077/*===========================================================================
14078* FUNCTION : getFlashInfo
14079*
14080* DESCRIPTION: Retrieve information about whether the device has a flash.
14081*
14082* PARAMETERS :
14083* @cameraId : Camera id to query
14084* @hasFlash : Boolean indicating whether there is a flash device
14085* associated with given camera
14086* @flashNode : If a flash device exists, this will be its device node.
14087*
14088* RETURN :
14089* None
14090*==========================================================================*/
14091void QCamera3HardwareInterface::getFlashInfo(const int cameraId,
14092 bool& hasFlash,
14093 char (&flashNode)[QCAMERA_MAX_FILEPATH_LENGTH])
14094{
14095 cam_capability_t* camCapability = gCamCapability[cameraId];
14096 if (NULL == camCapability) {
14097 hasFlash = false;
14098 flashNode[0] = '\0';
14099 } else {
14100 hasFlash = camCapability->flash_available;
14101 strlcpy(flashNode,
14102 (char*)camCapability->flash_dev_name,
14103 QCAMERA_MAX_FILEPATH_LENGTH);
14104 }
14105}
14106
14107/*===========================================================================
14108* FUNCTION : getEepromVersionInfo
14109*
14110* DESCRIPTION: Retrieve version info of the sensor EEPROM data
14111*
14112* PARAMETERS : None
14113*
14114* RETURN : string describing EEPROM version
14115* "\0" if no such info available
14116*==========================================================================*/
14117const char *QCamera3HardwareInterface::getEepromVersionInfo()
14118{
14119 return (const char *)&gCamCapability[mCameraId]->eeprom_version_info[0];
14120}
14121
14122/*===========================================================================
14123* FUNCTION : getLdafCalib
14124*
14125* DESCRIPTION: Retrieve Laser AF calibration data
14126*
14127* PARAMETERS : None
14128*
14129* RETURN : Two uint32_t describing laser AF calibration data
14130* NULL if none is available.
14131*==========================================================================*/
14132const uint32_t *QCamera3HardwareInterface::getLdafCalib()
14133{
14134 if (mLdafCalibExist) {
14135 return &mLdafCalib[0];
14136 } else {
14137 return NULL;
14138 }
14139}
14140
14141/*===========================================================================
Arnd Geis082a4d72017-08-24 10:33:07 -070014142* FUNCTION : getEaselFwVersion
14143*
14144* DESCRIPTION: Retrieve Easel firmware version
14145*
14146* PARAMETERS : None
14147*
14148* RETURN : string describing Firmware version
Arnd Geis8cbfc182017-09-07 14:46:41 -070014149* NULL if the firmware has not been updated
Arnd Geis082a4d72017-08-24 10:33:07 -070014150*==========================================================================*/
14151const char *QCamera3HardwareInterface::getEaselFwVersion()
14152{
Arnd Geis8cbfc182017-09-07 14:46:41 -070014153 if (mEaselFwUpdated) {
14154 return (const char *)&mEaselFwVersion[0];
14155 } else {
14156 return NULL;
Arnd Geis082a4d72017-08-24 10:33:07 -070014157 }
Arnd Geis082a4d72017-08-24 10:33:07 -070014158}
14159
14160/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070014161 * FUNCTION : dynamicUpdateMetaStreamInfo
14162 *
14163 * DESCRIPTION: This function:
14164 * (1) stops all the channels
14165 * (2) returns error on pending requests and buffers
14166 * (3) sends metastream_info in setparams
14167 * (4) starts all channels
14168 * This is useful when sensor has to be restarted to apply any
14169 * settings such as frame rate from a different sensor mode
14170 *
14171 * PARAMETERS : None
14172 *
14173 * RETURN : NO_ERROR on success
14174 * Error codes on failure
14175 *
14176 *==========================================================================*/
14177int32_t QCamera3HardwareInterface::dynamicUpdateMetaStreamInfo()
14178{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014179 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_DYN_UPDATE_META_STRM_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -070014180 int rc = NO_ERROR;
14181
14182 LOGD("E");
14183
14184 rc = stopAllChannels();
14185 if (rc < 0) {
14186 LOGE("stopAllChannels failed");
14187 return rc;
14188 }
14189
14190 rc = notifyErrorForPendingRequests();
14191 if (rc < 0) {
14192 LOGE("notifyErrorForPendingRequests failed");
14193 return rc;
14194 }
14195
14196 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
14197 LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%x"
14198 "Format:%d",
14199 mStreamConfigInfo.type[i],
14200 mStreamConfigInfo.stream_sizes[i].width,
14201 mStreamConfigInfo.stream_sizes[i].height,
14202 mStreamConfigInfo.postprocess_mask[i],
14203 mStreamConfigInfo.format[i]);
14204 }
14205
14206 /* Send meta stream info once again so that ISP can start */
14207 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
14208 CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
14209 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
14210 mParameters);
14211 if (rc < 0) {
14212 LOGE("set Metastreaminfo failed. Sensor mode does not change");
14213 }
14214
14215 rc = startAllChannels();
14216 if (rc < 0) {
14217 LOGE("startAllChannels failed");
14218 return rc;
14219 }
14220
14221 LOGD("X");
14222 return rc;
14223}
14224
14225/*===========================================================================
14226 * FUNCTION : stopAllChannels
14227 *
14228 * DESCRIPTION: This function stops (equivalent to stream-off) all channels
14229 *
14230 * PARAMETERS : None
14231 *
14232 * RETURN : NO_ERROR on success
14233 * Error codes on failure
14234 *
14235 *==========================================================================*/
14236int32_t QCamera3HardwareInterface::stopAllChannels()
14237{
14238 int32_t rc = NO_ERROR;
14239
14240 LOGD("Stopping all channels");
14241 // Stop the Streams/Channels
14242 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
14243 it != mStreamInfo.end(); it++) {
14244 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
14245 if (channel) {
14246 channel->stop();
14247 }
14248 (*it)->status = INVALID;
14249 }
14250
14251 if (mSupportChannel) {
14252 mSupportChannel->stop();
14253 }
14254 if (mAnalysisChannel) {
14255 mAnalysisChannel->stop();
14256 }
14257 if (mRawDumpChannel) {
14258 mRawDumpChannel->stop();
14259 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014260 if (mHdrPlusRawSrcChannel) {
14261 mHdrPlusRawSrcChannel->stop();
14262 }
Thierry Strudel3d639192016-09-09 11:52:26 -070014263 if (mMetadataChannel) {
14264        /* If mStreamInfo is not empty, there is a metadata stream */
14265 mMetadataChannel->stop();
14266 }
14267
14268 LOGD("All channels stopped");
14269 return rc;
14270}
14271
14272/*===========================================================================
14273 * FUNCTION : startAllChannels
14274 *
14275 * DESCRIPTION: This function starts (equivalent to stream-on) all channels
14276 *
14277 * PARAMETERS : None
14278 *
14279 * RETURN : NO_ERROR on success
14280 * Error codes on failure
14281 *
14282 *==========================================================================*/
14283int32_t QCamera3HardwareInterface::startAllChannels()
14284{
14285 int32_t rc = NO_ERROR;
14286
14287 LOGD("Start all channels ");
14288 // Start the Streams/Channels
14289 if (mMetadataChannel) {
14290        /* If mStreamInfo is not empty, there is a metadata stream */
14291 rc = mMetadataChannel->start();
14292 if (rc < 0) {
14293 LOGE("META channel start failed");
14294 return rc;
14295 }
14296 }
14297 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
14298 it != mStreamInfo.end(); it++) {
14299 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
14300 if (channel) {
14301 rc = channel->start();
14302 if (rc < 0) {
14303 LOGE("channel start failed");
14304 return rc;
14305 }
14306 }
14307 }
14308 if (mAnalysisChannel) {
14309 mAnalysisChannel->start();
14310 }
14311 if (mSupportChannel) {
14312 rc = mSupportChannel->start();
14313 if (rc < 0) {
14314 LOGE("Support channel start failed");
14315 return rc;
14316 }
14317 }
14318 if (mRawDumpChannel) {
14319 rc = mRawDumpChannel->start();
14320 if (rc < 0) {
14321 LOGE("RAW dump channel start failed");
14322 return rc;
14323 }
14324 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014325 if (mHdrPlusRawSrcChannel) {
14326 rc = mHdrPlusRawSrcChannel->start();
14327 if (rc < 0) {
14328 LOGE("HDR+ RAW channel start failed");
14329 return rc;
14330 }
14331 }
Thierry Strudel3d639192016-09-09 11:52:26 -070014332
14333 LOGD("All channels started");
14334 return rc;
14335}
14336
14337/*===========================================================================
14338 * FUNCTION : notifyErrorForPendingRequests
14339 *
14340 * DESCRIPTION: This function sends error for all the pending requests/buffers
14341 *
14342 * PARAMETERS : None
14343 *
14344 * RETURN : Error codes
14345 * NO_ERROR on success
14346 *
14347 *==========================================================================*/
14348int32_t QCamera3HardwareInterface::notifyErrorForPendingRequests()
14349{
Emilian Peev7650c122017-01-19 08:24:33 -080014350 notifyErrorFoPendingDepthData(mDepthChannel);
14351
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014352 auto pendingRequest = mPendingRequestsList.begin();
14353 auto pendingBuffer = mPendingBuffersMap.mPendingBuffersInRequest.begin();
Thierry Strudel3d639192016-09-09 11:52:26 -070014354
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014355 // Iterate through pending requests (for which result metadata isn't sent yet) and pending
14356 // buffers (for which buffers aren't sent yet).
14357 while (pendingRequest != mPendingRequestsList.end() ||
14358 pendingBuffer != mPendingBuffersMap.mPendingBuffersInRequest.end()) {
14359 if (pendingRequest == mPendingRequestsList.end() ||
14360 pendingBuffer->frame_number < pendingRequest->frame_number) {
14361 // If metadata for this frame was sent, notify about a buffer error and returns buffers
14362 // with error.
14363 for (auto &info : pendingBuffer->mPendingBufferList) {
14364 // Send a buffer error for this frame number.
Thierry Strudel3d639192016-09-09 11:52:26 -070014365 camera3_notify_msg_t notify_msg;
14366 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
14367 notify_msg.type = CAMERA3_MSG_ERROR;
14368 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014369 notify_msg.message.error.error_stream = info.stream;
14370 notify_msg.message.error.frame_number = pendingBuffer->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014371 orchestrateNotify(&notify_msg);
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014372
14373 camera3_stream_buffer_t buffer = {};
14374 buffer.acquire_fence = -1;
14375 buffer.release_fence = -1;
14376 buffer.buffer = info.buffer;
14377 buffer.status = CAMERA3_BUFFER_STATUS_ERROR;
14378 buffer.stream = info.stream;
14379 mOutputBufferDispatcher.markBufferReady(pendingBuffer->frame_number, buffer);
Thierry Strudel3d639192016-09-09 11:52:26 -070014380 }
14381
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014382 pendingBuffer = mPendingBuffersMap.mPendingBuffersInRequest.erase(pendingBuffer);
14383 } else if (pendingBuffer == mPendingBuffersMap.mPendingBuffersInRequest.end() ||
14384 pendingBuffer->frame_number > pendingRequest->frame_number) {
14385 // If the buffers for this frame were sent already, notify about a result error.
Thierry Strudel3d639192016-09-09 11:52:26 -070014386 camera3_notify_msg_t notify_msg;
14387 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
14388 notify_msg.type = CAMERA3_MSG_ERROR;
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014389 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_RESULT;
14390 notify_msg.message.error.error_stream = nullptr;
14391 notify_msg.message.error.frame_number = pendingRequest->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014392 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -070014393
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014394 if (pendingRequest->input_buffer != nullptr) {
14395 camera3_capture_result result = {};
14396 result.frame_number = pendingRequest->frame_number;
14397 result.result = nullptr;
14398 result.input_buffer = pendingRequest->input_buffer;
14399 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -070014400 }
14401
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014402 mShutterDispatcher.clear(pendingRequest->frame_number);
14403 pendingRequest = mPendingRequestsList.erase(pendingRequest);
14404 } else {
14405 // If both buffers and result metadata weren't sent yet, notify about a request error
14406 // and return buffers with error.
14407 for (auto &info : pendingBuffer->mPendingBufferList) {
14408 camera3_notify_msg_t notify_msg;
14409 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
14410 notify_msg.type = CAMERA3_MSG_ERROR;
14411 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_REQUEST;
14412 notify_msg.message.error.error_stream = info.stream;
14413 notify_msg.message.error.frame_number = pendingBuffer->frame_number;
14414 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -070014415
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014416 camera3_stream_buffer_t buffer = {};
14417 buffer.acquire_fence = -1;
14418 buffer.release_fence = -1;
14419 buffer.buffer = info.buffer;
14420 buffer.status = CAMERA3_BUFFER_STATUS_ERROR;
14421 buffer.stream = info.stream;
14422 mOutputBufferDispatcher.markBufferReady(pendingBuffer->frame_number, buffer);
14423 }
14424
14425 if (pendingRequest->input_buffer != nullptr) {
14426 camera3_capture_result result = {};
14427 result.frame_number = pendingRequest->frame_number;
14428 result.result = nullptr;
14429 result.input_buffer = pendingRequest->input_buffer;
14430 orchestrateResult(&result);
14431 }
14432
14433 mShutterDispatcher.clear(pendingRequest->frame_number);
14434 pendingBuffer = mPendingBuffersMap.mPendingBuffersInRequest.erase(pendingBuffer);
14435 pendingRequest = mPendingRequestsList.erase(pendingRequest);
Thierry Strudel3d639192016-09-09 11:52:26 -070014436 }
14437 }
14438
14439 /* Reset pending frame Drop list and requests list */
14440 mPendingFrameDropList.clear();
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014441 mShutterDispatcher.clear();
14442 mOutputBufferDispatcher.clear(/*clearConfiguredStreams*/false);
Thierry Strudel3d639192016-09-09 11:52:26 -070014443 mPendingBuffersMap.mPendingBuffersInRequest.clear();
Emilian Peev30522a12017-08-03 14:36:33 +010014444 mExpectedFrameDuration = 0;
14445 mExpectedInflightDuration = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -070014446 LOGH("Cleared all the pending buffers ");
14447
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014448 return NO_ERROR;
Thierry Strudel3d639192016-09-09 11:52:26 -070014449}
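// Summary of the error policy implemented above:
//   - result metadata already sent, buffers still pending -> ERROR_BUFFER per pending buffer
//   - buffers already sent, result metadata still pending -> ERROR_RESULT for the request
//   - both still pending                                   -> ERROR_REQUEST per pending buffer,
//                                                             all buffers returned with error status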
14450
14451bool QCamera3HardwareInterface::isOnEncoder(
14452 const cam_dimension_t max_viewfinder_size,
14453 uint32_t width, uint32_t height)
14454{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014455 return ((width > (uint32_t)max_viewfinder_size.width) ||
14456 (height > (uint32_t)max_viewfinder_size.height) ||
14457 (width > (uint32_t)VIDEO_4K_WIDTH) ||
14458 (height > (uint32_t)VIDEO_4K_HEIGHT));
Thierry Strudel3d639192016-09-09 11:52:26 -070014459}
14460
14461/*===========================================================================
14462 * FUNCTION : setBundleInfo
14463 *
14464 * DESCRIPTION: Set bundle info for all streams that are bundled.
14465 *
14466 * PARAMETERS : None
14467 *
14468 * RETURN : NO_ERROR on success
14469 * Error codes on failure
14470 *==========================================================================*/
14471int32_t QCamera3HardwareInterface::setBundleInfo()
14472{
14473 int32_t rc = NO_ERROR;
14474
14475 if (mChannelHandle) {
14476 cam_bundle_config_t bundleInfo;
14477 memset(&bundleInfo, 0, sizeof(bundleInfo));
14478 rc = mCameraHandle->ops->get_bundle_info(
14479 mCameraHandle->camera_handle, mChannelHandle, &bundleInfo);
14480 if (rc != NO_ERROR) {
14481 LOGE("get_bundle_info failed");
14482 return rc;
14483 }
14484 if (mAnalysisChannel) {
14485 mAnalysisChannel->setBundleInfo(bundleInfo);
14486 }
14487 if (mSupportChannel) {
14488 mSupportChannel->setBundleInfo(bundleInfo);
14489 }
14490 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
14491 it != mStreamInfo.end(); it++) {
14492 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
14493 channel->setBundleInfo(bundleInfo);
14494 }
14495 if (mRawDumpChannel) {
14496 mRawDumpChannel->setBundleInfo(bundleInfo);
14497 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014498 if (mHdrPlusRawSrcChannel) {
14499 mHdrPlusRawSrcChannel->setBundleInfo(bundleInfo);
14500 }
Thierry Strudel3d639192016-09-09 11:52:26 -070014501 }
14502
14503 return rc;
14504}
14505
14506/*===========================================================================
Thierry Strudel295a0ca2016-11-03 18:38:47 -070014507 * FUNCTION : setInstantAEC
14508 *
14509 * DESCRIPTION: Set Instant AEC related params.
14510 *
14511 * PARAMETERS :
14512 * @meta: CameraMetadata reference
14513 *
14514 * RETURN : NO_ERROR on success
14515 * Error codes on failure
14516 *==========================================================================*/
14517int32_t QCamera3HardwareInterface::setInstantAEC(const CameraMetadata &meta)
14518{
14519 int32_t rc = NO_ERROR;
14520 uint8_t val = 0;
14521 char prop[PROPERTY_VALUE_MAX];
14522
14523 // First try to configure instant AEC from framework metadata
14524 if (meta.exists(QCAMERA3_INSTANT_AEC_MODE)) {
14525 val = (uint8_t)meta.find(QCAMERA3_INSTANT_AEC_MODE).data.i32[0];
14526 }
14527
14528 // If framework did not set this value, try to read from set prop.
14529 if (val == 0) {
14530 memset(prop, 0, sizeof(prop));
14531 property_get("persist.camera.instant.aec", prop, "0");
14532 val = (uint8_t)atoi(prop);
14533 }
14534
14535 if ((val >= (uint8_t)CAM_AEC_NORMAL_CONVERGENCE) &&
14536 ( val < (uint8_t)CAM_AEC_CONVERGENCE_MAX)) {
14537 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_INSTANT_AEC, val);
14538 mInstantAEC = val;
14539 mInstantAECSettledFrameNumber = 0;
14540 mInstantAecFrameIdxCount = 0;
14541 LOGH("instantAEC value set %d",val);
14542 if (mInstantAEC) {
14543 memset(prop, 0, sizeof(prop));
14544 property_get("persist.camera.ae.instant.bound", prop, "10");
14545 int32_t aec_frame_skip_cnt = atoi(prop);
14546 if (aec_frame_skip_cnt >= 0) {
14547 mAecSkipDisplayFrameBound = (uint8_t)aec_frame_skip_cnt;
14548 } else {
14549 LOGE("Invalid prop for aec frame bound %d", aec_frame_skip_cnt);
14550 rc = BAD_VALUE;
14551 }
14552 }
14553 } else {
14554 LOGE("Bad instant aec value set %d", val);
14555 rc = BAD_VALUE;
14556 }
14557 return rc;
14558}
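// Developer note (illustrative, based on the properties read above): instant
// AEC can also be enabled without framework metadata, e.g.
//
//     adb shell setprop persist.camera.instant.aec 1
//     adb shell setprop persist.camera.ae.instant.bound 10
//
// The first property selects the convergence type (any value in
// [CAM_AEC_NORMAL_CONVERGENCE, CAM_AEC_CONVERGENCE_MAX) is accepted); the
// second bounds how many display frames are skipped while AEC settles.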
14559
14560/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070014561 * FUNCTION : get_num_overall_buffers
14562 *
14563 * DESCRIPTION: Estimate number of pending buffers across all requests.
14564 *
14565 * PARAMETERS : None
14566 *
14567 * RETURN : Number of overall pending buffers
14568 *
14569 *==========================================================================*/
14570uint32_t PendingBuffersMap::get_num_overall_buffers()
14571{
14572 uint32_t sum_buffers = 0;
14573 for (auto &req : mPendingBuffersInRequest) {
14574 sum_buffers += req.mPendingBufferList.size();
14575 }
14576 return sum_buffers;
14577}
14578
14579/*===========================================================================
14580 * FUNCTION : removeBuf
14581 *
14582 * DESCRIPTION: Remove a matching buffer from tracker.
14583 *
14584 * PARAMETERS : @buffer: image buffer for the callback
14585 *
14586 * RETURN : None
14587 *
14588 *==========================================================================*/
14589void PendingBuffersMap::removeBuf(buffer_handle_t *buffer)
14590{
14591 bool buffer_found = false;
14592 for (auto req = mPendingBuffersInRequest.begin();
14593 req != mPendingBuffersInRequest.end(); req++) {
14594 for (auto k = req->mPendingBufferList.begin();
14595 k != req->mPendingBufferList.end(); k++ ) {
14596 if (k->buffer == buffer) {
14597                LOGD("Frame %d: Found frame buffer %p, removing it from mPendingBufferList",
14598 req->frame_number, buffer);
14599 k = req->mPendingBufferList.erase(k);
14600 if (req->mPendingBufferList.empty()) {
14601 // Remove this request from Map
14602 req = mPendingBuffersInRequest.erase(req);
14603 }
14604 buffer_found = true;
14605 break;
14606 }
14607 }
14608 if (buffer_found) {
14609 break;
14610 }
14611 }
14612 LOGD("mPendingBuffersMap.num_overall_buffers = %d",
14613 get_num_overall_buffers());
14614}
14615
14616/*===========================================================================
Thierry Strudelc2ee3302016-11-17 12:33:12 -080014617 * FUNCTION : getBufErrStatus
14618 *
14619 * DESCRIPTION: get buffer error status
14620 *
14621 * PARAMETERS : @buffer: buffer handle
14622 *
14623 * RETURN : Error status
14624 *
14625 *==========================================================================*/
14626int32_t PendingBuffersMap::getBufErrStatus(buffer_handle_t *buffer)
14627{
14628 for (auto& req : mPendingBuffersInRequest) {
14629 for (auto& k : req.mPendingBufferList) {
14630 if (k.buffer == buffer)
14631 return k.bufStatus;
14632 }
14633 }
14634 return CAMERA3_BUFFER_STATUS_OK;
14635}
14636
14637/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070014638 * FUNCTION : setPAAFSupport
14639 *
14640 * DESCRIPTION: Set the preview-assisted auto focus support bit in
14641 * feature mask according to stream type and filter
14642 * arrangement
14643 *
14644 * PARAMETERS : @feature_mask: current feature mask, which may be modified
14645 * @stream_type: stream type
14646 * @filter_arrangement: filter arrangement
14647 *
14648 * RETURN : None
14649 *==========================================================================*/
14650void QCamera3HardwareInterface::setPAAFSupport(
14651 cam_feature_mask_t& feature_mask,
14652 cam_stream_type_t stream_type,
14653 cam_color_filter_arrangement_t filter_arrangement)
14654{
Thierry Strudel3d639192016-09-09 11:52:26 -070014655 switch (filter_arrangement) {
14656 case CAM_FILTER_ARRANGEMENT_RGGB:
14657 case CAM_FILTER_ARRANGEMENT_GRBG:
14658 case CAM_FILTER_ARRANGEMENT_GBRG:
14659 case CAM_FILTER_ARRANGEMENT_BGGR:
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014660 if ((stream_type == CAM_STREAM_TYPE_PREVIEW) ||
14661 (stream_type == CAM_STREAM_TYPE_ANALYSIS) ||
Thierry Strudel3d639192016-09-09 11:52:26 -070014662 (stream_type == CAM_STREAM_TYPE_VIDEO)) {
Thierry Strudel2896d122017-02-23 19:18:03 -080014663 if (!(feature_mask & CAM_QTI_FEATURE_PPEISCORE))
14664 feature_mask |= CAM_QCOM_FEATURE_PAAF;
Thierry Strudel3d639192016-09-09 11:52:26 -070014665 }
14666 break;
14667 case CAM_FILTER_ARRANGEMENT_Y:
14668 if (stream_type == CAM_STREAM_TYPE_ANALYSIS) {
14669 feature_mask |= CAM_QCOM_FEATURE_PAAF;
14670 }
14671 break;
14672 default:
14673 break;
14674 }
Shuzhen Wang3b457d92016-08-03 08:46:59 -070014675 LOGD("feature_mask=0x%llx; stream_type=%d, filter_arrangement=%d",
14676 feature_mask, stream_type, filter_arrangement);
14677
14678
Thierry Strudel3d639192016-09-09 11:52:26 -070014679}
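// Illustrative sketch (assumed caller, not from this file; the capability
// field name is an assumption): PAAF support is typically folded into a
// stream's feature mask before stream configuration, using the sensor's CFA:
//
//     cam_feature_mask_t mask = mStreamConfigInfo.postprocess_mask[i];
//     setPAAFSupport(mask, CAM_STREAM_TYPE_PREVIEW,
//             gCamCapability[mCameraId]->color_arrangement);
//     mStreamConfigInfo.postprocess_mask[i] = mask;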
14680
14681/*===========================================================================
14682* FUNCTION : getSensorMountAngle
14683*
14684* DESCRIPTION: Retrieve sensor mount angle
14685*
14686* PARAMETERS : None
14687*
14688* RETURN : sensor mount angle in uint32_t
14689*==========================================================================*/
14690uint32_t QCamera3HardwareInterface::getSensorMountAngle()
14691{
14692 return gCamCapability[mCameraId]->sensor_mount_angle;
14693}
14694
14695/*===========================================================================
14696* FUNCTION : getRelatedCalibrationData
14697*
14698* DESCRIPTION: Retrieve related system calibration data
14699*
14700* PARAMETERS : None
14701*
14702* RETURN : Pointer of related system calibration data
14703*==========================================================================*/
14704const cam_related_system_calibration_data_t *QCamera3HardwareInterface::getRelatedCalibrationData()
14705{
14706 return (const cam_related_system_calibration_data_t *)
14707 &(gCamCapability[mCameraId]->related_cam_calibration);
14708}
Shuzhen Wangf6890e02016-08-12 14:28:54 -070014709
14710/*===========================================================================
14711 * FUNCTION : is60HzZone
14712 *
14713 * DESCRIPTION: Whether the phone is in a zone with 60Hz mains electricity frequency
14714 *
14715 * PARAMETERS : None
14716 *
14717 * RETURN : True if in 60Hz zone, False otherwise
14718 *==========================================================================*/
14719bool QCamera3HardwareInterface::is60HzZone()
14720{
14721 time_t t = time(NULL);
14722 struct tm lt;
14723
14724 struct tm* r = localtime_r(&t, &lt);
14725
14726 if (r == NULL || lt.tm_gmtoff <= -2*60*60 || lt.tm_gmtoff >= 8*60*60)
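    // UTC offsets strictly between -2h and +8h roughly cover the 50Hz regions
    // (Europe, Africa, most of Asia); offsets outside that range, or a failed
    // localtime_r(), are treated as a 60Hz zone.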
14727 return true;
14728 else
14729 return false;
14730}
Shuzhen Wanga5da1022016-07-13 20:18:42 -070014731
14732/*===========================================================================
14733 * FUNCTION : adjustBlackLevelForCFA
14734 *
14735 * DESCRIPTION: Adjust the black level pattern in the order of RGGB to the order
14736 *              of the Bayer CFA (Color Filter Array).
14737 *
14738 * PARAMETERS : @input: black level pattern in the order of RGGB
14739 * @output: black level pattern in the order of CFA
14740 * @color_arrangement: CFA color arrangement
14741 *
14742 * RETURN : None
14743 *==========================================================================*/
14744template<typename T>
14745void QCamera3HardwareInterface::adjustBlackLevelForCFA(
14746 T input[BLACK_LEVEL_PATTERN_CNT],
14747 T output[BLACK_LEVEL_PATTERN_CNT],
14748 cam_color_filter_arrangement_t color_arrangement)
14749{
14750 switch (color_arrangement) {
14751 case CAM_FILTER_ARRANGEMENT_GRBG:
14752 output[0] = input[1];
14753 output[1] = input[0];
14754 output[2] = input[3];
14755 output[3] = input[2];
14756 break;
14757 case CAM_FILTER_ARRANGEMENT_GBRG:
14758 output[0] = input[2];
14759 output[1] = input[3];
14760 output[2] = input[0];
14761 output[3] = input[1];
14762 break;
14763 case CAM_FILTER_ARRANGEMENT_BGGR:
14764 output[0] = input[3];
14765 output[1] = input[2];
14766 output[2] = input[1];
14767 output[3] = input[0];
14768 break;
14769 case CAM_FILTER_ARRANGEMENT_RGGB:
14770 output[0] = input[0];
14771 output[1] = input[1];
14772 output[2] = input[2];
14773 output[3] = input[3];
14774 break;
14775 default:
14776 LOGE("Invalid color arrangement to derive dynamic blacklevel");
14777 break;
14778 }
14779}
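// Illustrative sketch (assumed caller; the values are made up and the
// capability field name is an assumption): remapping a dynamic black level
// reported in RGGB order into the sensor's CFA order.
//
//     float blRggb[BLACK_LEVEL_PATTERN_CNT] = {64.0f, 64.0f, 64.0f, 64.0f};
//     float blCfa[BLACK_LEVEL_PATTERN_CNT] = {};
//     adjustBlackLevelForCFA(blRggb, blCfa,
//             gCamCapability[mCameraId]->color_arrangement);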
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014780
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014781void QCamera3HardwareInterface::updateHdrPlusResultMetadata(
14782 CameraMetadata &resultMetadata,
14783 std::shared_ptr<metadata_buffer_t> settings)
14784{
14785 if (settings == nullptr) {
14786 ALOGE("%s: settings is nullptr.", __FUNCTION__);
14787 return;
14788 }
14789
14790 IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, settings) {
14791 resultMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
14792 }
14793
14794 IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, settings) {
14795 String8 str((const char *)gps_methods);
14796 resultMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
14797 }
14798
14799 IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, settings) {
14800 resultMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
14801 }
14802
14803 IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, settings) {
14804 resultMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
14805 }
14806
14807 IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, settings) {
14808 uint8_t fwk_jpeg_quality = static_cast<uint8_t>(*jpeg_quality);
14809 resultMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
14810 }
14811
14812 IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, settings) {
14813 uint8_t fwk_thumb_quality = static_cast<uint8_t>(*thumb_quality);
14814 resultMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
14815 }
14816
14817 IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, settings) {
14818 int32_t fwk_thumb_size[2];
14819 fwk_thumb_size[0] = thumb_size->width;
14820 fwk_thumb_size[1] = thumb_size->height;
14821 resultMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
14822 }
14823
14824 IF_META_AVAILABLE(uint32_t, intent, CAM_INTF_META_CAPTURE_INTENT, settings) {
14825 uint8_t fwk_intent = intent[0];
14826 resultMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &fwk_intent, 1);
14827 }
14828}
14829
Chien-Yu Chenad9b6632017-08-22 19:09:23 -070014830bool QCamera3HardwareInterface::isRequestHdrPlusCompatible(
14831 const camera3_capture_request_t &request, const CameraMetadata &metadata) {
Chien-Yu Chenec328c82017-08-30 16:41:08 -070014832 if (metadata.exists(NEXUS_EXPERIMENTAL_2017_DISABLE_HDRPLUS) &&
14833 metadata.find(NEXUS_EXPERIMENTAL_2017_DISABLE_HDRPLUS).data.i32[0] == 1) {
14834 ALOGV("%s: NEXUS_EXPERIMENTAL_2017_DISABLE_HDRPLUS is 1", __FUNCTION__);
14835 return false;
14836 }
14837
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014838 if (!metadata.exists(ANDROID_NOISE_REDUCTION_MODE) ||
14839 metadata.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0] !=
14840 ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY) {
Chien-Yu Chenad9b6632017-08-22 19:09:23 -070014841 ALOGV("%s: ANDROID_NOISE_REDUCTION_MODE is not HQ: %d", __FUNCTION__,
Chien-Yu Chenee335912017-02-09 17:53:20 -080014842 metadata.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0]);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014843 return false;
14844 }
14845
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014846 if (!metadata.exists(ANDROID_EDGE_MODE) ||
Chien-Yu Chenad9b6632017-08-22 19:09:23 -070014847 metadata.find(ANDROID_EDGE_MODE).data.u8[0] != ANDROID_EDGE_MODE_HIGH_QUALITY) {
14848 ALOGV("%s: ANDROID_EDGE_MODE is not HQ.", __FUNCTION__);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014849 return false;
14850 }
14851
Chien-Yu Chenad9b6632017-08-22 19:09:23 -070014852 if (!metadata.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE) ||
14853 metadata.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0] !=
14854 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY) {
14855 ALOGV("%s: ANDROID_COLOR_CORRECTION_ABERRATION_MODE is not HQ.", __FUNCTION__);
14856 return false;
14857 }
14858
14859 if (!metadata.exists(ANDROID_CONTROL_AE_MODE) ||
14860 (metadata.find(ANDROID_CONTROL_AE_MODE).data.u8[0] != ANDROID_CONTROL_AE_MODE_ON &&
14861 metadata.find(ANDROID_CONTROL_AE_MODE).data.u8[0] !=
14862 ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH)) {
14863 ALOGV("%s: ANDROID_CONTROL_AE_MODE is not ON or ON_AUTO_FLASH.", __FUNCTION__);
14864 return false;
14865 }
14866
14867 if (!metadata.exists(ANDROID_CONTROL_AWB_MODE) ||
14868 metadata.find(ANDROID_CONTROL_AWB_MODE).data.u8[0] != ANDROID_CONTROL_AWB_MODE_AUTO) {
14869 ALOGV("%s: ANDROID_CONTROL_AWB_MODE is not AUTO.", __FUNCTION__);
14870 return false;
14871 }
14872
14873 if (!metadata.exists(ANDROID_CONTROL_EFFECT_MODE) ||
14874 metadata.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0] !=
14875 ANDROID_CONTROL_EFFECT_MODE_OFF) {
14876            ALOGV("%s: ANDROID_CONTROL_EFFECT_MODE is not OFF.", __FUNCTION__);
14877 return false;
14878 }
14879
14880 if (!metadata.exists(ANDROID_CONTROL_MODE) ||
14881 (metadata.find(ANDROID_CONTROL_MODE).data.u8[0] != ANDROID_CONTROL_MODE_AUTO &&
14882 metadata.find(ANDROID_CONTROL_MODE).data.u8[0] !=
14883 ANDROID_CONTROL_MODE_USE_SCENE_MODE)) {
14884 ALOGV("%s: ANDROID_CONTROL_MODE is not AUTO or USE_SCENE_MODE.", __FUNCTION__);
14885 return false;
14886 }
14887
14888 // TODO (b/32585046): support non-ZSL.
14889 if (!metadata.exists(ANDROID_CONTROL_ENABLE_ZSL) ||
14890 metadata.find(ANDROID_CONTROL_ENABLE_ZSL).data.u8[0] != ANDROID_CONTROL_ENABLE_ZSL_TRUE) {
14891 ALOGV("%s: ANDROID_CONTROL_ENABLE_ZSL is not true.", __FUNCTION__);
14892 return false;
14893 }
14894
14895 // TODO (b/32586081): support flash.
14896 if (!metadata.exists(ANDROID_FLASH_MODE) ||
14897 metadata.find(ANDROID_FLASH_MODE).data.u8[0] != ANDROID_FLASH_MODE_OFF) {
14898 ALOGV("%s: ANDROID_FLASH_MODE is not OFF.", __FUNCTION__);
14899 return false;
14900 }
14901
Chien-Yu Chenad9b6632017-08-22 19:09:23 -070014902 if (!metadata.exists(ANDROID_TONEMAP_MODE) ||
14903 metadata.find(ANDROID_TONEMAP_MODE).data.u8[0] != ANDROID_TONEMAP_MODE_HIGH_QUALITY) {
14904 ALOGV("%s: ANDROID_TONEMAP_MODE is not HQ.", __FUNCTION__);
14905 return false;
14906 }
14907
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070014908
Chien-Yu Chenad9b6632017-08-22 19:09:23 -070014909 // TODO (b/36693254, b/36690506): support other outputs.
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070014910 if (!gEnableMultipleHdrplusOutputs && request.num_output_buffers != 1) {
14911 ALOGV("%s: Only support 1 output: %d", __FUNCTION__, request.num_output_buffers);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014912 return false;
14913 }
14914
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070014915 switch (request.output_buffers[0].stream->format) {
14916 case HAL_PIXEL_FORMAT_BLOB:
14917 break;
14918 case HAL_PIXEL_FORMAT_YCbCr_420_888:
14919 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
14920 // TODO (b/36693254): Only support full size.
14921 if (!gEnableMultipleHdrplusOutputs) {
14922 if (static_cast<int>(request.output_buffers[0].stream->width) !=
14923 gCamCapability[mCameraId]->picture_sizes_tbl[0].width ||
14924 static_cast<int>(request.output_buffers[0].stream->height) !=
14925 gCamCapability[mCameraId]->picture_sizes_tbl[0].height) {
14926 ALOGV("%s: Only full size is supported.", __FUNCTION__);
14927 return false;
14928 }
14929 }
14930 break;
14931 default:
14932 ALOGV("%s: Not an HDR+ request: Only Jpeg and YUV output is supported.", __FUNCTION__);
14933 for (uint32_t i = 0; i < request.num_output_buffers; i++) {
14934 ALOGV("%s: output_buffers[%u]: %dx%d format %d", __FUNCTION__, i,
14935                    request.output_buffers[i].stream->width,
14936                    request.output_buffers[i].stream->height,
14937                    request.output_buffers[i].stream->format);
14938 }
14939 return false;
14940 }
14941
Chien-Yu Chenad9b6632017-08-22 19:09:23 -070014942 return true;
14943}
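// Summary of the checks above: a request is HDR+ compatible only when HDR+ is
// not explicitly disabled, noise reduction / edge / aberration / tonemap modes
// are all HIGH_QUALITY, AE is ON or ON_AUTO_FLASH, AWB is AUTO, effect is OFF,
// control mode is AUTO or USE_SCENE_MODE, ZSL is enabled, flash is OFF, and
// (unless multiple HDR+ outputs are enabled) it has exactly one full-size JPEG
// or YUV output buffer.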
14944
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070014945void QCamera3HardwareInterface::abortPendingHdrplusRequest(HdrPlusPendingRequest *hdrPlusRequest) {
14946 if (hdrPlusRequest == nullptr) return;
14947
14948 for (auto & outputBufferIter : hdrPlusRequest->outputBuffers) {
14949 // Find the stream for this buffer.
14950 for (auto streamInfo : mStreamInfo) {
14951 if (streamInfo->id == outputBufferIter.first) {
14952 if (streamInfo->channel == mPictureChannel) {
14953 // For picture channel, this buffer is internally allocated so return this
14954 // buffer to picture channel.
14955 mPictureChannel->returnYuvBuffer(outputBufferIter.second.get());
14956 } else {
14957 // Unregister this buffer for other channels.
14958 streamInfo->channel->unregisterBuffer(outputBufferIter.second.get());
14959 }
14960 break;
14961 }
14962 }
14963 }
14964
14965 hdrPlusRequest->outputBuffers.clear();
14966 hdrPlusRequest->frameworkOutputBuffers.clear();
14967}
14968
Chien-Yu Chenad9b6632017-08-22 19:09:23 -070014969bool QCamera3HardwareInterface::trySubmittingHdrPlusRequestLocked(
14970 HdrPlusPendingRequest *hdrPlusRequest, const camera3_capture_request_t &request,
14971 const CameraMetadata &metadata)
14972{
14973 if (hdrPlusRequest == nullptr) return false;
14974 if (!isRequestHdrPlusCompatible(request, metadata)) return false;
14975
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070014976 status_t res = OK;
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014977 pbcamera::CaptureRequest pbRequest;
14978 pbRequest.id = request.frame_number;
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070014979 // Iterate through all requested output buffers and add them to an HDR+ request.
14980 for (uint32_t i = 0; i < request.num_output_buffers; i++) {
14981 // Find the index of the stream in mStreamInfo.
14982 uint32_t pbStreamId = 0;
14983 bool found = false;
14984 for (auto streamInfo : mStreamInfo) {
14985 if (streamInfo->stream == request.output_buffers[i].stream) {
14986 pbStreamId = streamInfo->id;
14987 found = true;
14988 break;
14989 }
14990 }
14991
14992 if (!found) {
14993 ALOGE("%s: requested stream was not configured.", __FUNCTION__);
14994 abortPendingHdrplusRequest(hdrPlusRequest);
14995 return false;
14996 }
14997 auto outBuffer = std::make_shared<mm_camera_buf_def_t>();
14998 switch (request.output_buffers[i].stream->format) {
14999 case HAL_PIXEL_FORMAT_BLOB:
15000 {
15001 // For jpeg output, get a YUV buffer from pic channel.
15002 QCamera3PicChannel *picChannel =
15003 (QCamera3PicChannel*)request.output_buffers[i].stream->priv;
15004 res = picChannel->getYuvBufferForRequest(outBuffer.get(), request.frame_number);
15005 if (res != OK) {
15006 ALOGE("%s: Getting an available YUV buffer from pic channel failed: %s (%d)",
15007 __FUNCTION__, strerror(-res), res);
15008 abortPendingHdrplusRequest(hdrPlusRequest);
15009 return false;
15010 }
15011 break;
15012 }
15013 case HAL_PIXEL_FORMAT_YCbCr_420_888:
15014 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
15015 {
15016 // For YUV output, register the buffer and get the buffer def from the channel.
15017 QCamera3ProcessingChannel *channel =
15018 (QCamera3ProcessingChannel*)request.output_buffers[i].stream->priv;
15019 res = channel->registerBufferAndGetBufDef(request.output_buffers[i].buffer,
15020 outBuffer.get());
15021 if (res != OK) {
15022 ALOGE("%s: Getting the buffer def failed: %s (%d)", __FUNCTION__,
15023 strerror(-res), res);
15024 abortPendingHdrplusRequest(hdrPlusRequest);
15025 return false;
15026 }
15027 break;
15028 }
15029 default:
15030 abortPendingHdrplusRequest(hdrPlusRequest);
15031 return false;
15032 }
15033
15034 pbcamera::StreamBuffer buffer;
15035 buffer.streamId = pbStreamId;
15036 buffer.dmaBufFd = outBuffer->fd;
15037 buffer.data = outBuffer->fd == -1 ? outBuffer->buffer : nullptr;
15038 buffer.dataSize = outBuffer->frame_len;
15039
15040 pbRequest.outputBuffers.push_back(buffer);
15041
15042 hdrPlusRequest->outputBuffers.emplace(pbStreamId, outBuffer);
15043 hdrPlusRequest->frameworkOutputBuffers.emplace(pbStreamId, request.output_buffers[i]);
15044 }
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080015045
15046 // Submit an HDR+ capture request to HDR+ service.
Chien-Yu Chen17cec362017-07-05 17:10:31 -070015047 res = gHdrPlusClient->submitCaptureRequest(&pbRequest, metadata);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080015048 if (res != OK) {
15049 ALOGE("%s: %d: Submitting a capture request failed: %s (%d)", __FUNCTION__, __LINE__,
15050 strerror(-res), res);
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070015051 abortPendingHdrplusRequest(hdrPlusRequest);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080015052 return false;
15053 }
15054
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080015055 return true;
15056}
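// Illustrative caller flow (an assumption about how this is used, not a quote
// of the actual call site): with the relevant locks held by the caller, a
// still-capture request could be routed to HDR+ like this:
//
//     HdrPlusPendingRequest pendingHdrPlusRequest;
//     if (trySubmittingHdrPlusRequestLocked(&pendingHdrPlusRequest, *request, meta)) {
//         Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
//         mHdrPlusPendingRequests.emplace(request->frame_number, pendingHdrPlusRequest);
//     }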
15057
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070015058status_t QCamera3HardwareInterface::openHdrPlusClientAsyncLocked()
15059{
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015060 if (gHdrPlusClientOpening || gHdrPlusClient != nullptr) {
15061 return OK;
15062 }
15063
Chien-Yu Chend77a5462017-06-02 18:00:38 -070015064 status_t res = gEaselManagerClient->openHdrPlusClientAsync(this);
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015065 if (res != OK) {
15066 ALOGE("%s: Opening HDR+ client asynchronously failed: %s (%d)", __FUNCTION__,
15067 strerror(-res), res);
15068 return res;
15069 }
15070 gHdrPlusClientOpening = true;
15071
15072 return OK;
15073}
15074
Chien-Yu Chenee335912017-02-09 17:53:20 -080015075status_t QCamera3HardwareInterface::enableHdrPlusModeLocked()
15076{
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070015077 status_t res;
Chien-Yu Chenee335912017-02-09 17:53:20 -080015078
Chien-Yu Chena6c99062017-05-23 13:45:06 -070015079 if (mHdrPlusModeEnabled) {
15080 return OK;
15081 }
15082
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015083 // Check if gHdrPlusClient is opened or being opened.
15084 if (gHdrPlusClient == nullptr) {
15085 if (gHdrPlusClientOpening) {
15086 // HDR+ client is being opened. HDR+ mode will be enabled when it's opened.
15087 return OK;
15088 }
15089
15090 res = openHdrPlusClientAsyncLocked();
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070015091 if (res != OK) {
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015092 ALOGE("%s: Failed to open HDR+ client asynchronously: %s (%d).", __FUNCTION__,
15093 strerror(-res), res);
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070015094 return res;
15095 }
15096
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015097 // When opening HDR+ client completes, HDR+ mode will be enabled.
15098 return OK;
15099
Chien-Yu Chenee335912017-02-09 17:53:20 -080015100 }
15101
15102 // Configure stream for HDR+.
15103 res = configureHdrPlusStreamsLocked();
15104 if (res != OK) {
15105 LOGE("%s: Failed to configure HDR+ streams: %s (%d)", __FUNCTION__, strerror(-res), res);
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070015106 return res;
15107 }
15108
15109 // Enable HDR+ mode so Easel will start capturing ZSL raw buffers.
15110 res = gHdrPlusClient->setZslHdrPlusMode(true);
15111 if (res != OK) {
15112 LOGE("%s: Failed to enable HDR+ mode: %s (%d)", __FUNCTION__, strerror(-res), res);
Chien-Yu Chenee335912017-02-09 17:53:20 -080015113 return res;
15114 }
15115
15116 mHdrPlusModeEnabled = true;
15117 ALOGD("%s: HDR+ mode enabled", __FUNCTION__);
15118
15119 return OK;
15120}
15121
Chien-Yu Chen77ccd022017-06-23 12:00:36 -070015122void QCamera3HardwareInterface::finishHdrPlusClientOpeningLocked(std::unique_lock<std::mutex> &lock)
15123{
15124 if (gHdrPlusClientOpening) {
15125 gHdrPlusClientOpenCond.wait(lock, [&] { return !gHdrPlusClientOpening; });
15126 }
15127}
15128
Chien-Yu Chenee335912017-02-09 17:53:20 -080015129void QCamera3HardwareInterface::disableHdrPlusModeLocked()
15130{
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070015131 // Disable HDR+ mode.
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080015132 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070015133 status_t res = gHdrPlusClient->setZslHdrPlusMode(false);
15134 if (res != OK) {
15135 ALOGE("%s: Failed to disable HDR+ mode: %s (%d)", __FUNCTION__, strerror(-res), res);
15136 }
Chien-Yu Chen3b17c672017-04-24 12:49:52 -070015137
15138 // Close HDR+ client so Easel can enter low power mode.
Chien-Yu Chend77a5462017-06-02 18:00:38 -070015139 gEaselManagerClient->closeHdrPlusClient(std::move(gHdrPlusClient));
Chien-Yu Chen3b17c672017-04-24 12:49:52 -070015140 gHdrPlusClient = nullptr;
Chien-Yu Chenee335912017-02-09 17:53:20 -080015141 }
15142
15143 mHdrPlusModeEnabled = false;
15144 ALOGD("%s: HDR+ mode disabled", __FUNCTION__);
15145}
15146
Chien-Yu Chendeaebad2017-06-30 11:46:34 -070015147bool QCamera3HardwareInterface::isSessionHdrPlusModeCompatible()
15148{
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070015149 // Check that at least one YUV or one JPEG output is configured.
15150 // TODO: Support RAW (b/36690506)
15151 for (auto streamInfo : mStreamInfo) {
15152 if (streamInfo != nullptr && streamInfo->stream != nullptr) {
15153 if (streamInfo->stream->stream_type == CAMERA3_STREAM_OUTPUT &&
15154 (streamInfo->stream->format == HAL_PIXEL_FORMAT_BLOB ||
15155 streamInfo->stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888 ||
15156 streamInfo->stream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED)) {
15157 return true;
15158 }
15159 }
Chien-Yu Chendeaebad2017-06-30 11:46:34 -070015160 }
15161
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070015162 return false;
Chien-Yu Chendeaebad2017-06-30 11:46:34 -070015163}
15164
Chien-Yu Chenee335912017-02-09 17:53:20 -080015165status_t QCamera3HardwareInterface::configureHdrPlusStreamsLocked()
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080015166{
15167 pbcamera::InputConfiguration inputConfig;
15168 std::vector<pbcamera::StreamConfiguration> outputStreamConfigs;
15169 status_t res = OK;
15170
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070015171 // Sensor MIPI will send data to Easel.
15172 inputConfig.isSensorInput = true;
15173 inputConfig.sensorMode.cameraId = mCameraId;
15174 inputConfig.sensorMode.pixelArrayWidth = mSensorModeInfo.pixel_array_size.width;
15175 inputConfig.sensorMode.pixelArrayHeight = mSensorModeInfo.pixel_array_size.height;
15176 inputConfig.sensorMode.activeArrayWidth = mSensorModeInfo.active_array_size.width;
15177 inputConfig.sensorMode.activeArrayHeight = mSensorModeInfo.active_array_size.height;
15178 inputConfig.sensorMode.outputPixelClkHz = mSensorModeInfo.op_pixel_clk;
15179 inputConfig.sensorMode.timestampOffsetNs = mSensorModeInfo.timestamp_offset;
15180 if (mSensorModeInfo.num_raw_bits != 10) {
15181 ALOGE("%s: Only RAW10 is supported but this sensor mode has %d raw bits.", __FUNCTION__,
15182 mSensorModeInfo.num_raw_bits);
15183 return BAD_VALUE;
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080015184 }
15185
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070015186 inputConfig.sensorMode.format = HAL_PIXEL_FORMAT_RAW10;
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080015187
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070015188 // Iterate through configured output streams in HAL and configure those streams in HDR+
15189 // service.
15190 for (auto streamInfo : mStreamInfo) {
15191 pbcamera::StreamConfiguration outputConfig;
15192 if (streamInfo->stream->stream_type == CAMERA3_STREAM_OUTPUT) {
15193 switch (streamInfo->stream->format) {
15194 case HAL_PIXEL_FORMAT_BLOB:
15195 case HAL_PIXEL_FORMAT_YCbCr_420_888:
15196 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
15197 res = fillPbStreamConfig(&outputConfig, streamInfo->id,
15198 streamInfo->channel, /*stream index*/0);
15199 if (res != OK) {
15200                        LOGE("%s: Failed to fill stream config for YUV stream: %s (%d)",
15201 __FUNCTION__, strerror(-res), res);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080015202
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070015203 return res;
15204 }
15205
15206 outputStreamConfigs.push_back(outputConfig);
15207 break;
15208 default:
15209 // TODO: handle RAW16 outputs if mRawChannel was created. (b/36690506)
15210 break;
15211 }
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080015212 }
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080015213 }
15214
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080015215 res = gHdrPlusClient->configureStreams(inputConfig, outputStreamConfigs);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080015216 if (res != OK) {
15217        LOGE("%s: Failed to configure streams with HDR+ client: %s (%d)", __FUNCTION__,
15218 strerror(-res), res);
15219 return res;
15220 }
15221
15222 return OK;
15223}
15224
Chien-Yu Chene80574b2017-09-08 19:05:20 -070015225void QCamera3HardwareInterface::handleEaselFatalError()
Chien-Yu Chen90f1fc12017-07-14 14:31:53 -070015226{
Chien-Yu Chen90f1fc12017-07-14 14:31:53 -070015227 pthread_mutex_lock(&mMutex);
15228 mState = ERROR;
15229 pthread_mutex_unlock(&mMutex);
15230
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -070015231 handleCameraDeviceError(/*stopChannelImmediately*/true);
Chien-Yu Chen90f1fc12017-07-14 14:31:53 -070015232}
15233
Chien-Yu Chene80574b2017-09-08 19:05:20 -070015234void QCamera3HardwareInterface::handleEaselFatalErrorAsync()
15235{
15236 if (mEaselErrorFuture.valid()) {
15237        // An Easel fatal error handler has already been launched.
15238 return;
15239 }
15240
15241 // Launch a future to handle the fatal error.
15242 mEaselErrorFuture = std::async(std::launch::async,
15243 &QCamera3HardwareInterface::handleEaselFatalError, this);
15244}
15245
15246void QCamera3HardwareInterface::onEaselFatalError(std::string errMsg)
15247{
15248 ALOGE("%s: Got an Easel fatal error: %s", __FUNCTION__, errMsg.c_str());
15249 handleEaselFatalErrorAsync();
15250}
15251
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070015252void QCamera3HardwareInterface::onOpened(std::unique_ptr<HdrPlusClient> client)
15253{
Arnd Geis8cbfc182017-09-07 14:46:41 -070015254 int rc = NO_ERROR;
15255
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015256 if (client == nullptr) {
15257 ALOGE("%s: Opened client is null.", __FUNCTION__);
15258 return;
15259 }
15260
Chien-Yu Chene96475e2017-04-11 11:53:26 -070015261 logEaselEvent("EASEL_STARTUP_LATENCY", "HDR+ client opened.");
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015262 ALOGI("%s: HDR+ client opened.", __FUNCTION__);
15263
Chien-Yu Chen77ccd022017-06-23 12:00:36 -070015264 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070015265 if (!gHdrPlusClientOpening) {
15266 ALOGW("%s: HDR+ is disabled while HDR+ client is being opened.", __FUNCTION__);
15267 return;
15268 }
15269
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015270 gHdrPlusClient = std::move(client);
15271 gHdrPlusClientOpening = false;
Chien-Yu Chen77ccd022017-06-23 12:00:36 -070015272 gHdrPlusClientOpenCond.notify_one();
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015273
15274 // Set static metadata.
15275 status_t res = gHdrPlusClient->setStaticMetadata(*gStaticMetadata[mCameraId]);
15276 if (res != OK) {
15277 LOGE("%s: Failed to set static metadata in HDR+ client: %s (%d). Closing HDR+ client.",
15278 __FUNCTION__, strerror(-res), res);
Chien-Yu Chend77a5462017-06-02 18:00:38 -070015279 gEaselManagerClient->closeHdrPlusClient(std::move(gHdrPlusClient));
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015280 gHdrPlusClient = nullptr;
15281 return;
15282 }
15283
15284 // Enable HDR+ mode.
15285 res = enableHdrPlusModeLocked();
15286 if (res != OK) {
15287 LOGE("%s: Failed to configure HDR+ streams.", __FUNCTION__);
15288 }
Arnd Geis8cbfc182017-09-07 14:46:41 -070015289
15290 // Get Easel firmware version
15291 if (EaselManagerClientOpened) {
15292 rc = gEaselManagerClient->getFwVersion(mEaselFwVersion);
15293 if (rc != OK) {
15294 ALOGD("%s: Failed to query Easel firmware version", __FUNCTION__);
15295 } else {
15296 mEaselFwUpdated = true;
15297 }
15298 }
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015299}
15300
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070015301void QCamera3HardwareInterface::onOpenFailed(status_t err)
15302{
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015303 ALOGE("%s: Opening HDR+ client failed: %s (%d)", __FUNCTION__, strerror(-err), err);
Chien-Yu Chen77ccd022017-06-23 12:00:36 -070015304 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015305 gHdrPlusClientOpening = false;
Chien-Yu Chen77ccd022017-06-23 12:00:36 -070015306 gHdrPlusClientOpenCond.notify_one();
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015307}
15308
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070015309void QCamera3HardwareInterface::onFatalError()
15310{
Chien-Yu Chene80574b2017-09-08 19:05:20 -070015311 ALOGE("%s: HDR+ client encountered a fatal error.", __FUNCTION__);
15312 handleEaselFatalErrorAsync();
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070015313}
15314
Chien-Yu Chen29fd1d72017-04-27 18:42:09 -070015315void QCamera3HardwareInterface::onShutter(uint32_t requestId, int64_t apSensorTimestampNs)
15316{
15317 ALOGV("%s: %d: Received a shutter for HDR+ request %d timestamp %" PRId64, __FUNCTION__,
15318 __LINE__, requestId, apSensorTimestampNs);
15319
15320 mShutterDispatcher.markShutterReady(requestId, apSensorTimestampNs);
15321}
15322
Chien-Yu Chendaf68892017-08-25 12:56:40 -070015323void QCamera3HardwareInterface::onNextCaptureReady(uint32_t requestId)
15324{
15325 pthread_mutex_lock(&mMutex);
15326
15327 // Find the pending request for this result metadata.
15328 auto requestIter = mPendingRequestsList.begin();
15329 while (requestIter != mPendingRequestsList.end() && requestIter->frame_number != requestId) {
15330 requestIter++;
15331 }
15332
15333 if (requestIter == mPendingRequestsList.end()) {
15334 ALOGE("%s: Cannot find a pending request for frame number %u.", __FUNCTION__, requestId);
15335 pthread_mutex_unlock(&mMutex);
15336 return;
15337 }
15338
15339 requestIter->partial_result_cnt++;
15340
15341 CameraMetadata metadata;
15342 uint8_t ready = true;
15343 metadata.update(NEXUS_EXPERIMENTAL_2017_NEXT_STILL_INTENT_REQUEST_READY, &ready, 1);
15344
15345 // Send it to framework.
15346 camera3_capture_result_t result = {};
15347
15348 result.result = metadata.getAndLock();
15349 // Populate metadata result
15350 result.frame_number = requestId;
15351 result.num_output_buffers = 0;
15352 result.output_buffers = NULL;
15353 result.partial_result = requestIter->partial_result_cnt;
15354
15355 orchestrateResult(&result);
15356 metadata.unlock(result.result);
15357
15358 pthread_mutex_unlock(&mMutex);
15359}
15360
Chien-Yu Chen0a921f92017-08-27 17:25:33 -070015361void QCamera3HardwareInterface::onPostview(uint32_t requestId,
15362 std::unique_ptr<std::vector<uint8_t>> postview, uint32_t width, uint32_t height,
15363 uint32_t stride, int32_t format)
15364{
15365 if (property_get_bool("persist.camera.hdrplus.dump_postview", false)) {
15366 ALOGI("%s: %d: Received a postview %dx%d for HDR+ request %d", __FUNCTION__,
15367 __LINE__, width, height, requestId);
15368 char buf[FILENAME_MAX] = {};
15369 snprintf(buf, sizeof(buf), QCAMERA_DUMP_FRM_LOCATION"postview_%d_%dx%d.ppm",
15370 requestId, width, height);
15371
15372 pbcamera::StreamConfiguration config = {};
15373 config.image.width = width;
15374 config.image.height = height;
15375 config.image.format = format;
15376
15377 pbcamera::PlaneConfiguration plane = {};
15378 plane.stride = stride;
15379 plane.scanline = height;
15380
15381 config.image.planes.push_back(plane);
15382
15383 pbcamera::StreamBuffer buffer = {};
15384 buffer.streamId = 0;
15385 buffer.dmaBufFd = -1;
15386 buffer.data = postview->data();
15387 buffer.dataSize = postview->size();
15388
15389 hdrplus_client_utils::writePpm(buf, config, buffer);
15390 }
15391
15392 pthread_mutex_lock(&mMutex);
15393
15394 // Find the pending request for this result metadata.
15395 auto requestIter = mPendingRequestsList.begin();
15396 while (requestIter != mPendingRequestsList.end() && requestIter->frame_number != requestId) {
15397 requestIter++;
15398 }
15399
15400 if (requestIter == mPendingRequestsList.end()) {
15401 ALOGE("%s: Cannot find a pending request for frame number %u.", __FUNCTION__, requestId);
15402 pthread_mutex_unlock(&mMutex);
15403 return;
15404 }
15405
15406 requestIter->partial_result_cnt++;
15407
15408 CameraMetadata metadata;
15409 int32_t config[3] = {static_cast<int32_t>(width), static_cast<int32_t>(height),
15410 static_cast<int32_t>(stride)};
15411 metadata.update(NEXUS_EXPERIMENTAL_2017_POSTVIEW_CONFIG, config, 3);
15412 metadata.update(NEXUS_EXPERIMENTAL_2017_POSTVIEW_DATA, postview->data(), postview->size());
15413
15414 // Send it to framework.
15415 camera3_capture_result_t result = {};
15416
15417 result.result = metadata.getAndLock();
15418 // Populate metadata result
15419 result.frame_number = requestId;
15420 result.num_output_buffers = 0;
15421 result.output_buffers = NULL;
15422 result.partial_result = requestIter->partial_result_cnt;
15423
15424 orchestrateResult(&result);
15425 metadata.unlock(result.result);
15426
15427 pthread_mutex_unlock(&mMutex);
15428}
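// Developer note (illustrative): the postview dump above is gated by a system
// property and writes a PPM file into QCAMERA_DUMP_FRM_LOCATION, e.g.
//
//     adb shell setprop persist.camera.hdrplus.dump_postview 1
//
// A similar property (persist.camera.hdrplus.dump_yuv) gates the HDR+ YUV
// output dump in onCaptureResult() below.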
15429
Chien-Yu Chene687bd02016-12-07 18:30:26 -080015430void QCamera3HardwareInterface::onCaptureResult(pbcamera::CaptureResult *result,
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070015431 const camera_metadata_t &resultMetadata)
15432{
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070015433 if (result == nullptr) {
15434 ALOGE("%s: result is nullptr.", __FUNCTION__);
15435 return;
15436 }
15437
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070015438 // Find the pending HDR+ request.
15439 HdrPlusPendingRequest pendingRequest;
15440 {
15441 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
15442 auto req = mHdrPlusPendingRequests.find(result->requestId);
15443 pendingRequest = req->second;
15444 }
15445
15446 // Update the result metadata with the settings of the HDR+ still capture request because
15447 // the result metadata belongs to a ZSL buffer.
15448 CameraMetadata metadata;
15449 metadata = &resultMetadata;
15450 updateHdrPlusResultMetadata(metadata, pendingRequest.settings);
15451 camera_metadata_t* updatedResultMetadata = metadata.release();
15452
15453 uint32_t halSnapshotStreamId = 0;
15454 if (mPictureChannel != nullptr) {
15455 halSnapshotStreamId = mPictureChannel->getStreamID(mPictureChannel->getStreamTypeMask());
15456 }
15457
15458 auto halMetadata = std::make_shared<metadata_buffer_t>();
15459 clear_metadata_buffer(halMetadata.get());
15460
15461 // Convert updated result metadata to HAL metadata.
15462 status_t res = translateFwkMetadataToHalMetadata(updatedResultMetadata, halMetadata.get(),
15463 halSnapshotStreamId, /*minFrameDuration*/0);
15464 if (res != 0) {
15465 ALOGE("%s: Translating metadata failed: %s (%d)", __FUNCTION__, strerror(-res), res);
15466 }
15467
15468 for (auto &outputBuffer : result->outputBuffers) {
15469 uint32_t streamId = outputBuffer.streamId;
15470
15471 // Find the framework output buffer in the pending request.
15472 auto frameworkOutputBufferIter = pendingRequest.frameworkOutputBuffers.find(streamId);
15473 if (frameworkOutputBufferIter == pendingRequest.frameworkOutputBuffers.end()) {
15474 ALOGE("%s: Couldn't find framework output buffers for stream id %u", __FUNCTION__,
15475 streamId);
15476 continue;
Chien-Yu Chene687bd02016-12-07 18:30:26 -080015477 }
15478
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070015479 camera3_stream_buffer_t *frameworkOutputBuffer = &frameworkOutputBufferIter->second;
15480
15481 // Find the channel for the output buffer.
15482 QCamera3ProcessingChannel *channel =
15483 (QCamera3ProcessingChannel*)frameworkOutputBuffer->stream->priv;
15484
15485 // Find the output buffer def.
15486 auto outputBufferIter = pendingRequest.outputBuffers.find(streamId);
15487 if (outputBufferIter == pendingRequest.outputBuffers.end()) {
15488 ALOGE("%s: Cannot find output buffer", __FUNCTION__);
15489 continue;
Chien-Yu Chene687bd02016-12-07 18:30:26 -080015490 }
15491
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070015492 std::shared_ptr<mm_camera_buf_def_t> outputBufferDef = outputBufferIter->second;
Chien-Yu Chendaf68892017-08-25 12:56:40 -070015493
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070015494 // Check whether to dump the buffer.
15495 if (frameworkOutputBuffer->stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888 ||
15496 frameworkOutputBuffer->stream->format == HAL_PIXEL_FORMAT_BLOB) {
15497 // If the stream format is YUV or jpeg, check if dumping HDR+ YUV output is enabled.
15498 char prop[PROPERTY_VALUE_MAX];
15499 property_get("persist.camera.hdrplus.dump_yuv", prop, "0");
15500 bool dumpYuvOutput = atoi(prop);
Chien-Yu Chene687bd02016-12-07 18:30:26 -080015501
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070015502 if (dumpYuvOutput) {
15503 // Dump yuv buffer to a ppm file.
15504 pbcamera::StreamConfiguration outputConfig;
15505 status_t rc = fillPbStreamConfig(&outputConfig, streamId,
15506 channel, /*stream index*/0);
15507 if (rc == OK) {
15508 char buf[FILENAME_MAX] = {};
15509 snprintf(buf, sizeof(buf), QCAMERA_DUMP_FRM_LOCATION"s_%d_%d_%dx%d.ppm",
15510 result->requestId, streamId,
15511 outputConfig.image.width, outputConfig.image.height);
Chien-Yu Chen92724a82017-01-06 11:50:30 -080015512
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070015513 hdrplus_client_utils::writePpm(buf, outputConfig, outputBuffer);
15514 } else {
15515 LOGW("%s: Couldn't dump YUV buffer because getting stream config failed: "
15516 "%s (%d).", __FUNCTION__, strerror(-rc), rc);
15517 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -080015518 }
15519 }
15520
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070015521 if (channel == mPictureChannel) {
Chien-Yu Chen92724a82017-01-06 11:50:30 -080015522 // Return the buffer to pic channel for encoding.
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070015523 mPictureChannel->returnYuvBufferAndEncode(outputBufferDef.get(),
15524 frameworkOutputBuffer->buffer, result->requestId,
Chien-Yu Chen92724a82017-01-06 11:50:30 -080015525 halMetadata);
15526 } else {
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070015527 // Return the buffer to camera framework.
Chien-Yu Chen92724a82017-01-06 11:50:30 -080015528 pthread_mutex_lock(&mMutex);
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070015529 handleBufferWithLock(frameworkOutputBuffer, result->requestId);
Chien-Yu Chen92724a82017-01-06 11:50:30 -080015530 pthread_mutex_unlock(&mMutex);
Chien-Yu Chene687bd02016-12-07 18:30:26 -080015531
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070015532 channel->unregisterBuffer(outputBufferDef.get());
Chien-Yu Chene687bd02016-12-07 18:30:26 -080015533 }
15534 }
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070015535
15536 // Send HDR+ metadata to framework.
15537 {
15538 pthread_mutex_lock(&mMutex);
15539
15540 // updatedResultMetadata will be freed in handlePendingResultMetadataWithLock.
15541 handlePendingResultMetadataWithLock(result->requestId, updatedResultMetadata);
15542 pthread_mutex_unlock(&mMutex);
15543 }
15544
15545 // Remove the HDR+ pending request.
15546 {
15547 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
15548 auto req = mHdrPlusPendingRequests.find(result->requestId);
15549 mHdrPlusPendingRequests.erase(req);
15550 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070015551}
15552
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070015553void QCamera3HardwareInterface::onFailedCaptureResult(pbcamera::CaptureResult *failedResult)
15554{
15555 if (failedResult == nullptr) {
15556 ALOGE("%s: Got an empty failed result.", __FUNCTION__);
15557 return;
15558 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -080015559
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070015560 ALOGE("%s: Got a failed HDR+ result for request %d", __FUNCTION__, failedResult->requestId);
Chien-Yu Chene687bd02016-12-07 18:30:26 -080015561
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070015562 // Find the pending HDR+ request.
15563 HdrPlusPendingRequest pendingRequest;
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070015564 {
15565 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070015566 auto req = mHdrPlusPendingRequests.find(failedResult->requestId);
15567 if (req == mHdrPlusPendingRequests.end()) {
15568 ALOGE("%s: Couldn't find pending request %d", __FUNCTION__, failedResult->requestId);
15569 return;
15570 }
15571 pendingRequest = req->second;
15572 }
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070015573
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070015574 for (auto &outputBuffer : failedResult->outputBuffers) {
15575 uint32_t streamId = outputBuffer.streamId;
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070015576
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070015577        // Find the framework output buffer for this stream in the pending request.
15579 auto frameworkOutputBufferIter = pendingRequest.frameworkOutputBuffers.find(streamId);
15580 if (frameworkOutputBufferIter == pendingRequest.frameworkOutputBuffers.end()) {
15581 ALOGE("%s: Couldn't find framework output buffers for stream id %u", __FUNCTION__,
15582 streamId);
15583 continue;
15584 }
15585
15586 camera3_stream_buffer_t *frameworkOutputBuffer = &frameworkOutputBufferIter->second;
15587
15588 // Find the channel for the output buffer.
15589 QCamera3ProcessingChannel *channel =
15590 (QCamera3ProcessingChannel*)frameworkOutputBuffer->stream->priv;
15591
15592 // Find the output buffer def.
15593 auto outputBufferIter = pendingRequest.outputBuffers.find(streamId);
15594 if (outputBufferIter == pendingRequest.outputBuffers.end()) {
15595 ALOGE("%s: Cannot find output buffer", __FUNCTION__);
15596 continue;
15597 }
15598
15599 std::shared_ptr<mm_camera_buf_def_t> outputBufferDef = outputBufferIter->second;
15600
15601 if (channel == mPictureChannel) {
15602 // Return the buffer to pic channel.
15603 mPictureChannel->returnYuvBuffer(outputBufferDef.get());
15604 } else {
15605 channel->unregisterBuffer(outputBufferDef.get());
15606 }
15607 }
15608
15609 // Remove the HDR+ pending request.
15610 {
15611 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
15612 auto req = mHdrPlusPendingRequests.find(failedResult->requestId);
15613 mHdrPlusPendingRequests.erase(req);
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070015614 }
15615
15616 pthread_mutex_lock(&mMutex);
15617
15618 // Find the pending buffers.
15619 auto pendingBuffers = mPendingBuffersMap.mPendingBuffersInRequest.begin();
15620 while (pendingBuffers != mPendingBuffersMap.mPendingBuffersInRequest.end()) {
15621 if (pendingBuffers->frame_number == failedResult->requestId) {
15622 break;
15623 }
15624 pendingBuffers++;
15625 }
15626
15627 // Send out buffer errors for the pending buffers.
15628 if (pendingBuffers != mPendingBuffersMap.mPendingBuffersInRequest.end()) {
15629 std::vector<camera3_stream_buffer_t> streamBuffers;
15630 for (auto &buffer : pendingBuffers->mPendingBufferList) {
15631 // Prepare a stream buffer.
15632 camera3_stream_buffer_t streamBuffer = {};
15633 streamBuffer.stream = buffer.stream;
15634 streamBuffer.buffer = buffer.buffer;
15635 streamBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
15636 streamBuffer.acquire_fence = -1;
15637 streamBuffer.release_fence = -1;
15638
15639 streamBuffers.push_back(streamBuffer);
15640
15641 // Send out error buffer event.
15642 camera3_notify_msg_t notify_msg = {};
15643 notify_msg.type = CAMERA3_MSG_ERROR;
15644 notify_msg.message.error.frame_number = pendingBuffers->frame_number;
15645 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
15646 notify_msg.message.error.error_stream = buffer.stream;
15647
15648 orchestrateNotify(&notify_msg);
15649 }
15650
15651 camera3_capture_result_t result = {};
15652 result.frame_number = pendingBuffers->frame_number;
15653 result.num_output_buffers = streamBuffers.size();
15654        result.output_buffers = streamBuffers.data();
15655
15656 // Send out result with buffer errors.
15657 orchestrateResult(&result);
15658
15659 // Remove pending buffers.
15660 mPendingBuffersMap.mPendingBuffersInRequest.erase(pendingBuffers);
15661 }
15662
15663 // Remove pending request.
15664 auto halRequest = mPendingRequestsList.begin();
15665 while (halRequest != mPendingRequestsList.end()) {
15666 if (halRequest->frame_number == failedResult->requestId) {
15667 mPendingRequestsList.erase(halRequest);
15668 break;
15669 }
15670 halRequest++;
15671 }
15672
15673 pthread_mutex_unlock(&mMutex);
Chien-Yu Chen8e599492016-11-01 13:37:46 -070015674}
15675
Chien-Yu Chen3f303522017-05-19 15:21:45 -070015676
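// ShutterDispatcher keeps one pending shutter entry per expected frame (regular
// and reprocess frames are tracked in separate maps) and delivers
// CAMERA3_MSG_SHUTTER notifications to the framework in frame-number order:
// a shutter that becomes ready is held back until all earlier expected shutters
// in the same map have been sent.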
15677ShutterDispatcher::ShutterDispatcher(QCamera3HardwareInterface *parent) :
15678 mParent(parent) {}
15679
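// Register a not-yet-ready shutter entry for this frame so that a later
// markShutterReady() can be dispatched in order.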
Chien-Yu Chena7f98612017-06-20 16:54:10 -070015680void ShutterDispatcher::expectShutter(uint32_t frameNumber, bool isReprocess)
Chien-Yu Chen3f303522017-05-19 15:21:45 -070015681{
15682 std::lock_guard<std::mutex> lock(mLock);
Chien-Yu Chena7f98612017-06-20 16:54:10 -070015683
15684 if (isReprocess) {
15685 mReprocessShutters.emplace(frameNumber, Shutter());
15686 } else {
15687 mShutters.emplace(frameNumber, Shutter());
15688 }
Chien-Yu Chen3f303522017-05-19 15:21:45 -070015689}
15690
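// Record the timestamp for this frame's shutter, then flush every leading ready
// shutter (regular or reprocess, depending on where the frame was registered)
// to the framework.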
15691void ShutterDispatcher::markShutterReady(uint32_t frameNumber, uint64_t timestamp)
15692{
15693 std::lock_guard<std::mutex> lock(mLock);
15694
Chien-Yu Chena7f98612017-06-20 16:54:10 -070015695 std::map<uint32_t, Shutter> *shutters = nullptr;
15696
15697 // Find the shutter entry.
Chien-Yu Chen3f303522017-05-19 15:21:45 -070015698 auto shutter = mShutters.find(frameNumber);
15699 if (shutter == mShutters.end()) {
Chien-Yu Chena7f98612017-06-20 16:54:10 -070015700 shutter = mReprocessShutters.find(frameNumber);
15701 if (shutter == mReprocessShutters.end()) {
15702 // Shutter was already sent.
15703 return;
15704 }
15705 shutters = &mReprocessShutters;
15706 } else {
15707 shutters = &mShutters;
Chien-Yu Chen3f303522017-05-19 15:21:45 -070015708 }
15709
Chien-Yu Chena7f98612017-06-20 16:54:10 -070015710 // Make this frame's shutter ready.
Chien-Yu Chen3f303522017-05-19 15:21:45 -070015711 shutter->second.ready = true;
15712 shutter->second.timestamp = timestamp;
15713
15714    // Iterate through the shutters and send them out, stopping at the first one that is not ready yet.
Chien-Yu Chena7f98612017-06-20 16:54:10 -070015715 shutter = shutters->begin();
15716 while (shutter != shutters->end()) {
Chien-Yu Chen3f303522017-05-19 15:21:45 -070015717 if (!shutter->second.ready) {
15718 // If this shutter is not ready, the following shutters can't be sent.
15719 break;
15720 }
15721
15722 camera3_notify_msg_t msg = {};
15723 msg.type = CAMERA3_MSG_SHUTTER;
15724 msg.message.shutter.frame_number = shutter->first;
15725 msg.message.shutter.timestamp = shutter->second.timestamp;
15726 mParent->orchestrateNotify(&msg);
15727
Chien-Yu Chena7f98612017-06-20 16:54:10 -070015728 shutter = shutters->erase(shutter);
Chien-Yu Chen3f303522017-05-19 15:21:45 -070015729 }
15730}
15731
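// Drop any pending shutter entries for a single frame number.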
15732void ShutterDispatcher::clear(uint32_t frameNumber)
15733{
15734 std::lock_guard<std::mutex> lock(mLock);
15735 mShutters.erase(frameNumber);
Chien-Yu Chena7f98612017-06-20 16:54:10 -070015736 mReprocessShutters.erase(frameNumber);
Chien-Yu Chen3f303522017-05-19 15:21:45 -070015737}
15738
15739void ShutterDispatcher::clear()
15740{
15741 std::lock_guard<std::mutex> lock(mLock);
15742
15743 // Log errors for stale shutters.
15744 for (auto &shutter : mShutters) {
15745 ALOGE("%s: stale shutter: frame number %u, ready %d, timestamp %" PRId64,
15746 __FUNCTION__, shutter.first, shutter.second.ready,
15747 shutter.second.timestamp);
15748 }
Chien-Yu Chena7f98612017-06-20 16:54:10 -070015749
15750 // Log errors for stale reprocess shutters.
15751 for (auto &shutter : mReprocessShutters) {
15752 ALOGE("%s: stale reprocess shutter: frame number %u, ready %d, timestamp %" PRId64,
15753 __FUNCTION__, shutter.first, shutter.second.ready,
15754 shutter.second.timestamp);
15755 }
15756
Chien-Yu Chen3f303522017-05-19 15:21:45 -070015757 mShutters.clear();
Chien-Yu Chena7f98612017-06-20 16:54:10 -070015758 mReprocessShutters.clear();
Chien-Yu Chen3f303522017-05-19 15:21:45 -070015759}
15760
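// OutputBufferDispatcher tracks, for every configured stream, the output buffer
// expected for each frame number and returns buffers to the framework in
// frame-number order per stream: a buffer that becomes ready is held back until
// all earlier expected buffers of the same stream have been sent.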
15761OutputBufferDispatcher::OutputBufferDispatcher(QCamera3HardwareInterface *parent) :
15762 mParent(parent) {}
15763
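// Reset the dispatcher for a new stream configuration by creating an empty
// frame-number -> buffer map for every stream in streamList.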
15764status_t OutputBufferDispatcher::configureStreams(camera3_stream_configuration_t *streamList)
15765{
15766 std::lock_guard<std::mutex> lock(mLock);
15767 mStreamBuffers.clear();
15768 if (!streamList) {
15769 ALOGE("%s: streamList is nullptr.", __FUNCTION__);
15770 return -EINVAL;
15771 }
15772
15773 // Create a "frame-number -> buffer" map for each stream.
15774 for (uint32_t i = 0; i < streamList->num_streams; i++) {
15775 mStreamBuffers.emplace(streamList->streams[i], std::map<uint32_t, Buffer>());
15776 }
15777
15778 return OK;
15779}
15780
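// Register a not-yet-ready buffer entry for this frame number on the given
// stream; markBufferReady() fills it in once the buffer is available.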
15781status_t OutputBufferDispatcher::expectBuffer(uint32_t frameNumber, camera3_stream_t *stream)
15782{
15783 std::lock_guard<std::mutex> lock(mLock);
15784
15785 // Find the "frame-number -> buffer" map for the stream.
15786 auto buffers = mStreamBuffers.find(stream);
15787 if (buffers == mStreamBuffers.end()) {
15788 ALOGE("%s: Stream %p was not configured.", __FUNCTION__, stream);
15789 return -EINVAL;
15790 }
15791
15792 // Create an unready buffer for this frame number.
15793 buffers->second.emplace(frameNumber, Buffer());
15794 return OK;
15795}
15796
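// Mark this frame's buffer as ready for its stream, then flush every leading
// ready buffer of that stream to the framework as a capture result.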
15797void OutputBufferDispatcher::markBufferReady(uint32_t frameNumber,
15798 const camera3_stream_buffer_t &buffer)
15799{
15800 std::lock_guard<std::mutex> lock(mLock);
15801
15802 // Find the frame number -> buffer map for the stream.
15803 auto buffers = mStreamBuffers.find(buffer.stream);
15804 if (buffers == mStreamBuffers.end()) {
15805 ALOGE("%s: Cannot find pending buffers for stream %p.", __FUNCTION__, buffer.stream);
15806 return;
15807 }
15808
15809    // Find the unready buffer for this frame number and mark it ready.
15810 auto pendingBuffer = buffers->second.find(frameNumber);
15811 if (pendingBuffer == buffers->second.end()) {
15812 ALOGE("%s: Cannot find the pending buffer for frame number %u.", __FUNCTION__, frameNumber);
15813 return;
15814 }
15815
15816 pendingBuffer->second.ready = true;
15817 pendingBuffer->second.buffer = buffer;
15818
15819    // Iterate through the buffers and send them out, stopping at the first one that is not ready yet.
15820 pendingBuffer = buffers->second.begin();
15821 while (pendingBuffer != buffers->second.end()) {
15822 if (!pendingBuffer->second.ready) {
15823 // If this buffer is not ready, the following buffers can't be sent.
15824 break;
15825 }
15826
15827 camera3_capture_result_t result = {};
15828 result.frame_number = pendingBuffer->first;
15829 result.num_output_buffers = 1;
15830 result.output_buffers = &pendingBuffer->second.buffer;
15831
15832        // Send out the result with the ready buffer.
15833 mParent->orchestrateResult(&result);
15834
15835 pendingBuffer = buffers->second.erase(pendingBuffer);
15836 }
15837}
15838
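// Drop all pending buffer entries, logging any that were registered but never
// returned; optionally forget the configured streams as well.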
15839void OutputBufferDispatcher::clear(bool clearConfiguredStreams)
15840{
15841 std::lock_guard<std::mutex> lock(mLock);
15842
15843 // Log errors for stale buffers.
15844 for (auto &buffers : mStreamBuffers) {
15845 for (auto &buffer : buffers.second) {
15846 ALOGE("%s: stale buffer: stream %p, frame number %u, ready %d",
15847 __FUNCTION__, buffers.first, buffer.first, buffer.second.ready);
15848 }
15849 buffers.second.clear();
15850 }
15851
15852 if (clearConfiguredStreams) {
15853 mStreamBuffers.clear();
15854 }
15855}
15856
Thierry Strudel3d639192016-09-09 11:52:26 -070015857}; //end namespace qcamera