/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*     * Redistributions of source code must retain the above copyright
*       notice, this list of conditions and the following disclaimer.
*     * Redistributions in binary form must reproduce the above
*       copyright notice, this list of conditions and the following
*       disclaimer in the documentation and/or other materials provided
*       with the distribution.
*     * Neither the name of The Linux Foundation nor the names of its
*       contributors may be used to endorse or promote products derived
*       from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
*/

#define LOG_TAG "QCamera3HWI"
//#define LOG_NDEBUG 0

#define __STDC_LIMIT_MACROS

// To remove
#include <cutils/properties.h>

// System dependencies
#include <dlfcn.h>
#include <fcntl.h>
#include <stdio.h>
#include <stdlib.h>
#include "utils/Timers.h"
#include "sys/ioctl.h"
#include <time.h>
#include <sync/sync.h>
#include "gralloc_priv.h"
#include <map>

// Display dependencies
#include "qdMetaData.h"

// Camera dependencies
#include "android/QCamera3External.h"
#include "util/QCameraFlash.h"
#include "QCamera3HWI.h"
#include "QCamera3VendorTags.h"
#include "QCameraTrace.h"

#include "HdrPlusClientUtils.h"

extern "C" {
#include "mm_camera_dbg.h"
}
#include "cam_cond.h"

using ::android::hardware::camera::common::V1_0::helper::CameraMetadata;
using namespace android;

namespace qcamera {

#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )

#define EMPTY_PIPELINE_DELAY 2
// mm_camera has 2 partial results: 3A, and final result.
// HDR+ requests have 3 partial results: postview, next request ready, and final result.
#define PARTIAL_RESULT_COUNT 3
#define FRAME_SKIP_DELAY 0

#define MAX_VALUE_8BIT ((1<<8)-1)
#define MAX_VALUE_10BIT ((1<<10)-1)
#define MAX_VALUE_12BIT ((1<<12)-1)

#define VIDEO_4K_WIDTH  3840
#define VIDEO_4K_HEIGHT 2160

#define MAX_EIS_WIDTH 3840
#define MAX_EIS_HEIGHT 2160

#define MAX_RAW_STREAMS 1
#define MAX_STALLING_STREAMS 1
#define MAX_PROCESSED_STREAMS 3
/* Batch mode is enabled only if FPS set is equal to or greater than this */
#define MIN_FPS_FOR_BATCH_MODE (120)
#define PREVIEW_FPS_FOR_HFR (30)
#define DEFAULT_VIDEO_FPS (30.0)
#define TEMPLATE_MAX_PREVIEW_FPS (30.0)
#define MAX_HFR_BATCH_SIZE (8)
#define REGIONS_TUPLE_COUNT 5
// Set a threshold for detection of missing buffers //seconds
#define MISSING_REQUEST_BUF_TIMEOUT 5
#define MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT 30
#define FLUSH_TIMEOUT 3
#define METADATA_MAP_SIZE(MAP) (sizeof(MAP)/sizeof(MAP[0]))

#define CAM_QCOM_FEATURE_PP_SUPERSET_HAL3 ( CAM_QCOM_FEATURE_DENOISE2D |\
                                            CAM_QCOM_FEATURE_CROP |\
                                            CAM_QCOM_FEATURE_ROTATION |\
                                            CAM_QCOM_FEATURE_SHARPNESS |\
                                            CAM_QCOM_FEATURE_SCALE |\
                                            CAM_QCOM_FEATURE_CAC |\
                                            CAM_QCOM_FEATURE_CDS )
/* Per configuration size for static metadata length*/
#define PER_CONFIGURATION_SIZE_3 (3)

#define TIMEOUT_NEVER -1

/* Face rect indices */
#define FACE_LEFT   0
#define FACE_TOP    1
#define FACE_RIGHT  2
#define FACE_BOTTOM 3
#define FACE_WEIGHT 4

/* Face landmarks indices */
#define LEFT_EYE_X  0
#define LEFT_EYE_Y  1
#define RIGHT_EYE_X 2
#define RIGHT_EYE_Y 3
#define MOUTH_X     4
#define MOUTH_Y     5
#define TOTAL_LANDMARK_INDICES 6

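// Illustrative sketch (not part of the original HAL code): the landmark indices
// above address positions in the flat int32_t array reported for
// ANDROID_STATISTICS_FACE_LANDMARKS. packFaceLandmarks() below is a hypothetical
// helper showing how such an array is filled; the HAL's actual conversion is
// performed elsewhere.
#if 0
static void packFaceLandmarks(int32_t leftEyeX, int32_t leftEyeY,
        int32_t rightEyeX, int32_t rightEyeY, int32_t mouthX, int32_t mouthY,
        int32_t out[TOTAL_LANDMARK_INDICES]) {
    out[LEFT_EYE_X]  = leftEyeX;
    out[LEFT_EYE_Y]  = leftEyeY;
    out[RIGHT_EYE_X] = rightEyeX;
    out[RIGHT_EYE_Y] = rightEyeY;
    out[MOUTH_X]     = mouthX;
    out[MOUTH_Y]     = mouthY;
}
#endif
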
// Max preferred zoom
#define MAX_PREFERRED_ZOOM_RATIO 7.0

// Whether to check for the GPU stride padding, or use the default
//#define CHECK_GPU_PIXEL_ALIGNMENT

cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS];
extern pthread_mutex_t gCamLock;
volatile uint32_t gCamHal3LogLevel = 1;
extern uint8_t gNumCameraSessions;

// Note that this doesn't support concurrent front and back camera b/35960155.
// The following Easel related variables must be protected by gHdrPlusClientLock.
std::unique_ptr<EaselManagerClient> gEaselManagerClient;
bool EaselManagerClientOpened = false; // If gEaselManagerClient is opened.
std::unique_ptr<HdrPlusClient> gHdrPlusClient = nullptr;
bool gHdrPlusClientOpening = false; // If HDR+ client is being opened.
std::condition_variable gHdrPlusClientOpenCond; // Used to synchronize HDR+ client opening.
bool gEaselProfilingEnabled = false; // If Easel profiling is enabled.
bool gExposeEnableZslKey = false; // If HAL makes android.control.enableZsl available.
bool gEnableMultipleHdrplusOutputs = false; // Whether to enable multiple outputs from Easel HDR+.

// If Easel is in bypass-only mode. If true, Easel HDR+ won't be enabled.
bool gEaselBypassOnly;

std::mutex gHdrPlusClientLock; // Protect above Easel related variables.

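// Illustrative sketch (not part of the original HAL code): the Easel/HDR+ globals
// above must only be read or written with gHdrPlusClientLock held, following the
// same pattern used later in this file (e.g. in openCamera() and the destructor).
#if 0
static void exampleHdrPlusClientAccess() {
    std::unique_lock<std::mutex> l(gHdrPlusClientLock);
    if (gHdrPlusClient != nullptr) {
        // Safe to use gHdrPlusClient and the other Easel globals here.
    }
}
#endif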

const QCamera3HardwareInterface::QCameraPropMap QCamera3HardwareInterface::CDS_MAP [] = {
    {"On",  CAM_CDS_MODE_ON},
    {"Off", CAM_CDS_MODE_OFF},
    {"Auto",CAM_CDS_MODE_AUTO}
};
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_video_hdr_mode_t,
        cam_video_hdr_mode_t> QCamera3HardwareInterface::VIDEO_HDR_MODES_MAP[] = {
    { QCAMERA3_VIDEO_HDR_MODE_OFF, CAM_VIDEO_HDR_MODE_OFF },
    { QCAMERA3_VIDEO_HDR_MODE_ON,  CAM_VIDEO_HDR_MODE_ON }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_binning_correction_mode_t,
        cam_binning_correction_mode_t> QCamera3HardwareInterface::BINNING_CORRECTION_MODES_MAP[] = {
    { QCAMERA3_BINNING_CORRECTION_MODE_OFF, CAM_BINNING_CORRECTION_MODE_OFF },
    { QCAMERA3_BINNING_CORRECTION_MODE_ON,  CAM_BINNING_CORRECTION_MODE_ON }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_ir_mode_t,
        cam_ir_mode_type_t> QCamera3HardwareInterface::IR_MODES_MAP [] = {
    {QCAMERA3_IR_MODE_OFF,  CAM_IR_MODE_OFF},
    {QCAMERA3_IR_MODE_ON,   CAM_IR_MODE_ON},
    {QCAMERA3_IR_MODE_AUTO, CAM_IR_MODE_AUTO}
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_effect_mode_t,
        cam_effect_mode_type> QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
    { ANDROID_CONTROL_EFFECT_MODE_OFF,        CAM_EFFECT_MODE_OFF },
    { ANDROID_CONTROL_EFFECT_MODE_MONO,       CAM_EFFECT_MODE_MONO },
    { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE,   CAM_EFFECT_MODE_NEGATIVE },
    { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE,   CAM_EFFECT_MODE_SOLARIZE },
    { ANDROID_CONTROL_EFFECT_MODE_SEPIA,      CAM_EFFECT_MODE_SEPIA },
    { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE,  CAM_EFFECT_MODE_POSTERIZE },
    { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_AQUA,       CAM_EFFECT_MODE_AQUA }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_awb_mode_t,
        cam_wb_mode_type> QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
    { ANDROID_CONTROL_AWB_MODE_OFF,             CAM_WB_MODE_OFF },
    { ANDROID_CONTROL_AWB_MODE_AUTO,            CAM_WB_MODE_AUTO },
    { ANDROID_CONTROL_AWB_MODE_INCANDESCENT,    CAM_WB_MODE_INCANDESCENT },
    { ANDROID_CONTROL_AWB_MODE_FLUORESCENT,     CAM_WB_MODE_FLUORESCENT },
    { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT,CAM_WB_MODE_WARM_FLUORESCENT},
    { ANDROID_CONTROL_AWB_MODE_DAYLIGHT,        CAM_WB_MODE_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_TWILIGHT,        CAM_WB_MODE_TWILIGHT },
    { ANDROID_CONTROL_AWB_MODE_SHADE,           CAM_WB_MODE_SHADE }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_scene_mode_t,
        cam_scene_mode_type> QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
    { ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY,  CAM_SCENE_MODE_FACE_PRIORITY },
    { ANDROID_CONTROL_SCENE_MODE_ACTION,         CAM_SCENE_MODE_ACTION },
    { ANDROID_CONTROL_SCENE_MODE_PORTRAIT,       CAM_SCENE_MODE_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE,      CAM_SCENE_MODE_LANDSCAPE },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT,          CAM_SCENE_MODE_NIGHT },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_THEATRE,        CAM_SCENE_MODE_THEATRE },
    { ANDROID_CONTROL_SCENE_MODE_BEACH,          CAM_SCENE_MODE_BEACH },
    { ANDROID_CONTROL_SCENE_MODE_SNOW,           CAM_SCENE_MODE_SNOW },
    { ANDROID_CONTROL_SCENE_MODE_SUNSET,         CAM_SCENE_MODE_SUNSET },
    { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO,    CAM_SCENE_MODE_ANTISHAKE },
    { ANDROID_CONTROL_SCENE_MODE_FIREWORKS ,     CAM_SCENE_MODE_FIREWORKS },
    { ANDROID_CONTROL_SCENE_MODE_SPORTS ,        CAM_SCENE_MODE_SPORTS },
    { ANDROID_CONTROL_SCENE_MODE_PARTY,          CAM_SCENE_MODE_PARTY },
    { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT,    CAM_SCENE_MODE_CANDLELIGHT },
    { ANDROID_CONTROL_SCENE_MODE_BARCODE,        CAM_SCENE_MODE_BARCODE},
    { ANDROID_CONTROL_SCENE_MODE_HDR,            CAM_SCENE_MODE_HDR}
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_af_mode_t,
        cam_focus_mode_type> QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_OFF },
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_FIXED },
    { ANDROID_CONTROL_AF_MODE_AUTO,               CAM_FOCUS_MODE_AUTO },
    { ANDROID_CONTROL_AF_MODE_MACRO,              CAM_FOCUS_MODE_MACRO },
    { ANDROID_CONTROL_AF_MODE_EDOF,               CAM_FOCUS_MODE_EDOF },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO,   CAM_FOCUS_MODE_CONTINOUS_VIDEO }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_color_correction_aberration_mode_t,
        cam_aberration_mode_t> QCamera3HardwareInterface::COLOR_ABERRATION_MAP[] = {
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
            CAM_COLOR_CORRECTION_ABERRATION_OFF },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
            CAM_COLOR_CORRECTION_ABERRATION_FAST },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY,
            CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY },
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_antibanding_mode_t,
        cam_antibanding_mode_type> QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,  CAM_ANTIBANDING_MODE_OFF },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = {
    { ANDROID_CONTROL_AE_MODE_OFF,                  CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON,                   CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH,        CAM_FLASH_MODE_AUTO},
    { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH,      CAM_FLASH_MODE_ON },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO},
    { (camera_metadata_enum_android_control_ae_mode_t)
            NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH, CAM_FLASH_MODE_OFF }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_flash_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
    { ANDROID_FLASH_MODE_OFF,    CAM_FLASH_MODE_OFF },
    { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_SINGLE },
    { ANDROID_FLASH_MODE_TORCH,  CAM_FLASH_MODE_TORCH }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_statistics_face_detect_mode_t,
        cam_face_detect_mode_t> QCamera3HardwareInterface::FACEDETECT_MODES_MAP[] = {
    { ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,    CAM_FACE_DETECT_MODE_OFF },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE, CAM_FACE_DETECT_MODE_SIMPLE },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_FULL,   CAM_FACE_DETECT_MODE_FULL }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_info_focus_distance_calibration_t,
        cam_focus_calibration_t> QCamera3HardwareInterface::FOCUS_CALIBRATION_MAP[] = {
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED,
            CAM_FOCUS_UNCALIBRATED },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_APPROXIMATE,
            CAM_FOCUS_APPROXIMATE },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED,
            CAM_FOCUS_CALIBRATED }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_state_t,
        cam_af_lens_state_t> QCamera3HardwareInterface::LENS_STATE_MAP[] = {
    { ANDROID_LENS_STATE_STATIONARY, CAM_AF_LENS_STATE_STATIONARY},
    { ANDROID_LENS_STATE_MOVING,     CAM_AF_LENS_STATE_MOVING}
};

const int32_t available_thumbnail_sizes[] = {0, 0,
                                             176, 144,
                                             240, 144,
                                             256, 144,
                                             240, 160,
                                             256, 154,
                                             240, 240,
                                             320, 240};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_test_pattern_mode_t,
        cam_test_pattern_mode_t> QCamera3HardwareInterface::TEST_PATTERN_MAP[] = {
    { ANDROID_SENSOR_TEST_PATTERN_MODE_OFF,                      CAM_TEST_PATTERN_OFF },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR,              CAM_TEST_PATTERN_SOLID_COLOR },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS,               CAM_TEST_PATTERN_COLOR_BARS },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS_FADE_TO_GRAY,  CAM_TEST_PATTERN_COLOR_BARS_FADE_TO_GRAY },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_PN9,                      CAM_TEST_PATTERN_PN9 },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_CUSTOM1,                  CAM_TEST_PATTERN_CUSTOM1},
};

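// Illustrative usage (not part of the original HAL code): the mode tables above
// are fixed-size arrays, so METADATA_MAP_SIZE() yields their entry count at
// compile time; TEST_PATTERN_MAP, for instance, has 6 entries.
#if 0
size_t testPatternMapSize = METADATA_MAP_SIZE(TEST_PATTERN_MAP);  // sizeof(MAP)/sizeof(MAP[0]) == 6
#endif
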
/* Since there is no mapping for all the options, some Android enums are not listed.
 * Also, the order in this list is important because while mapping from HAL to Android it will
 * traverse from lower to higher index, which means that for HAL values that map to different
 * Android values, the traverse logic will select the first one found.
 */
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_reference_illuminant1_t,
        cam_illuminat_t> QCamera3HardwareInterface::REFERENCE_ILLUMINANT_MAP[] = {
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FLUORESCENT,            CAM_AWB_WARM_FLO},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT_FLUORESCENT,   CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_COOL_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_A,             CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D55,                    CAM_AWB_NOON },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D65,                    CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D75,                    CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D50,                    CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_ISO_STUDIO_TUNGSTEN,    CAM_AWB_CUSTOM_A},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT,               CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_TUNGSTEN,               CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FINE_WEATHER,           CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_CLOUDY_WEATHER,         CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_SHADE,                  CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAY_WHITE_FLUORESCENT,  CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_WHITE_FLUORESCENT,      CAM_AWB_COLD_FLO},
};

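// Illustrative sketch (not part of the original HAL code): the comment above
// REFERENCE_ILLUMINANT_MAP relies on a linear, first-match traversal when
// translating between HAL and Android enums. A minimal lookup over a QCameraMap
// table could look like the template below; the fwk_name/hal_name member names
// are assumed here, not taken from this file.
#if 0
template <typename fwkType, typename halType>
static bool exampleLookupFwkName(
        const QCamera3HardwareInterface::QCameraMap<fwkType, halType> *map,
        size_t len, halType hal, fwkType *fwk) {
    for (size_t i = 0; i < len; i++) {
        if (map[i].hal_name == hal) {  // first match wins
            *fwk = map[i].fwk_name;
            return true;
        }
    }
    return false;
}
#endif
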
const QCamera3HardwareInterface::QCameraMap<
        int32_t, cam_hfr_mode_t> QCamera3HardwareInterface::HFR_MODE_MAP[] = {
    { 60, CAM_HFR_MODE_60FPS},
    { 90, CAM_HFR_MODE_90FPS},
    { 120, CAM_HFR_MODE_120FPS},
    { 150, CAM_HFR_MODE_150FPS},
    { 180, CAM_HFR_MODE_180FPS},
    { 210, CAM_HFR_MODE_210FPS},
    { 240, CAM_HFR_MODE_240FPS},
    { 480, CAM_HFR_MODE_480FPS},
};

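// Worked example (illustrative, not part of the original HAL code): HFR_MODE_MAP
// pairs a requested fps with the corresponding cam_hfr_mode_t. Batch mode applies
// only at or above MIN_FPS_FOR_BATCH_MODE, and the batch size is typically derived
// from the capture fps and the preview fps, bounded by MAX_HFR_BATCH_SIZE.
#if 0
uint32_t batchSizeAt120Fps = 120 / PREVIEW_FPS_FOR_HFR;  // 4
uint32_t batchSizeAt240Fps = 240 / PREVIEW_FPS_FOR_HFR;  // 8 == MAX_HFR_BATCH_SIZE
#endif
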
const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_instant_aec_mode_t,
        cam_aec_convergence_type> QCamera3HardwareInterface::INSTANT_AEC_MODES_MAP[] = {
    { QCAMERA3_INSTANT_AEC_NORMAL_CONVERGENCE,     CAM_AEC_NORMAL_CONVERGENCE},
    { QCAMERA3_INSTANT_AEC_AGGRESSIVE_CONVERGENCE, CAM_AEC_AGGRESSIVE_CONVERGENCE},
    { QCAMERA3_INSTANT_AEC_FAST_CONVERGENCE,       CAM_AEC_FAST_CONVERGENCE},
};

const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_exposure_meter_mode_t,
        cam_auto_exposure_mode_type> QCamera3HardwareInterface::AEC_MODES_MAP[] = {
    { QCAMERA3_EXP_METER_MODE_FRAME_AVERAGE,       CAM_AEC_MODE_FRAME_AVERAGE },
    { QCAMERA3_EXP_METER_MODE_CENTER_WEIGHTED,     CAM_AEC_MODE_CENTER_WEIGHTED },
    { QCAMERA3_EXP_METER_MODE_SPOT_METERING,       CAM_AEC_MODE_SPOT_METERING },
    { QCAMERA3_EXP_METER_MODE_SMART_METERING,      CAM_AEC_MODE_SMART_METERING },
    { QCAMERA3_EXP_METER_MODE_USER_METERING,       CAM_AEC_MODE_USER_METERING },
    { QCAMERA3_EXP_METER_MODE_SPOT_METERING_ADV,   CAM_AEC_MODE_SPOT_METERING_ADV },
    { QCAMERA3_EXP_METER_MODE_CENTER_WEIGHTED_ADV, CAM_AEC_MODE_CENTER_WEIGHTED_ADV },
};

const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_iso_mode_t,
        cam_iso_mode_type> QCamera3HardwareInterface::ISO_MODES_MAP[] = {
    { QCAMERA3_ISO_MODE_AUTO,   CAM_ISO_MODE_AUTO },
    { QCAMERA3_ISO_MODE_DEBLUR, CAM_ISO_MODE_DEBLUR },
    { QCAMERA3_ISO_MODE_100,    CAM_ISO_MODE_100 },
    { QCAMERA3_ISO_MODE_200,    CAM_ISO_MODE_200 },
    { QCAMERA3_ISO_MODE_400,    CAM_ISO_MODE_400 },
    { QCAMERA3_ISO_MODE_800,    CAM_ISO_MODE_800 },
    { QCAMERA3_ISO_MODE_1600,   CAM_ISO_MODE_1600 },
    { QCAMERA3_ISO_MODE_3200,   CAM_ISO_MODE_3200 },
};

camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
    .initialize                         = QCamera3HardwareInterface::initialize,
    .configure_streams                  = QCamera3HardwareInterface::configure_streams,
    .register_stream_buffers            = NULL,
    .construct_default_request_settings = QCamera3HardwareInterface::construct_default_request_settings,
    .process_capture_request            = QCamera3HardwareInterface::process_capture_request,
    .get_metadata_vendor_tag_ops        = NULL,
    .dump                               = QCamera3HardwareInterface::dump,
    .flush                              = QCamera3HardwareInterface::flush,
    .reserved                           = {0},
};

422uint32_t QCamera3HardwareInterface::sessionId[] = {0xDEADBEEF, 0xDEADBEEF, 0xDEADBEEF};
423
Chien-Yu Chen509314b2017-04-07 15:27:55 -0700424static inline void logEaselEvent(const char *tag, const char *event) {
425 if (CC_UNLIKELY(gEaselProfilingEnabled)) {
426 struct timespec ts = {};
427 static int64_t kMsPerSec = 1000;
428 static int64_t kNsPerMs = 1000000;
429 status_t res = clock_gettime(CLOCK_BOOTTIME, &ts);
430 if (res != OK) {
431 ALOGE("[%s] Failed to get boot time for <%s>.", tag, event);
432 } else {
433 int64_t now = static_cast<int64_t>(ts.tv_sec) * kMsPerSec + ts.tv_nsec / kNsPerMs;
434 ALOGI("[%s] %s at %" PRId64 " ms", tag, event, now);
435 }
436 }
437}
438
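// Illustrative usage (mirrors an actual call made later in this file): the event
// is only logged when Easel profiling is enabled via gEaselProfilingEnabled.
#if 0
logEaselEvent("EASEL_STARTUP_LATENCY", "Camera Open");
#endif
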
/*===========================================================================
 * FUNCTION   : QCamera3HardwareInterface
 *
 * DESCRIPTION: constructor of QCamera3HardwareInterface
 *
 * PARAMETERS :
 *   @cameraId  : camera ID
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::QCamera3HardwareInterface(uint32_t cameraId,
        const camera_module_callbacks_t *callbacks)
    : mCameraId(cameraId),
      mCameraHandle(NULL),
      mCameraInitialized(false),
      mCallbackOps(NULL),
      mMetadataChannel(NULL),
      mPictureChannel(NULL),
      mRawChannel(NULL),
      mSupportChannel(NULL),
      mAnalysisChannel(NULL),
      mRawDumpChannel(NULL),
      mHdrPlusRawSrcChannel(NULL),
      mDummyBatchChannel(NULL),
      mDepthChannel(NULL),
      mDepthCloudMode(CAM_PD_DATA_SKIP),
      mPerfLockMgr(),
      mChannelHandle(0),
      mFirstConfiguration(true),
      mFlush(false),
      mFlushPerf(false),
      mParamHeap(NULL),
      mParameters(NULL),
      mPrevParameters(NULL),
      m_bIsVideo(false),
      m_bIs4KVideo(false),
      m_bEisSupportedSize(false),
      m_bEisEnable(false),
      m_bEis3PropertyEnabled(false),
      m_bAVTimerEnabled(false),
      m_MobicatMask(0),
      mShutterDispatcher(this),
      mOutputBufferDispatcher(this),
      mMinProcessedFrameDuration(0),
      mMinJpegFrameDuration(0),
      mMinRawFrameDuration(0),
      mExpectedFrameDuration(0),
      mExpectedInflightDuration(0),
      mMetaFrameCount(0U),
      mUpdateDebugLevel(false),
      mCallbacks(callbacks),
      mCaptureIntent(0),
      mCacMode(0),
      /* DevCamDebug metadata internal m control*/
      mDevCamDebugMetaEnable(0),
      /* DevCamDebug metadata end */
      mBatchSize(0),
      mToBeQueuedVidBufs(0),
      mHFRVideoFps(DEFAULT_VIDEO_FPS),
      mOpMode(CAMERA3_STREAM_CONFIGURATION_NORMAL_MODE),
      mStreamConfig(false),
      mCommon(),
      mFirstFrameNumberInBatch(0),
      mNeedSensorRestart(false),
      mPreviewStarted(false),
      mMinInFlightRequests(MIN_INFLIGHT_REQUESTS),
      mMaxInFlightRequests(MAX_INFLIGHT_REQUESTS),
      mPDSupported(false),
      mPDIndex(0),
      mInstantAEC(false),
      mResetInstantAEC(false),
      mInstantAECSettledFrameNumber(0),
      mAecSkipDisplayFrameBound(0),
      mInstantAecFrameIdxCount(0),
      mLastRequestedLensShadingMapMode(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF),
      mCurrFeatureState(0),
      mLdafCalibExist(false),
      mLastCustIntentFrmNum(-1),
      mFirstMetadataCallback(true),
      mState(CLOSED),
      mIsDeviceLinked(false),
      mIsMainCamera(true),
      mLinkedCameraId(0),
      m_pDualCamCmdHeap(NULL),
      m_pDualCamCmdPtr(NULL),
      mHdrPlusModeEnabled(false),
      mZslEnabled(false),
      mEaselMipiStarted(false),
      mIsApInputUsedForHdrPlus(false),
      mFirstPreviewIntentSeen(false),
      m_bSensorHDREnabled(false),
      mAfTrigger(),
      mSceneDistance(-1)
{
    getLogLevel();
    mCommon.init(gCamCapability[cameraId]);
    mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
#ifndef USE_HAL_3_3
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_4;
#else
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_3;
#endif
    mCameraDevice.common.close = close_camera_device;
    mCameraDevice.ops = &mCameraOps;
    mCameraDevice.priv = this;
    gCamCapability[cameraId]->version = CAM_HAL_V3;
    // TODO: hardcode for now until mctl adds support for min_num_pp_bufs
    //TBD - To see if this hardcoding is needed. Check by printing if this is filled by mctl to 3
    gCamCapability[cameraId]->min_num_pp_bufs = 3;

    PTHREAD_COND_INIT(&mBuffersCond);

    PTHREAD_COND_INIT(&mRequestCond);
    mPendingLiveRequest = 0;
    mCurrentRequestId = -1;
    pthread_mutex_init(&mMutex, NULL);

    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        mDefaultMetadata[i] = NULL;

    // Getting system props of different kinds
    char prop[PROPERTY_VALUE_MAX];
    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.raw.dump", prop, "0");
    mEnableRawDump = atoi(prop);
    property_get("persist.camera.hal3.force.hdr", prop, "0");
    mForceHdrSnapshot = atoi(prop);

    if (mEnableRawDump)
        LOGD("Raw dump from Camera HAL enabled");

    memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
    memset(mLdafCalib, 0, sizeof(mLdafCalib));

    memset(mEaselFwVersion, 0, sizeof(mEaselFwVersion));
    mEaselFwUpdated = false;

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.preview", prop, "0");
    m_bTnrPreview = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.swtnr.preview", prop, "1");
    m_bSwTnrPreview = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.video", prop, "1");
    m_bTnrVideo = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.avtimer.debug", prop, "0");
    m_debug_avtimer = (uint8_t)atoi(prop);
    LOGI("AV timer enabled: %d", m_debug_avtimer);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.cacmode.disable", prop, "0");
    m_cacModeDisabled = (uint8_t)atoi(prop);

    m_bForceInfinityAf = property_get_bool("persist.camera.af.infinity", 0);
    m_MobicatMask = (uint8_t)property_get_int32("persist.camera.mobicat", 0);

    //Load and read GPU library.
    lib_surface_utils = NULL;
    LINK_get_surface_pixel_alignment = NULL;
    mSurfaceStridePadding = CAM_PAD_TO_64;
#ifdef CHECK_GPU_PIXEL_ALIGNMENT
    lib_surface_utils = dlopen("libadreno_utils.so", RTLD_NOW);
    if (lib_surface_utils) {
        *(void **)&LINK_get_surface_pixel_alignment =
                dlsym(lib_surface_utils, "get_gpu_pixel_alignment");
        if (LINK_get_surface_pixel_alignment) {
            mSurfaceStridePadding = LINK_get_surface_pixel_alignment();
        }
        dlclose(lib_surface_utils);
    }
#endif
    mPDIndex = getPDStatIndex(gCamCapability[cameraId]);
    mPDSupported = (0 <= mPDIndex) ? true : false;

    m60HzZone = is60HzZone();
}

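// Illustrative sketch (not part of the original HAL code): the constructor above
// reads several persist.camera.* system properties with the same pattern. A
// minimal example of that pattern, using a hypothetical property key:
#if 0
char prop[PROPERTY_VALUE_MAX];
memset(prop, 0, sizeof(prop));
property_get("persist.camera.example.flag", prop, "0");  // hypothetical key
bool exampleFlag = (atoi(prop) != 0);
#endif
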
/*===========================================================================
 * FUNCTION   : ~QCamera3HardwareInterface
 *
 * DESCRIPTION: destructor of QCamera3HardwareInterface
 *
 * PARAMETERS : none
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::~QCamera3HardwareInterface()
{
    LOGD("E");

    int32_t rc = 0;

    // Disable power hint and enable the perf lock for close camera
    mPerfLockMgr.releasePerfLock(PERF_LOCK_POWERHINT_ENCODE);
    mPerfLockMgr.acquirePerfLock(PERF_LOCK_CLOSE_CAMERA);

    // Close HDR+ client first before destroying HAL.
    {
        std::unique_lock<std::mutex> l(gHdrPlusClientLock);
        finishHdrPlusClientOpeningLocked(l);
        if (gHdrPlusClient != nullptr) {
            // Disable HDR+ mode.
            disableHdrPlusModeLocked();
            // Disconnect Easel if it's connected.
            gEaselManagerClient->closeHdrPlusClient(std::move(gHdrPlusClient));
            gHdrPlusClient = nullptr;
        }
    }

    // unlink of dualcam during close camera
    if (mIsDeviceLinked) {
        cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
                &m_pDualCamCmdPtr->bundle_info;
        m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
        m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
        pthread_mutex_lock(&gCamLock);

        if (mIsMainCamera == 1) {
            m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
            m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
            m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
            // related session id should be session id of linked session
            m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
        } else {
            m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
            m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
            m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
            m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
        }
        m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
        pthread_mutex_unlock(&gCamLock);

        rc = mCameraHandle->ops->set_dual_cam_cmd(
                mCameraHandle->camera_handle);
        if (rc < 0) {
            LOGE("Dualcam: Unlink failed, but still proceed to close");
        }
    }

    /* We need to stop all streams before deleting any stream */
    if (mRawDumpChannel) {
        mRawDumpChannel->stop();
    }

    if (mHdrPlusRawSrcChannel) {
        mHdrPlusRawSrcChannel->stop();
    }

    // NOTE: 'camera3_stream_t *' objects are already freed at
    //       this stage by the framework
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel) {
            channel->stop();
        }
    }
    if (mSupportChannel)
        mSupportChannel->stop();

    if (mAnalysisChannel) {
        mAnalysisChannel->stop();
    }
    if (mMetadataChannel) {
        mMetadataChannel->stop();
    }
    if (mChannelHandle) {
        stopChannelLocked(/*stop_immediately*/false);
    }

    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel)
            delete channel;
        free (*it);
    }
    if (mSupportChannel) {
        delete mSupportChannel;
        mSupportChannel = NULL;
    }

    if (mAnalysisChannel) {
        delete mAnalysisChannel;
        mAnalysisChannel = NULL;
    }
    if (mRawDumpChannel) {
        delete mRawDumpChannel;
        mRawDumpChannel = NULL;
    }
    if (mHdrPlusRawSrcChannel) {
        delete mHdrPlusRawSrcChannel;
        mHdrPlusRawSrcChannel = NULL;
    }
    if (mDummyBatchChannel) {
        delete mDummyBatchChannel;
        mDummyBatchChannel = NULL;
    }

    mPictureChannel = NULL;
    mDepthChannel = NULL;

    if (mMetadataChannel) {
        delete mMetadataChannel;
        mMetadataChannel = NULL;
    }

    /* Clean up all channels */
    if (mCameraInitialized) {
        if(!mFirstConfiguration){
            //send the last unconfigure
            cam_stream_size_info_t stream_config_info;
            memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
            stream_config_info.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
            stream_config_info.buffer_info.max_buffers =
                    m_bIs4KVideo ? 0 :
                    m_bEis3PropertyEnabled && m_bIsVideo ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
            clear_metadata_buffer(mParameters);
            ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_INFO,
                    stream_config_info);
            int rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
            if (rc < 0) {
                LOGE("set_parms failed for unconfigure");
            }
        }
        deinitParameters();
    }

    if (mChannelHandle) {
        mCameraHandle->ops->delete_channel(mCameraHandle->camera_handle,
                mChannelHandle);
        LOGH("deleting channel %d", mChannelHandle);
        mChannelHandle = 0;
    }

    if (mState != CLOSED)
        closeCamera();

    for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
        req.mPendingBufferList.clear();
    }
    mPendingBuffersMap.mPendingBuffersInRequest.clear();
    for (pendingRequestIterator i = mPendingRequestsList.begin();
            i != mPendingRequestsList.end();) {
        i = erasePendingRequest(i);
    }
    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        if (mDefaultMetadata[i])
            free_camera_metadata(mDefaultMetadata[i]);

    mPerfLockMgr.releasePerfLock(PERF_LOCK_CLOSE_CAMERA);

    pthread_cond_destroy(&mRequestCond);

    pthread_cond_destroy(&mBuffersCond);

    pthread_mutex_destroy(&mMutex);
    LOGD("X");
}

/*===========================================================================
 * FUNCTION   : erasePendingRequest
 *
 * DESCRIPTION: function to erase a desired pending request after freeing any
 *              allocated memory
 *
 * PARAMETERS :
 *   @i       : iterator pointing to pending request to be erased
 *
 * RETURN     : iterator pointing to the next request
 *==========================================================================*/
QCamera3HardwareInterface::pendingRequestIterator
        QCamera3HardwareInterface::erasePendingRequest (pendingRequestIterator i)
{
    if (i->input_buffer != NULL) {
        free(i->input_buffer);
        i->input_buffer = NULL;
    }
    if (i->settings != NULL)
        free_camera_metadata((camera_metadata_t*)i->settings);

    mExpectedInflightDuration -= i->expectedFrameDuration;
    if (mExpectedInflightDuration < 0) {
        LOGE("Negative expected in-flight duration!");
        mExpectedInflightDuration = 0;
    }

    return mPendingRequestsList.erase(i);
}

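// Illustrative usage (not part of the original HAL code): erasePendingRequest()
// returns the iterator to the next element, so callers drain the list with the
// usual erase-in-loop pattern, exactly as the destructor above does.
#if 0
for (pendingRequestIterator i = mPendingRequestsList.begin();
        i != mPendingRequestsList.end();) {
    i = erasePendingRequest(i);
}
#endif
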
834/*===========================================================================
835 * FUNCTION : camEvtHandle
836 *
837 * DESCRIPTION: Function registered to mm-camera-interface to handle events
838 *
839 * PARAMETERS :
840 * @camera_handle : interface layer camera handle
841 * @evt : ptr to event
842 * @user_data : user data ptr
843 *
844 * RETURN : none
845 *==========================================================================*/
846void QCamera3HardwareInterface::camEvtHandle(uint32_t /*camera_handle*/,
847 mm_camera_event_t *evt,
848 void *user_data)
849{
850 QCamera3HardwareInterface *obj = (QCamera3HardwareInterface *)user_data;
851 if (obj && evt) {
852 switch(evt->server_event_type) {
853 case CAM_EVENT_TYPE_DAEMON_DIED:
854 pthread_mutex_lock(&obj->mMutex);
855 obj->mState = ERROR;
856 pthread_mutex_unlock(&obj->mMutex);
857 LOGE("Fatal, camera daemon died");
858 break;
859
860 case CAM_EVENT_TYPE_DAEMON_PULL_REQ:
861 LOGD("HAL got request pull from Daemon");
862 pthread_mutex_lock(&obj->mMutex);
863 obj->mWokenUpByDaemon = true;
864 obj->unblockRequestIfNecessary();
865 pthread_mutex_unlock(&obj->mMutex);
866 break;
867
868 default:
869 LOGW("Warning: Unhandled event %d",
870 evt->server_event_type);
871 break;
872 }
873 } else {
874 LOGE("NULL user_data/evt");
875 }
876}
877
878/*===========================================================================
879 * FUNCTION : openCamera
880 *
881 * DESCRIPTION: open camera
882 *
883 * PARAMETERS :
884 * @hw_device : double ptr for camera device struct
885 *
886 * RETURN : int32_t type of status
887 * NO_ERROR -- success
888 * none-zero failure code
889 *==========================================================================*/
890int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device)
891{
892 int rc = 0;
893 if (mState != CLOSED) {
894 *hw_device = NULL;
895 return PERMISSION_DENIED;
896 }
897
Chien-Yu Chene96475e2017-04-11 11:53:26 -0700898 logEaselEvent("EASEL_STARTUP_LATENCY", "Camera Open");
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800899 mPerfLockMgr.acquirePerfLock(PERF_LOCK_OPEN_CAMERA);
Thierry Strudel3d639192016-09-09 11:52:26 -0700900 LOGI("[KPI Perf]: E PROFILE_OPEN_CAMERA camera id %d",
901 mCameraId);
902
Chien-Yu Chend8e57982017-05-25 12:10:21 -0700903 if (mCameraHandle) {
904 LOGE("Failure: Camera already opened");
905 return ALREADY_EXISTS;
906 }
907
908 {
Chien-Yu Chen77ccd022017-06-23 12:00:36 -0700909 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
Chien-Yu Chend77a5462017-06-02 18:00:38 -0700910 if (gEaselManagerClient != nullptr && gEaselManagerClient->isEaselPresentOnDevice()) {
Chien-Yu Chend8e57982017-05-25 12:10:21 -0700911 logEaselEvent("EASEL_STARTUP_LATENCY", "Resume");
Chien-Yu Chen90f1fc12017-07-14 14:31:53 -0700912 rc = gEaselManagerClient->resume(this);
Chien-Yu Chend8e57982017-05-25 12:10:21 -0700913 if (rc != 0) {
914 ALOGE("%s: Resuming Easel failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
915 return rc;
916 }
Arnd Geis8cbfc182017-09-07 14:46:41 -0700917 mEaselFwUpdated = false;
Chien-Yu Chend8e57982017-05-25 12:10:21 -0700918 }
919 }
920
Thierry Strudel3d639192016-09-09 11:52:26 -0700921 rc = openCamera();
922 if (rc == 0) {
923 *hw_device = &mCameraDevice.common;
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800924 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -0700925 *hw_device = NULL;
Chien-Yu Chend8e57982017-05-25 12:10:21 -0700926
927 // Suspend Easel because opening camera failed.
928 {
Chien-Yu Chen77ccd022017-06-23 12:00:36 -0700929 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
Chien-Yu Chend77a5462017-06-02 18:00:38 -0700930 if (gEaselManagerClient != nullptr && gEaselManagerClient->isEaselPresentOnDevice()) {
931 status_t suspendErr = gEaselManagerClient->suspend();
Chien-Yu Chend8e57982017-05-25 12:10:21 -0700932 if (suspendErr != 0) {
933 ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__,
934 strerror(-suspendErr), suspendErr);
935 }
936 }
937 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800938 }
Thierry Strudel3d639192016-09-09 11:52:26 -0700939
Thierry Strudel3d639192016-09-09 11:52:26 -0700940 LOGI("[KPI Perf]: X PROFILE_OPEN_CAMERA camera id %d, rc: %d",
941 mCameraId, rc);
942
943 if (rc == NO_ERROR) {
944 mState = OPENED;
945 }
Chien-Yu Chen27ec9622017-02-23 13:39:41 -0800946
Thierry Strudel3d639192016-09-09 11:52:26 -0700947 return rc;
948}
949
950/*===========================================================================
951 * FUNCTION : openCamera
952 *
953 * DESCRIPTION: open camera
954 *
955 * PARAMETERS : none
956 *
957 * RETURN : int32_t type of status
958 * NO_ERROR -- success
959 * none-zero failure code
960 *==========================================================================*/
961int QCamera3HardwareInterface::openCamera()
962{
963 int rc = 0;
964 char value[PROPERTY_VALUE_MAX];
965
Thierry Strudele80ad7c2016-12-06 10:16:27 -0800966 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_OPENCAMERA);
Chien-Yu Chen27ec9622017-02-23 13:39:41 -0800967
Thierry Strudel3d639192016-09-09 11:52:26 -0700968 rc = QCameraFlash::getInstance().reserveFlashForCamera(mCameraId);
969 if (rc < 0) {
970 LOGE("Failed to reserve flash for camera id: %d",
971 mCameraId);
972 return UNKNOWN_ERROR;
973 }
974
975 rc = camera_open((uint8_t)mCameraId, &mCameraHandle);
976 if (rc) {
977 LOGE("camera_open failed. rc = %d, mCameraHandle = %p", rc, mCameraHandle);
978 return rc;
979 }
980
981 if (!mCameraHandle) {
982 LOGE("camera_open failed. mCameraHandle = %p", mCameraHandle);
983 return -ENODEV;
984 }
985
986 rc = mCameraHandle->ops->register_event_notify(mCameraHandle->camera_handle,
987 camEvtHandle, (void *)this);
988
989 if (rc < 0) {
990 LOGE("Error, failed to register event callback");
991 /* Not closing camera here since it is already handled in destructor */
992 return FAILED_TRANSACTION;
993 }
994
995 mExifParams.debug_params =
996 (mm_jpeg_debug_exif_params_t *) malloc (sizeof(mm_jpeg_debug_exif_params_t));
997 if (mExifParams.debug_params) {
998 memset(mExifParams.debug_params, 0, sizeof(mm_jpeg_debug_exif_params_t));
999 } else {
1000 LOGE("Out of Memory. Allocation failed for 3A debug exif params");
1001 return NO_MEMORY;
1002 }
1003 mFirstConfiguration = true;
1004
1005 //Notify display HAL that a camera session is active.
1006 //But avoid calling the same during bootup because camera service might open/close
1007 //cameras at boot time during its initialization and display service will also internally
1008 //wait for camera service to initialize first while calling this display API, resulting in a
1009 //deadlock situation. Since boot time camera open/close calls are made only to fetch
1010 //capabilities, no need of this display bw optimization.
1011 //Use "service.bootanim.exit" property to know boot status.
1012 property_get("service.bootanim.exit", value, "0");
1013 if (atoi(value) == 1) {
1014 pthread_mutex_lock(&gCamLock);
1015 if (gNumCameraSessions++ == 0) {
1016 setCameraLaunchStatus(true);
1017 }
1018 pthread_mutex_unlock(&gCamLock);
1019 }
1020
1021 //fill the session id needed while linking dual cam
1022 pthread_mutex_lock(&gCamLock);
1023 rc = mCameraHandle->ops->get_session_id(mCameraHandle->camera_handle,
1024 &sessionId[mCameraId]);
1025 pthread_mutex_unlock(&gCamLock);
1026
1027 if (rc < 0) {
1028 LOGE("Error, failed to get sessiion id");
1029 return UNKNOWN_ERROR;
1030 } else {
1031 //Allocate related cam sync buffer
1032 //this is needed for the payload that goes along with bundling cmd for related
1033 //camera use cases
Thierry Strudel295a0ca2016-11-03 18:38:47 -07001034 m_pDualCamCmdHeap = new QCamera3HeapMemory(1);
1035 rc = m_pDualCamCmdHeap->allocate(sizeof(cam_dual_camera_cmd_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07001036 if(rc != OK) {
1037 rc = NO_MEMORY;
1038 LOGE("Dualcam: Failed to allocate Related cam sync Heap memory");
1039 return NO_MEMORY;
1040 }
1041
1042 //Map memory for related cam sync buffer
1043 rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
Thierry Strudel295a0ca2016-11-03 18:38:47 -07001044 CAM_MAPPING_BUF_TYPE_DUAL_CAM_CMD_BUF,
1045 m_pDualCamCmdHeap->getFd(0),
1046 sizeof(cam_dual_camera_cmd_info_t),
1047 m_pDualCamCmdHeap->getPtr(0));
Thierry Strudel3d639192016-09-09 11:52:26 -07001048 if(rc < 0) {
1049 LOGE("Dualcam: failed to map Related cam sync buffer");
1050 rc = FAILED_TRANSACTION;
1051 return NO_MEMORY;
1052 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07001053 m_pDualCamCmdPtr =
1054 (cam_dual_camera_cmd_info_t*) DATA_PTR(m_pDualCamCmdHeap,0);
Thierry Strudel3d639192016-09-09 11:52:26 -07001055 }
1056
1057 LOGH("mCameraId=%d",mCameraId);
1058
1059 return NO_ERROR;
1060}
1061
1062/*===========================================================================
1063 * FUNCTION : closeCamera
1064 *
1065 * DESCRIPTION: close camera
1066 *
1067 * PARAMETERS : none
1068 *
1069 * RETURN : int32_t type of status
1070 * NO_ERROR -- success
1071 * none-zero failure code
1072 *==========================================================================*/
1073int QCamera3HardwareInterface::closeCamera()
1074{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001075 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CLOSECAMERA);
Thierry Strudel3d639192016-09-09 11:52:26 -07001076 int rc = NO_ERROR;
1077 char value[PROPERTY_VALUE_MAX];
1078
1079 LOGI("[KPI Perf]: E PROFILE_CLOSE_CAMERA camera id %d",
1080 mCameraId);
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07001081
1082 // unmap memory for related cam sync buffer
1083 mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001084 CAM_MAPPING_BUF_TYPE_DUAL_CAM_CMD_BUF);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07001085 if (NULL != m_pDualCamCmdHeap) {
1086 m_pDualCamCmdHeap->deallocate();
1087 delete m_pDualCamCmdHeap;
1088 m_pDualCamCmdHeap = NULL;
1089 m_pDualCamCmdPtr = NULL;
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07001090 }
1091
Thierry Strudel3d639192016-09-09 11:52:26 -07001092 rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
1093 mCameraHandle = NULL;
1094
1095 //reset session id to some invalid id
1096 pthread_mutex_lock(&gCamLock);
1097 sessionId[mCameraId] = 0xDEADBEEF;
1098 pthread_mutex_unlock(&gCamLock);
1099
1100 //Notify display HAL that there is no active camera session
1101 //but avoid calling the same during bootup. Refer to openCamera
1102 //for more details.
1103 property_get("service.bootanim.exit", value, "0");
1104 if (atoi(value) == 1) {
1105 pthread_mutex_lock(&gCamLock);
1106 if (--gNumCameraSessions == 0) {
1107 setCameraLaunchStatus(false);
1108 }
1109 pthread_mutex_unlock(&gCamLock);
1110 }
1111
Thierry Strudel3d639192016-09-09 11:52:26 -07001112 if (mExifParams.debug_params) {
1113 free(mExifParams.debug_params);
1114 mExifParams.debug_params = NULL;
1115 }
1116 if (QCameraFlash::getInstance().releaseFlashFromCamera(mCameraId) != 0) {
1117 LOGW("Failed to release flash for camera id: %d",
1118 mCameraId);
1119 }
1120 mState = CLOSED;
1121 LOGI("[KPI Perf]: X PROFILE_CLOSE_CAMERA camera id %d, rc: %d",
1122 mCameraId, rc);
Chien-Yu Chen27ec9622017-02-23 13:39:41 -08001123
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07001124 {
Chien-Yu Chen77ccd022017-06-23 12:00:36 -07001125 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
Chien-Yu Chen5abecb52017-04-06 11:25:21 -07001126 if (EaselManagerClientOpened) {
Chien-Yu Chend77a5462017-06-02 18:00:38 -07001127 rc = gEaselManagerClient->suspend();
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07001128 if (rc != 0) {
1129 ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
1130 }
Chien-Yu Chen27ec9622017-02-23 13:39:41 -08001131 }
1132 }
1133
Thierry Strudel3d639192016-09-09 11:52:26 -07001134 return rc;
1135}
1136
1137/*===========================================================================
1138 * FUNCTION : initialize
1139 *
1140 * DESCRIPTION: Initialize frameworks callback functions
1141 *
1142 * PARAMETERS :
1143 * @callback_ops : callback function to frameworks
1144 *
1145 * RETURN :
1146 *
1147 *==========================================================================*/
1148int QCamera3HardwareInterface::initialize(
1149 const struct camera3_callback_ops *callback_ops)
1150{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001151 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_INIT);
Thierry Strudel3d639192016-09-09 11:52:26 -07001152 int rc;
1153
1154 LOGI("E :mCameraId = %d mState = %d", mCameraId, mState);
1155 pthread_mutex_lock(&mMutex);
1156
1157 // Validate current state
1158 switch (mState) {
1159 case OPENED:
1160 /* valid state */
1161 break;
1162 default:
1163 LOGE("Invalid state %d", mState);
1164 rc = -ENODEV;
1165 goto err1;
1166 }
1167
1168 rc = initParameters();
1169 if (rc < 0) {
1170 LOGE("initParamters failed %d", rc);
1171 goto err1;
1172 }
1173 mCallbackOps = callback_ops;
1174
1175 mChannelHandle = mCameraHandle->ops->add_channel(
1176 mCameraHandle->camera_handle, NULL, NULL, this);
1177 if (mChannelHandle == 0) {
1178 LOGE("add_channel failed");
1179 rc = -ENOMEM;
1180 pthread_mutex_unlock(&mMutex);
1181 return rc;
1182 }
1183
1184 pthread_mutex_unlock(&mMutex);
1185 mCameraInitialized = true;
1186 mState = INITIALIZED;
1187 LOGI("X");
1188 return 0;
1189
1190err1:
1191 pthread_mutex_unlock(&mMutex);
1192 return rc;
1193}
1194
1195/*===========================================================================
1196 * FUNCTION : validateStreamDimensions
1197 *
1198 * DESCRIPTION: Check if the configuration requested are those advertised
1199 *
1200 * PARAMETERS :
1201 * @stream_list : streams to be configured
1202 *
1203 * RETURN :
1204 *
1205 *==========================================================================*/
1206int QCamera3HardwareInterface::validateStreamDimensions(
1207 camera3_stream_configuration_t *streamList)
1208{
1209 int rc = NO_ERROR;
1210 size_t count = 0;
Emilian Peev0f3c3162017-03-15 12:57:46 +00001211 uint32_t depthWidth = 0;
1212 uint32_t depthHeight = 0;
1213 if (mPDSupported) {
1214 depthWidth = gCamCapability[mCameraId]->raw_meta_dim[mPDIndex].width;
1215 depthHeight = gCamCapability[mCameraId]->raw_meta_dim[mPDIndex].height;
1216 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001217
1218 camera3_stream_t *inputStream = NULL;
1219 /*
1220 * Loop through all streams to find input stream if it exists*
1221 */
1222 for (size_t i = 0; i< streamList->num_streams; i++) {
1223 if (streamList->streams[i]->stream_type == CAMERA3_STREAM_INPUT) {
1224 if (inputStream != NULL) {
1225 LOGE("Error, Multiple input streams requested");
1226 return -EINVAL;
1227 }
1228 inputStream = streamList->streams[i];
1229 }
1230 }
1231 /*
1232 * Loop through all streams requested in configuration
1233 * Check if unsupported sizes have been requested on any of them
1234 */
1235 for (size_t j = 0; j < streamList->num_streams; j++) {
1236 bool sizeFound = false;
1237 camera3_stream_t *newStream = streamList->streams[j];
1238
1239 uint32_t rotatedHeight = newStream->height;
1240 uint32_t rotatedWidth = newStream->width;
1241 if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
1242 (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
1243 rotatedHeight = newStream->width;
1244 rotatedWidth = newStream->height;
1245 }
1246
1247 /*
1248 * Sizes are different for each type of stream format check against
1249 * appropriate table.
1250 */
1251 switch (newStream->format) {
1252 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
1253 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
1254 case HAL_PIXEL_FORMAT_RAW10:
Emilian Peev0f3c3162017-03-15 12:57:46 +00001255 if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
1256 (ANDROID_SCALER_AVAILABLE_FORMATS_RAW16 == newStream->format) &&
1257 mPDSupported) {
1258 if ((depthWidth == newStream->width) &&
1259 (depthHeight == newStream->height)) {
1260 sizeFound = true;
1261 }
1262 break;
1263 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001264 count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
1265 for (size_t i = 0; i < count; i++) {
1266 if ((gCamCapability[mCameraId]->raw_dim[i].width == (int32_t)rotatedWidth) &&
1267 (gCamCapability[mCameraId]->raw_dim[i].height == (int32_t)rotatedHeight)) {
1268 sizeFound = true;
1269 break;
1270 }
1271 }
1272 break;
1273 case HAL_PIXEL_FORMAT_BLOB:
Emilian Peev0f3c3162017-03-15 12:57:46 +00001274 if ((newStream->data_space == HAL_DATASPACE_DEPTH) &&
1275 mPDSupported) {
Emilian Peev7650c122017-01-19 08:24:33 -08001276 //As per spec. depth cloud should be sample count / 16
Emilian Peev0f3c3162017-03-15 12:57:46 +00001277 uint32_t depthSamplesCount = (depthWidth * depthHeight * 2) / 16;
Emilian Peev7650c122017-01-19 08:24:33 -08001278 if ((depthSamplesCount == newStream->width) &&
1279 (1 == newStream->height)) {
1280 sizeFound = true;
1281 }
1282 break;
1283 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001284 count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
1285 /* Verify set size against generated sizes table */
1286 for (size_t i = 0; i < count; i++) {
1287 if (((int32_t)rotatedWidth ==
1288 gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
1289 ((int32_t)rotatedHeight ==
1290 gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
1291 sizeFound = true;
1292 break;
1293 }
1294 }
1295 break;
1296 case HAL_PIXEL_FORMAT_YCbCr_420_888:
1297 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
1298 default:
1299 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
1300 || newStream->stream_type == CAMERA3_STREAM_INPUT
1301 || IS_USAGE_ZSL(newStream->usage)) {
1302 if (((int32_t)rotatedWidth ==
1303 gCamCapability[mCameraId]->active_array_size.width) &&
1304 ((int32_t)rotatedHeight ==
1305 gCamCapability[mCameraId]->active_array_size.height)) {
1306 sizeFound = true;
1307 break;
1308 }
 1309                    /* We could potentially break here to enforce that a ZSL stream
 1310                     * set from the framework is always the full active array size,
 1311                     * but it is not clear from the spec whether the framework will
 1312                     * always follow that. We also have logic to override to the full
 1313                     * array size, so keep the check lenient for now.
 1314                     */
1315 }
1316 count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt,
1317 MAX_SIZES_CNT);
1318 for (size_t i = 0; i < count; i++) {
1319 if (((int32_t)rotatedWidth ==
1320 gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
1321 ((int32_t)rotatedHeight ==
1322 gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
1323 sizeFound = true;
1324 break;
1325 }
1326 }
1327 break;
1328 } /* End of switch(newStream->format) */
1329
1330 /* We error out even if a single stream has unsupported size set */
1331 if (!sizeFound) {
1332 LOGE("Error: Unsupported size: %d x %d type: %d array size: %d x %d",
1333 rotatedWidth, rotatedHeight, newStream->format,
1334 gCamCapability[mCameraId]->active_array_size.width,
1335 gCamCapability[mCameraId]->active_array_size.height);
1336 rc = -EINVAL;
1337 break;
1338 }
1339 } /* End of for each stream */
1340 return rc;
1341}
1342
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001343/*===========================================================================
1344 * FUNCTION : validateUsageFlags
1345 *
 1346 * DESCRIPTION: Check if the configuration usage flags map to the same internal format.
1347 *
1348 * PARAMETERS :
1349 * @stream_list : streams to be configured
1350 *
1351 * RETURN :
1352 * NO_ERROR if the usage flags are supported
1353 * error code if usage flags are not supported
1354 *
1355 *==========================================================================*/
1356int QCamera3HardwareInterface::validateUsageFlags(
1357 const camera3_stream_configuration_t* streamList)
1358{
1359 for (size_t j = 0; j < streamList->num_streams; j++) {
1360 const camera3_stream_t *newStream = streamList->streams[j];
1361
1362 if (newStream->format != HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
1363 (newStream->stream_type != CAMERA3_STREAM_OUTPUT &&
1364 newStream->stream_type != CAMERA3_STREAM_BIDIRECTIONAL)) {
1365 continue;
1366 }
1367
Jason Leec4cf5032017-05-24 18:31:41 -07001368 // Here we only care whether it's EIS3 or not
1369 char is_type_value[PROPERTY_VALUE_MAX];
1370 property_get("persist.camera.is_type", is_type_value, "4");
1371 cam_is_type_t isType = atoi(is_type_value) == IS_TYPE_EIS_3_0 ? IS_TYPE_EIS_3_0 : IS_TYPE_NONE;
1372 if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
1373 mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
1374 isType = IS_TYPE_NONE;
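        // The IS type is passed to getStreamDefaultFormat() below because the
        // default stream format selection can differ when EIS 3.0 is in use.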
1375
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001376 bool isVideo = IS_USAGE_VIDEO(newStream->usage);
1377 bool isPreview = IS_USAGE_PREVIEW(newStream->usage);
1378 bool isZSL = IS_USAGE_ZSL(newStream->usage);
1379 bool forcePreviewUBWC = true;
1380 if (isVideo && !QCameraCommon::isVideoUBWCEnabled()) {
1381 forcePreviewUBWC = false;
1382 }
1383 cam_format_t videoFormat = QCamera3Channel::getStreamDefaultFormat(
Jason Leec4cf5032017-05-24 18:31:41 -07001384 CAM_STREAM_TYPE_VIDEO, newStream->width, newStream->height, forcePreviewUBWC, isType);
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001385 cam_format_t previewFormat = QCamera3Channel::getStreamDefaultFormat(
Jason Leec4cf5032017-05-24 18:31:41 -07001386 CAM_STREAM_TYPE_PREVIEW, newStream->width, newStream->height, forcePreviewUBWC, isType);
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001387 cam_format_t zslFormat = QCamera3Channel::getStreamDefaultFormat(
Jason Leec4cf5032017-05-24 18:31:41 -07001388 CAM_STREAM_TYPE_SNAPSHOT, newStream->width, newStream->height, forcePreviewUBWC, isType);
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001389
1390 // Color space for this camera device is guaranteed to be ITU_R_601_FR.
1391 // So color spaces will always match.
1392
1393 // Check whether underlying formats of shared streams match.
1394 if (isVideo && isPreview && videoFormat != previewFormat) {
1395 LOGE("Combined video and preview usage flag is not supported");
1396 return -EINVAL;
1397 }
1398 if (isPreview && isZSL && previewFormat != zslFormat) {
1399 LOGE("Combined preview and zsl usage flag is not supported");
1400 return -EINVAL;
1401 }
1402 if (isVideo && isZSL && videoFormat != zslFormat) {
1403 LOGE("Combined video and zsl usage flag is not supported");
1404 return -EINVAL;
1405 }
1406 }
1407 return NO_ERROR;
1408}
1409
1410/*===========================================================================
1411 * FUNCTION : validateUsageFlagsForEis
1412 *
1413 * DESCRIPTION: Check if the configuration usage flags conflict with Eis
1414 *
1415 * PARAMETERS :
1416 * @stream_list : streams to be configured
1417 *
1418 * RETURN :
1419 * NO_ERROR if the usage flags are supported
1420 * error code if usage flags are not supported
1421 *
1422 *==========================================================================*/
1423int QCamera3HardwareInterface::validateUsageFlagsForEis(
1424 const camera3_stream_configuration_t* streamList)
1425{
1426 for (size_t j = 0; j < streamList->num_streams; j++) {
1427 const camera3_stream_t *newStream = streamList->streams[j];
1428
1429 bool isVideo = IS_USAGE_VIDEO(newStream->usage);
1430 bool isPreview = IS_USAGE_PREVIEW(newStream->usage);
1431
 1432        // Because EIS is "hard-coded" for certain use cases, and the current
1433 // implementation doesn't support shared preview and video on the same
1434 // stream, return failure if EIS is forced on.
1435 if (isPreview && isVideo && m_bEisEnable && m_bEisSupportedSize) {
1436 LOGE("Combined video and preview usage flag is not supported due to EIS");
1437 return -EINVAL;
1438 }
1439 }
1440 return NO_ERROR;
1441}
1442
Thierry Strudel3d639192016-09-09 11:52:26 -07001443/*==============================================================================
1444 * FUNCTION : isSupportChannelNeeded
1445 *
 1446 * DESCRIPTION: Simple heuristic to determine if a support channel is needed
1447 *
1448 * PARAMETERS :
1449 * @stream_list : streams to be configured
1450 * @stream_config_info : the config info for streams to be configured
1451 *
 1452 * RETURN : Boolean true/false decision
1453 *
1454 *==========================================================================*/
1455bool QCamera3HardwareInterface::isSupportChannelNeeded(
1456 camera3_stream_configuration_t *streamList,
1457 cam_stream_size_info_t stream_config_info)
1458{
1459 uint32_t i;
1460 bool pprocRequested = false;
 1461    /* Check for conditions where the PProc pipeline does not have any streams */
1462 for (i = 0; i < stream_config_info.num_streams; i++) {
1463 if (stream_config_info.type[i] != CAM_STREAM_TYPE_ANALYSIS &&
1464 stream_config_info.postprocess_mask[i] != CAM_QCOM_FEATURE_NONE) {
1465 pprocRequested = true;
1466 break;
1467 }
1468 }
1469
1470 if (pprocRequested == false )
1471 return true;
1472
1473 /* Dummy stream needed if only raw or jpeg streams present */
1474 for (i = 0; i < streamList->num_streams; i++) {
1475 switch(streamList->streams[i]->format) {
1476 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1477 case HAL_PIXEL_FORMAT_RAW10:
1478 case HAL_PIXEL_FORMAT_RAW16:
1479 case HAL_PIXEL_FORMAT_BLOB:
1480 break;
1481 default:
1482 return false;
1483 }
1484 }
1485 return true;
1486}
1487
1488/*==============================================================================
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001489 * FUNCTION   : getSensorModeInfo
Thierry Strudel3d639192016-09-09 11:52:26 -07001490 *
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001491 * DESCRIPTION: Get sensor mode information based on current stream configuration
Thierry Strudel3d639192016-09-09 11:52:26 -07001492 *
1493 * PARAMETERS :
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001494 *   @sensorModeInfo : sensor mode information (output)
Thierry Strudel3d639192016-09-09 11:52:26 -07001495 *
1496 * RETURN : int32_t type of status
1497 * NO_ERROR -- success
 1498 *              non-zero failure code
1499 *
1500 *==========================================================================*/
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001501int32_t QCamera3HardwareInterface::getSensorModeInfo(cam_sensor_mode_info_t &sensorModeInfo)
Thierry Strudel3d639192016-09-09 11:52:26 -07001502{
1503 int32_t rc = NO_ERROR;
1504
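    // Find the largest width and height across all configured streams; this is
    // sent down as CAM_INTF_PARM_MAX_DIMENSION before querying the sensor mode.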
1505 cam_dimension_t max_dim = {0, 0};
1506 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
1507 if (mStreamConfigInfo.stream_sizes[i].width > max_dim.width)
1508 max_dim.width = mStreamConfigInfo.stream_sizes[i].width;
1509 if (mStreamConfigInfo.stream_sizes[i].height > max_dim.height)
1510 max_dim.height = mStreamConfigInfo.stream_sizes[i].height;
1511 }
1512
1513 clear_metadata_buffer(mParameters);
1514
1515 rc = ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_MAX_DIMENSION,
1516 max_dim);
1517 if (rc != NO_ERROR) {
1518 LOGE("Failed to update table for CAM_INTF_PARM_MAX_DIMENSION");
1519 return rc;
1520 }
1521
1522 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
1523 if (rc != NO_ERROR) {
1524 LOGE("Failed to set CAM_INTF_PARM_MAX_DIMENSION");
1525 return rc;
1526 }
1527
1528 clear_metadata_buffer(mParameters);
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001529 ADD_GET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_SENSOR_MODE_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -07001530
1531 rc = mCameraHandle->ops->get_parms(mCameraHandle->camera_handle,
1532 mParameters);
1533 if (rc != NO_ERROR) {
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001534 LOGE("Failed to get CAM_INTF_PARM_SENSOR_MODE_INFO");
Thierry Strudel3d639192016-09-09 11:52:26 -07001535 return rc;
1536 }
1537
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001538 READ_PARAM_ENTRY(mParameters, CAM_INTF_PARM_SENSOR_MODE_INFO, sensorModeInfo);
Chien-Yu Chenee335912017-02-09 17:53:20 -08001539 LOGH("%s: active array size %dx%d, pixel array size %dx%d, output pixel clock %u, "
1540 "raw bits: %d", __FUNCTION__, sensorModeInfo.active_array_size.width,
1541 sensorModeInfo.active_array_size.height, sensorModeInfo.pixel_array_size.width,
1542 sensorModeInfo.pixel_array_size.height, sensorModeInfo.op_pixel_clk,
1543 sensorModeInfo.num_raw_bits);
Thierry Strudel3d639192016-09-09 11:52:26 -07001544
1545 return rc;
1546}
1547
1548/*==============================================================================
Chien-Yu Chen605c3872017-06-14 11:09:23 -07001549 * FUNCTION : getCurrentSensorModeInfo
1550 *
1551 * DESCRIPTION: Get sensor mode information that is currently selected.
1552 *
1553 * PARAMETERS :
1554 * @sensorModeInfo : sensor mode information (output)
1555 *
1556 * RETURN : int32_t type of status
1557 * NO_ERROR -- success
 1558 *              non-zero failure code
1559 *
1560 *==========================================================================*/
1561int32_t QCamera3HardwareInterface::getCurrentSensorModeInfo(cam_sensor_mode_info_t &sensorModeInfo)
1562{
1563 int32_t rc = NO_ERROR;
1564
1565 clear_metadata_buffer(mParameters);
1566 ADD_GET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_CURRENT_SENSOR_MODE_INFO);
1567
1568 rc = mCameraHandle->ops->get_parms(mCameraHandle->camera_handle,
1569 mParameters);
1570 if (rc != NO_ERROR) {
 1571        LOGE("Failed to get CAM_INTF_PARM_CURRENT_SENSOR_MODE_INFO");
1572 return rc;
1573 }
1574
1575 READ_PARAM_ENTRY(mParameters, CAM_INTF_PARM_CURRENT_SENSOR_MODE_INFO, sensorModeInfo);
1576 LOGH("%s: active array size %dx%d, pixel array size %dx%d, output pixel clock %u, "
1577 "raw bits: %d", __FUNCTION__, sensorModeInfo.active_array_size.width,
1578 sensorModeInfo.active_array_size.height, sensorModeInfo.pixel_array_size.width,
1579 sensorModeInfo.pixel_array_size.height, sensorModeInfo.op_pixel_clk,
1580 sensorModeInfo.num_raw_bits);
1581
1582 return rc;
1583}
1584
1585/*==============================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07001586 * FUNCTION : addToPPFeatureMask
1587 *
1588 * DESCRIPTION: add additional features to pp feature mask based on
1589 * stream type and usecase
1590 *
1591 * PARAMETERS :
1592 * @stream_format : stream type for feature mask
1593 * @stream_idx : stream idx within postprocess_mask list to change
1594 *
1595 * RETURN : NULL
1596 *
1597 *==========================================================================*/
1598void QCamera3HardwareInterface::addToPPFeatureMask(int stream_format,
1599 uint32_t stream_idx)
1600{
1601 char feature_mask_value[PROPERTY_VALUE_MAX];
1602 cam_feature_mask_t feature_mask;
1603 int args_converted;
1604 int property_len;
1605
1606 /* Get feature mask from property */
Thierry Strudel269c81a2016-10-12 12:13:59 -07001607#ifdef _LE_CAMERA_
1608 char swtnr_feature_mask_value[PROPERTY_VALUE_MAX];
1609 snprintf(swtnr_feature_mask_value, PROPERTY_VALUE_MAX, "%lld", CAM_QTI_FEATURE_SW_TNR);
1610 property_len = property_get("persist.camera.hal3.feature",
1611 feature_mask_value, swtnr_feature_mask_value);
1612#else
Thierry Strudel3d639192016-09-09 11:52:26 -07001613 property_len = property_get("persist.camera.hal3.feature",
1614 feature_mask_value, "0");
Thierry Strudel269c81a2016-10-12 12:13:59 -07001615#endif
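    // The property value may be given either in hex with a "0x" prefix
    // (e.g. a hypothetical "0x100") or as a plain decimal number.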
Thierry Strudel3d639192016-09-09 11:52:26 -07001616 if ((property_len > 2) && (feature_mask_value[0] == '0') &&
1617 (feature_mask_value[1] == 'x')) {
1618 args_converted = sscanf(feature_mask_value, "0x%llx", &feature_mask);
1619 } else {
1620 args_converted = sscanf(feature_mask_value, "%lld", &feature_mask);
1621 }
1622 if (1 != args_converted) {
1623 feature_mask = 0;
1624 LOGE("Wrong feature mask %s", feature_mask_value);
1625 return;
1626 }
1627
1628 switch (stream_format) {
1629 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED: {
1630 /* Add LLVD to pp feature mask only if video hint is enabled */
1631 if ((m_bIsVideo) && (feature_mask & CAM_QTI_FEATURE_SW_TNR)) {
1632 mStreamConfigInfo.postprocess_mask[stream_idx]
1633 |= CAM_QTI_FEATURE_SW_TNR;
1634 LOGH("Added SW TNR to pp feature mask");
1635 } else if ((m_bIsVideo) && (feature_mask & CAM_QCOM_FEATURE_LLVD)) {
1636 mStreamConfigInfo.postprocess_mask[stream_idx]
1637 |= CAM_QCOM_FEATURE_LLVD;
1638 LOGH("Added LLVD SeeMore to pp feature mask");
1639 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001640 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
1641 CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) {
1642 mStreamConfigInfo.postprocess_mask[stream_idx] |= CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
1643 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08001644 if ((m_bIsVideo) && (gCamCapability[mCameraId]->qcom_supported_feature_mask &
1645 CAM_QTI_FEATURE_BINNING_CORRECTION)) {
1646 mStreamConfigInfo.postprocess_mask[stream_idx] |=
1647 CAM_QTI_FEATURE_BINNING_CORRECTION;
1648 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001649 break;
1650 }
1651 default:
1652 break;
1653 }
1654 LOGD("PP feature mask %llx",
1655 mStreamConfigInfo.postprocess_mask[stream_idx]);
1656}
1657
1658/*==============================================================================
1659 * FUNCTION : updateFpsInPreviewBuffer
1660 *
1661 * DESCRIPTION: update FPS information in preview buffer.
1662 *
1663 * PARAMETERS :
1664 * @metadata : pointer to metadata buffer
1665 * @frame_number: frame_number to look for in pending buffer list
1666 *
1667 * RETURN : None
1668 *
1669 *==========================================================================*/
1670void QCamera3HardwareInterface::updateFpsInPreviewBuffer(metadata_buffer_t *metadata,
1671 uint32_t frame_number)
1672{
1673 // Mark all pending buffers for this particular request
1674 // with corresponding framerate information
1675 for (List<PendingBuffersInRequest>::iterator req =
1676 mPendingBuffersMap.mPendingBuffersInRequest.begin();
1677 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
1678 for(List<PendingBufferInfo>::iterator j =
1679 req->mPendingBufferList.begin();
1680 j != req->mPendingBufferList.end(); j++) {
1681 QCamera3Channel *channel = (QCamera3Channel *)j->stream->priv;
1682 if ((req->frame_number == frame_number) &&
1683 (channel->getStreamTypeMask() &
1684 (1U << CAM_STREAM_TYPE_PREVIEW))) {
1685 IF_META_AVAILABLE(cam_fps_range_t, float_range,
1686 CAM_INTF_PARM_FPS_RANGE, metadata) {
1687 typeof (MetaData_t::refreshrate) cameraFps = float_range->max_fps;
1688 struct private_handle_t *priv_handle =
1689 (struct private_handle_t *)(*(j->buffer));
1690 setMetaData(priv_handle, UPDATE_REFRESH_RATE, &cameraFps);
1691 }
1692 }
1693 }
1694 }
1695}
1696
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07001697/*==============================================================================
1698 * FUNCTION : updateTimeStampInPendingBuffers
1699 *
1700 * DESCRIPTION: update timestamp in display metadata for all pending buffers
1701 * of a frame number
1702 *
1703 * PARAMETERS :
1704 * @frame_number: frame_number. Timestamp will be set on pending buffers of this frame number
1705 * @timestamp : timestamp to be set
1706 *
1707 * RETURN : None
1708 *
1709 *==========================================================================*/
1710void QCamera3HardwareInterface::updateTimeStampInPendingBuffers(
1711 uint32_t frameNumber, nsecs_t timestamp)
1712{
1713 for (auto req = mPendingBuffersMap.mPendingBuffersInRequest.begin();
1714 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
Binhao Lin09245482017-08-31 18:25:29 -07001715 // WAR: save the av_timestamp to the next frame
1716 if(req->frame_number == frameNumber + 1) {
1717 req->av_timestamp = timestamp;
1718 }
1719
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07001720 if (req->frame_number != frameNumber)
1721 continue;
1722
1723 for (auto k = req->mPendingBufferList.begin();
1724 k != req->mPendingBufferList.end(); k++ ) {
Binhao Lin09245482017-08-31 18:25:29 -07001725 // WAR: update timestamp when it's not VT usecase
1726 QCamera3Channel *channel = (QCamera3Channel *)k->stream->priv;
1727 if (!((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask() &&
1728 m_bAVTimerEnabled)) {
1729 struct private_handle_t *priv_handle =
1730 (struct private_handle_t *) (*(k->buffer));
1731 setMetaData(priv_handle, SET_VT_TIMESTAMP, &timestamp);
1732 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07001733 }
1734 }
1735 return;
1736}
1737
Thierry Strudel3d639192016-09-09 11:52:26 -07001738/*===========================================================================
1739 * FUNCTION : configureStreams
1740 *
1741 * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input
1742 * and output streams.
1743 *
1744 * PARAMETERS :
1745 * @stream_list : streams to be configured
1746 *
1747 * RETURN :
1748 *
1749 *==========================================================================*/
1750int QCamera3HardwareInterface::configureStreams(
1751 camera3_stream_configuration_t *streamList)
1752{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001753 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CFG_STRMS);
Thierry Strudel3d639192016-09-09 11:52:26 -07001754 int rc = 0;
1755
1756 // Acquire perfLock before configure streams
Thierry Strudelc2ee3302016-11-17 12:33:12 -08001757 mPerfLockMgr.acquirePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07001758 rc = configureStreamsPerfLocked(streamList);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08001759 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07001760
1761 return rc;
1762}
1763
1764/*===========================================================================
1765 * FUNCTION : configureStreamsPerfLocked
1766 *
1767 * DESCRIPTION: configureStreams while perfLock is held.
1768 *
1769 * PARAMETERS :
1770 * @stream_list : streams to be configured
1771 *
1772 * RETURN : int32_t type of status
1773 * NO_ERROR -- success
 1774 *              non-zero failure code
1775 *==========================================================================*/
1776int QCamera3HardwareInterface::configureStreamsPerfLocked(
1777 camera3_stream_configuration_t *streamList)
1778{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001779 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CFG_STRMS_PERF_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07001780 int rc = 0;
1781
1782 // Sanity check stream_list
1783 if (streamList == NULL) {
1784 LOGE("NULL stream configuration");
1785 return BAD_VALUE;
1786 }
1787 if (streamList->streams == NULL) {
1788 LOGE("NULL stream list");
1789 return BAD_VALUE;
1790 }
1791
1792 if (streamList->num_streams < 1) {
1793 LOGE("Bad number of streams requested: %d",
1794 streamList->num_streams);
1795 return BAD_VALUE;
1796 }
1797
1798 if (streamList->num_streams >= MAX_NUM_STREAMS) {
1799 LOGE("Maximum number of streams %d exceeded: %d",
1800 MAX_NUM_STREAMS, streamList->num_streams);
1801 return BAD_VALUE;
1802 }
1803
Jason Leec4cf5032017-05-24 18:31:41 -07001804 mOpMode = streamList->operation_mode;
1805 LOGD("mOpMode: %d", mOpMode);
1806
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001807 rc = validateUsageFlags(streamList);
1808 if (rc != NO_ERROR) {
1809 return rc;
1810 }
1811
Chien-Yu Chen11c8edc2017-09-11 20:54:24 -07001812    // Disable HDR+ if it's enabled.
Chien-Yu Chen153c5172017-09-08 11:33:19 -07001813 {
1814 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
1815 finishHdrPlusClientOpeningLocked(l);
1816 disableHdrPlusModeLocked();
1817 }
1818
Thierry Strudel3d639192016-09-09 11:52:26 -07001819    /* First invalidate all the streams in mStreamInfo;
 1820     * if they appear again, they will be validated */
1821 for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
1822 it != mStreamInfo.end(); it++) {
1823 QCamera3ProcessingChannel *channel = (QCamera3ProcessingChannel*)(*it)->stream->priv;
1824 if (channel) {
1825 channel->stop();
1826 }
1827 (*it)->status = INVALID;
1828 }
1829
1830 if (mRawDumpChannel) {
1831 mRawDumpChannel->stop();
1832 delete mRawDumpChannel;
1833 mRawDumpChannel = NULL;
1834 }
1835
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001836 if (mHdrPlusRawSrcChannel) {
1837 mHdrPlusRawSrcChannel->stop();
1838 delete mHdrPlusRawSrcChannel;
1839 mHdrPlusRawSrcChannel = NULL;
1840 }
1841
Thierry Strudel3d639192016-09-09 11:52:26 -07001842 if (mSupportChannel)
1843 mSupportChannel->stop();
1844
1845 if (mAnalysisChannel) {
1846 mAnalysisChannel->stop();
1847 }
1848 if (mMetadataChannel) {
 1849        /* If mStreamInfo is not empty, there is a metadata stream */
1850 mMetadataChannel->stop();
1851 }
1852 if (mChannelHandle) {
Chien-Yu Chen153c5172017-09-08 11:33:19 -07001853 stopChannelLocked(/*stop_immediately*/false);
Thierry Strudel3d639192016-09-09 11:52:26 -07001854 }
1855
1856 pthread_mutex_lock(&mMutex);
1857
Chien-Yu Chendeaebad2017-06-30 11:46:34 -07001858 mPictureChannel = NULL;
1859
Thierry Strudel3d639192016-09-09 11:52:26 -07001860 // Check state
1861 switch (mState) {
1862 case INITIALIZED:
1863 case CONFIGURED:
1864 case STARTED:
1865 /* valid state */
1866 break;
1867 default:
1868 LOGE("Invalid state %d", mState);
1869 pthread_mutex_unlock(&mMutex);
1870 return -ENODEV;
1871 }
1872
1873 /* Check whether we have video stream */
1874 m_bIs4KVideo = false;
1875 m_bIsVideo = false;
1876 m_bEisSupportedSize = false;
1877 m_bTnrEnabled = false;
Mansoor Aftab93a66e52017-01-26 14:58:25 -08001878 m_bVideoHdrEnabled = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07001879 bool isZsl = false;
Emilian Peev7650c122017-01-19 08:24:33 -08001880 bool depthPresent = false;
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001881 bool isPreview = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07001882 uint32_t videoWidth = 0U;
1883 uint32_t videoHeight = 0U;
1884 size_t rawStreamCnt = 0;
1885 size_t stallStreamCnt = 0;
1886 size_t processedStreamCnt = 0;
1887 // Number of streams on ISP encoder path
1888 size_t numStreamsOnEncoder = 0;
1889 size_t numYuv888OnEncoder = 0;
1890 bool bYuv888OverrideJpeg = false;
1891 cam_dimension_t largeYuv888Size = {0, 0};
1892 cam_dimension_t maxViewfinderSize = {0, 0};
1893 bool bJpegExceeds4K = false;
1894 bool bJpegOnEncoder = false;
1895 bool bUseCommonFeatureMask = false;
1896 cam_feature_mask_t commonFeatureMask = 0;
1897 bool bSmallJpegSize = false;
1898 uint32_t width_ratio;
1899 uint32_t height_ratio;
1900 maxViewfinderSize = gCamCapability[mCameraId]->max_viewfinder_size;
1901 camera3_stream_t *inputStream = NULL;
1902 bool isJpeg = false;
1903 cam_dimension_t jpegSize = {0, 0};
Thierry Strudel9ec39c62016-12-28 11:30:05 -08001904 cam_dimension_t previewSize = {0, 0};
Emilian Peev0f3c3162017-03-15 12:57:46 +00001905 size_t pdStatCount = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07001906
1907 cam_padding_info_t padding_info = gCamCapability[mCameraId]->padding_info;
1908
1909 /*EIS configuration*/
Thierry Strudel3d639192016-09-09 11:52:26 -07001910 uint8_t eis_prop_set;
1911 uint32_t maxEisWidth = 0;
1912 uint32_t maxEisHeight = 0;
1913
Thierry Strudel295a0ca2016-11-03 18:38:47 -07001914 // Initialize all instant AEC related variables
1915 mInstantAEC = false;
1916 mResetInstantAEC = false;
1917 mInstantAECSettledFrameNumber = 0;
1918 mAecSkipDisplayFrameBound = 0;
1919 mInstantAecFrameIdxCount = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -08001920 mCurrFeatureState = 0;
1921 mStreamConfig = true;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07001922
Binhao Lin09245482017-08-31 18:25:29 -07001923 m_bAVTimerEnabled = false;
1924
Thierry Strudel3d639192016-09-09 11:52:26 -07001925 memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
1926
1927 size_t count = IS_TYPE_MAX;
1928 count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
1929 for (size_t i = 0; i < count; i++) {
1930 if ((gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001931 (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0)) {
1932 m_bEisSupported = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07001933 break;
1934 }
1935 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001936
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001937 if (m_bEisSupported) {
Thierry Strudel3d639192016-09-09 11:52:26 -07001938 maxEisWidth = MAX_EIS_WIDTH;
1939 maxEisHeight = MAX_EIS_HEIGHT;
1940 }
1941
1942 /* EIS setprop control */
1943 char eis_prop[PROPERTY_VALUE_MAX];
1944 memset(eis_prop, 0, sizeof(eis_prop));
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001945 property_get("persist.camera.eis.enable", eis_prop, "1");
Thierry Strudel3d639192016-09-09 11:52:26 -07001946 eis_prop_set = (uint8_t)atoi(eis_prop);
1947
Mansoor Aftabb9370df2017-03-15 17:09:34 -07001948 m_bEisEnable = eis_prop_set && m_bEisSupported &&
Thierry Strudel3d639192016-09-09 11:52:26 -07001949 (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE);
1950
Mansoor Aftabb9370df2017-03-15 17:09:34 -07001951 LOGD("m_bEisEnable: %d, eis_prop_set: %d, m_bEisSupported: %d",
1952 m_bEisEnable, eis_prop_set, m_bEisSupported);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001953
Thierry Strudel3d639192016-09-09 11:52:26 -07001954 /* stream configurations */
1955 for (size_t i = 0; i < streamList->num_streams; i++) {
1956 camera3_stream_t *newStream = streamList->streams[i];
1957 LOGI("stream[%d] type = %d, format = %d, width = %d, "
1958 "height = %d, rotation = %d, usage = 0x%x",
1959 i, newStream->stream_type, newStream->format,
1960 newStream->width, newStream->height, newStream->rotation,
1961 newStream->usage);
1962 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1963 newStream->stream_type == CAMERA3_STREAM_INPUT){
1964 isZsl = true;
1965 }
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001966 if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
1967 IS_USAGE_PREVIEW(newStream->usage)) {
1968 isPreview = true;
1969 }
1970
Thierry Strudel3d639192016-09-09 11:52:26 -07001971 if (newStream->stream_type == CAMERA3_STREAM_INPUT){
1972 inputStream = newStream;
1973 }
1974
Emilian Peev7650c122017-01-19 08:24:33 -08001975 if ((newStream->format == HAL_PIXEL_FORMAT_BLOB) &&
1976 (newStream->data_space != HAL_DATASPACE_DEPTH)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07001977 isJpeg = true;
1978 jpegSize.width = newStream->width;
1979 jpegSize.height = newStream->height;
1980 if (newStream->width > VIDEO_4K_WIDTH ||
1981 newStream->height > VIDEO_4K_HEIGHT)
1982 bJpegExceeds4K = true;
1983 }
1984
1985 if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
1986 (newStream->usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER)) {
1987 m_bIsVideo = true;
Thierry Strudel2896d122017-02-23 19:18:03 -08001988 // In HAL3 we can have multiple different video streams.
1989 // The variables video width and height are used below as
1990 // dimensions of the biggest of them
1991 if (videoWidth < newStream->width ||
1992 videoHeight < newStream->height) {
1993 videoWidth = newStream->width;
1994 videoHeight = newStream->height;
1995 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001996 if ((VIDEO_4K_WIDTH <= newStream->width) &&
1997 (VIDEO_4K_HEIGHT <= newStream->height)) {
1998 m_bIs4KVideo = true;
1999 }
2000 m_bEisSupportedSize = (newStream->width <= maxEisWidth) &&
2001 (newStream->height <= maxEisHeight);
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002002
Thierry Strudel3d639192016-09-09 11:52:26 -07002003 }
2004 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
2005 newStream->stream_type == CAMERA3_STREAM_OUTPUT) {
2006 switch (newStream->format) {
2007 case HAL_PIXEL_FORMAT_BLOB:
Emilian Peev7650c122017-01-19 08:24:33 -08002008 if (newStream->data_space == HAL_DATASPACE_DEPTH) {
2009 depthPresent = true;
2010 break;
2011 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002012 stallStreamCnt++;
2013 if (isOnEncoder(maxViewfinderSize, newStream->width,
2014 newStream->height)) {
2015 numStreamsOnEncoder++;
2016 bJpegOnEncoder = true;
2017 }
2018 width_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.width,
2019 newStream->width);
2020 height_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.height,
 2021                        newStream->height);
2022 FATAL_IF(gCamCapability[mCameraId]->max_downscale_factor == 0,
2023 "FATAL: max_downscale_factor cannot be zero and so assert");
2024 if ( (width_ratio > gCamCapability[mCameraId]->max_downscale_factor) ||
2025 (height_ratio > gCamCapability[mCameraId]->max_downscale_factor)) {
2026 LOGH("Setting small jpeg size flag to true");
2027 bSmallJpegSize = true;
2028 }
2029 break;
2030 case HAL_PIXEL_FORMAT_RAW10:
2031 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2032 case HAL_PIXEL_FORMAT_RAW16:
2033 rawStreamCnt++;
Emilian Peev0f3c3162017-03-15 12:57:46 +00002034 if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
2035 (HAL_PIXEL_FORMAT_RAW16 == newStream->format)) {
2036 pdStatCount++;
2037 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002038 break;
2039 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
2040 processedStreamCnt++;
2041 if (isOnEncoder(maxViewfinderSize, newStream->width,
2042 newStream->height)) {
2043 if (newStream->stream_type != CAMERA3_STREAM_BIDIRECTIONAL &&
2044 !IS_USAGE_ZSL(newStream->usage)) {
2045 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2046 }
2047 numStreamsOnEncoder++;
2048 }
2049 break;
2050 case HAL_PIXEL_FORMAT_YCbCr_420_888:
2051 processedStreamCnt++;
2052 if (isOnEncoder(maxViewfinderSize, newStream->width,
2053 newStream->height)) {
2054 // If Yuv888 size is not greater than 4K, set feature mask
 2055                    // to SUPERSET so that it supports concurrent requests on
2056 // YUV and JPEG.
2057 if (newStream->width <= VIDEO_4K_WIDTH &&
2058 newStream->height <= VIDEO_4K_HEIGHT) {
2059 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2060 }
2061 numStreamsOnEncoder++;
2062 numYuv888OnEncoder++;
2063 largeYuv888Size.width = newStream->width;
2064 largeYuv888Size.height = newStream->height;
2065 }
2066 break;
2067 default:
2068 processedStreamCnt++;
2069 if (isOnEncoder(maxViewfinderSize, newStream->width,
2070 newStream->height)) {
2071 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2072 numStreamsOnEncoder++;
2073 }
2074 break;
2075 }
2076
2077 }
2078 }
2079
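    // EIS is only applied to back-camera video use cases; disable it for
    // front/front-aux cameras or when no video stream is configured.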
2080 if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
2081 gCamCapability[mCameraId]->position == CAM_POSITION_FRONT_AUX ||
2082 !m_bIsVideo) {
2083 m_bEisEnable = false;
2084 }
2085
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002086 if (validateUsageFlagsForEis(streamList) != NO_ERROR) {
2087 pthread_mutex_unlock(&mMutex);
2088 return -EINVAL;
2089 }
2090
Thierry Strudel54dc9782017-02-15 12:12:10 -08002091 uint8_t forceEnableTnr = 0;
2092 char tnr_prop[PROPERTY_VALUE_MAX];
2093 memset(tnr_prop, 0, sizeof(tnr_prop));
2094 property_get("debug.camera.tnr.forceenable", tnr_prop, "0");
2095 forceEnableTnr = (uint8_t)atoi(tnr_prop);
2096
Thierry Strudel3d639192016-09-09 11:52:26 -07002097 /* Logic to enable/disable TNR based on specific config size/etc.*/
2098 if ((m_bTnrPreview || m_bTnrVideo) && m_bIsVideo &&
Thierry Strudel3d639192016-09-09 11:52:26 -07002099 (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE))
2100 m_bTnrEnabled = true;
Thierry Strudel54dc9782017-02-15 12:12:10 -08002101 else if (forceEnableTnr)
2102 m_bTnrEnabled = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07002103
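    // Video HDR can be force-enabled via persist.camera.hdr.video; it is only
    // honored when a video stream is present and not in constrained high-speed mode.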
Mansoor Aftab93a66e52017-01-26 14:58:25 -08002104 char videoHdrProp[PROPERTY_VALUE_MAX];
2105 memset(videoHdrProp, 0, sizeof(videoHdrProp));
2106 property_get("persist.camera.hdr.video", videoHdrProp, "0");
2107 uint8_t hdr_mode_prop = (uint8_t)atoi(videoHdrProp);
2108
2109 if (hdr_mode_prop == 1 && m_bIsVideo &&
2110 mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2111 m_bVideoHdrEnabled = true;
2112 else
2113 m_bVideoHdrEnabled = false;
2114
2115
Thierry Strudel3d639192016-09-09 11:52:26 -07002116 /* Check if num_streams is sane */
2117 if (stallStreamCnt > MAX_STALLING_STREAMS ||
2118 rawStreamCnt > MAX_RAW_STREAMS ||
2119 processedStreamCnt > MAX_PROCESSED_STREAMS) {
 2120        LOGE("Invalid stream config: stall: %d, raw: %d, processed: %d",
2121 stallStreamCnt, rawStreamCnt, processedStreamCnt);
2122 pthread_mutex_unlock(&mMutex);
2123 return -EINVAL;
2124 }
2125 /* Check whether we have zsl stream or 4k video case */
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002126 if (isZsl && m_bIs4KVideo) {
2127 LOGE("Currently invalid configuration ZSL & 4K Video!");
Thierry Strudel3d639192016-09-09 11:52:26 -07002128 pthread_mutex_unlock(&mMutex);
2129 return -EINVAL;
2130 }
2131 /* Check if stream sizes are sane */
2132 if (numStreamsOnEncoder > 2) {
2133 LOGE("Number of streams on ISP encoder path exceeds limits of 2");
2134 pthread_mutex_unlock(&mMutex);
2135 return -EINVAL;
2136 } else if (1 < numStreamsOnEncoder){
2137 bUseCommonFeatureMask = true;
2138 LOGH("Multiple streams above max viewfinder size, common mask needed");
2139 }
2140
2141 /* Check if BLOB size is greater than 4k in 4k recording case */
2142 if (m_bIs4KVideo && bJpegExceeds4K) {
2143 LOGE("HAL doesn't support Blob size greater than 4k in 4k recording");
2144 pthread_mutex_unlock(&mMutex);
2145 return -EINVAL;
2146 }
2147
Emilian Peev7650c122017-01-19 08:24:33 -08002148 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2149 depthPresent) {
2150 LOGE("HAL doesn't support depth streams in HFR mode!");
2151 pthread_mutex_unlock(&mMutex);
2152 return -EINVAL;
2153 }
2154
Thierry Strudel3d639192016-09-09 11:52:26 -07002155 // When JPEG and preview streams share VFE output, CPP will not apply CAC2
2156 // on JPEG stream. So disable such configurations to ensure CAC2 is applied.
2157 // Don't fail for reprocess configurations. Also don't fail if bJpegExceeds4K
2158 // is not true. Otherwise testMandatoryOutputCombinations will fail with following
2159 // configurations:
2160 // {[PRIV, PREVIEW] [PRIV, RECORD] [JPEG, RECORD]}
2161 // {[PRIV, PREVIEW] [YUV, RECORD] [JPEG, RECORD]}
2162 // (These two configurations will not have CAC2 enabled even in HQ modes.)
2163 if (!isZsl && bJpegOnEncoder && bJpegExceeds4K && bUseCommonFeatureMask) {
2164 ALOGE("%s: Blob size greater than 4k and multiple streams are on encoder output",
2165 __func__);
2166 pthread_mutex_unlock(&mMutex);
2167 return -EINVAL;
2168 }
2169
2170 // If jpeg stream is available, and a YUV 888 stream is on Encoder path, and
2171 // the YUV stream's size is greater or equal to the JPEG size, set common
2172 // postprocess mask to NONE, so that we can take advantage of postproc bypass.
2173 if (numYuv888OnEncoder && isOnEncoder(maxViewfinderSize,
2174 jpegSize.width, jpegSize.height) &&
2175 largeYuv888Size.width > jpegSize.width &&
2176 largeYuv888Size.height > jpegSize.height) {
2177 bYuv888OverrideJpeg = true;
2178 } else if (!isJpeg && numStreamsOnEncoder > 1) {
2179 commonFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2180 }
2181
2182 LOGH("max viewfinder width %d height %d isZsl %d bUseCommonFeature %x commonFeatureMask %llx",
2183 maxViewfinderSize.width, maxViewfinderSize.height, isZsl, bUseCommonFeatureMask,
2184 commonFeatureMask);
2185 LOGH("numStreamsOnEncoder %d, processedStreamCnt %d, stallcnt %d bSmallJpegSize %d",
2186 numStreamsOnEncoder, processedStreamCnt, stallStreamCnt, bSmallJpegSize);
2187
2188 rc = validateStreamDimensions(streamList);
2189 if (rc == NO_ERROR) {
2190 rc = validateStreamRotations(streamList);
2191 }
2192 if (rc != NO_ERROR) {
2193 LOGE("Invalid stream configuration requested!");
2194 pthread_mutex_unlock(&mMutex);
2195 return rc;
2196 }
2197
Emilian Peev0f3c3162017-03-15 12:57:46 +00002198 if (1 < pdStatCount) {
2199 LOGE("HAL doesn't support multiple PD streams");
2200 pthread_mutex_unlock(&mMutex);
2201 return -EINVAL;
2202 }
2203
2204 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2205 (1 == pdStatCount)) {
2206 LOGE("HAL doesn't support PD streams in HFR mode!");
2207 pthread_mutex_unlock(&mMutex);
2208 return -EINVAL;
2209 }
2210
Thierry Strudel3d639192016-09-09 11:52:26 -07002211 camera3_stream_t *zslStream = NULL; //Only use this for size and not actual handle!
2212 for (size_t i = 0; i < streamList->num_streams; i++) {
2213 camera3_stream_t *newStream = streamList->streams[i];
2214 LOGH("newStream type = %d, stream format = %d "
2215 "stream size : %d x %d, stream rotation = %d",
2216 newStream->stream_type, newStream->format,
2217 newStream->width, newStream->height, newStream->rotation);
 2218        // If the stream is already in mStreamInfo, validate it
2219 bool stream_exists = false;
2220 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
2221 it != mStreamInfo.end(); it++) {
2222 if ((*it)->stream == newStream) {
2223 QCamera3ProcessingChannel *channel =
2224 (QCamera3ProcessingChannel*)(*it)->stream->priv;
2225 stream_exists = true;
2226 if (channel)
2227 delete channel;
2228 (*it)->status = VALID;
2229 (*it)->stream->priv = NULL;
2230 (*it)->channel = NULL;
2231 }
2232 }
2233 if (!stream_exists && newStream->stream_type != CAMERA3_STREAM_INPUT) {
2234 //new stream
2235 stream_info_t* stream_info;
2236 stream_info = (stream_info_t* )malloc(sizeof(stream_info_t));
2237 if (!stream_info) {
2238 LOGE("Could not allocate stream info");
2239 rc = -ENOMEM;
2240 pthread_mutex_unlock(&mMutex);
2241 return rc;
2242 }
2243 stream_info->stream = newStream;
2244 stream_info->status = VALID;
2245 stream_info->channel = NULL;
Chien-Yu Chen14d3e392017-07-10 18:27:05 -07002246 stream_info->id = i;
Thierry Strudel3d639192016-09-09 11:52:26 -07002247 mStreamInfo.push_back(stream_info);
2248 }
2249 /* Covers Opaque ZSL and API1 F/W ZSL */
2250 if (IS_USAGE_ZSL(newStream->usage)
2251 || newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ) {
2252 if (zslStream != NULL) {
2253 LOGE("Multiple input/reprocess streams requested!");
2254 pthread_mutex_unlock(&mMutex);
2255 return BAD_VALUE;
2256 }
2257 zslStream = newStream;
2258 }
2259 /* Covers YUV reprocess */
2260 if (inputStream != NULL) {
2261 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT
2262 && newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
2263 && inputStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
2264 && inputStream->width == newStream->width
2265 && inputStream->height == newStream->height) {
2266 if (zslStream != NULL) {
 2267                    /* This scenario indicates multiple YUV streams with the same size
 2268                     * as the input stream have been requested. Since the zsl stream handle
 2269                     * is solely used to override the size of streams that share h/w
 2270                     * streams, we just make a guess here as to which of the streams is
 2271                     * the ZSL stream. This will be refactored once we have generic logic
 2272                     * for streams sharing encoder output.
2273 */
2274 LOGH("Warning, Multiple ip/reprocess streams requested!");
2275 }
2276 zslStream = newStream;
2277 }
2278 }
2279 }
2280
2281 /* If a zsl stream is set, we know that we have configured at least one input or
2282 bidirectional stream */
2283 if (NULL != zslStream) {
2284 mInputStreamInfo.dim.width = (int32_t)zslStream->width;
2285 mInputStreamInfo.dim.height = (int32_t)zslStream->height;
2286 mInputStreamInfo.format = zslStream->format;
2287 mInputStreamInfo.usage = zslStream->usage;
2288 LOGD("Input stream configured! %d x %d, format %d, usage %d",
2289 mInputStreamInfo.dim.width,
2290 mInputStreamInfo.dim.height,
2291 mInputStreamInfo.format, mInputStreamInfo.usage);
2292 }
2293
2294 cleanAndSortStreamInfo();
2295 if (mMetadataChannel) {
2296 delete mMetadataChannel;
2297 mMetadataChannel = NULL;
2298 }
2299 if (mSupportChannel) {
2300 delete mSupportChannel;
2301 mSupportChannel = NULL;
2302 }
2303
2304 if (mAnalysisChannel) {
2305 delete mAnalysisChannel;
2306 mAnalysisChannel = NULL;
2307 }
2308
2309 if (mDummyBatchChannel) {
2310 delete mDummyBatchChannel;
2311 mDummyBatchChannel = NULL;
2312 }
2313
Emilian Peev7650c122017-01-19 08:24:33 -08002314 if (mDepthChannel) {
2315 mDepthChannel = NULL;
2316 }
Emilian Peev656e4fa2017-06-02 16:47:04 +01002317 mDepthCloudMode = CAM_PD_DATA_SKIP;
Emilian Peev7650c122017-01-19 08:24:33 -08002318
Chien-Yu Chen3f303522017-05-19 15:21:45 -07002319 mShutterDispatcher.clear();
2320 mOutputBufferDispatcher.clear();
2321
Thierry Strudel2896d122017-02-23 19:18:03 -08002322 char is_type_value[PROPERTY_VALUE_MAX];
2323 property_get("persist.camera.is_type", is_type_value, "4");
2324 m_bEis3PropertyEnabled = (atoi(is_type_value) == IS_TYPE_EIS_3_0);
2325
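    // persist.camera.gzoom.at is a bit mask: bit 0 enables Google zoom on the
    // video stream, bit 1 on preview streams (e.g. a hypothetical value of "3"
    // enables both); it applies to the back camera only, and persist.camera.gzoom.4k
    // additionally gates zoom for 4K video.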
Binhao Line406f062017-05-03 14:39:44 -07002326 char property_value[PROPERTY_VALUE_MAX];
2327 property_get("persist.camera.gzoom.at", property_value, "0");
2328 int goog_zoom_at = atoi(property_value);
Jason Leec4cf5032017-05-24 18:31:41 -07002329 bool is_goog_zoom_video_enabled = ((goog_zoom_at & 1) > 0) &&
2330 gCamCapability[mCameraId]->position == CAM_POSITION_BACK;
2331 bool is_goog_zoom_preview_enabled = ((goog_zoom_at & 2) > 0) &&
2332 gCamCapability[mCameraId]->position == CAM_POSITION_BACK;
Binhao Line406f062017-05-03 14:39:44 -07002333
2334 property_get("persist.camera.gzoom.4k", property_value, "0");
2335 bool is_goog_zoom_4k_enabled = (atoi(property_value) > 0);
2336
Thierry Strudel3d639192016-09-09 11:52:26 -07002337 //Create metadata channel and initialize it
2338 cam_feature_mask_t metadataFeatureMask = CAM_QCOM_FEATURE_NONE;
2339 setPAAFSupport(metadataFeatureMask, CAM_STREAM_TYPE_METADATA,
2340 gCamCapability[mCameraId]->color_arrangement);
2341 mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
2342 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002343 setBufferErrorStatus, &padding_info, metadataFeatureMask, this);
Thierry Strudel3d639192016-09-09 11:52:26 -07002344 if (mMetadataChannel == NULL) {
2345 LOGE("failed to allocate metadata channel");
2346 rc = -ENOMEM;
2347 pthread_mutex_unlock(&mMutex);
2348 return rc;
2349 }
Emilian Peev662c05e2017-05-16 10:00:04 +01002350 mMetadataChannel->enableDepthData(depthPresent);
Thierry Strudel3d639192016-09-09 11:52:26 -07002351 rc = mMetadataChannel->initialize(IS_TYPE_NONE);
2352 if (rc < 0) {
2353 LOGE("metadata channel initialization failed");
2354 delete mMetadataChannel;
2355 mMetadataChannel = NULL;
2356 pthread_mutex_unlock(&mMutex);
2357 return rc;
2358 }
2359
Thierry Strudel2896d122017-02-23 19:18:03 -08002360 cam_feature_mask_t zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002361 bool isRawStreamRequested = false;
Thierry Strudel2896d122017-02-23 19:18:03 -08002362 bool onlyRaw = true;
Binhao Lincdb362a2017-04-20 13:31:54 -07002363    // Keep track of preview/video stream indices.
2364 // There could be more than one preview streams, but only one video stream.
2365 int32_t video_stream_idx = -1;
2366 int32_t preview_stream_idx[streamList->num_streams];
2367 size_t preview_stream_cnt = 0;
Jason Leea52b77e2017-06-27 16:16:17 -07002368 bool previewTnr[streamList->num_streams];
2369 memset(previewTnr, 0, sizeof(bool) * streamList->num_streams);
2370 bool isFront = gCamCapability[mCameraId]->position == CAM_POSITION_FRONT;
2371 // Loop through once to determine preview TNR conditions before creating channels.
2372 for (size_t i = 0; i < streamList->num_streams; i++) {
2373 camera3_stream_t *newStream = streamList->streams[i];
2374 uint32_t stream_usage = newStream->usage;
2375 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT &&
2376 newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {
2377 if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER)
2378 video_stream_idx = (int32_t)i;
2379 else
2380 preview_stream_idx[preview_stream_cnt++] = (int32_t)i;
2381 }
2382 }
2383 // By default, preview stream TNR is disabled.
2384 // Enable TNR to the preview stream if all conditions below are satisfied:
2385 // 1. preview resolution == video resolution.
2386 // 2. video stream TNR is enabled.
2387 // 3. EIS2.0 OR is front camera (which wouldn't use EIS3 even if it's set)
2388 for (size_t i = 0; i < preview_stream_cnt && video_stream_idx != -1; i++) {
2389 camera3_stream_t *video_stream = streamList->streams[video_stream_idx];
2390 camera3_stream_t *preview_stream = streamList->streams[preview_stream_idx[i]];
2391 if (m_bTnrEnabled && m_bTnrVideo &&
2392 (isFront || (atoi(is_type_value) == IS_TYPE_EIS_2_0)) &&
2393 video_stream->width == preview_stream->width &&
2394 video_stream->height == preview_stream->height) {
2395 previewTnr[preview_stream_idx[i]] = true;
2396 }
2397 }
2398
Thierry Strudel3d639192016-09-09 11:52:26 -07002399 memset(&mStreamConfigInfo, 0, sizeof(cam_stream_size_info_t));
2400 /* Allocate channel objects for the requested streams */
2401 for (size_t i = 0; i < streamList->num_streams; i++) {
Binhao Line406f062017-05-03 14:39:44 -07002402
Thierry Strudel3d639192016-09-09 11:52:26 -07002403 camera3_stream_t *newStream = streamList->streams[i];
2404 uint32_t stream_usage = newStream->usage;
2405 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)newStream->width;
2406 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)newStream->height;
2407 struct camera_info *p_info = NULL;
2408 pthread_mutex_lock(&gCamLock);
2409 p_info = get_cam_info(mCameraId, &mStreamConfigInfo.sync_type);
2410 pthread_mutex_unlock(&gCamLock);
2411 if ((newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
2412 || IS_USAGE_ZSL(newStream->usage)) &&
2413 newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED){
Thierry Strudel2896d122017-02-23 19:18:03 -08002414 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002415 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
Thierry Strudel2896d122017-02-23 19:18:03 -08002416 if (isOnEncoder(maxViewfinderSize, newStream->width, newStream->height)) {
2417 if (bUseCommonFeatureMask)
2418 zsl_ppmask = commonFeatureMask;
2419 else
2420 zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002421 } else {
Thierry Strudel2896d122017-02-23 19:18:03 -08002422 if (numStreamsOnEncoder > 0)
2423 zsl_ppmask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2424 else
2425 zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002426 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002427 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = zsl_ppmask;
Thierry Strudel3d639192016-09-09 11:52:26 -07002428 } else if(newStream->stream_type == CAMERA3_STREAM_INPUT) {
Thierry Strudel2896d122017-02-23 19:18:03 -08002429 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002430 LOGH("Input stream configured, reprocess config");
2431 } else {
 2432            // For non-ZSL streams, find out the format
2433 switch (newStream->format) {
2434 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED :
2435 {
Thierry Strudel2896d122017-02-23 19:18:03 -08002436 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002437 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2438 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2439 /* add additional features to pp feature mask */
2440 addToPPFeatureMask(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
2441 mStreamConfigInfo.num_streams);
2442
2443 if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) {
2444 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2445 CAM_STREAM_TYPE_VIDEO;
2446 if (m_bTnrEnabled && m_bTnrVideo) {
2447 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2448 CAM_QCOM_FEATURE_CPP_TNR;
2449 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
2450 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2451 ~CAM_QCOM_FEATURE_CDS;
2452 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002453 if (m_bEis3PropertyEnabled /* hint for EIS 3 needed here */) {
2454 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2455 CAM_QTI_FEATURE_PPEISCORE;
2456 }
Binhao Line406f062017-05-03 14:39:44 -07002457 if (is_goog_zoom_video_enabled && (is_goog_zoom_4k_enabled || !m_bIs4KVideo)) {
2458 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2459 CAM_QCOM_FEATURE_GOOG_ZOOM;
2460 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002461 } else {
2462 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2463 CAM_STREAM_TYPE_PREVIEW;
Jason Leea52b77e2017-06-27 16:16:17 -07002464 if (m_bTnrEnabled && (previewTnr[i] || m_bTnrPreview)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002465 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2466 CAM_QCOM_FEATURE_CPP_TNR;
2467 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
2468 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2469 ~CAM_QCOM_FEATURE_CDS;
2470 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002471 if(!m_bSwTnrPreview) {
2472 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2473 ~CAM_QTI_FEATURE_SW_TNR;
2474 }
Binhao Line406f062017-05-03 14:39:44 -07002475 if (is_goog_zoom_preview_enabled) {
2476 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2477 CAM_QCOM_FEATURE_GOOG_ZOOM;
2478 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002479 padding_info.width_padding = mSurfaceStridePadding;
2480 padding_info.height_padding = CAM_PAD_TO_2;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002481 previewSize.width = (int32_t)newStream->width;
2482 previewSize.height = (int32_t)newStream->height;
Thierry Strudel3d639192016-09-09 11:52:26 -07002483 }
2484 if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
2485 (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
2486 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2487 newStream->height;
2488 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2489 newStream->width;
2490 }
2491 }
2492 break;
2493 case HAL_PIXEL_FORMAT_YCbCr_420_888:
Thierry Strudel2896d122017-02-23 19:18:03 -08002494 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002495 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_CALLBACK;
2496 if (isOnEncoder(maxViewfinderSize, newStream->width, newStream->height)) {
2497 if (bUseCommonFeatureMask)
2498 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2499 commonFeatureMask;
2500 else
2501 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2502 CAM_QCOM_FEATURE_NONE;
2503 } else {
2504 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2505 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2506 }
2507 break;
2508 case HAL_PIXEL_FORMAT_BLOB:
Thierry Strudel2896d122017-02-23 19:18:03 -08002509 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002510 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
2511 // No need to check bSmallJpegSize if ZSL is present since JPEG uses ZSL stream
2512 if ((m_bIs4KVideo && !isZsl) || (bSmallJpegSize && !isZsl)) {
2513 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2514 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel2896d122017-02-23 19:18:03 -08002515 /* Remove rotation if it is not supported
2516 for 4K LiveVideo snapshot case (online processing) */
2517 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask &
2518 CAM_QCOM_FEATURE_ROTATION)) {
2519 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams]
2520 &= ~CAM_QCOM_FEATURE_ROTATION;
2521 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002522 } else {
2523 if (bUseCommonFeatureMask &&
2524 isOnEncoder(maxViewfinderSize, newStream->width,
2525 newStream->height)) {
2526 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = commonFeatureMask;
2527 } else {
2528 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2529 }
2530 }
2531 if (isZsl) {
2532 if (zslStream) {
2533 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2534 (int32_t)zslStream->width;
2535 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2536 (int32_t)zslStream->height;
Thierry Strudel2896d122017-02-23 19:18:03 -08002537 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2538 zsl_ppmask;
Thierry Strudel3d639192016-09-09 11:52:26 -07002539 } else {
2540 LOGE("Error, No ZSL stream identified");
2541 pthread_mutex_unlock(&mMutex);
2542 return -EINVAL;
2543 }
2544 } else if (m_bIs4KVideo) {
2545 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)videoWidth;
2546 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)videoHeight;
2547 } else if (bYuv888OverrideJpeg) {
2548 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2549 (int32_t)largeYuv888Size.width;
2550 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2551 (int32_t)largeYuv888Size.height;
2552 }
2553 break;
2554 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2555 case HAL_PIXEL_FORMAT_RAW16:
2556 case HAL_PIXEL_FORMAT_RAW10:
2557 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
2558 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2559 isRawStreamRequested = true;
Emilian Peev0f3c3162017-03-15 12:57:46 +00002560 if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
2561 (HAL_PIXEL_FORMAT_RAW16 == newStream->format)) {
2562 mStreamConfigInfo.sub_format_type[mStreamConfigInfo.num_streams] =
2563 gCamCapability[mCameraId]->sub_fmt[mPDIndex];
2564 mStreamConfigInfo.format[mStreamConfigInfo.num_streams] =
2565 gCamCapability[mCameraId]->supported_meta_raw_fmts[mPDIndex];
2566 mStreamConfigInfo.dt[mStreamConfigInfo.num_streams] =
2567 gCamCapability[mCameraId]->dt[mPDIndex];
2568 mStreamConfigInfo.vc[mStreamConfigInfo.num_streams] =
2569 gCamCapability[mCameraId]->vc[mPDIndex];
2570 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002571 break;
2572 default:
Thierry Strudel2896d122017-02-23 19:18:03 -08002573 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002574 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_DEFAULT;
2575 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2576 break;
2577 }
2578 }
2579
2580 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2581 (cam_stream_type_t) mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2582 gCamCapability[mCameraId]->color_arrangement);
2583
2584 if (newStream->priv == NULL) {
2585 //New stream, construct channel
2586 switch (newStream->stream_type) {
2587 case CAMERA3_STREAM_INPUT:
2588 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ;
 2589 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;//WR for in-place algos
2590 break;
2591 case CAMERA3_STREAM_BIDIRECTIONAL:
2592 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ |
2593 GRALLOC_USAGE_HW_CAMERA_WRITE;
2594 break;
2595 case CAMERA3_STREAM_OUTPUT:
 2596 /* For video encoding streams, set the read/write-rarely
 2597 * flags so that the buffers may be allocated un-cached */
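                    /* Note: GRALLOC_USAGE_SW_READ_RARELY / SW_WRITE_RARELY are hints to
                     * the allocator that the CPU seldom touches these buffers; gralloc
                     * implementations typically use them to back encoder buffers with
                     * uncached memory, which is what the comment above refers to. */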
2598 if (newStream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER)
2599 newStream->usage |=
2600 (GRALLOC_USAGE_SW_READ_RARELY |
2601 GRALLOC_USAGE_SW_WRITE_RARELY |
2602 GRALLOC_USAGE_HW_CAMERA_WRITE);
2603 else if (IS_USAGE_ZSL(newStream->usage))
2604 {
2605 LOGD("ZSL usage flag skipping");
2606 }
2607 else if (newStream == zslStream
2608 || newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
2609 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_ZSL;
2610 } else
2611 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;
2612 break;
2613 default:
2614 LOGE("Invalid stream_type %d", newStream->stream_type);
2615 break;
2616 }
2617
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002618 bool forcePreviewUBWC = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07002619 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT ||
2620 newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
2621 QCamera3ProcessingChannel *channel = NULL;
2622 switch (newStream->format) {
2623 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
2624 if ((newStream->usage &
2625 private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) &&
2626 (streamList->operation_mode ==
2627 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2628 ) {
2629 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2630 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002631 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002632 this,
2633 newStream,
2634 (cam_stream_type_t)
2635 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2636 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2637 mMetadataChannel,
2638 0); //heap buffers are not required for HFR video channel
2639 if (channel == NULL) {
2640 LOGE("allocation of channel failed");
2641 pthread_mutex_unlock(&mMutex);
2642 return -ENOMEM;
2643 }
2644 //channel->getNumBuffers() will return 0 here so use
 2645 //MAX_INFLIGHT_HFR_REQUESTS
2646 newStream->max_buffers = MAX_INFLIGHT_HFR_REQUESTS;
2647 newStream->priv = channel;
2648 LOGI("num video buffers in HFR mode: %d",
2649 MAX_INFLIGHT_HFR_REQUESTS);
2650 } else {
 2651 /* Copy stream contents in the HFR preview-only case to create
 2652 * a dummy batch channel so that sensor streaming stays in
 2653 * HFR mode */
2654 if (!m_bIsVideo && (streamList->operation_mode ==
2655 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)) {
2656 mDummyBatchStream = *newStream;
2657 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002658 int bufferCount = MAX_INFLIGHT_REQUESTS;
2659 if (mStreamConfigInfo.type[mStreamConfigInfo.num_streams] ==
2660 CAM_STREAM_TYPE_VIDEO) {
Zhijun He6cdf6372017-07-15 14:59:58 -07002661 if (m_bEis3PropertyEnabled /* hint for EIS 3 needed here */) {
2662 // WAR: 4K video can only run <=30fps, reduce the buffer count.
2663 bufferCount = m_bIs4KVideo ?
2664 MAX_30FPS_VIDEO_BUFFERS : MAX_VIDEO_BUFFERS;
2665 }
2666
Thierry Strudel2896d122017-02-23 19:18:03 -08002667 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002668 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2669 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002670 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002671 this,
2672 newStream,
2673 (cam_stream_type_t)
2674 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2675 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2676 mMetadataChannel,
Thierry Strudel2896d122017-02-23 19:18:03 -08002677 bufferCount);
Thierry Strudel3d639192016-09-09 11:52:26 -07002678 if (channel == NULL) {
2679 LOGE("allocation of channel failed");
2680 pthread_mutex_unlock(&mMutex);
2681 return -ENOMEM;
2682 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002683 /* disable UBWC for preview, though supported,
2684 * to take advantage of CPP duplication */
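                    /* (Inferred from the condition below: when the preview matches the
                     * video size and video UBWC is off, keeping the preview linear lets
                     * the CPP produce one linear frame and duplicate it for both
                     * streams.) */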
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002685 if (m_bIsVideo && (!QCameraCommon::isVideoUBWCEnabled()) &&
Thierry Strudel2896d122017-02-23 19:18:03 -08002686 (previewSize.width == (int32_t)videoWidth)&&
2687 (previewSize.height == (int32_t)videoHeight)){
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002688 forcePreviewUBWC = false;
Thierry Strudel2896d122017-02-23 19:18:03 -08002689 }
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002690 channel->setUBWCEnabled(forcePreviewUBWC);
Binhao Line406f062017-05-03 14:39:44 -07002691 /* When goog_zoom is linked to the preview or video stream,
2692 * disable ubwc to the linked stream */
2693 if ((mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &
2694 CAM_QCOM_FEATURE_GOOG_ZOOM) != 0) {
2695 channel->setUBWCEnabled(false);
2696 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002697 newStream->max_buffers = channel->getNumBuffers();
2698 newStream->priv = channel;
2699 }
2700 break;
2701 case HAL_PIXEL_FORMAT_YCbCr_420_888: {
2702 channel = new QCamera3YUVChannel(mCameraHandle->camera_handle,
2703 mChannelHandle,
2704 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002705 setBufferErrorStatus, &padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002706 this,
2707 newStream,
2708 (cam_stream_type_t)
2709 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2710 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2711 mMetadataChannel);
2712 if (channel == NULL) {
2713 LOGE("allocation of YUV channel failed");
2714 pthread_mutex_unlock(&mMutex);
2715 return -ENOMEM;
2716 }
2717 newStream->max_buffers = channel->getNumBuffers();
2718 newStream->priv = channel;
2719 break;
2720 }
2721 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2722 case HAL_PIXEL_FORMAT_RAW16:
Emilian Peev0f3c3162017-03-15 12:57:46 +00002723 case HAL_PIXEL_FORMAT_RAW10: {
2724 bool isRAW16 = ((newStream->format == HAL_PIXEL_FORMAT_RAW16) &&
2725 (HAL_DATASPACE_DEPTH != newStream->data_space))
2726 ? true : false;
Thierry Strudel3d639192016-09-09 11:52:26 -07002727 mRawChannel = new QCamera3RawChannel(
2728 mCameraHandle->camera_handle, mChannelHandle,
2729 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002730 setBufferErrorStatus, &padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002731 this, newStream,
2732 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
Emilian Peev0f3c3162017-03-15 12:57:46 +00002733 mMetadataChannel, isRAW16);
Thierry Strudel3d639192016-09-09 11:52:26 -07002734 if (mRawChannel == NULL) {
2735 LOGE("allocation of raw channel failed");
2736 pthread_mutex_unlock(&mMutex);
2737 return -ENOMEM;
2738 }
2739 newStream->max_buffers = mRawChannel->getNumBuffers();
2740 newStream->priv = (QCamera3ProcessingChannel*)mRawChannel;
2741 break;
Emilian Peev0f3c3162017-03-15 12:57:46 +00002742 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002743 case HAL_PIXEL_FORMAT_BLOB:
Emilian Peev7650c122017-01-19 08:24:33 -08002744 if (newStream->data_space == HAL_DATASPACE_DEPTH) {
2745 mDepthChannel = new QCamera3DepthChannel(
2746 mCameraHandle->camera_handle, mChannelHandle,
2747 mCameraHandle->ops, NULL, NULL, &padding_info,
2748 0, this, MAX_INFLIGHT_REQUESTS, newStream,
2749 mMetadataChannel);
2750 if (NULL == mDepthChannel) {
2751 LOGE("Allocation of depth channel failed");
2752 pthread_mutex_unlock(&mMutex);
2753 return NO_MEMORY;
2754 }
2755 newStream->priv = mDepthChannel;
2756 newStream->max_buffers = MAX_INFLIGHT_REQUESTS;
2757 } else {
2758 // Max live snapshot inflight buffer is 1. This is to mitigate
2759 // frame drop issues for video snapshot. The more buffers being
2760 // allocated, the more frame drops there are.
2761 mPictureChannel = new QCamera3PicChannel(
2762 mCameraHandle->camera_handle, mChannelHandle,
2763 mCameraHandle->ops, captureResultCb,
2764 setBufferErrorStatus, &padding_info, this, newStream,
2765 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2766 m_bIs4KVideo, isZsl, mMetadataChannel,
2767 (m_bIsVideo ? 1 : MAX_INFLIGHT_BLOB));
2768 if (mPictureChannel == NULL) {
2769 LOGE("allocation of channel failed");
2770 pthread_mutex_unlock(&mMutex);
2771 return -ENOMEM;
2772 }
2773 newStream->priv = (QCamera3ProcessingChannel*)mPictureChannel;
2774 newStream->max_buffers = mPictureChannel->getNumBuffers();
2775 mPictureChannel->overrideYuvSize(
2776 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width,
2777 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height);
Thierry Strudel3d639192016-09-09 11:52:26 -07002778 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002779 break;
2780
2781 default:
2782 LOGE("not a supported format 0x%x", newStream->format);
Thierry Strudel73e91562017-05-15 09:16:18 -07002783 pthread_mutex_unlock(&mMutex);
2784 return -EINVAL;
Thierry Strudel3d639192016-09-09 11:52:26 -07002785 }
2786 } else if (newStream->stream_type == CAMERA3_STREAM_INPUT) {
2787 newStream->max_buffers = MAX_INFLIGHT_REPROCESS_REQUESTS;
2788 } else {
2789 LOGE("Error, Unknown stream type");
2790 pthread_mutex_unlock(&mMutex);
2791 return -EINVAL;
2792 }
2793
2794 QCamera3Channel *channel = (QCamera3Channel*) newStream->priv;
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002795 if (channel != NULL && QCamera3Channel::isUBWCEnabled()) {
Jason Leec4cf5032017-05-24 18:31:41 -07002796 // Here we only care whether it's EIS3 or not
2797 cam_is_type_t isType = m_bEis3PropertyEnabled ? IS_TYPE_EIS_3_0 : IS_TYPE_NONE;
2798 if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
2799 mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2800 isType = IS_TYPE_NONE;
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002801 cam_format_t fmt = QCamera3Channel::getStreamDefaultFormat(
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07002802 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
Jason Leec4cf5032017-05-24 18:31:41 -07002803 newStream->width, newStream->height, forcePreviewUBWC, isType);
Thierry Strudel3d639192016-09-09 11:52:26 -07002804 if(fmt == CAM_FORMAT_YUV_420_NV12_UBWC) {
2805 newStream->usage |= GRALLOC_USAGE_PRIVATE_ALLOC_UBWC;
2806 }
2807 }
2808
2809 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
2810 it != mStreamInfo.end(); it++) {
2811 if ((*it)->stream == newStream) {
2812 (*it)->channel = (QCamera3ProcessingChannel*) newStream->priv;
2813 break;
2814 }
2815 }
2816 } else {
2817 // Channel already exists for this stream
2818 // Do nothing for now
2819 }
2820 padding_info = gCamCapability[mCameraId]->padding_info;
2821
Emilian Peev7650c122017-01-19 08:24:33 -08002822 /* Do not add entries for input&depth stream in metastream info
Thierry Strudel3d639192016-09-09 11:52:26 -07002823 * since there is no real stream associated with it
2824 */
Emilian Peev7650c122017-01-19 08:24:33 -08002825 if ((newStream->stream_type != CAMERA3_STREAM_INPUT) &&
Emilian Peev0f3c3162017-03-15 12:57:46 +00002826 !((newStream->data_space == HAL_DATASPACE_DEPTH) &&
2827 (newStream->format == HAL_PIXEL_FORMAT_BLOB))) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002828 mStreamConfigInfo.num_streams++;
Emilian Peev7650c122017-01-19 08:24:33 -08002829 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002830 }
2831
Chien-Yu Chen3f303522017-05-19 15:21:45 -07002832 // Let buffer dispatcher know the configured streams.
2833 mOutputBufferDispatcher.configureStreams(streamList);
2834
Thierry Strudel2896d122017-02-23 19:18:03 -08002835 if (mOpMode != QCAMERA3_VENDOR_STREAM_CONFIGURATION_RAW_ONLY_MODE) {
2836 onlyRaw = false;
2837 }
2838
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002839 // Create analysis stream all the time, even when h/w support is not available
Thierry Strudel2896d122017-02-23 19:18:03 -08002840 if (!onlyRaw) {
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002841 cam_feature_mask_t analysisFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002842 cam_analysis_info_t analysisInfo;
2843 int32_t ret = NO_ERROR;
2844 ret = mCommon.getAnalysisInfo(
2845 FALSE,
2846 analysisFeatureMask,
2847 &analysisInfo);
2848 if (ret == NO_ERROR) {
Shuzhen Wang3b457d92016-08-03 08:46:59 -07002849 cam_color_filter_arrangement_t analysis_color_arrangement =
2850 (analysisInfo.analysis_format == CAM_FORMAT_Y_ONLY ?
2851 CAM_FILTER_ARRANGEMENT_Y :
2852 gCamCapability[mCameraId]->color_arrangement);
2853 setPAAFSupport(analysisFeatureMask, CAM_STREAM_TYPE_ANALYSIS,
2854 analysis_color_arrangement);
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002855 cam_dimension_t analysisDim;
2856 analysisDim = mCommon.getMatchingDimension(previewSize,
2857 analysisInfo.analysis_recommended_res);
2858
2859 mAnalysisChannel = new QCamera3SupportChannel(
2860 mCameraHandle->camera_handle,
2861 mChannelHandle,
2862 mCameraHandle->ops,
2863 &analysisInfo.analysis_padding_info,
2864 analysisFeatureMask,
2865 CAM_STREAM_TYPE_ANALYSIS,
2866 &analysisDim,
2867 (analysisInfo.analysis_format
2868 == CAM_FORMAT_Y_ONLY ? CAM_FORMAT_Y_ONLY
2869 : CAM_FORMAT_YUV_420_NV21),
2870 analysisInfo.hw_analysis_supported,
2871 gCamCapability[mCameraId]->color_arrangement,
2872 this,
2873 0); // force buffer count to 0
2874 } else {
2875 LOGW("getAnalysisInfo failed, ret = %d", ret);
2876 }
2877 if (!mAnalysisChannel) {
2878 LOGW("Analysis channel cannot be created");
2879 }
2880 }
2881
Thierry Strudel3d639192016-09-09 11:52:26 -07002882 //RAW DUMP channel
2883 if (mEnableRawDump && isRawStreamRequested == false){
2884 cam_dimension_t rawDumpSize;
2885 rawDumpSize = getMaxRawSize(mCameraId);
2886 cam_feature_mask_t rawDumpFeatureMask = CAM_QCOM_FEATURE_NONE;
2887 setPAAFSupport(rawDumpFeatureMask,
2888 CAM_STREAM_TYPE_RAW,
2889 gCamCapability[mCameraId]->color_arrangement);
2890 mRawDumpChannel = new QCamera3RawDumpChannel(mCameraHandle->camera_handle,
2891 mChannelHandle,
2892 mCameraHandle->ops,
2893 rawDumpSize,
2894 &padding_info,
2895 this, rawDumpFeatureMask);
2896 if (!mRawDumpChannel) {
2897 LOGE("Raw Dump channel cannot be created");
2898 pthread_mutex_unlock(&mMutex);
2899 return -ENOMEM;
2900 }
2901 }
2902
Thierry Strudel3d639192016-09-09 11:52:26 -07002903 if (mAnalysisChannel) {
2904 cam_analysis_info_t analysisInfo;
2905 memset(&analysisInfo, 0, sizeof(cam_analysis_info_t));
2906 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2907 CAM_STREAM_TYPE_ANALYSIS;
2908 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2909 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002910 rc = mCommon.getAnalysisInfo(FALSE,
Thierry Strudel3d639192016-09-09 11:52:26 -07002911 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2912 &analysisInfo);
2913 if (rc != NO_ERROR) {
2914 LOGE("getAnalysisInfo failed, ret = %d", rc);
2915 pthread_mutex_unlock(&mMutex);
2916 return rc;
2917 }
Shuzhen Wang3b457d92016-08-03 08:46:59 -07002918 cam_color_filter_arrangement_t analysis_color_arrangement =
2919 (analysisInfo.analysis_format == CAM_FORMAT_Y_ONLY ?
2920 CAM_FILTER_ARRANGEMENT_Y :
2921 gCamCapability[mCameraId]->color_arrangement);
2922 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2923 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2924 analysis_color_arrangement);
2925
Thierry Strudel3d639192016-09-09 11:52:26 -07002926 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002927 mCommon.getMatchingDimension(previewSize,
2928 analysisInfo.analysis_recommended_res);
Thierry Strudel3d639192016-09-09 11:52:26 -07002929 mStreamConfigInfo.num_streams++;
2930 }
2931
Thierry Strudel2896d122017-02-23 19:18:03 -08002932 if (!onlyRaw && isSupportChannelNeeded(streamList, mStreamConfigInfo)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002933 cam_analysis_info_t supportInfo;
2934 memset(&supportInfo, 0, sizeof(cam_analysis_info_t));
2935 cam_feature_mask_t callbackFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2936 setPAAFSupport(callbackFeatureMask,
2937 CAM_STREAM_TYPE_CALLBACK,
2938 gCamCapability[mCameraId]->color_arrangement);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002939 int32_t ret = NO_ERROR;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002940 ret = mCommon.getAnalysisInfo(FALSE, callbackFeatureMask, &supportInfo);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002941 if (ret != NO_ERROR) {
2942 /* Ignore the error for Mono camera
2943 * because the PAAF bit mask is only set
2944 * for CAM_STREAM_TYPE_ANALYSIS stream type
2945 */
2946 if (gCamCapability[mCameraId]->color_arrangement != CAM_FILTER_ARRANGEMENT_Y) {
2947 LOGW("getAnalysisInfo failed, ret = %d", ret);
2948 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002949 }
2950 mSupportChannel = new QCamera3SupportChannel(
2951 mCameraHandle->camera_handle,
2952 mChannelHandle,
2953 mCameraHandle->ops,
2954 &gCamCapability[mCameraId]->padding_info,
2955 callbackFeatureMask,
2956 CAM_STREAM_TYPE_CALLBACK,
2957 &QCamera3SupportChannel::kDim,
2958 CAM_FORMAT_YUV_420_NV21,
2959 supportInfo.hw_analysis_supported,
2960 gCamCapability[mCameraId]->color_arrangement,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002961 this, 0);
Thierry Strudel3d639192016-09-09 11:52:26 -07002962 if (!mSupportChannel) {
2963 LOGE("dummy channel cannot be created");
2964 pthread_mutex_unlock(&mMutex);
2965 return -ENOMEM;
2966 }
2967 }
2968
2969 if (mSupportChannel) {
2970 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2971 QCamera3SupportChannel::kDim;
2972 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2973 CAM_STREAM_TYPE_CALLBACK;
2974 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2975 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2976 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2977 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2978 gCamCapability[mCameraId]->color_arrangement);
2979 mStreamConfigInfo.num_streams++;
2980 }
2981
2982 if (mRawDumpChannel) {
2983 cam_dimension_t rawSize;
2984 rawSize = getMaxRawSize(mCameraId);
2985 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2986 rawSize;
2987 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2988 CAM_STREAM_TYPE_RAW;
2989 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2990 CAM_QCOM_FEATURE_NONE;
2991 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2992 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2993 gCamCapability[mCameraId]->color_arrangement);
2994 mStreamConfigInfo.num_streams++;
2995 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07002996
2997 if (mHdrPlusRawSrcChannel) {
2998 cam_dimension_t rawSize;
2999 rawSize = getMaxRawSize(mCameraId);
3000 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] = rawSize;
3001 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
3002 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
3003 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
3004 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
3005 gCamCapability[mCameraId]->color_arrangement);
3006 mStreamConfigInfo.num_streams++;
3007 }
3008
Thierry Strudel3d639192016-09-09 11:52:26 -07003009 /* In HFR mode, if video stream is not added, create a dummy channel so that
3010 * ISP can create a batch mode even for preview only case. This channel is
3011 * never 'start'ed (no stream-on), it is only 'initialized' */
3012 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
3013 !m_bIsVideo) {
3014 cam_feature_mask_t dummyFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
3015 setPAAFSupport(dummyFeatureMask,
3016 CAM_STREAM_TYPE_VIDEO,
3017 gCamCapability[mCameraId]->color_arrangement);
3018 mDummyBatchChannel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
3019 mChannelHandle,
3020 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003021 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07003022 this,
3023 &mDummyBatchStream,
3024 CAM_STREAM_TYPE_VIDEO,
3025 dummyFeatureMask,
3026 mMetadataChannel);
3027 if (NULL == mDummyBatchChannel) {
3028 LOGE("creation of mDummyBatchChannel failed."
 3029 " Preview will use non-HFR sensor mode");
3030 }
3031 }
3032 if (mDummyBatchChannel) {
3033 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
3034 mDummyBatchStream.width;
3035 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
3036 mDummyBatchStream.height;
3037 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
3038 CAM_STREAM_TYPE_VIDEO;
3039 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
3040 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
3041 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
3042 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
3043 gCamCapability[mCameraId]->color_arrangement);
3044 mStreamConfigInfo.num_streams++;
3045 }
3046
3047 mStreamConfigInfo.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
3048 mStreamConfigInfo.buffer_info.max_buffers =
Thierry Strudel2896d122017-02-23 19:18:03 -08003049 m_bIs4KVideo ? 0 :
Jason Leea46ad5e2017-07-07 15:20:56 -07003050 m_bEis3PropertyEnabled && m_bIsVideo ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07003051
3052 /* Initialize mPendingRequestInfo and mPendingBuffersMap */
3053 for (pendingRequestIterator i = mPendingRequestsList.begin();
3054 i != mPendingRequestsList.end();) {
3055 i = erasePendingRequest(i);
3056 }
3057 mPendingFrameDropList.clear();
3058 // Initialize/Reset the pending buffers list
3059 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
3060 req.mPendingBufferList.clear();
3061 }
3062 mPendingBuffersMap.mPendingBuffersInRequest.clear();
Emilian Peev30522a12017-08-03 14:36:33 +01003063 mExpectedInflightDuration = 0;
3064 mExpectedFrameDuration = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07003065
Thierry Strudel3d639192016-09-09 11:52:26 -07003066 mCurJpegMeta.clear();
3067 //Get min frame duration for this streams configuration
3068 deriveMinFrameDuration();
3069
Chien-Yu Chenee335912017-02-09 17:53:20 -08003070 mFirstPreviewIntentSeen = false;
3071
Thierry Strudel3d639192016-09-09 11:52:26 -07003072 // Update state
3073 mState = CONFIGURED;
3074
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003075 mFirstMetadataCallback = true;
3076
Thierry Strudel3d639192016-09-09 11:52:26 -07003077 pthread_mutex_unlock(&mMutex);
3078
3079 return rc;
3080}
3081
3082/*===========================================================================
3083 * FUNCTION : validateCaptureRequest
3084 *
3085 * DESCRIPTION: validate a capture request from camera service
3086 *
3087 * PARAMETERS :
3088 * @request : request from framework to process
3089 *
 3090 * RETURN : NO_ERROR if the request is valid; BAD_VALUE otherwise
3091 *
3092 *==========================================================================*/
3093int QCamera3HardwareInterface::validateCaptureRequest(
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003094 camera3_capture_request_t *request,
3095 List<InternalRequest> &internallyRequestedStreams)
Thierry Strudel3d639192016-09-09 11:52:26 -07003096{
3097 ssize_t idx = 0;
3098 const camera3_stream_buffer_t *b;
3099 CameraMetadata meta;
3100
3101 /* Sanity check the request */
3102 if (request == NULL) {
3103 LOGE("NULL capture request");
3104 return BAD_VALUE;
3105 }
3106
3107 if ((request->settings == NULL) && (mState == CONFIGURED)) {
3108 /*settings cannot be null for the first request*/
3109 return BAD_VALUE;
3110 }
3111
3112 uint32_t frameNumber = request->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003113 if ((request->num_output_buffers < 1 || request->output_buffers == NULL)
3114 && (internallyRequestedStreams.size() == 0)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003115 LOGE("Request %d: No output buffers provided!",
 3116 frameNumber);
3117 return BAD_VALUE;
3118 }
3119 if (request->num_output_buffers >= MAX_NUM_STREAMS) {
 3120 LOGE("Number of buffers %d equals or exceeds maximum number of streams %d!",
3121 request->num_output_buffers, MAX_NUM_STREAMS);
3122 return BAD_VALUE;
3123 }
3124 if (request->input_buffer != NULL) {
3125 b = request->input_buffer;
3126 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
3127 LOGE("Request %d: Buffer %ld: Status not OK!",
3128 frameNumber, (long)idx);
3129 return BAD_VALUE;
3130 }
3131 if (b->release_fence != -1) {
3132 LOGE("Request %d: Buffer %ld: Has a release fence!",
3133 frameNumber, (long)idx);
3134 return BAD_VALUE;
3135 }
3136 if (b->buffer == NULL) {
3137 LOGE("Request %d: Buffer %ld: NULL buffer handle!",
3138 frameNumber, (long)idx);
3139 return BAD_VALUE;
3140 }
3141 }
3142
3143 // Validate all buffers
3144 b = request->output_buffers;
Thierry Strudel54dc9782017-02-15 12:12:10 -08003145 if (b == NULL) {
3146 return BAD_VALUE;
3147 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003148 while (idx < (ssize_t)request->num_output_buffers) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003149 QCamera3ProcessingChannel *channel =
3150 static_cast<QCamera3ProcessingChannel*>(b->stream->priv);
3151 if (channel == NULL) {
3152 LOGE("Request %d: Buffer %ld: Unconfigured stream!",
3153 frameNumber, (long)idx);
3154 return BAD_VALUE;
3155 }
3156 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
3157 LOGE("Request %d: Buffer %ld: Status not OK!",
3158 frameNumber, (long)idx);
3159 return BAD_VALUE;
3160 }
3161 if (b->release_fence != -1) {
3162 LOGE("Request %d: Buffer %ld: Has a release fence!",
3163 frameNumber, (long)idx);
3164 return BAD_VALUE;
3165 }
3166 if (b->buffer == NULL) {
3167 LOGE("Request %d: Buffer %ld: NULL buffer handle!",
3168 frameNumber, (long)idx);
3169 return BAD_VALUE;
3170 }
3171 if (*(b->buffer) == NULL) {
3172 LOGE("Request %d: Buffer %ld: NULL private handle!",
3173 frameNumber, (long)idx);
3174 return BAD_VALUE;
3175 }
3176 idx++;
3177 b = request->output_buffers + idx;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003178 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003179 return NO_ERROR;
3180}
3181
3182/*===========================================================================
3183 * FUNCTION : deriveMinFrameDuration
3184 *
 3185 * DESCRIPTION: derive minimum processed, jpeg, and raw frame durations based
3186 * on currently configured streams.
3187 *
3188 * PARAMETERS : NONE
3189 *
3190 * RETURN : NONE
3191 *
3192 *==========================================================================*/
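/* Illustrative sketch (hypothetical stream set, not from any particular sensor):
 *   streams = { 1920x1080 preview, 4032x3024 JPEG, 1600x1200 RAW16 }
 *   maxProcessedDim = max(1920*1080, 4032*3024) = 12192768  (JPEG counts as processed)
 *   maxRawDim       = 1600*1200 = 1920000
 * Because a RAW stream exists and maxProcessedDim > maxRawDim, the smallest supported
 * raw dimension >= maxProcessedDim replaces maxRawDim before the per-size minimum
 * durations are looked up from the capability tables.
 */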
3193void QCamera3HardwareInterface::deriveMinFrameDuration()
3194{
3195 int32_t maxJpegDim, maxProcessedDim, maxRawDim;
Jason Lee2d0ab112017-06-21 18:03:05 -07003196 bool hasRaw = false;
3197
3198 mMinRawFrameDuration = 0;
3199 mMinJpegFrameDuration = 0;
3200 mMinProcessedFrameDuration = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07003201
3202 maxJpegDim = 0;
3203 maxProcessedDim = 0;
3204 maxRawDim = 0;
3205
3206 // Figure out maximum jpeg, processed, and raw dimensions
3207 for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
3208 it != mStreamInfo.end(); it++) {
3209
3210 // Input stream doesn't have valid stream_type
3211 if ((*it)->stream->stream_type == CAMERA3_STREAM_INPUT)
3212 continue;
3213
3214 int32_t dimension = (int32_t)((*it)->stream->width * (*it)->stream->height);
3215 if ((*it)->stream->format == HAL_PIXEL_FORMAT_BLOB) {
3216 if (dimension > maxJpegDim)
3217 maxJpegDim = dimension;
3218 } else if ((*it)->stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
3219 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW10 ||
3220 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW16) {
Jason Lee2d0ab112017-06-21 18:03:05 -07003221 hasRaw = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07003222 if (dimension > maxRawDim)
3223 maxRawDim = dimension;
3224 } else {
3225 if (dimension > maxProcessedDim)
3226 maxProcessedDim = dimension;
3227 }
3228 }
3229
3230 size_t count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt,
3231 MAX_SIZES_CNT);
3232
3233 //Assume all jpeg dimensions are in processed dimensions.
3234 if (maxJpegDim > maxProcessedDim)
3235 maxProcessedDim = maxJpegDim;
3236 //Find the smallest raw dimension that is greater or equal to jpeg dimension
Jason Lee2d0ab112017-06-21 18:03:05 -07003237 if (hasRaw && maxProcessedDim > maxRawDim) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003238 maxRawDim = INT32_MAX;
3239
3240 for (size_t i = 0; i < count; i++) {
3241 int32_t dimension = gCamCapability[mCameraId]->raw_dim[i].width *
3242 gCamCapability[mCameraId]->raw_dim[i].height;
3243 if (dimension >= maxProcessedDim && dimension < maxRawDim)
3244 maxRawDim = dimension;
3245 }
3246 }
3247
3248 //Find minimum durations for processed, jpeg, and raw
3249 for (size_t i = 0; i < count; i++) {
3250 if (maxRawDim == gCamCapability[mCameraId]->raw_dim[i].width *
3251 gCamCapability[mCameraId]->raw_dim[i].height) {
3252 mMinRawFrameDuration = gCamCapability[mCameraId]->raw_min_duration[i];
3253 break;
3254 }
3255 }
3256 count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
3257 for (size_t i = 0; i < count; i++) {
3258 if (maxProcessedDim ==
3259 gCamCapability[mCameraId]->picture_sizes_tbl[i].width *
3260 gCamCapability[mCameraId]->picture_sizes_tbl[i].height) {
3261 mMinProcessedFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
3262 mMinJpegFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
3263 break;
3264 }
3265 }
3266}
3267
3268/*===========================================================================
3269 * FUNCTION : getMinFrameDuration
3270 *
 3271 * DESCRIPTION: get minimum frame duration based on the currently derived minimum
 3272 * frame durations and the current request configuration.
 3273 *
 3274 * PARAMETERS : @request: request sent by the framework
 3275 *
 3276 * RETURN : min frame duration for a particular request
3277 *
3278 *==========================================================================*/
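/* Illustrative sketch (hypothetical durations): with mMinRawFrameDuration = 33.3ms,
 * mMinProcessedFrameDuration = 33.3ms and mMinJpegFrameDuration = 50ms, a request
 * with no BLOB buffer is bounded by max(33.3, 33.3) = 33.3ms, while a request that
 * includes a JPEG buffer is bounded by max(33.3, 50) = 50ms.
 */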
3279int64_t QCamera3HardwareInterface::getMinFrameDuration(const camera3_capture_request_t *request)
3280{
3281 bool hasJpegStream = false;
3282 bool hasRawStream = false;
3283 for (uint32_t i = 0; i < request->num_output_buffers; i ++) {
3284 const camera3_stream_t *stream = request->output_buffers[i].stream;
3285 if (stream->format == HAL_PIXEL_FORMAT_BLOB)
3286 hasJpegStream = true;
3287 else if (stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
3288 stream->format == HAL_PIXEL_FORMAT_RAW10 ||
3289 stream->format == HAL_PIXEL_FORMAT_RAW16)
3290 hasRawStream = true;
3291 }
3292
3293 if (!hasJpegStream)
3294 return MAX(mMinRawFrameDuration, mMinProcessedFrameDuration);
3295 else
3296 return MAX(MAX(mMinRawFrameDuration, mMinProcessedFrameDuration), mMinJpegFrameDuration);
3297}
3298
3299/*===========================================================================
3300 * FUNCTION : handleBuffersDuringFlushLock
3301 *
3302 * DESCRIPTION: Account for buffers returned from back-end during flush
3303 * This function is executed while mMutex is held by the caller.
3304 *
3305 * PARAMETERS :
3306 * @buffer: image buffer for the callback
3307 *
3308 * RETURN :
3309 *==========================================================================*/
3310void QCamera3HardwareInterface::handleBuffersDuringFlushLock(camera3_stream_buffer_t *buffer)
3311{
3312 bool buffer_found = false;
3313 for (List<PendingBuffersInRequest>::iterator req =
3314 mPendingBuffersMap.mPendingBuffersInRequest.begin();
3315 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
3316 for (List<PendingBufferInfo>::iterator i =
3317 req->mPendingBufferList.begin();
3318 i != req->mPendingBufferList.end(); i++) {
3319 if (i->buffer == buffer->buffer) {
3320 mPendingBuffersMap.numPendingBufsAtFlush--;
3321 LOGD("Found buffer %p for Frame %d, numPendingBufsAtFlush = %d",
3322 buffer->buffer, req->frame_number,
3323 mPendingBuffersMap.numPendingBufsAtFlush);
3324 buffer_found = true;
3325 break;
3326 }
3327 }
3328 if (buffer_found) {
3329 break;
3330 }
3331 }
3332 if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
3333 //signal the flush()
3334 LOGD("All buffers returned to HAL. Continue flush");
3335 pthread_cond_signal(&mBuffersCond);
3336 }
3337}
3338
Thierry Strudel3d639192016-09-09 11:52:26 -07003339/*===========================================================================
3340 * FUNCTION : handleBatchMetadata
3341 *
3342 * DESCRIPTION: Handles metadata buffer callback in batch mode
3343 *
3344 * PARAMETERS : @metadata_buf: metadata buffer
3345 * @free_and_bufdone_meta_buf: Buf done on the meta buf and free
3346 * the meta buf in this method
3347 *
3348 * RETURN :
3349 *
3350 *==========================================================================*/
3351void QCamera3HardwareInterface::handleBatchMetadata(
3352 mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf)
3353{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003354 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_BATCH_METADATA);
Thierry Strudel3d639192016-09-09 11:52:26 -07003355
3356 if (NULL == metadata_buf) {
3357 LOGE("metadata_buf is NULL");
3358 return;
3359 }
 3360 /* In batch mode, the metadata will contain the frame number and timestamp of
 3361 * the last frame in the batch. Eg: a batch containing buffers from requests
 3362 * 5,6,7 and 8 will have frame number and timestamp corresponding to 8.
 3363 * multiple process_capture_requests => 1 set_param => 1 handleBatchMetadata =>
 3364 * multiple process_capture_results */
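    /* Illustrative sketch (hypothetical numbers): if first_frame_number = 5 and
     * last_frame_number = 8, then frameNumDiff = 8 + 1 - 5 = 4 and the loop below
     * re-emits this single metadata buffer as frames 5, 6, 7 and 8. Timestamps are
     * interpolated backwards from the last frame in the same way:
     *   capture_time(i) = last_frame_capture_time
     *                     - (loopCount - 1 - i) * NSEC_PER_SEC / mHFRVideoFps
     */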
3365 metadata_buffer_t *metadata =
3366 (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3367 int32_t frame_number_valid = 0, urgent_frame_number_valid = 0;
3368 uint32_t last_frame_number = 0, last_urgent_frame_number = 0;
3369 uint32_t first_frame_number = 0, first_urgent_frame_number = 0;
3370 uint32_t frame_number = 0, urgent_frame_number = 0;
3371 int64_t last_frame_capture_time = 0, first_frame_capture_time, capture_time;
3372 bool invalid_metadata = false;
3373 size_t urgentFrameNumDiff = 0, frameNumDiff = 0;
3374 size_t loopCount = 1;
Thierry Strudel54dc9782017-02-15 12:12:10 -08003375 bool is_metabuf_queued = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07003376
3377 int32_t *p_frame_number_valid =
3378 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3379 uint32_t *p_frame_number =
3380 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3381 int64_t *p_capture_time =
3382 POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
3383 int32_t *p_urgent_frame_number_valid =
3384 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
3385 uint32_t *p_urgent_frame_number =
3386 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
3387
3388 if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) ||
3389 (NULL == p_capture_time) || (NULL == p_urgent_frame_number_valid) ||
3390 (NULL == p_urgent_frame_number)) {
3391 LOGE("Invalid metadata");
3392 invalid_metadata = true;
3393 } else {
3394 frame_number_valid = *p_frame_number_valid;
3395 last_frame_number = *p_frame_number;
3396 last_frame_capture_time = *p_capture_time;
3397 urgent_frame_number_valid = *p_urgent_frame_number_valid;
3398 last_urgent_frame_number = *p_urgent_frame_number;
3399 }
3400
 3401 /* In batch mode, when no video buffers are requested, set_parms are sent
3402 * for every capture_request. The difference between consecutive urgent
3403 * frame numbers and frame numbers should be used to interpolate the
3404 * corresponding frame numbers and time stamps */
3405 pthread_mutex_lock(&mMutex);
3406 if (urgent_frame_number_valid) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003407 ssize_t idx = mPendingBatchMap.indexOfKey(last_urgent_frame_number);
3408 if(idx < 0) {
3409 LOGE("Invalid urgent frame number received: %d. Irrecoverable error",
3410 last_urgent_frame_number);
3411 mState = ERROR;
3412 pthread_mutex_unlock(&mMutex);
3413 return;
3414 }
3415 first_urgent_frame_number = mPendingBatchMap.valueAt(idx);
Thierry Strudel3d639192016-09-09 11:52:26 -07003416 urgentFrameNumDiff = last_urgent_frame_number + 1 -
3417 first_urgent_frame_number;
3418
3419 LOGD("urgent_frm: valid: %d frm_num: %d - %d",
3420 urgent_frame_number_valid,
3421 first_urgent_frame_number, last_urgent_frame_number);
3422 }
3423
3424 if (frame_number_valid) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003425 ssize_t idx = mPendingBatchMap.indexOfKey(last_frame_number);
3426 if(idx < 0) {
3427 LOGE("Invalid frame number received: %d. Irrecoverable error",
3428 last_frame_number);
3429 mState = ERROR;
3430 pthread_mutex_unlock(&mMutex);
3431 return;
3432 }
3433 first_frame_number = mPendingBatchMap.valueAt(idx);
Thierry Strudel3d639192016-09-09 11:52:26 -07003434 frameNumDiff = last_frame_number + 1 -
3435 first_frame_number;
3436 mPendingBatchMap.removeItem(last_frame_number);
3437
3438 LOGD("frm: valid: %d frm_num: %d - %d",
3439 frame_number_valid,
3440 first_frame_number, last_frame_number);
3441
3442 }
3443 pthread_mutex_unlock(&mMutex);
3444
3445 if (urgent_frame_number_valid || frame_number_valid) {
3446 loopCount = MAX(urgentFrameNumDiff, frameNumDiff);
3447 if (urgentFrameNumDiff > MAX_HFR_BATCH_SIZE)
3448 LOGE("urgentFrameNumDiff: %d urgentFrameNum: %d",
3449 urgentFrameNumDiff, last_urgent_frame_number);
3450 if (frameNumDiff > MAX_HFR_BATCH_SIZE)
3451 LOGE("frameNumDiff: %d frameNum: %d",
3452 frameNumDiff, last_frame_number);
3453 }
3454
3455 for (size_t i = 0; i < loopCount; i++) {
3456 /* handleMetadataWithLock is called even for invalid_metadata for
3457 * pipeline depth calculation */
3458 if (!invalid_metadata) {
3459 /* Infer frame number. Batch metadata contains frame number of the
3460 * last frame */
3461 if (urgent_frame_number_valid) {
3462 if (i < urgentFrameNumDiff) {
3463 urgent_frame_number =
3464 first_urgent_frame_number + i;
3465 LOGD("inferred urgent frame_number: %d",
3466 urgent_frame_number);
3467 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3468 CAM_INTF_META_URGENT_FRAME_NUMBER, urgent_frame_number);
3469 } else {
3470 /* This is to handle when urgentFrameNumDiff < frameNumDiff */
3471 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3472 CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, 0);
3473 }
3474 }
3475
3476 /* Infer frame number. Batch metadata contains frame number of the
3477 * last frame */
3478 if (frame_number_valid) {
3479 if (i < frameNumDiff) {
3480 frame_number = first_frame_number + i;
3481 LOGD("inferred frame_number: %d", frame_number);
3482 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3483 CAM_INTF_META_FRAME_NUMBER, frame_number);
3484 } else {
3485 /* This is to handle when urgentFrameNumDiff > frameNumDiff */
3486 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3487 CAM_INTF_META_FRAME_NUMBER_VALID, 0);
3488 }
3489 }
3490
3491 if (last_frame_capture_time) {
3492 //Infer timestamp
3493 first_frame_capture_time = last_frame_capture_time -
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003494 (((loopCount - 1) * NSEC_PER_SEC) / (double) mHFRVideoFps);
Thierry Strudel3d639192016-09-09 11:52:26 -07003495 capture_time =
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003496 first_frame_capture_time + (i * NSEC_PER_SEC / (double) mHFRVideoFps);
Thierry Strudel3d639192016-09-09 11:52:26 -07003497 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3498 CAM_INTF_META_SENSOR_TIMESTAMP, capture_time);
3499 LOGD("batch capture_time: %lld, capture_time: %lld",
3500 last_frame_capture_time, capture_time);
3501 }
3502 }
3503 pthread_mutex_lock(&mMutex);
3504 handleMetadataWithLock(metadata_buf,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003505 false /* free_and_bufdone_meta_buf */,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003506 (i == urgentFrameNumDiff-1), /* last urgent metadata in the batch */
3507 (i == frameNumDiff-1), /* last metadata in the batch metadata */
Thierry Strudel54dc9782017-02-15 12:12:10 -08003508 &is_metabuf_queued /* if metabuf isqueued or not */);
Thierry Strudel3d639192016-09-09 11:52:26 -07003509 pthread_mutex_unlock(&mMutex);
3510 }
3511
3512 /* BufDone metadata buffer */
Thierry Strudel54dc9782017-02-15 12:12:10 -08003513 if (free_and_bufdone_meta_buf && !is_metabuf_queued) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003514 mMetadataChannel->bufDone(metadata_buf);
3515 free(metadata_buf);
Thierry Strudel54dc9782017-02-15 12:12:10 -08003516 metadata_buf = NULL;
Thierry Strudel3d639192016-09-09 11:52:26 -07003517 }
3518}
3519
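/*===========================================================================
 * FUNCTION   : notifyError
 *
 * DESCRIPTION: send a CAMERA3_MSG_ERROR notification for the given frame
 *              number to the framework via orchestrateNotify
 *
 * PARAMETERS : @frameNumber : frame number the error applies to
 *              @errorCode   : camera3 error code to report
 *
 * RETURN     : None
 *
 *==========================================================================*/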
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003520void QCamera3HardwareInterface::notifyError(uint32_t frameNumber,
3521 camera3_error_msg_code_t errorCode)
3522{
3523 camera3_notify_msg_t notify_msg;
3524 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
3525 notify_msg.type = CAMERA3_MSG_ERROR;
3526 notify_msg.message.error.error_code = errorCode;
3527 notify_msg.message.error.error_stream = NULL;
3528 notify_msg.message.error.frame_number = frameNumber;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003529 orchestrateNotify(&notify_msg);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003530
3531 return;
3532}
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003533
3534/*===========================================================================
3535 * FUNCTION : sendPartialMetadataWithLock
3536 *
3537 * DESCRIPTION: Send partial capture result callback with mMutex lock held.
3538 *
3539 * PARAMETERS : @metadata: metadata buffer
3540 * @requestIter: The iterator for the pending capture request for
 3541 * which the partial result is being sent
3542 * @lastUrgentMetadataInBatch: Boolean to indicate whether this is the
3543 * last urgent metadata in a batch. Always true for non-batch mode
Shuzhen Wang485e2442017-08-02 12:21:08 -07003544 * @isJumpstartMetadata: Whether this is a partial metadata for
3545 * jumpstart, i.e. even though it doesn't map to a valid partial
3546 * frame number, its metadata entries should be kept.
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003547 *
3548 * RETURN :
3549 *
3550 *==========================================================================*/
3551
3552void QCamera3HardwareInterface::sendPartialMetadataWithLock(
3553 metadata_buffer_t *metadata,
3554 const pendingRequestIterator requestIter,
Shuzhen Wang485e2442017-08-02 12:21:08 -07003555 bool lastUrgentMetadataInBatch,
3556 bool isJumpstartMetadata)
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003557{
3558 camera3_capture_result_t result;
3559 memset(&result, 0, sizeof(camera3_capture_result_t));
3560
3561 requestIter->partial_result_cnt++;
3562
3563 // Extract 3A metadata
3564 result.result = translateCbUrgentMetadataToResultMetadata(
Shuzhen Wang485e2442017-08-02 12:21:08 -07003565 metadata, lastUrgentMetadataInBatch, requestIter->frame_number,
3566 isJumpstartMetadata);
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003567 // Populate metadata result
3568 result.frame_number = requestIter->frame_number;
3569 result.num_output_buffers = 0;
3570 result.output_buffers = NULL;
3571 result.partial_result = requestIter->partial_result_cnt;
3572
3573 {
Chien-Yu Chen77ccd022017-06-23 12:00:36 -07003574 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003575 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
3576 // Notify HDR+ client about the partial metadata.
3577 gHdrPlusClient->notifyFrameMetadata(result.frame_number, *result.result,
3578 result.partial_result == PARTIAL_RESULT_COUNT);
3579 }
3580 }
3581
3582 orchestrateResult(&result);
3583 LOGD("urgent frame_number = %u", result.frame_number);
3584 free_camera_metadata((camera_metadata_t *)result.result);
3585}
3586
Thierry Strudel3d639192016-09-09 11:52:26 -07003587/*===========================================================================
3588 * FUNCTION : handleMetadataWithLock
3589 *
3590 * DESCRIPTION: Handles metadata buffer callback with mMutex lock held.
3591 *
3592 * PARAMETERS : @metadata_buf: metadata buffer
3593 * @free_and_bufdone_meta_buf: Buf done on the meta buf and free
3594 * the meta buf in this method
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003595 * @lastUrgentMetadataInBatch: Boolean to indicate whether this is the
3596 * last urgent metadata in a batch. Always true for non-batch mode
3597 * @lastMetadataInBatch: Boolean to indicate whether this is the
3598 * last metadata in a batch. Always true for non-batch mode
Thierry Strudel54dc9782017-02-15 12:12:10 -08003599 * @p_is_metabuf_queued: Pointer to Boolean to check if metadata
3600 * buffer is enqueued or not.
Thierry Strudel3d639192016-09-09 11:52:26 -07003601 *
3602 * RETURN :
3603 *
3604 *==========================================================================*/
3605void QCamera3HardwareInterface::handleMetadataWithLock(
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003606 mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003607 bool lastUrgentMetadataInBatch, bool lastMetadataInBatch,
3608 bool *p_is_metabuf_queued)
Thierry Strudel3d639192016-09-09 11:52:26 -07003609{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003610 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_METADATA_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07003611 if ((mFlushPerf) || (ERROR == mState) || (DEINIT == mState)) {
3612 //during flush do not send metadata from this thread
3613 LOGD("not sending metadata during flush or when mState is error");
3614 if (free_and_bufdone_meta_buf) {
3615 mMetadataChannel->bufDone(metadata_buf);
3616 free(metadata_buf);
3617 }
3618 return;
3619 }
3620
3621 //not in flush
3622 metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3623 int32_t frame_number_valid, urgent_frame_number_valid;
3624 uint32_t frame_number, urgent_frame_number;
Jason Lee603176d2017-05-31 11:43:27 -07003625 int64_t capture_time, capture_time_av;
Thierry Strudel3d639192016-09-09 11:52:26 -07003626 nsecs_t currentSysTime;
3627
3628 int32_t *p_frame_number_valid =
3629 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3630 uint32_t *p_frame_number = POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3631 int64_t *p_capture_time = POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
Jason Lee603176d2017-05-31 11:43:27 -07003632 int64_t *p_capture_time_av = POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP_AV, metadata);
Thierry Strudel3d639192016-09-09 11:52:26 -07003633 int32_t *p_urgent_frame_number_valid =
3634 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
3635 uint32_t *p_urgent_frame_number =
3636 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
3637 IF_META_AVAILABLE(cam_stream_ID_t, p_cam_frame_drop, CAM_INTF_META_FRAME_DROPPED,
3638 metadata) {
3639 LOGD("Dropped frame info for frame_number_valid %d, frame_number %d",
3640 *p_frame_number_valid, *p_frame_number);
3641 }
3642
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003643 camera_metadata_t *resultMetadata = nullptr;
3644
Thierry Strudel3d639192016-09-09 11:52:26 -07003645 if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) || (NULL == p_capture_time) ||
3646 (NULL == p_urgent_frame_number_valid) || (NULL == p_urgent_frame_number)) {
3647 LOGE("Invalid metadata");
3648 if (free_and_bufdone_meta_buf) {
3649 mMetadataChannel->bufDone(metadata_buf);
3650 free(metadata_buf);
3651 }
3652 goto done_metadata;
3653 }
3654 frame_number_valid = *p_frame_number_valid;
3655 frame_number = *p_frame_number;
3656 capture_time = *p_capture_time;
Jason Lee603176d2017-05-31 11:43:27 -07003657 capture_time_av = *p_capture_time_av;
Thierry Strudel3d639192016-09-09 11:52:26 -07003658 urgent_frame_number_valid = *p_urgent_frame_number_valid;
3659 urgent_frame_number = *p_urgent_frame_number;
3660 currentSysTime = systemTime(CLOCK_MONOTONIC);
3661
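    // When sensor timestamps are not calibrated, capture_time appears to be in the
    // BOOTTIME clock base. The block below estimates the BOOTTIME-to-MONOTONIC offset
    // by sandwiching a BOOTTIME read between two MONOTONIC reads, keeping the sample
    // with the smallest gap (least scheduling noise), and shifts capture_time into
    // the MONOTONIC base.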
Jason Lee603176d2017-05-31 11:43:27 -07003662 if (!gCamCapability[mCameraId]->timestamp_calibrated) {
3663 const int tries = 3;
3664 nsecs_t bestGap, measured;
3665 for (int i = 0; i < tries; ++i) {
3666 const nsecs_t tmono = systemTime(SYSTEM_TIME_MONOTONIC);
3667 const nsecs_t tbase = systemTime(SYSTEM_TIME_BOOTTIME);
3668 const nsecs_t tmono2 = systemTime(SYSTEM_TIME_MONOTONIC);
3669 const nsecs_t gap = tmono2 - tmono;
3670 if (i == 0 || gap < bestGap) {
3671 bestGap = gap;
3672 measured = tbase - ((tmono + tmono2) >> 1);
3673 }
3674 }
3675 capture_time -= measured;
3676 }
3677
Thierry Strudel3d639192016-09-09 11:52:26 -07003678 // Detect if buffers from any requests are overdue
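    // A request is considered overdue when its buffers have been outstanding longer
    // than the applicable timeout: the longer HDR+ timeout while HDR+ requests are
    // pending, and never shorter than the expected in-flight duration. Overdue
    // buffers are cancelled on their channels via timeoutFrame().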
3679 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003680 int64_t timeout;
3681 {
3682 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
3683 // If there is a pending HDR+ request, the following requests may be blocked until the
3684 // HDR+ request is done. So allow a longer timeout.
3685 timeout = (mHdrPlusPendingRequests.size() > 0) ?
3686 MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT : MISSING_REQUEST_BUF_TIMEOUT;
Emilian Peev30522a12017-08-03 14:36:33 +01003687 if (timeout < mExpectedInflightDuration) {
3688 timeout = mExpectedInflightDuration;
3689 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003690 }
3691
3692 if ( (currentSysTime - req.timestamp) > s2ns(timeout) ) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003693 for (auto &missed : req.mPendingBufferList) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003694 assert(missed.stream->priv);
3695 if (missed.stream->priv) {
3696 QCamera3Channel *ch = (QCamera3Channel *)(missed.stream->priv);
3697 assert(ch->mStreams[0]);
3698 if (ch->mStreams[0]) {
3699 LOGE("Cancel missing frame = %d, buffer = %p,"
 3700 " stream type = %d, stream format = %d",
3701 req.frame_number, missed.buffer,
3702 ch->mStreams[0]->getMyType(), missed.stream->format);
3703 ch->timeoutFrame(req.frame_number);
3704 }
3705 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003706 }
3707 }
3708 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003709 //For the very first metadata callback, regardless of whether it contains a valid
 3710 //frame number, send the partial metadata for the jumpstarting requests.
 3711 //Note that this has to be done even if the metadata doesn't contain a valid
 3712 //urgent frame number, because in the case where only 1 request is ever submitted
 3713 //to HAL, there won't be a subsequent valid urgent frame number.
3714 if (mFirstMetadataCallback) {
3715 for (pendingRequestIterator i =
3716 mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
3717 if (i->bUseFirstPartial) {
Shuzhen Wang485e2442017-08-02 12:21:08 -07003718 sendPartialMetadataWithLock(metadata, i, lastUrgentMetadataInBatch,
3719 true /*isJumpstartMetadata*/);
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003720 }
3721 }
3722 mFirstMetadataCallback = false;
3723 }
3724
Thierry Strudel3d639192016-09-09 11:52:26 -07003725 //Partial result on process_capture_result for timestamp
3726 if (urgent_frame_number_valid) {
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003727 LOGD("valid urgent frame_number = %u", urgent_frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003728
 3729 //Received an urgent Frame Number, handle it
3730 //using partial results
3731 for (pendingRequestIterator i =
3732 mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
3733 LOGD("Iterator Frame = %d urgent frame = %d",
3734 i->frame_number, urgent_frame_number);
3735
Chien-Yu Chen29fd1d72017-04-27 18:42:09 -07003736 if ((!i->input_buffer) && (!i->hdrplus) && (i->frame_number < urgent_frame_number) &&
Shuzhen Wanga1d82a92017-09-19 14:39:43 -07003737 (i->partial_result_cnt == 0)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003738 LOGE("Error: HAL missed urgent metadata for frame number %d",
3739 i->frame_number);
Shuzhen Wanga1d82a92017-09-19 14:39:43 -07003740 i->partialResultDropped = true;
Shuzhen Wang1ee712a2017-03-22 17:51:26 -07003741 i->partial_result_cnt++;
Thierry Strudel3d639192016-09-09 11:52:26 -07003742 }
3743
3744 if (i->frame_number == urgent_frame_number &&
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003745 i->partial_result_cnt == 0) {
Shuzhen Wang485e2442017-08-02 12:21:08 -07003746 sendPartialMetadataWithLock(metadata, i, lastUrgentMetadataInBatch,
3747 false /*isJumpstartMetadata*/);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003748 if (mResetInstantAEC && mInstantAECSettledFrameNumber == 0) {
3749 // Instant AEC settled for this frame.
3750 LOGH("instant AEC settled for frame number %d", urgent_frame_number);
3751 mInstantAECSettledFrameNumber = urgent_frame_number;
3752 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003753 break;
3754 }
3755 }
3756 }
3757
3758 if (!frame_number_valid) {
3759 LOGD("Not a valid normal frame number, used as SOF only");
3760 if (free_and_bufdone_meta_buf) {
3761 mMetadataChannel->bufDone(metadata_buf);
3762 free(metadata_buf);
3763 }
3764 goto done_metadata;
3765 }
3766 LOGH("valid frame_number = %u, capture_time = %lld",
3767 frame_number, capture_time);
3768
Emilian Peev4e0fe952017-06-30 12:40:09 -07003769 handleDepthDataLocked(metadata->depth_data, frame_number,
3770 metadata->is_depth_data_valid);
Emilian Peev7650c122017-01-19 08:24:33 -08003771
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003772 // Check whether any stream buffer corresponding to this frame is dropped or not.
 3773 // If dropped, send ERROR_BUFFER for the corresponding stream.
 3774 // OR, if instant AEC is enabled, drop frames until AEC is settled.
3775 for (auto & pendingRequest : mPendingRequestsList) {
3776 if (p_cam_frame_drop || (mInstantAEC || pendingRequest.frame_number <
3777 mInstantAECSettledFrameNumber)) {
3778 camera3_notify_msg_t notify_msg = {};
3779 for (auto & buffer : pendingRequest.buffers) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003780 bool dropFrame = false;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003781 QCamera3ProcessingChannel *channel =
3782 (QCamera3ProcessingChannel *)buffer.stream->priv;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003783 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003784 if (p_cam_frame_drop) {
3785 for (uint32_t k = 0; k < p_cam_frame_drop->num_streams; k++) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003786 if (streamID == p_cam_frame_drop->stream_request[k].streamID) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003787 // Got the stream ID for drop frame.
3788 dropFrame = true;
3789 break;
3790 }
3791 }
3792 } else {
 3793 // This is the instant AEC case.
 3794 // For instant AEC, drop the stream until AEC is settled.
3795 dropFrame = true;
3796 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003797
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003798 if (dropFrame) {
3799 // Send Error notify to frameworks with CAMERA3_MSG_ERROR_BUFFER
3800 if (p_cam_frame_drop) {
3801 // Treat msg as error for system buffer drops
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003802 LOGE("Start of reporting error frame#=%u, streamID=%u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003803 pendingRequest.frame_number, streamID);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003804 } else {
3805 // For instant AEC, inform frame drop and frame number
3806 LOGH("Start of reporting error frame#=%u for instant AEC, streamID=%u, "
3807 "AEC settled frame number = %u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003808 pendingRequest.frame_number, streamID,
3809 mInstantAECSettledFrameNumber);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003810 }
3811 notify_msg.type = CAMERA3_MSG_ERROR;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003812 notify_msg.message.error.frame_number = pendingRequest.frame_number;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003813 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER ;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003814 notify_msg.message.error.error_stream = buffer.stream;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003815 orchestrateNotify(&notify_msg);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003816 if (p_cam_frame_drop) {
3817 // Treat msg as error for system buffer drops
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003818 LOGE("End of reporting error frame#=%u, streamID=%u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003819 pendingRequest.frame_number, streamID);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003820 } else {
3821 // For instant AEC, inform frame drop and frame number
3822 LOGH("End of reporting error frame#=%u for instant AEC, streamID=%u, "
3823 "AEC settled frame number = %u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003824 pendingRequest.frame_number, streamID,
3825 mInstantAECSettledFrameNumber);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003826 }
3827 PendingFrameDropInfo PendingFrameDrop;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003828 PendingFrameDrop.frame_number = pendingRequest.frame_number;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003829 PendingFrameDrop.stream_ID = streamID;
3830 // Add the Frame drop info to mPendingFrameDropList
3831 mPendingFrameDropList.push_back(PendingFrameDrop);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003832 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003833 }
3834 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003835 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003836
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003837 for (auto & pendingRequest : mPendingRequestsList) {
3838 // Find the pending request with the frame number.
Shuzhen Wanga1d82a92017-09-19 14:39:43 -07003839 if (pendingRequest.frame_number < frame_number) {
3840 // Workaround for case where shutter is missing due to dropped
3841 // metadata
3842 mShutterDispatcher.markShutterReady(pendingRequest.frame_number, capture_time);
3843 } else if (pendingRequest.frame_number == frame_number) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003844 // Update the sensor timestamp.
3845 pendingRequest.timestamp = capture_time;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003846
Thierry Strudel3d639192016-09-09 11:52:26 -07003847
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07003848 /* Set the timestamp in the display metadata so that clients aware of
3849 private_handle, such as VT, can use this unmodified timestamp.
3850 The camera framework is unaware of this timestamp and cannot change it. */
Jason Lee603176d2017-05-31 11:43:27 -07003851 updateTimeStampInPendingBuffers(pendingRequest.frame_number, capture_time_av);
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07003852
Thierry Strudel3d639192016-09-09 11:52:26 -07003853 // Find channel requiring metadata, meaning internal offline postprocess
3854 // is needed.
3855 //TODO: for now, we don't support two streams requiring metadata at the same time.
3856 // (because we are not making copies, and the metadata buffer is not reference counted.)
3857 bool internalPproc = false;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003858 for (pendingBufferIterator iter = pendingRequest.buffers.begin();
3859 iter != pendingRequest.buffers.end(); iter++) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003860 if (iter->need_metadata) {
3861 internalPproc = true;
3862 QCamera3ProcessingChannel *channel =
3863 (QCamera3ProcessingChannel *)iter->stream->priv;
3864 channel->queueReprocMetadata(metadata_buf);
Thierry Strudel54dc9782017-02-15 12:12:10 -08003865 if(p_is_metabuf_queued != NULL) {
3866 *p_is_metabuf_queued = true;
3867 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003868 break;
3869 }
3870 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003871 for (auto itr = pendingRequest.internalRequestList.begin();
3872 itr != pendingRequest.internalRequestList.end(); itr++) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003873 if (itr->need_metadata) {
3874 internalPproc = true;
3875 QCamera3ProcessingChannel *channel =
3876 (QCamera3ProcessingChannel *)itr->stream->priv;
3877 channel->queueReprocMetadata(metadata_buf);
3878 break;
3879 }
3880 }
3881
Thierry Strudel54dc9782017-02-15 12:12:10 -08003882 saveExifParams(metadata);
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07003883
3884 bool *enableZsl = nullptr;
3885 if (gExposeEnableZslKey) {
3886 enableZsl = &pendingRequest.enableZsl;
3887 }
3888
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003889 resultMetadata = translateFromHalMetadata(metadata,
Shuzhen Wang181c57b2017-07-21 11:39:44 -07003890 pendingRequest, internalPproc,
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07003891 lastMetadataInBatch, enableZsl);
Thierry Strudel3d639192016-09-09 11:52:26 -07003892
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003893 updateFpsInPreviewBuffer(metadata, pendingRequest.frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003894
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003895 if (pendingRequest.blob_request) {
3896 //Dump tuning metadata if enabled and available
3897 char prop[PROPERTY_VALUE_MAX];
3898 memset(prop, 0, sizeof(prop));
3899 property_get("persist.camera.dumpmetadata", prop, "0");
3900 int32_t enabled = atoi(prop);
3901 if (enabled && metadata->is_tuning_params_valid) {
3902 dumpMetadataToFile(metadata->tuning_params,
3903 mMetaFrameCount,
3904 enabled,
3905 "Snapshot",
3906 frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003907 }
3908 }
3909
3910 if (!internalPproc) {
3911 LOGD("couldn't find need_metadata for this metadata");
3912 // Return metadata buffer
3913 if (free_and_bufdone_meta_buf) {
3914 mMetadataChannel->bufDone(metadata_buf);
3915 free(metadata_buf);
3916 }
3917 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003918
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003919 break;
Thierry Strudel3d639192016-09-09 11:52:26 -07003920 }
3921 }
3922
Chien-Yu Chen3f303522017-05-19 15:21:45 -07003923 mShutterDispatcher.markShutterReady(frame_number, capture_time);
3924
3925 // Try to send out capture result metadata.
3926 handlePendingResultMetadataWithLock(frame_number, resultMetadata);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003927 return;
3928
Thierry Strudel3d639192016-09-09 11:52:26 -07003929done_metadata:
3930 for (pendingRequestIterator i = mPendingRequestsList.begin();
3931 i != mPendingRequestsList.end() ;i++) {
3932 i->pipeline_depth++;
3933 }
3934 LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
3935 unblockRequestIfNecessary();
3936}
3937
3938/*===========================================================================
Emilian Peev7650c122017-01-19 08:24:33 -08003939 * FUNCTION : handleDepthDataLocked
3940 *
3941 * DESCRIPTION: Handles incoming depth data
3942 *
3943 * PARAMETERS : @depthData : Depth data
3944 * @frameNumber: Frame number of the incoming depth data
Emilian Peev4e0fe952017-06-30 12:40:09 -07003945 * @valid : Valid flag for the incoming data
Emilian Peev7650c122017-01-19 08:24:33 -08003946 *
3947 * RETURN :
3948 *
3949 *==========================================================================*/
3950void QCamera3HardwareInterface::handleDepthDataLocked(
Emilian Peev4e0fe952017-06-30 12:40:09 -07003951 const cam_depth_data_t &depthData, uint32_t frameNumber, uint8_t valid) {
Emilian Peev7650c122017-01-19 08:24:33 -08003952 uint32_t currentFrameNumber;
3953 buffer_handle_t *depthBuffer;
3954
3955 if (nullptr == mDepthChannel) {
Emilian Peev7650c122017-01-19 08:24:33 -08003956 return;
3957 }
3958
3959 camera3_stream_buffer_t resultBuffer =
3960 {.acquire_fence = -1,
3961 .release_fence = -1,
3962 .status = CAMERA3_BUFFER_STATUS_OK,
3963 .buffer = nullptr,
3964 .stream = mDepthChannel->getStream()};
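// Walk the depth channel's pending buffers in order. Three cases per buffer:
// - a buffer older than the incoming frame never got its depth data, so notify
//   ERROR_BUFFER and return it with an error status;
// - the matching frame gets the depth data populated (or an error status if the
//   data is invalid or populating fails);
// - a newer frame means its data has not arrived yet, so stop.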
Emilian Peev7650c122017-01-19 08:24:33 -08003965 do {
3966 depthBuffer = mDepthChannel->getOldestFrame(currentFrameNumber);
3967 if (nullptr == depthBuffer) {
3968 break;
3969 }
3970
Emilian Peev7650c122017-01-19 08:24:33 -08003971 resultBuffer.buffer = depthBuffer;
3972 if (currentFrameNumber == frameNumber) {
Emilian Peev4e0fe952017-06-30 12:40:09 -07003973 if (valid) {
3974 int32_t rc = mDepthChannel->populateDepthData(depthData,
3975 frameNumber);
3976 if (NO_ERROR != rc) {
3977 resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
3978 } else {
3979 resultBuffer.status = CAMERA3_BUFFER_STATUS_OK;
3980 }
Emilian Peev7650c122017-01-19 08:24:33 -08003981 } else {
Emilian Peev4e0fe952017-06-30 12:40:09 -07003982 resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
Emilian Peev7650c122017-01-19 08:24:33 -08003983 }
3984 } else if (currentFrameNumber > frameNumber) {
3985 break;
3986 } else {
3987 camera3_notify_msg_t notify_msg = {.type = CAMERA3_MSG_ERROR,
3988 {{currentFrameNumber, mDepthChannel->getStream(),
3989 CAMERA3_MSG_ERROR_BUFFER}}};
3990 orchestrateNotify(&notify_msg);
3991
3992 LOGE("Depth buffer for frame number: %d is missing "
3993 "returning back!", currentFrameNumber);
3994 resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
3995 }
3996 mDepthChannel->unmapBuffer(currentFrameNumber);
Chien-Yu Chen3f303522017-05-19 15:21:45 -07003997 mOutputBufferDispatcher.markBufferReady(currentFrameNumber, resultBuffer);
Emilian Peev7650c122017-01-19 08:24:33 -08003998 } while (currentFrameNumber < frameNumber);
3999}
4000
4001/*===========================================================================
4002 * FUNCTION : notifyErrorFoPendingDepthData
4003 *
4004 * DESCRIPTION: Returns error for any pending depth buffers
4005 *
4006 * PARAMETERS : depthCh - depth channel that needs to get flushed
4007 *
4008 * RETURN :
4009 *
4010 *==========================================================================*/
4011void QCamera3HardwareInterface::notifyErrorFoPendingDepthData(
4012 QCamera3DepthChannel *depthCh) {
4013 uint32_t currentFrameNumber;
4014 buffer_handle_t *depthBuffer;
4015
4016 if (nullptr == depthCh) {
4017 return;
4018 }
4019
4020 camera3_notify_msg_t notify_msg =
4021 {.type = CAMERA3_MSG_ERROR,
4022 {{0, depthCh->getStream(), CAMERA3_MSG_ERROR_BUFFER}}};
4023 camera3_stream_buffer_t resultBuffer =
4024 {.acquire_fence = -1,
4025 .release_fence = -1,
4026 .buffer = nullptr,
4027 .stream = depthCh->getStream(),
4028 .status = CAMERA3_BUFFER_STATUS_ERROR};
Emilian Peev7650c122017-01-19 08:24:33 -08004029
4030 while (nullptr !=
4031 (depthBuffer = depthCh->getOldestFrame(currentFrameNumber))) {
4032 depthCh->unmapBuffer(currentFrameNumber);
4033
4034 notify_msg.message.error.frame_number = currentFrameNumber;
4035 orchestrateNotify(&notify_msg);
4036
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004037 mOutputBufferDispatcher.markBufferReady(currentFrameNumber, resultBuffer);
Emilian Peev7650c122017-01-19 08:24:33 -08004038 };
4039}
4040
4041/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07004042 * FUNCTION : hdrPlusPerfLock
4043 *
4044 * DESCRIPTION: Acquires a perf lock for HDR+ snapshots triggered via a custom capture intent
4045 *
4046 * PARAMETERS : @metadata_buf: Metadata super_buf pointer
4047 *
4048 * RETURN : None
4049 *
4050 *==========================================================================*/
4051void QCamera3HardwareInterface::hdrPlusPerfLock(
4052 mm_camera_super_buf_t *metadata_buf)
4053{
4054 if (NULL == metadata_buf) {
4055 LOGE("metadata_buf is NULL");
4056 return;
4057 }
4058 metadata_buffer_t *metadata =
4059 (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
4060 int32_t *p_frame_number_valid =
4061 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
4062 uint32_t *p_frame_number =
4063 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
4064
4065 if (p_frame_number_valid == NULL || p_frame_number == NULL) {
4066 LOGE("%s: Invalid metadata", __func__);
4067 return;
4068 }
4069
Wei Wang01385482017-08-03 10:49:34 -07004070 //acquire perf lock for 2 secs after the last HDR frame is captured
4071 constexpr uint32_t HDR_PLUS_PERF_TIME_OUT = 2000;
Thierry Strudel3d639192016-09-09 11:52:26 -07004072 if ((p_frame_number_valid != NULL) && *p_frame_number_valid) {
4073 if ((p_frame_number != NULL) &&
4074 (mLastCustIntentFrmNum == (int32_t)*p_frame_number)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004075 mPerfLockMgr.acquirePerfLock(PERF_LOCK_TAKE_SNAPSHOT, HDR_PLUS_PERF_TIME_OUT);
Thierry Strudel3d639192016-09-09 11:52:26 -07004076 }
4077 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004078}
4079
4080/*===========================================================================
4081 * FUNCTION : handleInputBufferWithLock
4082 *
4083 * DESCRIPTION: Handles input buffer and shutter callback with mMutex lock held.
4084 *
4085 * PARAMETERS : @frame_number: frame number of the input buffer
4086 *
4087 * RETURN :
4088 *
4089 *==========================================================================*/
4090void QCamera3HardwareInterface::handleInputBufferWithLock(uint32_t frame_number)
4091{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004092 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_IN_BUF_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07004093 pendingRequestIterator i = mPendingRequestsList.begin();
4094 while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
4095 i++;
4096 }
4097 if (i != mPendingRequestsList.end() && i->input_buffer) {
4098 //found the right request
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004099 CameraMetadata settings;
4100 nsecs_t capture_time = systemTime(CLOCK_MONOTONIC);
4101 if(i->settings) {
4102 settings = i->settings;
4103 if (settings.exists(ANDROID_SENSOR_TIMESTAMP)) {
4104 capture_time = settings.find(ANDROID_SENSOR_TIMESTAMP).data.i64[0];
Thierry Strudel3d639192016-09-09 11:52:26 -07004105 } else {
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004106 LOGE("No timestamp in input settings! Using current one.");
Thierry Strudel3d639192016-09-09 11:52:26 -07004107 }
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004108 } else {
4109 LOGE("Input settings missing!");
Thierry Strudel3d639192016-09-09 11:52:26 -07004110 }
4111
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004112 mShutterDispatcher.markShutterReady(frame_number, capture_time);
4113 LOGD("Input request metadata notify frame_number = %u, capture_time = %llu",
4114 i->frame_number, capture_time);
Thierry Strudel3d639192016-09-09 11:52:26 -07004115
4116 camera3_capture_result result;
4117 memset(&result, 0, sizeof(camera3_capture_result));
4118 result.frame_number = frame_number;
4119 result.result = i->settings;
4120 result.input_buffer = i->input_buffer;
4121 result.partial_result = PARTIAL_RESULT_COUNT;
4122
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004123 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -07004124 LOGD("Input request metadata and input buffer frame_number = %u",
4125 i->frame_number);
4126 i = erasePendingRequest(i);
Chien-Yu Chen588cc852017-06-23 18:39:51 -07004127
4128 // Dispatch result metadata that may be just unblocked by this reprocess result.
4129 dispatchResultMetadataWithLock(frame_number, /*isLiveRequest*/false);
Thierry Strudel3d639192016-09-09 11:52:26 -07004130 } else {
4131 LOGE("Could not find input request for frame number %d", frame_number);
4132 }
4133}
4134
4135/*===========================================================================
4136 * FUNCTION : handleBufferWithLock
4137 *
4138 * DESCRIPTION: Handles image buffer callback with mMutex lock held.
4139 *
4140 * PARAMETERS : @buffer: image buffer for the callback
4141 * @frame_number: frame number of the image buffer
4142 *
4143 * RETURN :
4144 *
4145 *==========================================================================*/
4146void QCamera3HardwareInterface::handleBufferWithLock(
4147 camera3_stream_buffer_t *buffer, uint32_t frame_number)
4148{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004149 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_BUF_LKD);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004150
4151 if (buffer->stream->format == HAL_PIXEL_FORMAT_BLOB) {
4152 mPerfLockMgr.releasePerfLock(PERF_LOCK_TAKE_SNAPSHOT);
4153 }
4154
Thierry Strudel3d639192016-09-09 11:52:26 -07004155 /* Nothing to be done during error state */
4156 if ((ERROR == mState) || (DEINIT == mState)) {
4157 return;
4158 }
4159 if (mFlushPerf) {
4160 handleBuffersDuringFlushLock(buffer);
4161 return;
4162 }
4163 //not in flush
4164 // If the frame number doesn't exist in the pending request list,
4165 // directly send the buffer to the frameworks, and update pending buffers map
4166 // Otherwise, book-keep the buffer.
4167 pendingRequestIterator i = mPendingRequestsList.begin();
4168 while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
4169 i++;
4170 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004171
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004172 if (i != mPendingRequestsList.end()) {
Thierry Strudel3d639192016-09-09 11:52:26 -07004173 if (i->input_buffer) {
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004174 // For a reprocessing request, try to send out result metadata.
4175 handlePendingResultMetadataWithLock(frame_number, nullptr);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004176 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004177 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004178
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004179 // Check if this frame was dropped.
4180 for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
4181 m != mPendingFrameDropList.end(); m++) {
4182 QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
4183 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
4184 if((m->stream_ID == streamID) && (m->frame_number==frame_number) ) {
4185 buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
4186 LOGD("Stream STATUS_ERROR frame_number=%d, streamID=%d",
4187 frame_number, streamID);
4188 m = mPendingFrameDropList.erase(m);
4189 break;
4190 }
4191 }
4192
Binhao Lin09245482017-08-31 18:25:29 -07004193 // WAR for encoder avtimer timestamp issue
4194 QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
4195 if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask() &&
4196 m_bAVTimerEnabled) {
4197 for (auto req = mPendingBuffersMap.mPendingBuffersInRequest.begin();
4198 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
4199 if (req->frame_number != frame_number)
4200 continue;
4201 if(req->av_timestamp == 0) {
4202 buffer->status |= CAMERA3_BUFFER_STATUS_ERROR;
4203 }
4204 else {
4205 struct private_handle_t *priv_handle =
4206 (struct private_handle_t *) (*(buffer->buffer));
4207 setMetaData(priv_handle, SET_VT_TIMESTAMP, &(req->av_timestamp));
4208 }
4209 }
4210 }
4211
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004212 buffer->status |= mPendingBuffersMap.getBufErrStatus(buffer->buffer);
4213 LOGH("result frame_number = %d, buffer = %p",
4214 frame_number, buffer->buffer);
4215
4216 mPendingBuffersMap.removeBuf(buffer->buffer);
4217 mOutputBufferDispatcher.markBufferReady(frame_number, *buffer);
4218
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004219 if (mPreviewStarted == false) {
4220 QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
4221 if ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask()) {
Chien-Yu Chen509314b2017-04-07 15:27:55 -07004222 logEaselEvent("EASEL_STARTUP_LATENCY", "Preview Started");
4223
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004224 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
4225 mPerfLockMgr.releasePerfLock(PERF_LOCK_OPEN_CAMERA);
4226 mPreviewStarted = true;
4227
4228 // Set power hint for preview
4229 mPerfLockMgr.acquirePerfLock(PERF_LOCK_POWERHINT_ENCODE, 0);
4230 }
4231 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004232}
4233
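/*===========================================================================
 * FUNCTION : handlePendingResultMetadataWithLock
 *
 * DESCRIPTION: Attaches the given result metadata to the pending request with the
 *              matching frame number, updates its partial result count, notifies the
 *              HDR+ client for live requests, strips the lens shading map when it was
 *              not requested, and then dispatches any results that are now ready.
 *              Called with mMutex lock held.
 *
 * PARAMETERS : @frameNumber   : frame number of the result metadata
 *              @resultMetadata: result metadata for the frame
 *
 * RETURN     :
 *
 *==========================================================================*/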
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004234void QCamera3HardwareInterface::handlePendingResultMetadataWithLock(uint32_t frameNumber,
Chien-Yu Chenbc730232017-07-12 14:49:55 -07004235 camera_metadata_t *resultMetadata)
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004236{
4237 // Find the pending request for this result metadata.
4238 auto requestIter = mPendingRequestsList.begin();
4239 while (requestIter != mPendingRequestsList.end() && requestIter->frame_number != frameNumber) {
4240 requestIter++;
4241 }
4242
4243 if (requestIter == mPendingRequestsList.end()) {
4244 ALOGE("%s: Cannot find a pending request for frame number %u.", __FUNCTION__, frameNumber);
4245 return;
4246 }
4247
4248 // Update the result metadata
4249 requestIter->resultMetadata = resultMetadata;
4250
4251 // Check what type of request this is.
4252 bool liveRequest = false;
4253 if (requestIter->hdrplus) {
Chien-Yu Chen9264fe92017-04-29 03:28:46 +00004254 // HDR+ request doesn't have partial results.
4255 requestIter->partial_result_cnt = PARTIAL_RESULT_COUNT;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004256 } else if (requestIter->input_buffer != nullptr) {
4257 // Reprocessing request result is the same as settings.
4258 requestIter->resultMetadata = requestIter->settings;
4259 // Reprocessing request doesn't have partial results.
4260 requestIter->partial_result_cnt = PARTIAL_RESULT_COUNT;
4261 } else {
4262 liveRequest = true;
Chien-Yu Chen0a921f92017-08-27 17:25:33 -07004263 requestIter->partial_result_cnt = PARTIAL_RESULT_COUNT;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004264 mPendingLiveRequest--;
4265
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07004266 {
Chien-Yu Chen77ccd022017-06-23 12:00:36 -07004267 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07004268 // For a live request, send the metadata to HDR+ client.
4269 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
4270 gHdrPlusClient->notifyFrameMetadata(frameNumber, *resultMetadata,
4271 requestIter->partial_result_cnt == PARTIAL_RESULT_COUNT);
4272 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004273 }
4274 }
4275
Chien-Yu Chenbc730232017-07-12 14:49:55 -07004276 // Remove the lens shading map if it was not requested.
4277 if (requestIter->requestedLensShadingMapMode == ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF) {
4278 CameraMetadata metadata;
4279 metadata.acquire(resultMetadata);
4280 metadata.erase(ANDROID_STATISTICS_LENS_SHADING_MAP);
4281 metadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE,
4282 &requestIter->requestedLensShadingMapMode, 1);
4283
4284 requestIter->resultMetadata = metadata.release();
4285 }
4286
Chien-Yu Chen588cc852017-06-23 18:39:51 -07004287 dispatchResultMetadataWithLock(frameNumber, liveRequest);
4288}
4289
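/*===========================================================================
 * FUNCTION : dispatchResultMetadataWithLock
 *
 * DESCRIPTION: Walks mPendingRequestsList in frame-number order and sends out every
 *              result whose metadata is ready, keeping shutter callbacks and result
 *              metadata in order. When the triggering result belongs to a live
 *              request, earlier live requests that still have no metadata are
 *              completed with ERROR_RESULT. Called with mMutex lock held.
 *
 * PARAMETERS : @frameNumber  : frame number of the result that triggered the dispatch
 *              @isLiveRequest: true if the triggering result belongs to a live request
 *
 * RETURN     :
 *
 *==========================================================================*/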
4290void QCamera3HardwareInterface::dispatchResultMetadataWithLock(uint32_t frameNumber,
4291 bool isLiveRequest) {
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004292 // The pending requests are ordered by increasing frame numbers. The result metadata are ready
4293 // to be sent if all previous pending requests are ready to be sent.
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004294 bool readyToSend = true;
4295
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004296 // Iterate through the pending requests to send out result metadata that are ready. Also if
4297 // this result metadata belongs to a live request, notify errors for previous live requests
4298 // that don't have result metadata yet.
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004299 auto iter = mPendingRequestsList.begin();
4300 while (iter != mPendingRequestsList.end()) {
4301 // Check if current pending request is ready. If it's not ready, the following pending
4302 // requests are also not ready.
4303 if (readyToSend && iter->resultMetadata == nullptr) {
4304 readyToSend = false;
4305 }
4306
4307 bool thisLiveRequest = iter->hdrplus == false && iter->input_buffer == nullptr;
Shuzhen Wanga1d82a92017-09-19 14:39:43 -07004308 bool errorResult = false;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004309
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004310 camera3_capture_result_t result = {};
4311 result.frame_number = iter->frame_number;
4312 result.result = iter->resultMetadata;
4313 result.partial_result = iter->partial_result_cnt;
4314
4315 // If this pending buffer has result metadata, we may be able to send out shutter callback
4316 // and result metadata.
4317 if (iter->resultMetadata != nullptr) {
4318 if (!readyToSend) {
4319 // If any of the previous pending request is not ready, this pending request is
4320 // also not ready to send in order to keep shutter callbacks and result metadata
4321 // in order.
4322 iter++;
4323 continue;
4324 }
Shuzhen Wanga1d82a92017-09-19 14:39:43 -07004325 // Notify ERROR_RESULT if partial result was dropped.
4326 errorResult = iter->partialResultDropped;
Chien-Yu Chen588cc852017-06-23 18:39:51 -07004327 } else if (iter->frame_number < frameNumber && isLiveRequest && thisLiveRequest) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004328 // If the result metadata belongs to a live request, notify errors for previous pending
4329 // live requests.
4330 mPendingLiveRequest--;
4331
Shuzhen Wanga1d82a92017-09-19 14:39:43 -07004332 LOGE("Error: HAL missed metadata for frame number %d", iter->frame_number);
4333 errorResult = true;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004334 } else {
4335 iter++;
4336 continue;
4337 }
4338
Shuzhen Wanga1d82a92017-09-19 14:39:43 -07004339 if (errorResult) {
4340 notifyError(iter->frame_number, CAMERA3_MSG_ERROR_RESULT);
4341 } else {
4342 result.output_buffers = nullptr;
4343 result.num_output_buffers = 0;
4344 orchestrateResult(&result);
4345 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004346 // For reprocessing, result metadata is the same as settings so do not free it here to
4347 // avoid double free.
4348 if (result.result != iter->settings) {
4349 free_camera_metadata((camera_metadata_t *)result.result);
4350 }
4351 iter->resultMetadata = nullptr;
4352 iter = erasePendingRequest(iter);
4353 }
4354
Chien-Yu Chen588cc852017-06-23 18:39:51 -07004355 if (isLiveRequest) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004356 for (auto &iter : mPendingRequestsList) {
4357 // Increment pipeline depth for the following pending requests.
4358 if (iter.frame_number > frameNumber) {
4359 iter.pipeline_depth++;
4360 }
4361 }
4362 }
4363
4364 unblockRequestIfNecessary();
4365}
4366
Thierry Strudel3d639192016-09-09 11:52:26 -07004367/*===========================================================================
4368 * FUNCTION : unblockRequestIfNecessary
4369 *
4370 * DESCRIPTION: Unblock capture_request if max_buffer hasn't been reached. Note
4371 * that mMutex is held when this function is called.
4372 *
4373 * PARAMETERS :
4374 *
4375 * RETURN :
4376 *
4377 *==========================================================================*/
4378void QCamera3HardwareInterface::unblockRequestIfNecessary()
4379{
4380 // Unblock process_capture_request
4381 pthread_cond_signal(&mRequestCond);
4382}
4383
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004384/*===========================================================================
4385 * FUNCTION : isHdrSnapshotRequest
4386 *
4387 * DESCRIPTION: Function to determine if the request is for an HDR snapshot
4388 *
4389 * PARAMETERS : camera3 request structure
4390 *
4391 * RETURN : boolean decision variable
4392 *
4393 *==========================================================================*/
4394bool QCamera3HardwareInterface::isHdrSnapshotRequest(camera3_capture_request *request)
4395{
4396 if (request == NULL) {
4397 LOGE("Invalid request handle");
4398 assert(0);
4399 return false;
4400 }
4401
4402 if (!mForceHdrSnapshot) {
4403 CameraMetadata frame_settings;
4404 frame_settings = request->settings;
4405
4406 if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
4407 uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
4408 if (metaMode != ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
4409 return false;
4410 }
4411 } else {
4412 return false;
4413 }
4414
4415 if (frame_settings.exists(ANDROID_CONTROL_SCENE_MODE)) {
4416 uint8_t fwk_sceneMode = frame_settings.find(ANDROID_CONTROL_SCENE_MODE).data.u8[0];
4417 if (fwk_sceneMode != ANDROID_CONTROL_SCENE_MODE_HDR) {
4418 return false;
4419 }
4420 } else {
4421 return false;
4422 }
4423 }
4424
4425 for (uint32_t i = 0; i < request->num_output_buffers; i++) {
4426 if (request->output_buffers[i].stream->format
4427 == HAL_PIXEL_FORMAT_BLOB) {
4428 return true;
4429 }
4430 }
4431
4432 return false;
4433}
4434/*===========================================================================
4435 * FUNCTION : orchestrateRequest
4436 *
4437 * DESCRIPTION: Orchestrates a capture request from camera service
4438 *
4439 * PARAMETERS :
4440 * @request : request from framework to process
4441 *
4442 * RETURN : Error status codes
4443 *
4444 *==========================================================================*/
4445int32_t QCamera3HardwareInterface::orchestrateRequest(
4446 camera3_capture_request_t *request)
4447{
4448
4449 uint32_t originalFrameNumber = request->frame_number;
4450 uint32_t originalOutputCount = request->num_output_buffers;
4451 const camera_metadata_t *original_settings = request->settings;
4452 List<InternalRequest> internallyRequestedStreams;
4453 List<InternalRequest> emptyInternalList;
4454
4455 if (isHdrSnapshotRequest(request) && request->input_buffer == NULL) {
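// Expand the single framework HDR snapshot request into an internal bracketed
// sequence: AE is locked and the exposure compensation is stepped between
// metering-only settling captures and full captures, and only the capture mapped
// to the original framework frame number is returned to the framework; the other
// internally numbered captures are dropped via EMPTY_FRAMEWORK_FRAME_NUMBER.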
4456 LOGD("Framework requested:%d buffers in HDR snapshot", request->num_output_buffers);
4457 uint32_t internalFrameNumber;
4458 CameraMetadata modified_meta;
4459
4460
4461 /* Add Blob channel to list of internally requested streams */
4462 for (uint32_t i = 0; i < request->num_output_buffers; i++) {
4463 if (request->output_buffers[i].stream->format
4464 == HAL_PIXEL_FORMAT_BLOB) {
4465 InternalRequest streamRequested;
4466 streamRequested.meteringOnly = 1;
4467 streamRequested.need_metadata = 0;
4468 streamRequested.stream = request->output_buffers[i].stream;
4469 internallyRequestedStreams.push_back(streamRequested);
4470 }
4471 }
4472 request->num_output_buffers = 0;
4473 auto itr = internallyRequestedStreams.begin();
4474
4475 /* Modify setting to set compensation */
4476 modified_meta = request->settings;
4477 int32_t expCompensation = GB_HDR_HALF_STEP_EV;
4478 uint8_t aeLock = 1;
4479 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4480 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4481 camera_metadata_t *modified_settings = modified_meta.release();
4482 request->settings = modified_settings;
4483
4484 /* Capture Settling & -2x frame */
4485 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4486 request->frame_number = internalFrameNumber;
4487 processCaptureRequest(request, internallyRequestedStreams);
4488
4489 request->num_output_buffers = originalOutputCount;
4490 _orchestrationDb.allocStoreInternalFrameNumber(originalFrameNumber, internalFrameNumber);
4491 request->frame_number = internalFrameNumber;
4492 processCaptureRequest(request, emptyInternalList);
4493 request->num_output_buffers = 0;
4494
4495 modified_meta = modified_settings;
4496 expCompensation = 0;
4497 aeLock = 1;
4498 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4499 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4500 modified_settings = modified_meta.release();
4501 request->settings = modified_settings;
4502
4503 /* Capture Settling & 0X frame */
4504
4505 itr = internallyRequestedStreams.begin();
4506 if (itr == internallyRequestedStreams.end()) {
4507 LOGE("Error Internally Requested Stream list is empty");
4508 assert(0);
4509 } else {
4510 itr->need_metadata = 0;
4511 itr->meteringOnly = 1;
4512 }
4513
4514 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4515 request->frame_number = internalFrameNumber;
4516 processCaptureRequest(request, internallyRequestedStreams);
4517
4518 itr = internallyRequestedStreams.begin();
4519 if (itr == internallyRequestedStreams.end()) {
4520 ALOGE("Error Internally Requested Stream list is empty");
4521 assert(0);
4522 } else {
4523 itr->need_metadata = 1;
4524 itr->meteringOnly = 0;
4525 }
4526
4527 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4528 request->frame_number = internalFrameNumber;
4529 processCaptureRequest(request, internallyRequestedStreams);
4530
4531 /* Capture 2X frame*/
4532 modified_meta = modified_settings;
4533 expCompensation = GB_HDR_2X_STEP_EV;
4534 aeLock = 1;
4535 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4536 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4537 modified_settings = modified_meta.release();
4538 request->settings = modified_settings;
4539
4540 itr = internallyRequestedStreams.begin();
4541 if (itr == internallyRequestedStreams.end()) {
4542 ALOGE("Error Internally Requested Stream list is empty");
4543 assert(0);
4544 } else {
4545 itr->need_metadata = 0;
4546 itr->meteringOnly = 1;
4547 }
4548 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4549 request->frame_number = internalFrameNumber;
4550 processCaptureRequest(request, internallyRequestedStreams);
4551
4552 itr = internallyRequestedStreams.begin();
4553 if (itr == internallyRequestedStreams.end()) {
4554 ALOGE("Error Internally Requested Stream list is empty");
4555 assert(0);
4556 } else {
4557 itr->need_metadata = 1;
4558 itr->meteringOnly = 0;
4559 }
4560
4561 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4562 request->frame_number = internalFrameNumber;
4563 processCaptureRequest(request, internallyRequestedStreams);
4564
4565
4566 /* Capture 2X on original streaming config*/
4567 internallyRequestedStreams.clear();
4568
4569 /* Restore original settings pointer */
4570 request->settings = original_settings;
4571 } else {
4572 uint32_t internalFrameNumber;
4573 _orchestrationDb.allocStoreInternalFrameNumber(request->frame_number, internalFrameNumber);
4574 request->frame_number = internalFrameNumber;
4575 return processCaptureRequest(request, internallyRequestedStreams);
4576 }
4577
4578 return NO_ERROR;
4579}
4580
4581/*===========================================================================
4582 * FUNCTION : orchestrateResult
4583 *
4584 * DESCRIPTION: Orchestrates a capture result to camera service
4585 *
4586 * PARAMETERS :
4587 * @result : capture result to send to camera service
4588 *
4589 * RETURN :
4590 *
4591 *==========================================================================*/
4592void QCamera3HardwareInterface::orchestrateResult(
4593 camera3_capture_result_t *result)
4594{
4595 uint32_t frameworkFrameNumber;
4596 int32_t rc = _orchestrationDb.getFrameworkFrameNumber(result->frame_number,
4597 frameworkFrameNumber);
4598 if (rc != NO_ERROR) {
4599 LOGE("Cannot find translated frameworkFrameNumber");
4600 assert(0);
4601 } else {
4602 if (frameworkFrameNumber == EMPTY_FRAMEWORK_FRAME_NUMBER) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08004603 LOGD("Internal Request drop the result");
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004604 } else {
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004605 if (result->result != NULL) {
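// Rewrite ANDROID_SYNC_FRAME_NUMBER in the result so it refers to the framework
// frame number instead of the internal one.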
Binhao Lin299ffc92017-04-27 11:22:47 -07004606 camera_metadata_t *metadata = const_cast<camera_metadata_t*>(result->result);
4607 camera_metadata_entry_t entry;
4608 int ret = find_camera_metadata_entry(metadata, ANDROID_SYNC_FRAME_NUMBER, &entry);
4609 if (ret == OK) {
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004610 int64_t sync_frame_number = frameworkFrameNumber;
Binhao Lin299ffc92017-04-27 11:22:47 -07004611 ret = update_camera_metadata_entry(metadata, entry.index, &sync_frame_number, 1, &entry);
4612 if (ret != OK)
4613 LOGE("Update ANDROID_SYNC_FRAME_NUMBER Error!");
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004614 }
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004615 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004616 result->frame_number = frameworkFrameNumber;
4617 mCallbackOps->process_capture_result(mCallbackOps, result);
4618 }
4619 }
4620}
4621
4622/*===========================================================================
4623 * FUNCTION : orchestrateNotify
4624 *
4625 * DESCRIPTION: Orchestrates a notify to camera service
4626 *
4627 * PARAMETERS :
4628 * @notify_msg : notify message to send to camera service
4629 *
4630 * RETURN :
4631 *
4632 *==========================================================================*/
4633void QCamera3HardwareInterface::orchestrateNotify(camera3_notify_msg_t *notify_msg)
4634{
4635 uint32_t frameworkFrameNumber;
4636 uint32_t internalFrameNumber = notify_msg->message.shutter.frame_number;
Thierry Strudel2896d122017-02-23 19:18:03 -08004637 int32_t rc = NO_ERROR;
4638
4639 rc = _orchestrationDb.getFrameworkFrameNumber(internalFrameNumber,
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004640 frameworkFrameNumber);
Thierry Strudel2896d122017-02-23 19:18:03 -08004641
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004642 if (rc != NO_ERROR) {
Thierry Strudel2896d122017-02-23 19:18:03 -08004643 if (notify_msg->message.error.error_code == CAMERA3_MSG_ERROR_DEVICE) {
4644 LOGD("Sending CAMERA3_MSG_ERROR_DEVICE to framework");
4645 frameworkFrameNumber = 0;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004646 } else {
Thierry Strudel2896d122017-02-23 19:18:03 -08004647 LOGE("Cannot find translated frameworkFrameNumber");
4648 assert(0);
4649 return;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004650 }
4651 }
Thierry Strudel2896d122017-02-23 19:18:03 -08004652
4653 if (frameworkFrameNumber == EMPTY_FRAMEWORK_FRAME_NUMBER) {
4654 LOGD("Internal Request drop the notifyCb");
4655 } else {
4656 notify_msg->message.shutter.frame_number = frameworkFrameNumber;
4657 mCallbackOps->notify(mCallbackOps, notify_msg);
4658 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004659}
4660
4661/*===========================================================================
4662 * FUNCTION : FrameNumberRegistry
4663 *
4664 * DESCRIPTION: Constructor
4665 *
4666 * PARAMETERS :
4667 *
4668 * RETURN :
4669 *
4670 *==========================================================================*/
4671FrameNumberRegistry::FrameNumberRegistry()
4672{
4673 _nextFreeInternalNumber = INTERNAL_FRAME_STARTING_NUMBER;
4674}
4675
4676/*===========================================================================
4677 * FUNCTION : ~FrameNumberRegistry
4678 *
4679 * DESCRIPTION: Destructor
4680 *
4681 * PARAMETERS :
4682 *
4683 * RETURN :
4684 *
4685 *==========================================================================*/
4686FrameNumberRegistry::~FrameNumberRegistry()
4687{
4688}
4689
4690/*===========================================================================
4691 * FUNCTION : PurgeOldEntriesLocked
4692 *
4693 * DESCRIPTION: Maintenance function that triggers the LRU cleanup mechanism
4694 *
4695 * PARAMETERS :
4696 *
4697 * RETURN : NONE
4698 *
4699 *==========================================================================*/
4700void FrameNumberRegistry::purgeOldEntriesLocked()
4701{
4702 while (_register.begin() != _register.end()) {
4703 auto itr = _register.begin();
4704 if (itr->first < (_nextFreeInternalNumber - FRAME_REGISTER_LRU_SIZE)) {
4705 _register.erase(itr);
4706 } else {
4707 return;
4708 }
4709 }
4710}
4711
4712/*===========================================================================
4713 * FUNCTION : allocStoreInternalFrameNumber
4714 *
4715 * DESCRIPTION: Method to record a framework request and associate a new
4716 * internal request number with it
4717 *
4718 * PARAMETERS :
4719 * @fFrameNumber: Identifier given by framework
4720 * @internalFN : Output parameter which will have the newly generated internal
4721 * entry
4722 *
4723 * RETURN : Error code
4724 *
4725 *==========================================================================*/
4726int32_t FrameNumberRegistry::allocStoreInternalFrameNumber(uint32_t frameworkFrameNumber,
4727 uint32_t &internalFrameNumber)
4728{
4729 Mutex::Autolock lock(mRegistryLock);
4730 internalFrameNumber = _nextFreeInternalNumber++;
4731 LOGD("Storing ff#:%d, with internal:%d", frameworkFrameNumber, internalFrameNumber);
4732 _register.insert(std::pair<uint32_t,uint32_t>(internalFrameNumber, frameworkFrameNumber));
4733 purgeOldEntriesLocked();
4734 return NO_ERROR;
4735}
4736
4737/*===========================================================================
4738 * FUNCTION : generateStoreInternalFrameNumber
4739 *
4740 * DESCRIPTION: Method to generate and store a new internal request number,
4741 * independent of any association with a framework request
4742 *
4743 * PARAMETERS :
4744 * @internalFrame#: Output parameter which will have the newly generated internal frame number
4745 *
4746 *
4747 * RETURN : Error code
4748 *
4749 *==========================================================================*/
4750int32_t FrameNumberRegistry::generateStoreInternalFrameNumber(uint32_t &internalFrameNumber)
4751{
4752 Mutex::Autolock lock(mRegistryLock);
4753 internalFrameNumber = _nextFreeInternalNumber++;
4754 LOGD("Generated internal framenumber:%d", internalFrameNumber);
4755 _register.insert(std::pair<uint32_t,uint32_t>(internalFrameNumber, EMPTY_FRAMEWORK_FRAME_NUMBER));
4756 purgeOldEntriesLocked();
4757 return NO_ERROR;
4758}
4759
4760/*===========================================================================
4761 * FUNCTION : getFrameworkFrameNumber
4762 *
4763 * DESCRIPTION: Method to query the framework frame number given an internal one
4764 *
4765 * PARAMETERS :
4766 * @internalFrame#: Internal reference
4767 * @frameworkframenumber: Output parameter holding framework frame entry
4768 *
4769 * RETURN : Error code
4770 *
4771 *==========================================================================*/
4772int32_t FrameNumberRegistry::getFrameworkFrameNumber(uint32_t internalFrameNumber,
4773 uint32_t &frameworkFrameNumber)
4774{
4775 Mutex::Autolock lock(mRegistryLock);
4776 auto itr = _register.find(internalFrameNumber);
4777 if (itr == _register.end()) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08004778 LOGE("Cannot find internal#: %d", internalFrameNumber);
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004779 return -ENOENT;
4780 }
4781
4782 frameworkFrameNumber = itr->second;
4783 purgeOldEntriesLocked();
4784 return NO_ERROR;
4785}
Thierry Strudel3d639192016-09-09 11:52:26 -07004786
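/*===========================================================================
 * FUNCTION : fillPbStreamConfig
 *
 * DESCRIPTION: Fills an HDR+ (pbcamera) stream configuration from a channel stream:
 *              stream ID, dimensions, pixel format, per-plane stride and scanline,
 *              and the padding implied by the frame length beyond the plane sizes.
 *
 * PARAMETERS : @config     : stream configuration to fill
 *              @pbStreamId : pbcamera stream ID to assign
 *              @channel    : channel providing the stream
 *              @streamIndex: index of the stream within the channel
 *
 * RETURN     : OK on success; BAD_VALUE or NAME_NOT_FOUND on failure
 *
 *==========================================================================*/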
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004787status_t QCamera3HardwareInterface::fillPbStreamConfig(
Chien-Yu Chen14d3e392017-07-10 18:27:05 -07004788 pbcamera::StreamConfiguration *config, uint32_t pbStreamId, QCamera3Channel *channel,
4789 uint32_t streamIndex) {
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004790 if (config == nullptr) {
4791 LOGE("%s: config is null", __FUNCTION__);
4792 return BAD_VALUE;
4793 }
4794
4795 if (channel == nullptr) {
4796 LOGE("%s: channel is null", __FUNCTION__);
4797 return BAD_VALUE;
4798 }
4799
4800 QCamera3Stream *stream = channel->getStreamByIndex(streamIndex);
4801 if (stream == nullptr) {
4802 LOGE("%s: Failed to get stream %d in channel.", __FUNCTION__, streamIndex);
4803 return NAME_NOT_FOUND;
4804 }
4805
4806 const cam_stream_info_t* streamInfo = stream->getStreamInfo();
4807 if (streamInfo == nullptr) {
4808 LOGE("%s: Failed to get stream info for stream %d in channel.", __FUNCTION__, streamIndex);
4809 return NAME_NOT_FOUND;
4810 }
4811
4812 config->id = pbStreamId;
4813 config->image.width = streamInfo->dim.width;
4814 config->image.height = streamInfo->dim.height;
4815 config->image.padding = 0;
Chien-Yu Chen14d3e392017-07-10 18:27:05 -07004816
4817 int bytesPerPixel = 0;
4818
4819 switch (streamInfo->fmt) {
4820 case CAM_FORMAT_YUV_420_NV21:
4821 config->image.format = HAL_PIXEL_FORMAT_YCrCb_420_SP;
4822 bytesPerPixel = 1;
4823 break;
4824 case CAM_FORMAT_YUV_420_NV12:
4825 case CAM_FORMAT_YUV_420_NV12_VENUS:
4826 config->image.format = HAL_PIXEL_FORMAT_YCbCr_420_SP;
4827 bytesPerPixel = 1;
4828 break;
4829 default:
4830 ALOGE("%s: Stream format %d not supported.", __FUNCTION__, streamInfo->fmt);
4831 return BAD_VALUE;
4832 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004833
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004834 uint32_t totalPlaneSize = 0;
4835
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004836 // Fill plane information.
4837 for (uint32_t i = 0; i < streamInfo->buf_planes.plane_info.num_planes; i++) {
4838 pbcamera::PlaneConfiguration plane;
Chien-Yu Chen14d3e392017-07-10 18:27:05 -07004839 plane.stride = streamInfo->buf_planes.plane_info.mp[i].stride * bytesPerPixel;
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004840 plane.scanline = streamInfo->buf_planes.plane_info.mp[i].scanline;
4841 config->image.planes.push_back(plane);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004842
4843 totalPlaneSize += (plane.stride * plane.scanline);
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004844 }
4845
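// Padding is whatever the frame length reserves beyond the sum of the plane sizes.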
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004846 config->image.padding = streamInfo->buf_planes.plane_info.frame_len - totalPlaneSize;
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004847 return OK;
4848}
4849
Thierry Strudel3d639192016-09-09 11:52:26 -07004850/*===========================================================================
4851 * FUNCTION : processCaptureRequest
4852 *
4853 * DESCRIPTION: process a capture request from camera service
4854 *
4855 * PARAMETERS :
4856 * @request : request from framework to process
4857 *
4858 * RETURN :
4859 *
4860 *==========================================================================*/
4861int QCamera3HardwareInterface::processCaptureRequest(
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004862 camera3_capture_request_t *request,
4863 List<InternalRequest> &internallyRequestedStreams)
Thierry Strudel3d639192016-09-09 11:52:26 -07004864{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004865 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_PROC_CAP_REQ);
Thierry Strudel3d639192016-09-09 11:52:26 -07004866 int rc = NO_ERROR;
4867 int32_t request_id;
4868 CameraMetadata meta;
Thierry Strudel3d639192016-09-09 11:52:26 -07004869 bool isVidBufRequested = false;
4870 camera3_stream_buffer_t *pInputBuffer = NULL;
Thierry Strudel54dc9782017-02-15 12:12:10 -08004871 char prop[PROPERTY_VALUE_MAX];
Thierry Strudel3d639192016-09-09 11:52:26 -07004872
4873 pthread_mutex_lock(&mMutex);
4874
4875 // Validate current state
4876 switch (mState) {
4877 case CONFIGURED:
4878 case STARTED:
4879 /* valid state */
4880 break;
4881
4882 case ERROR:
4883 pthread_mutex_unlock(&mMutex);
4884 handleCameraDeviceError();
4885 return -ENODEV;
4886
4887 default:
4888 LOGE("Invalid state %d", mState);
4889 pthread_mutex_unlock(&mMutex);
4890 return -ENODEV;
4891 }
4892
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004893 rc = validateCaptureRequest(request, internallyRequestedStreams);
Thierry Strudel3d639192016-09-09 11:52:26 -07004894 if (rc != NO_ERROR) {
4895 LOGE("incoming request is not valid");
4896 pthread_mutex_unlock(&mMutex);
4897 return rc;
4898 }
4899
4900 meta = request->settings;
4901
4902 // For first capture request, send capture intent, and
4903 // stream on all streams
4904 if (mState == CONFIGURED) {
Chien-Yu Chene96475e2017-04-11 11:53:26 -07004905 logEaselEvent("EASEL_STARTUP_LATENCY", "First request");
Thierry Strudel3d639192016-09-09 11:52:26 -07004906 // send an unconfigure to the backend so that the isp
4907 // resources are deallocated
4908 if (!mFirstConfiguration) {
4909 cam_stream_size_info_t stream_config_info;
4910 int32_t hal_version = CAM_HAL_V3;
4911 memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
4912 stream_config_info.buffer_info.min_buffers =
4913 MIN_INFLIGHT_REQUESTS;
4914 stream_config_info.buffer_info.max_buffers =
Thierry Strudel2896d122017-02-23 19:18:03 -08004915 m_bIs4KVideo ? 0 :
Jason Leea46ad5e2017-07-07 15:20:56 -07004916 m_bEis3PropertyEnabled && m_bIsVideo ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07004917 clear_metadata_buffer(mParameters);
4918 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4919 CAM_INTF_PARM_HAL_VERSION, hal_version);
4920 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4921 CAM_INTF_META_STREAM_INFO, stream_config_info);
4922 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
4923 mParameters);
4924 if (rc < 0) {
4925 LOGE("set_parms for unconfigure failed");
4926 pthread_mutex_unlock(&mMutex);
4927 return rc;
4928 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07004929
Thierry Strudel3d639192016-09-09 11:52:26 -07004930 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004931 mPerfLockMgr.acquirePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07004932 /* get eis information for stream configuration */
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004933 cam_is_type_t isTypeVideo, isTypePreview, is_type=IS_TYPE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07004934 char is_type_value[PROPERTY_VALUE_MAX];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004935 property_get("persist.camera.is_type", is_type_value, "4");
4936 isTypeVideo = static_cast<cam_is_type_t>(atoi(is_type_value));
4937 // Make default value for preview IS_TYPE as IS_TYPE_EIS_2_0
4938 property_get("persist.camera.is_type_preview", is_type_value, "4");
4939 isTypePreview = static_cast<cam_is_type_t>(atoi(is_type_value));
4940 LOGD("isTypeVideo: %d isTypePreview: %d", isTypeVideo, isTypePreview);
Thierry Strudel3d639192016-09-09 11:52:26 -07004941
4942 if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
4943 int32_t hal_version = CAM_HAL_V3;
4944 uint8_t captureIntent =
4945 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
4946 mCaptureIntent = captureIntent;
4947 clear_metadata_buffer(mParameters);
4948 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version);
4949 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_CAPTURE_INTENT, captureIntent);
4950 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07004951 if (mFirstConfiguration) {
4952 // configure instant AEC
4953 // Instant AEC is a session based parameter and it is needed only
4954 // once per complete session after open camera.
4955 // i.e. This is set only once for the first capture request, after open camera.
4956 setInstantAEC(meta);
4957 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004958 uint8_t fwkVideoStabMode=0;
4959 if (meta.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
4960 fwkVideoStabMode = meta.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
4961 }
4962
Xue Tuecac74e2017-04-17 13:58:15 -07004963 // If the EIS setprop is enabled, turn EIS on only for video/preview streams
4964 bool setEis = m_bEisEnable && m_bEisSupportedSize &&
Jason Lee603176d2017-05-31 11:43:27 -07004965 (isTypeVideo >= IS_TYPE_EIS_2_0) && !meta.exists(QCAMERA3_USE_AV_TIMER);
Thierry Strudel3d639192016-09-09 11:52:26 -07004966 int32_t vsMode;
4967 vsMode = (setEis)? DIS_ENABLE: DIS_DISABLE;
4968 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_DIS_ENABLE, vsMode)) {
4969 rc = BAD_VALUE;
4970 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004971 LOGD("setEis %d", setEis);
4972 bool eis3Supported = false;
4973 size_t count = IS_TYPE_MAX;
4974 count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
4975 for (size_t i = 0; i < count; i++) {
4976 if (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0) {
4977 eis3Supported = true;
4978 break;
4979 }
4980 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004981
4982 //IS type will be 0 unless EIS is supported. If EIS is supported
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004983 //it could either be 4 or 5 depending on the stream and video size
Thierry Strudel3d639192016-09-09 11:52:26 -07004984 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
4985 if (setEis) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004986 if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_PREVIEW) {
4987 is_type = isTypePreview;
4988 } else if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_VIDEO ) {
4989 if ( (isTypeVideo == IS_TYPE_EIS_3_0) && (eis3Supported == FALSE) ) {
4990 LOGW(" EIS_3.0 is not supported and so setting EIS_2.0");
Thierry Strudel3d639192016-09-09 11:52:26 -07004991 is_type = IS_TYPE_EIS_2_0;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004992 } else {
4993 is_type = isTypeVideo;
Thierry Strudel3d639192016-09-09 11:52:26 -07004994 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004995 } else {
4996 is_type = IS_TYPE_NONE;
4997 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004998 mStreamConfigInfo.is_type[i] = is_type;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004999 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07005000 mStreamConfigInfo.is_type[i] = IS_TYPE_NONE;
5001 }
5002 }
5003
5004 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5005 CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
5006
Thierry Strudel54dc9782017-02-15 12:12:10 -08005007 //Disable tintless only if the property is set to 0
5008 memset(prop, 0, sizeof(prop));
5009 property_get("persist.camera.tintless.enable", prop, "1");
5010 int32_t tintless_value = atoi(prop);
5011
Thierry Strudel3d639192016-09-09 11:52:26 -07005012 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5013 CAM_INTF_PARM_TINTLESS, tintless_value);
Thierry Strudel54dc9782017-02-15 12:12:10 -08005014
Thierry Strudel3d639192016-09-09 11:52:26 -07005015 //Disable CDS for HFR mode or if DIS/EIS is on.
5016 //CDS is a session parameter in the backend/ISP, so need to be set/reset
5017 //after every configure_stream
5018 if ((CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) ||
5019 (m_bIsVideo)) {
5020 int32_t cds = CAM_CDS_MODE_OFF;
5021 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5022 CAM_INTF_PARM_CDS_MODE, cds))
5023 LOGE("Failed to disable CDS for HFR mode");
5024
5025 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005026
5027 if (m_debug_avtimer || meta.exists(QCAMERA3_USE_AV_TIMER)) {
5028 uint8_t* use_av_timer = NULL;
5029
5030 if (m_debug_avtimer){
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005031 LOGI(" Enabling AV timer through setprop");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005032 use_av_timer = &m_debug_avtimer;
Binhao Lin09245482017-08-31 18:25:29 -07005033 m_bAVTimerEnabled = true;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005034 }
5035 else{
5036 use_av_timer =
5037 meta.find(QCAMERA3_USE_AV_TIMER).data.u8;
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005038 if (use_av_timer) {
Binhao Lin09245482017-08-31 18:25:29 -07005039 m_bAVTimerEnabled = true;
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005040 LOGI("Enabling AV timer through Metadata: use_av_timer: %d", *use_av_timer);
5041 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005042 }
5043
5044 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_USE_AV_TIMER, *use_av_timer)) {
5045 rc = BAD_VALUE;
5046 }
5047 }
5048
Thierry Strudel3d639192016-09-09 11:52:26 -07005049 setMobicat();
5050
Emilian Peev49c4c6b2017-04-24 10:21:34 +01005051 uint8_t nrMode = 0;
5052 if (meta.exists(ANDROID_NOISE_REDUCTION_MODE)) {
5053 nrMode = meta.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
5054 }
5055
Thierry Strudel3d639192016-09-09 11:52:26 -07005056 /* Set fps and hfr mode while sending meta stream info so that sensor
5057 * can configure appropriate streaming mode */
5058 mHFRVideoFps = DEFAULT_VIDEO_FPS;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005059 mMinInFlightRequests = MIN_INFLIGHT_REQUESTS;
5060 mMaxInFlightRequests = MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07005061 if (meta.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
5062 rc = setHalFpsRange(meta, mParameters);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005063 if (rc == NO_ERROR) {
5064 int32_t max_fps =
5065 (int32_t) meta.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
Zhijun He21b864a2016-06-24 13:41:19 -07005066 if (max_fps == 60 || mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005067 mMinInFlightRequests = MIN_INFLIGHT_60FPS_REQUESTS;
5068 }
5069 /* For HFR, more buffers are dequeued upfront to improve the performance */
5070 if (mBatchSize) {
5071 mMinInFlightRequests = MIN_INFLIGHT_HFR_REQUESTS;
5072 mMaxInFlightRequests = MAX_INFLIGHT_HFR_REQUESTS;
5073 }
5074 }
5075 else {
Thierry Strudel3d639192016-09-09 11:52:26 -07005076 LOGE("setHalFpsRange failed");
5077 }
5078 }
5079 if (meta.exists(ANDROID_CONTROL_MODE)) {
5080 uint8_t metaMode = meta.find(ANDROID_CONTROL_MODE).data.u8[0];
5081 rc = extractSceneMode(meta, metaMode, mParameters);
5082 if (rc != NO_ERROR) {
5083 LOGE("extractSceneMode failed");
5084 }
5085 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005086 memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07005087
Thierry Strudel04e026f2016-10-10 11:27:36 -07005088 if (meta.exists(QCAMERA3_VIDEO_HDR_MODE)) {
5089 cam_video_hdr_mode_t vhdr = (cam_video_hdr_mode_t)
5090 meta.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
5091 rc = setVideoHdrMode(mParameters, vhdr);
5092 if (rc != NO_ERROR) {
5093 LOGE("setVideoHDR is failed");
5094 }
5095 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005096
Ranjith Kagathi Anandae5df3752017-04-28 11:22:51 -07005097 if (meta.exists(TANGO_MODE_DATA_SENSOR_FULLFOV)) {
Ranjith Kagathi Ananda0533b682017-03-24 17:52:46 -07005098 uint8_t sensorModeFullFov =
Ranjith Kagathi Anandae5df3752017-04-28 11:22:51 -07005099 meta.find(TANGO_MODE_DATA_SENSOR_FULLFOV).data.u8[0];
Ranjith Kagathi Ananda0533b682017-03-24 17:52:46 -07005100 LOGD("SENSOR_MODE_FULLFOV %d" , sensorModeFullFov);
5101 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_SENSOR_MODE_FULLFOV,
5102 sensorModeFullFov)) {
5103 rc = BAD_VALUE;
5104 }
5105 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005106 //TODO: validate the arguments, HSV scenemode should have only the
5107 //advertised fps ranges
5108
5109 /*set the capture intent, hal version, tintless, stream info,
5110 *and DIS enable parameters to the backend*/
5111 LOGD("set_parms META_STREAM_INFO " );
5112 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
Thierry Strudel2896d122017-02-23 19:18:03 -08005113 LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%" PRIx64
5114 ", Format:%d is_type: %d",
Thierry Strudel3d639192016-09-09 11:52:26 -07005115 mStreamConfigInfo.type[i],
5116 mStreamConfigInfo.stream_sizes[i].width,
5117 mStreamConfigInfo.stream_sizes[i].height,
5118 mStreamConfigInfo.postprocess_mask[i],
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005119 mStreamConfigInfo.format[i],
5120 mStreamConfigInfo.is_type[i]);
Thierry Strudel3d639192016-09-09 11:52:26 -07005121 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005122
Thierry Strudel3d639192016-09-09 11:52:26 -07005123 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
5124 mParameters);
5125 if (rc < 0) {
5126 LOGE("set_parms failed for hal version, stream info");
5127 }
5128
Chien-Yu Chen605c3872017-06-14 11:09:23 -07005129 cam_sensor_mode_info_t sensorModeInfo = {};
5130 rc = getSensorModeInfo(sensorModeInfo);
Thierry Strudel3d639192016-09-09 11:52:26 -07005131 if (rc != NO_ERROR) {
5132 LOGE("Failed to get sensor output size");
5133 pthread_mutex_unlock(&mMutex);
5134 goto error_exit;
5135 }
5136
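            // Update the crop region mapper with the full active-array size and the
            // selected sensor mode's active-array size so crop regions can be translated
            // between the two coordinate spaces.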
5137 mCropRegionMapper.update(gCamCapability[mCameraId]->active_array_size.width,
5138 gCamCapability[mCameraId]->active_array_size.height,
Chien-Yu Chen605c3872017-06-14 11:09:23 -07005139 sensorModeInfo.active_array_size.width,
5140 sensorModeInfo.active_array_size.height);
Thierry Strudel3d639192016-09-09 11:52:26 -07005141
5142 /* Set batchmode before initializing channel. Since registerBuffer
5143 * internally initializes some of the channels, better set batchmode
5144 * even before first register buffer */
5145 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5146 it != mStreamInfo.end(); it++) {
5147 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
5148 if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
5149 && mBatchSize) {
5150 rc = channel->setBatchSize(mBatchSize);
5151 //Disable per frame map unmap for HFR/batchmode case
5152 rc |= channel->setPerFrameMapUnmap(false);
5153 if (NO_ERROR != rc) {
5154 LOGE("Channel init failed %d", rc);
5155 pthread_mutex_unlock(&mMutex);
5156 goto error_exit;
5157 }
5158 }
5159 }
5160
5161 //First initialize all streams
5162 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5163 it != mStreamInfo.end(); it++) {
5164 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
Emilian Peev49c4c6b2017-04-24 10:21:34 +01005165
5166 /* Initial value of NR mode is needed before stream on */
5167 channel->setNRMode(nrMode);
Thierry Strudel3d639192016-09-09 11:52:26 -07005168 if ((((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) ||
5169 ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask())) &&
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005170 setEis) {
5171 for (size_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
5172 if ( (1U << mStreamConfigInfo.type[i]) == channel->getStreamTypeMask() ) {
5173 is_type = mStreamConfigInfo.is_type[i];
5174 break;
5175 }
5176 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005177 rc = channel->initialize(is_type);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005178 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07005179 rc = channel->initialize(IS_TYPE_NONE);
5180 }
5181 if (NO_ERROR != rc) {
5182 LOGE("Channel initialization failed %d", rc);
5183 pthread_mutex_unlock(&mMutex);
5184 goto error_exit;
5185 }
5186 }
5187
5188 if (mRawDumpChannel) {
5189 rc = mRawDumpChannel->initialize(IS_TYPE_NONE);
5190 if (rc != NO_ERROR) {
5191 LOGE("Error: Raw Dump Channel init failed");
5192 pthread_mutex_unlock(&mMutex);
5193 goto error_exit;
5194 }
5195 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07005196 if (mHdrPlusRawSrcChannel) {
5197 rc = mHdrPlusRawSrcChannel->initialize(IS_TYPE_NONE);
5198 if (rc != NO_ERROR) {
5199 LOGE("Error: HDR+ RAW Source Channel init failed");
5200 pthread_mutex_unlock(&mMutex);
5201 goto error_exit;
5202 }
5203 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005204 if (mSupportChannel) {
5205 rc = mSupportChannel->initialize(IS_TYPE_NONE);
5206 if (rc < 0) {
5207 LOGE("Support channel initialization failed");
5208 pthread_mutex_unlock(&mMutex);
5209 goto error_exit;
5210 }
5211 }
5212 if (mAnalysisChannel) {
5213 rc = mAnalysisChannel->initialize(IS_TYPE_NONE);
5214 if (rc < 0) {
5215 LOGE("Analysis channel initialization failed");
5216 pthread_mutex_unlock(&mMutex);
5217 goto error_exit;
5218 }
5219 }
5220 if (mDummyBatchChannel) {
5221 rc = mDummyBatchChannel->setBatchSize(mBatchSize);
5222 if (rc < 0) {
5223 LOGE("mDummyBatchChannel setBatchSize failed");
5224 pthread_mutex_unlock(&mMutex);
5225 goto error_exit;
5226 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005227 rc = mDummyBatchChannel->initialize(IS_TYPE_NONE);
Thierry Strudel3d639192016-09-09 11:52:26 -07005228 if (rc < 0) {
5229 LOGE("mDummyBatchChannel initialization failed");
5230 pthread_mutex_unlock(&mMutex);
5231 goto error_exit;
5232 }
5233 }
5234
5235 // Set bundle info
5236 rc = setBundleInfo();
5237 if (rc < 0) {
5238 LOGE("setBundleInfo failed %d", rc);
5239 pthread_mutex_unlock(&mMutex);
5240 goto error_exit;
5241 }
5242
5243 //update settings from app here
5244 if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
5245 mIsDeviceLinked = meta.find(QCAMERA3_DUALCAM_LINK_ENABLE).data.u8[0];
5246 LOGH("Dualcam: setting On=%d id =%d", mIsDeviceLinked, mCameraId);
5247 }
5248 if (meta.exists(QCAMERA3_DUALCAM_LINK_IS_MAIN)) {
5249 mIsMainCamera = meta.find(QCAMERA3_DUALCAM_LINK_IS_MAIN).data.u8[0];
5250 LOGH("Dualcam: Is this main camera = %d id =%d", mIsMainCamera, mCameraId);
5251 }
5252 if (meta.exists(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID)) {
5253 mLinkedCameraId = meta.find(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID).data.u8[0];
5254 LOGH("Dualcam: Linked camera Id %d id =%d", mLinkedCameraId, mCameraId);
5255
5256 if ( (mLinkedCameraId >= MM_CAMERA_MAX_NUM_SENSORS) &&
5257 (mLinkedCameraId != mCameraId) ) {
5258 LOGE("Dualcam: mLinkedCameraId %d is invalid, current cam id = %d",
5259 mLinkedCameraId, mCameraId);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005260 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005261 goto error_exit;
5262 }
5263 }
5264
5265 // add bundle related cameras
5266 LOGH("%s: Dualcam: id =%d, mIsDeviceLinked=%d", __func__,mCameraId, mIsDeviceLinked);
5267 if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005268 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
5269 &m_pDualCamCmdPtr->bundle_info;
5270 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
Thierry Strudel3d639192016-09-09 11:52:26 -07005271 if (mIsDeviceLinked)
5272 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_ON;
5273 else
5274 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
5275
5276 pthread_mutex_lock(&gCamLock);
5277
5278 if (sessionId[mLinkedCameraId] == 0xDEADBEEF) {
5279 LOGE("Dualcam: Invalid Session Id ");
5280 pthread_mutex_unlock(&gCamLock);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005281 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005282 goto error_exit;
5283 }
5284
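            // Configure this session's role in the dual-camera bundle: primary/Bayer for
            // the main camera, secondary/mono for the aux camera, with both pointing at
            // the linked session id.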
5285 if (mIsMainCamera == 1) {
5286 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
5287 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
Thierry Strudel269c81a2016-10-12 12:13:59 -07005288 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005289 m_pRelCamSyncBuf->cam_role = CAM_ROLE_BAYER;
Thierry Strudel3d639192016-09-09 11:52:26 -07005290 // related session id should be session id of linked session
5291 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5292 } else {
5293 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
5294 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
Thierry Strudel269c81a2016-10-12 12:13:59 -07005295 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005296 m_pRelCamSyncBuf->cam_role = CAM_ROLE_MONO;
Thierry Strudel3d639192016-09-09 11:52:26 -07005297 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5298 }
Thierry Strudel2896d122017-02-23 19:18:03 -08005299 m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
Thierry Strudel3d639192016-09-09 11:52:26 -07005300 pthread_mutex_unlock(&gCamLock);
5301
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005302 rc = mCameraHandle->ops->set_dual_cam_cmd(
5303 mCameraHandle->camera_handle);
Thierry Strudel3d639192016-09-09 11:52:26 -07005304 if (rc < 0) {
5305 LOGE("Dualcam: link failed");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005306 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005307 goto error_exit;
5308 }
5309 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005310 goto no_error;
5311error_exit:
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005312 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07005313 return rc;
5314no_error:
Thierry Strudel3d639192016-09-09 11:52:26 -07005315 mWokenUpByDaemon = false;
5316 mPendingLiveRequest = 0;
5317 mFirstConfiguration = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07005318 }
5319
5320 uint32_t frameNumber = request->frame_number;
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005321 cam_stream_ID_t streamsArray;
Thierry Strudel3d639192016-09-09 11:52:26 -07005322
5323 if (mFlushPerf) {
5324 //we cannot accept any requests during flush
5325 LOGE("process_capture_request cannot proceed during flush");
5326 pthread_mutex_unlock(&mMutex);
5327 return NO_ERROR; //should return an error
5328 }
5329
5330 if (meta.exists(ANDROID_REQUEST_ID)) {
5331 request_id = meta.find(ANDROID_REQUEST_ID).data.i32[0];
5332 mCurrentRequestId = request_id;
5333 LOGD("Received request with id: %d", request_id);
5334 } else if (mState == CONFIGURED || mCurrentRequestId == -1){
5335 LOGE("Unable to find request id field, \
5336 & no previous id available");
5337 pthread_mutex_unlock(&mMutex);
5338 return NAME_NOT_FOUND;
5339 } else {
5340 LOGD("Re-using old request id");
5341 request_id = mCurrentRequestId;
5342 }
5343
5344 LOGH("num_output_buffers = %d input_buffer = %p frame_number = %d",
5345 request->num_output_buffers,
5346 request->input_buffer,
5347 frameNumber);
5348 // Acquire all request buffers first
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005349 streamsArray.num_streams = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07005350 int blob_request = 0;
Emilian Peev7650c122017-01-19 08:24:33 -08005351 bool depthRequestPresent = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07005352 uint32_t snapshotStreamId = 0;
5353 for (size_t i = 0; i < request->num_output_buffers; i++) {
5354 const camera3_stream_buffer_t& output = request->output_buffers[i];
5355 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
5356
Emilian Peev7650c122017-01-19 08:24:33 -08005357 if ((output.stream->format == HAL_PIXEL_FORMAT_BLOB) &&
5358 (output.stream->data_space != HAL_DATASPACE_DEPTH)) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005359 //FIXME??: Call function to store a local copy of JPEG data for encode params.
Thierry Strudel3d639192016-09-09 11:52:26 -07005360 blob_request = 1;
5361 snapshotStreamId = channel->getStreamID(channel->getStreamTypeMask());
5362 }
5363
5364 if (output.acquire_fence != -1) {
5365 rc = sync_wait(output.acquire_fence, TIMEOUT_NEVER);
5366 close(output.acquire_fence);
5367 if (rc != OK) {
5368 LOGE("sync wait failed %d", rc);
5369 pthread_mutex_unlock(&mMutex);
5370 return rc;
5371 }
5372 }
5373
Emilian Peev0f3c3162017-03-15 12:57:46 +00005374 if ((output.stream->format == HAL_PIXEL_FORMAT_BLOB) &&
5375 (output.stream->data_space == HAL_DATASPACE_DEPTH)) {
Emilian Peev7650c122017-01-19 08:24:33 -08005376 depthRequestPresent = true;
5377 continue;
5378 }
5379
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005380 streamsArray.stream_request[streamsArray.num_streams++].streamID =
Thierry Strudel3d639192016-09-09 11:52:26 -07005381 channel->getStreamID(channel->getStreamTypeMask());
Thierry Strudel3d639192016-09-09 11:52:26 -07005382
5383 if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
5384 isVidBufRequested = true;
5385 }
5386 }
5387
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005388 //FIXME: Add checks in validateCaptureRequest to ensure there are no duplicate streams
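        // Also include streams requested internally by the HAL (no framework buffers)
        // in the stream list sent to the backend for this request.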
5389 for (auto itr = internallyRequestedStreams.begin(); itr != internallyRequestedStreams.end();
5390 itr++) {
5391 QCamera3Channel *channel = (QCamera3Channel *)(*itr).stream->priv;
5392 streamsArray.stream_request[streamsArray.num_streams++].streamID =
5393 channel->getStreamID(channel->getStreamTypeMask());
5394
5395 if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
5396 isVidBufRequested = true;
5397 }
5398 }
5399
Thierry Strudel3d639192016-09-09 11:52:26 -07005400 if (blob_request) {
Shuzhen Wang850a7c22017-05-02 14:48:23 -07005401 ATRACE_ASYNC_BEGIN("SNAPSHOT", frameNumber);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005402 mPerfLockMgr.acquirePerfLock(PERF_LOCK_TAKE_SNAPSHOT);
Thierry Strudel3d639192016-09-09 11:52:26 -07005403 }
5404 if (blob_request && mRawDumpChannel) {
5405 LOGD("Trigger Raw based on blob request if Raw dump is enabled");
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005406 streamsArray.stream_request[streamsArray.num_streams].streamID =
Thierry Strudel3d639192016-09-09 11:52:26 -07005407 mRawDumpChannel->getStreamID(mRawDumpChannel->getStreamTypeMask());
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005408 streamsArray.stream_request[streamsArray.num_streams++].buf_index = CAM_FREERUN_IDX;
Thierry Strudel3d639192016-09-09 11:52:26 -07005409 }
5410
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005411 {
5412 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5413 // Request a RAW buffer if
5414 // 1. mHdrPlusRawSrcChannel is valid.
5415 // 2. frameNumber is multiples of kHdrPlusRawPeriod (in order to limit RAW capture rate.)
5416 // 3. There is no pending HDR+ request.
5417 if (mHdrPlusRawSrcChannel && frameNumber % kHdrPlusRawPeriod == 0 &&
5418 mHdrPlusPendingRequests.size() == 0) {
5419 streamsArray.stream_request[streamsArray.num_streams].streamID =
5420 mHdrPlusRawSrcChannel->getStreamID(mHdrPlusRawSrcChannel->getStreamTypeMask());
5421 streamsArray.stream_request[streamsArray.num_streams++].buf_index = CAM_FREERUN_IDX;
5422 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07005423 }
5424
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005425 //extract capture intent
5426 if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
5427 mCaptureIntent =
5428 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
5429 }
5430
5431 if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
5432 mCacMode =
5433 meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
5434 }
5435
Chien-Yu Chenbc730232017-07-12 14:49:55 -07005436 uint8_t requestedLensShadingMapMode;
5437 // Get the shading map mode.
5438 if (meta.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
5439 mLastRequestedLensShadingMapMode = requestedLensShadingMapMode =
5440 meta.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
5441 } else {
5442 requestedLensShadingMapMode = mLastRequestedLensShadingMapMode;
5443 }
5444
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005445 bool hdrPlusRequest = false;
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005446 HdrPlusPendingRequest pendingHdrPlusRequest = {};
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005447
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07005448 {
Chien-Yu Chen77ccd022017-06-23 12:00:36 -07005449 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07005450 // If this request has a still capture intent, try to submit an HDR+ request.
5451 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled &&
5452 mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE) {
5453 hdrPlusRequest = trySubmittingHdrPlusRequestLocked(&pendingHdrPlusRequest, *request, meta);
5454 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005455 }
5456
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005457 if (hdrPlusRequest) {
5458 // For a HDR+ request, just set the frame parameters.
5459 rc = setFrameParameters(request, streamsArray, blob_request, snapshotStreamId);
5460 if (rc < 0) {
5461 LOGE("fail to set frame parameters");
5462 pthread_mutex_unlock(&mMutex);
5463 return rc;
5464 }
5465 } else if(request->input_buffer == NULL) {
Thierry Strudel3d639192016-09-09 11:52:26 -07005466 /* Parse the settings:
5467 * - For every request in NORMAL MODE
5468 * - For every request in HFR mode during preview only case
5469 * - For first request of every batch in HFR mode during video
5470 * recording. In batchmode the same settings except frame number is
5471 * repeated in each request of the batch.
5472 */
5473 if (!mBatchSize ||
5474 (mBatchSize && !isVidBufRequested) ||
5475 (mBatchSize && isVidBufRequested && !mToBeQueuedVidBufs)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005476 rc = setFrameParameters(request, streamsArray, blob_request, snapshotStreamId);
Thierry Strudel3d639192016-09-09 11:52:26 -07005477 if (rc < 0) {
5478 LOGE("fail to set frame parameters");
5479 pthread_mutex_unlock(&mMutex);
5480 return rc;
5481 }
Chien-Yu Chenbc730232017-07-12 14:49:55 -07005482
5483 {
5484 // If HDR+ mode is enabled, override lens shading mode to ON so lens shading map
5485 // will be reported in result metadata.
5486 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
5487 if (mHdrPlusModeEnabled) {
5488 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_LENS_SHADING_MAP_MODE,
5489 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON);
5490 }
5491 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005492 }
5493 /* For batch mode HFR, setFrameParameters is not called for every
5494 * request; only the frame number of the latest request is parsed.
5495 * Keep track of the first and last frame numbers in a batch so that
5496 * metadata for all frame numbers of the batch can be duplicated in
5497 * handleBatchMetadata */
5498 if (mBatchSize) {
5499 if (!mToBeQueuedVidBufs) {
5500 //start of the batch
5501 mFirstFrameNumberInBatch = request->frame_number;
5502 }
5503 if(ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5504 CAM_INTF_META_FRAME_NUMBER, request->frame_number)) {
5505 LOGE("Failed to set the frame number in the parameters");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005506 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005507 return BAD_VALUE;
5508 }
5509 }
5510 if (mNeedSensorRestart) {
5511 /* Unlock the mutex as restartSensor waits on the channels to be
5512 * stopped, which in turn calls stream callback functions -
5513 * handleBufferWithLock and handleMetadataWithLock */
5514 pthread_mutex_unlock(&mMutex);
5515 rc = dynamicUpdateMetaStreamInfo();
5516 if (rc != NO_ERROR) {
5517 LOGE("Restarting the sensor failed");
5518 return BAD_VALUE;
5519 }
5520 mNeedSensorRestart = false;
5521 pthread_mutex_lock(&mMutex);
5522 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005523 if(mResetInstantAEC) {
5524 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5525 CAM_INTF_PARM_INSTANT_AEC, (uint8_t)CAM_AEC_NORMAL_CONVERGENCE);
5526 mResetInstantAEC = false;
5527 }
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005528 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07005529 if (request->input_buffer->acquire_fence != -1) {
5530 rc = sync_wait(request->input_buffer->acquire_fence, TIMEOUT_NEVER);
5531 close(request->input_buffer->acquire_fence);
5532 if (rc != OK) {
5533 LOGE("input buffer sync wait failed %d", rc);
5534 pthread_mutex_unlock(&mMutex);
5535 return rc;
5536 }
5537 }
5538 }
5539
5540 if (mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM) {
5541 mLastCustIntentFrmNum = frameNumber;
5542 }
5543 /* Update pending request list and pending buffers map */
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005544 PendingRequestInfo pendingRequest = {};
Thierry Strudel3d639192016-09-09 11:52:26 -07005545 pendingRequestIterator latestRequest;
5546 pendingRequest.frame_number = frameNumber;
Emilian Peev7650c122017-01-19 08:24:33 -08005547 pendingRequest.num_buffers = depthRequestPresent ?
5548 (request->num_output_buffers - 1 ) : request->num_output_buffers;
Thierry Strudel3d639192016-09-09 11:52:26 -07005549 pendingRequest.request_id = request_id;
5550 pendingRequest.blob_request = blob_request;
5551 pendingRequest.timestamp = 0;
Chien-Yu Chenbc730232017-07-12 14:49:55 -07005552 pendingRequest.requestedLensShadingMapMode = requestedLensShadingMapMode;
Thierry Strudel3d639192016-09-09 11:52:26 -07005553 if (request->input_buffer) {
5554 pendingRequest.input_buffer =
5555 (camera3_stream_buffer_t*)malloc(sizeof(camera3_stream_buffer_t));
5556 *(pendingRequest.input_buffer) = *(request->input_buffer);
5557 pInputBuffer = pendingRequest.input_buffer;
5558 } else {
5559 pendingRequest.input_buffer = NULL;
5560 pInputBuffer = NULL;
5561 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005562 pendingRequest.bUseFirstPartial = (mState == CONFIGURED && !request->input_buffer);
Thierry Strudel3d639192016-09-09 11:52:26 -07005563
5564 pendingRequest.pipeline_depth = 0;
5565 pendingRequest.partial_result_cnt = 0;
5566 extractJpegMetadata(mCurJpegMeta, request);
5567 pendingRequest.jpegMetadata = mCurJpegMeta;
5568 pendingRequest.settings = saveRequestSettings(mCurJpegMeta, request);
Thierry Strudel3d639192016-09-09 11:52:26 -07005569 pendingRequest.capture_intent = mCaptureIntent;
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07005570 if (meta.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
Shuzhen Wang77b049a2017-08-30 12:24:36 -07005571 pendingRequest.hybrid_ae_enable =
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07005572 meta.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8[0];
5573 }
Chien-Yu Chen98b126c2017-03-14 14:55:32 -07005574
Samuel Ha68ba5172016-12-15 18:41:12 -08005575 /* DevCamDebug metadata processCaptureRequest */
5576 if (meta.exists(DEVCAMDEBUG_META_ENABLE)) {
5577 mDevCamDebugMetaEnable =
5578 meta.find(DEVCAMDEBUG_META_ENABLE).data.u8[0];
5579 }
5580 pendingRequest.DevCamDebug_meta_enable = mDevCamDebugMetaEnable;
5581 /* DevCamDebug metadata end */
Thierry Strudel3d639192016-09-09 11:52:26 -07005582
5583 //extract CAC info
5584 if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
5585 mCacMode =
5586 meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
5587 }
5588 pendingRequest.fwkCacMode = mCacMode;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005589 pendingRequest.hdrplus = hdrPlusRequest;
Emilian Peev30522a12017-08-03 14:36:33 +01005590 pendingRequest.expectedFrameDuration = mExpectedFrameDuration;
5591 mExpectedInflightDuration += mExpectedFrameDuration;
Thierry Strudel3d639192016-09-09 11:52:26 -07005592
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07005593 // extract enableZsl info
5594 if (gExposeEnableZslKey) {
5595 if (meta.exists(ANDROID_CONTROL_ENABLE_ZSL)) {
5596 pendingRequest.enableZsl = meta.find(ANDROID_CONTROL_ENABLE_ZSL).data.u8[0];
5597 mZslEnabled = pendingRequest.enableZsl;
5598 } else {
5599 pendingRequest.enableZsl = mZslEnabled;
5600 }
5601 }
5602
Thierry Strudel3d639192016-09-09 11:52:26 -07005603 PendingBuffersInRequest bufsForCurRequest;
5604 bufsForCurRequest.frame_number = frameNumber;
5605 // Mark current timestamp for the new request
5606 bufsForCurRequest.timestamp = systemTime(CLOCK_MONOTONIC);
Binhao Lin09245482017-08-31 18:25:29 -07005607 bufsForCurRequest.av_timestamp = 0;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005608 bufsForCurRequest.hdrplus = hdrPlusRequest;
Thierry Strudel3d639192016-09-09 11:52:26 -07005609
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005610 if (hdrPlusRequest) {
5611 // Save settings for this request.
5612 pendingHdrPlusRequest.settings = std::make_shared<metadata_buffer_t>();
5613 memcpy(pendingHdrPlusRequest.settings.get(), mParameters, sizeof(metadata_buffer_t));
5614
5615 // Add to pending HDR+ request queue.
5616 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5617 mHdrPlusPendingRequests.emplace(frameNumber, pendingHdrPlusRequest);
5618
5619 ALOGD("%s: frame number %u is an HDR+ request.", __FUNCTION__, frameNumber);
5620 }
5621
Thierry Strudel3d639192016-09-09 11:52:26 -07005622 for (size_t i = 0; i < request->num_output_buffers; i++) {
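        // Depth (BLOB + HAL_DATASPACE_DEPTH) outputs are handled by the depth channel
        // separately, so they are not added to the pending buffer tracking below.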
Emilian Peev0f3c3162017-03-15 12:57:46 +00005623 if ((request->output_buffers[i].stream->data_space ==
5624 HAL_DATASPACE_DEPTH) &&
5625 (HAL_PIXEL_FORMAT_BLOB ==
5626 request->output_buffers[i].stream->format)) {
Emilian Peev7650c122017-01-19 08:24:33 -08005627 continue;
5628 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005629 RequestedBufferInfo requestedBuf;
5630 memset(&requestedBuf, 0, sizeof(requestedBuf));
5631 requestedBuf.stream = request->output_buffers[i].stream;
5632 requestedBuf.buffer = NULL;
5633 pendingRequest.buffers.push_back(requestedBuf);
5634
5635 // Add the buffer handle to the pending buffers list
5636 PendingBufferInfo bufferInfo;
5637 bufferInfo.buffer = request->output_buffers[i].buffer;
5638 bufferInfo.stream = request->output_buffers[i].stream;
5639 bufsForCurRequest.mPendingBufferList.push_back(bufferInfo);
5640 QCamera3Channel *channel = (QCamera3Channel *)bufferInfo.stream->priv;
5641 LOGD("frame = %d, buffer = %p, streamTypeMask = %d, stream format = %d",
5642 frameNumber, bufferInfo.buffer,
5643 channel->getStreamTypeMask(), bufferInfo.stream->format);
5644 }
5645 // Add this request packet into mPendingBuffersMap
5646 mPendingBuffersMap.mPendingBuffersInRequest.push_back(bufsForCurRequest);
5647 LOGD("mPendingBuffersMap.num_overall_buffers = %d",
5648 mPendingBuffersMap.get_num_overall_buffers());
5649
5650 latestRequest = mPendingRequestsList.insert(
5651 mPendingRequestsList.end(), pendingRequest);
Chien-Yu Chen3f303522017-05-19 15:21:45 -07005652
5653 // Let shutter dispatcher and buffer dispatcher know shutter and output buffers are expected
5654 // for the frame number.
Chien-Yu Chena7f98612017-06-20 16:54:10 -07005655 mShutterDispatcher.expectShutter(frameNumber, request->input_buffer != nullptr);
Chien-Yu Chen3f303522017-05-19 15:21:45 -07005656 for (size_t i = 0; i < request->num_output_buffers; i++) {
5657 mOutputBufferDispatcher.expectBuffer(frameNumber, request->output_buffers[i].stream);
5658 }
5659
Thierry Strudel3d639192016-09-09 11:52:26 -07005660 if(mFlush) {
5661 LOGI("mFlush is true");
5662 pthread_mutex_unlock(&mMutex);
5663 return NO_ERROR;
5664 }
5665
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005666 // If this is not an HDR+ request, send the request to metadata and each output buffer's
5667 // channel.
5668 if (!hdrPlusRequest) {
5669 int indexUsed;
5670 // Notify metadata channel we receive a request
5671 mMetadataChannel->request(NULL, frameNumber, indexUsed);
Thierry Strudel3d639192016-09-09 11:52:26 -07005672
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005673 if(request->input_buffer != NULL){
5674 LOGD("Input request, frame_number %d", frameNumber);
5675 rc = setReprocParameters(request, &mReprocMeta, snapshotStreamId);
5676 if (NO_ERROR != rc) {
5677 LOGE("fail to set reproc parameters");
5678 pthread_mutex_unlock(&mMutex);
5679 return rc;
5680 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005681 }
5682
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005683 // Call request on other streams
5684 uint32_t streams_need_metadata = 0;
5685 pendingBufferIterator pendingBufferIter = latestRequest->buffers.begin();
5686 for (size_t i = 0; i < request->num_output_buffers; i++) {
5687 const camera3_stream_buffer_t& output = request->output_buffers[i];
5688 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
5689
5690 if (channel == NULL) {
5691 LOGW("invalid channel pointer for stream");
5692 continue;
5693 }
5694
5695 if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
5696 LOGD("snapshot request with output buffer %p, input buffer %p, frame_number %d",
5697 output.buffer, request->input_buffer, frameNumber);
5698 if(request->input_buffer != NULL){
Thierry Strudel3d639192016-09-09 11:52:26 -07005699 rc = channel->request(output.buffer, frameNumber,
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005700 pInputBuffer, &mReprocMeta, indexUsed, false, false);
5701 if (rc < 0) {
5702 LOGE("Fail to request on picture channel");
5703 pthread_mutex_unlock(&mMutex);
5704 return rc;
5705 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005706 } else {
Emilian Peev7650c122017-01-19 08:24:33 -08005707 if (HAL_DATASPACE_DEPTH == output.stream->data_space) {
5708 assert(NULL != mDepthChannel);
5709 assert(mDepthChannel == output.stream->priv);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005710
Emilian Peev7650c122017-01-19 08:24:33 -08005711 rc = mDepthChannel->mapBuffer(output.buffer, request->frame_number);
5712 if (rc < 0) {
5713 LOGE("Fail to map on depth buffer");
5714 pthread_mutex_unlock(&mMutex);
5715 return rc;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005716 }
Emilian Peev4e0fe952017-06-30 12:40:09 -07005717 continue;
Emilian Peev7650c122017-01-19 08:24:33 -08005718 } else {
5719 LOGD("snapshot request with buffer %p, frame_number %d",
5720 output.buffer, frameNumber);
5721 if (!request->settings) {
5722 rc = channel->request(output.buffer, frameNumber,
5723 NULL, mPrevParameters, indexUsed);
5724 } else {
5725 rc = channel->request(output.buffer, frameNumber,
5726 NULL, mParameters, indexUsed);
5727 }
5728 if (rc < 0) {
5729 LOGE("Fail to request on picture channel");
5730 pthread_mutex_unlock(&mMutex);
5731 return rc;
5732 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005733
Emilian Peev7650c122017-01-19 08:24:33 -08005734 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5735 uint32_t j = 0;
5736 for (j = 0; j < streamsArray.num_streams; j++) {
5737 if (streamsArray.stream_request[j].streamID == streamId) {
5738 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5739 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5740 else
5741 streamsArray.stream_request[j].buf_index = indexUsed;
5742 break;
5743 }
5744 }
5745 if (j == streamsArray.num_streams) {
5746 LOGE("Did not find matching stream to update index");
5747 assert(0);
5748 }
5749
5750 pendingBufferIter->need_metadata = true;
5751 streams_need_metadata++;
5752 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005753 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005754 } else if (output.stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
5755 bool needMetadata = false;
5756 QCamera3YUVChannel *yuvChannel = (QCamera3YUVChannel *)channel;
5757 rc = yuvChannel->request(output.buffer, frameNumber,
5758 pInputBuffer, (pInputBuffer ? &mReprocMeta : mParameters),
5759 needMetadata, indexUsed, false, false);
Thierry Strudel3d639192016-09-09 11:52:26 -07005760 if (rc < 0) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005761 LOGE("Fail to request on YUV channel");
Thierry Strudel3d639192016-09-09 11:52:26 -07005762 pthread_mutex_unlock(&mMutex);
5763 return rc;
5764 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005765
5766 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5767 uint32_t j = 0;
5768 for (j = 0; j < streamsArray.num_streams; j++) {
5769 if (streamsArray.stream_request[j].streamID == streamId) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005770 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5771 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5772 else
5773 streamsArray.stream_request[j].buf_index = indexUsed;
5774 break;
5775 }
5776 }
5777 if (j == streamsArray.num_streams) {
5778 LOGE("Did not find matching stream to update index");
5779 assert(0);
5780 }
5781
5782 pendingBufferIter->need_metadata = needMetadata;
5783 if (needMetadata)
5784 streams_need_metadata += 1;
5785 LOGD("calling YUV channel request, need_metadata is %d",
5786 needMetadata);
5787 } else {
5788 LOGD("request with buffer %p, frame_number %d",
5789 output.buffer, frameNumber);
5790
5791 rc = channel->request(output.buffer, frameNumber, indexUsed);
5792
5793 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5794 uint32_t j = 0;
5795 for (j = 0; j < streamsArray.num_streams; j++) {
5796 if (streamsArray.stream_request[j].streamID == streamId) {
5797 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5798 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5799 else
5800 streamsArray.stream_request[j].buf_index = indexUsed;
5801 break;
5802 }
5803 }
5804 if (j == streamsArray.num_streams) {
5805 LOGE("Did not find matching stream to update index");
5806 assert(0);
5807 }
5808
5809 if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
5810 && mBatchSize) {
5811 mToBeQueuedVidBufs++;
5812 if (mToBeQueuedVidBufs == mBatchSize) {
5813 channel->queueBatchBuf();
5814 }
5815 }
5816 if (rc < 0) {
5817 LOGE("request failed");
5818 pthread_mutex_unlock(&mMutex);
5819 return rc;
5820 }
5821 }
5822 pendingBufferIter++;
5823 }
5824
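        // Issue requests for streams the HAL requested internally (no framework
        // buffers), e.g. metering-only captures, mirroring the framework-buffer path above.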
5825 for (auto itr = internallyRequestedStreams.begin(); itr != internallyRequestedStreams.end();
5826 itr++) {
5827 QCamera3Channel *channel = (QCamera3Channel *)(*itr).stream->priv;
5828
5829 if (channel == NULL) {
5830 LOGE("invalid channel pointer for stream");
5831 assert(0);
Shuzhen Wang3a1b92d2017-08-09 13:39:47 -07005832 pthread_mutex_unlock(&mMutex);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005833 return BAD_VALUE;
5834 }
5835
5836 InternalRequest requestedStream;
5837 requestedStream = (*itr);
5838
5839
5840 if ((*itr).stream->format == HAL_PIXEL_FORMAT_BLOB) {
5841 LOGD("snapshot request internally input buffer %p, frame_number %d",
5842 request->input_buffer, frameNumber);
5843 if(request->input_buffer != NULL){
5844 rc = channel->request(NULL, frameNumber,
5845 pInputBuffer, &mReprocMeta, indexUsed, true,
5846 requestedStream.meteringOnly);
5847 if (rc < 0) {
5848 LOGE("Fail to request on picture channel");
5849 pthread_mutex_unlock(&mMutex);
5850 return rc;
5851 }
5852 } else {
5853 LOGD("snapshot request with frame_number %d", frameNumber);
5854 if (!request->settings) {
5855 rc = channel->request(NULL, frameNumber,
5856 NULL, mPrevParameters, indexUsed, true,
5857 requestedStream.meteringOnly);
5858 } else {
5859 rc = channel->request(NULL, frameNumber,
5860 NULL, mParameters, indexUsed, true, requestedStream.meteringOnly);
5861 }
5862 if (rc < 0) {
5863 LOGE("Fail to request on picture channel");
5864 pthread_mutex_unlock(&mMutex);
5865 return rc;
5866 }
5867
5868 if ((*itr).meteringOnly != 1) {
5869 requestedStream.need_metadata = 1;
5870 streams_need_metadata++;
5871 }
5872 }
5873
5874 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5875 uint32_t j = 0;
5876 for (j = 0; j < streamsArray.num_streams; j++) {
5877 if (streamsArray.stream_request[j].streamID == streamId) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005878 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5879 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5880 else
5881 streamsArray.stream_request[j].buf_index = indexUsed;
5882 break;
5883 }
5884 }
5885 if (j == streamsArray.num_streams) {
5886 LOGE("Did not find matching stream to update index");
5887 assert(0);
5888 }
5889
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005890 } else {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005891 LOGE("Internal requests not supported on this stream type");
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005892 assert(0);
Shuzhen Wang3a1b92d2017-08-09 13:39:47 -07005893 pthread_mutex_unlock(&mMutex);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005894 return INVALID_OPERATION;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005895 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005896 latestRequest->internalRequestList.push_back(requestedStream);
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005897 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005898
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005899 //If 2 streams have need_metadata set to true, fail the request, unless
5900 //we copy/reference count the metadata buffer
5901 if (streams_need_metadata > 1) {
5902 LOGE("not supporting request in which two streams requires"
5903 " 2 HAL metadata for reprocessing");
5904 pthread_mutex_unlock(&mMutex);
5905 return -EINVAL;
5906 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005907
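    // Select the PDAF data mode: disabled when there is no depth channel, SKIP by
    // default otherwise; when a depth buffer is requested, honor the per-request
    // PD-data setting and fall back to the last chosen mode (mDepthCloudMode).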
Emilian Peev656e4fa2017-06-02 16:47:04 +01005908 cam_sensor_pd_data_t pdafEnable = (nullptr != mDepthChannel) ?
5909 CAM_PD_DATA_SKIP : CAM_PD_DATA_DISABLED;
5910 if (depthRequestPresent && mDepthChannel) {
5911 if (request->settings) {
5912 camera_metadata_ro_entry entry;
5913 if (find_camera_metadata_ro_entry(request->settings,
5914 NEXUS_EXPERIMENTAL_2017_PD_DATA_ENABLE, &entry) == 0) {
5915 if (entry.data.u8[0]) {
5916 pdafEnable = CAM_PD_DATA_ENABLED;
5917 } else {
5918 pdafEnable = CAM_PD_DATA_SKIP;
5919 }
5920 mDepthCloudMode = pdafEnable;
5921 } else {
5922 pdafEnable = mDepthCloudMode;
5923 }
5924 } else {
5925 pdafEnable = mDepthCloudMode;
5926 }
5927 }
5928
Emilian Peev7650c122017-01-19 08:24:33 -08005929 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5930 CAM_INTF_META_PDAF_DATA_ENABLE, pdafEnable)) {
5931 LOGE("%s: Failed to enable PDAF data in parameters!", __func__);
5932 pthread_mutex_unlock(&mMutex);
5933 return BAD_VALUE;
5934 }
Emilian Peev656e4fa2017-06-02 16:47:04 +01005935
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005936 if (request->input_buffer == NULL) {
5937 /* Set the parameters to backend:
5938 * - For every request in NORMAL MODE
5939 * - For every request in HFR mode during preview only case
5940 * - Once every batch in HFR mode during video recording
5941 */
5942 if (!mBatchSize ||
5943 (mBatchSize && !isVidBufRequested) ||
5944 (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize))) {
5945 LOGD("set_parms batchSz: %d IsVidBufReq: %d vidBufTobeQd: %d ",
5946 mBatchSize, isVidBufRequested,
5947 mToBeQueuedVidBufs);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005948
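            // Merge this request's streams into mBatchedStreamsArray (skipping IDs that
            // are already present) so a single set_parms call below carries stream info
            // for the whole batch.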
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005949 if(mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize)) {
5950 for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
5951 uint32_t m = 0;
5952 for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
5953 if (streamsArray.stream_request[k].streamID ==
5954 mBatchedStreamsArray.stream_request[m].streamID)
5955 break;
5956 }
5957 if (m == mBatchedStreamsArray.num_streams) {
5958 mBatchedStreamsArray.stream_request\
5959 [mBatchedStreamsArray.num_streams].streamID =
5960 streamsArray.stream_request[k].streamID;
5961 mBatchedStreamsArray.stream_request\
5962 [mBatchedStreamsArray.num_streams].buf_index =
5963 streamsArray.stream_request[k].buf_index;
5964 mBatchedStreamsArray.num_streams =
5965 mBatchedStreamsArray.num_streams + 1;
5966 }
5967 }
5968 streamsArray = mBatchedStreamsArray;
5969 }
5970 /* Update stream id of all the requested buffers */
5971 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID,
5972 streamsArray)) {
5973 LOGE("Failed to set stream type mask in the parameters");
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005974 pthread_mutex_unlock(&mMutex);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005975 return BAD_VALUE;
5976 }
5977
5978 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
5979 mParameters);
5980 if (rc < 0) {
5981 LOGE("set_parms failed");
5982 }
5983 /* reset to zero because the batch is queued */
5984 mToBeQueuedVidBufs = 0;
5985 mPendingBatchMap.add(frameNumber, mFirstFrameNumberInBatch);
5986 memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
5987 } else if (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs != mBatchSize)) {
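            // Batch is not yet full: accumulate this request's streams into
            // mBatchedStreamsArray; set_parms is deferred until the batch completes.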
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005988 for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
5989 uint32_t m = 0;
5990 for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
5991 if (streamsArray.stream_request[k].streamID ==
5992 mBatchedStreamsArray.stream_request[m].streamID)
5993 break;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005994 }
5995 if (m == mBatchedStreamsArray.num_streams) {
5996 mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].
5997 streamID = streamsArray.stream_request[k].streamID;
5998 mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].
5999 buf_index = streamsArray.stream_request[k].buf_index;
6000 mBatchedStreamsArray.num_streams = mBatchedStreamsArray.num_streams + 1;
6001 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08006002 }
6003 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08006004 mPendingLiveRequest++;
Shuzhen Wang3c077d72017-04-20 22:48:59 -07006005
6006 // Start all streams after the first setting is sent, so that the
6007 // setting can be applied sooner: (0 + apply_delay)th frame.
6008 if (mState == CONFIGURED && mChannelHandle) {
6009 //Then start them.
6010 LOGH("Start META Channel");
6011 rc = mMetadataChannel->start();
6012 if (rc < 0) {
6013 LOGE("META channel start failed");
6014 pthread_mutex_unlock(&mMutex);
6015 return rc;
6016 }
6017
6018 if (mAnalysisChannel) {
6019 rc = mAnalysisChannel->start();
6020 if (rc < 0) {
6021 LOGE("Analysis channel start failed");
6022 mMetadataChannel->stop();
6023 pthread_mutex_unlock(&mMutex);
6024 return rc;
6025 }
6026 }
6027
6028 if (mSupportChannel) {
6029 rc = mSupportChannel->start();
6030 if (rc < 0) {
6031 LOGE("Support channel start failed");
6032 mMetadataChannel->stop();
6033 /* Although support and analysis are mutually exclusive today,
6034 stop analysis anyway for future proofing */
6035 if (mAnalysisChannel) {
6036 mAnalysisChannel->stop();
6037 }
6038 pthread_mutex_unlock(&mMutex);
6039 return rc;
6040 }
6041 }
6042 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6043 it != mStreamInfo.end(); it++) {
6044 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
6045 LOGH("Start Processing Channel mask=%d",
6046 channel->getStreamTypeMask());
6047 rc = channel->start();
6048 if (rc < 0) {
6049 LOGE("channel start failed");
6050 pthread_mutex_unlock(&mMutex);
6051 return rc;
6052 }
6053 }
6054
6055 if (mRawDumpChannel) {
6056 LOGD("Starting raw dump stream");
6057 rc = mRawDumpChannel->start();
6058 if (rc != NO_ERROR) {
6059 LOGE("Error Starting Raw Dump Channel");
6060 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6061 it != mStreamInfo.end(); it++) {
6062 QCamera3Channel *channel =
6063 (QCamera3Channel *)(*it)->stream->priv;
6064 LOGH("Stopping Processing Channel mask=%d",
6065 channel->getStreamTypeMask());
6066 channel->stop();
6067 }
6068 if (mSupportChannel)
6069 mSupportChannel->stop();
6070 if (mAnalysisChannel) {
6071 mAnalysisChannel->stop();
6072 }
6073 mMetadataChannel->stop();
6074 pthread_mutex_unlock(&mMutex);
6075 return rc;
6076 }
6077 }
6078
Chien-Yu Chence5b8662017-05-09 17:17:17 -07006079 // Configure modules for stream on.
Chien-Yu Chen153c5172017-09-08 11:33:19 -07006080 rc = startChannelLocked();
Shuzhen Wang3c077d72017-04-20 22:48:59 -07006081 if (rc != NO_ERROR) {
Chien-Yu Chen153c5172017-09-08 11:33:19 -07006082 LOGE("startChannelLocked failed %d", rc);
Chien-Yu Chence5b8662017-05-09 17:17:17 -07006083 pthread_mutex_unlock(&mMutex);
6084 return rc;
6085 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07006086 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006087 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006088 }
6089
Chien-Yu Chenfadf40e2017-09-15 14:33:57 -07006090 // Enable HDR+ mode for the first PREVIEW_INTENT request that doesn't disable HDR+.
Chenjie Luo4a761802017-06-13 17:35:54 +00006091 {
Chien-Yu Chen77ccd022017-06-23 12:00:36 -07006092 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
Chien-Yu Chend77a5462017-06-02 18:00:38 -07006093 if (gEaselManagerClient != nullptr && gEaselManagerClient->isEaselPresentOnDevice() &&
Chien-Yu Chenfe023bc2017-05-12 17:19:26 -07006094 !gEaselBypassOnly && !mFirstPreviewIntentSeen &&
6095 meta.exists(ANDROID_CONTROL_CAPTURE_INTENT) &&
6096 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0] ==
Chien-Yu Chenfadf40e2017-09-15 14:33:57 -07006097 ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW &&
6098 meta.exists(NEXUS_EXPERIMENTAL_2017_DISABLE_HDRPLUS) &&
6099 meta.find(NEXUS_EXPERIMENTAL_2017_DISABLE_HDRPLUS).data.i32[0] == 0) {
Chien-Yu Chendeaebad2017-06-30 11:46:34 -07006100
6101 if (isSessionHdrPlusModeCompatible()) {
6102 rc = enableHdrPlusModeLocked();
6103 if (rc != OK) {
6104 LOGE("%s: Failed to open HDR+ asynchronously", __FUNCTION__);
6105 pthread_mutex_unlock(&mMutex);
6106 return rc;
6107 }
Chien-Yu Chenfe023bc2017-05-12 17:19:26 -07006108 }
6109
6110 mFirstPreviewIntentSeen = true;
6111 }
6112 }
6113
Thierry Strudel3d639192016-09-09 11:52:26 -07006114 LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
6115
6116 mState = STARTED;
6117 // Added a timed condition wait
6118 struct timespec ts;
6119 uint8_t isValidTimeout = 1;
Shuzhen Wangfb961e52016-11-28 11:48:02 -08006120 rc = clock_gettime(CLOCK_MONOTONIC, &ts);
Thierry Strudel3d639192016-09-09 11:52:26 -07006121 if (rc < 0) {
6122 isValidTimeout = 0;
6123 LOGE("Error reading the real time clock!!");
6124 }
6125 else {
6126 // Use a 5 sec timeout for the request to be honored
Chien-Yu Chene687bd02016-12-07 18:30:26 -08006127 int64_t timeout = 5;
6128 {
6129 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
6130 // If there is a pending HDR+ request, the following requests may be blocked until the
6131 // HDR+ request is done. So allow a longer timeout.
6132 if (mHdrPlusPendingRequests.size() > 0) {
6133 timeout = MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT;
6134 }
6135 }
6136 ts.tv_sec += timeout;
Thierry Strudel3d639192016-09-09 11:52:26 -07006137 }
6138 //Block on conditional variable
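    // Throttle the caller: with no input buffer, wait until mPendingLiveRequest drops
    // below mMinInFlightRequests (or until the timed wait expires) before returning.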
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006139 while ((mPendingLiveRequest >= mMinInFlightRequests) && !pInputBuffer &&
Thierry Strudel3d639192016-09-09 11:52:26 -07006140 (mState != ERROR) && (mState != DEINIT)) {
6141 if (!isValidTimeout) {
6142 LOGD("Blocking on conditional wait");
6143 pthread_cond_wait(&mRequestCond, &mMutex);
6144 }
6145 else {
6146 LOGD("Blocking on timed conditional wait");
6147 rc = pthread_cond_timedwait(&mRequestCond, &mMutex, &ts);
6148 if (rc == ETIMEDOUT) {
6149 rc = -ENODEV;
6150 LOGE("Unblocked on timeout!!!!");
6151 break;
6152 }
6153 }
6154 LOGD("Unblocked");
6155 if (mWokenUpByDaemon) {
6156 mWokenUpByDaemon = false;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006157 if (mPendingLiveRequest < mMaxInFlightRequests)
Thierry Strudel3d639192016-09-09 11:52:26 -07006158 break;
6159 }
6160 }
6161 pthread_mutex_unlock(&mMutex);
6162
6163 return rc;
6164}
6165
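/*===========================================================================
 * FUNCTION : startChannelLocked
 *
 * DESCRIPTION: Starts the backend channel, starts Easel MIPI when the Easel
 * manager client is open, and then starts sensor streaming.
 * Expected to be called with mMutex held (per the Locked suffix).
 *
 * PARAMETERS : None
 *
 * RETURN : 0 on success
 * Error code on failure
 *==========================================================================*/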
Chien-Yu Chen153c5172017-09-08 11:33:19 -07006166int32_t QCamera3HardwareInterface::startChannelLocked()
6167{
6168 // Configure modules for stream on.
6169 int32_t rc = mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
6170 mChannelHandle, /*start_sensor_streaming*/false);
6171 if (rc != NO_ERROR) {
6172 LOGE("start_channel failed %d", rc);
6173 return rc;
6174 }
6175
6176 {
6177 // Configure Easel for stream on.
6178 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
6179
6180 // Now that sensor mode should have been selected, get the selected sensor mode
6181 // info.
6182 memset(&mSensorModeInfo, 0, sizeof(mSensorModeInfo));
6183 getCurrentSensorModeInfo(mSensorModeInfo);
6184
6185 if (EaselManagerClientOpened) {
6186 logEaselEvent("EASEL_STARTUP_LATENCY", "Starting MIPI");
6187 rc = gEaselManagerClient->startMipi(mCameraId, mSensorModeInfo.op_pixel_clk,
6188 /*enableCapture*/true);
6189 if (rc != OK) {
6190 ALOGE("%s: Failed to start MIPI rate for camera %u to %u", __FUNCTION__,
6191 mCameraId, mSensorModeInfo.op_pixel_clk);
6192 return rc;
6193 }
6194 logEaselEvent("EASEL_STARTUP_LATENCY", "Starting MIPI done");
6195 mEaselMipiStarted = true;
6196 }
6197 }
6198
6199 // Start sensor streaming.
6200 rc = mCameraHandle->ops->start_sensor_streaming(mCameraHandle->camera_handle,
6201 mChannelHandle);
6202 if (rc != NO_ERROR) {
6203 LOGE("start_sensor_stream_on failed %d", rc);
6204 return rc;
6205 }
6206
6207 return 0;
6208}
6209
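/*===========================================================================
 * FUNCTION : stopChannelLocked
 *
 * DESCRIPTION: Stops the backend channel and, if Easel MIPI was started for
 * this camera, stops MIPI as well. Expected to be called with
 * mMutex held (per the Locked suffix).
 *
 * PARAMETERS :
 * @stopChannelImmediately : stop the channel without waiting for a frame
 * boundary
 *
 * RETURN : None
 *==========================================================================*/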
6210void QCamera3HardwareInterface::stopChannelLocked(bool stopChannelImmediately)
6211{
6212 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
6213 mChannelHandle, stopChannelImmediately);
6214
6215 {
6216 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
6217 if (EaselManagerClientOpened && mEaselMipiStarted) {
6218 int32_t rc = gEaselManagerClient->stopMipi(mCameraId);
6219 if (rc != 0) {
6220 ALOGE("%s: Stopping MIPI failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
6221 }
6222 mEaselMipiStarted = false;
6223 }
6224 }
6225}
6226
Thierry Strudel3d639192016-09-09 11:52:26 -07006227/*===========================================================================
6228 * FUNCTION : dump
6229 *
6230 * DESCRIPTION: Dumps the pending request list, pending buffer map and pending
6231 * frame drop list to the supplied file descriptor.
6232 * PARAMETERS :
6233 * @fd : file descriptor to write the dump to
6234 *
6235 * RETURN : None
6236 *==========================================================================*/
6237void QCamera3HardwareInterface::dump(int fd)
6238{
6239 pthread_mutex_lock(&mMutex);
6240 dprintf(fd, "\n Camera HAL3 information Begin \n");
6241
6242 dprintf(fd, "\nNumber of pending requests: %zu \n",
6243 mPendingRequestsList.size());
6244 dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
6245 dprintf(fd, " Frame | Number of Buffers | Req Id: | Blob Req | Input buffer present\n");
6246 dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
6247 for(pendingRequestIterator i = mPendingRequestsList.begin();
6248 i != mPendingRequestsList.end(); i++) {
6249 dprintf(fd, " %5d | %17d | %11d | %8d | %p \n",
6250 i->frame_number, i->num_buffers, i->request_id, i->blob_request,
6251 i->input_buffer);
6252 }
6253 dprintf(fd, "\nPending buffer map: Number of buffers: %u\n",
6254 mPendingBuffersMap.get_num_overall_buffers());
6255 dprintf(fd, "-------+------------------\n");
6256 dprintf(fd, " Frame | Stream type mask \n");
6257 dprintf(fd, "-------+------------------\n");
6258 for(auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
6259 for(auto &j : req.mPendingBufferList) {
6260 QCamera3Channel *channel = (QCamera3Channel *)(j.stream->priv);
6261 dprintf(fd, " %5d | %11d \n",
6262 req.frame_number, channel->getStreamTypeMask());
6263 }
6264 }
6265 dprintf(fd, "-------+------------------\n");
6266
6267 dprintf(fd, "\nPending frame drop list: %zu\n",
6268 mPendingFrameDropList.size());
6269 dprintf(fd, "-------+-----------\n");
6270 dprintf(fd, " Frame | Stream ID \n");
6271 dprintf(fd, "-------+-----------\n");
6272 for(List<PendingFrameDropInfo>::iterator i = mPendingFrameDropList.begin();
6273 i != mPendingFrameDropList.end(); i++) {
6274 dprintf(fd, " %5d | %9d \n",
6275 i->frame_number, i->stream_ID);
6276 }
6277 dprintf(fd, "-------+-----------\n");
6278
6279 dprintf(fd, "\n Camera HAL3 information End \n");
6280
6281 /* use dumpsys media.camera as trigger to send update debug level event */
6282 mUpdateDebugLevel = true;
6283 pthread_mutex_unlock(&mMutex);
6284 return;
6285}
6286
6287/*===========================================================================
6288 * FUNCTION : flush
6289 *
6290 * DESCRIPTION: Calls stopAllChannels, notifyErrorForPendingRequests and
6291 * conditionally restarts channels
6292 *
6293 * PARAMETERS :
6294 * @ restartChannels: re-start all channels
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -07006295 * @ stopChannelImmediately: stop the channel immediately. This should be used
6296 * when the device has encountered an error and MIPI may
6297 * have been stopped.
Thierry Strudel3d639192016-09-09 11:52:26 -07006298 *
6299 * RETURN :
6300 * 0 on success
6301 * Error code on failure
6302 *==========================================================================*/
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -07006303int QCamera3HardwareInterface::flush(bool restartChannels, bool stopChannelImmediately)
Thierry Strudel3d639192016-09-09 11:52:26 -07006304{
Thierry Strudel9ec39c62016-12-28 11:30:05 -08006305 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_STOP_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07006306 int32_t rc = NO_ERROR;
6307
6308 LOGD("Unblocking Process Capture Request");
6309 pthread_mutex_lock(&mMutex);
6310 mFlush = true;
6311 pthread_mutex_unlock(&mMutex);
6312
Chien-Yu Chen11c8edc2017-09-11 20:54:24 -07006313 // Disable HDR+ if it's enabled.
6314 {
6315 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
6316 finishHdrPlusClientOpeningLocked(l);
6317 disableHdrPlusModeLocked();
6318 }
6319
Thierry Strudel3d639192016-09-09 11:52:26 -07006320 rc = stopAllChannels();
6321 // unlink of dualcam
6322 if (mIsDeviceLinked) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006323 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
6324 &m_pDualCamCmdPtr->bundle_info;
6325 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
Thierry Strudel3d639192016-09-09 11:52:26 -07006326 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
6327 pthread_mutex_lock(&gCamLock);
6328
6329 if (mIsMainCamera == 1) {
6330 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
6331 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
Thierry Strudel269c81a2016-10-12 12:13:59 -07006332 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel3d639192016-09-09 11:52:26 -07006333 // related session id should be session id of linked session
6334 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
6335 } else {
6336 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
6337 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
Thierry Strudel269c81a2016-10-12 12:13:59 -07006338 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel3d639192016-09-09 11:52:26 -07006339 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
6340 }
Thierry Strudel2896d122017-02-23 19:18:03 -08006341 m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
Thierry Strudel3d639192016-09-09 11:52:26 -07006342 pthread_mutex_unlock(&gCamLock);
6343
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006344 rc = mCameraHandle->ops->set_dual_cam_cmd(
6345 mCameraHandle->camera_handle);
Thierry Strudel3d639192016-09-09 11:52:26 -07006346 if (rc < 0) {
6347 LOGE("Dualcam: Unlink failed, but still proceed to close");
6348 }
6349 }
6350
6351 if (rc < 0) {
6352 LOGE("stopAllChannels failed");
6353 return rc;
6354 }
6355 if (mChannelHandle) {
Chien-Yu Chen153c5172017-09-08 11:33:19 -07006356 stopChannelLocked(stopChannelImmediately);
Thierry Strudel3d639192016-09-09 11:52:26 -07006357 }
6358
6359 // Reset bundle info
6360 rc = setBundleInfo();
6361 if (rc < 0) {
6362 LOGE("setBundleInfo failed %d", rc);
6363 return rc;
6364 }
6365
6366 // Mutex Lock
6367 pthread_mutex_lock(&mMutex);
6368
6369 // Unblock process_capture_request
6370 mPendingLiveRequest = 0;
6371 pthread_cond_signal(&mRequestCond);
6372
6373 rc = notifyErrorForPendingRequests();
6374 if (rc < 0) {
6375 LOGE("notifyErrorForPendingRequests failed");
6376 pthread_mutex_unlock(&mMutex);
6377 return rc;
6378 }
6379
6380 mFlush = false;
6381
6382 // Start the Streams/Channels
6383 if (restartChannels) {
6384 rc = startAllChannels();
6385 if (rc < 0) {
6386 LOGE("startAllChannels failed");
6387 pthread_mutex_unlock(&mMutex);
6388 return rc;
6389 }
Thierry Strudel2896d122017-02-23 19:18:03 -08006390 if (mChannelHandle) {
Chien-Yu Chen153c5172017-09-08 11:33:19 -07006391 // Configure modules for stream on.
6392 rc = startChannelLocked();
Thierry Strudel2896d122017-02-23 19:18:03 -08006393 if (rc < 0) {
Chien-Yu Chen153c5172017-09-08 11:33:19 -07006394 LOGE("startChannelLocked failed");
Thierry Strudel2896d122017-02-23 19:18:03 -08006395 pthread_mutex_unlock(&mMutex);
6396 return rc;
6397 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006398 }
6399 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006400 pthread_mutex_unlock(&mMutex);
6401
6402 return 0;
6403}
6404
6405/*===========================================================================
6406 * FUNCTION : flushPerf
6407 *
6408 * DESCRIPTION: This is the performance optimization version of flush that does
6409 * not use stream off, rather flushes the system
6410 *
6411 * PARAMETERS :
6412 *
6413 *
6414 * RETURN : 0 : success
6415 * -EINVAL: input is malformed (device is not valid)
6416 * -ENODEV: if the device has encountered a serious error
6417 *==========================================================================*/
6418int QCamera3HardwareInterface::flushPerf()
6419{
Thierry Strudel9ec39c62016-12-28 11:30:05 -08006420 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_STOP_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07006421 int32_t rc = 0;
6422 struct timespec timeout;
6423 bool timed_wait = false;
6424
6425 pthread_mutex_lock(&mMutex);
6426 mFlushPerf = true;
6427 mPendingBuffersMap.numPendingBufsAtFlush =
6428 mPendingBuffersMap.get_num_overall_buffers();
6429 LOGD("Calling flush. Wait for %d buffers to return",
6430 mPendingBuffersMap.numPendingBufsAtFlush);
6431
6432 /* send the flush event to the backend */
6433 rc = mCameraHandle->ops->flush(mCameraHandle->camera_handle);
6434 if (rc < 0) {
6435 LOGE("Error in flush: IOCTL failure");
6436 mFlushPerf = false;
6437 pthread_mutex_unlock(&mMutex);
6438 return -ENODEV;
6439 }
6440
6441 if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
6442 LOGD("No pending buffers in HAL, return flush");
6443 mFlushPerf = false;
6444 pthread_mutex_unlock(&mMutex);
6445 return rc;
6446 }
6447
6448 /* wait on a signal that buffers were received */
Shuzhen Wangfb961e52016-11-28 11:48:02 -08006449 rc = clock_gettime(CLOCK_MONOTONIC, &timeout);
Thierry Strudel3d639192016-09-09 11:52:26 -07006450 if (rc < 0) {
6451 LOGE("Error reading the real time clock, cannot use timed wait");
6452 } else {
6453 timeout.tv_sec += FLUSH_TIMEOUT;
6454 timed_wait = true;
6455 }
6456
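/* Note: pthread_cond_timedwait() measures its timeout against the clock the
 * condition variable was initialized with. Using CLOCK_MONOTONIC above assumes
 * mBuffersCond was created (via cam_cond.h) with a monotonic clock attribute,
 * roughly:
 *   pthread_condattr_t attr;
 *   pthread_condattr_init(&attr);
 *   pthread_condattr_setclock(&attr, CLOCK_MONOTONIC);
 *   pthread_cond_init(&mBuffersCond, &attr);
 */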
 6457 //Block on the condition variable
6458 while (mPendingBuffersMap.numPendingBufsAtFlush != 0) {
6459 LOGD("Waiting on mBuffersCond");
6460 if (!timed_wait) {
6461 rc = pthread_cond_wait(&mBuffersCond, &mMutex);
6462 if (rc != 0) {
6463 LOGE("pthread_cond_wait failed due to rc = %s",
6464 strerror(rc));
6465 break;
6466 }
6467 } else {
6468 rc = pthread_cond_timedwait(&mBuffersCond, &mMutex, &timeout);
6469 if (rc != 0) {
6470 LOGE("pthread_cond_timedwait failed due to rc = %s",
6471 strerror(rc));
6472 break;
6473 }
6474 }
6475 }
6476 if (rc != 0) {
6477 mFlushPerf = false;
6478 pthread_mutex_unlock(&mMutex);
6479 return -ENODEV;
6480 }
6481
6482 LOGD("Received buffers, now safe to return them");
6483
6484 //make sure the channels handle flush
6485 //currently only required for the picture channel to release snapshot resources
6486 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6487 it != mStreamInfo.end(); it++) {
6488 QCamera3Channel *channel = (*it)->channel;
6489 if (channel) {
6490 rc = channel->flush();
6491 if (rc) {
6492 LOGE("Flushing the channels failed with error %d", rc);
 6493 // Even though the channel flush failed, we need to continue and
 6494 // return the buffers we have to the framework; however, the return
 6495 // value will be an error.
6496 rc = -ENODEV;
6497 }
6498 }
6499 }
6500
6501 /* notify the frameworks and send errored results */
6502 rc = notifyErrorForPendingRequests();
6503 if (rc < 0) {
6504 LOGE("notifyErrorForPendingRequests failed");
6505 pthread_mutex_unlock(&mMutex);
6506 return rc;
6507 }
6508
6509 //unblock process_capture_request
6510 mPendingLiveRequest = 0;
6511 unblockRequestIfNecessary();
6512
6513 mFlushPerf = false;
6514 pthread_mutex_unlock(&mMutex);
6515 LOGD ("Flush Operation complete. rc = %d", rc);
6516 return rc;
6517}
6518
6519/*===========================================================================
6520 * FUNCTION : handleCameraDeviceError
6521 *
 6522 * DESCRIPTION: This function performs an internal flush, notifies the framework
 6523 * of the error, and updates the state variable.
6524 *
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -07006525 * PARAMETERS :
6526 * @stopChannelImmediately : stop channels immediately without waiting for
6527 * frame boundary.
Thierry Strudel3d639192016-09-09 11:52:26 -07006528 *
6529 * RETURN : NO_ERROR on Success
6530 * Error code on failure
6531 *==========================================================================*/
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -07006532int32_t QCamera3HardwareInterface::handleCameraDeviceError(bool stopChannelImmediately)
Thierry Strudel3d639192016-09-09 11:52:26 -07006533{
6534 int32_t rc = NO_ERROR;
6535
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006536 {
6537 Mutex::Autolock lock(mFlushLock);
6538 pthread_mutex_lock(&mMutex);
6539 if (mState != ERROR) {
6540 //if mState != ERROR, nothing to be done
6541 pthread_mutex_unlock(&mMutex);
6542 return NO_ERROR;
6543 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006544 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07006545
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -07006546 rc = flush(false /* restart channels */, stopChannelImmediately);
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006547 if (NO_ERROR != rc) {
6548 LOGE("internal flush to handle mState = ERROR failed");
6549 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006550
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006551 pthread_mutex_lock(&mMutex);
6552 mState = DEINIT;
6553 pthread_mutex_unlock(&mMutex);
6554 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006555
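// Note: per the camera3 HAL contract (hardware/camera3.h), CAMERA3_MSG_ERROR_DEVICE
// signals an unrecoverable failure; frame_number and error_stream carry no meaning
// for this error type, which is why they are left as 0/NULL below.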
6556 camera3_notify_msg_t notify_msg;
6557 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
6558 notify_msg.type = CAMERA3_MSG_ERROR;
6559 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_DEVICE;
6560 notify_msg.message.error.error_stream = NULL;
6561 notify_msg.message.error.frame_number = 0;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006562 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -07006563
6564 return rc;
6565}
6566
6567/*===========================================================================
6568 * FUNCTION : captureResultCb
6569 *
 6570 * DESCRIPTION: Callback handler for all capture results
 6571 * (stream buffers as well as metadata)
6572 *
6573 * PARAMETERS :
 6574 * @metadata_buf : metadata buffer from the backend
 6575 * @buffer : actual gralloc buffer to be returned to the framework.
 6576 * NULL if the callback carries metadata.
 * @frame_number : frame number of the corresponding capture request
 * @isInputBuffer : true if this callback returns an input (reprocess) buffer
6577 *
6578 * RETURN : NONE
6579 *==========================================================================*/
6580void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf,
6581 camera3_stream_buffer_t *buffer, uint32_t frame_number, bool isInputBuffer)
6582{
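// Dispatch overview: a metadata callback either carries a whole batch of metadata
// (HFR/batch mode, mBatchSize > 0) and is expanded by handleBatchMetadata(), or a
// single frame's metadata handled directly under mMutex; buffer callbacks are routed
// by whether they return an input (reprocess) buffer or a regular output buffer.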
6583 if (metadata_buf) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006584 pthread_mutex_lock(&mMutex);
6585 uint8_t batchSize = mBatchSize;
6586 pthread_mutex_unlock(&mMutex);
6587 if (batchSize) {
Thierry Strudel3d639192016-09-09 11:52:26 -07006588 handleBatchMetadata(metadata_buf,
6589 true /* free_and_bufdone_meta_buf */);
6590 } else { /* mBatchSize = 0 */
6591 hdrPlusPerfLock(metadata_buf);
6592 pthread_mutex_lock(&mMutex);
6593 handleMetadataWithLock(metadata_buf,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006594 true /* free_and_bufdone_meta_buf */,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006595 true /* last urgent frame of batch metadata */,
6596 true /* last frame of batch metadata */,
Thierry Strudel54dc9782017-02-15 12:12:10 -08006597 NULL);
Thierry Strudel3d639192016-09-09 11:52:26 -07006598 pthread_mutex_unlock(&mMutex);
6599 }
6600 } else if (isInputBuffer) {
6601 pthread_mutex_lock(&mMutex);
6602 handleInputBufferWithLock(frame_number);
6603 pthread_mutex_unlock(&mMutex);
6604 } else {
6605 pthread_mutex_lock(&mMutex);
6606 handleBufferWithLock(buffer, frame_number);
6607 pthread_mutex_unlock(&mMutex);
6608 }
6609 return;
6610}
6611
6612/*===========================================================================
6613 * FUNCTION : getReprocessibleOutputStreamId
6614 *
6615 * DESCRIPTION: Get source output stream id for the input reprocess stream
6616 * based on size and format, which would be the largest
6617 * output stream if an input stream exists.
6618 *
6619 * PARAMETERS :
6620 * @id : return the stream id if found
6621 *
6622 * RETURN : int32_t type of status
6623 * NO_ERROR -- success
 6624 * NAME_NOT_FOUND -- no matching output stream found
6625 *==========================================================================*/
6626int32_t QCamera3HardwareInterface::getReprocessibleOutputStreamId(uint32_t &id)
6627{
 6628 /* Check for an output or bidirectional stream with the same size and format
 6629 as the input stream, and return that stream */
6630 if ((mInputStreamInfo.dim.width > 0) &&
6631 (mInputStreamInfo.dim.height > 0)) {
6632 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6633 it != mStreamInfo.end(); it++) {
6634
6635 camera3_stream_t *stream = (*it)->stream;
6636 if ((stream->width == (uint32_t)mInputStreamInfo.dim.width) &&
6637 (stream->height == (uint32_t)mInputStreamInfo.dim.height) &&
6638 (stream->format == mInputStreamInfo.format)) {
6639 // Usage flag for an input stream and the source output stream
6640 // may be different.
6641 LOGD("Found reprocessible output stream! %p", *it);
6642 LOGD("input stream usage 0x%x, current stream usage 0x%x",
6643 stream->usage, mInputStreamInfo.usage);
6644
6645 QCamera3Channel *channel = (QCamera3Channel *)stream->priv;
6646 if (channel != NULL && channel->mStreams[0]) {
6647 id = channel->mStreams[0]->getMyServerID();
6648 return NO_ERROR;
6649 }
6650 }
6651 }
6652 } else {
6653 LOGD("No input stream, so no reprocessible output stream");
6654 }
6655 return NAME_NOT_FOUND;
6656}
6657
6658/*===========================================================================
6659 * FUNCTION : lookupFwkName
6660 *
 6661 * DESCRIPTION: In case the enum is not the same in the framework and backend,
 6662 * make sure the parameter is correctly propagated
6663 *
6664 * PARAMETERS :
6665 * @arr : map between the two enums
 6666 * @len : length of the map
 6667 * @hal_name : HAL enum value to map
6668 *
6669 * RETURN : int type of status
6670 * fwk_name -- success
 6671 * NAME_NOT_FOUND -- failure (no matching framework enum)
6672 *==========================================================================*/
6673template <typename halType, class mapType> int lookupFwkName(const mapType *arr,
6674 size_t len, halType hal_name)
6675{
6676
6677 for (size_t i = 0; i < len; i++) {
6678 if (arr[i].hal_name == hal_name) {
6679 return arr[i].fwk_name;
6680 }
6681 }
6682
 6683 /* Not finding a matching framework type is not necessarily
 6684 * an error. This happens when mm-camera supports more attributes
 6685 * than the framework does */
6686 LOGH("Cannot find matching framework type");
6687 return NAME_NOT_FOUND;
6688}
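/* Illustrative usage sketch (mirrors the calls later in this file, e.g. the
 * FLASH_MODES_MAP lookup); the specific enum value is only an example:
 *   int val = lookupFwkName(FLASH_MODES_MAP,
 *           METADATA_MAP_SIZE(FLASH_MODES_MAP), CAM_FLASH_MODE_TORCH);
 *   if (NAME_NOT_FOUND != val) {
 *       uint8_t fwk_flashMode = (uint8_t)val; // safe to publish to the framework
 *   }
 */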
6689
6690/*===========================================================================
6691 * FUNCTION : lookupHalName
6692 *
 6693 * DESCRIPTION: In case the enum is not the same in the framework and backend,
 6694 * make sure the parameter is correctly propagated
6695 *
6696 * PARAMETERS :
6697 * @arr : map between the two enums
 6698 * @len : length of the map
 6699 * @fwk_name : framework enum value to map
6700 *
6701 * RETURN : int32_t type of status
6702 * hal_name -- success
 6703 * NAME_NOT_FOUND -- failure (no matching HAL enum)
6704 *==========================================================================*/
6705template <typename fwkType, class mapType> int lookupHalName(const mapType *arr,
6706 size_t len, fwkType fwk_name)
6707{
6708 for (size_t i = 0; i < len; i++) {
6709 if (arr[i].fwk_name == fwk_name) {
6710 return arr[i].hal_name;
6711 }
6712 }
6713
6714 LOGE("Cannot find matching hal type fwk_name=%d", fwk_name);
6715 return NAME_NOT_FOUND;
6716}
6717
6718/*===========================================================================
6719 * FUNCTION : lookupProp
6720 *
6721 * DESCRIPTION: lookup a value by its name
6722 *
6723 * PARAMETERS :
6724 * @arr : map between the two enums
6725 * @len : size of the map
6726 * @name : name to be looked up
6727 *
6728 * RETURN : Value if found
6729 * CAM_CDS_MODE_MAX if not found
6730 *==========================================================================*/
6731template <class mapType> cam_cds_mode_type_t lookupProp(const mapType *arr,
6732 size_t len, const char *name)
6733{
6734 if (name) {
6735 for (size_t i = 0; i < len; i++) {
6736 if (!strcmp(arr[i].desc, name)) {
6737 return arr[i].val;
6738 }
6739 }
6740 }
6741 return CAM_CDS_MODE_MAX;
6742}
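/* Illustrative usage sketch, assuming the CDS_MAP descriptor/value table defined
 * elsewhere in this HAL (the property name is only an example):
 *   char prop[PROPERTY_VALUE_MAX];
 *   memset(prop, 0, sizeof(prop));
 *   property_get("persist.camera.CDS", prop, "Auto");
 *   cam_cds_mode_type_t cds_mode =
 *           lookupProp(CDS_MAP, METADATA_MAP_SIZE(CDS_MAP), prop);
 */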
6743
6744/*===========================================================================
 6745 * FUNCTION : translateFromHalMetadata
 6746 * DESCRIPTION: Translate metadata reported by the HAL/backend into the
 * camera_metadata_t format expected by the framework
6747 *
6748 * PARAMETERS :
6749 * @metadata : metadata information from callback
Shuzhen Wang181c57b2017-07-21 11:39:44 -07006750 * @pendingRequest: pending request for this metadata
Thierry Strudel3d639192016-09-09 11:52:26 -07006751 * @pprocDone: whether internal offline postprocessing is done
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006752 * @lastMetadataInBatch: Boolean to indicate whether this is the last metadata
 6753 * in a batch. Always true for non-batch mode.
 * @enableZsl: optional pointer to the ZSL enable state to be reported in the result
Thierry Strudel3d639192016-09-09 11:52:26 -07006754 *
6755 * RETURN : camera_metadata_t*
6756 * metadata in a format specified by fwk
6757 *==========================================================================*/
6758camera_metadata_t*
6759QCamera3HardwareInterface::translateFromHalMetadata(
6760 metadata_buffer_t *metadata,
Shuzhen Wang181c57b2017-07-21 11:39:44 -07006761 const PendingRequestInfo& pendingRequest,
Thierry Strudel3d639192016-09-09 11:52:26 -07006762 bool pprocDone,
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07006763 bool lastMetadataInBatch,
6764 const bool *enableZsl)
Thierry Strudel3d639192016-09-09 11:52:26 -07006765{
6766 CameraMetadata camMetadata;
6767 camera_metadata_t *resultMetadata;
6768
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006769 if (!lastMetadataInBatch) {
Shuzhen Wang8fe62792017-03-20 16:10:25 -07006770 /* In batch mode, only populate SENSOR_TIMESTAMP if this is not the last in batch.
6771 * Timestamp is needed because it's used for shutter notify calculation.
6772 * */
Shuzhen Wang181c57b2017-07-21 11:39:44 -07006773 camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &pendingRequest.timestamp, 1);
Shuzhen Wang8fe62792017-03-20 16:10:25 -07006774 resultMetadata = camMetadata.release();
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006775 return resultMetadata;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006776 }
6777
Shuzhen Wang181c57b2017-07-21 11:39:44 -07006778 if (pendingRequest.jpegMetadata.entryCount())
6779 camMetadata.append(pendingRequest.jpegMetadata);
Thierry Strudel3d639192016-09-09 11:52:26 -07006780
Shuzhen Wang181c57b2017-07-21 11:39:44 -07006781 camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &pendingRequest.timestamp, 1);
6782 camMetadata.update(ANDROID_REQUEST_ID, &pendingRequest.request_id, 1);
6783 camMetadata.update(ANDROID_REQUEST_PIPELINE_DEPTH, &pendingRequest.pipeline_depth, 1);
6784 camMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &pendingRequest.capture_intent, 1);
6785 camMetadata.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &pendingRequest.hybrid_ae_enable, 1);
Samuel Ha68ba5172016-12-15 18:41:12 -08006786 if (mBatchSize == 0) {
6787 // DevCamDebug metadata translateFromHalMetadata. Only update this one for non-HFR mode
Shuzhen Wang181c57b2017-07-21 11:39:44 -07006788 camMetadata.update(DEVCAMDEBUG_META_ENABLE, &pendingRequest.DevCamDebug_meta_enable, 1);
Samuel Ha68ba5172016-12-15 18:41:12 -08006789 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006790
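// Note on the pattern used throughout the rest of this function:
// IF_META_AVAILABLE(type, ptr, tag, metadata) declares a typed pointer 'ptr' and runs
// its body only when 'tag' is actually present in the metadata buffer, so each
// vendor/framework key below is published only when the backend reported it.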
Samuel Ha68ba5172016-12-15 18:41:12 -08006791 // atrace_begin(ATRACE_TAG_ALWAYS, "DevCamDebugInfo");
 6792 // Only update DevCamDebug metadata conditionally: non-HFR mode and only when it is enabled.
Shuzhen Wang181c57b2017-07-21 11:39:44 -07006793 if (mBatchSize == 0 && pendingRequest.DevCamDebug_meta_enable != 0) {
Samuel Ha68ba5172016-12-15 18:41:12 -08006794 // DevCamDebug metadata translateFromHalMetadata AF
6795 IF_META_AVAILABLE(int32_t, DevCamDebug_af_lens_position,
6796 CAM_INTF_META_DEV_CAM_AF_LENS_POSITION, metadata) {
6797 int32_t fwk_DevCamDebug_af_lens_position = *DevCamDebug_af_lens_position;
6798 camMetadata.update(DEVCAMDEBUG_AF_LENS_POSITION, &fwk_DevCamDebug_af_lens_position, 1);
6799 }
6800 IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_confidence,
Shuzhen Wang3569d4a2017-09-04 19:10:28 -07006801 CAM_INTF_META_AF_TOF_CONFIDENCE, metadata) {
Samuel Ha68ba5172016-12-15 18:41:12 -08006802 int32_t fwk_DevCamDebug_af_tof_confidence = *DevCamDebug_af_tof_confidence;
6803 camMetadata.update(DEVCAMDEBUG_AF_TOF_CONFIDENCE, &fwk_DevCamDebug_af_tof_confidence, 1);
6804 }
6805 IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_distance,
Shuzhen Wang3569d4a2017-09-04 19:10:28 -07006806 CAM_INTF_META_AF_TOF_DISTANCE, metadata) {
Samuel Ha68ba5172016-12-15 18:41:12 -08006807 int32_t fwk_DevCamDebug_af_tof_distance = *DevCamDebug_af_tof_distance;
6808 camMetadata.update(DEVCAMDEBUG_AF_TOF_DISTANCE, &fwk_DevCamDebug_af_tof_distance, 1);
6809 }
6810 IF_META_AVAILABLE(int32_t, DevCamDebug_af_luma,
6811 CAM_INTF_META_DEV_CAM_AF_LUMA, metadata) {
6812 int32_t fwk_DevCamDebug_af_luma = *DevCamDebug_af_luma;
6813 camMetadata.update(DEVCAMDEBUG_AF_LUMA, &fwk_DevCamDebug_af_luma, 1);
6814 }
6815 IF_META_AVAILABLE(int32_t, DevCamDebug_af_haf_state,
6816 CAM_INTF_META_DEV_CAM_AF_HAF_STATE, metadata) {
6817 int32_t fwk_DevCamDebug_af_haf_state = *DevCamDebug_af_haf_state;
6818 camMetadata.update(DEVCAMDEBUG_AF_HAF_STATE, &fwk_DevCamDebug_af_haf_state, 1);
6819 }
6820 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_target_pos,
6821 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_TARGET_POS, metadata) {
6822 int32_t fwk_DevCamDebug_af_monitor_pdaf_target_pos =
6823 *DevCamDebug_af_monitor_pdaf_target_pos;
6824 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
6825 &fwk_DevCamDebug_af_monitor_pdaf_target_pos, 1);
6826 }
6827 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_confidence,
6828 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_CONFIDENCE, metadata) {
6829 int32_t fwk_DevCamDebug_af_monitor_pdaf_confidence =
6830 *DevCamDebug_af_monitor_pdaf_confidence;
6831 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
6832 &fwk_DevCamDebug_af_monitor_pdaf_confidence, 1);
6833 }
6834 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_refocus,
6835 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_REFOCUS, metadata) {
6836 int32_t fwk_DevCamDebug_af_monitor_pdaf_refocus = *DevCamDebug_af_monitor_pdaf_refocus;
6837 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
6838 &fwk_DevCamDebug_af_monitor_pdaf_refocus, 1);
6839 }
6840 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_target_pos,
6841 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_TARGET_POS, metadata) {
6842 int32_t fwk_DevCamDebug_af_monitor_tof_target_pos =
6843 *DevCamDebug_af_monitor_tof_target_pos;
6844 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
6845 &fwk_DevCamDebug_af_monitor_tof_target_pos, 1);
6846 }
6847 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_confidence,
6848 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_CONFIDENCE, metadata) {
6849 int32_t fwk_DevCamDebug_af_monitor_tof_confidence =
6850 *DevCamDebug_af_monitor_tof_confidence;
6851 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
6852 &fwk_DevCamDebug_af_monitor_tof_confidence, 1);
6853 }
6854 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_refocus,
6855 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_REFOCUS, metadata) {
6856 int32_t fwk_DevCamDebug_af_monitor_tof_refocus = *DevCamDebug_af_monitor_tof_refocus;
6857 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
6858 &fwk_DevCamDebug_af_monitor_tof_refocus, 1);
6859 }
6860 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_type_select,
6861 CAM_INTF_META_DEV_CAM_AF_MONITOR_TYPE_SELECT, metadata) {
6862 int32_t fwk_DevCamDebug_af_monitor_type_select = *DevCamDebug_af_monitor_type_select;
6863 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
6864 &fwk_DevCamDebug_af_monitor_type_select, 1);
6865 }
6866 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_refocus,
6867 CAM_INTF_META_DEV_CAM_AF_MONITOR_REFOCUS, metadata) {
6868 int32_t fwk_DevCamDebug_af_monitor_refocus = *DevCamDebug_af_monitor_refocus;
6869 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_REFOCUS,
6870 &fwk_DevCamDebug_af_monitor_refocus, 1);
6871 }
6872 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_target_pos,
6873 CAM_INTF_META_DEV_CAM_AF_MONITOR_TARGET_POS, metadata) {
6874 int32_t fwk_DevCamDebug_af_monitor_target_pos = *DevCamDebug_af_monitor_target_pos;
6875 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
6876 &fwk_DevCamDebug_af_monitor_target_pos, 1);
6877 }
6878 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_target_pos,
6879 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_TARGET_POS, metadata) {
6880 int32_t fwk_DevCamDebug_af_search_pdaf_target_pos =
6881 *DevCamDebug_af_search_pdaf_target_pos;
6882 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
6883 &fwk_DevCamDebug_af_search_pdaf_target_pos, 1);
6884 }
6885 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_next_pos,
6886 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEXT_POS, metadata) {
6887 int32_t fwk_DevCamDebug_af_search_pdaf_next_pos = *DevCamDebug_af_search_pdaf_next_pos;
6888 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
6889 &fwk_DevCamDebug_af_search_pdaf_next_pos, 1);
6890 }
6891 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_near_pos,
6892 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEAR_POS, metadata) {
6893 int32_t fwk_DevCamDebug_af_search_pdaf_near_pos = *DevCamDebug_af_search_pdaf_near_pos;
6894 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
6895 &fwk_DevCamDebug_af_search_pdaf_near_pos, 1);
6896 }
6897 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_far_pos,
6898 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_FAR_POS, metadata) {
6899 int32_t fwk_DevCamDebug_af_search_pdaf_far_pos = *DevCamDebug_af_search_pdaf_far_pos;
6900 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
6901 &fwk_DevCamDebug_af_search_pdaf_far_pos, 1);
6902 }
6903 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_confidence,
6904 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_CONFIDENCE, metadata) {
6905 int32_t fwk_DevCamDebug_af_search_pdaf_confidence = *DevCamDebug_af_search_pdaf_confidence;
6906 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
6907 &fwk_DevCamDebug_af_search_pdaf_confidence, 1);
6908 }
6909 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_target_pos,
6910 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_TARGET_POS, metadata) {
6911 int32_t fwk_DevCamDebug_af_search_tof_target_pos =
6912 *DevCamDebug_af_search_tof_target_pos;
6913 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
6914 &fwk_DevCamDebug_af_search_tof_target_pos, 1);
6915 }
6916 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_next_pos,
6917 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEXT_POS, metadata) {
6918 int32_t fwk_DevCamDebug_af_search_tof_next_pos = *DevCamDebug_af_search_tof_next_pos;
6919 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
6920 &fwk_DevCamDebug_af_search_tof_next_pos, 1);
6921 }
6922 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_near_pos,
6923 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEAR_POS, metadata) {
6924 int32_t fwk_DevCamDebug_af_search_tof_near_pos = *DevCamDebug_af_search_tof_near_pos;
6925 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
6926 &fwk_DevCamDebug_af_search_tof_near_pos, 1);
6927 }
6928 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_far_pos,
6929 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_FAR_POS, metadata) {
6930 int32_t fwk_DevCamDebug_af_search_tof_far_pos = *DevCamDebug_af_search_tof_far_pos;
6931 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
6932 &fwk_DevCamDebug_af_search_tof_far_pos, 1);
6933 }
6934 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_confidence,
6935 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_CONFIDENCE, metadata) {
6936 int32_t fwk_DevCamDebug_af_search_tof_confidence = *DevCamDebug_af_search_tof_confidence;
6937 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
6938 &fwk_DevCamDebug_af_search_tof_confidence, 1);
6939 }
6940 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_type_select,
6941 CAM_INTF_META_DEV_CAM_AF_SEARCH_TYPE_SELECT, metadata) {
6942 int32_t fwk_DevCamDebug_af_search_type_select = *DevCamDebug_af_search_type_select;
6943 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
6944 &fwk_DevCamDebug_af_search_type_select, 1);
6945 }
6946 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_next_pos,
6947 CAM_INTF_META_DEV_CAM_AF_SEARCH_NEXT_POS, metadata) {
6948 int32_t fwk_DevCamDebug_af_search_next_pos = *DevCamDebug_af_search_next_pos;
6949 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
6950 &fwk_DevCamDebug_af_search_next_pos, 1);
6951 }
6952 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_target_pos,
6953 CAM_INTF_META_DEV_CAM_AF_SEARCH_TARGET_POS, metadata) {
6954 int32_t fwk_DevCamDebug_af_search_target_pos = *DevCamDebug_af_search_target_pos;
6955 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
6956 &fwk_DevCamDebug_af_search_target_pos, 1);
6957 }
6958 // DevCamDebug metadata translateFromHalMetadata AEC
6959 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_target_luma,
6960 CAM_INTF_META_DEV_CAM_AEC_TARGET_LUMA, metadata) {
6961 int32_t fwk_DevCamDebug_aec_target_luma = *DevCamDebug_aec_target_luma;
6962 camMetadata.update(DEVCAMDEBUG_AEC_TARGET_LUMA, &fwk_DevCamDebug_aec_target_luma, 1);
6963 }
6964 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_comp_luma,
6965 CAM_INTF_META_DEV_CAM_AEC_COMP_LUMA, metadata) {
6966 int32_t fwk_DevCamDebug_aec_comp_luma = *DevCamDebug_aec_comp_luma;
6967 camMetadata.update(DEVCAMDEBUG_AEC_COMP_LUMA, &fwk_DevCamDebug_aec_comp_luma, 1);
6968 }
6969 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_avg_luma,
6970 CAM_INTF_META_DEV_CAM_AEC_AVG_LUMA, metadata) {
6971 int32_t fwk_DevCamDebug_aec_avg_luma = *DevCamDebug_aec_avg_luma;
6972 camMetadata.update(DEVCAMDEBUG_AEC_AVG_LUMA, &fwk_DevCamDebug_aec_avg_luma, 1);
6973 }
6974 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_cur_luma,
6975 CAM_INTF_META_DEV_CAM_AEC_CUR_LUMA, metadata) {
6976 int32_t fwk_DevCamDebug_aec_cur_luma = *DevCamDebug_aec_cur_luma;
6977 camMetadata.update(DEVCAMDEBUG_AEC_CUR_LUMA, &fwk_DevCamDebug_aec_cur_luma, 1);
6978 }
6979 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_linecount,
6980 CAM_INTF_META_DEV_CAM_AEC_LINECOUNT, metadata) {
6981 int32_t fwk_DevCamDebug_aec_linecount = *DevCamDebug_aec_linecount;
6982 camMetadata.update(DEVCAMDEBUG_AEC_LINECOUNT, &fwk_DevCamDebug_aec_linecount, 1);
6983 }
6984 IF_META_AVAILABLE(float, DevCamDebug_aec_real_gain,
6985 CAM_INTF_META_DEV_CAM_AEC_REAL_GAIN, metadata) {
6986 float fwk_DevCamDebug_aec_real_gain = *DevCamDebug_aec_real_gain;
6987 camMetadata.update(DEVCAMDEBUG_AEC_REAL_GAIN, &fwk_DevCamDebug_aec_real_gain, 1);
6988 }
6989 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_exp_index,
6990 CAM_INTF_META_DEV_CAM_AEC_EXP_INDEX, metadata) {
6991 int32_t fwk_DevCamDebug_aec_exp_index = *DevCamDebug_aec_exp_index;
6992 camMetadata.update(DEVCAMDEBUG_AEC_EXP_INDEX, &fwk_DevCamDebug_aec_exp_index, 1);
6993 }
6994 IF_META_AVAILABLE(float, DevCamDebug_aec_lux_idx,
6995 CAM_INTF_META_DEV_CAM_AEC_LUX_IDX, metadata) {
6996 float fwk_DevCamDebug_aec_lux_idx = *DevCamDebug_aec_lux_idx;
6997 camMetadata.update(DEVCAMDEBUG_AEC_LUX_IDX, &fwk_DevCamDebug_aec_lux_idx, 1);
6998 }
Samuel Ha34229982017-02-17 13:51:11 -08006999 // DevCamDebug metadata translateFromHalMetadata zzHDR
7000 IF_META_AVAILABLE(float, DevCamDebug_aec_l_real_gain,
7001 CAM_INTF_META_DEV_CAM_AEC_L_REAL_GAIN, metadata) {
7002 float fwk_DevCamDebug_aec_l_real_gain = *DevCamDebug_aec_l_real_gain;
7003 camMetadata.update(DEVCAMDEBUG_AEC_L_REAL_GAIN, &fwk_DevCamDebug_aec_l_real_gain, 1);
7004 }
7005 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_l_linecount,
7006 CAM_INTF_META_DEV_CAM_AEC_L_LINECOUNT, metadata) {
Samuel Hae80fbd52017-03-29 16:14:46 -07007007 int32_t fwk_DevCamDebug_aec_l_linecount = *DevCamDebug_aec_l_linecount;
Samuel Ha34229982017-02-17 13:51:11 -08007008 camMetadata.update(DEVCAMDEBUG_AEC_L_LINECOUNT, &fwk_DevCamDebug_aec_l_linecount, 1);
7009 }
7010 IF_META_AVAILABLE(float, DevCamDebug_aec_s_real_gain,
7011 CAM_INTF_META_DEV_CAM_AEC_S_REAL_GAIN, metadata) {
7012 float fwk_DevCamDebug_aec_s_real_gain = *DevCamDebug_aec_s_real_gain;
7013 camMetadata.update(DEVCAMDEBUG_AEC_S_REAL_GAIN, &fwk_DevCamDebug_aec_s_real_gain, 1);
7014 }
7015 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_s_linecount,
7016 CAM_INTF_META_DEV_CAM_AEC_S_LINECOUNT, metadata) {
Samuel Hae80fbd52017-03-29 16:14:46 -07007017 int32_t fwk_DevCamDebug_aec_s_linecount = *DevCamDebug_aec_s_linecount;
Samuel Ha34229982017-02-17 13:51:11 -08007018 camMetadata.update(DEVCAMDEBUG_AEC_S_LINECOUNT, &fwk_DevCamDebug_aec_s_linecount, 1);
7019 }
7020 IF_META_AVAILABLE(float, DevCamDebug_aec_hdr_sensitivity_ratio,
7021 CAM_INTF_META_DEV_CAM_AEC_HDR_SENSITIVITY_RATIO, metadata) {
7022 float fwk_DevCamDebug_aec_hdr_sensitivity_ratio =
7023 *DevCamDebug_aec_hdr_sensitivity_ratio;
7024 camMetadata.update(DEVCAMDEBUG_AEC_HDR_SENSITIVITY_RATIO,
7025 &fwk_DevCamDebug_aec_hdr_sensitivity_ratio, 1);
7026 }
7027 IF_META_AVAILABLE(float, DevCamDebug_aec_hdr_exp_time_ratio,
7028 CAM_INTF_META_DEV_CAM_AEC_HDR_EXP_TIME_RATIO, metadata) {
7029 float fwk_DevCamDebug_aec_hdr_exp_time_ratio = *DevCamDebug_aec_hdr_exp_time_ratio;
7030 camMetadata.update(DEVCAMDEBUG_AEC_HDR_EXP_TIME_RATIO,
7031 &fwk_DevCamDebug_aec_hdr_exp_time_ratio, 1);
7032 }
7033 // DevCamDebug metadata translateFromHalMetadata ADRC
7034 IF_META_AVAILABLE(float, DevCamDebug_aec_total_drc_gain,
7035 CAM_INTF_META_DEV_CAM_AEC_TOTAL_DRC_GAIN, metadata) {
7036 float fwk_DevCamDebug_aec_total_drc_gain = *DevCamDebug_aec_total_drc_gain;
7037 camMetadata.update(DEVCAMDEBUG_AEC_TOTAL_DRC_GAIN,
7038 &fwk_DevCamDebug_aec_total_drc_gain, 1);
7039 }
7040 IF_META_AVAILABLE(float, DevCamDebug_aec_color_drc_gain,
7041 CAM_INTF_META_DEV_CAM_AEC_COLOR_DRC_GAIN, metadata) {
7042 float fwk_DevCamDebug_aec_color_drc_gain = *DevCamDebug_aec_color_drc_gain;
7043 camMetadata.update(DEVCAMDEBUG_AEC_COLOR_DRC_GAIN,
7044 &fwk_DevCamDebug_aec_color_drc_gain, 1);
7045 }
7046 IF_META_AVAILABLE(float, DevCamDebug_aec_gtm_ratio,
7047 CAM_INTF_META_DEV_CAM_AEC_GTM_RATIO, metadata) {
7048 float fwk_DevCamDebug_aec_gtm_ratio = *DevCamDebug_aec_gtm_ratio;
7049 camMetadata.update(DEVCAMDEBUG_AEC_GTM_RATIO, &fwk_DevCamDebug_aec_gtm_ratio, 1);
7050 }
7051 IF_META_AVAILABLE(float, DevCamDebug_aec_ltm_ratio,
7052 CAM_INTF_META_DEV_CAM_AEC_LTM_RATIO, metadata) {
7053 float fwk_DevCamDebug_aec_ltm_ratio = *DevCamDebug_aec_ltm_ratio;
7054 camMetadata.update(DEVCAMDEBUG_AEC_LTM_RATIO, &fwk_DevCamDebug_aec_ltm_ratio, 1);
7055 }
7056 IF_META_AVAILABLE(float, DevCamDebug_aec_la_ratio,
7057 CAM_INTF_META_DEV_CAM_AEC_LA_RATIO, metadata) {
7058 float fwk_DevCamDebug_aec_la_ratio = *DevCamDebug_aec_la_ratio;
7059 camMetadata.update(DEVCAMDEBUG_AEC_LA_RATIO, &fwk_DevCamDebug_aec_la_ratio, 1);
7060 }
7061 IF_META_AVAILABLE(float, DevCamDebug_aec_gamma_ratio,
7062 CAM_INTF_META_DEV_CAM_AEC_GAMMA_RATIO, metadata) {
7063 float fwk_DevCamDebug_aec_gamma_ratio = *DevCamDebug_aec_gamma_ratio;
7064 camMetadata.update(DEVCAMDEBUG_AEC_GAMMA_RATIO, &fwk_DevCamDebug_aec_gamma_ratio, 1);
7065 }
Samuel Habdf4fac2017-07-28 17:21:18 -07007066 // DevCamDebug metadata translateFromHalMetadata AEC MOTION
7067 IF_META_AVAILABLE(float, DevCamDebug_aec_camera_motion_dx,
7068 CAM_INTF_META_DEV_CAM_AEC_CAMERA_MOTION_DX, metadata) {
7069 float fwk_DevCamDebug_aec_camera_motion_dx = *DevCamDebug_aec_camera_motion_dx;
7070 camMetadata.update(DEVCAMDEBUG_AEC_CAMERA_MOTION_DX,
7071 &fwk_DevCamDebug_aec_camera_motion_dx, 1);
7072 }
7073 IF_META_AVAILABLE(float, DevCamDebug_aec_camera_motion_dy,
7074 CAM_INTF_META_DEV_CAM_AEC_CAMERA_MOTION_DY, metadata) {
7075 float fwk_DevCamDebug_aec_camera_motion_dy = *DevCamDebug_aec_camera_motion_dy;
7076 camMetadata.update(DEVCAMDEBUG_AEC_CAMERA_MOTION_DY,
7077 &fwk_DevCamDebug_aec_camera_motion_dy, 1);
7078 }
7079 IF_META_AVAILABLE(float, DevCamDebug_aec_subject_motion,
7080 CAM_INTF_META_DEV_CAM_AEC_SUBJECT_MOTION, metadata) {
7081 float fwk_DevCamDebug_aec_subject_motion = *DevCamDebug_aec_subject_motion;
7082 camMetadata.update(DEVCAMDEBUG_AEC_SUBJECT_MOTION,
7083 &fwk_DevCamDebug_aec_subject_motion, 1);
7084 }
Samuel Ha68ba5172016-12-15 18:41:12 -08007085 // DevCamDebug metadata translateFromHalMetadata AWB
7086 IF_META_AVAILABLE(float, DevCamDebug_awb_r_gain,
7087 CAM_INTF_META_DEV_CAM_AWB_R_GAIN, metadata) {
7088 float fwk_DevCamDebug_awb_r_gain = *DevCamDebug_awb_r_gain;
7089 camMetadata.update(DEVCAMDEBUG_AWB_R_GAIN, &fwk_DevCamDebug_awb_r_gain, 1);
7090 }
7091 IF_META_AVAILABLE(float, DevCamDebug_awb_g_gain,
7092 CAM_INTF_META_DEV_CAM_AWB_G_GAIN, metadata) {
7093 float fwk_DevCamDebug_awb_g_gain = *DevCamDebug_awb_g_gain;
7094 camMetadata.update(DEVCAMDEBUG_AWB_G_GAIN, &fwk_DevCamDebug_awb_g_gain, 1);
7095 }
7096 IF_META_AVAILABLE(float, DevCamDebug_awb_b_gain,
7097 CAM_INTF_META_DEV_CAM_AWB_B_GAIN, metadata) {
7098 float fwk_DevCamDebug_awb_b_gain = *DevCamDebug_awb_b_gain;
7099 camMetadata.update(DEVCAMDEBUG_AWB_B_GAIN, &fwk_DevCamDebug_awb_b_gain, 1);
7100 }
7101 IF_META_AVAILABLE(int32_t, DevCamDebug_awb_cct,
7102 CAM_INTF_META_DEV_CAM_AWB_CCT, metadata) {
7103 int32_t fwk_DevCamDebug_awb_cct = *DevCamDebug_awb_cct;
7104 camMetadata.update(DEVCAMDEBUG_AWB_CCT, &fwk_DevCamDebug_awb_cct, 1);
7105 }
7106 IF_META_AVAILABLE(int32_t, DevCamDebug_awb_decision,
7107 CAM_INTF_META_DEV_CAM_AWB_DECISION, metadata) {
7108 int32_t fwk_DevCamDebug_awb_decision = *DevCamDebug_awb_decision;
7109 camMetadata.update(DEVCAMDEBUG_AWB_DECISION, &fwk_DevCamDebug_awb_decision, 1);
7110 }
7111 }
7112 // atrace_end(ATRACE_TAG_ALWAYS);
7113
Thierry Strudel3d639192016-09-09 11:52:26 -07007114 IF_META_AVAILABLE(uint32_t, frame_number, CAM_INTF_META_FRAME_NUMBER, metadata) {
7115 int64_t fwk_frame_number = *frame_number;
7116 camMetadata.update(ANDROID_SYNC_FRAME_NUMBER, &fwk_frame_number, 1);
7117 }
7118
7119 IF_META_AVAILABLE(cam_fps_range_t, float_range, CAM_INTF_PARM_FPS_RANGE, metadata) {
7120 int32_t fps_range[2];
7121 fps_range[0] = (int32_t)float_range->min_fps;
7122 fps_range[1] = (int32_t)float_range->max_fps;
7123 camMetadata.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
7124 fps_range, 2);
7125 LOGD("urgent Metadata : ANDROID_CONTROL_AE_TARGET_FPS_RANGE [%d, %d]",
7126 fps_range[0], fps_range[1]);
7127 }
7128
7129 IF_META_AVAILABLE(int32_t, expCompensation, CAM_INTF_PARM_EXPOSURE_COMPENSATION, metadata) {
7130 camMetadata.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, expCompensation, 1);
7131 }
7132
7133 IF_META_AVAILABLE(uint32_t, sceneMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
7134 int val = (uint8_t)lookupFwkName(SCENE_MODES_MAP,
7135 METADATA_MAP_SIZE(SCENE_MODES_MAP),
7136 *sceneMode);
7137 if (NAME_NOT_FOUND != val) {
7138 uint8_t fwkSceneMode = (uint8_t)val;
7139 camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkSceneMode, 1);
7140 LOGD("urgent Metadata : ANDROID_CONTROL_SCENE_MODE: %d",
7141 fwkSceneMode);
7142 }
7143 }
7144
7145 IF_META_AVAILABLE(uint32_t, ae_lock, CAM_INTF_PARM_AEC_LOCK, metadata) {
7146 uint8_t fwk_ae_lock = (uint8_t) *ae_lock;
7147 camMetadata.update(ANDROID_CONTROL_AE_LOCK, &fwk_ae_lock, 1);
7148 }
7149
7150 IF_META_AVAILABLE(uint32_t, awb_lock, CAM_INTF_PARM_AWB_LOCK, metadata) {
7151 uint8_t fwk_awb_lock = (uint8_t) *awb_lock;
7152 camMetadata.update(ANDROID_CONTROL_AWB_LOCK, &fwk_awb_lock, 1);
7153 }
7154
7155 IF_META_AVAILABLE(uint32_t, color_correct_mode, CAM_INTF_META_COLOR_CORRECT_MODE, metadata) {
7156 uint8_t fwk_color_correct_mode = (uint8_t) *color_correct_mode;
7157 camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, &fwk_color_correct_mode, 1);
7158 }
7159
7160 IF_META_AVAILABLE(cam_edge_application_t, edgeApplication,
7161 CAM_INTF_META_EDGE_MODE, metadata) {
7162 camMetadata.update(ANDROID_EDGE_MODE, &(edgeApplication->edge_mode), 1);
7163 }
7164
7165 IF_META_AVAILABLE(uint32_t, flashPower, CAM_INTF_META_FLASH_POWER, metadata) {
7166 uint8_t fwk_flashPower = (uint8_t) *flashPower;
7167 camMetadata.update(ANDROID_FLASH_FIRING_POWER, &fwk_flashPower, 1);
7168 }
7169
7170 IF_META_AVAILABLE(int64_t, flashFiringTime, CAM_INTF_META_FLASH_FIRING_TIME, metadata) {
7171 camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1);
7172 }
7173
7174 IF_META_AVAILABLE(int32_t, flashState, CAM_INTF_META_FLASH_STATE, metadata) {
7175 if (0 <= *flashState) {
7176 uint8_t fwk_flashState = (uint8_t) *flashState;
7177 if (!gCamCapability[mCameraId]->flash_available) {
7178 fwk_flashState = ANDROID_FLASH_STATE_UNAVAILABLE;
7179 }
7180 camMetadata.update(ANDROID_FLASH_STATE, &fwk_flashState, 1);
7181 }
7182 }
7183
7184 IF_META_AVAILABLE(uint32_t, flashMode, CAM_INTF_META_FLASH_MODE, metadata) {
7185 int val = lookupFwkName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP), *flashMode);
7186 if (NAME_NOT_FOUND != val) {
7187 uint8_t fwk_flashMode = (uint8_t)val;
7188 camMetadata.update(ANDROID_FLASH_MODE, &fwk_flashMode, 1);
7189 }
7190 }
7191
7192 IF_META_AVAILABLE(uint32_t, hotPixelMode, CAM_INTF_META_HOTPIXEL_MODE, metadata) {
7193 uint8_t fwk_hotPixelMode = (uint8_t) *hotPixelMode;
7194 camMetadata.update(ANDROID_HOT_PIXEL_MODE, &fwk_hotPixelMode, 1);
7195 }
7196
7197 IF_META_AVAILABLE(float, lensAperture, CAM_INTF_META_LENS_APERTURE, metadata) {
7198 camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1);
7199 }
7200
7201 IF_META_AVAILABLE(float, filterDensity, CAM_INTF_META_LENS_FILTERDENSITY, metadata) {
7202 camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1);
7203 }
7204
7205 IF_META_AVAILABLE(float, focalLength, CAM_INTF_META_LENS_FOCAL_LENGTH, metadata) {
7206 camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1);
7207 }
7208
7209 IF_META_AVAILABLE(uint32_t, opticalStab, CAM_INTF_META_LENS_OPT_STAB_MODE, metadata) {
7210 uint8_t fwk_opticalStab = (uint8_t) *opticalStab;
7211 camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &fwk_opticalStab, 1);
7212 }
7213
7214 IF_META_AVAILABLE(uint32_t, videoStab, CAM_INTF_META_VIDEO_STAB_MODE, metadata) {
7215 uint8_t fwk_videoStab = (uint8_t) *videoStab;
7216 LOGD("fwk_videoStab = %d", fwk_videoStab);
7217 camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwk_videoStab, 1);
7218 } else {
 7219 // Regardless of whether video stabilization is supported, CTS expects the EIS result to be non-NULL,
 7220 // so hardcode the video stabilization result to OFF mode.
7221 uint8_t fwkVideoStabMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
7222 camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwkVideoStabMode, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007223 LOGD("EIS result default to OFF mode");
Thierry Strudel3d639192016-09-09 11:52:26 -07007224 }
7225
7226 IF_META_AVAILABLE(uint32_t, noiseRedMode, CAM_INTF_META_NOISE_REDUCTION_MODE, metadata) {
7227 uint8_t fwk_noiseRedMode = (uint8_t) *noiseRedMode;
7228 camMetadata.update(ANDROID_NOISE_REDUCTION_MODE, &fwk_noiseRedMode, 1);
7229 }
7230
7231 IF_META_AVAILABLE(float, effectiveExposureFactor, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR, metadata) {
7232 camMetadata.update(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR, effectiveExposureFactor, 1);
7233 }
7234
Thierry Strudel3d639192016-09-09 11:52:26 -07007235 IF_META_AVAILABLE(cam_black_level_metadata_t, blackLevelAppliedPattern,
7236 CAM_INTF_META_BLACK_LEVEL_APPLIED_PATTERN, metadata) {
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007237 float fwk_blackLevelInd[BLACK_LEVEL_PATTERN_CNT];
Thierry Strudel3d639192016-09-09 11:52:26 -07007238
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007239 adjustBlackLevelForCFA(blackLevelAppliedPattern->cam_black_level, fwk_blackLevelInd,
7240 gCamCapability[mCameraId]->color_arrangement);
Thierry Strudel3d639192016-09-09 11:52:26 -07007241
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007242 LOGD("applied dynamic black level in RGGB order = %f %f %f %f",
Thierry Strudel3d639192016-09-09 11:52:26 -07007243 blackLevelAppliedPattern->cam_black_level[0],
7244 blackLevelAppliedPattern->cam_black_level[1],
7245 blackLevelAppliedPattern->cam_black_level[2],
7246 blackLevelAppliedPattern->cam_black_level[3]);
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007247 camMetadata.update(QCAMERA3_SENSOR_DYNAMIC_BLACK_LEVEL_PATTERN, fwk_blackLevelInd,
7248 BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007249
7250#ifndef USE_HAL_3_3
7251 // Update the ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL
Jason Lee4f3d96e2017-02-28 19:24:14 +05307252 // Need to convert the internal 14 bit black level to the sensor's 10 bit raw
Zhijun Heb753c672016-06-15 14:50:48 -07007253 // depth space (scale down by 2^(14-10) = 16).
Jason Lee4f3d96e2017-02-28 19:24:14 +05307254 fwk_blackLevelInd[0] /= 16.0;
7255 fwk_blackLevelInd[1] /= 16.0;
7256 fwk_blackLevelInd[2] /= 16.0;
7257 fwk_blackLevelInd[3] /= 16.0;
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007258 camMetadata.update(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL, fwk_blackLevelInd,
7259 BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007260#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07007261 }
7262
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007263#ifndef USE_HAL_3_3
 7264 // A fixed white level is used by the ISP/sensor
7265 camMetadata.update(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL,
7266 &gCamCapability[mCameraId]->white_level, 1);
7267#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07007268
7269 IF_META_AVAILABLE(cam_crop_region_t, hScalerCropRegion,
7270 CAM_INTF_META_SCALER_CROP_REGION, metadata) {
7271 int32_t scalerCropRegion[4];
7272 scalerCropRegion[0] = hScalerCropRegion->left;
7273 scalerCropRegion[1] = hScalerCropRegion->top;
7274 scalerCropRegion[2] = hScalerCropRegion->width;
7275 scalerCropRegion[3] = hScalerCropRegion->height;
7276
7277 // Adjust crop region from sensor output coordinate system to active
7278 // array coordinate system.
7279 mCropRegionMapper.toActiveArray(scalerCropRegion[0], scalerCropRegion[1],
7280 scalerCropRegion[2], scalerCropRegion[3]);
7281
7282 camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 4);
7283 }
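// Note: mCropRegionMapper translates between the (possibly binned or cropped) sensor
// output coordinate space used by the backend and the full active-pixel-array
// coordinate space that the framework expects for all region metadata.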
7284
7285 IF_META_AVAILABLE(int64_t, sensorExpTime, CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata) {
7286 LOGD("sensorExpTime = %lld", *sensorExpTime);
7287 camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1);
7288 }
7289
Shuzhen Wang6a1dd612017-08-05 15:03:53 -07007290 IF_META_AVAILABLE(float, expTimeBoost, CAM_INTF_META_EXP_TIME_BOOST, metadata) {
7291 LOGD("expTimeBoost = %f", *expTimeBoost);
7292 camMetadata.update(NEXUS_EXPERIMENTAL_2017_EXP_TIME_BOOST, expTimeBoost, 1);
7293 }
7294
Thierry Strudel3d639192016-09-09 11:52:26 -07007295 IF_META_AVAILABLE(int64_t, sensorFameDuration,
7296 CAM_INTF_META_SENSOR_FRAME_DURATION, metadata) {
7297 LOGD("sensorFameDuration = %lld", *sensorFameDuration);
7298 camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFameDuration, 1);
7299 }
7300
7301 IF_META_AVAILABLE(int64_t, sensorRollingShutterSkew,
7302 CAM_INTF_META_SENSOR_ROLLING_SHUTTER_SKEW, metadata) {
7303 LOGD("sensorRollingShutterSkew = %lld", *sensorRollingShutterSkew);
7304 camMetadata.update(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW,
7305 sensorRollingShutterSkew, 1);
7306 }
7307
7308 IF_META_AVAILABLE(int32_t, sensorSensitivity, CAM_INTF_META_SENSOR_SENSITIVITY, metadata) {
7309 LOGD("sensorSensitivity = %d", *sensorSensitivity);
7310 camMetadata.update(ANDROID_SENSOR_SENSITIVITY, sensorSensitivity, 1);
7311
7312 //calculate the noise profile based on sensitivity
7313 double noise_profile_S = computeNoiseModelEntryS(*sensorSensitivity);
7314 double noise_profile_O = computeNoiseModelEntryO(*sensorSensitivity);
7315 double noise_profile[2 * gCamCapability[mCameraId]->num_color_channels];
7316 for (int i = 0; i < 2 * gCamCapability[mCameraId]->num_color_channels; i += 2) {
7317 noise_profile[i] = noise_profile_S;
7318 noise_profile[i+1] = noise_profile_O;
7319 }
7320 LOGD("noise model entry (S, O) is (%f, %f)",
7321 noise_profile_S, noise_profile_O);
7322 camMetadata.update(ANDROID_SENSOR_NOISE_PROFILE, noise_profile,
7323 (size_t) (2 * gCamCapability[mCameraId]->num_color_channels));
7324 }
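// Background on the block above: ANDROID_SENSOR_NOISE_PROFILE models the noise
// variance of a raw pixel value x as approximately S * x + O per color channel;
// computeNoiseModelEntryS()/O() derive S and O from the analog sensitivity using
// per-sensor calibration constants, so every channel gets the same (S, O) pair here.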
7325
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007326#ifndef USE_HAL_3_3
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07007327 int32_t fwk_ispSensitivity = 100;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007328 IF_META_AVAILABLE(int32_t, ispSensitivity, CAM_INTF_META_ISP_SENSITIVITY, metadata) {
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07007329 fwk_ispSensitivity = (int32_t) *ispSensitivity;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007330 }
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07007331 IF_META_AVAILABLE(float, postStatsSensitivity, CAM_INTF_META_ISP_POST_STATS_SENSITIVITY, metadata) {
7332 fwk_ispSensitivity = (int32_t) (*postStatsSensitivity * fwk_ispSensitivity);
7333 }
7334 camMetadata.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &fwk_ispSensitivity, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007335#endif
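// Note on the block above: ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST is expressed
// in units where 100 means "no boost"; the reported value is the ISP sensitivity
// scaled by any post-stats digital gain applied after the raw stage.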
7336
Thierry Strudel3d639192016-09-09 11:52:26 -07007337 IF_META_AVAILABLE(uint32_t, shadingMode, CAM_INTF_META_SHADING_MODE, metadata) {
7338 uint8_t fwk_shadingMode = (uint8_t) *shadingMode;
7339 camMetadata.update(ANDROID_SHADING_MODE, &fwk_shadingMode, 1);
7340 }
7341
7342 IF_META_AVAILABLE(uint32_t, faceDetectMode, CAM_INTF_META_STATS_FACEDETECT_MODE, metadata) {
7343 int val = lookupFwkName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
7344 *faceDetectMode);
7345 if (NAME_NOT_FOUND != val) {
7346 uint8_t fwk_faceDetectMode = (uint8_t)val;
7347 camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &fwk_faceDetectMode, 1);
7348
7349 if (fwk_faceDetectMode != ANDROID_STATISTICS_FACE_DETECT_MODE_OFF) {
7350 IF_META_AVAILABLE(cam_face_detection_data_t, faceDetectionInfo,
7351 CAM_INTF_META_FACE_DETECTION, metadata) {
7352 uint8_t numFaces = MIN(
7353 faceDetectionInfo->num_faces_detected, MAX_ROI);
7354 int32_t faceIds[MAX_ROI];
7355 uint8_t faceScores[MAX_ROI];
7356 int32_t faceRectangles[MAX_ROI * 4];
7357 int32_t faceLandmarks[MAX_ROI * 6];
7358 size_t j = 0, k = 0;
7359
7360 for (size_t i = 0; i < numFaces; i++) {
7361 faceScores[i] = (uint8_t)faceDetectionInfo->faces[i].score;
 7362 // Adjust the face boundary from the sensor output coordinate system to the
 7363 // active array coordinate system.
Shuzhen Wang91c14e82017-08-21 17:56:57 -07007364 cam_rect_t rect = faceDetectionInfo->faces[i].face_boundary;
Thierry Strudel3d639192016-09-09 11:52:26 -07007365 mCropRegionMapper.toActiveArray(rect.left, rect.top,
7366 rect.width, rect.height);
7367
Shuzhen Wang91c14e82017-08-21 17:56:57 -07007368 convertToRegions(rect, faceRectangles+j, -1);
Thierry Strudel3d639192016-09-09 11:52:26 -07007369
Jason Lee8ce36fa2017-04-19 19:40:37 -07007370 LOGL("FD_DEBUG : Frame[%d] Face[%d] : top-left (%d, %d), "
7371 "bottom-right (%d, %d)",
7372 faceDetectionInfo->frame_id, i,
7373 faceRectangles[j + FACE_LEFT], faceRectangles[j + FACE_TOP],
7374 faceRectangles[j + FACE_RIGHT], faceRectangles[j + FACE_BOTTOM]);
7375
Thierry Strudel3d639192016-09-09 11:52:26 -07007376 j+= 4;
7377 }
7378 if (numFaces <= 0) {
7379 memset(faceIds, 0, sizeof(int32_t) * MAX_ROI);
7380 memset(faceScores, 0, sizeof(uint8_t) * MAX_ROI);
7381 memset(faceRectangles, 0, sizeof(int32_t) * MAX_ROI * 4);
7382 memset(faceLandmarks, 0, sizeof(int32_t) * MAX_ROI * 6);
7383 }
7384
7385 camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores,
7386 numFaces);
7387 camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES,
7388 faceRectangles, numFaces * 4U);
7389 if (fwk_faceDetectMode ==
7390 ANDROID_STATISTICS_FACE_DETECT_MODE_FULL) {
7391 IF_META_AVAILABLE(cam_face_landmarks_data_t, landmarks,
7392 CAM_INTF_META_FACE_LANDMARK, metadata) {
7393
7394 for (size_t i = 0; i < numFaces; i++) {
Shuzhen Wang91c14e82017-08-21 17:56:57 -07007395 cam_face_landmarks_info_t face_landmarks = landmarks->face_landmarks[i];
Thierry Strudel3d639192016-09-09 11:52:26 -07007396 // Map the landmark coordinates from the sensor output coordinate system to
 7397 // the active array coordinate system.
7398 mCropRegionMapper.toActiveArray(
Shuzhen Wang91c14e82017-08-21 17:56:57 -07007399 face_landmarks.left_eye_center.x,
7400 face_landmarks.left_eye_center.y);
Thierry Strudel3d639192016-09-09 11:52:26 -07007401 mCropRegionMapper.toActiveArray(
Shuzhen Wang91c14e82017-08-21 17:56:57 -07007402 face_landmarks.right_eye_center.x,
7403 face_landmarks.right_eye_center.y);
Thierry Strudel3d639192016-09-09 11:52:26 -07007404 mCropRegionMapper.toActiveArray(
Shuzhen Wang91c14e82017-08-21 17:56:57 -07007405 face_landmarks.mouth_center.x,
7406 face_landmarks.mouth_center.y);
Thierry Strudel3d639192016-09-09 11:52:26 -07007407
Shuzhen Wang91c14e82017-08-21 17:56:57 -07007408 convertLandmarks(face_landmarks, faceLandmarks+k);
Jason Lee8ce36fa2017-04-19 19:40:37 -07007409
7410 LOGL("FD_DEBUG LANDMARK : Frame[%d] Face[%d] : "
7411 "left-eye (%d, %d), right-eye (%d, %d), mouth (%d, %d)",
7412 faceDetectionInfo->frame_id, i,
7413 faceLandmarks[k + LEFT_EYE_X],
7414 faceLandmarks[k + LEFT_EYE_Y],
7415 faceLandmarks[k + RIGHT_EYE_X],
7416 faceLandmarks[k + RIGHT_EYE_Y],
7417 faceLandmarks[k + MOUTH_X],
7418 faceLandmarks[k + MOUTH_Y]);
7419
Thierry Strudel04e026f2016-10-10 11:27:36 -07007420 k+= TOTAL_LANDMARK_INDICES;
7421 }
7422 } else {
7423 for (size_t i = 0; i < numFaces; i++) {
7424 setInvalidLandmarks(faceLandmarks+k);
7425 k+= TOTAL_LANDMARK_INDICES;
Thierry Strudel3d639192016-09-09 11:52:26 -07007426 }
7427 }
7428
Jason Lee49619db2017-04-13 12:07:22 -07007429 for (size_t i = 0; i < numFaces; i++) {
7430 faceIds[i] = faceDetectionInfo->faces[i].face_id;
7431
7432 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : faceIds=%d",
7433 faceDetectionInfo->frame_id, i, faceIds[i]);
7434 }
7435
Thierry Strudel3d639192016-09-09 11:52:26 -07007436 camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces);
7437 camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS,
7438 faceLandmarks, numFaces * 6U);
Jason Lee49619db2017-04-13 12:07:22 -07007439 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08007440 IF_META_AVAILABLE(cam_face_blink_data_t, blinks,
7441 CAM_INTF_META_FACE_BLINK, metadata) {
7442 uint8_t detected[MAX_ROI];
7443 uint8_t degree[MAX_ROI * 2];
7444 for (size_t i = 0; i < numFaces; i++) {
7445 detected[i] = blinks->blink[i].blink_detected;
7446 degree[2 * i] = blinks->blink[i].left_blink;
7447 degree[2 * i + 1] = blinks->blink[i].right_blink;
Jason Lee8ce36fa2017-04-19 19:40:37 -07007448
Jason Lee49619db2017-04-13 12:07:22 -07007449 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : "
7450 "blink_detected=%d, leye_blink=%d, reye_blink=%d",
7451 faceDetectionInfo->frame_id, i, detected[i], degree[2 * i],
7452 degree[2 * i + 1]);
Thierry Strudel54dc9782017-02-15 12:12:10 -08007453 }
7454 camMetadata.update(QCAMERA3_STATS_BLINK_DETECTED,
7455 detected, numFaces);
7456 camMetadata.update(QCAMERA3_STATS_BLINK_DEGREE,
7457 degree, numFaces * 2);
7458 }
7459 IF_META_AVAILABLE(cam_face_smile_data_t, smiles,
7460 CAM_INTF_META_FACE_SMILE, metadata) {
7461 uint8_t degree[MAX_ROI];
7462 uint8_t confidence[MAX_ROI];
7463 for (size_t i = 0; i < numFaces; i++) {
7464 degree[i] = smiles->smile[i].smile_degree;
7465 confidence[i] = smiles->smile[i].smile_confidence;
Jason Lee8ce36fa2017-04-19 19:40:37 -07007466
Jason Lee49619db2017-04-13 12:07:22 -07007467 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : "
7468 "smile_degree=%d, smile_score=%d",
7469 faceDetectionInfo->frame_id, i, degree[i], confidence[i]);
Thierry Strudel54dc9782017-02-15 12:12:10 -08007470 }
7471 camMetadata.update(QCAMERA3_STATS_SMILE_DEGREE,
7472 degree, numFaces);
7473 camMetadata.update(QCAMERA3_STATS_SMILE_CONFIDENCE,
7474 confidence, numFaces);
7475 }
7476 IF_META_AVAILABLE(cam_face_gaze_data_t, gazes,
7477 CAM_INTF_META_FACE_GAZE, metadata) {
7478 int8_t angle[MAX_ROI];
7479 int32_t direction[MAX_ROI * 3];
7480 int8_t degree[MAX_ROI * 2];
7481 for (size_t i = 0; i < numFaces; i++) {
7482 angle[i] = gazes->gaze[i].gaze_angle;
7483 direction[3 * i] = gazes->gaze[i].updown_dir;
7484 direction[3 * i + 1] = gazes->gaze[i].leftright_dir;
7485 direction[3 * i + 2] = gazes->gaze[i].roll_dir;
7486 degree[2 * i] = gazes->gaze[i].left_right_gaze;
7487 degree[2 * i + 1] = gazes->gaze[i].top_bottom_gaze;
Jason Lee8ce36fa2017-04-19 19:40:37 -07007488
7489 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : gaze_angle=%d, "
7490 "updown_dir=%d, leftright_dir=%d,, roll_dir=%d, "
7491 "left_right_gaze=%d, top_bottom_gaze=%d",
7492 faceDetectionInfo->frame_id, i, angle[i],
7493 direction[3 * i], direction[3 * i + 1],
7494 direction[3 * i + 2],
7495 degree[2 * i], degree[2 * i + 1]);
Thierry Strudel54dc9782017-02-15 12:12:10 -08007496 }
7497 camMetadata.update(QCAMERA3_STATS_GAZE_ANGLE,
7498 (uint8_t *)angle, numFaces);
7499 camMetadata.update(QCAMERA3_STATS_GAZE_DIRECTION,
7500 direction, numFaces * 3);
7501 camMetadata.update(QCAMERA3_STATS_GAZE_DEGREE,
7502 (uint8_t *)degree, numFaces * 2);
7503 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007504 }
7505 }
7506 }
7507 }
7508
7509 IF_META_AVAILABLE(uint32_t, histogramMode, CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata) {
7510 uint8_t fwk_histogramMode = (uint8_t) *histogramMode;
Shuzhen Wang14415f52016-11-16 18:26:18 -08007511 int32_t histogramBins = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007512 camMetadata.update(QCAMERA3_HISTOGRAM_MODE, &fwk_histogramMode, 1);
Shuzhen Wang14415f52016-11-16 18:26:18 -08007513 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE, &fwk_histogramMode, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007514
Shuzhen Wang14415f52016-11-16 18:26:18 -08007515 IF_META_AVAILABLE(int32_t, histBins, CAM_INTF_META_STATS_HISTOGRAM_BINS, metadata) {
7516 histogramBins = *histBins;
7517 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS, &histogramBins, 1);
7518 }
7519
7520 if (fwk_histogramMode == QCAMERA3_HISTOGRAM_MODE_ON && histogramBins > 0) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007521 IF_META_AVAILABLE(cam_hist_stats_t, stats_data, CAM_INTF_META_HISTOGRAM, metadata) {
7522 // process histogram statistics info
Shuzhen Wang14415f52016-11-16 18:26:18 -08007523 int32_t* histogramData = NULL;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007524
7525 switch (stats_data->type) {
7526 case CAM_HISTOGRAM_TYPE_BAYER:
7527 switch (stats_data->bayer_stats.data_type) {
7528 case CAM_STATS_CHANNEL_GR:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007529 histogramData = (int32_t *)stats_data->bayer_stats.gr_stats.hist_buf;
7530 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007531 case CAM_STATS_CHANNEL_GB:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007532 histogramData = (int32_t *)stats_data->bayer_stats.gb_stats.hist_buf;
7533 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007534 case CAM_STATS_CHANNEL_B:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007535 histogramData = (int32_t *)stats_data->bayer_stats.b_stats.hist_buf;
7536 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007537 case CAM_STATS_CHANNEL_Y:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007538 case CAM_STATS_CHANNEL_ALL:
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007539 case CAM_STATS_CHANNEL_R:
7540 default:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007541 histogramData = (int32_t *)stats_data->bayer_stats.r_stats.hist_buf;
7542 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007543 }
7544 break;
7545 case CAM_HISTOGRAM_TYPE_YUV:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007546 histogramData = (int32_t *)stats_data->yuv_stats.hist_buf;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007547 break;
7548 }
7549
Shuzhen Wang14415f52016-11-16 18:26:18 -08007550 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM, histogramData, histogramBins);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007551 }
7552 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007553 }
7554
7555 IF_META_AVAILABLE(uint32_t, sharpnessMapMode,
7556 CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata) {
7557 uint8_t fwk_sharpnessMapMode = (uint8_t) *sharpnessMapMode;
7558 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &fwk_sharpnessMapMode, 1);
7559 }
7560
7561 IF_META_AVAILABLE(cam_sharpness_map_t, sharpnessMap,
7562 CAM_INTF_META_STATS_SHARPNESS_MAP, metadata) {
7563 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP, (int32_t *)sharpnessMap->sharpness,
7564 CAM_MAX_MAP_WIDTH * CAM_MAX_MAP_HEIGHT * 3);
7565 }
7566
7567 IF_META_AVAILABLE(cam_lens_shading_map_t, lensShadingMap,
7568 CAM_INTF_META_LENS_SHADING_MAP, metadata) {
7569 size_t map_height = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.height,
7570 CAM_MAX_SHADING_MAP_HEIGHT);
7571 size_t map_width = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.width,
7572 CAM_MAX_SHADING_MAP_WIDTH);
7573 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP,
7574 lensShadingMap->lens_shading, 4U * map_width * map_height);
7575 }
7576
7577 IF_META_AVAILABLE(uint32_t, toneMapMode, CAM_INTF_META_TONEMAP_MODE, metadata) {
7578 uint8_t fwk_toneMapMode = (uint8_t) *toneMapMode;
7579 camMetadata.update(ANDROID_TONEMAP_MODE, &fwk_toneMapMode, 1);
7580 }
7581
7582 IF_META_AVAILABLE(cam_rgb_tonemap_curves, tonemap, CAM_INTF_META_TONEMAP_CURVES, metadata) {
7583 //Populate CAM_INTF_META_TONEMAP_CURVES
7584 /* ch0 = G, ch 1 = B, ch 2 = R*/
7585 if (tonemap->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
7586 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
7587 tonemap->tonemap_points_cnt,
7588 CAM_MAX_TONEMAP_CURVE_SIZE);
7589 tonemap->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
7590 }
7591
7592 camMetadata.update(ANDROID_TONEMAP_CURVE_GREEN,
7593 &tonemap->curves[0].tonemap_points[0][0],
7594 tonemap->tonemap_points_cnt * 2);
7595
7596 camMetadata.update(ANDROID_TONEMAP_CURVE_BLUE,
7597 &tonemap->curves[1].tonemap_points[0][0],
7598 tonemap->tonemap_points_cnt * 2);
7599
7600 camMetadata.update(ANDROID_TONEMAP_CURVE_RED,
7601 &tonemap->curves[2].tonemap_points[0][0],
7602 tonemap->tonemap_points_cnt * 2);
7603 }
7604
7605 IF_META_AVAILABLE(cam_color_correct_gains_t, colorCorrectionGains,
7606 CAM_INTF_META_COLOR_CORRECT_GAINS, metadata) {
7607 camMetadata.update(ANDROID_COLOR_CORRECTION_GAINS, colorCorrectionGains->gains,
7608 CC_GAIN_MAX);
7609 }
7610
7611 IF_META_AVAILABLE(cam_color_correct_matrix_t, colorCorrectionMatrix,
7612 CAM_INTF_META_COLOR_CORRECT_TRANSFORM, metadata) {
7613 camMetadata.update(ANDROID_COLOR_CORRECTION_TRANSFORM,
7614 (camera_metadata_rational_t *)(void *)colorCorrectionMatrix->transform_matrix,
7615 CC_MATRIX_COLS * CC_MATRIX_ROWS);
7616 }
7617
7618 IF_META_AVAILABLE(cam_profile_tone_curve, toneCurve,
7619 CAM_INTF_META_PROFILE_TONE_CURVE, metadata) {
7620 if (toneCurve->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
7621 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
7622 toneCurve->tonemap_points_cnt,
7623 CAM_MAX_TONEMAP_CURVE_SIZE);
7624 toneCurve->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
7625 }
7626 camMetadata.update(ANDROID_SENSOR_PROFILE_TONE_CURVE,
7627 (float*)toneCurve->curve.tonemap_points,
7628 toneCurve->tonemap_points_cnt * 2);
7629 }
7630
7631 IF_META_AVAILABLE(cam_color_correct_gains_t, predColorCorrectionGains,
7632 CAM_INTF_META_PRED_COLOR_CORRECT_GAINS, metadata) {
7633 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_GAINS,
7634 predColorCorrectionGains->gains, 4);
7635 }
7636
7637 IF_META_AVAILABLE(cam_color_correct_matrix_t, predColorCorrectionMatrix,
7638 CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM, metadata) {
7639 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
7640 (camera_metadata_rational_t *)(void *)predColorCorrectionMatrix->transform_matrix,
7641 CC_MATRIX_ROWS * CC_MATRIX_COLS);
7642 }
7643
7644 IF_META_AVAILABLE(float, otpWbGrGb, CAM_INTF_META_OTP_WB_GRGB, metadata) {
7645 camMetadata.update(ANDROID_SENSOR_GREEN_SPLIT, otpWbGrGb, 1);
7646 }
7647
7648 IF_META_AVAILABLE(uint32_t, blackLevelLock, CAM_INTF_META_BLACK_LEVEL_LOCK, metadata) {
7649 uint8_t fwk_blackLevelLock = (uint8_t) *blackLevelLock;
7650 camMetadata.update(ANDROID_BLACK_LEVEL_LOCK, &fwk_blackLevelLock, 1);
7651 }
7652
7653 IF_META_AVAILABLE(uint32_t, sceneFlicker, CAM_INTF_META_SCENE_FLICKER, metadata) {
7654 uint8_t fwk_sceneFlicker = (uint8_t) *sceneFlicker;
7655 camMetadata.update(ANDROID_STATISTICS_SCENE_FLICKER, &fwk_sceneFlicker, 1);
7656 }
7657
7658 IF_META_AVAILABLE(uint32_t, effectMode, CAM_INTF_PARM_EFFECT, metadata) {
7659 int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
7660 *effectMode);
7661 if (NAME_NOT_FOUND != val) {
7662 uint8_t fwk_effectMode = (uint8_t)val;
7663 camMetadata.update(ANDROID_CONTROL_EFFECT_MODE, &fwk_effectMode, 1);
7664 }
7665 }
7666
7667 IF_META_AVAILABLE(cam_test_pattern_data_t, testPatternData,
7668 CAM_INTF_META_TEST_PATTERN_DATA, metadata) {
7669 int32_t fwk_testPatternMode = lookupFwkName(TEST_PATTERN_MAP,
7670 METADATA_MAP_SIZE(TEST_PATTERN_MAP), testPatternData->mode);
7671 if (NAME_NOT_FOUND != fwk_testPatternMode) {
7672 camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &fwk_testPatternMode, 1);
7673 }
7674 int32_t fwk_testPatternData[4];
7675 fwk_testPatternData[0] = testPatternData->r;
7676 fwk_testPatternData[3] = testPatternData->b;
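        // Indices 1 and 2 hold the two green channels; their order depends on the
        // sensor's color filter arrangement, handled in the switch below.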
7677 switch (gCamCapability[mCameraId]->color_arrangement) {
7678 case CAM_FILTER_ARRANGEMENT_RGGB:
7679 case CAM_FILTER_ARRANGEMENT_GRBG:
7680 fwk_testPatternData[1] = testPatternData->gr;
7681 fwk_testPatternData[2] = testPatternData->gb;
7682 break;
7683 case CAM_FILTER_ARRANGEMENT_GBRG:
7684 case CAM_FILTER_ARRANGEMENT_BGGR:
7685 fwk_testPatternData[2] = testPatternData->gr;
7686 fwk_testPatternData[1] = testPatternData->gb;
7687 break;
7688 default:
7689 LOGE("color arrangement %d is not supported",
7690 gCamCapability[mCameraId]->color_arrangement);
7691 break;
7692 }
7693 camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_DATA, fwk_testPatternData, 4);
7694 }
7695
7696 IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, metadata) {
7697 camMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
7698 }
7699
7700 IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, metadata) {
7701 String8 str((const char *)gps_methods);
7702 camMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
7703 }
7704
7705 IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, metadata) {
7706 camMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
7707 }
7708
7709 IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, metadata) {
7710 camMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
7711 }
7712
7713 IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, metadata) {
7714 uint8_t fwk_jpeg_quality = (uint8_t) *jpeg_quality;
7715 camMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
7716 }
7717
7718 IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, metadata) {
7719 uint8_t fwk_thumb_quality = (uint8_t) *thumb_quality;
7720 camMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
7721 }
7722
7723 IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, metadata) {
7724 int32_t fwk_thumb_size[2];
7725 fwk_thumb_size[0] = thumb_size->width;
7726 fwk_thumb_size[1] = thumb_size->height;
7727 camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
7728 }
7729
Shuzhen Wang2fea89e2017-05-08 17:02:15 -07007730 // Skip reprocess metadata if there is no input stream.
7731 if (mInputStreamInfo.dim.width > 0 && mInputStreamInfo.dim.height > 0) {
7732 IF_META_AVAILABLE(int32_t, privateData, CAM_INTF_META_PRIVATE_DATA, metadata) {
7733 camMetadata.update(QCAMERA3_PRIVATEDATA_REPROCESS,
7734 privateData,
7735 MAX_METADATA_PRIVATE_PAYLOAD_SIZE_IN_BYTES / sizeof(int32_t));
7736 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007737 }
7738
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007739 IF_META_AVAILABLE(int32_t, meteringMode, CAM_INTF_PARM_AEC_ALGO_TYPE, metadata) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08007740 camMetadata.update(QCAMERA3_EXPOSURE_METER,
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007741 meteringMode, 1);
7742 }
7743
Thierry Strudel54dc9782017-02-15 12:12:10 -08007744 IF_META_AVAILABLE(cam_asd_hdr_scene_data_t, hdr_scene_data,
7745 CAM_INTF_META_ASD_HDR_SCENE_DATA, metadata) {
7746 LOGD("hdr_scene_data: %d %f\n",
7747 hdr_scene_data->is_hdr_scene, hdr_scene_data->hdr_confidence);
7748 uint8_t isHdr = hdr_scene_data->is_hdr_scene;
7749 float isHdrConfidence = hdr_scene_data->hdr_confidence;
7750 camMetadata.update(QCAMERA3_STATS_IS_HDR_SCENE,
7751 &isHdr, 1);
7752 camMetadata.update(QCAMERA3_STATS_IS_HDR_SCENE_CONFIDENCE,
7753 &isHdrConfidence, 1);
7754 }
7755
7756
7757
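    // The tuning blob below is serialized as six uint32 header fields (version plus the
    // sensor/VFE/CPP/CAC/mod3 payload sizes) followed by the variable-length payloads.
    // It is published as int32 words, so the accumulated length is assumed to stay
    // 4-byte aligned.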
Thierry Strudel3d639192016-09-09 11:52:26 -07007758 if (metadata->is_tuning_params_valid) {
7759 uint8_t tuning_meta_data_blob[sizeof(tuning_params_t)];
7760 uint8_t *data = (uint8_t *)&tuning_meta_data_blob[0];
7761 metadata->tuning_params.tuning_data_version = TUNING_DATA_VERSION;
7762
7763
7764 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_data_version),
7765 sizeof(uint32_t));
7766 data += sizeof(uint32_t);
7767
7768 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_sensor_data_size),
7769 sizeof(uint32_t));
7770 LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
7771 data += sizeof(uint32_t);
7772
7773 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_vfe_data_size),
7774 sizeof(uint32_t));
7775 LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
7776 data += sizeof(uint32_t);
7777
7778 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cpp_data_size),
7779 sizeof(uint32_t));
7780 LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
7781 data += sizeof(uint32_t);
7782
7783 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cac_data_size),
7784 sizeof(uint32_t));
7785 LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
7786 data += sizeof(uint32_t);
7787
7788 metadata->tuning_params.tuning_mod3_data_size = 0;
7789 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_mod3_data_size),
7790 sizeof(uint32_t));
7791 LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
7792 data += sizeof(uint32_t);
7793
7794 size_t count = MIN(metadata->tuning_params.tuning_sensor_data_size,
7795 TUNING_SENSOR_DATA_MAX);
7796 memcpy(data, ((uint8_t *)&metadata->tuning_params.data),
7797 count);
7798 data += count;
7799
7800 count = MIN(metadata->tuning_params.tuning_vfe_data_size,
7801 TUNING_VFE_DATA_MAX);
7802 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_VFE_DATA_OFFSET]),
7803 count);
7804 data += count;
7805
7806 count = MIN(metadata->tuning_params.tuning_cpp_data_size,
7807 TUNING_CPP_DATA_MAX);
7808 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CPP_DATA_OFFSET]),
7809 count);
7810 data += count;
7811
7812 count = MIN(metadata->tuning_params.tuning_cac_data_size,
7813 TUNING_CAC_DATA_MAX);
7814 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CAC_DATA_OFFSET]),
7815 count);
7816 data += count;
7817
7818 camMetadata.update(QCAMERA3_TUNING_META_DATA_BLOB,
7819 (int32_t *)(void *)tuning_meta_data_blob,
7820 (size_t)(data-tuning_meta_data_blob) / sizeof(uint32_t));
7821 }
7822
7823 IF_META_AVAILABLE(cam_neutral_col_point_t, neuColPoint,
7824 CAM_INTF_META_NEUTRAL_COL_POINT, metadata) {
7825 camMetadata.update(ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
7826 (camera_metadata_rational_t *)(void *)neuColPoint->neutral_col_point,
7827 NEUTRAL_COL_POINTS);
7828 }
7829
7830 IF_META_AVAILABLE(uint32_t, shadingMapMode, CAM_INTF_META_LENS_SHADING_MAP_MODE, metadata) {
7831 uint8_t fwk_shadingMapMode = (uint8_t) *shadingMapMode;
7832 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &fwk_shadingMapMode, 1);
7833 }
7834
7835 IF_META_AVAILABLE(cam_area_t, hAeRegions, CAM_INTF_META_AEC_ROI, metadata) {
7836 int32_t aeRegions[REGIONS_TUPLE_COUNT];
 7837        // Adjust the AE region from the sensor output coordinate system to the
 7838        // active array coordinate system.
Shuzhen Wang91c14e82017-08-21 17:56:57 -07007839 cam_rect_t hAeRect = hAeRegions->rect;
7840 mCropRegionMapper.toActiveArray(hAeRect.left, hAeRect.top,
7841 hAeRect.width, hAeRect.height);
Thierry Strudel3d639192016-09-09 11:52:26 -07007842
Shuzhen Wang91c14e82017-08-21 17:56:57 -07007843 convertToRegions(hAeRect, aeRegions, hAeRegions->weight);
Thierry Strudel3d639192016-09-09 11:52:26 -07007844 camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions,
7845 REGIONS_TUPLE_COUNT);
7846 LOGD("Metadata : ANDROID_CONTROL_AE_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
7847 aeRegions[0], aeRegions[1], aeRegions[2], aeRegions[3],
Shuzhen Wang91c14e82017-08-21 17:56:57 -07007848 hAeRect.left, hAeRect.top, hAeRect.width,
7849 hAeRect.height);
Thierry Strudel3d639192016-09-09 11:52:26 -07007850 }
7851
Shuzhen Wang181c57b2017-07-21 11:39:44 -07007852 if (!pendingRequest.focusStateSent) {
7853 if (pendingRequest.focusStateValid) {
7854 camMetadata.update(ANDROID_CONTROL_AF_STATE, &pendingRequest.focusState, 1);
7855 LOGD("Metadata : ANDROID_CONTROL_AF_STATE %u", pendingRequest.focusState);
Shuzhen Wang0cb8cdf2016-07-14 11:56:49 -07007856 } else {
Shuzhen Wang181c57b2017-07-21 11:39:44 -07007857 IF_META_AVAILABLE(uint32_t, afState, CAM_INTF_META_AF_STATE, metadata) {
7858 uint8_t fwk_afState = (uint8_t) *afState;
7859 camMetadata.update(ANDROID_CONTROL_AF_STATE, &fwk_afState, 1);
7860 LOGD("Metadata : ANDROID_CONTROL_AF_STATE %u", *afState);
7861 }
Shuzhen Wang0cb8cdf2016-07-14 11:56:49 -07007862 }
7863 }
7864
Thierry Strudel3d639192016-09-09 11:52:26 -07007865 IF_META_AVAILABLE(float, focusDistance, CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata) {
7866 camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1);
7867 }
7868
7869 IF_META_AVAILABLE(float, focusRange, CAM_INTF_META_LENS_FOCUS_RANGE, metadata) {
7870 camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 2);
7871 }
7872
7873 IF_META_AVAILABLE(cam_af_lens_state_t, lensState, CAM_INTF_META_LENS_STATE, metadata) {
7874 uint8_t fwk_lensState = *lensState;
7875 camMetadata.update(ANDROID_LENS_STATE , &fwk_lensState, 1);
7876 }
7877
Thierry Strudel3d639192016-09-09 11:52:26 -07007878 IF_META_AVAILABLE(uint32_t, hal_ab_mode, CAM_INTF_PARM_ANTIBANDING, metadata) {
Shuzhen Wangf6890e02016-08-12 14:28:54 -07007879 uint32_t ab_mode = *hal_ab_mode;
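        // The framework antibanding enum has no 50Hz/60Hz-specific AUTO values, so
        // collapse both backend auto variants to plain AUTO before the lookup.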
7880 if (ab_mode == CAM_ANTIBANDING_MODE_AUTO_60HZ ||
7881 ab_mode == CAM_ANTIBANDING_MODE_AUTO_50HZ) {
7882 ab_mode = CAM_ANTIBANDING_MODE_AUTO;
7883 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007884 int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
Shuzhen Wangf6890e02016-08-12 14:28:54 -07007885 ab_mode);
Thierry Strudel3d639192016-09-09 11:52:26 -07007886 if (NAME_NOT_FOUND != val) {
7887 uint8_t fwk_ab_mode = (uint8_t)val;
7888 camMetadata.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &fwk_ab_mode, 1);
7889 }
7890 }
7891
7892 IF_META_AVAILABLE(uint32_t, bestshotMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
7893 int val = lookupFwkName(SCENE_MODES_MAP,
7894 METADATA_MAP_SIZE(SCENE_MODES_MAP), *bestshotMode);
7895 if (NAME_NOT_FOUND != val) {
7896 uint8_t fwkBestshotMode = (uint8_t)val;
7897 camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkBestshotMode, 1);
7898 LOGD("Metadata : ANDROID_CONTROL_SCENE_MODE");
7899 } else {
7900 LOGH("Metadata not found : ANDROID_CONTROL_SCENE_MODE");
7901 }
7902 }
7903
7904 IF_META_AVAILABLE(uint32_t, mode, CAM_INTF_META_MODE, metadata) {
7905 uint8_t fwk_mode = (uint8_t) *mode;
7906 camMetadata.update(ANDROID_CONTROL_MODE, &fwk_mode, 1);
7907 }
7908
 7909    /* Constant metadata values to be updated */
7910 uint8_t hotPixelModeFast = ANDROID_HOT_PIXEL_MODE_FAST;
7911 camMetadata.update(ANDROID_HOT_PIXEL_MODE, &hotPixelModeFast, 1);
7912
7913 uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
7914 camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
7915
7916 int32_t hotPixelMap[2];
7917 camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP, &hotPixelMap[0], 0);
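    // The hot pixel map mode is reported as OFF above, so an empty map (count 0) is
    // published for the hot pixel map tag.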
7918
7919 // CDS
7920 IF_META_AVAILABLE(int32_t, cds, CAM_INTF_PARM_CDS_MODE, metadata) {
7921 camMetadata.update(QCAMERA3_CDS_MODE, cds, 1);
7922 }
7923
Thierry Strudel04e026f2016-10-10 11:27:36 -07007924 IF_META_AVAILABLE(cam_sensor_hdr_type_t, vhdr, CAM_INTF_PARM_SENSOR_HDR, metadata) {
7925 int32_t fwk_hdr;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007926 int8_t curr_hdr_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) != 0);
Thierry Strudel04e026f2016-10-10 11:27:36 -07007927 if(*vhdr == CAM_SENSOR_HDR_OFF) {
7928 fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_OFF;
7929 } else {
7930 fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_ON;
7931 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08007932
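        // Record video-HDR on/off transitions in mCurrFeatureState so toggles show up
        // in the profiling log below.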
7933 if(fwk_hdr != curr_hdr_state) {
7934 LOGH("PROFILE_META_HDR_TOGGLED value=%d", fwk_hdr);
7935 if(fwk_hdr)
7936 mCurrFeatureState |= CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
7937 else
7938 mCurrFeatureState &= ~CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
7939 }
Thierry Strudel04e026f2016-10-10 11:27:36 -07007940 camMetadata.update(QCAMERA3_VIDEO_HDR_MODE, &fwk_hdr, 1);
7941 }
7942
Thierry Strudel54dc9782017-02-15 12:12:10 -08007943 //binning correction
7944 IF_META_AVAILABLE(cam_binning_correction_mode_t, bin_correction,
7945 CAM_INTF_META_BINNING_CORRECTION_MODE, metadata) {
7946 int32_t fwk_bin_mode = (int32_t) *bin_correction;
7947 camMetadata.update(QCAMERA3_BINNING_CORRECTION_MODE, &fwk_bin_mode, 1);
7948 }
7949
Thierry Strudel04e026f2016-10-10 11:27:36 -07007950 IF_META_AVAILABLE(cam_ir_mode_type_t, ir, CAM_INTF_META_IR_MODE, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007951 int32_t fwk_ir = (int32_t) *ir;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007952 int8_t curr_ir_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_IR ) != 0);
7953 int8_t is_ir_on = 0;
7954
7955 (fwk_ir > 0) ? (is_ir_on = 1) : (is_ir_on = 0) ;
7956 if(is_ir_on != curr_ir_state) {
7957 LOGH("PROFILE_META_IR_TOGGLED value=%d", fwk_ir);
7958 if(is_ir_on)
7959 mCurrFeatureState |= CAM_QCOM_FEATURE_IR;
7960 else
7961 mCurrFeatureState &= ~CAM_QCOM_FEATURE_IR;
7962 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007963 camMetadata.update(QCAMERA3_IR_MODE, &fwk_ir, 1);
Thierry Strudel04e026f2016-10-10 11:27:36 -07007964 }
7965
Thierry Strudel269c81a2016-10-12 12:13:59 -07007966 // AEC SPEED
7967 IF_META_AVAILABLE(float, aec, CAM_INTF_META_AEC_CONVERGENCE_SPEED, metadata) {
7968 camMetadata.update(QCAMERA3_AEC_CONVERGENCE_SPEED, aec, 1);
7969 }
7970
7971 // AWB SPEED
7972 IF_META_AVAILABLE(float, awb, CAM_INTF_META_AWB_CONVERGENCE_SPEED, metadata) {
7973 camMetadata.update(QCAMERA3_AWB_CONVERGENCE_SPEED, awb, 1);
7974 }
7975
Thierry Strudel3d639192016-09-09 11:52:26 -07007976 // TNR
7977 IF_META_AVAILABLE(cam_denoise_param_t, tnr, CAM_INTF_PARM_TEMPORAL_DENOISE, metadata) {
7978 uint8_t tnr_enable = tnr->denoise_enable;
7979 int32_t tnr_process_type = (int32_t)tnr->process_plates;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007980 int8_t curr_tnr_state = ((mCurrFeatureState & CAM_QTI_FEATURE_SW_TNR) != 0) ;
7981 int8_t is_tnr_on = 0;
7982
7983 (tnr_enable > 0) ? (is_tnr_on = 1) : (is_tnr_on = 0);
7984 if(is_tnr_on != curr_tnr_state) {
7985 LOGH("PROFILE_META_TNR_TOGGLED value=%d", tnr_enable);
7986 if(is_tnr_on)
7987 mCurrFeatureState |= CAM_QTI_FEATURE_SW_TNR;
7988 else
7989 mCurrFeatureState &= ~CAM_QTI_FEATURE_SW_TNR;
7990 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007991
7992 camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
7993 camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
7994 }
7995
7996 // Reprocess crop data
7997 IF_META_AVAILABLE(cam_crop_data_t, crop_data, CAM_INTF_META_CROP_DATA, metadata) {
7998 uint8_t cnt = crop_data->num_of_streams;
7999 if ( (0 >= cnt) || (cnt > MAX_NUM_STREAMS)) {
8000 // mm-qcamera-daemon only posts crop_data for streams
 8001            // not linked to pproc, so the absence of valid crop metadata
 8002            // is not necessarily an error case.
8003 LOGD("No valid crop metadata entries");
8004 } else {
8005 uint32_t reproc_stream_id;
8006 if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
8007 LOGD("No reprocessible stream found, ignore crop data");
8008 } else {
8009 int rc = NO_ERROR;
8010 Vector<int32_t> roi_map;
8011 int32_t *crop = new int32_t[cnt*4];
8012 if (NULL == crop) {
8013 rc = NO_MEMORY;
8014 }
8015 if (NO_ERROR == rc) {
8016 int32_t streams_found = 0;
8017 for (size_t i = 0; i < cnt; i++) {
8018 if (crop_data->crop_info[i].stream_id == reproc_stream_id) {
8019 if (pprocDone) {
8020 // HAL already does internal reprocessing,
8021 // either via reprocessing before JPEG encoding,
8022 // or offline postprocessing for pproc bypass case.
8023 crop[0] = 0;
8024 crop[1] = 0;
8025 crop[2] = mInputStreamInfo.dim.width;
8026 crop[3] = mInputStreamInfo.dim.height;
8027 } else {
8028 crop[0] = crop_data->crop_info[i].crop.left;
8029 crop[1] = crop_data->crop_info[i].crop.top;
8030 crop[2] = crop_data->crop_info[i].crop.width;
8031 crop[3] = crop_data->crop_info[i].crop.height;
8032 }
8033 roi_map.add(crop_data->crop_info[i].roi_map.left);
8034 roi_map.add(crop_data->crop_info[i].roi_map.top);
8035 roi_map.add(crop_data->crop_info[i].roi_map.width);
8036 roi_map.add(crop_data->crop_info[i].roi_map.height);
8037 streams_found++;
8038 LOGD("Adding reprocess crop data for stream %dx%d, %dx%d",
8039 crop[0], crop[1], crop[2], crop[3]);
8040 LOGD("Adding reprocess crop roi map for stream %dx%d, %dx%d",
8041 crop_data->crop_info[i].roi_map.left,
8042 crop_data->crop_info[i].roi_map.top,
8043 crop_data->crop_info[i].roi_map.width,
8044 crop_data->crop_info[i].roi_map.height);
8045 break;
8046
8047 }
8048 }
8049 camMetadata.update(QCAMERA3_CROP_COUNT_REPROCESS,
8050 &streams_found, 1);
8051 camMetadata.update(QCAMERA3_CROP_REPROCESS,
8052 crop, (size_t)(streams_found * 4));
8053 if (roi_map.array()) {
8054 camMetadata.update(QCAMERA3_CROP_ROI_MAP_REPROCESS,
8055 roi_map.array(), roi_map.size());
8056 }
8057 }
8058 if (crop) {
8059 delete [] crop;
8060 }
8061 }
8062 }
8063 }
8064
8065 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
 8066        // Regardless of whether CAC is supported or not, CTS expects the CAC result to be
 8067        // non-NULL, so hardcode the CAC result to OFF mode.
8068 uint8_t fwkCacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
8069 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &fwkCacMode, 1);
8070 } else {
8071 IF_META_AVAILABLE(cam_aberration_mode_t, cacMode, CAM_INTF_PARM_CAC, metadata) {
8072 int val = lookupFwkName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
8073 *cacMode);
8074 if (NAME_NOT_FOUND != val) {
8075 uint8_t resultCacMode = (uint8_t)val;
 8076                // Check whether the CAC result from the callback matches the framework-set CAC mode.
 8077                // If not, report the CAC mode that came in the corresponding request.
Shuzhen Wang181c57b2017-07-21 11:39:44 -07008078 if (pendingRequest.fwkCacMode != resultCacMode) {
8079 resultCacMode = pendingRequest.fwkCacMode;
Thierry Strudel3d639192016-09-09 11:52:26 -07008080 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08008081 //Check if CAC is disabled by property
8082 if (m_cacModeDisabled) {
8083 resultCacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
8084 }
8085
Shuzhen Wang181c57b2017-07-21 11:39:44 -07008086 LOGD("fwk_cacMode=%d resultCacMode=%d", pendingRequest.fwkCacMode, resultCacMode);
Thierry Strudel3d639192016-09-09 11:52:26 -07008087 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &resultCacMode, 1);
8088 } else {
8089 LOGE("Invalid CAC camera parameter: %d", *cacMode);
8090 }
8091 }
8092 }
8093
8094 // Post blob of cam_cds_data through vendor tag.
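    // Only the CDS flag of the reprocessible stream is forwarded: the blob is collapsed
    // into a single-stream override before being posted to the vendor tag.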
8095 IF_META_AVAILABLE(cam_cds_data_t, cdsInfo, CAM_INTF_META_CDS_DATA, metadata) {
8096 uint8_t cnt = cdsInfo->num_of_streams;
8097 cam_cds_data_t cdsDataOverride;
8098 memset(&cdsDataOverride, 0, sizeof(cdsDataOverride));
8099 cdsDataOverride.session_cds_enable = cdsInfo->session_cds_enable;
8100 cdsDataOverride.num_of_streams = 1;
8101 if ((0 < cnt) && (cnt <= MAX_NUM_STREAMS)) {
8102 uint32_t reproc_stream_id;
8103 if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
8104 LOGD("No reprocessible stream found, ignore cds data");
8105 } else {
8106 for (size_t i = 0; i < cnt; i++) {
8107 if (cdsInfo->cds_info[i].stream_id ==
8108 reproc_stream_id) {
8109 cdsDataOverride.cds_info[0].cds_enable =
8110 cdsInfo->cds_info[i].cds_enable;
8111 break;
8112 }
8113 }
8114 }
8115 } else {
8116 LOGD("Invalid stream count %d in CDS_DATA", cnt);
8117 }
8118 camMetadata.update(QCAMERA3_CDS_INFO,
8119 (uint8_t *)&cdsDataOverride,
8120 sizeof(cam_cds_data_t));
8121 }
8122
8123 // Ldaf calibration data
8124 if (!mLdafCalibExist) {
8125 IF_META_AVAILABLE(uint32_t, ldafCalib,
8126 CAM_INTF_META_LDAF_EXIF, metadata) {
8127 mLdafCalibExist = true;
8128 mLdafCalib[0] = ldafCalib[0];
8129 mLdafCalib[1] = ldafCalib[1];
8130 LOGD("ldafCalib[0] is %d, ldafCalib[1] is %d",
8131 ldafCalib[0], ldafCalib[1]);
8132 }
8133 }
8134
Thierry Strudel54dc9782017-02-15 12:12:10 -08008135 // EXIF debug data through vendor tag
8136 /*
8137 * Mobicat Mask can assume 3 values:
8138 * 1 refers to Mobicat data,
8139 * 2 refers to Stats Debug and Exif Debug Data
8140 * 3 refers to Mobicat and Stats Debug Data
8141 * We want to make sure that we are sending Exif debug data
8142 * only when Mobicat Mask is 2.
8143 */
8144 if ((mExifParams.debug_params != NULL) && (getMobicatMask() == 2)) {
8145 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB,
8146 (uint8_t *)(void *)mExifParams.debug_params,
8147 sizeof(mm_jpeg_debug_exif_params_t));
8148 }
8149
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008150 // Reprocess and DDM debug data through vendor tag
8151 cam_reprocess_info_t repro_info;
8152 memset(&repro_info, 0, sizeof(cam_reprocess_info_t));
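    // Gather the per-stage snapshot crop, focal-length-ratio, flip and rotation info
    // into one struct so it can be posted below as a single vendor-tag blob.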
Thierry Strudel3d639192016-09-09 11:52:26 -07008153 IF_META_AVAILABLE(cam_stream_crop_info_t, sensorCropInfo,
8154 CAM_INTF_META_SNAP_CROP_INFO_SENSOR, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008155 memcpy(&(repro_info.sensor_crop_info), sensorCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008156 }
8157 IF_META_AVAILABLE(cam_stream_crop_info_t, camifCropInfo,
8158 CAM_INTF_META_SNAP_CROP_INFO_CAMIF, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008159 memcpy(&(repro_info.camif_crop_info), camifCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008160 }
8161 IF_META_AVAILABLE(cam_stream_crop_info_t, ispCropInfo,
8162 CAM_INTF_META_SNAP_CROP_INFO_ISP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008163 memcpy(&(repro_info.isp_crop_info), ispCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008164 }
8165 IF_META_AVAILABLE(cam_stream_crop_info_t, cppCropInfo,
8166 CAM_INTF_META_SNAP_CROP_INFO_CPP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008167 memcpy(&(repro_info.cpp_crop_info), cppCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008168 }
8169 IF_META_AVAILABLE(cam_focal_length_ratio_t, ratio,
8170 CAM_INTF_META_AF_FOCAL_LENGTH_RATIO, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008171 memcpy(&(repro_info.af_focal_length_ratio), ratio, sizeof(cam_focal_length_ratio_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008172 }
8173 IF_META_AVAILABLE(int32_t, flip, CAM_INTF_PARM_FLIP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008174 memcpy(&(repro_info.pipeline_flip), flip, sizeof(int32_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008175 }
8176 IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
8177 CAM_INTF_PARM_ROTATION, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008178 memcpy(&(repro_info.rotation_info), rotationInfo, sizeof(cam_rotation_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008179 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008180 IF_META_AVAILABLE(cam_area_t, afRoi, CAM_INTF_META_AF_ROI, metadata) {
8181 memcpy(&(repro_info.af_roi), afRoi, sizeof(cam_area_t));
8182 }
8183 IF_META_AVAILABLE(cam_dyn_img_data_t, dynMask, CAM_INTF_META_IMG_DYN_FEAT, metadata) {
8184 memcpy(&(repro_info.dyn_mask), dynMask, sizeof(cam_dyn_img_data_t));
8185 }
8186 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB,
8187 (uint8_t *)&repro_info, sizeof(cam_reprocess_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008188
Thierry Strudel295a0ca2016-11-03 18:38:47 -07008189 // INSTANT AEC MODE
8190 IF_META_AVAILABLE(uint8_t, instant_aec_mode,
8191 CAM_INTF_PARM_INSTANT_AEC, metadata) {
8192 camMetadata.update(QCAMERA3_INSTANT_AEC_MODE, instant_aec_mode, 1);
8193 }
8194
Shuzhen Wange763e802016-03-31 10:24:29 -07008195 // AF scene change
8196 IF_META_AVAILABLE(uint8_t, afSceneChange, CAM_INTF_META_AF_SCENE_CHANGE, metadata) {
8197 camMetadata.update(NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE, afSceneChange, 1);
8198 }
8199
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07008200 // Enable ZSL
8201 if (enableZsl != nullptr) {
8202 uint8_t value = *enableZsl ?
8203 ANDROID_CONTROL_ENABLE_ZSL_TRUE : ANDROID_CONTROL_ENABLE_ZSL_FALSE;
8204 camMetadata.update(ANDROID_CONTROL_ENABLE_ZSL, &value, 1);
8205 }
8206
Xu Han821ea9c2017-05-23 09:00:40 -07008207 // OIS Data
8208 IF_META_AVAILABLE(cam_frame_ois_info_t, frame_ois_data, CAM_INTF_META_FRAME_OIS_DATA, metadata) {
8209 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_FRAME_TIMESTAMP_VSYNC,
8210 &(frame_ois_data->frame_sof_timestamp_vsync), 1);
8211 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_FRAME_TIMESTAMP_BOOTTIME,
8212 &(frame_ois_data->frame_sof_timestamp_boottime), 1);
8213 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_TIMESTAMPS_BOOTTIME,
8214 frame_ois_data->ois_sample_timestamp_boottime, frame_ois_data->num_ois_sample);
8215 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_SHIFT_X,
8216 frame_ois_data->ois_sample_shift_x, frame_ois_data->num_ois_sample);
8217 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_SHIFT_Y,
8218 frame_ois_data->ois_sample_shift_y, frame_ois_data->num_ois_sample);
Xue Tu2c3e9142017-08-18 16:23:52 -07008219 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_SHIFT_PIXEL_X,
8220 frame_ois_data->ois_sample_shift_pixel_x, frame_ois_data->num_ois_sample);
8221 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_SHIFT_PIXEL_Y,
8222 frame_ois_data->ois_sample_shift_pixel_y, frame_ois_data->num_ois_sample);
Xu Han821ea9c2017-05-23 09:00:40 -07008223 }
8224
Thierry Strudel3d639192016-09-09 11:52:26 -07008225 resultMetadata = camMetadata.release();
8226 return resultMetadata;
8227}
8228
8229/*===========================================================================
8230 * FUNCTION : saveExifParams
8231 *
 8232 * DESCRIPTION: Cache EXIF debug parameters from the metadata callback in mExifParams
8233 *
8234 * PARAMETERS :
8235 * @metadata : metadata information from callback
8236 *
8237 * RETURN : none
8238 *
8239 *==========================================================================*/
8240void QCamera3HardwareInterface::saveExifParams(metadata_buffer_t *metadata)
8241{
8242 IF_META_AVAILABLE(cam_ae_exif_debug_t, ae_exif_debug_params,
8243 CAM_INTF_META_EXIF_DEBUG_AE, metadata) {
8244 if (mExifParams.debug_params) {
8245 mExifParams.debug_params->ae_debug_params = *ae_exif_debug_params;
8246 mExifParams.debug_params->ae_debug_params_valid = TRUE;
8247 }
8248 }
8249 IF_META_AVAILABLE(cam_awb_exif_debug_t,awb_exif_debug_params,
8250 CAM_INTF_META_EXIF_DEBUG_AWB, metadata) {
8251 if (mExifParams.debug_params) {
8252 mExifParams.debug_params->awb_debug_params = *awb_exif_debug_params;
8253 mExifParams.debug_params->awb_debug_params_valid = TRUE;
8254 }
8255 }
8256 IF_META_AVAILABLE(cam_af_exif_debug_t,af_exif_debug_params,
8257 CAM_INTF_META_EXIF_DEBUG_AF, metadata) {
8258 if (mExifParams.debug_params) {
8259 mExifParams.debug_params->af_debug_params = *af_exif_debug_params;
8260 mExifParams.debug_params->af_debug_params_valid = TRUE;
8261 }
8262 }
8263 IF_META_AVAILABLE(cam_asd_exif_debug_t, asd_exif_debug_params,
8264 CAM_INTF_META_EXIF_DEBUG_ASD, metadata) {
8265 if (mExifParams.debug_params) {
8266 mExifParams.debug_params->asd_debug_params = *asd_exif_debug_params;
8267 mExifParams.debug_params->asd_debug_params_valid = TRUE;
8268 }
8269 }
8270 IF_META_AVAILABLE(cam_stats_buffer_exif_debug_t,stats_exif_debug_params,
8271 CAM_INTF_META_EXIF_DEBUG_STATS, metadata) {
8272 if (mExifParams.debug_params) {
8273 mExifParams.debug_params->stats_debug_params = *stats_exif_debug_params;
8274 mExifParams.debug_params->stats_debug_params_valid = TRUE;
8275 }
8276 }
8277 IF_META_AVAILABLE(cam_bestats_buffer_exif_debug_t,bestats_exif_debug_params,
8278 CAM_INTF_META_EXIF_DEBUG_BESTATS, metadata) {
8279 if (mExifParams.debug_params) {
8280 mExifParams.debug_params->bestats_debug_params = *bestats_exif_debug_params;
8281 mExifParams.debug_params->bestats_debug_params_valid = TRUE;
8282 }
8283 }
8284 IF_META_AVAILABLE(cam_bhist_buffer_exif_debug_t, bhist_exif_debug_params,
8285 CAM_INTF_META_EXIF_DEBUG_BHIST, metadata) {
8286 if (mExifParams.debug_params) {
8287 mExifParams.debug_params->bhist_debug_params = *bhist_exif_debug_params;
8288 mExifParams.debug_params->bhist_debug_params_valid = TRUE;
8289 }
8290 }
8291 IF_META_AVAILABLE(cam_q3a_tuning_info_t, q3a_tuning_exif_debug_params,
8292 CAM_INTF_META_EXIF_DEBUG_3A_TUNING, metadata) {
8293 if (mExifParams.debug_params) {
8294 mExifParams.debug_params->q3a_tuning_debug_params = *q3a_tuning_exif_debug_params;
8295 mExifParams.debug_params->q3a_tuning_debug_params_valid = TRUE;
8296 }
8297 }
8298}
8299
8300/*===========================================================================
8301 * FUNCTION : get3AExifParams
8302 *
 8303 * DESCRIPTION: Return the cached 3A EXIF parameters
8304 *
8305 * PARAMETERS : none
8306 *
8307 *
8308 * RETURN : mm_jpeg_exif_params_t
8309 *
8310 *==========================================================================*/
8311mm_jpeg_exif_params_t QCamera3HardwareInterface::get3AExifParams()
8312{
8313 return mExifParams;
8314}
8315
8316/*===========================================================================
8317 * FUNCTION : translateCbUrgentMetadataToResultMetadata
8318 *
 8319 * DESCRIPTION: Translate urgent (partial) metadata from the backend into framework
 *              result metadata
8320 *
8321 * PARAMETERS :
8322 * @metadata : metadata information from callback
Shuzhen Wang94ddf072017-03-12 19:47:23 -07008323 * @lastUrgentMetadataInBatch: Boolean to indicate whether this is the last
8324 * urgent metadata in a batch. Always true for
8325 * non-batch mode.
Shuzhen Wang181c57b2017-07-21 11:39:44 -07008326 * @frame_number : frame number for this urgent metadata
Shuzhen Wang485e2442017-08-02 12:21:08 -07008327 * @isJumpstartMetadata: Whether this is a partial metadata for jumpstart,
8328 * i.e. even though it doesn't map to a valid partial
8329 * frame number, its metadata entries should be kept.
Thierry Strudel3d639192016-09-09 11:52:26 -07008330 * RETURN : camera_metadata_t*
8331 * metadata in a format specified by fwk
8332 *==========================================================================*/
8333camera_metadata_t*
8334QCamera3HardwareInterface::translateCbUrgentMetadataToResultMetadata
Shuzhen Wang181c57b2017-07-21 11:39:44 -07008335 (metadata_buffer_t *metadata, bool lastUrgentMetadataInBatch,
Shuzhen Wang485e2442017-08-02 12:21:08 -07008336 uint32_t frame_number, bool isJumpstartMetadata)
Thierry Strudel3d639192016-09-09 11:52:26 -07008337{
8338 CameraMetadata camMetadata;
8339 camera_metadata_t *resultMetadata;
8340
Shuzhen Wang485e2442017-08-02 12:21:08 -07008341 if (!lastUrgentMetadataInBatch && !isJumpstartMetadata) {
Shuzhen Wang94ddf072017-03-12 19:47:23 -07008342 /* In batch mode, use empty metadata if this is not the last in batch
8343 */
8344 resultMetadata = allocate_camera_metadata(0, 0);
8345 return resultMetadata;
8346 }
Thierry Strudel3d639192016-09-09 11:52:26 -07008347
8348 IF_META_AVAILABLE(uint32_t, whiteBalanceState, CAM_INTF_META_AWB_STATE, metadata) {
8349 uint8_t fwk_whiteBalanceState = (uint8_t) *whiteBalanceState;
8350 camMetadata.update(ANDROID_CONTROL_AWB_STATE, &fwk_whiteBalanceState, 1);
8351 LOGD("urgent Metadata : ANDROID_CONTROL_AWB_STATE %u", *whiteBalanceState);
8352 }
8353
8354 IF_META_AVAILABLE(cam_trigger_t, aecTrigger, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER, metadata) {
8355 camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
8356 &aecTrigger->trigger, 1);
8357 camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID,
8358 &aecTrigger->trigger_id, 1);
8359 LOGD("urgent Metadata : CAM_INTF_META_AEC_PRECAPTURE_TRIGGER: %d",
8360 aecTrigger->trigger);
8361 LOGD("urgent Metadata : ANDROID_CONTROL_AE_PRECAPTURE_ID: %d",
8362 aecTrigger->trigger_id);
8363 }
8364
8365 IF_META_AVAILABLE(uint32_t, ae_state, CAM_INTF_META_AEC_STATE, metadata) {
8366 uint8_t fwk_ae_state = (uint8_t) *ae_state;
8367 camMetadata.update(ANDROID_CONTROL_AE_STATE, &fwk_ae_state, 1);
8368 LOGD("urgent Metadata : ANDROID_CONTROL_AE_STATE %u", *ae_state);
8369 }
8370
Shuzhen Wang181c57b2017-07-21 11:39:44 -07008371 IF_META_AVAILABLE(uint32_t, focusMode, CAM_INTF_PARM_FOCUS_MODE, metadata) {
8372 int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP), *focusMode);
8373 if (NAME_NOT_FOUND != val) {
8374 uint8_t fwkAfMode = (uint8_t)val;
8375 camMetadata.update(ANDROID_CONTROL_AF_MODE, &fwkAfMode, 1);
8376 LOGD("urgent Metadata : ANDROID_CONTROL_AF_MODE %d", val);
8377 } else {
8378 LOGH("urgent Metadata not found : ANDROID_CONTROL_AF_MODE %d",
8379 val);
8380 }
Thierry Strudel3d639192016-09-09 11:52:26 -07008381 }
8382
Shuzhen Wang181c57b2017-07-21 11:39:44 -07008383 IF_META_AVAILABLE(cam_trigger_t, af_trigger, CAM_INTF_META_AF_TRIGGER, metadata) {
8384 LOGD("urgent Metadata : CAM_INTF_META_AF_TRIGGER = %d",
8385 af_trigger->trigger);
8386 LOGD("urgent Metadata : ANDROID_CONTROL_AF_TRIGGER_ID = %d",
8387 af_trigger->trigger_id);
8388
8389 IF_META_AVAILABLE(uint32_t, afState, CAM_INTF_META_AF_STATE, metadata) {
8390 mAfTrigger = *af_trigger;
8391 uint32_t fwk_AfState = (uint32_t) *afState;
8392
8393 // If this is the result for a new trigger, check if there is new early
8394 // af state. If there is, use the last af state for all results
8395 // preceding current partial frame number.
8396 for (auto & pendingRequest : mPendingRequestsList) {
8397 if (pendingRequest.frame_number < frame_number) {
8398 pendingRequest.focusStateValid = true;
8399 pendingRequest.focusState = fwk_AfState;
8400 } else if (pendingRequest.frame_number == frame_number) {
8401 IF_META_AVAILABLE(uint32_t, earlyAfState, CAM_INTF_META_EARLY_AF_STATE, metadata) {
8402 // Check if early AF state for trigger exists. If yes, send AF state as
8403 // partial result for better latency.
8404 uint8_t fwkEarlyAfState = (uint8_t) *earlyAfState;
8405 pendingRequest.focusStateSent = true;
8406 camMetadata.update(ANDROID_CONTROL_AF_STATE, &fwkEarlyAfState, 1);
8407 LOGD("urgent Metadata(%d) : ANDROID_CONTROL_AF_STATE %u",
8408 frame_number, fwkEarlyAfState);
8409 }
8410 }
8411 }
8412 }
8413 }
8414 camMetadata.update(ANDROID_CONTROL_AF_TRIGGER,
8415 &mAfTrigger.trigger, 1);
8416 camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, &mAfTrigger.trigger_id, 1);
8417
Shuzhen Wang6ce35e62017-03-27 18:00:41 -07008418 IF_META_AVAILABLE(cam_area_t, hAfRegions, CAM_INTF_META_AF_ROI, metadata) {
8419 /*af regions*/
Shuzhen Wang91c14e82017-08-21 17:56:57 -07008420 cam_rect_t hAfRect = hAfRegions->rect;
Shuzhen Wang6ce35e62017-03-27 18:00:41 -07008421 int32_t afRegions[REGIONS_TUPLE_COUNT];
 8422        // Adjust the AF region from the sensor output coordinate system to the
 8423        // active array coordinate system.
Shuzhen Wang91c14e82017-08-21 17:56:57 -07008424 mCropRegionMapper.toActiveArray(hAfRect.left, hAfRect.top,
8425 hAfRect.width, hAfRect.height);
Shuzhen Wang6ce35e62017-03-27 18:00:41 -07008426
Shuzhen Wang91c14e82017-08-21 17:56:57 -07008427 convertToRegions(hAfRect, afRegions, hAfRegions->weight);
Shuzhen Wang6ce35e62017-03-27 18:00:41 -07008428 camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions,
8429 REGIONS_TUPLE_COUNT);
8430 LOGD("Metadata : ANDROID_CONTROL_AF_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
8431 afRegions[0], afRegions[1], afRegions[2], afRegions[3],
Shuzhen Wang91c14e82017-08-21 17:56:57 -07008432 hAfRect.left, hAfRect.top, hAfRect.width,
8433 hAfRect.height);
Shuzhen Wang6ce35e62017-03-27 18:00:41 -07008434 }
8435
Shuzhen Wangcc386c52017-03-29 09:28:08 -07008436 // AF region confidence
8437 IF_META_AVAILABLE(int32_t, afRegionConfidence, CAM_INTF_META_AF_REGIONS_CONFIDENCE, metadata) {
8438 camMetadata.update(NEXUS_EXPERIMENTAL_2017_AF_REGIONS_CONFIDENCE, afRegionConfidence, 1);
8439 }
8440
Thierry Strudel3d639192016-09-09 11:52:26 -07008441 IF_META_AVAILABLE(int32_t, whiteBalance, CAM_INTF_PARM_WHITE_BALANCE, metadata) {
8442 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
8443 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP), *whiteBalance);
8444 if (NAME_NOT_FOUND != val) {
8445 uint8_t fwkWhiteBalanceMode = (uint8_t)val;
8446 camMetadata.update(ANDROID_CONTROL_AWB_MODE, &fwkWhiteBalanceMode, 1);
8447 LOGD("urgent Metadata : ANDROID_CONTROL_AWB_MODE %d", val);
8448 } else {
8449 LOGH("urgent Metadata not found : ANDROID_CONTROL_AWB_MODE");
8450 }
8451 }
8452
8453 uint8_t fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
8454 uint32_t aeMode = CAM_AE_MODE_MAX;
8455 int32_t flashMode = CAM_FLASH_MODE_MAX;
8456 int32_t redeye = -1;
8457 IF_META_AVAILABLE(uint32_t, pAeMode, CAM_INTF_META_AEC_MODE, metadata) {
8458 aeMode = *pAeMode;
8459 }
8460 IF_META_AVAILABLE(int32_t, pFlashMode, CAM_INTF_PARM_LED_MODE, metadata) {
8461 flashMode = *pFlashMode;
8462 }
8463 IF_META_AVAILABLE(int32_t, pRedeye, CAM_INTF_PARM_REDEYE_REDUCTION, metadata) {
8464 redeye = *pRedeye;
8465 }
8466
8467 if (1 == redeye) {
8468 fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
8469 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8470 } else if ((CAM_FLASH_MODE_AUTO == flashMode) || (CAM_FLASH_MODE_ON == flashMode)) {
8471 int val = lookupFwkName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
8472 flashMode);
8473 if (NAME_NOT_FOUND != val) {
8474 fwk_aeMode = (uint8_t)val;
8475 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8476 } else {
8477 LOGE("Unsupported flash mode %d", flashMode);
8478 }
8479 } else if (aeMode == CAM_AE_MODE_ON) {
8480 fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON;
8481 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8482 } else if (aeMode == CAM_AE_MODE_OFF) {
8483 fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
8484 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -08008485 } else if (aeMode == CAM_AE_MODE_ON_EXTERNAL_FLASH) {
8486 fwk_aeMode = NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH;
8487 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -07008488 } else {
8489 LOGE("Not enough info to deduce ANDROID_CONTROL_AE_MODE redeye:%d, "
8490 "flashMode:%d, aeMode:%u!!!",
8491 redeye, flashMode, aeMode);
8492 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07008493 if (mInstantAEC) {
 8494        // Increment the frame index count until the bound is reached for instant AEC.
8495 mInstantAecFrameIdxCount++;
8496 IF_META_AVAILABLE(cam_3a_params_t, ae_params,
8497 CAM_INTF_META_AEC_INFO, metadata) {
8498 LOGH("ae_params->settled = %d",ae_params->settled);
 8499            // If AEC has settled, or the number of frames has reached the bound
 8500            // value, reset instant AEC.
8501 if (ae_params->settled ||
8502 (mInstantAecFrameIdxCount > mAecSkipDisplayFrameBound)) {
8503 LOGH("AEC settled or Frames reached instantAEC bound, resetting instantAEC");
8504 mInstantAEC = false;
8505 mResetInstantAEC = true;
8506 mInstantAecFrameIdxCount = 0;
8507 }
8508 }
8509 }
Shuzhen Wang3569d4a2017-09-04 19:10:28 -07008510
8511 IF_META_AVAILABLE(int32_t, af_tof_confidence,
8512 CAM_INTF_META_AF_TOF_CONFIDENCE, metadata) {
8513 IF_META_AVAILABLE(int32_t, af_tof_distance,
8514 CAM_INTF_META_AF_TOF_DISTANCE, metadata) {
8515 int32_t fwk_af_tof_confidence = *af_tof_confidence;
8516 int32_t fwk_af_tof_distance = *af_tof_distance;
8517 if (fwk_af_tof_confidence == 1) {
8518 mSceneDistance = fwk_af_tof_distance;
8519 } else {
8520 mSceneDistance = -1;
8521 }
8522 LOGD("tof_distance %d, tof_confidence %d, mSceneDistance %d",
8523 fwk_af_tof_distance, fwk_af_tof_confidence, mSceneDistance);
8524 }
8525 }
8526 camMetadata.update(NEXUS_EXPERIMENTAL_2017_SCENE_DISTANCE, &mSceneDistance, 1);
8527
Thierry Strudel3d639192016-09-09 11:52:26 -07008528 resultMetadata = camMetadata.release();
8529 return resultMetadata;
8530}
8531
8532/*===========================================================================
8533 * FUNCTION : dumpMetadataToFile
8534 *
8535 * DESCRIPTION: Dumps tuning metadata to file system
8536 *
8537 * PARAMETERS :
8538 * @meta : tuning metadata
8539 * @dumpFrameCount : current dump frame count
8540 * @enabled : Enable mask
8541 *
8542 *==========================================================================*/
8543void QCamera3HardwareInterface::dumpMetadataToFile(tuning_params_t &meta,
8544 uint32_t &dumpFrameCount,
8545 bool enabled,
8546 const char *type,
8547 uint32_t frameNumber)
8548{
8549 //Some sanity checks
8550 if (meta.tuning_sensor_data_size > TUNING_SENSOR_DATA_MAX) {
8551 LOGE("Tuning sensor data size bigger than expected %d: %d",
8552 meta.tuning_sensor_data_size,
8553 TUNING_SENSOR_DATA_MAX);
8554 return;
8555 }
8556
8557 if (meta.tuning_vfe_data_size > TUNING_VFE_DATA_MAX) {
8558 LOGE("Tuning VFE data size bigger than expected %d: %d",
8559 meta.tuning_vfe_data_size,
8560 TUNING_VFE_DATA_MAX);
8561 return;
8562 }
8563
8564 if (meta.tuning_cpp_data_size > TUNING_CPP_DATA_MAX) {
8565 LOGE("Tuning CPP data size bigger than expected %d: %d",
8566 meta.tuning_cpp_data_size,
8567 TUNING_CPP_DATA_MAX);
8568 return;
8569 }
8570
8571 if (meta.tuning_cac_data_size > TUNING_CAC_DATA_MAX) {
8572 LOGE("Tuning CAC data size bigger than expected %d: %d",
8573 meta.tuning_cac_data_size,
8574 TUNING_CAC_DATA_MAX);
8575 return;
8576 }
8577 //
8578
8579 if(enabled){
8580 char timeBuf[FILENAME_MAX];
8581 char buf[FILENAME_MAX];
8582 memset(buf, 0, sizeof(buf));
8583 memset(timeBuf, 0, sizeof(timeBuf));
8584 time_t current_time;
8585 struct tm * timeinfo;
8586 time (&current_time);
8587 timeinfo = localtime (&current_time);
8588 if (timeinfo != NULL) {
8589 strftime (timeBuf, sizeof(timeBuf),
8590 QCAMERA_DUMP_FRM_LOCATION"%Y%m%d%H%M%S", timeinfo);
8591 }
8592 String8 filePath(timeBuf);
8593 snprintf(buf,
8594 sizeof(buf),
8595 "%dm_%s_%d.bin",
8596 dumpFrameCount,
8597 type,
8598 frameNumber);
8599 filePath.append(buf);
8600 int file_fd = open(filePath.string(), O_RDWR | O_CREAT, 0777);
8601 if (file_fd >= 0) {
8602 ssize_t written_len = 0;
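            // The dump file mirrors the tuning vendor-tag blob layout: six uint32 header
            // fields followed by the sensor, VFE, CPP and CAC payloads.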
8603 meta.tuning_data_version = TUNING_DATA_VERSION;
8604 void *data = (void *)((uint8_t *)&meta.tuning_data_version);
8605 written_len += write(file_fd, data, sizeof(uint32_t));
8606 data = (void *)((uint8_t *)&meta.tuning_sensor_data_size);
8607 LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
8608 written_len += write(file_fd, data, sizeof(uint32_t));
8609 data = (void *)((uint8_t *)&meta.tuning_vfe_data_size);
8610 LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
8611 written_len += write(file_fd, data, sizeof(uint32_t));
8612 data = (void *)((uint8_t *)&meta.tuning_cpp_data_size);
8613 LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
8614 written_len += write(file_fd, data, sizeof(uint32_t));
8615 data = (void *)((uint8_t *)&meta.tuning_cac_data_size);
8616 LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
8617 written_len += write(file_fd, data, sizeof(uint32_t));
8618 meta.tuning_mod3_data_size = 0;
8619 data = (void *)((uint8_t *)&meta.tuning_mod3_data_size);
8620 LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
8621 written_len += write(file_fd, data, sizeof(uint32_t));
8622 size_t total_size = meta.tuning_sensor_data_size;
8623 data = (void *)((uint8_t *)&meta.data);
8624 written_len += write(file_fd, data, total_size);
8625 total_size = meta.tuning_vfe_data_size;
8626 data = (void *)((uint8_t *)&meta.data[TUNING_VFE_DATA_OFFSET]);
8627 written_len += write(file_fd, data, total_size);
8628 total_size = meta.tuning_cpp_data_size;
8629 data = (void *)((uint8_t *)&meta.data[TUNING_CPP_DATA_OFFSET]);
8630 written_len += write(file_fd, data, total_size);
8631 total_size = meta.tuning_cac_data_size;
8632 data = (void *)((uint8_t *)&meta.data[TUNING_CAC_DATA_OFFSET]);
8633 written_len += write(file_fd, data, total_size);
8634 close(file_fd);
8635 }else {
8636 LOGE("fail to open file for metadata dumping");
8637 }
8638 }
8639}
8640
8641/*===========================================================================
8642 * FUNCTION : cleanAndSortStreamInfo
8643 *
8644 * DESCRIPTION: helper method to clean up invalid streams in stream_info,
 8645 *              and sort them such that the raw streams are at the end of the list.
 8646 *              This is a workaround for a camera daemon constraint.
8647 *
8648 * PARAMETERS : None
8649 *
8650 *==========================================================================*/
8651void QCamera3HardwareInterface::cleanAndSortStreamInfo()
8652{
8653 List<stream_info_t *> newStreamInfo;
8654
8655 /*clean up invalid streams*/
8656 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
8657 it != mStreamInfo.end();) {
8658 if(((*it)->status) == INVALID){
8659 QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
8660 delete channel;
8661 free(*it);
8662 it = mStreamInfo.erase(it);
8663 } else {
8664 it++;
8665 }
8666 }
8667
8668 // Move preview/video/callback/snapshot streams into newList
8669 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
8670 it != mStreamInfo.end();) {
8671 if ((*it)->stream->format != HAL_PIXEL_FORMAT_RAW_OPAQUE &&
8672 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW10 &&
8673 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW16) {
8674 newStreamInfo.push_back(*it);
8675 it = mStreamInfo.erase(it);
8676 } else
8677 it++;
8678 }
8679 // Move raw streams into newList
8680 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
8681 it != mStreamInfo.end();) {
8682 newStreamInfo.push_back(*it);
8683 it = mStreamInfo.erase(it);
8684 }
8685
8686 mStreamInfo = newStreamInfo;
8687}
8688
8689/*===========================================================================
8690 * FUNCTION : extractJpegMetadata
8691 *
8692 * DESCRIPTION: helper method to extract Jpeg metadata from capture request.
8693 * JPEG metadata is cached in HAL, and return as part of capture
8694 * result when metadata is returned from camera daemon.
8695 *
8696 * PARAMETERS : @jpegMetadata: jpeg metadata to be extracted
8697 * @request: capture request
8698 *
8699 *==========================================================================*/
8700void QCamera3HardwareInterface::extractJpegMetadata(
8701 CameraMetadata& jpegMetadata,
8702 const camera3_capture_request_t *request)
8703{
8704 CameraMetadata frame_settings;
8705 frame_settings = request->settings;
8706
8707 if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES))
8708 jpegMetadata.update(ANDROID_JPEG_GPS_COORDINATES,
8709 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d,
8710 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).count);
8711
8712 if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD))
8713 jpegMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD,
8714 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8,
8715 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).count);
8716
8717 if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP))
8718 jpegMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP,
8719 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64,
8720 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).count);
8721
8722 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION))
8723 jpegMetadata.update(ANDROID_JPEG_ORIENTATION,
8724 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32,
8725 frame_settings.find(ANDROID_JPEG_ORIENTATION).count);
8726
8727 if (frame_settings.exists(ANDROID_JPEG_QUALITY))
8728 jpegMetadata.update(ANDROID_JPEG_QUALITY,
8729 frame_settings.find(ANDROID_JPEG_QUALITY).data.u8,
8730 frame_settings.find(ANDROID_JPEG_QUALITY).count);
8731
8732 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY))
8733 jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY,
8734 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8,
8735 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).count);
8736
8737 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
8738 int32_t thumbnail_size[2];
8739 thumbnail_size[0] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
8740 thumbnail_size[1] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
8741 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
8742 int32_t orientation =
8743 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008744 if ((!needJpegExifRotation()) && ((orientation == 90) || (orientation == 270))) {
Thierry Strudel3d639192016-09-09 11:52:26 -07008745 //swap thumbnail dimensions for rotations 90 and 270 in jpeg metadata.
8746 int32_t temp;
8747 temp = thumbnail_size[0];
8748 thumbnail_size[0] = thumbnail_size[1];
8749 thumbnail_size[1] = temp;
8750 }
8751 }
8752 jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE,
8753 thumbnail_size,
8754 frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
8755 }
8756
8757}
8758
8759/*===========================================================================
8760 * FUNCTION : convertToRegions
8761 *
8762 * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array
8763 *
8764 * PARAMETERS :
8765 * @rect : cam_rect_t struct to convert
8766 * @region : int32_t destination array
8767 * @weight : if we are converting from cam_area_t, weight is valid
8768 * else weight = -1
8769 *
8770 *==========================================================================*/
8771void QCamera3HardwareInterface::convertToRegions(cam_rect_t rect,
8772 int32_t *region, int weight)
8773{
Jason Lee8ce36fa2017-04-19 19:40:37 -07008774 region[FACE_LEFT] = rect.left;
8775 region[FACE_TOP] = rect.top;
8776 region[FACE_RIGHT] = rect.left + rect.width;
8777 region[FACE_BOTTOM] = rect.top + rect.height;
Thierry Strudel3d639192016-09-09 11:52:26 -07008778 if (weight > -1) {
Jason Lee8ce36fa2017-04-19 19:40:37 -07008779 region[FACE_WEIGHT] = weight;
Thierry Strudel3d639192016-09-09 11:52:26 -07008780 }
8781}
8782
8783/*===========================================================================
8784 * FUNCTION : convertFromRegions
8785 *
 8786 * DESCRIPTION: helper method to convert a framework region array into cam_area_t
 8787 *
 8788 * PARAMETERS :
 8789 *   @roi            : cam_area_t struct to be filled
 8790 *   @frame_settings : capture request settings containing the region entry
 8791 *   @tag            : metadata tag whose data is laid out as
 8792 *                     [xmin, ymin, xmax, ymax, weight]
8793 *
8794 *==========================================================================*/
8795void QCamera3HardwareInterface::convertFromRegions(cam_area_t &roi,
Chien-Yu Chen92724a82017-01-06 11:50:30 -08008796 const CameraMetadata &frame_settings, uint32_t tag)
Thierry Strudel3d639192016-09-09 11:52:26 -07008797{
Thierry Strudel3d639192016-09-09 11:52:26 -07008798 int32_t x_min = frame_settings.find(tag).data.i32[0];
8799 int32_t y_min = frame_settings.find(tag).data.i32[1];
8800 int32_t x_max = frame_settings.find(tag).data.i32[2];
8801 int32_t y_max = frame_settings.find(tag).data.i32[3];
8802 roi.weight = frame_settings.find(tag).data.i32[4];
8803 roi.rect.left = x_min;
8804 roi.rect.top = y_min;
8805 roi.rect.width = x_max - x_min;
8806 roi.rect.height = y_max - y_min;
8807}
8808
8809/*===========================================================================
8810 * FUNCTION : resetIfNeededROI
8811 *
8812 * DESCRIPTION: helper method to reset the roi if it is greater than scaler
8813 * crop region
8814 *
8815 * PARAMETERS :
8816 * @roi : cam_area_t struct to resize
8817 * @scalerCropRegion : cam_crop_region_t region to compare against
8818 *
8819 *
8820 *==========================================================================*/
8821bool QCamera3HardwareInterface::resetIfNeededROI(cam_area_t* roi,
8822 const cam_crop_region_t* scalerCropRegion)
8823{
8824 int32_t roi_x_max = roi->rect.width + roi->rect.left;
8825 int32_t roi_y_max = roi->rect.height + roi->rect.top;
8826 int32_t crop_x_max = scalerCropRegion->width + scalerCropRegion->left;
8827 int32_t crop_y_max = scalerCropRegion->height + scalerCropRegion->top;
8828
 8829    /* According to the spec, weight = 0 indicates that the roi should be disabled.
 8830     * Without this check, the calculations below that validate whether the roi is
 8831     * inside the scalar crop region would fail, so the roi would not be reset and
 8832     * the algorithm would continue to use a stale roi window.
8833 */
8834 if (roi->weight == 0) {
8835 return true;
8836 }
8837
8838 if ((roi_x_max < scalerCropRegion->left) ||
8839 // right edge of roi window is left of scalar crop's left edge
8840 (roi_y_max < scalerCropRegion->top) ||
8841 // bottom edge of roi window is above scalar crop's top edge
8842 (roi->rect.left > crop_x_max) ||
8843 // left edge of roi window is beyond(right) of scalar crop's right edge
8844 (roi->rect.top > crop_y_max)){
 8845        // top edge of roi window is beyond (below) scalar crop's bottom edge
8846 return false;
8847 }
8848 if (roi->rect.left < scalerCropRegion->left) {
8849 roi->rect.left = scalerCropRegion->left;
8850 }
8851 if (roi->rect.top < scalerCropRegion->top) {
8852 roi->rect.top = scalerCropRegion->top;
8853 }
8854 if (roi_x_max > crop_x_max) {
8855 roi_x_max = crop_x_max;
8856 }
8857 if (roi_y_max > crop_y_max) {
8858 roi_y_max = crop_y_max;
8859 }
8860 roi->rect.width = roi_x_max - roi->rect.left;
8861 roi->rect.height = roi_y_max - roi->rect.top;
8862 return true;
8863}
8864
8865/*===========================================================================
8866 * FUNCTION : convertLandmarks
8867 *
8868 * DESCRIPTION: helper method to extract the landmarks from face detection info
8869 *
8870 * PARAMETERS :
8871 * @landmark_data : input landmark data to be converted
8872 * @landmarks : int32_t destination array
8873 *
8874 *
8875 *==========================================================================*/
8876void QCamera3HardwareInterface::convertLandmarks(
8877 cam_face_landmarks_info_t landmark_data,
8878 int32_t *landmarks)
8879{
Thierry Strudel04e026f2016-10-10 11:27:36 -07008880 if (landmark_data.is_left_eye_valid) {
8881 landmarks[LEFT_EYE_X] = (int32_t)landmark_data.left_eye_center.x;
8882 landmarks[LEFT_EYE_Y] = (int32_t)landmark_data.left_eye_center.y;
8883 } else {
8884 landmarks[LEFT_EYE_X] = FACE_INVALID_POINT;
8885 landmarks[LEFT_EYE_Y] = FACE_INVALID_POINT;
8886 }
8887
8888 if (landmark_data.is_right_eye_valid) {
8889 landmarks[RIGHT_EYE_X] = (int32_t)landmark_data.right_eye_center.x;
8890 landmarks[RIGHT_EYE_Y] = (int32_t)landmark_data.right_eye_center.y;
8891 } else {
8892 landmarks[RIGHT_EYE_X] = FACE_INVALID_POINT;
8893 landmarks[RIGHT_EYE_Y] = FACE_INVALID_POINT;
8894 }
8895
8896 if (landmark_data.is_mouth_valid) {
8897 landmarks[MOUTH_X] = (int32_t)landmark_data.mouth_center.x;
8898 landmarks[MOUTH_Y] = (int32_t)landmark_data.mouth_center.y;
8899 } else {
8900 landmarks[MOUTH_X] = FACE_INVALID_POINT;
8901 landmarks[MOUTH_Y] = FACE_INVALID_POINT;
8902 }
8903}
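/* Layout note (illustrative): convertLandmarks() writes six int32_t entries of the
 * destination array, at the LEFT_EYE_X/Y, RIGHT_EYE_X/Y and MOUTH_X/Y indices,
 * using either the detected point or FACE_INVALID_POINT, so callers are expected
 * to pass a per-face landmark array with room for all six values.
 */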
8904
8905/*===========================================================================
8906 * FUNCTION : setInvalidLandmarks
8907 *
8908 * DESCRIPTION: helper method to set invalid landmarks
8909 *
8910 * PARAMETERS :
8911 * @landmarks : int32_t destination array
8912 *
8913 *
8914 *==========================================================================*/
8915void QCamera3HardwareInterface::setInvalidLandmarks(
8916 int32_t *landmarks)
8917{
8918 landmarks[LEFT_EYE_X] = FACE_INVALID_POINT;
8919 landmarks[LEFT_EYE_Y] = FACE_INVALID_POINT;
8920 landmarks[RIGHT_EYE_X] = FACE_INVALID_POINT;
8921 landmarks[RIGHT_EYE_Y] = FACE_INVALID_POINT;
8922 landmarks[MOUTH_X] = FACE_INVALID_POINT;
8923 landmarks[MOUTH_Y] = FACE_INVALID_POINT;
Thierry Strudel3d639192016-09-09 11:52:26 -07008924}
8925
8926#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008927
8928/*===========================================================================
8929 * FUNCTION : getCapabilities
8930 *
8931 * DESCRIPTION: query camera capability from back-end
8932 *
8933 * PARAMETERS :
8934 * @ops : mm-interface ops structure
8935 * @cam_handle : camera handle for which we need capability
8936 *
8937 * RETURN : ptr type of capability structure
8938 * capability for success
8939 * NULL for failure
8940 *==========================================================================*/
8941cam_capability_t *QCamera3HardwareInterface::getCapabilities(mm_camera_ops_t *ops,
8942 uint32_t cam_handle)
8943{
8944 int rc = NO_ERROR;
8945 QCamera3HeapMemory *capabilityHeap = NULL;
8946 cam_capability_t *cap_ptr = NULL;
8947
8948 if (ops == NULL) {
8949 LOGE("Invalid arguments");
8950 return NULL;
8951 }
8952
8953 capabilityHeap = new QCamera3HeapMemory(1);
8954 if (capabilityHeap == NULL) {
8955 LOGE("creation of capabilityHeap failed");
8956 return NULL;
8957 }
8958
8959 /* Allocate memory for capability buffer */
8960 rc = capabilityHeap->allocate(sizeof(cam_capability_t));
8961 if(rc != OK) {
8962 LOGE("No memory for cappability");
8963 goto allocate_failed;
8964 }
8965
8966 /* Map memory for capability buffer */
8967 memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t));
8968
8969 rc = ops->map_buf(cam_handle,
8970 CAM_MAPPING_BUF_TYPE_CAPABILITY, capabilityHeap->getFd(0),
8971 sizeof(cam_capability_t), capabilityHeap->getPtr(0));
8972 if(rc < 0) {
8973 LOGE("failed to map capability buffer");
8974 rc = FAILED_TRANSACTION;
8975 goto map_failed;
8976 }
8977
8978 /* Query Capability */
8979 rc = ops->query_capability(cam_handle);
8980 if(rc < 0) {
8981 LOGE("failed to query capability");
8982 rc = FAILED_TRANSACTION;
8983 goto query_failed;
8984 }
8985
8986 cap_ptr = (cam_capability_t *)malloc(sizeof(cam_capability_t));
8987 if (cap_ptr == NULL) {
8988 LOGE("out of memory");
8989 rc = NO_MEMORY;
8990 goto query_failed;
8991 }
8992
8993 memset(cap_ptr, 0, sizeof(cam_capability_t));
8994 memcpy(cap_ptr, DATA_PTR(capabilityHeap, 0), sizeof(cam_capability_t));
8995
8996 int index;
8997 for (index = 0; index < CAM_ANALYSIS_INFO_MAX; index++) {
8998 cam_analysis_info_t *p_analysis_info = &cap_ptr->analysis_info[index];
8999 p_analysis_info->analysis_padding_info.offset_info.offset_x = 0;
9000 p_analysis_info->analysis_padding_info.offset_info.offset_y = 0;
9001 }
9002
9003query_failed:
9004 ops->unmap_buf(cam_handle, CAM_MAPPING_BUF_TYPE_CAPABILITY);
9005map_failed:
9006 capabilityHeap->deallocate();
9007allocate_failed:
9008 delete capabilityHeap;
9009
9010 if (rc != NO_ERROR) {
9011 return NULL;
9012 } else {
9013 return cap_ptr;
9014 }
9015}
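/* Usage note (see initCapabilities() below for the actual call site): the returned
 * cam_capability_t is a heap copy created with malloc(), so ownership passes to the
 * caller, which must free() it when no longer needed. The temporary capability heap
 * used for the backend map/query/unmap sequence is always released before returning.
 */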
9016
Thierry Strudel3d639192016-09-09 11:52:26 -07009017/*===========================================================================
9018 * FUNCTION : initCapabilities
9019 *
9020 * DESCRIPTION: initialize camera capabilities in static data struct
9021 *
9022 * PARAMETERS :
9023 * @cameraId : camera Id
9024 *
9025 * RETURN : int32_t type of status
9026 * NO_ERROR -- success
9027 * non-zero failure code
9028 *==========================================================================*/
9029int QCamera3HardwareInterface::initCapabilities(uint32_t cameraId)
9030{
9031 int rc = 0;
9032 mm_camera_vtbl_t *cameraHandle = NULL;
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07009033 uint32_t handle = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07009034
9035 rc = camera_open((uint8_t)cameraId, &cameraHandle);
9036 if (rc) {
9037 LOGE("camera_open failed. rc = %d", rc);
9038 goto open_failed;
9039 }
9040 if (!cameraHandle) {
9041 LOGE("camera_open failed. cameraHandle = %p", cameraHandle);
9042 goto open_failed;
9043 }
9044
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07009045 handle = get_main_camera_handle(cameraHandle->camera_handle);
9046 gCamCapability[cameraId] = getCapabilities(cameraHandle->ops, handle);
9047 if (gCamCapability[cameraId] == NULL) {
9048 rc = FAILED_TRANSACTION;
9049 goto failed_op;
Thierry Strudel3d639192016-09-09 11:52:26 -07009050 }
9051
Thierry Strudel295a0ca2016-11-03 18:38:47 -07009052 gCamCapability[cameraId]->camera_index = cameraId;
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07009053 if (is_dual_camera_by_idx(cameraId)) {
9054 handle = get_aux_camera_handle(cameraHandle->camera_handle);
9055 gCamCapability[cameraId]->aux_cam_cap =
9056 getCapabilities(cameraHandle->ops, handle);
9057 if (gCamCapability[cameraId]->aux_cam_cap == NULL) {
9058 rc = FAILED_TRANSACTION;
9059 free(gCamCapability[cameraId]);
9060 goto failed_op;
9061 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08009062
9063 // Copy the main camera capability to main_cam_cap struct
9064 gCamCapability[cameraId]->main_cam_cap =
9065 (cam_capability_t *)malloc(sizeof(cam_capability_t));
9066 if (gCamCapability[cameraId]->main_cam_cap == NULL) {
9067 LOGE("out of memory");
9068 rc = NO_MEMORY;
9069 goto failed_op;
9070 }
9071 memcpy(gCamCapability[cameraId]->main_cam_cap, gCamCapability[cameraId],
9072 sizeof(cam_capability_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07009073 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07009074failed_op:
Thierry Strudel3d639192016-09-09 11:52:26 -07009075 cameraHandle->ops->close_camera(cameraHandle->camera_handle);
9076 cameraHandle = NULL;
9077open_failed:
9078 return rc;
9079}
9080
9081/*==========================================================================
9082 * FUNCTION : get3AVersion
9083 *
9084 * DESCRIPTION: get the Q3A S/W version
9085 *
9086 * PARAMETERS :
9087 * @sw_version: Reference of Q3A structure which will hold version info upon
9088 * return
9089 *
9090 * RETURN : None
9091 *
9092 *==========================================================================*/
9093void QCamera3HardwareInterface::get3AVersion(cam_q3a_version_t &sw_version)
9094{
9095 if(gCamCapability[mCameraId])
9096 sw_version = gCamCapability[mCameraId]->q3a_version;
9097 else
9098 LOGE("Capability structure NULL!");
9099}
9100
9101
9102/*===========================================================================
9103 * FUNCTION : initParameters
9104 *
9105 * DESCRIPTION: initialize camera parameters
9106 *
9107 * PARAMETERS :
9108 *
9109 * RETURN : int32_t type of status
9110 * NO_ERROR -- success
9111 * non-zero failure code
9112 *==========================================================================*/
9113int QCamera3HardwareInterface::initParameters()
9114{
9115 int rc = 0;
9116
9117 //Allocate Set Param Buffer
9118 mParamHeap = new QCamera3HeapMemory(1);
9119 rc = mParamHeap->allocate(sizeof(metadata_buffer_t));
9120 if(rc != OK) {
9121 rc = NO_MEMORY;
9122 LOGE("Failed to allocate SETPARM Heap memory");
9123 delete mParamHeap;
9124 mParamHeap = NULL;
9125 return rc;
9126 }
9127
9128 //Map memory for parameters buffer
9129 rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
9130 CAM_MAPPING_BUF_TYPE_PARM_BUF,
9131 mParamHeap->getFd(0),
9132 sizeof(metadata_buffer_t),
9133 (metadata_buffer_t *) DATA_PTR(mParamHeap,0));
9134 if(rc < 0) {
9135 LOGE("failed to map SETPARM buffer");
9136 rc = FAILED_TRANSACTION;
9137 mParamHeap->deallocate();
9138 delete mParamHeap;
9139 mParamHeap = NULL;
9140 return rc;
9141 }
9142
9143 mParameters = (metadata_buffer_t *) DATA_PTR(mParamHeap,0);
9144
9145 mPrevParameters = (metadata_buffer_t *)malloc(sizeof(metadata_buffer_t));
9146 return rc;
9147}
9148
9149/*===========================================================================
9150 * FUNCTION : deinitParameters
9151 *
9152 * DESCRIPTION: de-initialize camera parameters
9153 *
9154 * PARAMETERS :
9155 *
9156 * RETURN : NONE
9157 *==========================================================================*/
9158void QCamera3HardwareInterface::deinitParameters()
9159{
9160 mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
9161 CAM_MAPPING_BUF_TYPE_PARM_BUF);
9162
9163 mParamHeap->deallocate();
9164 delete mParamHeap;
9165 mParamHeap = NULL;
9166
9167 mParameters = NULL;
9168
9169 free(mPrevParameters);
9170 mPrevParameters = NULL;
9171}
9172
9173/*===========================================================================
9174 * FUNCTION : calcMaxJpegSize
9175 *
9176 * DESCRIPTION: Calculates maximum jpeg size supported by the cameraId
9177 *
9178 * PARAMETERS :
9179 *
9180 * RETURN : max_jpeg_size
9181 *==========================================================================*/
9182size_t QCamera3HardwareInterface::calcMaxJpegSize(uint32_t camera_id)
9183{
9184 size_t max_jpeg_size = 0;
9185 size_t temp_width, temp_height;
9186 size_t count = MIN(gCamCapability[camera_id]->picture_sizes_tbl_cnt,
9187 MAX_SIZES_CNT);
9188 for (size_t i = 0; i < count; i++) {
9189 temp_width = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].width;
9190 temp_height = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].height;
9191 if (temp_width * temp_height > max_jpeg_size ) {
9192 max_jpeg_size = temp_width * temp_height;
9193 }
9194 }
9195 max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
9196 return max_jpeg_size;
9197}
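/* Worked example (hypothetical sensor): if the largest picture size is 4000x3000,
 * max_jpeg_size above becomes 12000000 pixels and the returned bound is
 * 12000000 * 3/2 + sizeof(camera3_jpeg_blob_t), i.e. 18000000 bytes plus the
 * blob header (roughly 18 MB).
 */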
9198
9199/*===========================================================================
9200 * FUNCTION : getMaxRawSize
9201 *
9202 * DESCRIPTION: Fetches maximum raw size supported by the cameraId
9203 *
9204 * PARAMETERS :
9205 *
9206 * RETURN : Largest supported Raw Dimension
9207 *==========================================================================*/
9208cam_dimension_t QCamera3HardwareInterface::getMaxRawSize(uint32_t camera_id)
9209{
9210 int max_width = 0;
9211 cam_dimension_t maxRawSize;
9212
9213 memset(&maxRawSize, 0, sizeof(cam_dimension_t));
9214 for (size_t i = 0; i < gCamCapability[camera_id]->supported_raw_dim_cnt; i++) {
9215 if (max_width < gCamCapability[camera_id]->raw_dim[i].width) {
9216 max_width = gCamCapability[camera_id]->raw_dim[i].width;
9217 maxRawSize = gCamCapability[camera_id]->raw_dim[i];
9218 }
9219 }
9220 return maxRawSize;
9221}
9222
9223
9224/*===========================================================================
9225 * FUNCTION : calcMaxJpegDim
9226 *
9227 * DESCRIPTION: Calculates maximum jpeg dimension supported by the cameraId
9228 *
9229 * PARAMETERS :
9230 *
9231 * RETURN : max_jpeg_dim
9232 *==========================================================================*/
9233cam_dimension_t QCamera3HardwareInterface::calcMaxJpegDim()
9234{
9235 cam_dimension_t max_jpeg_dim;
9236 cam_dimension_t curr_jpeg_dim;
9237 max_jpeg_dim.width = 0;
9238 max_jpeg_dim.height = 0;
9239 curr_jpeg_dim.width = 0;
9240 curr_jpeg_dim.height = 0;
9241 for (size_t i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
9242 curr_jpeg_dim.width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width;
9243 curr_jpeg_dim.height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height;
9244 if (curr_jpeg_dim.width * curr_jpeg_dim.height >
9245 max_jpeg_dim.width * max_jpeg_dim.height ) {
9246 max_jpeg_dim.width = curr_jpeg_dim.width;
9247 max_jpeg_dim.height = curr_jpeg_dim.height;
9248 }
9249 }
9250 return max_jpeg_dim;
9251}
9252
9253/*===========================================================================
9254 * FUNCTION : addStreamConfig
9255 *
9256 * DESCRIPTION: adds the stream configuration to the array
9257 *
9258 * PARAMETERS :
9259 * @available_stream_configs : pointer to stream configuration array
9260 * @scalar_format : scalar format
9261 * @dim : configuration dimension
9262 * @config_type : input or output configuration type
9263 *
9264 * RETURN : NONE
9265 *==========================================================================*/
9266void QCamera3HardwareInterface::addStreamConfig(Vector<int32_t> &available_stream_configs,
9267 int32_t scalar_format, const cam_dimension_t &dim, int32_t config_type)
9268{
9269 available_stream_configs.add(scalar_format);
9270 available_stream_configs.add(dim.width);
9271 available_stream_configs.add(dim.height);
9272 available_stream_configs.add(config_type);
9273}
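/* Illustrative sketch, excluded from the build with "#if 0": the stream
 * configuration table is a flat list of (format, width, height, type) quadruples,
 * so entry i occupies indices 4*i .. 4*i+3. The dimension below is made up, and
 * addStreamConfig() is assumed to be reachable from the calling context.
 */
#if 0
static void exampleAddStreamConfig(QCamera3HardwareInterface *hw)
{
    Vector<int32_t> configs;
    cam_dimension_t dim;
    dim.width = 1920;
    dim.height = 1080;
    hw->addStreamConfig(configs, HAL_PIXEL_FORMAT_YCbCr_420_888, dim,
            ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
    // configs now holds: { HAL_PIXEL_FORMAT_YCbCr_420_888, 1920, 1080,
    //                      ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT }
}
#endif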
9274
9275/*===========================================================================
9276 * FUNCTION : supportBurstCapture
9277 *
9278 * DESCRIPTION: Whether a particular camera supports BURST_CAPTURE
9279 *
9280 * PARAMETERS :
9281 * @cameraId : camera Id
9282 *
9283 * RETURN : true if camera supports BURST_CAPTURE
9284 * false otherwise
9285 *==========================================================================*/
9286bool QCamera3HardwareInterface::supportBurstCapture(uint32_t cameraId)
9287{
9288 const int64_t highResDurationBound = 50000000; // 50 ms, 20 fps
9289 const int64_t fullResDurationBound = 100000000; // 100 ms, 10 fps
9290 const int32_t highResWidth = 3264;
9291 const int32_t highResHeight = 2448;
9292
9293 if (gCamCapability[cameraId]->picture_min_duration[0] > fullResDurationBound) {
9294 // Maximum resolution images cannot be captured at >= 10fps
9295 // -> not supporting BURST_CAPTURE
9296 return false;
9297 }
9298
9299 if (gCamCapability[cameraId]->picture_min_duration[0] <= highResDurationBound) {
9300 // Maximum resolution images can be captured at >= 20fps
9301 // --> supporting BURST_CAPTURE
9302 return true;
9303 }
9304
9305 // Find the smallest highRes resolution, or largest resolution if there is none
9306 size_t totalCnt = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt,
9307 MAX_SIZES_CNT);
9308 size_t highRes = 0;
9309 while ((highRes + 1 < totalCnt) &&
9310 (gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].width *
9311 gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].height >=
9312 highResWidth * highResHeight)) {
9313 highRes++;
9314 }
9315 if (gCamCapability[cameraId]->picture_min_duration[highRes] <= highResDurationBound) {
9316 return true;
9317 } else {
9318 return false;
9319 }
9320}
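/* Worked example (hypothetical timings): if the full-resolution minimum frame
 * duration is 66666666 ns (~15 fps), it is neither above the 100 ms bound (which
 * would rule BURST_CAPTURE out) nor within the 50 ms bound (which would rule it
 * in), so the decision falls to the high-resolution check: BURST_CAPTURE is
 * advertised only if the smallest size that is still >= 3264x2448 has a minimum
 * duration of at most 50000000 ns (>= 20 fps).
 */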
9321
9322/*===========================================================================
Emilian Peev0f3c3162017-03-15 12:57:46 +00009323 * FUNCTION : getPDStatIndex
9324 *
9325 * DESCRIPTION: Return the meta raw phase detection statistics index if present
9326 *
9327 * PARAMETERS :
9328 * @caps : camera capabilities
9329 *
9330 * RETURN : int32_t type
9331 * non-negative - on success
9332 * -1 - on failure
9333 *==========================================================================*/
9334int32_t QCamera3HardwareInterface::getPDStatIndex(cam_capability_t *caps) {
9335 if (nullptr == caps) {
9336 return -1;
9337 }
9338
9339 uint32_t metaRawCount = caps->meta_raw_channel_count;
9340 int32_t ret = -1;
9341 for (size_t i = 0; i < metaRawCount; i++) {
9342 if (CAM_FORMAT_SUBTYPE_PDAF_STATS == caps->sub_fmt[i]) {
9343 ret = i;
9344 break;
9345 }
9346 }
9347
9348 return ret;
9349}
9350
9351/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07009352 * FUNCTION : initStaticMetadata
9353 *
9354 * DESCRIPTION: initialize the static metadata
9355 *
9356 * PARAMETERS :
9357 * @cameraId : camera Id
9358 *
9359 * RETURN : int32_t type of status
9360 * 0 -- success
9361 * non-zero failure code
9362 *==========================================================================*/
9363int QCamera3HardwareInterface::initStaticMetadata(uint32_t cameraId)
9364{
9365 int rc = 0;
9366 CameraMetadata staticInfo;
9367 size_t count = 0;
9368 bool limitedDevice = false;
9369 char prop[PROPERTY_VALUE_MAX];
9370 bool supportBurst = false;
9371
9372 supportBurst = supportBurstCapture(cameraId);
9373
9374 /* If sensor is YUV sensor (no raw support) or if per-frame control is not
9375 * guaranteed, or if the min fps of the max resolution is less than 20 fps, it is
9376 * advertised as a limited device*/
9377 limitedDevice = gCamCapability[cameraId]->no_per_frame_control_support ||
9378 (CAM_SENSOR_YUV == gCamCapability[cameraId]->sensor_type.sens_type) ||
9379 (CAM_SENSOR_MONO == gCamCapability[cameraId]->sensor_type.sens_type) ||
9380 !supportBurst;
9381
9382 uint8_t supportedHwLvl = limitedDevice ?
9383 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED :
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009384#ifndef USE_HAL_3_3
9385 // LEVEL_3 - This device will support level 3.
9386 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_3;
9387#else
Thierry Strudel3d639192016-09-09 11:52:26 -07009388 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009389#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07009390
9391 staticInfo.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
9392 &supportedHwLvl, 1);
9393
9394 bool facingBack = false;
9395 if ((gCamCapability[cameraId]->position == CAM_POSITION_BACK) ||
9396 (gCamCapability[cameraId]->position == CAM_POSITION_BACK_AUX)) {
9397 facingBack = true;
9398 }
9399 /*HAL 3 only*/
9400 staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
9401 &gCamCapability[cameraId]->min_focus_distance, 1);
9402
9403 staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
9404 &gCamCapability[cameraId]->hyper_focal_distance, 1);
9405
9406 /*should be using focal lengths but sensor doesn't provide that info now*/
9407 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
9408 &gCamCapability[cameraId]->focal_length,
9409 1);
9410
9411 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
9412 gCamCapability[cameraId]->apertures,
9413 MIN(CAM_APERTURES_MAX, gCamCapability[cameraId]->apertures_count));
9414
9415 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
9416 gCamCapability[cameraId]->filter_densities,
9417 MIN(CAM_FILTER_DENSITIES_MAX, gCamCapability[cameraId]->filter_densities_count));
9418
9419
Zhijun Hea6ea1d32017-03-10 13:30:00 -08009420 uint8_t available_opt_stab_modes[CAM_OPT_STAB_MAX];
9421 size_t mode_count =
9422 MIN((size_t)CAM_OPT_STAB_MAX, gCamCapability[cameraId]->optical_stab_modes_count);
9423 for (size_t i = 0; i < mode_count; i++) {
9424 available_opt_stab_modes[i] = gCamCapability[cameraId]->optical_stab_modes[i];
9425 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009426 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
Zhijun Hea6ea1d32017-03-10 13:30:00 -08009427 available_opt_stab_modes, mode_count);
Thierry Strudel3d639192016-09-09 11:52:26 -07009428
9429 int32_t lens_shading_map_size[] = {
9430 MIN(CAM_MAX_SHADING_MAP_WIDTH, gCamCapability[cameraId]->lens_shading_map_size.width),
9431 MIN(CAM_MAX_SHADING_MAP_HEIGHT, gCamCapability[cameraId]->lens_shading_map_size.height)};
9432 staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE,
9433 lens_shading_map_size,
9434 sizeof(lens_shading_map_size)/sizeof(int32_t));
9435
9436 staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
9437 gCamCapability[cameraId]->sensor_physical_size, SENSOR_PHYSICAL_SIZE_CNT);
9438
9439 staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
9440 gCamCapability[cameraId]->exposure_time_range, EXPOSURE_TIME_RANGE_CNT);
9441
9442 staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
9443 &gCamCapability[cameraId]->max_frame_duration, 1);
9444
9445 camera_metadata_rational baseGainFactor = {
9446 gCamCapability[cameraId]->base_gain_factor.numerator,
9447 gCamCapability[cameraId]->base_gain_factor.denominator};
9448 staticInfo.update(ANDROID_SENSOR_BASE_GAIN_FACTOR,
9449 &baseGainFactor, 1);
9450
9451 staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
9452 (uint8_t *)&gCamCapability[cameraId]->color_arrangement, 1);
9453
9454 int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width,
9455 gCamCapability[cameraId]->pixel_array_size.height};
9456 staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
9457 pixel_array_size, sizeof(pixel_array_size)/sizeof(pixel_array_size[0]));
9458
9459 int32_t active_array_size[] = {gCamCapability[cameraId]->active_array_size.left,
9460 gCamCapability[cameraId]->active_array_size.top,
9461 gCamCapability[cameraId]->active_array_size.width,
9462 gCamCapability[cameraId]->active_array_size.height};
9463 staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
9464 active_array_size, sizeof(active_array_size)/sizeof(active_array_size[0]));
9465
9466 staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
9467 &gCamCapability[cameraId]->white_level, 1);
9468
Shuzhen Wanga5da1022016-07-13 20:18:42 -07009469 int32_t adjusted_bl_per_cfa[BLACK_LEVEL_PATTERN_CNT];
9470 adjustBlackLevelForCFA(gCamCapability[cameraId]->black_level_pattern, adjusted_bl_per_cfa,
9471 gCamCapability[cameraId]->color_arrangement);
Thierry Strudel3d639192016-09-09 11:52:26 -07009472 staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
Shuzhen Wanga5da1022016-07-13 20:18:42 -07009473 adjusted_bl_per_cfa, BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel3d639192016-09-09 11:52:26 -07009474
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009475#ifndef USE_HAL_3_3
9476 bool hasBlackRegions = false;
9477 if (gCamCapability[cameraId]->optical_black_region_count > MAX_OPTICAL_BLACK_REGIONS) {
9478 LOGW("black_region_count: %d is bounded to %d",
9479 gCamCapability[cameraId]->optical_black_region_count, MAX_OPTICAL_BLACK_REGIONS);
9480 gCamCapability[cameraId]->optical_black_region_count = MAX_OPTICAL_BLACK_REGIONS;
9481 }
9482 if (gCamCapability[cameraId]->optical_black_region_count != 0) {
9483 int32_t opticalBlackRegions[MAX_OPTICAL_BLACK_REGIONS * 4];
9484 for (size_t i = 0; i < gCamCapability[cameraId]->optical_black_region_count * 4; i++) {
9485 opticalBlackRegions[i] = gCamCapability[cameraId]->optical_black_regions[i];
9486 }
9487 staticInfo.update(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS,
9488 opticalBlackRegions, gCamCapability[cameraId]->optical_black_region_count * 4);
9489 hasBlackRegions = true;
9490 }
9491#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07009492 staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION,
9493 &gCamCapability[cameraId]->flash_charge_duration, 1);
9494
9495 staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS,
9496 &gCamCapability[cameraId]->max_tone_map_curve_points, 1);
9497
Shuzhen Wang98d5efb2016-09-07 18:08:22 -07009498 uint8_t timestampSource = (gCamCapability[cameraId]->timestamp_calibrated ?
9499 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME :
9500 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_UNKNOWN);
Thierry Strudel3d639192016-09-09 11:52:26 -07009501 staticInfo.update(ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
9502 &timestampSource, 1);
9503
Thierry Strudel54dc9782017-02-15 12:12:10 -08009504 //update histogram vendor data
9505 staticInfo.update(QCAMERA3_HISTOGRAM_BUCKETS,
Thierry Strudel3d639192016-09-09 11:52:26 -07009506 &gCamCapability[cameraId]->histogram_size, 1);
9507
Thierry Strudel54dc9782017-02-15 12:12:10 -08009508 staticInfo.update(QCAMERA3_HISTOGRAM_MAX_COUNT,
Thierry Strudel3d639192016-09-09 11:52:26 -07009509 &gCamCapability[cameraId]->max_histogram_count, 1);
9510
Shuzhen Wang14415f52016-11-16 18:26:18 -08009511 //Set supported bins to be {max_bins, max_bins/2, max_bins/4, ...}
9512 //so that the app can request fewer bins than the maximum supported.
9513 std::vector<int32_t> histBins;
9514 int32_t maxHistBins = gCamCapability[cameraId]->max_histogram_count;
9515 histBins.push_back(maxHistBins);
9516 while ((maxHistBins >> 1) >= MIN_CAM_HISTOGRAM_STATS_SIZE &&
9517 (maxHistBins & 0x1) == 0) {
9518 histBins.push_back(maxHistBins >> 1);
9519 maxHistBins >>= 1;
9520 }
9521 staticInfo.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_SUPPORTED_BINS,
9522 histBins.data(), histBins.size());
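 // Example (hypothetical values): with max_histogram_count = 256 and
 // MIN_CAM_HISTOGRAM_STATS_SIZE = 64, the advertised bin counts would be
 // {256, 128, 64}; halving stops once the next value would drop below the
 // minimum or the current value becomes odd.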
9523
Thierry Strudel3d639192016-09-09 11:52:26 -07009524 int32_t sharpness_map_size[] = {
9525 gCamCapability[cameraId]->sharpness_map_size.width,
9526 gCamCapability[cameraId]->sharpness_map_size.height};
9527
9528 staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
9529 sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t));
9530
9531 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
9532 &gCamCapability[cameraId]->max_sharpness_map_value, 1);
9533
Emilian Peev0f3c3162017-03-15 12:57:46 +00009534 int32_t indexPD = getPDStatIndex(gCamCapability[cameraId]);
9535 if (0 <= indexPD) {
9536 // Advertise PD stats data as part of the Depth capabilities
9537 int32_t depthWidth =
9538 gCamCapability[cameraId]->raw_meta_dim[indexPD].width;
9539 int32_t depthHeight =
9540 gCamCapability[cameraId]->raw_meta_dim[indexPD].height;
Emilian Peev656e4fa2017-06-02 16:47:04 +01009541 int32_t depthStride =
9542 gCamCapability[cameraId]->raw_meta_dim[indexPD].width * 2;
Emilian Peev0f3c3162017-03-15 12:57:46 +00009543 int32_t depthSamplesCount = (depthWidth * depthHeight * 2) / 16;
9544 assert(0 < depthSamplesCount);
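 // Arithmetic example (hypothetical PD stats dimensions): a 640x480 meta raw
 // buffer yields depthSamplesCount = (640 * 480 * 2) / 16 = 38400.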
9545 staticInfo.update(ANDROID_DEPTH_MAX_DEPTH_SAMPLES,
9546 &depthSamplesCount, 1);
9547
9548 int32_t depthConfigs[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9549 depthHeight,
9550 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
9551 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1,
9552 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT};
9553 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
9554 depthConfigs, sizeof(depthConfigs)/sizeof(depthConfigs[0]));
9555
9556 int64_t depthMinDuration[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9557 depthHeight, 33333333,
9558 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1, 33333333};
9559 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS,
9560 depthMinDuration,
9561 sizeof(depthMinDuration) / sizeof(depthMinDuration[0]));
9562
9563 int64_t depthStallDuration[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9564 depthHeight, 0,
9565 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1, 0};
9566 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS,
9567 depthStallDuration,
9568 sizeof(depthStallDuration) / sizeof(depthStallDuration[0]));
9569
9570 uint8_t depthExclusive = ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE_FALSE;
9571 staticInfo.update(ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE, &depthExclusive, 1);
Emilian Peev656e4fa2017-06-02 16:47:04 +01009572
9573 int32_t pd_dimensions [] = {depthWidth, depthHeight, depthStride};
9574 staticInfo.update(NEXUS_EXPERIMENTAL_2017_PD_DATA_DIMENSIONS,
9575 pd_dimensions, sizeof(pd_dimensions) / sizeof(pd_dimensions[0]));
Emilian Peev835938b2017-08-31 16:59:54 +01009576
9577 staticInfo.update(NEXUS_EXPERIMENTAL_2017_EEPROM_PDAF_CALIB_RIGHT_GAINS,
9578 reinterpret_cast<uint8_t *>(gCamCapability[cameraId]->pdaf_cal.right_gain_map),
9579 sizeof(gCamCapability[cameraId]->pdaf_cal.right_gain_map));
9580
9581 staticInfo.update(NEXUS_EXPERIMENTAL_2017_EEPROM_PDAF_CALIB_LEFT_GAINS,
9582 reinterpret_cast<uint8_t *>(gCamCapability[cameraId]->pdaf_cal.left_gain_map),
9583 sizeof(gCamCapability[cameraId]->pdaf_cal.left_gain_map));
9584
9585 staticInfo.update(NEXUS_EXPERIMENTAL_2017_EEPROM_PDAF_CALIB_CONV_COEFF,
9586 reinterpret_cast<uint8_t *>(gCamCapability[cameraId]->pdaf_cal.conversion_coeff),
9587 sizeof(gCamCapability[cameraId]->pdaf_cal.conversion_coeff));
Emilian Peev0f3c3162017-03-15 12:57:46 +00009588 }
9589
Thierry Strudel3d639192016-09-09 11:52:26 -07009590 int32_t scalar_formats[] = {
9591 ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE,
9592 ANDROID_SCALER_AVAILABLE_FORMATS_RAW16,
9593 ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888,
9594 ANDROID_SCALER_AVAILABLE_FORMATS_BLOB,
9595 HAL_PIXEL_FORMAT_RAW10,
9596 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED};
Emilian Peev0f3c3162017-03-15 12:57:46 +00009597 size_t scalar_formats_count = sizeof(scalar_formats) / sizeof(scalar_formats[0]);
9598 staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS, scalar_formats,
9599 scalar_formats_count);
Thierry Strudel3d639192016-09-09 11:52:26 -07009600
9601 int32_t available_processed_sizes[MAX_SIZES_CNT * 2];
9602 count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
9603 makeTable(gCamCapability[cameraId]->picture_sizes_tbl,
9604 count, MAX_SIZES_CNT, available_processed_sizes);
9605 staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
9606 available_processed_sizes, count * 2);
9607
9608 int32_t available_raw_sizes[MAX_SIZES_CNT * 2];
9609 count = MIN(gCamCapability[cameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
9610 makeTable(gCamCapability[cameraId]->raw_dim,
9611 count, MAX_SIZES_CNT, available_raw_sizes);
9612 staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_SIZES,
9613 available_raw_sizes, count * 2);
9614
9615 int32_t available_fps_ranges[MAX_SIZES_CNT * 2];
9616 count = MIN(gCamCapability[cameraId]->fps_ranges_tbl_cnt, MAX_SIZES_CNT);
9617 makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl,
9618 count, MAX_SIZES_CNT, available_fps_ranges);
9619 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
9620 available_fps_ranges, count * 2);
9621
9622 camera_metadata_rational exposureCompensationStep = {
9623 gCamCapability[cameraId]->exp_compensation_step.numerator,
9624 gCamCapability[cameraId]->exp_compensation_step.denominator};
9625 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
9626 &exposureCompensationStep, 1);
9627
9628 Vector<uint8_t> availableVstabModes;
9629 availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF);
9630 char eis_prop[PROPERTY_VALUE_MAX];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009631 bool eisSupported = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07009632 memset(eis_prop, 0, sizeof(eis_prop));
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009633 property_get("persist.camera.eis.enable", eis_prop, "1");
Thierry Strudel3d639192016-09-09 11:52:26 -07009634 uint8_t eis_prop_set = (uint8_t)atoi(eis_prop);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009635 count = IS_TYPE_MAX;
9636 count = MIN(gCamCapability[cameraId]->supported_is_types_cnt, count);
9637 for (size_t i = 0; i < count; i++) {
9638 if ((gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
9639 (gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0)) {
9640 eisSupported = true;
9641 break;
9642 }
9643 }
9644 if (facingBack && eis_prop_set && eisSupported) {
Thierry Strudel3d639192016-09-09 11:52:26 -07009645 availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON);
9646 }
9647 staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
9648 availableVstabModes.array(), availableVstabModes.size());
9649
9650 /*HAL 1 and HAL 3 common*/
9651 uint32_t zoomSteps = gCamCapability[cameraId]->zoom_ratio_tbl_cnt;
9652 uint32_t maxZoomStep = gCamCapability[cameraId]->zoom_ratio_tbl[zoomSteps - 1];
9653 uint32_t minZoomStep = 100; //as per HAL1/API1 spec
Zhijun He2a5df222017-04-04 18:20:38 -07009654 // Cap the max zoom to the max preferred value
9655 float maxZoom = MIN(maxZoomStep/minZoomStep, MAX_PREFERRED_ZOOM_RATIO);
Thierry Strudel3d639192016-09-09 11:52:26 -07009656 staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
9657 &maxZoom, 1);
9658
9659 uint8_t croppingType = ANDROID_SCALER_CROPPING_TYPE_CENTER_ONLY;
9660 staticInfo.update(ANDROID_SCALER_CROPPING_TYPE, &croppingType, 1);
9661
9662 int32_t max3aRegions[3] = {/*AE*/1,/*AWB*/ 0,/*AF*/ 1};
9663 if (gCamCapability[cameraId]->supported_focus_modes_cnt == 1)
9664 max3aRegions[2] = 0; /* AF not supported */
9665 staticInfo.update(ANDROID_CONTROL_MAX_REGIONS,
9666 max3aRegions, 3);
9667
9668 /* 0: OFF, 1: OFF+SIMPLE, 2: OFF+FULL, 3: OFF+SIMPLE+FULL */
9669 memset(prop, 0, sizeof(prop));
9670 property_get("persist.camera.facedetect", prop, "1");
9671 uint8_t supportedFaceDetectMode = (uint8_t)atoi(prop);
9672 LOGD("Support face detection mode: %d",
9673 supportedFaceDetectMode);
9674
9675 int32_t maxFaces = gCamCapability[cameraId]->max_num_roi;
Thierry Strudel04e026f2016-10-10 11:27:36 -07009676 /* support mode should be OFF if max number of face is 0 */
9677 if (maxFaces <= 0) {
9678 supportedFaceDetectMode = 0;
9679 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009680 Vector<uint8_t> availableFaceDetectModes;
9681 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_OFF);
9682 if (supportedFaceDetectMode == 1) {
9683 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
9684 } else if (supportedFaceDetectMode == 2) {
9685 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
9686 } else if (supportedFaceDetectMode == 3) {
9687 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
9688 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
9689 } else {
9690 maxFaces = 0;
9691 }
9692 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
9693 availableFaceDetectModes.array(),
9694 availableFaceDetectModes.size());
9695 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
9696 (int32_t *)&maxFaces, 1);
Thierry Strudel54dc9782017-02-15 12:12:10 -08009697 uint8_t face_bsgc = gCamCapability[cameraId]->face_bsgc;
9698 staticInfo.update(QCAMERA3_STATS_BSGC_AVAILABLE,
9699 &face_bsgc, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -07009700
9701 int32_t exposureCompensationRange[] = {
9702 gCamCapability[cameraId]->exposure_compensation_min,
9703 gCamCapability[cameraId]->exposure_compensation_max};
9704 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
9705 exposureCompensationRange,
9706 sizeof(exposureCompensationRange)/sizeof(int32_t));
9707
9708 uint8_t lensFacing = (facingBack) ?
9709 ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
9710 staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1);
9711
9712 staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
9713 available_thumbnail_sizes,
9714 sizeof(available_thumbnail_sizes)/sizeof(int32_t));
9715
9716 /*all sizes will be clubbed into this tag*/
9717 count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
9718 /*android.scaler.availableStreamConfigurations*/
9719 Vector<int32_t> available_stream_configs;
9720 cam_dimension_t active_array_dim;
9721 active_array_dim.width = gCamCapability[cameraId]->active_array_size.width;
9722 active_array_dim.height = gCamCapability[cameraId]->active_array_size.height;
Thierry Strudel2896d122017-02-23 19:18:03 -08009723
9724 /*advertise the list of supported input dimensions based on the property below.
9725 By default all sizes up to 5MP will be advertised.
9726 Note that the setprop resolution format should be WxH.
9727 e.g: adb shell setprop persist.camera.input.minsize 1280x720
9728 To list all supported sizes, setprop needs to be set with "0x0" */
9729 cam_dimension_t minInputSize = {2592,1944}; //5MP
9730 memset(prop, 0, sizeof(prop));
9731 property_get("persist.camera.input.minsize", prop, "2592x1944");
9732 if (strlen(prop) > 0) {
9733 char *saveptr = NULL;
9734 char *token = strtok_r(prop, "x", &saveptr);
9735 if (token != NULL) {
9736 minInputSize.width = atoi(token);
9737 }
9738 token = strtok_r(NULL, "x", &saveptr);
9739 if (token != NULL) {
9740 minInputSize.height = atoi(token);
9741 }
9742 }
9743
Thierry Strudel3d639192016-09-09 11:52:26 -07009744 /* Add input/output stream configurations for each scalar formats*/
9745 for (size_t j = 0; j < scalar_formats_count; j++) {
9746 switch (scalar_formats[j]) {
9747 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
9748 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
9749 case HAL_PIXEL_FORMAT_RAW10:
9750 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9751 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
9752 addStreamConfig(available_stream_configs, scalar_formats[j],
9753 gCamCapability[cameraId]->raw_dim[i],
9754 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
9755 }
9756 break;
9757 case HAL_PIXEL_FORMAT_BLOB:
9758 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9759 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9760 addStreamConfig(available_stream_configs, scalar_formats[j],
9761 gCamCapability[cameraId]->picture_sizes_tbl[i],
9762 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
9763 }
9764 break;
9765 case HAL_PIXEL_FORMAT_YCbCr_420_888:
9766 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
9767 default:
9768 cam_dimension_t largest_picture_size;
9769 memset(&largest_picture_size, 0, sizeof(cam_dimension_t));
9770 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9771 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9772 addStreamConfig(available_stream_configs, scalar_formats[j],
9773 gCamCapability[cameraId]->picture_sizes_tbl[i],
9774 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
Thierry Strudel2896d122017-02-23 19:18:03 -08009775 /*For the below 2 formats we also support input streams for reprocessing; advertise those*/
Zhijun Hee0cc0ae2017-05-19 22:19:27 -07009776 if ((scalar_formats[j] == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
9777 scalar_formats[j] == HAL_PIXEL_FORMAT_YCbCr_420_888) && i == 0) {
Thierry Strudel2896d122017-02-23 19:18:03 -08009778 if ((gCamCapability[cameraId]->picture_sizes_tbl[i].width
9779 >= minInputSize.width) || (gCamCapability[cameraId]->
9780 picture_sizes_tbl[i].height >= minInputSize.height)) {
9781 addStreamConfig(available_stream_configs, scalar_formats[j],
9782 gCamCapability[cameraId]->picture_sizes_tbl[i],
9783 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_INPUT);
9784 }
9785 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009786 }
Thierry Strudel2896d122017-02-23 19:18:03 -08009787
Thierry Strudel3d639192016-09-09 11:52:26 -07009788 break;
9789 }
9790 }
9791
9792 staticInfo.update(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
9793 available_stream_configs.array(), available_stream_configs.size());
9794 static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
9795 staticInfo.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
9796
9797 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
9798 staticInfo.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
9799
9800 /* android.scaler.availableMinFrameDurations */
9801 Vector<int64_t> available_min_durations;
9802 for (size_t j = 0; j < scalar_formats_count; j++) {
9803 switch (scalar_formats[j]) {
9804 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
9805 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
9806 case HAL_PIXEL_FORMAT_RAW10:
9807 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9808 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
9809 available_min_durations.add(scalar_formats[j]);
9810 available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
9811 available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
9812 available_min_durations.add(gCamCapability[cameraId]->raw_min_duration[i]);
9813 }
9814 break;
9815 default:
9816 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9817 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9818 available_min_durations.add(scalar_formats[j]);
9819 available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
9820 available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
9821 available_min_durations.add(gCamCapability[cameraId]->picture_min_duration[i]);
9822 }
9823 break;
9824 }
9825 }
9826 staticInfo.update(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
9827 available_min_durations.array(), available_min_durations.size());
9828
9829 Vector<int32_t> available_hfr_configs;
9830 for (size_t i = 0; i < gCamCapability[cameraId]->hfr_tbl_cnt; i++) {
9831 int32_t fps = 0;
9832 switch (gCamCapability[cameraId]->hfr_tbl[i].mode) {
9833 case CAM_HFR_MODE_60FPS:
9834 fps = 60;
9835 break;
9836 case CAM_HFR_MODE_90FPS:
9837 fps = 90;
9838 break;
9839 case CAM_HFR_MODE_120FPS:
9840 fps = 120;
9841 break;
9842 case CAM_HFR_MODE_150FPS:
9843 fps = 150;
9844 break;
9845 case CAM_HFR_MODE_180FPS:
9846 fps = 180;
9847 break;
9848 case CAM_HFR_MODE_210FPS:
9849 fps = 210;
9850 break;
9851 case CAM_HFR_MODE_240FPS:
9852 fps = 240;
9853 break;
9854 case CAM_HFR_MODE_480FPS:
9855 fps = 480;
9856 break;
9857 case CAM_HFR_MODE_OFF:
9858 case CAM_HFR_MODE_MAX:
9859 default:
9860 break;
9861 }
9862
9863 /* Advertise only MIN_FPS_FOR_BATCH_MODE or above as HIGH_SPEED_CONFIGS */
9864 if (fps >= MIN_FPS_FOR_BATCH_MODE) {
9865 /* For each HFR frame rate, need to advertise one variable fps range
9866 * and one fixed fps range per dimension. Eg: for 120 FPS, advertise [30, 120]
9867 * and [120, 120]. While camcorder preview alone is running [30, 120] is
9868 * set by the app. When video recording is started, [120, 120] is
9869 * set. This way sensor configuration does not change when recording
9870 * is started */
9871
9872 /* (width, height, fps_min, fps_max, batch_size_max) */
9873 for (size_t j = 0; j < gCamCapability[cameraId]->hfr_tbl[i].dim_cnt &&
9874 j < MAX_SIZES_CNT; j++) {
9875 available_hfr_configs.add(
9876 gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
9877 available_hfr_configs.add(
9878 gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
9879 available_hfr_configs.add(PREVIEW_FPS_FOR_HFR);
9880 available_hfr_configs.add(fps);
9881 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
9882
9883 /* (width, height, fps_min, fps_max, batch_size_max) */
9884 available_hfr_configs.add(
9885 gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
9886 available_hfr_configs.add(
9887 gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
9888 available_hfr_configs.add(fps);
9889 available_hfr_configs.add(fps);
9890 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
9891 }
9892 }
9893 }
9894 //Advertise HFR capability only if the property is set
9895 memset(prop, 0, sizeof(prop));
9896 property_get("persist.camera.hal3hfr.enable", prop, "1");
9897 uint8_t hfrEnable = (uint8_t)atoi(prop);
9898
9899 if(hfrEnable && available_hfr_configs.array()) {
9900 staticInfo.update(
9901 ANDROID_CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS,
9902 available_hfr_configs.array(), available_hfr_configs.size());
9903 }
9904
9905 int32_t max_jpeg_size = (int32_t)calcMaxJpegSize(cameraId);
9906 staticInfo.update(ANDROID_JPEG_MAX_SIZE,
9907 &max_jpeg_size, 1);
9908
9909 uint8_t avail_effects[CAM_EFFECT_MODE_MAX];
9910 size_t size = 0;
9911 count = CAM_EFFECT_MODE_MAX;
9912 count = MIN(gCamCapability[cameraId]->supported_effects_cnt, count);
9913 for (size_t i = 0; i < count; i++) {
9914 int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
9915 gCamCapability[cameraId]->supported_effects[i]);
9916 if (NAME_NOT_FOUND != val) {
9917 avail_effects[size] = (uint8_t)val;
9918 size++;
9919 }
9920 }
9921 staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
9922 avail_effects,
9923 size);
9924
9925 uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX];
9926 uint8_t supported_indexes[CAM_SCENE_MODE_MAX];
9927 size_t supported_scene_modes_cnt = 0;
9928 count = CAM_SCENE_MODE_MAX;
9929 count = MIN(gCamCapability[cameraId]->supported_scene_modes_cnt, count);
9930 for (size_t i = 0; i < count; i++) {
9931 if (gCamCapability[cameraId]->supported_scene_modes[i] !=
9932 CAM_SCENE_MODE_OFF) {
9933 int val = lookupFwkName(SCENE_MODES_MAP,
9934 METADATA_MAP_SIZE(SCENE_MODES_MAP),
9935 gCamCapability[cameraId]->supported_scene_modes[i]);
Mansoor Aftab58465fa2017-01-26 15:02:44 -08009936
Thierry Strudel3d639192016-09-09 11:52:26 -07009937 if (NAME_NOT_FOUND != val) {
9938 avail_scene_modes[supported_scene_modes_cnt] = (uint8_t)val;
9939 supported_indexes[supported_scene_modes_cnt] = (uint8_t)i;
9940 supported_scene_modes_cnt++;
9941 }
9942 }
9943 }
9944 staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
9945 avail_scene_modes,
9946 supported_scene_modes_cnt);
9947
9948 uint8_t scene_mode_overrides[CAM_SCENE_MODE_MAX * 3];
9949 makeOverridesList(gCamCapability[cameraId]->scene_mode_overrides,
9950 supported_scene_modes_cnt,
9951 CAM_SCENE_MODE_MAX,
9952 scene_mode_overrides,
9953 supported_indexes,
9954 cameraId);
9955
9956 if (supported_scene_modes_cnt == 0) {
9957 supported_scene_modes_cnt = 1;
9958 avail_scene_modes[0] = ANDROID_CONTROL_SCENE_MODE_DISABLED;
9959 }
9960
9961 staticInfo.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
9962 scene_mode_overrides, supported_scene_modes_cnt * 3);
9963
9964 uint8_t available_control_modes[] = {ANDROID_CONTROL_MODE_OFF,
9965 ANDROID_CONTROL_MODE_AUTO,
9966 ANDROID_CONTROL_MODE_USE_SCENE_MODE};
9967 staticInfo.update(ANDROID_CONTROL_AVAILABLE_MODES,
9968 available_control_modes,
9969 3);
9970
9971 uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX];
9972 size = 0;
9973 count = CAM_ANTIBANDING_MODE_MAX;
9974 count = MIN(gCamCapability[cameraId]->supported_antibandings_cnt, count);
9975 for (size_t i = 0; i < count; i++) {
9976 int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
9977 gCamCapability[cameraId]->supported_antibandings[i]);
9978 if (NAME_NOT_FOUND != val) {
9979 avail_antibanding_modes[size] = (uint8_t)val;
9980 size++;
9981 }
9982
9983 }
9984 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
9985 avail_antibanding_modes,
9986 size);
9987
9988 uint8_t avail_abberation_modes[] = {
9989 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
9990 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
9991 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY};
9992 count = CAM_COLOR_CORRECTION_ABERRATION_MAX;
9993 count = MIN(gCamCapability[cameraId]->aberration_modes_count, count);
9994 if (0 == count) {
9995 // If no aberration correction modes are available for a device, advertise only the OFF mode
9996 size = 1;
9997 } else {
9998 // If count is not zero then at least one of the FAST or HIGH_QUALITY modes is supported
9999 // So, advertise all 3 modes if at least one mode is supported, as per the
10000 // new M requirement
10001 size = 3;
10002 }
10003 staticInfo.update(ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
10004 avail_abberation_modes,
10005 size);
10006
10007 uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX];
10008 size = 0;
10009 count = CAM_FOCUS_MODE_MAX;
10010 count = MIN(gCamCapability[cameraId]->supported_focus_modes_cnt, count);
10011 for (size_t i = 0; i < count; i++) {
10012 int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
10013 gCamCapability[cameraId]->supported_focus_modes[i]);
10014 if (NAME_NOT_FOUND != val) {
10015 avail_af_modes[size] = (uint8_t)val;
10016 size++;
10017 }
10018 }
10019 staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
10020 avail_af_modes,
10021 size);
10022
10023 uint8_t avail_awb_modes[CAM_WB_MODE_MAX];
10024 size = 0;
10025 count = CAM_WB_MODE_MAX;
10026 count = MIN(gCamCapability[cameraId]->supported_white_balances_cnt, count);
10027 for (size_t i = 0; i < count; i++) {
10028 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
10029 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
10030 gCamCapability[cameraId]->supported_white_balances[i]);
10031 if (NAME_NOT_FOUND != val) {
10032 avail_awb_modes[size] = (uint8_t)val;
10033 size++;
10034 }
10035 }
10036 staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
10037 avail_awb_modes,
10038 size);
10039
10040 uint8_t available_flash_levels[CAM_FLASH_FIRING_LEVEL_MAX];
10041 count = CAM_FLASH_FIRING_LEVEL_MAX;
10042 count = MIN(gCamCapability[cameraId]->supported_flash_firing_level_cnt,
10043 count);
10044 for (size_t i = 0; i < count; i++) {
10045 available_flash_levels[i] =
10046 gCamCapability[cameraId]->supported_firing_levels[i];
10047 }
10048 staticInfo.update(ANDROID_FLASH_FIRING_POWER,
10049 available_flash_levels, count);
10050
10051 uint8_t flashAvailable;
10052 if (gCamCapability[cameraId]->flash_available)
10053 flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_TRUE;
10054 else
10055 flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_FALSE;
10056 staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE,
10057 &flashAvailable, 1);
10058
10059 Vector<uint8_t> avail_ae_modes;
10060 count = CAM_AE_MODE_MAX;
10061 count = MIN(gCamCapability[cameraId]->supported_ae_modes_cnt, count);
10062 for (size_t i = 0; i < count; i++) {
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -080010063 uint8_t aeMode = gCamCapability[cameraId]->supported_ae_modes[i];
10064 if (aeMode == CAM_AE_MODE_ON_EXTERNAL_FLASH) {
10065 aeMode = NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH;
10066 }
10067 avail_ae_modes.add(aeMode);
Thierry Strudel3d639192016-09-09 11:52:26 -070010068 }
10069 if (flashAvailable) {
10070 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH);
10071 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH);
10072 }
10073 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
10074 avail_ae_modes.array(),
10075 avail_ae_modes.size());
10076
10077 int32_t sensitivity_range[2];
10078 sensitivity_range[0] = gCamCapability[cameraId]->sensitivity_range.min_sensitivity;
10079 sensitivity_range[1] = gCamCapability[cameraId]->sensitivity_range.max_sensitivity;
10080 staticInfo.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
10081 sensitivity_range,
10082 sizeof(sensitivity_range) / sizeof(int32_t));
10083
10084 staticInfo.update(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
10085 &gCamCapability[cameraId]->max_analog_sensitivity,
10086 1);
10087
10088 int32_t sensor_orientation = (int32_t)gCamCapability[cameraId]->sensor_mount_angle;
10089 staticInfo.update(ANDROID_SENSOR_ORIENTATION,
10090 &sensor_orientation,
10091 1);
10092
10093 int32_t max_output_streams[] = {
10094 MAX_STALLING_STREAMS,
10095 MAX_PROCESSED_STREAMS,
10096 MAX_RAW_STREAMS};
10097 staticInfo.update(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
10098 max_output_streams,
10099 sizeof(max_output_streams)/sizeof(max_output_streams[0]));
10100
10101 uint8_t avail_leds = 0;
10102 staticInfo.update(ANDROID_LED_AVAILABLE_LEDS,
10103 &avail_leds, 0);
10104
10105 uint8_t focus_dist_calibrated;
10106 int val = lookupFwkName(FOCUS_CALIBRATION_MAP, METADATA_MAP_SIZE(FOCUS_CALIBRATION_MAP),
10107 gCamCapability[cameraId]->focus_dist_calibrated);
10108 if (NAME_NOT_FOUND != val) {
10109 focus_dist_calibrated = (uint8_t)val;
10110 staticInfo.update(ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
10111 &focus_dist_calibrated, 1);
10112 }
10113
10114 int32_t avail_testpattern_modes[MAX_TEST_PATTERN_CNT];
10115 size = 0;
10116 count = MIN(gCamCapability[cameraId]->supported_test_pattern_modes_cnt,
10117 MAX_TEST_PATTERN_CNT);
10118 for (size_t i = 0; i < count; i++) {
10119 int testpatternMode = lookupFwkName(TEST_PATTERN_MAP, METADATA_MAP_SIZE(TEST_PATTERN_MAP),
10120 gCamCapability[cameraId]->supported_test_pattern_modes[i]);
10121 if (NAME_NOT_FOUND != testpatternMode) {
10122 avail_testpattern_modes[size] = testpatternMode;
10123 size++;
10124 }
10125 }
10126 staticInfo.update(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
10127 avail_testpattern_modes,
10128 size);
10129
10130 uint8_t max_pipeline_depth = (uint8_t)(MAX_INFLIGHT_REQUESTS + EMPTY_PIPELINE_DELAY + FRAME_SKIP_DELAY);
10131 staticInfo.update(ANDROID_REQUEST_PIPELINE_MAX_DEPTH,
10132 &max_pipeline_depth,
10133 1);
10134
10135 int32_t partial_result_count = PARTIAL_RESULT_COUNT;
10136 staticInfo.update(ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
10137 &partial_result_count,
10138 1);
10139
10140 int32_t max_stall_duration = MAX_REPROCESS_STALL;
10141 staticInfo.update(ANDROID_REPROCESS_MAX_CAPTURE_STALL, &max_stall_duration, 1);
10142
10143 Vector<uint8_t> available_capabilities;
10144 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE);
10145 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR);
10146 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_POST_PROCESSING);
10147 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS);
10148 if (supportBurst) {
10149 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE);
10150 }
10151 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING);
10152 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING);
10153 if (hfrEnable && available_hfr_configs.array()) {
10154 available_capabilities.add(
10155 ANDROID_REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO);
10156 }
10157
10158 if (CAM_SENSOR_YUV != gCamCapability[cameraId]->sensor_type.sens_type) {
10159 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_RAW);
10160 }
10161 staticInfo.update(ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
10162 available_capabilities.array(),
10163 available_capabilities.size());
10164
10165 //aeLockAvailable to be set to true if capabilities include MANUAL_SENSOR or BURST_CAPTURE
10166 //Assumption is that all bayer cameras support MANUAL_SENSOR.
10167 uint8_t aeLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
10168 ANDROID_CONTROL_AE_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AE_LOCK_AVAILABLE_FALSE;
10169
10170 staticInfo.update(ANDROID_CONTROL_AE_LOCK_AVAILABLE,
10171 &aeLockAvailable, 1);
10172
10173 //awbLockAvailable to be set to true if capabilities include MANUAL_POST_PROCESSING or
10174 //BURST_CAPTURE. Assumption is that all bayer cameras support MANUAL_POST_PROCESSING.
10175 uint8_t awbLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
10176 ANDROID_CONTROL_AWB_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AWB_LOCK_AVAILABLE_FALSE;
10177
10178 staticInfo.update(ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
10179 &awbLockAvailable, 1);
10180
10181 int32_t max_input_streams = 1;
10182 staticInfo.update(ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
10183 &max_input_streams,
10184 1);
10185
10186 /* format of the map is : input format, num_output_formats, outputFormat1,..,outputFormatN */
10187 int32_t io_format_map[] = {HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 2,
10188 HAL_PIXEL_FORMAT_BLOB, HAL_PIXEL_FORMAT_YCbCr_420_888,
10189 HAL_PIXEL_FORMAT_YCbCr_420_888, 2, HAL_PIXEL_FORMAT_BLOB,
10190 HAL_PIXEL_FORMAT_YCbCr_420_888};
10191 staticInfo.update(ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
10192 io_format_map, sizeof(io_format_map)/sizeof(io_format_map[0]));
10193
10194 int32_t max_latency = ANDROID_SYNC_MAX_LATENCY_PER_FRAME_CONTROL;
10195 staticInfo.update(ANDROID_SYNC_MAX_LATENCY,
10196 &max_latency,
10197 1);
10198
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010199#ifndef USE_HAL_3_3
10200 int32_t isp_sensitivity_range[2];
10201 isp_sensitivity_range[0] =
10202 gCamCapability[cameraId]->isp_sensitivity_range.min_sensitivity;
10203 isp_sensitivity_range[1] =
10204 gCamCapability[cameraId]->isp_sensitivity_range.max_sensitivity;
10205 staticInfo.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
10206 isp_sensitivity_range,
10207 sizeof(isp_sensitivity_range) / sizeof(isp_sensitivity_range[0]));
10208#endif
10209
Thierry Strudel3d639192016-09-09 11:52:26 -070010210 uint8_t available_hot_pixel_modes[] = {ANDROID_HOT_PIXEL_MODE_FAST,
10211 ANDROID_HOT_PIXEL_MODE_HIGH_QUALITY};
10212 staticInfo.update(ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
10213 available_hot_pixel_modes,
10214 sizeof(available_hot_pixel_modes)/sizeof(available_hot_pixel_modes[0]));
10215
10216 uint8_t available_shading_modes[] = {ANDROID_SHADING_MODE_OFF,
10217 ANDROID_SHADING_MODE_FAST,
10218 ANDROID_SHADING_MODE_HIGH_QUALITY};
10219 staticInfo.update(ANDROID_SHADING_AVAILABLE_MODES,
10220 available_shading_modes,
10221 3);
10222
10223 uint8_t available_lens_shading_map_modes[] = {ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF,
10224 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON};
10225 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
10226 available_lens_shading_map_modes,
10227 2);
10228
10229 uint8_t available_edge_modes[] = {ANDROID_EDGE_MODE_OFF,
10230 ANDROID_EDGE_MODE_FAST,
10231 ANDROID_EDGE_MODE_HIGH_QUALITY,
10232 ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG};
10233 staticInfo.update(ANDROID_EDGE_AVAILABLE_EDGE_MODES,
10234 available_edge_modes,
10235 sizeof(available_edge_modes)/sizeof(available_edge_modes[0]));
10236
10237 uint8_t available_noise_red_modes[] = {ANDROID_NOISE_REDUCTION_MODE_OFF,
10238 ANDROID_NOISE_REDUCTION_MODE_FAST,
10239 ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY,
10240 ANDROID_NOISE_REDUCTION_MODE_MINIMAL,
10241 ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG};
10242 staticInfo.update(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
10243 available_noise_red_modes,
10244 sizeof(available_noise_red_modes)/sizeof(available_noise_red_modes[0]));
10245
10246 uint8_t available_tonemap_modes[] = {ANDROID_TONEMAP_MODE_CONTRAST_CURVE,
10247 ANDROID_TONEMAP_MODE_FAST,
10248 ANDROID_TONEMAP_MODE_HIGH_QUALITY};
10249 staticInfo.update(ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
10250 available_tonemap_modes,
10251 sizeof(available_tonemap_modes)/sizeof(available_tonemap_modes[0]));
10252
10253 uint8_t available_hot_pixel_map_modes[] = {ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF};
10254 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
10255 available_hot_pixel_map_modes,
10256 sizeof(available_hot_pixel_map_modes)/sizeof(available_hot_pixel_map_modes[0]));
10257
10258 val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
10259 gCamCapability[cameraId]->reference_illuminant1);
10260 if (NAME_NOT_FOUND != val) {
10261 uint8_t fwkReferenceIlluminant = (uint8_t)val;
10262 staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT1, &fwkReferenceIlluminant, 1);
10263 }
10264
10265 val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
10266 gCamCapability[cameraId]->reference_illuminant2);
10267 if (NAME_NOT_FOUND != val) {
10268 uint8_t fwkReferenceIlluminant = (uint8_t)val;
10269 staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT2, &fwkReferenceIlluminant, 1);
10270 }
10271
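    // Per the Camera2/DNG color model: COLOR_TRANSFORM1/2 map CIE XYZ to the reference
    // sensor color space under reference illuminants 1 and 2, FORWARD_MATRIX1/2 map
    // white-balanced sensor colors to CIE XYZ with a D50 white point, and
    // CALIBRATION_TRANSFORM1/2 map the reference sensor color space to this unit's
    // actual sensor color space.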
10272 staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX1, (camera_metadata_rational_t *)
10273 (void *)gCamCapability[cameraId]->forward_matrix1,
10274 FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
10275
10276 staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX2, (camera_metadata_rational_t *)
10277 (void *)gCamCapability[cameraId]->forward_matrix2,
10278 FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
10279
10280 staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM1, (camera_metadata_rational_t *)
10281 (void *)gCamCapability[cameraId]->color_transform1,
10282 COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
10283
10284 staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM2, (camera_metadata_rational_t *)
10285 (void *)gCamCapability[cameraId]->color_transform2,
10286 COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
10287
10288 staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM1, (camera_metadata_rational_t *)
10289 (void *)gCamCapability[cameraId]->calibration_transform1,
10290 CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
10291
10292 staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM2, (camera_metadata_rational_t *)
10293 (void *)gCamCapability[cameraId]->calibration_transform2,
10294 CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
10295
10296 int32_t request_keys_basic[] = {ANDROID_COLOR_CORRECTION_MODE,
10297 ANDROID_COLOR_CORRECTION_TRANSFORM, ANDROID_COLOR_CORRECTION_GAINS,
10298 ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
10299 ANDROID_CONTROL_AE_ANTIBANDING_MODE, ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
10300 ANDROID_CONTROL_AE_LOCK, ANDROID_CONTROL_AE_MODE,
10301 ANDROID_CONTROL_AE_REGIONS, ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
10302 ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, ANDROID_CONTROL_AF_MODE,
10303 ANDROID_CONTROL_AF_TRIGGER, ANDROID_CONTROL_AWB_LOCK,
10304 ANDROID_CONTROL_AWB_MODE, ANDROID_CONTROL_CAPTURE_INTENT,
10305 ANDROID_CONTROL_EFFECT_MODE, ANDROID_CONTROL_MODE,
10306 ANDROID_CONTROL_SCENE_MODE, ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
10307 ANDROID_DEMOSAIC_MODE, ANDROID_EDGE_MODE,
10308 ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
10309 ANDROID_JPEG_GPS_COORDINATES,
10310 ANDROID_JPEG_GPS_PROCESSING_METHOD, ANDROID_JPEG_GPS_TIMESTAMP,
10311 ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY, ANDROID_JPEG_THUMBNAIL_QUALITY,
10312 ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE, ANDROID_LENS_FILTER_DENSITY,
10313 ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
10314 ANDROID_LENS_OPTICAL_STABILIZATION_MODE, ANDROID_NOISE_REDUCTION_MODE,
10315 ANDROID_REQUEST_ID, ANDROID_REQUEST_TYPE,
10316 ANDROID_SCALER_CROP_REGION, ANDROID_SENSOR_EXPOSURE_TIME,
10317 ANDROID_SENSOR_FRAME_DURATION, ANDROID_HOT_PIXEL_MODE,
10318 ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE,
10319 ANDROID_SENSOR_SENSITIVITY, ANDROID_SHADING_MODE,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010320#ifndef USE_HAL_3_3
10321 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
10322#endif
Thierry Strudel3d639192016-09-09 11:52:26 -070010323 ANDROID_STATISTICS_FACE_DETECT_MODE,
Thierry Strudel54dc9782017-02-15 12:12:10 -080010324 ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
Thierry Strudel3d639192016-09-09 11:52:26 -070010325 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, ANDROID_TONEMAP_CURVE_BLUE,
10326 ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
Shuzhen Wang2abea3d2016-03-31 11:09:27 -070010327 ANDROID_BLACK_LEVEL_LOCK, NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE,
Samuel Ha68ba5172016-12-15 18:41:12 -080010328 /* DevCamDebug metadata request_keys_basic */
10329 DEVCAMDEBUG_META_ENABLE,
10330 /* DevCamDebug metadata end */
Shuzhen Wang14415f52016-11-16 18:26:18 -080010331 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE,
Ranjith Kagathi Ananda0533b682017-03-24 17:52:46 -070010332 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS,
Ranjith Kagathi Anandae5df3752017-04-28 11:22:51 -070010333 TANGO_MODE_DATA_SENSOR_FULLFOV,
Shuzhen Wangf1b4ddc2017-04-10 18:22:11 -070010334 NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER,
Emilian Peev656e4fa2017-06-02 16:47:04 +010010335 NEXUS_EXPERIMENTAL_2017_PD_DATA_ENABLE,
Samuel Ha68ba5172016-12-15 18:41:12 -080010336 };
Thierry Strudel3d639192016-09-09 11:52:26 -070010337
10338 size_t request_keys_cnt =
10339 sizeof(request_keys_basic)/sizeof(request_keys_basic[0]);
10340 Vector<int32_t> available_request_keys;
10341 available_request_keys.appendArray(request_keys_basic, request_keys_cnt);
10342 if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
10343 available_request_keys.add(ANDROID_CONTROL_AF_REGIONS);
10344 }
10345
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010346 if (gExposeEnableZslKey) {
Chenjie Luo4a761802017-06-13 17:35:54 +000010347 available_request_keys.add(ANDROID_CONTROL_ENABLE_ZSL);
Chien-Yu Chen0a921f92017-08-27 17:25:33 -070010348 available_request_keys.add(NEXUS_EXPERIMENTAL_2017_POSTVIEW);
Chien-Yu Chenb0981e32017-08-28 19:27:35 -070010349 available_request_keys.add(NEXUS_EXPERIMENTAL_2017_CONTINUOUS_ZSL_CAPTURE);
Chien-Yu Chenec328c82017-08-30 16:41:08 -070010350 available_request_keys.add(NEXUS_EXPERIMENTAL_2017_DISABLE_HDRPLUS);
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010351 }
10352
Thierry Strudel3d639192016-09-09 11:52:26 -070010353 staticInfo.update(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS,
10354 available_request_keys.array(), available_request_keys.size());
10355
10356 int32_t result_keys_basic[] = {ANDROID_COLOR_CORRECTION_TRANSFORM,
10357 ANDROID_COLOR_CORRECTION_GAINS, ANDROID_CONTROL_AE_MODE, ANDROID_CONTROL_AE_REGIONS,
10358 ANDROID_CONTROL_AE_STATE, ANDROID_CONTROL_AF_MODE,
10359 ANDROID_CONTROL_AF_STATE, ANDROID_CONTROL_AWB_MODE,
10360 ANDROID_CONTROL_AWB_STATE, ANDROID_CONTROL_MODE, ANDROID_EDGE_MODE,
10361 ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
10362 ANDROID_FLASH_STATE, ANDROID_JPEG_GPS_COORDINATES, ANDROID_JPEG_GPS_PROCESSING_METHOD,
10363 ANDROID_JPEG_GPS_TIMESTAMP, ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY,
10364 ANDROID_JPEG_THUMBNAIL_QUALITY, ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE,
10365 ANDROID_LENS_FILTER_DENSITY, ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
10366 ANDROID_LENS_FOCUS_RANGE, ANDROID_LENS_STATE, ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
10367 ANDROID_NOISE_REDUCTION_MODE, ANDROID_REQUEST_ID,
10368 ANDROID_SCALER_CROP_REGION, ANDROID_SHADING_MODE, ANDROID_SENSOR_EXPOSURE_TIME,
10369 ANDROID_SENSOR_FRAME_DURATION, ANDROID_SENSOR_SENSITIVITY,
10370 ANDROID_SENSOR_TIMESTAMP, ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
10371 ANDROID_SENSOR_PROFILE_TONE_CURVE, ANDROID_BLACK_LEVEL_LOCK, ANDROID_TONEMAP_CURVE_BLUE,
10372 ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
Thierry Strudel54dc9782017-02-15 12:12:10 -080010373 ANDROID_STATISTICS_FACE_DETECT_MODE,
Thierry Strudel3d639192016-09-09 11:52:26 -070010374 ANDROID_STATISTICS_SHARPNESS_MAP, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
10375 ANDROID_STATISTICS_PREDICTED_COLOR_GAINS, ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
10376 ANDROID_STATISTICS_SCENE_FLICKER, ANDROID_STATISTICS_FACE_RECTANGLES,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010377 ANDROID_STATISTICS_FACE_SCORES,
10378#ifndef USE_HAL_3_3
10379 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
10380#endif
Shuzhen Wang2abea3d2016-03-31 11:09:27 -070010381 NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE,
Shuzhen Wange763e802016-03-31 10:24:29 -070010382 NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE,
Samuel Ha68ba5172016-12-15 18:41:12 -080010383 // DevCamDebug metadata result_keys_basic
10384 DEVCAMDEBUG_META_ENABLE,
10385 // DevCamDebug metadata result_keys AF
10386 DEVCAMDEBUG_AF_LENS_POSITION,
10387 DEVCAMDEBUG_AF_TOF_CONFIDENCE,
10388 DEVCAMDEBUG_AF_TOF_DISTANCE,
10389 DEVCAMDEBUG_AF_LUMA,
10390 DEVCAMDEBUG_AF_HAF_STATE,
10391 DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
10392 DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
10393 DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
10394 DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
10395 DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
10396 DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
10397 DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
10398 DEVCAMDEBUG_AF_MONITOR_REFOCUS,
10399 DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
10400 DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
10401 DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
10402 DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
10403 DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
10404 DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
10405 DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
10406 DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
10407 DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
10408 DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
10409 DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
10410 DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
10411 DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
10412 DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
10413 // DevCamDebug metadata result_keys AEC
10414 DEVCAMDEBUG_AEC_TARGET_LUMA,
10415 DEVCAMDEBUG_AEC_COMP_LUMA,
10416 DEVCAMDEBUG_AEC_AVG_LUMA,
10417 DEVCAMDEBUG_AEC_CUR_LUMA,
10418 DEVCAMDEBUG_AEC_LINECOUNT,
10419 DEVCAMDEBUG_AEC_REAL_GAIN,
10420 DEVCAMDEBUG_AEC_EXP_INDEX,
10421 DEVCAMDEBUG_AEC_LUX_IDX,
Samuel Ha34229982017-02-17 13:51:11 -080010422 // DevCamDebug metadata result_keys zzHDR
10423 DEVCAMDEBUG_AEC_L_REAL_GAIN,
10424 DEVCAMDEBUG_AEC_L_LINECOUNT,
10425 DEVCAMDEBUG_AEC_S_REAL_GAIN,
10426 DEVCAMDEBUG_AEC_S_LINECOUNT,
10427 DEVCAMDEBUG_AEC_HDR_SENSITIVITY_RATIO,
10428 DEVCAMDEBUG_AEC_HDR_EXP_TIME_RATIO,
10429 // DevCamDebug metadata result_keys ADRC
10430 DEVCAMDEBUG_AEC_TOTAL_DRC_GAIN,
10431 DEVCAMDEBUG_AEC_COLOR_DRC_GAIN,
10432 DEVCAMDEBUG_AEC_GTM_RATIO,
10433 DEVCAMDEBUG_AEC_LTM_RATIO,
10434 DEVCAMDEBUG_AEC_LA_RATIO,
10435 DEVCAMDEBUG_AEC_GAMMA_RATIO,
Samuel Habdf4fac2017-07-28 17:21:18 -070010436 // DevCamDebug metadata result_keys AEC MOTION
10437 DEVCAMDEBUG_AEC_CAMERA_MOTION_DX,
10438 DEVCAMDEBUG_AEC_CAMERA_MOTION_DY,
10439 DEVCAMDEBUG_AEC_SUBJECT_MOTION,
Samuel Ha68ba5172016-12-15 18:41:12 -080010440 // DevCamDebug metadata result_keys AWB
10441 DEVCAMDEBUG_AWB_R_GAIN,
10442 DEVCAMDEBUG_AWB_G_GAIN,
10443 DEVCAMDEBUG_AWB_B_GAIN,
10444 DEVCAMDEBUG_AWB_CCT,
10445 DEVCAMDEBUG_AWB_DECISION,
10446 /* DevCamDebug metadata end */
Shuzhen Wang14415f52016-11-16 18:26:18 -080010447 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE,
10448 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS,
10449 NEXUS_EXPERIMENTAL_2017_HISTOGRAM,
Shuzhen Wangf1b4ddc2017-04-10 18:22:11 -070010450 NEXUS_EXPERIMENTAL_2017_AF_REGIONS_CONFIDENCE,
Shuzhen Wangc89c77e2017-08-07 15:50:12 -070010451 NEXUS_EXPERIMENTAL_2017_EXP_TIME_BOOST,
Shuzhen Wang3569d4a2017-09-04 19:10:28 -070010452 NEXUS_EXPERIMENTAL_2017_SCENE_DISTANCE,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010453 };
10454
Thierry Strudel3d639192016-09-09 11:52:26 -070010455 size_t result_keys_cnt =
10456 sizeof(result_keys_basic)/sizeof(result_keys_basic[0]);
10457
10458 Vector<int32_t> available_result_keys;
10459 available_result_keys.appendArray(result_keys_basic, result_keys_cnt);
10460 if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
10461 available_result_keys.add(ANDROID_CONTROL_AF_REGIONS);
10462 }
10463 if (CAM_SENSOR_RAW == gCamCapability[cameraId]->sensor_type.sens_type) {
10464 available_result_keys.add(ANDROID_SENSOR_NOISE_PROFILE);
10465 available_result_keys.add(ANDROID_SENSOR_GREEN_SPLIT);
10466 }
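    // supportedFaceDetectMode is assumed to track the highest framework face detect mode
    // supported: SIMPLE (1) adds face rectangles and scores to results, while FULL (2/3
    // here) additionally reports face IDs and landmarks.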
10467 if (supportedFaceDetectMode == 1) {
10468 available_result_keys.add(ANDROID_STATISTICS_FACE_RECTANGLES);
10469 available_result_keys.add(ANDROID_STATISTICS_FACE_SCORES);
10470 } else if ((supportedFaceDetectMode == 2) ||
10471 (supportedFaceDetectMode == 3)) {
10472 available_result_keys.add(ANDROID_STATISTICS_FACE_IDS);
10473 available_result_keys.add(ANDROID_STATISTICS_FACE_LANDMARKS);
10474 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010475#ifndef USE_HAL_3_3
Shuzhen Wanga1bc9de2017-09-14 16:54:02 -070010476 {
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010477 available_result_keys.add(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL);
10478 available_result_keys.add(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL);
10479 }
10480#endif
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010481
10482 if (gExposeEnableZslKey) {
10483 available_result_keys.add(ANDROID_CONTROL_ENABLE_ZSL);
Chien-Yu Chendaf68892017-08-25 12:56:40 -070010484 available_result_keys.add(NEXUS_EXPERIMENTAL_2017_NEXT_STILL_INTENT_REQUEST_READY);
Chien-Yu Chen0a921f92017-08-27 17:25:33 -070010485 available_result_keys.add(NEXUS_EXPERIMENTAL_2017_POSTVIEW_CONFIG);
10486 available_result_keys.add(NEXUS_EXPERIMENTAL_2017_POSTVIEW_DATA);
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010487 }
10488
Thierry Strudel3d639192016-09-09 11:52:26 -070010489 staticInfo.update(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
10490 available_result_keys.array(), available_result_keys.size());
10491
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010492 int32_t characteristics_keys_basic[] = {ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
Thierry Strudel3d639192016-09-09 11:52:26 -070010493 ANDROID_CONTROL_AE_AVAILABLE_MODES, ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
10494 ANDROID_CONTROL_AE_COMPENSATION_RANGE, ANDROID_CONTROL_AE_COMPENSATION_STEP,
10495 ANDROID_CONTROL_AF_AVAILABLE_MODES, ANDROID_CONTROL_AVAILABLE_EFFECTS,
10496 ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
10497 ANDROID_SCALER_CROPPING_TYPE,
10498 ANDROID_SYNC_MAX_LATENCY,
10499 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
10500 ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
10501 ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
10502 ANDROID_CONTROL_AWB_AVAILABLE_MODES, ANDROID_CONTROL_MAX_REGIONS,
10503 ANDROID_CONTROL_SCENE_MODE_OVERRIDES,ANDROID_FLASH_INFO_AVAILABLE,
10504 ANDROID_FLASH_INFO_CHARGE_DURATION, ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
10505 ANDROID_JPEG_MAX_SIZE, ANDROID_LENS_INFO_AVAILABLE_APERTURES,
10506 ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
10507 ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
10508 ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
10509 ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE, ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
10510 ANDROID_LENS_INFO_SHADING_MAP_SIZE, ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
10511 ANDROID_LENS_FACING,
10512 ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS, ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
10513 ANDROID_REQUEST_PIPELINE_MAX_DEPTH, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
10514 ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
10515 ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
10516 ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
10517 ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
10518 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
10519 /*ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,*/
10520 ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS, ANDROID_SENSOR_FORWARD_MATRIX1,
10521 ANDROID_SENSOR_REFERENCE_ILLUMINANT1, ANDROID_SENSOR_REFERENCE_ILLUMINANT2,
10522 ANDROID_SENSOR_FORWARD_MATRIX2, ANDROID_SENSOR_COLOR_TRANSFORM1,
10523 ANDROID_SENSOR_COLOR_TRANSFORM2, ANDROID_SENSOR_CALIBRATION_TRANSFORM1,
10524 ANDROID_SENSOR_CALIBRATION_TRANSFORM2, ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
10525 ANDROID_SENSOR_INFO_SENSITIVITY_RANGE, ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
10526 ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE, ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
10527 ANDROID_SENSOR_INFO_PHYSICAL_SIZE, ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
10528 ANDROID_SENSOR_INFO_WHITE_LEVEL, ANDROID_SENSOR_BASE_GAIN_FACTOR,
10529 ANDROID_SENSOR_BLACK_LEVEL_PATTERN, ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
10530 ANDROID_SENSOR_ORIENTATION, ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
10531 ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
Thierry Strudel54dc9782017-02-15 12:12:10 -080010532 ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
Thierry Strudel3d639192016-09-09 11:52:26 -070010533 ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
10534 ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE, ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
10535 ANDROID_EDGE_AVAILABLE_EDGE_MODES,
10536 ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
10537 ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
10538 ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
10539 ANDROID_TONEMAP_MAX_CURVE_POINTS,
10540 ANDROID_CONTROL_AVAILABLE_MODES,
10541 ANDROID_CONTROL_AE_LOCK_AVAILABLE,
10542 ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
10543 ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
10544 ANDROID_SHADING_AVAILABLE_MODES,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010545 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
10546#ifndef USE_HAL_3_3
10547 ANDROID_SENSOR_OPAQUE_RAW_SIZE,
10548 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
10549#endif
10550 };
10551
10552 Vector<int32_t> available_characteristics_keys;
10553 available_characteristics_keys.appendArray(characteristics_keys_basic,
10554 sizeof(characteristics_keys_basic)/sizeof(int32_t));
10555#ifndef USE_HAL_3_3
10556 if (hasBlackRegions) {
10557 available_characteristics_keys.add(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS);
10558 }
10559#endif
Emilian Peev0f3c3162017-03-15 12:57:46 +000010560
10561 if (0 <= indexPD) {
10562 int32_t depthKeys[] = {
10563 ANDROID_DEPTH_MAX_DEPTH_SAMPLES,
10564 ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
10565 ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS,
10566 ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS,
10567 ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE
10568 };
10569 available_characteristics_keys.appendArray(depthKeys,
10570 sizeof(depthKeys) / sizeof(depthKeys[0]));
10571 }
10572
Thierry Strudel3d639192016-09-09 11:52:26 -070010573 staticInfo.update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010574 available_characteristics_keys.array(),
10575 available_characteristics_keys.size());
Thierry Strudel3d639192016-09-09 11:52:26 -070010576
10577 /* Available stall durations depend on the HW + SW pipeline and differ across devices. */
10578 /* RAW stall durations still have to be added after implementation. */
10579 int32_t stall_formats[] = {HAL_PIXEL_FORMAT_BLOB, ANDROID_SCALER_AVAILABLE_FORMATS_RAW16};
10580 size_t stall_formats_count = sizeof(stall_formats)/sizeof(int32_t);
10581
10582 Vector<int64_t> available_stall_durations;
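    // Each entry appended below is a 4-tuple: (format, width, height, stall duration),
    // one per supported size; the durations are assumed to be in nanoseconds, as required
    // for ANDROID_SCALER_AVAILABLE_STALL_DURATIONS.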
10583 for (uint32_t j = 0; j < stall_formats_count; j++) {
10584 if (stall_formats[j] == HAL_PIXEL_FORMAT_BLOB) {
10585 for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
10586 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
10587 available_stall_durations.add(stall_formats[j]);
10588 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
10589 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
10590 available_stall_durations.add(gCamCapability[cameraId]->jpeg_stall_durations[i]);
10591 }
10592 } else {
10593 for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
10594 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10595 available_stall_durations.add(stall_formats[j]);
10596 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
10597 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
10598 available_stall_durations.add(gCamCapability[cameraId]->raw16_stall_durations[i]);
10599 }
10600 }
10601 }
10602 staticInfo.update(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,
10603 available_stall_durations.array(),
10604 available_stall_durations.size());
10605
10606 //QCAMERA3_OPAQUE_RAW
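    // The sensor white level implies the raw bit depth (8/10/12-bit), which selects the
    // matching QCOM legacy or MIPI packed Bayer format in the switch below.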
10607 uint8_t raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
10608 cam_format_t fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
10609 switch (gCamCapability[cameraId]->opaque_raw_fmt) {
10610 case LEGACY_RAW:
10611 if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
10612 fmt = CAM_FORMAT_BAYER_QCOM_RAW_8BPP_GBRG;
10613 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
10614 fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
10615 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
10616 fmt = CAM_FORMAT_BAYER_QCOM_RAW_12BPP_GBRG;
10617 raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
10618 break;
10619 case MIPI_RAW:
10620 if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
10621 fmt = CAM_FORMAT_BAYER_MIPI_RAW_8BPP_GBRG;
10622 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
10623 fmt = CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG;
10624 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
10625 fmt = CAM_FORMAT_BAYER_MIPI_RAW_12BPP_GBRG;
10626 raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_MIPI;
10627 break;
10628 default:
10629 LOGE("unknown opaque_raw_format %d",
10630 gCamCapability[cameraId]->opaque_raw_fmt);
10631 break;
10632 }
10633 staticInfo.update(QCAMERA3_OPAQUE_RAW_FORMAT, &raw_format, 1);
10634
10635 Vector<int32_t> strides;
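    // QCAMERA3_OPAQUE_RAW_STRIDES entries are (width, height, stride) triplets, one per
    // supported raw dimension, with the stride taken from plane 0 of the computed layout.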
10636 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
10637 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10638 cam_stream_buf_plane_info_t buf_planes;
10639 strides.add(gCamCapability[cameraId]->raw_dim[i].width);
10640 strides.add(gCamCapability[cameraId]->raw_dim[i].height);
10641 mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
10642 &gCamCapability[cameraId]->padding_info, &buf_planes);
10643 strides.add(buf_planes.plane_info.mp[0].stride);
10644 }
10645 staticInfo.update(QCAMERA3_OPAQUE_RAW_STRIDES, strides.array(),
10646 strides.size());
10647
Mansoor Aftab58465fa2017-01-26 15:02:44 -080010648 //TBD: remove the following line once backend advertises zzHDR in feature mask
10649 gCamCapability[cameraId]->qcom_supported_feature_mask |= CAM_QCOM_FEATURE_ZIGZAG_HDR;
Thierry Strudel04e026f2016-10-10 11:27:36 -070010650 //Video HDR default
10651 if ((gCamCapability[cameraId]->qcom_supported_feature_mask) &
10652 (CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR |
Mansoor Aftab58465fa2017-01-26 15:02:44 -080010653 CAM_QCOM_FEATURE_ZIGZAG_HDR | CAM_QCOM_FEATURE_SENSOR_HDR)) {
Thierry Strudel04e026f2016-10-10 11:27:36 -070010654 int32_t vhdr_mode[] = {
10655 QCAMERA3_VIDEO_HDR_MODE_OFF,
10656 QCAMERA3_VIDEO_HDR_MODE_ON};
10657
10658 size_t vhdr_mode_count = sizeof(vhdr_mode) / sizeof(int32_t);
10659 staticInfo.update(QCAMERA3_AVAILABLE_VIDEO_HDR_MODES,
10660 vhdr_mode, vhdr_mode_count);
10661 }
10662
Thierry Strudel3d639192016-09-09 11:52:26 -070010663 staticInfo.update(QCAMERA3_DUALCAM_CALIB_META_DATA_BLOB,
10664 (const uint8_t*)&gCamCapability[cameraId]->related_cam_calibration,
10665 sizeof(gCamCapability[cameraId]->related_cam_calibration));
10666
10667 uint8_t isMonoOnly =
10668 (gCamCapability[cameraId]->color_arrangement == CAM_FILTER_ARRANGEMENT_Y);
10669 staticInfo.update(QCAMERA3_SENSOR_IS_MONO_ONLY,
10670 &isMonoOnly, 1);
10671
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010672#ifndef USE_HAL_3_3
10673 Vector<int32_t> opaque_size;
10674 for (size_t j = 0; j < scalar_formats_count; j++) {
10675 if (scalar_formats[j] == ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE) {
10676 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
10677 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10678 cam_stream_buf_plane_info_t buf_planes;
10679
10680 rc = mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
10681 &gCamCapability[cameraId]->padding_info, &buf_planes);
10682
10683 if (rc == 0) {
10684 opaque_size.add(gCamCapability[cameraId]->raw_dim[i].width);
10685 opaque_size.add(gCamCapability[cameraId]->raw_dim[i].height);
10686 opaque_size.add(buf_planes.plane_info.frame_len);
10687 } else {
10688 LOGE("raw frame calculation failed!");
10689 }
10690 }
10691 }
10692 }
10693
10694 if ((opaque_size.size() > 0) &&
10695 (opaque_size.size() % PER_CONFIGURATION_SIZE_3 == 0))
10696 staticInfo.update(ANDROID_SENSOR_OPAQUE_RAW_SIZE, opaque_size.array(), opaque_size.size());
10697 else
10698 LOGW("Warning: ANDROID_SENSOR_OPAQUE_RAW_SIZE is using rough estimation (2 bytes/pixel)");
10699#endif
10700
Thierry Strudel04e026f2016-10-10 11:27:36 -070010701 if (gCamCapability[cameraId]->supported_ir_mode_cnt > 0) {
10702 int32_t avail_ir_modes[CAM_IR_MODE_MAX];
10703 size = 0;
10704 count = CAM_IR_MODE_MAX;
10705 count = MIN(gCamCapability[cameraId]->supported_ir_mode_cnt, count);
10706 for (size_t i = 0; i < count; i++) {
10707 int val = lookupFwkName(IR_MODES_MAP, METADATA_MAP_SIZE(IR_MODES_MAP),
10708 gCamCapability[cameraId]->supported_ir_modes[i]);
10709 if (NAME_NOT_FOUND != val) {
10710 avail_ir_modes[size] = (int32_t)val;
10711 size++;
10712 }
10713 }
10714 staticInfo.update(QCAMERA3_IR_AVAILABLE_MODES,
10715 avail_ir_modes, size);
10716 }
10717
Thierry Strudel295a0ca2016-11-03 18:38:47 -070010718 if (gCamCapability[cameraId]->supported_instant_aec_modes_cnt > 0) {
10719 int32_t available_instant_aec_modes[CAM_AEC_CONVERGENCE_MAX];
10720 size = 0;
10721 count = CAM_AEC_CONVERGENCE_MAX;
10722 count = MIN(gCamCapability[cameraId]->supported_instant_aec_modes_cnt, count);
10723 for (size_t i = 0; i < count; i++) {
10724 int val = lookupFwkName(INSTANT_AEC_MODES_MAP, METADATA_MAP_SIZE(INSTANT_AEC_MODES_MAP),
10725 gCamCapability[cameraId]->supported_instant_aec_modes[i]);
10726 if (NAME_NOT_FOUND != val) {
10727 available_instant_aec_modes[size] = (int32_t)val;
10728 size++;
10729 }
10730 }
10731 staticInfo.update(QCAMERA3_INSTANT_AEC_AVAILABLE_MODES,
10732 available_instant_aec_modes, size);
10733 }
10734
Thierry Strudel54dc9782017-02-15 12:12:10 -080010735 int32_t sharpness_range[] = {
10736 gCamCapability[cameraId]->sharpness_ctrl.min_value,
10737 gCamCapability[cameraId]->sharpness_ctrl.max_value};
10738 staticInfo.update(QCAMERA3_SHARPNESS_RANGE, sharpness_range, 2);
10739
10740 if (gCamCapability[cameraId]->supported_binning_correction_mode_cnt > 0) {
10741 int32_t avail_binning_modes[CAM_BINNING_CORRECTION_MODE_MAX];
10742 size = 0;
10743 count = CAM_BINNING_CORRECTION_MODE_MAX;
10744 count = MIN(gCamCapability[cameraId]->supported_binning_correction_mode_cnt, count);
10745 for (size_t i = 0; i < count; i++) {
10746 int val = lookupFwkName(BINNING_CORRECTION_MODES_MAP,
10747 METADATA_MAP_SIZE(BINNING_CORRECTION_MODES_MAP),
10748 gCamCapability[cameraId]->supported_binning_modes[i]);
10749 if (NAME_NOT_FOUND != val) {
10750 avail_binning_modes[size] = (int32_t)val;
10751 size++;
10752 }
10753 }
10754 staticInfo.update(QCAMERA3_AVAILABLE_BINNING_CORRECTION_MODES,
10755 avail_binning_modes, size);
10756 }
10757
10758 if (gCamCapability[cameraId]->supported_aec_modes_cnt > 0) {
10759 int32_t available_aec_modes[CAM_AEC_MODE_MAX];
10760 size = 0;
10761 count = MIN(gCamCapability[cameraId]->supported_aec_modes_cnt, CAM_AEC_MODE_MAX);
10762 for (size_t i = 0; i < count; i++) {
10763 int32_t val = lookupFwkName(AEC_MODES_MAP, METADATA_MAP_SIZE(AEC_MODES_MAP),
10764 gCamCapability[cameraId]->supported_aec_modes[i]);
10765 if (NAME_NOT_FOUND != val)
10766 available_aec_modes[size++] = val;
10767 }
10768 staticInfo.update(QCAMERA3_EXPOSURE_METER_AVAILABLE_MODES,
10769 available_aec_modes, size);
10770 }
10771
10772 if (gCamCapability[cameraId]->supported_iso_modes_cnt > 0) {
10773 int32_t available_iso_modes[CAM_ISO_MODE_MAX];
10774 size = 0;
10775 count = MIN(gCamCapability[cameraId]->supported_iso_modes_cnt, CAM_ISO_MODE_MAX);
10776 for (size_t i = 0; i < count; i++) {
10777 int32_t val = lookupFwkName(ISO_MODES_MAP, METADATA_MAP_SIZE(ISO_MODES_MAP),
10778 gCamCapability[cameraId]->supported_iso_modes[i]);
10779 if (NAME_NOT_FOUND != val)
10780 available_iso_modes[size++] = val;
10781 }
10782 staticInfo.update(QCAMERA3_ISO_AVAILABLE_MODES,
10783 available_iso_modes, size);
10784 }
10785
10786 int64_t available_exp_time_range[EXPOSURE_TIME_RANGE_CNT];
Jason Lee805955a2017-05-04 10:29:14 -070010787 for (size_t i = 0; i < EXPOSURE_TIME_RANGE_CNT; i++)
Thierry Strudel54dc9782017-02-15 12:12:10 -080010788 available_exp_time_range[i] = gCamCapability[cameraId]->exposure_time_range[i];
10789 staticInfo.update(QCAMERA3_EXP_TIME_RANGE,
10790 available_exp_time_range, EXPOSURE_TIME_RANGE_CNT);
10791
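    // QCAMERA3_SATURATION_RANGE layout: {min, max, default, step}.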
10792 int32_t available_saturation_range[4];
10793 available_saturation_range[0] = gCamCapability[cameraId]->saturation_ctrl.min_value;
10794 available_saturation_range[1] = gCamCapability[cameraId]->saturation_ctrl.max_value;
10795 available_saturation_range[2] = gCamCapability[cameraId]->saturation_ctrl.def_value;
10796 available_saturation_range[3] = gCamCapability[cameraId]->saturation_ctrl.step;
10797 staticInfo.update(QCAMERA3_SATURATION_RANGE,
10798 available_saturation_range, 4);
10799
10800 uint8_t is_hdr_values[2];
10801 is_hdr_values[0] = 0;
10802 is_hdr_values[1] = 1;
10803 staticInfo.update(QCAMERA3_STATS_IS_HDR_SCENE_VALUES,
10804 is_hdr_values, 2);
10805
10806 float is_hdr_confidence_range[2];
10807 is_hdr_confidence_range[0] = 0.0;
10808 is_hdr_confidence_range[1] = 1.0;
10809 staticInfo.update(QCAMERA3_STATS_IS_HDR_SCENE_CONFIDENCE_RANGE,
10810 is_hdr_confidence_range, 2);
10811
Emilian Peev0a972ef2017-03-16 10:25:53 +000010812 size_t eepromLength = strnlen(
10813 reinterpret_cast<const char *>(
10814 gCamCapability[cameraId]->eeprom_version_info),
10815 sizeof(gCamCapability[cameraId]->eeprom_version_info));
10816 if (0 < eepromLength) {
Zhijun Hea557c4c2017-03-16 18:37:53 -070010817 char easelInfo[] = ",E:N";
10818 char *eepromInfo = reinterpret_cast<char *>(gCamCapability[cameraId]->eeprom_version_info);
10819 if (eepromLength + sizeof(easelInfo) < MAX_EEPROM_VERSION_INFO_LEN) {
10820 eepromLength += sizeof(easelInfo);
Chien-Yu Chend77a5462017-06-02 18:00:38 -070010821 strlcat(eepromInfo, ((gEaselManagerClient != nullptr &&
Arnd Geis082a4d72017-08-24 10:33:07 -070010822 gEaselManagerClient->isEaselPresentOnDevice()) ? ",E-ver" : ",E:N"),
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010823 MAX_EEPROM_VERSION_INFO_LEN);
Zhijun Hea557c4c2017-03-16 18:37:53 -070010824 }
Emilian Peev0a972ef2017-03-16 10:25:53 +000010825 staticInfo.update(NEXUS_EXPERIMENTAL_2017_EEPROM_VERSION_INFO,
10826 gCamCapability[cameraId]->eeprom_version_info, eepromLength);
10827 }
10828
Thierry Strudel3d639192016-09-09 11:52:26 -070010829 gStaticMetadata[cameraId] = staticInfo.release();
10830 return rc;
10831}
10832
10833/*===========================================================================
10834 * FUNCTION : makeTable
10835 *
10836 * DESCRIPTION: flatten a table of dimensions into an array of (width, height) pairs
10837 *
10838 * PARAMETERS : @dimTable : input dimension table; @size : number of valid entries;
10839 *              @max_size : maximum number of entries to copy;
10840 *              @sizeTable : output array receiving the width/height pairs
10841 *==========================================================================*/
10842void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, size_t size,
10843 size_t max_size, int32_t *sizeTable)
10844{
10845 size_t j = 0;
10846 if (size > max_size) {
10847 size = max_size;
10848 }
10849 for (size_t i = 0; i < size; i++) {
10850 sizeTable[j] = dimTable[i].width;
10851 sizeTable[j+1] = dimTable[i].height;
10852 j+=2;
10853 }
10854}
10855
10856/*===========================================================================
10857 * FUNCTION : makeFPSTable
10858 *
10859 * DESCRIPTION: flatten a table of FPS ranges into an array of (min_fps, max_fps) pairs
10860 *
10861 * PARAMETERS : @fpsTable : input FPS range table; @size : number of valid entries;
10862 *              @max_size : maximum entries to copy; @fpsRangesTable : output array
10863 *==========================================================================*/
10864void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, size_t size,
10865 size_t max_size, int32_t *fpsRangesTable)
10866{
10867 size_t j = 0;
10868 if (size > max_size) {
10869 size = max_size;
10870 }
10871 for (size_t i = 0; i < size; i++) {
10872 fpsRangesTable[j] = (int32_t)fpsTable[i].min_fps;
10873 fpsRangesTable[j+1] = (int32_t)fpsTable[i].max_fps;
10874 j+=2;
10875 }
10876}
10877
10878/*===========================================================================
10879 * FUNCTION : makeOverridesList
10880 *
10881 * DESCRIPTION: build the scene mode overrides list; each supported scene mode
10882 *              contributes a triplet of (AE mode, AWB mode, AF mode)
10883 *
10884 * PARAMETERS : @overridesTable : per-scene-mode overrides from the backend; @size/@max_size :
10885 *              table size and capacity; @overridesList : output list; @supported_indexes : framework-supported scene mode indexes; @camera_id : camera id
10886 *==========================================================================*/
10887void QCamera3HardwareInterface::makeOverridesList(
10888 cam_scene_mode_overrides_t* overridesTable, size_t size, size_t max_size,
10889 uint8_t *overridesList, uint8_t *supported_indexes, uint32_t camera_id)
10890{
10891 /* The daemon provides overrides for all scene modes.
10892 However, only the overrides for the scene modes supported by
10893 the framework should be sent to it. */
10894 size_t j = 0;
10895 if (size > max_size) {
10896 size = max_size;
10897 }
10898 size_t focus_count = CAM_FOCUS_MODE_MAX;
10899 focus_count = MIN(gCamCapability[camera_id]->supported_focus_modes_cnt,
10900 focus_count);
10901 for (size_t i = 0; i < size; i++) {
10902 bool supt = false;
10903 size_t index = supported_indexes[i];
10904 overridesList[j] = gCamCapability[camera_id]->flash_available ?
10905 ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH : ANDROID_CONTROL_AE_MODE_ON;
10906 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
10907 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
10908 overridesTable[index].awb_mode);
10909 if (NAME_NOT_FOUND != val) {
10910 overridesList[j+1] = (uint8_t)val;
10911 }
10912 uint8_t focus_override = overridesTable[index].af_mode;
10913 for (size_t k = 0; k < focus_count; k++) {
10914 if (gCamCapability[camera_id]->supported_focus_modes[k] == focus_override) {
10915 supt = true;
10916 break;
10917 }
10918 }
10919 if (supt) {
10920 val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
10921 focus_override);
10922 if (NAME_NOT_FOUND != val) {
10923 overridesList[j+2] = (uint8_t)val;
10924 }
10925 } else {
10926 overridesList[j+2] = ANDROID_CONTROL_AF_MODE_OFF;
10927 }
10928 j+=3;
10929 }
10930}
10931
10932/*===========================================================================
10933 * FUNCTION : filterJpegSizes
10934 *
10935 * DESCRIPTION: filter the processed sizes down to the supported JPEG sizes, i.e.
10936 * those no smaller than the active array size divided by the downscale factor
10937 *
10938 * PARAMETERS : @jpegSizes : output size array; @processedSizes/@processedSizesCnt :
10939 *              input sizes; @maxCount : capacity; @active_array_size, @downscale_factor : filter bounds
10940 * RETURN : length of jpegSizes array
10941 *==========================================================================*/
10942
10943size_t QCamera3HardwareInterface::filterJpegSizes(int32_t *jpegSizes, int32_t *processedSizes,
10944 size_t processedSizesCnt, size_t maxCount, cam_rect_t active_array_size,
10945 uint8_t downscale_factor)
10946{
10947 if (0 == downscale_factor) {
10948 downscale_factor = 1;
10949 }
10950
10951 int32_t min_width = active_array_size.width / downscale_factor;
10952 int32_t min_height = active_array_size.height / downscale_factor;
10953 size_t jpegSizesCnt = 0;
10954 if (processedSizesCnt > maxCount) {
10955 processedSizesCnt = maxCount;
10956 }
10957 for (size_t i = 0; i < processedSizesCnt; i+=2) {
10958 if (processedSizes[i] >= min_width && processedSizes[i+1] >= min_height) {
10959 jpegSizes[jpegSizesCnt] = processedSizes[i];
10960 jpegSizes[jpegSizesCnt+1] = processedSizes[i+1];
10961 jpegSizesCnt += 2;
10962 }
10963 }
10964 return jpegSizesCnt;
10965}
10966
10967/*===========================================================================
10968 * FUNCTION : computeNoiseModelEntryS
10969 *
10970 * DESCRIPTION: function to map a given sensitivity to the S noise
10971 * model parameters in the DNG noise model.
10972 *
10973 * PARAMETERS : sens : the sensor sensitivity
10974 *
10975 * RETURN : S (sensor amplification) noise
10976 *
10977 *==========================================================================*/
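// A sketch of how these two helpers are consumed (per the Camera2/DNG noise model; the
// exact usage is an assumption here): for a normalized signal level x captured at
// sensitivity 'sens', noise is modeled as
//     N(x) = sqrt(S * x + O)
// where S = gradient_S * sens + offset_S (computeNoiseModelEntryS) and O is computed by
// computeNoiseModelEntryO(); (S, O) pairs typically populate ANDROID_SENSOR_NOISE_PROFILE.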
10978double QCamera3HardwareInterface::computeNoiseModelEntryS(int32_t sens) {
10979 double s = gCamCapability[mCameraId]->gradient_S * sens +
10980 gCamCapability[mCameraId]->offset_S;
10981 return ((s < 0.0) ? 0.0 : s);
10982}
10983
10984/*===========================================================================
10985 * FUNCTION : computeNoiseModelEntryO
10986 *
10987 * DESCRIPTION: function to map a given sensitivity to the O noise
10988 * model parameters in the DNG noise model.
10989 *
10990 * PARAMETERS : sens : the sensor sensitivity
10991 *
10992 * RETURN : O (sensor readout) noise
10993 *
10994 *==========================================================================*/
10995double QCamera3HardwareInterface::computeNoiseModelEntryO(int32_t sens) {
10996 int32_t max_analog_sens = gCamCapability[mCameraId]->max_analog_sensitivity;
10997 double digital_gain = (1.0 * sens / max_analog_sens) < 1.0 ?
10998 1.0 : (1.0 * sens / max_analog_sens);
10999 double o = gCamCapability[mCameraId]->gradient_O * sens * sens +
11000 gCamCapability[mCameraId]->offset_O * digital_gain * digital_gain;
11001 return ((o < 0.0) ? 0.0 : o);
11002}
11003
11004/*===========================================================================
11005 * FUNCTION : getSensorSensitivity
11006 *
11007 * DESCRIPTION: convert iso_mode to an integer value
11008 *
11009 * PARAMETERS : iso_mode : the iso_mode supported by sensor
11010 *
11011 ** RETURN : sensitivity supported by sensor
11012 *
11013 *==========================================================================*/
11014int32_t QCamera3HardwareInterface::getSensorSensitivity(int32_t iso_mode)
11015{
11016 int32_t sensitivity;
11017
11018 switch (iso_mode) {
11019 case CAM_ISO_MODE_100:
11020 sensitivity = 100;
11021 break;
11022 case CAM_ISO_MODE_200:
11023 sensitivity = 200;
11024 break;
11025 case CAM_ISO_MODE_400:
11026 sensitivity = 400;
11027 break;
11028 case CAM_ISO_MODE_800:
11029 sensitivity = 800;
11030 break;
11031 case CAM_ISO_MODE_1600:
11032 sensitivity = 1600;
11033 break;
11034 default:
11035 sensitivity = -1;
11036 break;
11037 }
11038 return sensitivity;
11039}
11040
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080011041int QCamera3HardwareInterface::initHdrPlusClientLocked() {
Chien-Yu Chend77a5462017-06-02 18:00:38 -070011042 if (gEaselManagerClient == nullptr) {
11043 gEaselManagerClient = EaselManagerClient::create();
11044 if (gEaselManagerClient == nullptr) {
11045 ALOGE("%s: Failed to create Easel manager client.", __FUNCTION__);
11046 return -ENODEV;
11047 }
11048 }
11049
11050 if (!EaselManagerClientOpened && gEaselManagerClient->isEaselPresentOnDevice()) {
Chien-Yu Chen08309b32017-03-13 17:41:32 -070011051 // Check if HAL should not power on Easel even if it's present. This is to allow HDR+ tests
11052 // to connect to Easel.
11053 bool doNotpowerOnEasel =
11054 property_get_bool("camera.hdrplus.donotpoweroneasel", false);
11055
11056 if (doNotpowerOnEasel) {
Chien-Yu Chen08309b32017-03-13 17:41:32 -070011057 ALOGI("%s: Easel is present but not powered on.", __FUNCTION__);
11058 return OK;
11059 }
11060
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080011061 // If Easel is present, power on Easel and suspend it immediately.
Chien-Yu Chend77a5462017-06-02 18:00:38 -070011062 status_t res = gEaselManagerClient->open();
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080011063 if (res != OK) {
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070011064 ALOGE("%s: Opening Easel manager client failed: %s (%d)", __FUNCTION__, strerror(-res),
11065 res);
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080011066 return res;
11067 }
11068
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070011069 EaselManagerClientOpened = true;
11070
Chien-Yu Chend77a5462017-06-02 18:00:38 -070011071 res = gEaselManagerClient->suspend();
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080011072 if (res != OK) {
11073 ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__, strerror(-res), res);
11074 }
11075
Zhijun Hedaacd8a2017-09-14 12:07:42 -070011076 gEaselBypassOnly = !property_get_bool("persist.camera.hdrplus.enable", false);
Chien-Yu Chen509314b2017-04-07 15:27:55 -070011077 gEaselProfilingEnabled = property_get_bool("persist.camera.hdrplus.profiling", false);
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070011078 gEnableMultipleHdrplusOutputs =
11079 property_get_bool("persist.camera.hdrplus.multiple_outputs", false);
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070011080
11081 // Expose enableZsl key only when HDR+ mode is enabled.
11082 gExposeEnableZslKey = !gEaselBypassOnly;
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080011083 }
11084
11085 return OK;
11086}
11087
Thierry Strudel3d639192016-09-09 11:52:26 -070011088/*===========================================================================
11089 * FUNCTION : getCamInfo
11090 *
11091 * DESCRIPTION: query camera capabilities
11092 *
11093 * PARAMETERS :
11094 * @cameraId : camera Id
11095 * @info : camera info struct to be filled in with camera capabilities
11096 *
11097 * RETURN : int type of status
11098 * NO_ERROR -- success
11099 * non-zero failure code
11100 *==========================================================================*/
11101int QCamera3HardwareInterface::getCamInfo(uint32_t cameraId,
11102 struct camera_info *info)
11103{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080011104 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_GET_CAM_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -070011105 int rc = 0;
11106
11107 pthread_mutex_lock(&gCamLock);
Zhijun Hea557c4c2017-03-16 18:37:53 -070011108
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070011109 {
Chien-Yu Chen77ccd022017-06-23 12:00:36 -070011110 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070011111 rc = initHdrPlusClientLocked();
11112 if (rc != OK) {
11113 ALOGE("%s: initHdrPlusClientLocked failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
11114 pthread_mutex_unlock(&gCamLock);
11115 return rc;
11116 }
Zhijun Hea557c4c2017-03-16 18:37:53 -070011117 }
11118
Thierry Strudel3d639192016-09-09 11:52:26 -070011119 if (NULL == gCamCapability[cameraId]) {
11120 rc = initCapabilities(cameraId);
11121 if (rc < 0) {
11122 pthread_mutex_unlock(&gCamLock);
11123 return rc;
11124 }
11125 }
11126
11127 if (NULL == gStaticMetadata[cameraId]) {
11128 rc = initStaticMetadata(cameraId);
11129 if (rc < 0) {
11130 pthread_mutex_unlock(&gCamLock);
11131 return rc;
11132 }
11133 }
11134
11135 switch(gCamCapability[cameraId]->position) {
11136 case CAM_POSITION_BACK:
11137 case CAM_POSITION_BACK_AUX:
11138 info->facing = CAMERA_FACING_BACK;
11139 break;
11140
11141 case CAM_POSITION_FRONT:
11142 case CAM_POSITION_FRONT_AUX:
11143 info->facing = CAMERA_FACING_FRONT;
11144 break;
11145
11146 default:
11147 LOGE("Unknown position type %d for camera id:%d",
11148 gCamCapability[cameraId]->position, cameraId);
11149 rc = -1;
11150 break;
11151 }
11152
11153
11154 info->orientation = (int)gCamCapability[cameraId]->sensor_mount_angle;
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011155#ifndef USE_HAL_3_3
11156 info->device_version = CAMERA_DEVICE_API_VERSION_3_4;
11157#else
Thierry Strudel3d639192016-09-09 11:52:26 -070011158 info->device_version = CAMERA_DEVICE_API_VERSION_3_3;
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011159#endif
Thierry Strudel3d639192016-09-09 11:52:26 -070011160 info->static_camera_characteristics = gStaticMetadata[cameraId];
11161
11162 //For now assume both cameras can operate independently.
11163 info->conflicting_devices = NULL;
11164 info->conflicting_devices_length = 0;
11165
11166 //resource cost is 100 * MIN(1.0, m/M),
11167 //where m is throughput requirement with maximum stream configuration
11168 //and M is CPP maximum throughput.
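    // Illustrative only (hypothetical numbers): with 2 processed streams of a 12 MP
    // active array at 30 fps and M = 1.2 GP/s, m/M = 2 * 12e6 * 30 / 1.2e9 = 0.6,
    // giving a resource cost of 60.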
11169 float max_fps = 0.0;
11170 for (uint32_t i = 0;
11171 i < gCamCapability[cameraId]->fps_ranges_tbl_cnt; i++) {
11172 if (max_fps < gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps)
11173 max_fps = gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps;
11174 }
11175 float ratio = 1.0 * MAX_PROCESSED_STREAMS *
11176 gCamCapability[cameraId]->active_array_size.width *
11177 gCamCapability[cameraId]->active_array_size.height * max_fps /
11178 gCamCapability[cameraId]->max_pixel_bandwidth;
11179 info->resource_cost = 100 * MIN(1.0, ratio);
11180 LOGI("camera %d resource cost is %d", cameraId,
11181 info->resource_cost);
11182
11183 pthread_mutex_unlock(&gCamLock);
11184 return rc;
11185}
11186
11187/*===========================================================================
11188 * FUNCTION : translateCapabilityToMetadata
11189 *
11190 * DESCRIPTION: translate the capability into camera_metadata_t
11191 *
11192 * PARAMETERS : type of the request
11193 *
11194 *
11195 * RETURN : success: camera_metadata_t*
11196 * failure: NULL
11197 *
11198 *==========================================================================*/
11199camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type)
11200{
11201 if (mDefaultMetadata[type] != NULL) {
11202 return mDefaultMetadata[type];
11203 }
11204 //first time we are handling this request
11205 //fill up the metadata structure using the wrapper class
11206 CameraMetadata settings;
11207 //translate from cam_capability_t to camera_metadata_tag_t
11208 static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
11209 settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
11210 int32_t defaultRequestID = 0;
11211 settings.update(ANDROID_REQUEST_ID, &defaultRequestID, 1);
11212
11213 /* OIS disable */
11214 char ois_prop[PROPERTY_VALUE_MAX];
11215 memset(ois_prop, 0, sizeof(ois_prop));
11216 property_get("persist.camera.ois.disable", ois_prop, "0");
11217 uint8_t ois_disable = (uint8_t)atoi(ois_prop);
11218
11219 /* Force video to use OIS */
11220 char videoOisProp[PROPERTY_VALUE_MAX];
11221 memset(videoOisProp, 0, sizeof(videoOisProp));
11222 property_get("persist.camera.ois.video", videoOisProp, "1");
11223 uint8_t forceVideoOis = (uint8_t)atoi(videoOisProp);
Shuzhen Wang19463d72016-03-08 11:09:52 -080011224
11225 // Hybrid AE enable/disable
11226 char hybrid_ae_prop[PROPERTY_VALUE_MAX];
11227 memset(hybrid_ae_prop, 0, sizeof(hybrid_ae_prop));
11228 property_get("persist.camera.hybrid_ae.enable", hybrid_ae_prop, "0");
Shuzhen Wang77b049a2017-08-30 12:24:36 -070011229 uint8_t hybrid_ae = (uint8_t)atoi(hybrid_ae_prop);
Shuzhen Wang19463d72016-03-08 11:09:52 -080011230
Thierry Strudel3d639192016-09-09 11:52:26 -070011231 uint8_t controlIntent = 0;
11232 uint8_t focusMode;
11233 uint8_t vsMode;
11234 uint8_t optStabMode;
11235 uint8_t cacMode;
11236 uint8_t edge_mode;
11237 uint8_t noise_red_mode;
11238 uint8_t tonemap_mode;
11239 bool highQualityModeEntryAvailable = FALSE;
11240 bool fastModeEntryAvailable = FALSE;
Shuzhen Wang14415f52016-11-16 18:26:18 -080011241 uint8_t histogramEnable = false;
Thierry Strudel3d639192016-09-09 11:52:26 -070011242 vsMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
11243 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Shuzhen Wang8f66c042016-08-17 14:50:26 -070011244 uint8_t shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
Shuzhen Wangcc386c52017-03-29 09:28:08 -070011245 uint8_t trackingAfTrigger = NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER_IDLE;
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070011246 uint8_t enableZsl = ANDROID_CONTROL_ENABLE_ZSL_FALSE;
Mansoor Aftabea39eba2017-01-26 14:58:25 -080011247
Thierry Strudel3d639192016-09-09 11:52:26 -070011248 switch (type) {
11249 case CAMERA3_TEMPLATE_PREVIEW:
11250 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
11251 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
11252 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11253 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11254 edge_mode = ANDROID_EDGE_MODE_FAST;
11255 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11256 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11257 break;
11258 case CAMERA3_TEMPLATE_STILL_CAPTURE:
11259 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
11260 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
11261 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11262 edge_mode = ANDROID_EDGE_MODE_HIGH_QUALITY;
11263 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY;
11264 tonemap_mode = ANDROID_TONEMAP_MODE_HIGH_QUALITY;
11265 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
11266 // Order of priority for default CAC is HIGH Quality -> FAST -> OFF
11267 for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
11268 if (gCamCapability[mCameraId]->aberration_modes[i] ==
11269 CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
11270 highQualityModeEntryAvailable = TRUE;
11271 } else if (gCamCapability[mCameraId]->aberration_modes[i] ==
11272 CAM_COLOR_CORRECTION_ABERRATION_FAST) {
11273 fastModeEntryAvailable = TRUE;
11274 }
11275 }
11276 if (highQualityModeEntryAvailable) {
11277 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY;
11278 } else if (fastModeEntryAvailable) {
11279 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11280 }
Shuzhen Wang8f66c042016-08-17 14:50:26 -070011281 if (CAM_SENSOR_RAW == gCamCapability[mCameraId]->sensor_type.sens_type) {
11282 shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON;
11283 }
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070011284 enableZsl = ANDROID_CONTROL_ENABLE_ZSL_TRUE;
Thierry Strudel3d639192016-09-09 11:52:26 -070011285 break;
11286 case CAMERA3_TEMPLATE_VIDEO_RECORD:
11287 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
11288 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
11289 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Thierry Strudel3d639192016-09-09 11:52:26 -070011290 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11291 edge_mode = ANDROID_EDGE_MODE_FAST;
11292 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11293 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11294 if (forceVideoOis)
11295 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11296 break;
11297 case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
11298 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
11299 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
11300 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Thierry Strudel3d639192016-09-09 11:52:26 -070011301 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11302 edge_mode = ANDROID_EDGE_MODE_FAST;
11303 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11304 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11305 if (forceVideoOis)
11306 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11307 break;
11308 case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
11309 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
11310 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
11311 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11312 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11313 edge_mode = ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG;
11314 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG;
11315 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11316 break;
11317 case CAMERA3_TEMPLATE_MANUAL:
11318 edge_mode = ANDROID_EDGE_MODE_FAST;
11319 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11320 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11321 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11322 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_MANUAL;
11323 focusMode = ANDROID_CONTROL_AF_MODE_OFF;
11324 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
11325 break;
11326 default:
11327 edge_mode = ANDROID_EDGE_MODE_FAST;
11328 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11329 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11330 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11331 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
11332 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
11333 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
11334 break;
11335 }
Thierry Strudel04e026f2016-10-10 11:27:36 -070011336 // Set CAC to OFF if the underlying device doesn't support it
11337 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
11338 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
11339 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011340 settings.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &cacMode, 1);
11341 settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
11342 settings.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vsMode, 1);
11343 if (gCamCapability[mCameraId]->supported_focus_modes_cnt == 1) {
11344 focusMode = ANDROID_CONTROL_AF_MODE_OFF;
11345 }
11346 settings.update(ANDROID_CONTROL_AF_MODE, &focusMode, 1);
Shuzhen Wang14415f52016-11-16 18:26:18 -080011347 settings.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE, &histogramEnable, 1);
Shuzhen Wangcc386c52017-03-29 09:28:08 -070011348 settings.update(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER, &trackingAfTrigger, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011349
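    // OIS selection: force ON when the only advertised mode is ON; force OFF when the
    // only advertised mode is OFF or the persist.camera.ois.disable property is set.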
11350 if (gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
11351 gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_ON)
11352 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11353 else if ((gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
11354 gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_OFF)
11355 || ois_disable)
11356 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
11357 settings.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &optStabMode, 1);
Shuzhen Wang8f66c042016-08-17 14:50:26 -070011358 settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &shadingmap_mode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011359
11360 settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
11361 &gCamCapability[mCameraId]->exposure_compensation_default, 1);
11362
11363 static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
11364 settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
11365
11366 static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
11367 settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
11368
11369 static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
11370 settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
11371
11372 static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
11373 settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
11374
11375 static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
11376 settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
11377
11378 static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;
11379 settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
11380
11381 static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
11382 settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
11383
11384 /*flash*/
11385 static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
11386 settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
11387
11388 static const uint8_t flashFiringLevel = CAM_FLASH_FIRING_LEVEL_4;
11389 settings.update(ANDROID_FLASH_FIRING_POWER,
11390 &flashFiringLevel, 1);
11391
11392 /* lens */
11393 float default_aperture = gCamCapability[mCameraId]->apertures[0];
11394 settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1);
11395
11396 if (gCamCapability[mCameraId]->filter_densities_count) {
11397 float default_filter_density = gCamCapability[mCameraId]->filter_densities[0];
11398 settings.update(ANDROID_LENS_FILTER_DENSITY, &default_filter_density,
11399 gCamCapability[mCameraId]->filter_densities_count);
11400 }
11401
11402 float default_focal_length = gCamCapability[mCameraId]->focal_length;
11403 settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1);
11404
Thierry Strudel3d639192016-09-09 11:52:26 -070011405 static const uint8_t demosaicMode = ANDROID_DEMOSAIC_MODE_FAST;
11406 settings.update(ANDROID_DEMOSAIC_MODE, &demosaicMode, 1);
11407
11408 static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
11409 settings.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
11410
11411 static const int32_t testpatternMode = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;
11412 settings.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &testpatternMode, 1);
11413
11414 /* face detection (default to OFF) */
11415 static const uint8_t faceDetectMode = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
11416 settings.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &faceDetectMode, 1);
11417
Thierry Strudel54dc9782017-02-15 12:12:10 -080011418 static const uint8_t histogramMode = QCAMERA3_HISTOGRAM_MODE_OFF;
11419 settings.update(QCAMERA3_HISTOGRAM_MODE, &histogramMode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011420
11421 static const uint8_t sharpnessMapMode = ANDROID_STATISTICS_SHARPNESS_MAP_MODE_OFF;
11422 settings.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &sharpnessMapMode, 1);
11423
11424 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
11425 settings.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
11426
Thierry Strudel3d639192016-09-09 11:52:26 -070011427
11428 static const uint8_t blackLevelLock = ANDROID_BLACK_LEVEL_LOCK_OFF;
11429 settings.update(ANDROID_BLACK_LEVEL_LOCK, &blackLevelLock, 1);
11430
11431    /* Exposure time (default to the minimum supported exposure time) */
11432 int64_t default_exposure_time = gCamCapability[mCameraId]->exposure_time_range[0];
11433 settings.update(ANDROID_SENSOR_EXPOSURE_TIME, &default_exposure_time, 1);
11434
11435 /* frame duration */
11436 static const int64_t default_frame_duration = NSEC_PER_33MSEC;
11437 settings.update(ANDROID_SENSOR_FRAME_DURATION, &default_frame_duration, 1);
11438
11439 /* sensitivity */
11440 static const int32_t default_sensitivity = 100;
11441 settings.update(ANDROID_SENSOR_SENSITIVITY, &default_sensitivity, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011442#ifndef USE_HAL_3_3
11443 static const int32_t default_isp_sensitivity =
11444 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
11445 settings.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &default_isp_sensitivity, 1);
11446#endif
Thierry Strudel3d639192016-09-09 11:52:26 -070011447
11448 /*edge mode*/
11449 settings.update(ANDROID_EDGE_MODE, &edge_mode, 1);
11450
11451 /*noise reduction mode*/
11452 settings.update(ANDROID_NOISE_REDUCTION_MODE, &noise_red_mode, 1);
11453
11454 /*color correction mode*/
11455 static const uint8_t color_correct_mode = ANDROID_COLOR_CORRECTION_MODE_FAST;
11456 settings.update(ANDROID_COLOR_CORRECTION_MODE, &color_correct_mode, 1);
11457
11458    /* tonemap mode */
11459 settings.update(ANDROID_TONEMAP_MODE, &tonemap_mode, 1);
11460
11461 int32_t scaler_crop_region[4];
11462 scaler_crop_region[0] = 0;
11463 scaler_crop_region[1] = 0;
11464 scaler_crop_region[2] = gCamCapability[mCameraId]->active_array_size.width;
11465 scaler_crop_region[3] = gCamCapability[mCameraId]->active_array_size.height;
11466 settings.update(ANDROID_SCALER_CROP_REGION, scaler_crop_region, 4);
11467
11468 static const uint8_t antibanding_mode = ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
11469 settings.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &antibanding_mode, 1);
11470
11471 /*focus distance*/
11472 float focus_distance = 0.0;
11473 settings.update(ANDROID_LENS_FOCUS_DISTANCE, &focus_distance, 1);
11474
11475 /*target fps range: use maximum range for picture, and maximum fixed range for video*/
Thierry Strudele80ad7c2016-12-06 10:16:27 -080011476 /* Restrict template max_fps to 30 */
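    /* Selection below: PREVIEW, STILL_CAPTURE and ZERO_SHUTTER_LAG templates pick the
     * widest supported range whose max does not exceed TEMPLATE_MAX_PREVIEW_FPS; all
     * other templates pick the highest fixed range (min_fps == max_fps). */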
Thierry Strudel3d639192016-09-09 11:52:26 -070011477 float max_range = 0.0;
11478 float max_fixed_fps = 0.0;
11479 int32_t fps_range[2] = {0, 0};
11480 for (uint32_t i = 0; i < gCamCapability[mCameraId]->fps_ranges_tbl_cnt;
11481 i++) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -080011482 if (gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps >
11483 TEMPLATE_MAX_PREVIEW_FPS) {
11484 continue;
11485 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011486 float range = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps -
11487 gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
11488 if (type == CAMERA3_TEMPLATE_PREVIEW ||
11489 type == CAMERA3_TEMPLATE_STILL_CAPTURE ||
11490 type == CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG) {
11491 if (range > max_range) {
11492 fps_range[0] =
11493 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
11494 fps_range[1] =
11495 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
11496 max_range = range;
11497 }
11498 } else {
11499 if (range < 0.01 && max_fixed_fps <
11500 gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps) {
11501 fps_range[0] =
11502 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
11503 fps_range[1] =
11504 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
11505 max_fixed_fps = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
11506 }
11507 }
11508 }
11509 settings.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, fps_range, 2);
11510
11511 /*precapture trigger*/
11512 uint8_t precapture_trigger = ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE;
11513 settings.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, &precapture_trigger, 1);
11514
11515 /*af trigger*/
11516 uint8_t af_trigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
11517 settings.update(ANDROID_CONTROL_AF_TRIGGER, &af_trigger, 1);
11518
11519 /* ae & af regions */
11520 int32_t active_region[] = {
11521 gCamCapability[mCameraId]->active_array_size.left,
11522 gCamCapability[mCameraId]->active_array_size.top,
11523 gCamCapability[mCameraId]->active_array_size.left +
11524 gCamCapability[mCameraId]->active_array_size.width,
11525 gCamCapability[mCameraId]->active_array_size.top +
11526 gCamCapability[mCameraId]->active_array_size.height,
11527 0};
11528 settings.update(ANDROID_CONTROL_AE_REGIONS, active_region,
11529 sizeof(active_region) / sizeof(active_region[0]));
11530 settings.update(ANDROID_CONTROL_AF_REGIONS, active_region,
11531 sizeof(active_region) / sizeof(active_region[0]));
11532
11533 /* black level lock */
11534 uint8_t blacklevel_lock = ANDROID_BLACK_LEVEL_LOCK_OFF;
11535 settings.update(ANDROID_BLACK_LEVEL_LOCK, &blacklevel_lock, 1);
11536
Thierry Strudel3d639192016-09-09 11:52:26 -070011537 //special defaults for manual template
11538 if (type == CAMERA3_TEMPLATE_MANUAL) {
11539 static const uint8_t manualControlMode = ANDROID_CONTROL_MODE_OFF;
11540 settings.update(ANDROID_CONTROL_MODE, &manualControlMode, 1);
11541
11542 static const uint8_t manualFocusMode = ANDROID_CONTROL_AF_MODE_OFF;
11543 settings.update(ANDROID_CONTROL_AF_MODE, &manualFocusMode, 1);
11544
11545 static const uint8_t manualAeMode = ANDROID_CONTROL_AE_MODE_OFF;
11546 settings.update(ANDROID_CONTROL_AE_MODE, &manualAeMode, 1);
11547
11548 static const uint8_t manualAwbMode = ANDROID_CONTROL_AWB_MODE_OFF;
11549 settings.update(ANDROID_CONTROL_AWB_MODE, &manualAwbMode, 1);
11550
11551 static const uint8_t manualTonemapMode = ANDROID_TONEMAP_MODE_FAST;
11552 settings.update(ANDROID_TONEMAP_MODE, &manualTonemapMode, 1);
11553
11554 static const uint8_t manualColorCorrectMode = ANDROID_COLOR_CORRECTION_MODE_TRANSFORM_MATRIX;
11555 settings.update(ANDROID_COLOR_CORRECTION_MODE, &manualColorCorrectMode, 1);
11556 }
11557
11558
11559 /* TNR
11560     * This is where we decide for which templates TNR is enabled.
11561     * TNR is turned on if either the preview or the video stream requires it.
11562     * This is not to be confused with per-stream linking; that decision is
11563     * still made per session and is handled as part of stream configuration.
11564 */
11565 uint8_t tnr_enable = 0;
11566
11567 if (m_bTnrPreview || m_bTnrVideo) {
11568
11569 switch (type) {
11570 case CAMERA3_TEMPLATE_VIDEO_RECORD:
11571 tnr_enable = 1;
11572 break;
11573
11574 default:
11575 tnr_enable = 0;
11576 break;
11577 }
11578
11579 int32_t tnr_process_type = (int32_t)getTemporalDenoiseProcessPlate();
11580 settings.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
11581 settings.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
11582
11583 LOGD("TNR:%d with process plate %d for template:%d",
11584 tnr_enable, tnr_process_type, type);
11585 }
11586
11587 //Update Link tags to default
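    // Defaults describe a standalone (unlinked) camera: no dual-cam link, treated as the
    // main sensor, and the related camera id pointing back at this camera itself.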
Shuzhen Wang920ea402017-05-03 08:49:39 -070011588 uint8_t sync_type = CAM_TYPE_STANDALONE;
Thierry Strudel3d639192016-09-09 11:52:26 -070011589 settings.update(QCAMERA3_DUALCAM_LINK_ENABLE, &sync_type, 1);
11590
Chien-Yu Chena3bbdc02017-05-05 11:31:47 -070011591 uint8_t is_main = 1;
Thierry Strudel3d639192016-09-09 11:52:26 -070011592 settings.update(QCAMERA3_DUALCAM_LINK_IS_MAIN, &is_main, 1);
11593
Shuzhen Wang920ea402017-05-03 08:49:39 -070011594 uint8_t related_camera_id = mCameraId;
11595 settings.update(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID, &related_camera_id, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011596
11597 /* CDS default */
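    // The default CDS mode comes from the persist.camera.CDS property; any value that does
    // not map through CDS_MAP falls back to AUTO, and CDS is forced OFF below whenever TNR
    // is enabled for this template.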
11598 char prop[PROPERTY_VALUE_MAX];
11599 memset(prop, 0, sizeof(prop));
11600 property_get("persist.camera.CDS", prop, "Auto");
11601 cam_cds_mode_type_t cds_mode = CAM_CDS_MODE_AUTO;
11602 cds_mode = lookupProp(CDS_MAP, METADATA_MAP_SIZE(CDS_MAP), prop);
11603 if (CAM_CDS_MODE_MAX == cds_mode) {
11604 cds_mode = CAM_CDS_MODE_AUTO;
11605 }
11606
11607    /* Disable CDS in templates that have TNR enabled */
11608 if (tnr_enable)
11609 cds_mode = CAM_CDS_MODE_OFF;
11610
11611 int32_t mode = cds_mode;
11612 settings.update(QCAMERA3_CDS_MODE, &mode, 1);
Thierry Strudel04e026f2016-10-10 11:27:36 -070011613
Thierry Strudel269c81a2016-10-12 12:13:59 -070011614 /* Manual Convergence AEC Speed is disabled by default*/
11615 float default_aec_speed = 0;
11616 settings.update(QCAMERA3_AEC_CONVERGENCE_SPEED, &default_aec_speed, 1);
11617
11618 /* Manual Convergence AWB Speed is disabled by default*/
11619 float default_awb_speed = 0;
11620 settings.update(QCAMERA3_AWB_CONVERGENCE_SPEED, &default_awb_speed, 1);
11621
Thierry Strudel295a0ca2016-11-03 18:38:47 -070011622 // Set instant AEC to normal convergence by default
11623 int32_t instant_aec_mode = (int32_t)QCAMERA3_INSTANT_AEC_NORMAL_CONVERGENCE;
11624 settings.update(QCAMERA3_INSTANT_AEC_MODE, &instant_aec_mode, 1);
11625
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070011626 if (gExposeEnableZslKey) {
11627 settings.update(ANDROID_CONTROL_ENABLE_ZSL, &enableZsl, 1);
Chien-Yu Chen0a921f92017-08-27 17:25:33 -070011628 int32_t postview = 0;
11629 settings.update(NEXUS_EXPERIMENTAL_2017_POSTVIEW, &postview, 1);
Chien-Yu Chenb0981e32017-08-28 19:27:35 -070011630 int32_t continuousZslCapture = 0;
11631 settings.update(NEXUS_EXPERIMENTAL_2017_CONTINUOUS_ZSL_CAPTURE, &continuousZslCapture, 1);
Chien-Yu Chenfadf40e2017-09-15 14:33:57 -070011632 // Disable HDR+ for templates other than CAMERA3_TEMPLATE_STILL_CAPTURE and
11633 // CAMERA3_TEMPLATE_PREVIEW.
11634 int32_t disableHdrplus = (type == CAMERA3_TEMPLATE_STILL_CAPTURE ||
11635 type == CAMERA3_TEMPLATE_PREVIEW) ? 0 : 1;
Chien-Yu Chenec328c82017-08-30 16:41:08 -070011636 settings.update(NEXUS_EXPERIMENTAL_2017_DISABLE_HDRPLUS, &disableHdrplus, 1);
11637
Shuzhen Wang77b049a2017-08-30 12:24:36 -070011638 // Set hybrid_ae tag in PREVIEW and STILL_CAPTURE templates to 1 so that
11639 // hybrid ae is enabled for 3rd party app HDR+.
11640 if (type == CAMERA3_TEMPLATE_PREVIEW ||
11641 type == CAMERA3_TEMPLATE_STILL_CAPTURE) {
11642 hybrid_ae = 1;
11643 }
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070011644 }
Shuzhen Wang77b049a2017-08-30 12:24:36 -070011645 /* hybrid ae */
11646 settings.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &hybrid_ae, 1);
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070011647
Thierry Strudel3d639192016-09-09 11:52:26 -070011648 mDefaultMetadata[type] = settings.release();
11649
11650 return mDefaultMetadata[type];
11651}
11652
11653/*===========================================================================
Emilian Peev30522a12017-08-03 14:36:33 +010011654 * FUNCTION : getExpectedFrameDuration
11655 *
11656 * DESCRIPTION: Extract the maximum expected frame duration from either the
11657 *              exposure time or the frame duration setting
11658 *
11659 * PARAMETERS :
11660 * @request : request settings
11661 * @frameDuration : The maximum frame duration in nanoseconds
11662 *
11663 * RETURN : None
11664 *==========================================================================*/
11665void QCamera3HardwareInterface::getExpectedFrameDuration(
11666 const camera_metadata_t *request, nsecs_t *frameDuration /*out*/) {
11667 if (nullptr == frameDuration) {
11668 return;
11669 }
11670
11671 camera_metadata_ro_entry_t e = camera_metadata_ro_entry_t();
11672 find_camera_metadata_ro_entry(request,
11673 ANDROID_SENSOR_EXPOSURE_TIME,
11674 &e);
11675 if (e.count > 0) {
11676 *frameDuration = e.data.i64[0];
11677 }
11678 find_camera_metadata_ro_entry(request,
11679 ANDROID_SENSOR_FRAME_DURATION,
11680 &e);
11681 if (e.count > 0) {
11682 *frameDuration = std::max(e.data.i64[0], *frameDuration);
11683 }
11684}
11685
11686/*===========================================================================
11687 * FUNCTION : calculateMaxExpectedDuration
11688 *
11689 * DESCRIPTION: Calculate the expected frame duration in nanoseconds given the
11690 * current camera settings.
11691 *
11692 * PARAMETERS :
11693 * @request : request settings
11694 *
11695 * RETURN : Expected frame duration in nanoseconds.
11696 *==========================================================================*/
11697nsecs_t QCamera3HardwareInterface::calculateMaxExpectedDuration(
11698 const camera_metadata_t *request) {
11699 nsecs_t maxExpectedDuration = kDefaultExpectedDuration;
11700 camera_metadata_ro_entry_t e = camera_metadata_ro_entry_t();
11701 find_camera_metadata_ro_entry(request, ANDROID_CONTROL_MODE, &e);
11702 if (e.count == 0) {
11703 return maxExpectedDuration;
11704 }
11705
11706 if (e.data.u8[0] == ANDROID_CONTROL_MODE_OFF) {
11707 getExpectedFrameDuration(request, &maxExpectedDuration /*out*/);
11708 }
11709
11710 if (e.data.u8[0] != ANDROID_CONTROL_MODE_AUTO) {
11711 return maxExpectedDuration;
11712 }
11713
11714 find_camera_metadata_ro_entry(request, ANDROID_CONTROL_AE_MODE, &e);
11715 if (e.count == 0) {
11716 return maxExpectedDuration;
11717 }
11718
11719 switch (e.data.u8[0]) {
11720 case ANDROID_CONTROL_AE_MODE_OFF:
11721 getExpectedFrameDuration(request, &maxExpectedDuration /*out*/);
11722 break;
11723 default:
11724 find_camera_metadata_ro_entry(request,
11725 ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
11726 &e);
11727 if (e.count > 1) {
11728                maxExpectedDuration = 1e9 / e.data.i32[0];
11729 }
11730 break;
11731 }
11732
11733 return maxExpectedDuration;
11734}
11735
11736/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070011737 * FUNCTION : setFrameParameters
11738 *
11739 * DESCRIPTION: set parameters per frame as requested in the metadata from
11740 * framework
11741 *
11742 * PARAMETERS :
11743 * @request : request that needs to be serviced
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011744 * @streamsArray : Stream IDs of all the requested streams
Thierry Strudel3d639192016-09-09 11:52:26 -070011745 * @blob_request: Whether this request is a blob request or not
11746 *
11747 * RETURN : success: NO_ERROR
11748 * failure:
11749 *==========================================================================*/
11750int QCamera3HardwareInterface::setFrameParameters(
11751 camera3_capture_request_t *request,
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011752 cam_stream_ID_t streamsArray,
Thierry Strudel3d639192016-09-09 11:52:26 -070011753 int blob_request,
11754 uint32_t snapshotStreamId)
11755{
11756 /*translate from camera_metadata_t type to parm_type_t*/
11757 int rc = 0;
11758 int32_t hal_version = CAM_HAL_V3;
11759
11760 clear_metadata_buffer(mParameters);
11761 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version)) {
11762 LOGE("Failed to set hal version in the parameters");
11763 return BAD_VALUE;
11764 }
11765
11766 /*we need to update the frame number in the parameters*/
11767 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_FRAME_NUMBER,
11768 request->frame_number)) {
11769 LOGE("Failed to set the frame number in the parameters");
11770 return BAD_VALUE;
11771 }
11772
11773 /* Update stream id of all the requested buffers */
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011774 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID, streamsArray)) {
Thierry Strudel3d639192016-09-09 11:52:26 -070011775 LOGE("Failed to set stream type mask in the parameters");
11776 return BAD_VALUE;
11777 }
11778
11779 if (mUpdateDebugLevel) {
11780 uint32_t dummyDebugLevel = 0;
11781        /* The value of dummyDebugLevel is irrelevant. On
11782         * CAM_INTF_PARM_UPDATE_DEBUG_LEVEL, the debug property is re-read */
11783 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_UPDATE_DEBUG_LEVEL,
11784 dummyDebugLevel)) {
11785 LOGE("Failed to set UPDATE_DEBUG_LEVEL");
11786 return BAD_VALUE;
11787 }
11788 mUpdateDebugLevel = false;
11789 }
11790
11791 if(request->settings != NULL){
Emilian Peev30522a12017-08-03 14:36:33 +010011792 mExpectedFrameDuration = calculateMaxExpectedDuration(request->settings);
Thierry Strudel3d639192016-09-09 11:52:26 -070011793 rc = translateToHalMetadata(request, mParameters, snapshotStreamId);
11794 if (blob_request)
11795 memcpy(mPrevParameters, mParameters, sizeof(metadata_buffer_t));
11796 }
11797
11798 return rc;
11799}
11800
11801/*===========================================================================
11802 * FUNCTION : setReprocParameters
11803 *
11804 * DESCRIPTION: Translate frameworks metadata to HAL metadata structure, and
11805 * return it.
11806 *
11807 * PARAMETERS :
11808 * @request : request that needs to be serviced
11809 *
11810 * RETURN : success: NO_ERROR
11811 * failure:
11812 *==========================================================================*/
11813int32_t QCamera3HardwareInterface::setReprocParameters(
11814 camera3_capture_request_t *request, metadata_buffer_t *reprocParam,
11815 uint32_t snapshotStreamId)
11816{
11817 /*translate from camera_metadata_t type to parm_type_t*/
11818 int rc = 0;
11819
11820 if (NULL == request->settings){
11821 LOGE("Reprocess settings cannot be NULL");
11822 return BAD_VALUE;
11823 }
11824
11825 if (NULL == reprocParam) {
11826 LOGE("Invalid reprocessing metadata buffer");
11827 return BAD_VALUE;
11828 }
11829 clear_metadata_buffer(reprocParam);
11830
11831 /*we need to update the frame number in the parameters*/
11832 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FRAME_NUMBER,
11833 request->frame_number)) {
11834 LOGE("Failed to set the frame number in the parameters");
11835 return BAD_VALUE;
11836 }
11837
11838 rc = translateToHalMetadata(request, reprocParam, snapshotStreamId);
11839 if (rc < 0) {
11840 LOGE("Failed to translate reproc request");
11841 return rc;
11842 }
11843
11844 CameraMetadata frame_settings;
11845 frame_settings = request->settings;
11846 if (frame_settings.exists(QCAMERA3_CROP_COUNT_REPROCESS) &&
11847 frame_settings.exists(QCAMERA3_CROP_REPROCESS)) {
11848 int32_t *crop_count =
11849 frame_settings.find(QCAMERA3_CROP_COUNT_REPROCESS).data.i32;
11850 int32_t *crop_data =
11851 frame_settings.find(QCAMERA3_CROP_REPROCESS).data.i32;
11852 int32_t *roi_map =
11853 frame_settings.find(QCAMERA3_CROP_ROI_MAP_REPROCESS).data.i32;
11854 if ((0 < *crop_count) && (*crop_count < MAX_NUM_STREAMS)) {
11855 cam_crop_data_t crop_meta;
11856 memset(&crop_meta, 0, sizeof(cam_crop_data_t));
11857 crop_meta.num_of_streams = 1;
11858 crop_meta.crop_info[0].crop.left = crop_data[0];
11859 crop_meta.crop_info[0].crop.top = crop_data[1];
11860 crop_meta.crop_info[0].crop.width = crop_data[2];
11861 crop_meta.crop_info[0].crop.height = crop_data[3];
11862
11863 crop_meta.crop_info[0].roi_map.left =
11864 roi_map[0];
11865 crop_meta.crop_info[0].roi_map.top =
11866 roi_map[1];
11867 crop_meta.crop_info[0].roi_map.width =
11868 roi_map[2];
11869 crop_meta.crop_info[0].roi_map.height =
11870 roi_map[3];
11871
11872 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_CROP_DATA, crop_meta)) {
11873 rc = BAD_VALUE;
11874 }
11875 LOGD("Found reprocess crop data for stream %p %dx%d, %dx%d",
11876 request->input_buffer->stream,
11877 crop_meta.crop_info[0].crop.left,
11878 crop_meta.crop_info[0].crop.top,
11879 crop_meta.crop_info[0].crop.width,
11880 crop_meta.crop_info[0].crop.height);
11881 LOGD("Found reprocess roi map data for stream %p %dx%d, %dx%d",
11882 request->input_buffer->stream,
11883 crop_meta.crop_info[0].roi_map.left,
11884 crop_meta.crop_info[0].roi_map.top,
11885 crop_meta.crop_info[0].roi_map.width,
11886 crop_meta.crop_info[0].roi_map.height);
11887 } else {
11888 LOGE("Invalid reprocess crop count %d!", *crop_count);
11889 }
11890 } else {
11891 LOGE("No crop data from matching output stream");
11892 }
11893
11894 /* These settings are not needed for regular requests so handle them specially for
11895 reprocess requests; information needed for EXIF tags */
11896 if (frame_settings.exists(ANDROID_FLASH_MODE)) {
11897 int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
11898 (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
11899 if (NAME_NOT_FOUND != val) {
11900 uint32_t flashMode = (uint32_t)val;
11901 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_MODE, flashMode)) {
11902 rc = BAD_VALUE;
11903 }
11904 } else {
11905 LOGE("Could not map fwk flash mode %d to correct hal flash mode",
11906 frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
11907 }
11908 } else {
11909 LOGH("No flash mode in reprocess settings");
11910 }
11911
11912 if (frame_settings.exists(ANDROID_FLASH_STATE)) {
11913 int32_t flashState = (int32_t)frame_settings.find(ANDROID_FLASH_STATE).data.u8[0];
11914 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_STATE, flashState)) {
11915 rc = BAD_VALUE;
11916 }
11917 } else {
11918 LOGH("No flash state in reprocess settings");
11919 }
11920
11921 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS)) {
11922 uint8_t *reprocessFlags =
11923 frame_settings.find(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS).data.u8;
11924 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_REPROCESS_FLAGS,
11925 *reprocessFlags)) {
11926 rc = BAD_VALUE;
11927 }
11928 }
11929
Thierry Strudel54dc9782017-02-15 12:12:10 -080011930 // Add exif debug data to internal metadata
11931 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB)) {
11932 mm_jpeg_debug_exif_params_t *debug_params =
11933 (mm_jpeg_debug_exif_params_t *)frame_settings.find
11934 (QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB).data.u8;
11935 // AE
11936 if (debug_params->ae_debug_params_valid == TRUE) {
11937 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AE,
11938 debug_params->ae_debug_params);
11939 }
11940 // AWB
11941 if (debug_params->awb_debug_params_valid == TRUE) {
11942 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AWB,
11943 debug_params->awb_debug_params);
11944 }
11945 // AF
11946 if (debug_params->af_debug_params_valid == TRUE) {
11947 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AF,
11948 debug_params->af_debug_params);
11949 }
11950 // ASD
11951 if (debug_params->asd_debug_params_valid == TRUE) {
11952 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_ASD,
11953 debug_params->asd_debug_params);
11954 }
11955 // Stats
11956 if (debug_params->stats_debug_params_valid == TRUE) {
11957 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_STATS,
11958 debug_params->stats_debug_params);
11959 }
11960 // BE Stats
11961 if (debug_params->bestats_debug_params_valid == TRUE) {
11962 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_BESTATS,
11963 debug_params->bestats_debug_params);
11964 }
11965 // BHIST
11966 if (debug_params->bhist_debug_params_valid == TRUE) {
11967 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_BHIST,
11968 debug_params->bhist_debug_params);
11969 }
11970 // 3A Tuning
11971 if (debug_params->q3a_tuning_debug_params_valid == TRUE) {
11972 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_3A_TUNING,
11973 debug_params->q3a_tuning_debug_params);
11974 }
11975 }
11976
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011977 // Add metadata which reprocess needs
11978 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB)) {
11979 cam_reprocess_info_t *repro_info =
11980 (cam_reprocess_info_t *)frame_settings.find
11981 (QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB).data.u8;
Thierry Strudel3d639192016-09-09 11:52:26 -070011982 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_SENSOR,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011983 repro_info->sensor_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011984 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CAMIF,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011985 repro_info->camif_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011986 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_ISP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011987 repro_info->isp_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011988 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CPP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011989 repro_info->cpp_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011990 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_AF_FOCAL_LENGTH_RATIO,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011991 repro_info->af_focal_length_ratio);
Thierry Strudel3d639192016-09-09 11:52:26 -070011992 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_FLIP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011993 repro_info->pipeline_flip);
11994 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_AF_ROI,
11995 repro_info->af_roi);
11996 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_IMG_DYN_FEAT,
11997 repro_info->dyn_mask);
Thierry Strudel3d639192016-09-09 11:52:26 -070011998        /* If ANDROID_JPEG_ORIENTATION is present in the frame settings, the
11999           CAM_INTF_PARM_ROTATION metadata has already been added in
12000           translateToHalMetadata, and HAL needs to keep this new rotation
12001           metadata. Otherwise, the old rotation info saved in the vendor tag
12002           would be used */
12003 IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
12004 CAM_INTF_PARM_ROTATION, reprocParam) {
12005 LOGD("CAM_INTF_PARM_ROTATION metadata is added in translateToHalMetadata");
12006 } else {
12007 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_ROTATION,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070012008 repro_info->rotation_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070012009 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012010 }
12011
12012    /* Add additional JPEG cropping information. The app adds QCAMERA3_JPEG_ENCODE_CROP_RECT
12013       to ask for cropping and uses the ROI for downscale/upscale during HW JPEG encoding.
12014       roi.width and roi.height become the final JPEG size.
12015       For now, HAL only checks this for reprocess requests */
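    /* As used below: QCAMERA3_JPEG_ENCODE_CROP_RECT is read as {left, top, width, height},
       and the optional QCAMERA3_JPEG_ENCODE_CROP_ROI supplies the output scale dimensions
       in roi[2] (width) and roi[3] (height). */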
12016 if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ENABLE) &&
12017 frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_RECT)) {
12018 uint8_t *enable =
12019 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ENABLE).data.u8;
12020 if (*enable == TRUE) {
12021 int32_t *crop_data =
12022 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_RECT).data.i32;
12023 cam_stream_crop_info_t crop_meta;
12024 memset(&crop_meta, 0, sizeof(cam_stream_crop_info_t));
12025 crop_meta.stream_id = 0;
12026 crop_meta.crop.left = crop_data[0];
12027 crop_meta.crop.top = crop_data[1];
12028 crop_meta.crop.width = crop_data[2];
12029 crop_meta.crop.height = crop_data[3];
Thierry Strudel9e74aae2016-09-22 17:10:18 -070012030 // The JPEG crop roi should match cpp output size
12031 IF_META_AVAILABLE(cam_stream_crop_info_t, cpp_crop,
12032 CAM_INTF_META_SNAP_CROP_INFO_CPP, reprocParam) {
12033 crop_meta.roi_map.left = 0;
12034 crop_meta.roi_map.top = 0;
12035 crop_meta.roi_map.width = cpp_crop->crop.width;
12036 crop_meta.roi_map.height = cpp_crop->crop.height;
Thierry Strudel3d639192016-09-09 11:52:26 -070012037 }
12038 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_JPEG_ENCODE_CROP,
12039 crop_meta);
Thierry Strudel9e74aae2016-09-22 17:10:18 -070012040 LOGH("Add JPEG encode crop left %d, top %d, width %d, height %d, mCameraId %d",
Thierry Strudel3d639192016-09-09 11:52:26 -070012041 crop_meta.crop.left, crop_meta.crop.top,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070012042 crop_meta.crop.width, crop_meta.crop.height, mCameraId);
12043 LOGH("Add JPEG encode crop ROI left %d, top %d, width %d, height %d, mCameraId %d",
Thierry Strudel3d639192016-09-09 11:52:26 -070012044 crop_meta.roi_map.left, crop_meta.roi_map.top,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070012045 crop_meta.roi_map.width, crop_meta.roi_map.height, mCameraId);
12046
12047 // Add JPEG scale information
12048 cam_dimension_t scale_dim;
12049 memset(&scale_dim, 0, sizeof(cam_dimension_t));
12050 if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ROI)) {
12051 int32_t *roi =
12052 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ROI).data.i32;
12053 scale_dim.width = roi[2];
12054 scale_dim.height = roi[3];
12055 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_JPEG_SCALE_DIMENSION,
12056 scale_dim);
12057 LOGH("Add JPEG encode scale width %d, height %d, mCameraId %d",
12058 scale_dim.width, scale_dim.height, mCameraId);
12059 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012060 }
12061 }
12062
12063 return rc;
12064}
12065
12066/*===========================================================================
12067 * FUNCTION : saveRequestSettings
12068 *
12069 * DESCRIPTION: Add any settings that might have changed to the request settings
12070 * and save the settings to be applied on the frame
12071 *
12072 * PARAMETERS :
12073 * @jpegMetadata : the extracted and/or modified jpeg metadata
12074 * @request : request with initial settings
12075 *
12076 * RETURN :
12077 * camera_metadata_t* : pointer to the saved request settings
12078 *==========================================================================*/
12079camera_metadata_t* QCamera3HardwareInterface::saveRequestSettings(
12080 const CameraMetadata &jpegMetadata,
12081 camera3_capture_request_t *request)
12082{
12083 camera_metadata_t *resultMetadata;
12084 CameraMetadata camMetadata;
12085 camMetadata = request->settings;
12086
12087 if (jpegMetadata.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
12088 int32_t thumbnail_size[2];
12089 thumbnail_size[0] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
12090 thumbnail_size[1] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
12091 camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, thumbnail_size,
12092 jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
12093 }
12094
12095 if (request->input_buffer != NULL) {
12096 uint8_t reprocessFlags = 1;
12097 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS,
12098 (uint8_t*)&reprocessFlags,
12099 sizeof(reprocessFlags));
12100 }
12101
12102 resultMetadata = camMetadata.release();
12103 return resultMetadata;
12104}
12105
12106/*===========================================================================
12107 * FUNCTION : setHalFpsRange
12108 *
12109 * DESCRIPTION: set FPS range parameter
12110 *
12111 *
12112 * PARAMETERS :
12113 * @settings : Metadata from framework
12114 * @hal_metadata: Metadata buffer
12115 *
12116 *
12117 * RETURN : success: NO_ERROR
12118 * failure:
12119 *==========================================================================*/
12120int32_t QCamera3HardwareInterface::setHalFpsRange(const CameraMetadata &settings,
12121 metadata_buffer_t *hal_metadata)
12122{
12123 int32_t rc = NO_ERROR;
12124 cam_fps_range_t fps_range;
12125 fps_range.min_fps = (float)
12126 settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0];
12127 fps_range.max_fps = (float)
12128 settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
12129 fps_range.video_min_fps = fps_range.min_fps;
12130 fps_range.video_max_fps = fps_range.max_fps;
12131
12132 LOGD("aeTargetFpsRange fps: [%f %f]",
12133 fps_range.min_fps, fps_range.max_fps);
12134 /* In CONSTRAINED_HFR_MODE, sensor_fps is derived from aeTargetFpsRange as
12135 * follows:
12136 * ---------------------------------------------------------------|
12137 * Video stream is absent in configure_streams |
12138     * (Camcorder preview before the first video record)                |
12139 * ---------------------------------------------------------------|
12140 * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
12141 * | | | vid_min/max_fps|
12142 * ---------------------------------------------------------------|
12143 * NO | [ 30, 240] | 240 | [240, 240] |
12144 * |-------------|-------------|----------------|
12145 * | [240, 240] | 240 | [240, 240] |
12146 * ---------------------------------------------------------------|
12147 * Video stream is present in configure_streams |
12148 * ---------------------------------------------------------------|
12149 * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
12150 * | | | vid_min/max_fps|
12151 * ---------------------------------------------------------------|
12152 * NO | [ 30, 240] | 240 | [240, 240] |
12153 * (camcorder prev |-------------|-------------|----------------|
12154 * after video rec | [240, 240] | 240 | [240, 240] |
12155 * is stopped) | | | |
12156 * ---------------------------------------------------------------|
12157 * YES | [ 30, 240] | 240 | [240, 240] |
12158 * |-------------|-------------|----------------|
12159 * | [240, 240] | 240 | [240, 240] |
12160 * ---------------------------------------------------------------|
12161 * When Video stream is absent in configure_streams,
12162 * preview fps = sensor_fps / batchsize
12163 * Eg: for 240fps at batchSize 4, preview = 60fps
12164 * for 120fps at batchSize 4, preview = 30fps
12165 *
12166 * When video stream is present in configure_streams, preview fps is as per
12167 * the ratio of preview buffers to video buffers requested in process
12168 * capture request
12169 */
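    // Worked example for the batch-size math below (assuming PREVIEW_FPS_FOR_HFR is 30,
    // which is an assumption, not taken from this section): an aeTargetFpsRange of
    // [240, 240] gives mHFRVideoFps = 240 and mBatchSize = 240 / 30 = 8, which is then
    // clamped to MAX_HFR_BATCH_SIZE if it exceeds that limit.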
12170 mBatchSize = 0;
12171 if (CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) {
12172 fps_range.min_fps = fps_range.video_max_fps;
12173 fps_range.video_min_fps = fps_range.video_max_fps;
12174 int val = lookupHalName(HFR_MODE_MAP, METADATA_MAP_SIZE(HFR_MODE_MAP),
12175 fps_range.max_fps);
12176 if (NAME_NOT_FOUND != val) {
12177 cam_hfr_mode_t hfrMode = (cam_hfr_mode_t)val;
12178 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
12179 return BAD_VALUE;
12180 }
12181
12182 if (fps_range.max_fps >= MIN_FPS_FOR_BATCH_MODE) {
12183 /* If batchmode is currently in progress and the fps changes,
12184 * set the flag to restart the sensor */
12185 if((mHFRVideoFps >= MIN_FPS_FOR_BATCH_MODE) &&
12186 (mHFRVideoFps != fps_range.max_fps)) {
12187 mNeedSensorRestart = true;
12188 }
12189 mHFRVideoFps = fps_range.max_fps;
12190 mBatchSize = mHFRVideoFps / PREVIEW_FPS_FOR_HFR;
12191 if (mBatchSize > MAX_HFR_BATCH_SIZE) {
12192 mBatchSize = MAX_HFR_BATCH_SIZE;
12193 }
12194 }
12195 LOGD("hfrMode: %d batchSize: %d", hfrMode, mBatchSize);
12196
12197 }
12198 } else {
12199 /* HFR mode is session param in backend/ISP. This should be reset when
12200 * in non-HFR mode */
12201 cam_hfr_mode_t hfrMode = CAM_HFR_MODE_OFF;
12202 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
12203 return BAD_VALUE;
12204 }
12205 }
12206 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FPS_RANGE, fps_range)) {
12207 return BAD_VALUE;
12208 }
12209 LOGD("fps: [%f %f] vid_fps: [%f %f]", fps_range.min_fps,
12210 fps_range.max_fps, fps_range.video_min_fps, fps_range.video_max_fps);
12211 return rc;
12212}
12213
12214/*===========================================================================
12215 * FUNCTION : translateToHalMetadata
12216 *
12217 * DESCRIPTION: read from the camera_metadata_t and change to parm_type_t
12218 *
12219 *
12220 * PARAMETERS :
12221 * @request : request sent from framework
12222 *
12223 *
12224 * RETURN : success: NO_ERROR
12225 * failure:
12226 *==========================================================================*/
12227int QCamera3HardwareInterface::translateToHalMetadata
12228 (const camera3_capture_request_t *request,
12229 metadata_buffer_t *hal_metadata,
Chien-Yu Chen92724a82017-01-06 11:50:30 -080012230 uint32_t snapshotStreamId) {
12231 if (request == nullptr || hal_metadata == nullptr) {
12232 return BAD_VALUE;
12233 }
12234
12235 int64_t minFrameDuration = getMinFrameDuration(request);
12236
12237 return translateFwkMetadataToHalMetadata(request->settings, hal_metadata, snapshotStreamId,
12238 minFrameDuration);
12239}
12240
12241int QCamera3HardwareInterface::translateFwkMetadataToHalMetadata(
12242 const camera_metadata_t *frameworkMetadata, metadata_buffer_t *hal_metadata,
12243 uint32_t snapshotStreamId, int64_t minFrameDuration) {
12244
Thierry Strudel3d639192016-09-09 11:52:26 -070012245 int rc = 0;
12246 CameraMetadata frame_settings;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080012247 frame_settings = frameworkMetadata;
Thierry Strudel3d639192016-09-09 11:52:26 -070012248
12249 /* Do not change the order of the following list unless you know what you are
12250 * doing.
12251 * The order is laid out in such a way that parameters in the front of the table
12252 * may be used to override the parameters later in the table. Examples are:
12253 * 1. META_MODE should precede AEC/AWB/AF MODE
12254     * 2. AEC MODE should precede EXPOSURE_TIME/SENSITIVITY/FRAME_DURATION
12255     * 3. AWB_MODE should precede COLOR_CORRECTION_MODE
12256     * 4. Any mode should precede its corresponding settings
12257 */
12258 if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
12259 uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
12260 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_MODE, metaMode)) {
12261 rc = BAD_VALUE;
12262 }
12263 rc = extractSceneMode(frame_settings, metaMode, hal_metadata);
12264 if (rc != NO_ERROR) {
12265 LOGE("extractSceneMode failed");
12266 }
12267 }
12268
12269 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
12270 uint8_t fwk_aeMode =
12271 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
12272 uint8_t aeMode;
12273 int32_t redeye;
12274
12275 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_OFF ) {
12276 aeMode = CAM_AE_MODE_OFF;
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -080012277 } else if (fwk_aeMode == NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH) {
12278 aeMode = CAM_AE_MODE_ON_EXTERNAL_FLASH;
Thierry Strudel3d639192016-09-09 11:52:26 -070012279 } else {
12280 aeMode = CAM_AE_MODE_ON;
12281 }
12282 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
12283 redeye = 1;
12284 } else {
12285 redeye = 0;
12286 }
12287
12288 int val = lookupHalName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
12289 fwk_aeMode);
12290 if (NAME_NOT_FOUND != val) {
12291 int32_t flashMode = (int32_t)val;
12292 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode);
12293 }
12294
12295 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_MODE, aeMode);
12296 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_REDEYE_REDUCTION, redeye)) {
12297 rc = BAD_VALUE;
12298 }
12299 }
12300
12301 if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) {
12302 uint8_t fwk_whiteLevel = frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0];
12303 int val = lookupHalName(WHITE_BALANCE_MODES_MAP, METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
12304 fwk_whiteLevel);
12305 if (NAME_NOT_FOUND != val) {
12306 uint8_t whiteLevel = (uint8_t)val;
12307 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_WHITE_BALANCE, whiteLevel)) {
12308 rc = BAD_VALUE;
12309 }
12310 }
12311 }
12312
12313 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
12314 uint8_t fwk_cacMode =
12315 frame_settings.find(
12316 ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
12317 int val = lookupHalName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
12318 fwk_cacMode);
12319 if (NAME_NOT_FOUND != val) {
12320 cam_aberration_mode_t cacMode = (cam_aberration_mode_t) val;
12321 bool entryAvailable = FALSE;
12322 // Check whether Frameworks set CAC mode is supported in device or not
12323 for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
12324 if (gCamCapability[mCameraId]->aberration_modes[i] == cacMode) {
12325 entryAvailable = TRUE;
12326 break;
12327 }
12328 }
12329 LOGD("FrameworksCacMode=%d entryAvailable=%d", cacMode, entryAvailable);
12330            // If the entry is not found, set a device-supported mode instead of the framework's mode, i.e.,
12331            // Only HW ISP CAC + no SW CAC : advertise all 3, with HIGH doing the same as FAST in the ISP
12332            // No HW ISP CAC + only SW CAC : advertise all 3, with FAST doing the same as OFF
12333 if (entryAvailable == FALSE) {
12334 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
12335 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
12336 } else {
12337 if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
12338                    // HIGH_QUALITY is not supported, so fall back to FAST since the spec says the
12339                    // underlying device implementation can be the same for both modes.
12340 cacMode = CAM_COLOR_CORRECTION_ABERRATION_FAST;
12341 } else if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_FAST) {
12342                    // FAST is not supported, so neither HIGH nor FAST can be set; choose OFF
12343                    // to avoid the fps drop that high quality would cause
12344 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
12345 } else {
12346 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
12347 }
12348 }
12349 }
12350 LOGD("Final cacMode is %d", cacMode);
12351 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_CAC, cacMode)) {
12352 rc = BAD_VALUE;
12353 }
12354 } else {
12355 LOGE("Invalid framework CAC mode: %d", fwk_cacMode);
12356 }
12357 }
12358
Jason Lee84ae9972017-02-24 13:24:24 -080012359 uint8_t fwk_focusMode = 0;
Shuzhen Wangb57ec912017-07-31 13:24:27 -070012360 if (m_bForceInfinityAf == 0) {
Thierry Strudel2896d122017-02-23 19:18:03 -080012361 if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) {
Jason Lee84ae9972017-02-24 13:24:24 -080012362 fwk_focusMode = frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0];
Thierry Strudel2896d122017-02-23 19:18:03 -080012363 int val = lookupHalName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
12364 fwk_focusMode);
12365 if (NAME_NOT_FOUND != val) {
12366 uint8_t focusMode = (uint8_t)val;
12367 LOGD("set focus mode %d", focusMode);
12368 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12369 CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
12370 rc = BAD_VALUE;
12371 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012372 }
Shuzhen Wanga1d82a92017-09-19 14:39:43 -070012373 } else {
12374 LOGE("Fatal: Missing ANDROID_CONTROL_AF_MODE");
Thierry Strudel3d639192016-09-09 11:52:26 -070012375 }
Thierry Strudel2896d122017-02-23 19:18:03 -080012376 } else {
12377 uint8_t focusMode = (uint8_t)CAM_FOCUS_MODE_INFINITY;
12378 LOGE("Focus forced to infinity %d", focusMode);
12379 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
12380 rc = BAD_VALUE;
12381 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012382 }
12383
Jason Lee84ae9972017-02-24 13:24:24 -080012384 if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE) &&
12385 fwk_focusMode == ANDROID_CONTROL_AF_MODE_OFF) {
Thierry Strudel3d639192016-09-09 11:52:26 -070012386 float focalDistance = frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0];
12387 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCUS_DISTANCE,
12388 focalDistance)) {
12389 rc = BAD_VALUE;
12390 }
12391 }
12392
12393 if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) {
12394 uint8_t fwk_antibandingMode =
12395 frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.u8[0];
12396 int val = lookupHalName(ANTIBANDING_MODES_MAP,
12397 METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP), fwk_antibandingMode);
12398 if (NAME_NOT_FOUND != val) {
12399 uint32_t hal_antibandingMode = (uint32_t)val;
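            // AUTO antibanding is specialized to a 50 Hz or 60 Hz auto mode based on the
            // detected power-line frequency zone (m60HzZone).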
Shuzhen Wangf6890e02016-08-12 14:28:54 -070012400 if (hal_antibandingMode == CAM_ANTIBANDING_MODE_AUTO) {
12401 if (m60HzZone) {
12402 hal_antibandingMode = CAM_ANTIBANDING_MODE_AUTO_60HZ;
12403 } else {
12404 hal_antibandingMode = CAM_ANTIBANDING_MODE_AUTO_50HZ;
12405 }
12406 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012407 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ANTIBANDING,
12408 hal_antibandingMode)) {
12409 rc = BAD_VALUE;
12410 }
12411 }
12412 }
12413
12414 if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
12415 int32_t expCompensation = frame_settings.find(
12416 ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
12417 if (expCompensation < gCamCapability[mCameraId]->exposure_compensation_min)
12418 expCompensation = gCamCapability[mCameraId]->exposure_compensation_min;
12419 if (expCompensation > gCamCapability[mCameraId]->exposure_compensation_max)
12420 expCompensation = gCamCapability[mCameraId]->exposure_compensation_max;
Thierry Strudel54dc9782017-02-15 12:12:10 -080012421 LOGD("Setting compensation:%d", expCompensation);
Thierry Strudel3d639192016-09-09 11:52:26 -070012422 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_COMPENSATION,
12423 expCompensation)) {
12424 rc = BAD_VALUE;
12425 }
12426 }
12427
12428 if (frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) {
12429 uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0];
12430 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_LOCK, aeLock)) {
12431 rc = BAD_VALUE;
12432 }
12433 }
12434 if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
12435 rc = setHalFpsRange(frame_settings, hal_metadata);
12436 if (rc != NO_ERROR) {
12437 LOGE("setHalFpsRange failed");
12438 }
12439 }
12440
12441 if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) {
12442 uint8_t awbLock = frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0];
12443 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AWB_LOCK, awbLock)) {
12444 rc = BAD_VALUE;
12445 }
12446 }
12447
12448 if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) {
12449 uint8_t fwk_effectMode = frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0];
12450 int val = lookupHalName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
12451 fwk_effectMode);
12452 if (NAME_NOT_FOUND != val) {
12453 uint8_t effectMode = (uint8_t)val;
12454 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EFFECT, effectMode)) {
12455 rc = BAD_VALUE;
12456 }
12457 }
12458 }
12459
12460 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) {
12461 uint8_t colorCorrectMode = frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0];
12462 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_MODE,
12463 colorCorrectMode)) {
12464 rc = BAD_VALUE;
12465 }
12466 }
12467
12468 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_GAINS)) {
12469 cam_color_correct_gains_t colorCorrectGains;
12470 for (size_t i = 0; i < CC_GAIN_MAX; i++) {
12471 colorCorrectGains.gains[i] =
12472 frame_settings.find(ANDROID_COLOR_CORRECTION_GAINS).data.f[i];
12473 }
12474 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_GAINS,
12475 colorCorrectGains)) {
12476 rc = BAD_VALUE;
12477 }
12478 }
12479
12480 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_TRANSFORM)) {
12481 cam_color_correct_matrix_t colorCorrectTransform;
12482 cam_rational_type_t transform_elem;
12483 size_t num = 0;
12484 for (size_t i = 0; i < CC_MATRIX_ROWS; i++) {
12485 for (size_t j = 0; j < CC_MATRIX_COLS; j++) {
12486 transform_elem.numerator =
12487 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].numerator;
12488 transform_elem.denominator =
12489 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].denominator;
12490 colorCorrectTransform.transform_matrix[i][j] = transform_elem;
12491 num++;
12492 }
12493 }
12494 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_TRANSFORM,
12495 colorCorrectTransform)) {
12496 rc = BAD_VALUE;
12497 }
12498 }
12499
12500 cam_trigger_t aecTrigger;
12501 aecTrigger.trigger = CAM_AEC_TRIGGER_IDLE;
12502 aecTrigger.trigger_id = -1;
12503 if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)&&
12504 frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_ID)) {
12505 aecTrigger.trigger =
12506 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0];
12507 aecTrigger.trigger_id =
12508 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID).data.i32[0];
12509 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER,
12510 aecTrigger)) {
12511 rc = BAD_VALUE;
12512 }
12513 LOGD("precaptureTrigger: %d precaptureTriggerID: %d",
12514 aecTrigger.trigger, aecTrigger.trigger_id);
12515 }
12516
12517 /*af_trigger must come with a trigger id*/
12518 if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER) &&
12519 frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER_ID)) {
12520 cam_trigger_t af_trigger;
12521 af_trigger.trigger =
12522 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0];
12523 af_trigger.trigger_id =
12524 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER_ID).data.i32[0];
12525 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_TRIGGER, af_trigger)) {
12526 rc = BAD_VALUE;
12527 }
12528 LOGD("AfTrigger: %d AfTriggerID: %d",
12529 af_trigger.trigger, af_trigger.trigger_id);
12530 }
12531
12532 if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) {
12533 int32_t demosaic = frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0];
12534 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_DEMOSAIC, demosaic)) {
12535 rc = BAD_VALUE;
12536 }
12537 }
12538 if (frame_settings.exists(ANDROID_EDGE_MODE)) {
12539 cam_edge_application_t edge_application;
12540 edge_application.edge_mode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0];
Thierry Strudel54dc9782017-02-15 12:12:10 -080012541
Thierry Strudel3d639192016-09-09 11:52:26 -070012542 if (edge_application.edge_mode == CAM_EDGE_MODE_OFF) {
12543 edge_application.sharpness = 0;
12544 } else {
Thierry Strudel54dc9782017-02-15 12:12:10 -080012545 edge_application.sharpness =
12546 gCamCapability[mCameraId]->sharpness_ctrl.def_value; //default
12547 if (frame_settings.exists(QCAMERA3_SHARPNESS_STRENGTH)) {
12548 int32_t sharpness =
12549 frame_settings.find(QCAMERA3_SHARPNESS_STRENGTH).data.i32[0];
12550 if (sharpness >= gCamCapability[mCameraId]->sharpness_ctrl.min_value &&
12551 sharpness <= gCamCapability[mCameraId]->sharpness_ctrl.max_value) {
12552 LOGD("Setting edge mode sharpness %d", sharpness);
12553 edge_application.sharpness = sharpness;
12554 }
12555 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012556 }
12557 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EDGE_MODE, edge_application)) {
12558 rc = BAD_VALUE;
12559 }
12560 }
12561
12562 if (frame_settings.exists(ANDROID_FLASH_MODE)) {
12563 int32_t respectFlashMode = 1;
12564 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
12565 uint8_t fwk_aeMode =
12566 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -080012567 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH ||
12568 fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH ||
12569 fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
Thierry Strudel3d639192016-09-09 11:52:26 -070012570 respectFlashMode = 0;
12571 LOGH("AE Mode controls flash, ignore android.flash.mode");
12572 }
12573 }
12574 if (respectFlashMode) {
12575 int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
12576 (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
12577 LOGH("flash mode after mapping %d", val);
12578 // To check: CAM_INTF_META_FLASH_MODE usage
12579 if (NAME_NOT_FOUND != val) {
12580 uint8_t flashMode = (uint8_t)val;
12581 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode)) {
12582 rc = BAD_VALUE;
12583 }
12584 }
12585 }
12586 }
12587
12588 if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) {
12589 uint8_t flashPower = frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0];
12590 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_POWER, flashPower)) {
12591 rc = BAD_VALUE;
12592 }
12593 }
12594
12595 if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) {
12596 int64_t flashFiringTime = frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0];
12597 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_FIRING_TIME,
12598 flashFiringTime)) {
12599 rc = BAD_VALUE;
12600 }
12601 }
12602
12603 if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) {
12604 uint8_t hotPixelMode = frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0];
12605 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_HOTPIXEL_MODE,
12606 hotPixelMode)) {
12607 rc = BAD_VALUE;
12608 }
12609 }
12610
12611 if (frame_settings.exists(ANDROID_LENS_APERTURE)) {
12612 float lensAperture = frame_settings.find( ANDROID_LENS_APERTURE).data.f[0];
12613 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_APERTURE,
12614 lensAperture)) {
12615 rc = BAD_VALUE;
12616 }
12617 }
12618
12619 if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) {
12620 float filterDensity = frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0];
12621 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FILTERDENSITY,
12622 filterDensity)) {
12623 rc = BAD_VALUE;
12624 }
12625 }
12626
12627 if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
12628 float focalLength = frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
12629 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCAL_LENGTH,
12630 focalLength)) {
12631 rc = BAD_VALUE;
12632 }
12633 }
12634
12635 if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) {
12636 uint8_t optStabMode =
12637 frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0];
12638 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_OPT_STAB_MODE,
12639 optStabMode)) {
12640 rc = BAD_VALUE;
12641 }
12642 }
12643
12644 if (frame_settings.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
12645 uint8_t videoStabMode =
12646 frame_settings.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
12647 LOGD("videoStabMode from APP = %d", videoStabMode);
12648        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_VIDEO_STAB_MODE,
12649 videoStabMode)) {
12650 rc = BAD_VALUE;
12651 }
12652 }
12653
12654
12655 if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) {
12656 uint8_t noiseRedMode = frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
12657 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_NOISE_REDUCTION_MODE,
12658 noiseRedMode)) {
12659 rc = BAD_VALUE;
12660 }
12661 }
12662
12663 if (frame_settings.exists(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR)) {
12664 float reprocessEffectiveExposureFactor =
12665 frame_settings.find(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR).data.f[0];
12666 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR,
12667 reprocessEffectiveExposureFactor)) {
12668 rc = BAD_VALUE;
12669 }
12670 }
12671
12672 cam_crop_region_t scalerCropRegion;
12673 bool scalerCropSet = false;
12674 if (frame_settings.exists(ANDROID_SCALER_CROP_REGION)) {
12675 scalerCropRegion.left = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0];
12676 scalerCropRegion.top = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1];
12677 scalerCropRegion.width = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2];
12678 scalerCropRegion.height = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[3];
12679
12680 // Map coordinate system from active array to sensor output.
12681 mCropRegionMapper.toSensor(scalerCropRegion.left, scalerCropRegion.top,
12682 scalerCropRegion.width, scalerCropRegion.height);
12683
12684 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SCALER_CROP_REGION,
12685 scalerCropRegion)) {
12686 rc = BAD_VALUE;
12687 }
12688 scalerCropSet = true;
12689 }
12690
12691 if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
12692 int64_t sensorExpTime =
12693 frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
12694 LOGD("setting sensorExpTime %lld", sensorExpTime);
12695 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_EXPOSURE_TIME,
12696 sensorExpTime)) {
12697 rc = BAD_VALUE;
12698 }
12699 }
12700
12701 if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) {
12702 int64_t sensorFrameDuration =
12703 frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
Thierry Strudel3d639192016-09-09 11:52:26 -070012704 sensorFrameDuration = MAX(sensorFrameDuration, minFrameDuration);
12705 if (sensorFrameDuration > gCamCapability[mCameraId]->max_frame_duration)
12706 sensorFrameDuration = gCamCapability[mCameraId]->max_frame_duration;
12707 LOGD("clamp sensorFrameDuration to %lld", sensorFrameDuration);
12708 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_FRAME_DURATION,
12709 sensorFrameDuration)) {
12710 rc = BAD_VALUE;
12711 }
12712 }
12713
12714 if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
12715 int32_t sensorSensitivity = frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
12716 if (sensorSensitivity < gCamCapability[mCameraId]->sensitivity_range.min_sensitivity)
12717 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.min_sensitivity;
12718 if (sensorSensitivity > gCamCapability[mCameraId]->sensitivity_range.max_sensitivity)
12719 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.max_sensitivity;
12720 LOGD("clamp sensorSensitivity to %d", sensorSensitivity);
12721 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_SENSITIVITY,
12722 sensorSensitivity)) {
12723 rc = BAD_VALUE;
12724 }
12725 }
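    // Illustrative example (values hypothetical): with a sensitivity_range of
    // [100, 1600], a request of ANDROID_SENSOR_SENSITIVITY = 25 is clamped up
    // to 100 and a request of 6400 is clamped down to 1600 before being sent
    // to the backend, mirroring the clamping done on sensorFrameDuration above.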
12726
Thierry Strudel9e74aae2016-09-22 17:10:18 -070012727#ifndef USE_HAL_3_3
12728 if (frame_settings.exists(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST)) {
12729 int32_t ispSensitivity =
12730 frame_settings.find(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST).data.i32[0];
12731 if (ispSensitivity <
12732 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity) {
12733 ispSensitivity =
12734 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
12735 LOGD("clamp ispSensitivity to %d", ispSensitivity);
12736 }
12737 if (ispSensitivity >
12738 gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity) {
12739 ispSensitivity =
12740 gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity;
12741 LOGD("clamp ispSensitivity to %d", ispSensitivity);
12742 }
12743 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_ISP_SENSITIVITY,
12744 ispSensitivity)) {
12745 rc = BAD_VALUE;
12746 }
12747 }
12748#endif
12749
Thierry Strudel3d639192016-09-09 11:52:26 -070012750 if (frame_settings.exists(ANDROID_SHADING_MODE)) {
12751 uint8_t shadingMode = frame_settings.find(ANDROID_SHADING_MODE).data.u8[0];
12752 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SHADING_MODE, shadingMode)) {
12753 rc = BAD_VALUE;
12754 }
12755 }
12756
12757 if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
12758 uint8_t fwk_facedetectMode =
12759 frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];
12760
12761 int val = lookupHalName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
12762 fwk_facedetectMode);
12763
12764 if (NAME_NOT_FOUND != val) {
12765 uint8_t facedetectMode = (uint8_t)val;
12766 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_FACEDETECT_MODE,
12767 facedetectMode)) {
12768 rc = BAD_VALUE;
12769 }
12770 }
12771 }
12772
Thierry Strudel54dc9782017-02-15 12:12:10 -080012773 if (frame_settings.exists(QCAMERA3_HISTOGRAM_MODE)) {
Thierry Strudel3d639192016-09-09 11:52:26 -070012774 uint8_t histogramMode =
Thierry Strudel54dc9782017-02-15 12:12:10 -080012775 frame_settings.find(QCAMERA3_HISTOGRAM_MODE).data.u8[0];
Thierry Strudel3d639192016-09-09 11:52:26 -070012776 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
12777 histogramMode)) {
12778 rc = BAD_VALUE;
12779 }
12780 }
12781
12782 if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) {
12783 uint8_t sharpnessMapMode =
12784 frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0];
12785 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,
12786 sharpnessMapMode)) {
12787 rc = BAD_VALUE;
12788 }
12789 }
12790
12791 if (frame_settings.exists(ANDROID_TONEMAP_MODE)) {
12792 uint8_t tonemapMode =
12793 frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0];
12794 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_MODE, tonemapMode)) {
12795 rc = BAD_VALUE;
12796 }
12797 }
12798 /* Tonemap curve channels ch0 = G, ch 1 = B, ch 2 = R */
12799 /*All tonemap channels will have the same number of points*/
12800 if (frame_settings.exists(ANDROID_TONEMAP_CURVE_GREEN) &&
12801 frame_settings.exists(ANDROID_TONEMAP_CURVE_BLUE) &&
12802 frame_settings.exists(ANDROID_TONEMAP_CURVE_RED)) {
12803 cam_rgb_tonemap_curves tonemapCurves;
12804 tonemapCurves.tonemap_points_cnt = frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).count/2;
12805 if (tonemapCurves.tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
12806 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
12807 tonemapCurves.tonemap_points_cnt,
12808 CAM_MAX_TONEMAP_CURVE_SIZE);
12809 tonemapCurves.tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
12810 }
12811
12812 /* ch0 = G*/
12813 size_t point = 0;
12814 cam_tonemap_curve_t tonemapCurveGreen;
12815 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12816 for (size_t j = 0; j < 2; j++) {
12817 tonemapCurveGreen.tonemap_points[i][j] =
12818 frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).data.f[point];
12819 point++;
12820 }
12821 }
12822 tonemapCurves.curves[0] = tonemapCurveGreen;
12823
12824 /* ch 1 = B */
12825 point = 0;
12826 cam_tonemap_curve_t tonemapCurveBlue;
12827 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12828 for (size_t j = 0; j < 2; j++) {
12829 tonemapCurveBlue.tonemap_points[i][j] =
12830 frame_settings.find(ANDROID_TONEMAP_CURVE_BLUE).data.f[point];
12831 point++;
12832 }
12833 }
12834 tonemapCurves.curves[1] = tonemapCurveBlue;
12835
12836 /* ch 2 = R */
12837 point = 0;
12838 cam_tonemap_curve_t tonemapCurveRed;
12839 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12840 for (size_t j = 0; j < 2; j++) {
12841 tonemapCurveRed.tonemap_points[i][j] =
12842 frame_settings.find(ANDROID_TONEMAP_CURVE_RED).data.f[point];
12843 point++;
12844 }
12845 }
12846 tonemapCurves.curves[2] = tonemapCurveRed;
12847
12848 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_CURVES,
12849 tonemapCurves)) {
12850 rc = BAD_VALUE;
12851 }
12852 }
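    // Illustrative example (hypothetical values): each tonemap curve arrives
    // from the framework as interleaved (Pin, Pout) floats, so a two-point
    // linear curve is {0.0, 0.0, 1.0, 1.0} and tonemap_points_cnt above becomes
    // count/2 = 2. The same layout is assumed for the G, B and R channels
    // copied into tonemapCurves.curves[0..2].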
12853
12854 if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
12855 uint8_t captureIntent = frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
12856 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_CAPTURE_INTENT,
12857 captureIntent)) {
12858 rc = BAD_VALUE;
12859 }
12860 }
12861
12862 if (frame_settings.exists(ANDROID_BLACK_LEVEL_LOCK)) {
12863 uint8_t blackLevelLock = frame_settings.find(ANDROID_BLACK_LEVEL_LOCK).data.u8[0];
12864 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_BLACK_LEVEL_LOCK,
12865 blackLevelLock)) {
12866 rc = BAD_VALUE;
12867 }
12868 }
12869
12870 if (frame_settings.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
12871 uint8_t lensShadingMapMode =
12872 frame_settings.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
12873 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_SHADING_MAP_MODE,
12874 lensShadingMapMode)) {
12875 rc = BAD_VALUE;
12876 }
12877 }
12878
12879 if (frame_settings.exists(ANDROID_CONTROL_AE_REGIONS)) {
12880 cam_area_t roi;
12881 bool reset = true;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080012882 convertFromRegions(roi, frame_settings, ANDROID_CONTROL_AE_REGIONS);
Thierry Strudel3d639192016-09-09 11:52:26 -070012883
12884 // Map coordinate system from active array to sensor output.
12885 mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
12886 roi.rect.height);
12887
12888 if (scalerCropSet) {
12889 reset = resetIfNeededROI(&roi, &scalerCropRegion);
12890 }
12891 if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_ROI, roi)) {
12892 rc = BAD_VALUE;
12893 }
12894 }
12895
12896 if (frame_settings.exists(ANDROID_CONTROL_AF_REGIONS)) {
12897 cam_area_t roi;
12898 bool reset = true;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080012899 convertFromRegions(roi, frame_settings, ANDROID_CONTROL_AF_REGIONS);
Thierry Strudel3d639192016-09-09 11:52:26 -070012900
12901 // Map coordinate system from active array to sensor output.
12902 mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
12903 roi.rect.height);
12904
12905 if (scalerCropSet) {
12906 reset = resetIfNeededROI(&roi, &scalerCropRegion);
12907 }
12908 if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_ROI, roi)) {
12909 rc = BAD_VALUE;
12910 }
12911 }
12912
12913 // CDS for non-HFR non-video mode
12914 if ((mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
12915 !(m_bIsVideo) && frame_settings.exists(QCAMERA3_CDS_MODE)) {
12916 int32_t *fwk_cds = frame_settings.find(QCAMERA3_CDS_MODE).data.i32;
12917 if ((CAM_CDS_MODE_MAX <= *fwk_cds) || (0 > *fwk_cds)) {
12918 LOGE("Invalid CDS mode %d!", *fwk_cds);
12919 } else {
12920 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12921 CAM_INTF_PARM_CDS_MODE, *fwk_cds)) {
12922 rc = BAD_VALUE;
12923 }
12924 }
12925 }
12926
Thierry Strudel04e026f2016-10-10 11:27:36 -070012927 // Video HDR
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012928 cam_video_hdr_mode_t vhdr = CAM_VIDEO_HDR_MODE_OFF;
Thierry Strudel04e026f2016-10-10 11:27:36 -070012929 if (frame_settings.exists(QCAMERA3_VIDEO_HDR_MODE)) {
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012930 vhdr = (cam_video_hdr_mode_t) frame_settings.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
12931 }
12932 if (m_bVideoHdrEnabled)
12933 vhdr = CAM_VIDEO_HDR_MODE_ON;
12934
Thierry Strudel54dc9782017-02-15 12:12:10 -080012935 int8_t curr_hdr_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) != 0);
12936
12937 if(vhdr != curr_hdr_state)
12938 LOGH("PROFILE_SET_HDR_MODE %d" ,vhdr);
12939
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012940 rc = setVideoHdrMode(mParameters, vhdr);
12941 if (rc != NO_ERROR) {
12942 LOGE("setVideoHDR is failed");
Thierry Strudel04e026f2016-10-10 11:27:36 -070012943 }
12944
12945 //IR
12946 if(frame_settings.exists(QCAMERA3_IR_MODE)) {
12947 cam_ir_mode_type_t fwk_ir = (cam_ir_mode_type_t)
12948 frame_settings.find(QCAMERA3_IR_MODE).data.i32[0];
Thierry Strudel54dc9782017-02-15 12:12:10 -080012949 uint8_t curr_ir_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_IR) != 0);
12950 uint8_t isIRon = 0;
12951
12952        isIRon = (fwk_ir > 0) ? 1 : 0;
Thierry Strudel04e026f2016-10-10 11:27:36 -070012953 if ((CAM_IR_MODE_MAX <= fwk_ir) || (0 > fwk_ir)) {
12954 LOGE("Invalid IR mode %d!", fwk_ir);
12955 } else {
Thierry Strudel54dc9782017-02-15 12:12:10 -080012956 if(isIRon != curr_ir_state )
12957 LOGH("PROFILE_SET_IR_MODE %d" ,isIRon);
12958
Thierry Strudel04e026f2016-10-10 11:27:36 -070012959 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12960 CAM_INTF_META_IR_MODE, fwk_ir)) {
12961 rc = BAD_VALUE;
12962 }
12963 }
12964 }
12965
Thierry Strudel54dc9782017-02-15 12:12:10 -080012966 //Binning Correction Mode
12967 if(frame_settings.exists(QCAMERA3_BINNING_CORRECTION_MODE)) {
12968 cam_binning_correction_mode_t fwk_binning_correction = (cam_binning_correction_mode_t)
12969 frame_settings.find(QCAMERA3_BINNING_CORRECTION_MODE).data.i32[0];
12970 if ((CAM_BINNING_CORRECTION_MODE_MAX <= fwk_binning_correction)
12971 || (0 > fwk_binning_correction)) {
12972 LOGE("Invalid binning correction mode %d!", fwk_binning_correction);
12973 } else {
12974 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12975 CAM_INTF_META_BINNING_CORRECTION_MODE, fwk_binning_correction)) {
12976 rc = BAD_VALUE;
12977 }
12978 }
12979 }
12980
Thierry Strudel269c81a2016-10-12 12:13:59 -070012981 if (frame_settings.exists(QCAMERA3_AEC_CONVERGENCE_SPEED)) {
12982 float aec_speed;
12983 aec_speed = frame_settings.find(QCAMERA3_AEC_CONVERGENCE_SPEED).data.f[0];
12984 LOGD("AEC Speed :%f", aec_speed);
12985 if ( aec_speed < 0 ) {
12986 LOGE("Invalid AEC mode %f!", aec_speed);
12987 } else {
12988 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_CONVERGENCE_SPEED,
12989 aec_speed)) {
12990 rc = BAD_VALUE;
12991 }
12992 }
12993 }
12994
12995 if (frame_settings.exists(QCAMERA3_AWB_CONVERGENCE_SPEED)) {
12996 float awb_speed;
12997 awb_speed = frame_settings.find(QCAMERA3_AWB_CONVERGENCE_SPEED).data.f[0];
12998 LOGD("AWB Speed :%f", awb_speed);
12999 if ( awb_speed < 0 ) {
13000 LOGE("Invalid AWB mode %f!", awb_speed);
13001 } else {
13002 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AWB_CONVERGENCE_SPEED,
13003 awb_speed)) {
13004 rc = BAD_VALUE;
13005 }
13006 }
13007 }
13008
Thierry Strudel3d639192016-09-09 11:52:26 -070013009 // TNR
13010 if (frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_ENABLE) &&
13011 frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE)) {
13012 uint8_t b_TnrRequested = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -080013013 uint8_t curr_tnr_state = ((mCurrFeatureState & CAM_QTI_FEATURE_SW_TNR) != 0);
Thierry Strudel3d639192016-09-09 11:52:26 -070013014 cam_denoise_param_t tnr;
13015 tnr.denoise_enable = frame_settings.find(QCAMERA3_TEMPORAL_DENOISE_ENABLE).data.u8[0];
13016 tnr.process_plates =
13017 (cam_denoise_process_type_t)frame_settings.find(
13018 QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE).data.i32[0];
13019 b_TnrRequested = tnr.denoise_enable;
Thierry Strudel54dc9782017-02-15 12:12:10 -080013020
13021 if(b_TnrRequested != curr_tnr_state)
13022 LOGH("PROFILE_SET_TNR_MODE %d" ,b_TnrRequested);
13023
Thierry Strudel3d639192016-09-09 11:52:26 -070013024 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_TEMPORAL_DENOISE, tnr)) {
13025 rc = BAD_VALUE;
13026 }
13027 }
13028
Thierry Strudel54dc9782017-02-15 12:12:10 -080013029 if (frame_settings.exists(QCAMERA3_EXPOSURE_METER)) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -070013030 int32_t* exposure_metering_mode =
Thierry Strudel54dc9782017-02-15 12:12:10 -080013031 frame_settings.find(QCAMERA3_EXPOSURE_METER).data.i32;
Thierry Strudel295a0ca2016-11-03 18:38:47 -070013032 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_ALGO_TYPE,
13033 *exposure_metering_mode)) {
13034 rc = BAD_VALUE;
13035 }
13036 }
13037
Thierry Strudel3d639192016-09-09 11:52:26 -070013038 if (frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_MODE)) {
13039 int32_t fwk_testPatternMode =
13040 frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_MODE).data.i32[0];
13041 int testPatternMode = lookupHalName(TEST_PATTERN_MAP,
13042 METADATA_MAP_SIZE(TEST_PATTERN_MAP), fwk_testPatternMode);
13043
13044 if (NAME_NOT_FOUND != testPatternMode) {
13045 cam_test_pattern_data_t testPatternData;
13046 memset(&testPatternData, 0, sizeof(testPatternData));
13047 testPatternData.mode = (cam_test_pattern_mode_t)testPatternMode;
13048 if (testPatternMode == CAM_TEST_PATTERN_SOLID_COLOR &&
13049 frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_DATA)) {
13050 int32_t *fwk_testPatternData =
13051 frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_DATA).data.i32;
13052 testPatternData.r = fwk_testPatternData[0];
13053 testPatternData.b = fwk_testPatternData[3];
13054 switch (gCamCapability[mCameraId]->color_arrangement) {
13055 case CAM_FILTER_ARRANGEMENT_RGGB:
13056 case CAM_FILTER_ARRANGEMENT_GRBG:
13057 testPatternData.gr = fwk_testPatternData[1];
13058 testPatternData.gb = fwk_testPatternData[2];
13059 break;
13060 case CAM_FILTER_ARRANGEMENT_GBRG:
13061 case CAM_FILTER_ARRANGEMENT_BGGR:
13062 testPatternData.gr = fwk_testPatternData[2];
13063 testPatternData.gb = fwk_testPatternData[1];
13064 break;
13065 default:
13066 LOGE("color arrangement %d is not supported",
13067 gCamCapability[mCameraId]->color_arrangement);
13068 break;
13069 }
13070 }
13071 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TEST_PATTERN_DATA,
13072 testPatternData)) {
13073 rc = BAD_VALUE;
13074 }
13075 } else {
13076 LOGE("Invalid framework sensor test pattern mode %d",
13077 fwk_testPatternMode);
13078 }
13079 }
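    // Illustrative example: the four ANDROID_SENSOR_TEST_PATTERN_DATA values
    // map to r = data[0] and b = data[3]; whether data[1]/data[2] feed gr or gb
    // depends on the sensor's color filter arrangement, which is why the
    // GBRG/BGGR cases above swap the two green samples.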
13080
13081 if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
13082 size_t count = 0;
13083 camera_metadata_entry_t gps_coords = frame_settings.find(ANDROID_JPEG_GPS_COORDINATES);
13084 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_COORDINATES,
13085 gps_coords.data.d, gps_coords.count, count);
13086 if (gps_coords.count != count) {
13087 rc = BAD_VALUE;
13088 }
13089 }
13090
13091 if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
13092 char gps_methods[GPS_PROCESSING_METHOD_SIZE];
13093 size_t count = 0;
13094 const char *gps_methods_src = (const char *)
13095 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8;
13096 memset(gps_methods, '\0', sizeof(gps_methods));
13097 strlcpy(gps_methods, gps_methods_src, sizeof(gps_methods));
13098 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_PROC_METHODS,
13099 gps_methods, GPS_PROCESSING_METHOD_SIZE, count);
13100 if (GPS_PROCESSING_METHOD_SIZE != count) {
13101 rc = BAD_VALUE;
13102 }
13103 }
13104
13105 if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
13106 int64_t gps_timestamp = frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
13107 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_TIMESTAMP,
13108 gps_timestamp)) {
13109 rc = BAD_VALUE;
13110 }
13111 }
13112
13113 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
13114 int32_t orientation = frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
13115 cam_rotation_info_t rotation_info;
13116 if (orientation == 0) {
13117 rotation_info.rotation = ROTATE_0;
13118 } else if (orientation == 90) {
13119 rotation_info.rotation = ROTATE_90;
13120 } else if (orientation == 180) {
13121 rotation_info.rotation = ROTATE_180;
13122 } else if (orientation == 270) {
13123 rotation_info.rotation = ROTATE_270;
13124 }
Shuzhen Wang6ec8eac2016-07-28 23:09:23 -070013125 rotation_info.device_rotation = ROTATE_0;
Thierry Strudel3d639192016-09-09 11:52:26 -070013126 rotation_info.streamId = snapshotStreamId;
13127 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_ORIENTATION, orientation);
13128 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ROTATION, rotation_info)) {
13129 rc = BAD_VALUE;
13130 }
13131 }
13132
13133 if (frame_settings.exists(ANDROID_JPEG_QUALITY)) {
13134 uint32_t quality = (uint32_t) frame_settings.find(ANDROID_JPEG_QUALITY).data.u8[0];
13135 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_QUALITY, quality)) {
13136 rc = BAD_VALUE;
13137 }
13138 }
13139
13140 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY)) {
13141 uint32_t thumb_quality = (uint32_t)
13142 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8[0];
13143 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_QUALITY,
13144 thumb_quality)) {
13145 rc = BAD_VALUE;
13146 }
13147 }
13148
13149 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
13150 cam_dimension_t dim;
13151 dim.width = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
13152 dim.height = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
13153 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_SIZE, dim)) {
13154 rc = BAD_VALUE;
13155 }
13156 }
13157
13158 // Internal metadata
13159 if (frame_settings.exists(QCAMERA3_PRIVATEDATA_REPROCESS)) {
13160 size_t count = 0;
13161 camera_metadata_entry_t privatedata = frame_settings.find(QCAMERA3_PRIVATEDATA_REPROCESS);
13162 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_PRIVATE_DATA,
13163 privatedata.data.i32, privatedata.count, count);
13164 if (privatedata.count != count) {
13165 rc = BAD_VALUE;
13166 }
13167 }
13168
Thierry Strudel295a0ca2016-11-03 18:38:47 -070013169 // ISO/Exposure Priority
13170 if (frame_settings.exists(QCAMERA3_USE_ISO_EXP_PRIORITY) &&
13171 frame_settings.exists(QCAMERA3_SELECT_PRIORITY)) {
13172 cam_priority_mode_t mode =
13173 (cam_priority_mode_t)frame_settings.find(QCAMERA3_SELECT_PRIORITY).data.i32[0];
13174 if((CAM_ISO_PRIORITY == mode) || (CAM_EXP_PRIORITY == mode)) {
13175 cam_intf_parm_manual_3a_t use_iso_exp_pty;
13176 use_iso_exp_pty.previewOnly = FALSE;
13177 uint64_t* ptr = (uint64_t*)frame_settings.find(QCAMERA3_USE_ISO_EXP_PRIORITY).data.i64;
13178 use_iso_exp_pty.value = *ptr;
13179
13180 if(CAM_ISO_PRIORITY == mode) {
13181 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ISO,
13182 use_iso_exp_pty)) {
13183 rc = BAD_VALUE;
13184 }
13185 }
13186 else {
13187 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_TIME,
13188 use_iso_exp_pty)) {
13189 rc = BAD_VALUE;
13190 }
13191 }
Thierry Strudel54dc9782017-02-15 12:12:10 -080013192
13193 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ZSL_MODE, 1)) {
13194 rc = BAD_VALUE;
13195 }
13196 }
13197 } else {
13198 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ZSL_MODE, 0)) {
13199 rc = BAD_VALUE;
Thierry Strudel295a0ca2016-11-03 18:38:47 -070013200 }
13201 }
13202
13203 // Saturation
13204 if (frame_settings.exists(QCAMERA3_USE_SATURATION)) {
13205 int32_t* use_saturation =
13206 frame_settings.find(QCAMERA3_USE_SATURATION).data.i32;
13207 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_SATURATION, *use_saturation)) {
13208 rc = BAD_VALUE;
13209 }
13210 }
13211
Thierry Strudel3d639192016-09-09 11:52:26 -070013212 // EV step
13213 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EV_STEP,
13214 gCamCapability[mCameraId]->exp_compensation_step)) {
13215 rc = BAD_VALUE;
13216 }
13217
13218 // CDS info
13219 if (frame_settings.exists(QCAMERA3_CDS_INFO)) {
13220 cam_cds_data_t *cdsData = (cam_cds_data_t *)
13221 frame_settings.find(QCAMERA3_CDS_INFO).data.u8;
13222
13223 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13224 CAM_INTF_META_CDS_DATA, *cdsData)) {
13225 rc = BAD_VALUE;
13226 }
13227 }
13228
Shuzhen Wang19463d72016-03-08 11:09:52 -080013229 // Hybrid AE
13230 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
13231 uint8_t *hybrid_ae = (uint8_t *)
13232 frame_settings.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8;
Shuzhen Wang77b049a2017-08-30 12:24:36 -070013233 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_HYBRID_AE, *hybrid_ae)) {
13234 rc = BAD_VALUE;
13235 }
Shuzhen Wang19463d72016-03-08 11:09:52 -080013236 }
13237
Shuzhen Wang14415f52016-11-16 18:26:18 -080013238 // Histogram
13239 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE)) {
13240 uint8_t histogramMode =
13241 frame_settings.find(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE).data.u8[0];
13242 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
13243 histogramMode)) {
13244 rc = BAD_VALUE;
13245 }
13246 }
13247
13248 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS)) {
13249 int32_t histogramBins =
13250 frame_settings.find(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS).data.i32[0];
13251 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_BINS,
13252 histogramBins)) {
13253 rc = BAD_VALUE;
13254 }
13255 }
13256
Shuzhen Wangcc386c52017-03-29 09:28:08 -070013257 // Tracking AF
13258 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER)) {
13259 uint8_t trackingAfTrigger =
13260 frame_settings.find(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER).data.u8[0];
13261 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TRACKING_AF_TRIGGER,
13262 trackingAfTrigger)) {
13263 rc = BAD_VALUE;
13264 }
13265 }
13266
Chien-Yu Chendbd619b2017-08-04 17:50:11 -070013267 // Makernote
13268 camera_metadata_entry entry = frame_settings.find(NEXUS_EXPERIMENTAL_2017_EXIF_MAKERNOTE);
13269 if (entry.count != 0) {
13270 if (entry.count <= MAX_MAKERNOTE_LENGTH) {
13271 cam_makernote_t makernote;
13272 makernote.length = entry.count;
13273 memcpy(makernote.data, entry.data.u8, makernote.length);
13274 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_MAKERNOTE, makernote)) {
13275 rc = BAD_VALUE;
13276 }
13277 } else {
13278 ALOGE("%s: Makernote length %u is larger than %d", __FUNCTION__, entry.count,
13279 MAX_MAKERNOTE_LENGTH);
13280 rc = BAD_VALUE;
13281 }
13282 }
13283
Thierry Strudel3d639192016-09-09 11:52:26 -070013284 return rc;
13285}
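/* Illustrative sketch (not part of the HAL build): every Android control
 * handled above follows the same find-then-batch pattern, so support for a
 * hypothetical new tag would look like:
 *
 *     if (frame_settings.exists(ANDROID_SOME_NEW_TAG)) {            // hypothetical tag
 *         uint8_t value = frame_settings.find(ANDROID_SOME_NEW_TAG).data.u8[0];
 *         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
 *                 CAM_INTF_SOME_NEW_PARAM, value)) {                // hypothetical HAL param
 *             rc = BAD_VALUE;
 *         }
 *     }
 */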
13286
13287/*===========================================================================
13288 * FUNCTION : captureResultCb
13289 *
13290 * DESCRIPTION: Callback handler for all channels (streams, as well as metadata)
13291 *
13292 * PARAMETERS :
13293 * @metadata : metadata super-buffer from mm-camera-interface
13294 * @buffer : actual gralloc buffer to be returned to frameworks. NULL if metadata.
13295 * @userdata: userdata
13296 *
13297 * RETURN : NONE
13298 *==========================================================================*/
13299void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata,
13300 camera3_stream_buffer_t *buffer,
13301 uint32_t frame_number, bool isInputBuffer, void *userdata)
13302{
13303 QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
13304 if (hw == NULL) {
13305 LOGE("Invalid hw %p", hw);
13306 return;
13307 }
13308
13309 hw->captureResultCb(metadata, buffer, frame_number, isInputBuffer);
13310 return;
13311}
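/* Illustrative sketch: channels receive this static trampoline together with
 * the HAL instance as userdata when they are constructed, e.g. the offline
 * reprocess channel below is created with
 *
 *     new QCamera3ReprocessChannel(..., captureResultCb, setBufferErrorStatus,
 *             ..., this, inputChHandle);
 *
 * so that mm-camera callbacks are routed back into the member function above.
 */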
13312
Thierry Strudelc2ee3302016-11-17 12:33:12 -080013313/*===========================================================================
13314 * FUNCTION : setBufferErrorStatus
13315 *
13316 * DESCRIPTION: Callback handler for channels to report any buffer errors
13317 *
13318 * PARAMETERS :
13319 * @ch : Channel on which buffer error is reported from
13320 * @frame_number : frame number on which buffer error is reported on
13321 * @buffer_status : buffer error status
13322 * @userdata: userdata
13323 *
13324 * RETURN : NONE
13325 *==========================================================================*/
13326void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
13327 uint32_t frame_number, camera3_buffer_status_t err, void *userdata)
13328{
13329 QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
13330 if (hw == NULL) {
13331 LOGE("Invalid hw %p", hw);
13332 return;
13333 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013334
Thierry Strudelc2ee3302016-11-17 12:33:12 -080013335 hw->setBufferErrorStatus(ch, frame_number, err);
13336 return;
13337}
13338
13339void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
13340 uint32_t frameNumber, camera3_buffer_status_t err)
13341{
13342 LOGD("channel: %p, frame# %d, buf err: %d", ch, frameNumber, err);
13343 pthread_mutex_lock(&mMutex);
13344
13345 for (auto& req : mPendingBuffersMap.mPendingBuffersInRequest) {
13346 if (req.frame_number != frameNumber)
13347 continue;
13348 for (auto& k : req.mPendingBufferList) {
13349 if(k.stream->priv == ch) {
13350 k.bufStatus = CAMERA3_BUFFER_STATUS_ERROR;
13351 }
13352 }
13353 }
13354
13355 pthread_mutex_unlock(&mMutex);
13356 return;
13357}
Thierry Strudel3d639192016-09-09 11:52:26 -070013358/*===========================================================================
13359 * FUNCTION : initialize
13360 *
13361 * DESCRIPTION: Pass framework callback pointers to HAL
13362 *
13363 * PARAMETERS :
13364 *
13365 *
13366 * RETURN : Success : 0
13367 * Failure: -ENODEV
13368 *==========================================================================*/
13369
13370int QCamera3HardwareInterface::initialize(const struct camera3_device *device,
13371 const camera3_callback_ops_t *callback_ops)
13372{
13373 LOGD("E");
13374 QCamera3HardwareInterface *hw =
13375 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13376 if (!hw) {
13377 LOGE("NULL camera device");
13378 return -ENODEV;
13379 }
13380
13381 int rc = hw->initialize(callback_ops);
13382 LOGD("X");
13383 return rc;
13384}
13385
13386/*===========================================================================
13387 * FUNCTION : configure_streams
13388 *
13389 * DESCRIPTION: Configure the set of output streams requested by the framework
13390 *
13391 * PARAMETERS :
13392 *
13393 *
13394 * RETURN : Success: 0
13395 * Failure: -EINVAL (if stream configuration is invalid)
13396 * -ENODEV (fatal error)
13397 *==========================================================================*/
13398
13399int QCamera3HardwareInterface::configure_streams(
13400 const struct camera3_device *device,
13401 camera3_stream_configuration_t *stream_list)
13402{
13403 LOGD("E");
13404 QCamera3HardwareInterface *hw =
13405 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13406 if (!hw) {
13407 LOGE("NULL camera device");
13408 return -ENODEV;
13409 }
13410 int rc = hw->configureStreams(stream_list);
13411 LOGD("X");
13412 return rc;
13413}
13414
13415/*===========================================================================
13416 * FUNCTION : construct_default_request_settings
13417 *
13418 * DESCRIPTION: Configure a settings buffer to meet the required use case
13419 *
13420 * PARAMETERS :
13421 *
13422 *
13423 * RETURN : Success: Return valid metadata
13424 * Failure: Return NULL
13425 *==========================================================================*/
13426const camera_metadata_t* QCamera3HardwareInterface::
13427 construct_default_request_settings(const struct camera3_device *device,
13428 int type)
13429{
13430
13431 LOGD("E");
13432 camera_metadata_t* fwk_metadata = NULL;
13433 QCamera3HardwareInterface *hw =
13434 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13435 if (!hw) {
13436 LOGE("NULL camera device");
13437 return NULL;
13438 }
13439
13440 fwk_metadata = hw->translateCapabilityToMetadata(type);
13441
13442 LOGD("X");
13443 return fwk_metadata;
13444}
13445
13446/*===========================================================================
13447 * FUNCTION : process_capture_request
13448 *
13449 * DESCRIPTION: Submit a new capture request from the framework to the HAL
13450 *
13451 * PARAMETERS :
13452 *
13453 *
13454 * RETURN :
13455 *==========================================================================*/
13456int QCamera3HardwareInterface::process_capture_request(
13457 const struct camera3_device *device,
13458 camera3_capture_request_t *request)
13459{
13460 LOGD("E");
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013461 CAMSCOPE_UPDATE_FLAGS(CAMSCOPE_SECTION_HAL, kpi_camscope_flags);
Thierry Strudel3d639192016-09-09 11:52:26 -070013462 QCamera3HardwareInterface *hw =
13463 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13464 if (!hw) {
13465 LOGE("NULL camera device");
13466 return -EINVAL;
13467 }
13468
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013469 int rc = hw->orchestrateRequest(request);
Thierry Strudel3d639192016-09-09 11:52:26 -070013470 LOGD("X");
13471 return rc;
13472}
13473
13474/*===========================================================================
13475 * FUNCTION : dump
13476 *
13477 * DESCRIPTION: Dump HAL debug state to the given fd and refresh the log level
13478 *
13479 * PARAMETERS :
13480 *
13481 *
13482 * RETURN :
13483 *==========================================================================*/
13484
13485void QCamera3HardwareInterface::dump(
13486 const struct camera3_device *device, int fd)
13487{
13488 /* Log level property is read when "adb shell dumpsys media.camera" is
13489 called so that the log level can be controlled without restarting
13490 the media server */
13491 getLogLevel();
13492
13493 LOGD("E");
13494 QCamera3HardwareInterface *hw =
13495 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13496 if (!hw) {
13497 LOGE("NULL camera device");
13498 return;
13499 }
13500
13501 hw->dump(fd);
13502 LOGD("X");
13503 return;
13504}
13505
13506/*===========================================================================
13507 * FUNCTION : flush
13508 *
13509 * DESCRIPTION: Flush all in-flight requests and restart the channels
13510 *
13511 * PARAMETERS :
13512 *
13513 *
13514 * RETURN :
13515 *==========================================================================*/
13516
13517int QCamera3HardwareInterface::flush(
13518 const struct camera3_device *device)
13519{
13520 int rc;
13521 LOGD("E");
13522 QCamera3HardwareInterface *hw =
13523 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13524 if (!hw) {
13525 LOGE("NULL camera device");
13526 return -EINVAL;
13527 }
13528
13529 pthread_mutex_lock(&hw->mMutex);
13530 // Validate current state
13531 switch (hw->mState) {
13532 case STARTED:
13533 /* valid state */
13534 break;
13535
13536 case ERROR:
13537 pthread_mutex_unlock(&hw->mMutex);
13538 hw->handleCameraDeviceError();
13539 return -ENODEV;
13540
13541 default:
13542 LOGI("Flush returned during state %d", hw->mState);
13543 pthread_mutex_unlock(&hw->mMutex);
13544 return 0;
13545 }
13546 pthread_mutex_unlock(&hw->mMutex);
13547
13548 rc = hw->flush(true /* restart channels */ );
13549 LOGD("X");
13550 return rc;
13551}
13552
13553/*===========================================================================
13554 * FUNCTION : close_camera_device
13555 *
13556 * DESCRIPTION: Close the camera device and free the HAL instance
13557 *
13558 * PARAMETERS :
13559 *
13560 *
13561 * RETURN :
13562 *==========================================================================*/
13563int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device)
13564{
13565 int ret = NO_ERROR;
13566 QCamera3HardwareInterface *hw =
13567 reinterpret_cast<QCamera3HardwareInterface *>(
13568 reinterpret_cast<camera3_device_t *>(device)->priv);
13569 if (!hw) {
13570 LOGE("NULL camera device");
13571 return BAD_VALUE;
13572 }
13573
13574 LOGI("[KPI Perf]: E camera id %d", hw->mCameraId);
13575 delete hw;
13576 LOGI("[KPI Perf]: X");
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013577 CAMSCOPE_DESTROY(CAMSCOPE_SECTION_HAL);
Thierry Strudel3d639192016-09-09 11:52:26 -070013578 return ret;
13579}
13580
13581/*===========================================================================
13582 * FUNCTION : getWaveletDenoiseProcessPlate
13583 *
13584 * DESCRIPTION: query wavelet denoise process plate
13585 *
13586 * PARAMETERS : None
13587 *
13588 * RETURN : WNR process plate value
13589 *==========================================================================*/
13590cam_denoise_process_type_t QCamera3HardwareInterface::getWaveletDenoiseProcessPlate()
13591{
13592 char prop[PROPERTY_VALUE_MAX];
13593 memset(prop, 0, sizeof(prop));
13594 property_get("persist.denoise.process.plates", prop, "0");
13595 int processPlate = atoi(prop);
13596 switch(processPlate) {
13597 case 0:
13598 return CAM_WAVELET_DENOISE_YCBCR_PLANE;
13599 case 1:
13600 return CAM_WAVELET_DENOISE_CBCR_ONLY;
13601 case 2:
13602 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13603 case 3:
13604 return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
13605 default:
13606 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13607 }
13608}
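/* Illustrative usage (hypothetical value): the plate selection can be
 * overridden at runtime before opening the camera, e.g.
 *
 *     adb shell setprop persist.denoise.process.plates 2
 *
 * which makes this function return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
 * unrecognized values fall back to the same streamlined YCbCr plate.
 */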
13609
13610
13611/*===========================================================================
13612 * FUNCTION : getTemporalDenoiseProcessPlate
13613 *
13614 * DESCRIPTION: query temporal denoise process plate
13615 *
13616 * PARAMETERS : None
13617 *
13618 * RETURN : TNR process plate value
13619 *==========================================================================*/
13620cam_denoise_process_type_t QCamera3HardwareInterface::getTemporalDenoiseProcessPlate()
13621{
13622 char prop[PROPERTY_VALUE_MAX];
13623 memset(prop, 0, sizeof(prop));
13624 property_get("persist.tnr.process.plates", prop, "0");
13625 int processPlate = atoi(prop);
13626 switch(processPlate) {
13627 case 0:
13628 return CAM_WAVELET_DENOISE_YCBCR_PLANE;
13629 case 1:
13630 return CAM_WAVELET_DENOISE_CBCR_ONLY;
13631 case 2:
13632 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13633 case 3:
13634 return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
13635 default:
13636 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13637 }
13638}
13639
13640
13641/*===========================================================================
13642 * FUNCTION : extractSceneMode
13643 *
13644 * DESCRIPTION: Extract scene mode from frameworks set metadata
13645 *
13646 * PARAMETERS :
13647 * @frame_settings: CameraMetadata reference
13648 * @metaMode: ANDROID_CONTROL_MODE value set by the framework
13649 * @hal_metadata: hal metadata structure
13650 *
13651 * RETURN : NO_ERROR on success, non-zero failure code otherwise
13652 *==========================================================================*/
13653int32_t QCamera3HardwareInterface::extractSceneMode(
13654 const CameraMetadata &frame_settings, uint8_t metaMode,
13655 metadata_buffer_t *hal_metadata)
13656{
13657 int32_t rc = NO_ERROR;
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013658 uint8_t sceneMode = CAM_SCENE_MODE_OFF;
13659
13660 if (ANDROID_CONTROL_MODE_OFF_KEEP_STATE == metaMode) {
13661 LOGD("Ignoring control mode OFF_KEEP_STATE");
13662 return NO_ERROR;
13663 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013664
13665 if (metaMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
13666 camera_metadata_ro_entry entry =
13667 frame_settings.find(ANDROID_CONTROL_SCENE_MODE);
13668 if (0 == entry.count)
13669 return rc;
13670
13671 uint8_t fwk_sceneMode = entry.data.u8[0];
13672
13673 int val = lookupHalName(SCENE_MODES_MAP,
13674 sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
13675 fwk_sceneMode);
13676 if (NAME_NOT_FOUND != val) {
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013677 sceneMode = (uint8_t)val;
Thierry Strudel3d639192016-09-09 11:52:26 -070013678 LOGD("sceneMode: %d", sceneMode);
Thierry Strudel3d639192016-09-09 11:52:26 -070013679 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013680 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013681
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013682 if ((sceneMode == CAM_SCENE_MODE_HDR) || m_bSensorHDREnabled) {
13683 rc = setSensorHDR(hal_metadata, (sceneMode == CAM_SCENE_MODE_HDR));
13684 }
13685
13686 if ((rc == NO_ERROR) && !m_bSensorHDREnabled) {
13687 if (sceneMode == ANDROID_CONTROL_SCENE_MODE_HDR) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013688 cam_hdr_param_t hdr_params;
13689 hdr_params.hdr_enable = 1;
13690 hdr_params.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13691 hdr_params.hdr_need_1x = false;
13692 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13693 CAM_INTF_PARM_HAL_BRACKETING_HDR, hdr_params)) {
13694 rc = BAD_VALUE;
13695 }
13696 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013697
Thierry Strudel3d639192016-09-09 11:52:26 -070013698 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13699 CAM_INTF_PARM_BESTSHOT_MODE, sceneMode)) {
13700 rc = BAD_VALUE;
13701 }
13702 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013703
13704 if (mForceHdrSnapshot) {
13705 cam_hdr_param_t hdr_params;
13706 hdr_params.hdr_enable = 1;
13707 hdr_params.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13708 hdr_params.hdr_need_1x = false;
13709 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13710 CAM_INTF_PARM_HAL_BRACKETING_HDR, hdr_params)) {
13711 rc = BAD_VALUE;
13712 }
13713 }
13714
Thierry Strudel3d639192016-09-09 11:52:26 -070013715 return rc;
13716}
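/* Illustrative example: with ANDROID_CONTROL_MODE = USE_SCENE_MODE the
 * framework scene mode is looked up in SCENE_MODES_MAP and forwarded as
 * CAM_INTF_PARM_BESTSHOT_MODE; the HDR scene mode additionally routes through
 * setSensorHDR() and, when sensor HDR is not enabled, programs
 * CAM_INTF_PARM_HAL_BRACKETING_HDR for multi-frame HDR capture.
 * OFF_KEEP_STATE requests are ignored and return NO_ERROR.
 */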
13717
13718/*===========================================================================
Thierry Strudel04e026f2016-10-10 11:27:36 -070013719 * FUNCTION : setVideoHdrMode
13720 *
13721 * DESCRIPTION: Set Video HDR mode from frameworks set metadata
13722 *
13723 * PARAMETERS :
13724 * @hal_metadata: hal metadata structure
13725 * @vhdr : video HDR mode to apply (QCAMERA3_VIDEO_HDR_MODE value)
13726 *
13727 * RETURN : NO_ERROR on success, BAD_VALUE for an invalid mode
13728 *==========================================================================*/
13729int32_t QCamera3HardwareInterface::setVideoHdrMode(
13730 metadata_buffer_t *hal_metadata, cam_video_hdr_mode_t vhdr)
13731{
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013732 if ( (vhdr >= CAM_VIDEO_HDR_MODE_OFF) && (vhdr < CAM_VIDEO_HDR_MODE_MAX)) {
13733 return setSensorHDR(hal_metadata, (vhdr == CAM_VIDEO_HDR_MODE_ON), true);
13734 }
13735
13736 LOGE("Invalid Video HDR mode %d!", vhdr);
13737 return BAD_VALUE;
13738}
13739
13740/*===========================================================================
13741 * FUNCTION : setSensorHDR
13742 *
13743 * DESCRIPTION: Enable/disable sensor HDR.
13744 *
13745 * PARAMETERS :
13746 * @hal_metadata: hal metadata structure
13747 * @enable: boolean whether to enable/disable sensor HDR
13748 *
13749 * RETURN : NO_ERROR on success, BAD_VALUE on failure
13750 *==========================================================================*/
13751int32_t QCamera3HardwareInterface::setSensorHDR(
13752 metadata_buffer_t *hal_metadata, bool enable, bool isVideoHdrEnable)
13753{
Thierry Strudel04e026f2016-10-10 11:27:36 -070013754 int32_t rc = NO_ERROR;
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013755 cam_sensor_hdr_type_t sensor_hdr = CAM_SENSOR_HDR_OFF;
13756
13757 if (enable) {
13758 char sensor_hdr_prop[PROPERTY_VALUE_MAX];
13759 memset(sensor_hdr_prop, 0, sizeof(sensor_hdr_prop));
13760 #ifdef _LE_CAMERA_
13761 //Default to staggered HDR for IOT
13762 property_get("persist.camera.sensor.hdr", sensor_hdr_prop, "3");
13763 #else
13764 property_get("persist.camera.sensor.hdr", sensor_hdr_prop, "0");
13765 #endif
13766 sensor_hdr = (cam_sensor_hdr_type_t) atoi(sensor_hdr_prop);
13767 }
13768
13769 bool isSupported = false;
13770 switch (sensor_hdr) {
13771 case CAM_SENSOR_HDR_IN_SENSOR:
13772 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13773 CAM_QCOM_FEATURE_SENSOR_HDR) {
13774 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070013775 LOGD("Setting HDR mode In Sensor");
Thierry Strudel04e026f2016-10-10 11:27:36 -070013776 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013777 break;
13778 case CAM_SENSOR_HDR_ZIGZAG:
13779 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13780 CAM_QCOM_FEATURE_ZIGZAG_HDR) {
13781 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070013782 LOGD("Setting HDR mode Zigzag");
Thierry Strudel04e026f2016-10-10 11:27:36 -070013783 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013784 break;
13785 case CAM_SENSOR_HDR_STAGGERED:
13786 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13787 CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) {
13788 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070013789 LOGD("Setting HDR mode Staggered");
Thierry Strudel04e026f2016-10-10 11:27:36 -070013790 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013791 break;
13792 case CAM_SENSOR_HDR_OFF:
13793 isSupported = true;
13794 LOGD("Turning off sensor HDR");
13795 break;
13796 default:
13797 LOGE("HDR mode %d not supported", sensor_hdr);
13798 rc = BAD_VALUE;
13799 break;
13800 }
13801
13802 if(isSupported) {
13803 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13804 CAM_INTF_PARM_SENSOR_HDR, sensor_hdr)) {
13805 rc = BAD_VALUE;
13806 } else {
13807 if(!isVideoHdrEnable)
13808 m_bSensorHDREnabled = (sensor_hdr != CAM_SENSOR_HDR_OFF);
Thierry Strudel04e026f2016-10-10 11:27:36 -070013809 }
13810 }
13811 return rc;
13812}
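/* Illustrative usage (hypothetical value): the sensor HDR flavour used when
 * HDR is enabled is chosen by a system property, e.g.
 *
 *     adb shell setprop persist.camera.sensor.hdr 3
 *
 * selects staggered HDR, which is only applied if the capability mask
 * advertises CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR. Values outside the known
 * enum set hit the default case and return BAD_VALUE, while modes the sensor
 * does not advertise are silently skipped.
 */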
13813
13814/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070013815 * FUNCTION : needRotationReprocess
13816 *
13817 * DESCRIPTION: if rotation needs to be done by reprocess in pp
13818 *
13819 * PARAMETERS : none
13820 *
13821 * RETURN : true: needed
13822 * false: no need
13823 *==========================================================================*/
13824bool QCamera3HardwareInterface::needRotationReprocess()
13825{
13826 if ((gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION) > 0) {
13827 // pp has the capability to process rotation, so rotation is handled in reprocess
13828 LOGH("need do reprocess for rotation");
13829 return true;
13830 }
13831
13832 return false;
13833}
13834
13835/*===========================================================================
13836 * FUNCTION : needReprocess
13837 *
13838 * DESCRIPTION: if reprocess is needed
13839 *
13840 * PARAMETERS : none
13841 *
13842 * RETURN : true: needed
13843 * false: no need
13844 *==========================================================================*/
13845bool QCamera3HardwareInterface::needReprocess(cam_feature_mask_t postprocess_mask)
13846{
13847 if (gCamCapability[mCameraId]->qcom_supported_feature_mask > 0) {
13848 // TODO: add for ZSL HDR later
13849 // pp module has min requirement for zsl reprocess, or WNR in ZSL mode
13850 if(postprocess_mask == CAM_QCOM_FEATURE_NONE){
13851 LOGH("need do reprocess for ZSL WNR or min PP reprocess");
13852 return true;
13853 } else {
13854 LOGH("already post processed frame");
13855 return false;
13856 }
13857 }
13858 return needRotationReprocess();
13859}
13860
13861/*===========================================================================
13862 * FUNCTION : needJpegExifRotation
13863 *
13864 * DESCRIPTION: if rotation from jpeg is needed
13865 *
13866 * PARAMETERS : none
13867 *
13868 * RETURN : true: needed
13869 * false: no need
13870 *==========================================================================*/
13871bool QCamera3HardwareInterface::needJpegExifRotation()
13872{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013873 /*If the pp does not have the ability to do rotation, enable jpeg rotation*/
Thierry Strudel3d639192016-09-09 11:52:26 -070013874 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
13875 LOGD("Need use Jpeg EXIF Rotation");
13876 return true;
13877 }
13878 return false;
13879}
13880
13881/*===========================================================================
13882 * FUNCTION : addOfflineReprocChannel
13883 *
13884 * DESCRIPTION: add a reprocess channel that will do reprocess on frames
13885 * coming from input channel
13886 *
13887 * PARAMETERS :
13888 * @config : reprocess configuration
13889 * @inputChHandle : pointer to the input (source) channel
13890 *
13891 *
13892 * RETURN : Ptr to the newly created channel obj. NULL if failed.
13893 *==========================================================================*/
13894QCamera3ReprocessChannel *QCamera3HardwareInterface::addOfflineReprocChannel(
13895 const reprocess_config_t &config, QCamera3ProcessingChannel *inputChHandle)
13896{
13897 int32_t rc = NO_ERROR;
13898 QCamera3ReprocessChannel *pChannel = NULL;
13899
13900 pChannel = new QCamera3ReprocessChannel(mCameraHandle->camera_handle,
Thierry Strudelc2ee3302016-11-17 12:33:12 -080013901 mChannelHandle, mCameraHandle->ops, captureResultCb, setBufferErrorStatus,
13902 config.padding, CAM_QCOM_FEATURE_NONE, this, inputChHandle);
Thierry Strudel3d639192016-09-09 11:52:26 -070013903 if (NULL == pChannel) {
13904 LOGE("no mem for reprocess channel");
13905 return NULL;
13906 }
13907
13908 rc = pChannel->initialize(IS_TYPE_NONE);
13909 if (rc != NO_ERROR) {
13910 LOGE("init reprocess channel failed, ret = %d", rc);
13911 delete pChannel;
13912 return NULL;
13913 }
13914
13915 // pp feature config
13916 cam_pp_feature_config_t pp_config;
13917 memset(&pp_config, 0, sizeof(cam_pp_feature_config_t));
13918
13919 pp_config.feature_mask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
13920 if (gCamCapability[mCameraId]->qcom_supported_feature_mask
13921 & CAM_QCOM_FEATURE_DSDN) {
13922 //Use CPP CDS in case h/w supports it.
13923 pp_config.feature_mask &= ~CAM_QCOM_FEATURE_CDS;
13924 pp_config.feature_mask |= CAM_QCOM_FEATURE_DSDN;
13925 }
13926 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
13927 pp_config.feature_mask &= ~CAM_QCOM_FEATURE_ROTATION;
13928 }
13929
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013930 if (config.hdr_param.hdr_enable) {
13931 pp_config.feature_mask |= CAM_QCOM_FEATURE_HDR;
13932 pp_config.hdr_param = config.hdr_param;
13933 }
13934
13935 if (mForceHdrSnapshot) {
13936 pp_config.feature_mask |= CAM_QCOM_FEATURE_HDR;
13937 pp_config.hdr_param.hdr_enable = 1;
13938 pp_config.hdr_param.hdr_need_1x = 0;
13939 pp_config.hdr_param.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13940 }
13941
Thierry Strudel3d639192016-09-09 11:52:26 -070013942 rc = pChannel->addReprocStreamsFromSource(pp_config,
13943 config,
13944 IS_TYPE_NONE,
13945 mMetadataChannel);
13946
13947 if (rc != NO_ERROR) {
13948 delete pChannel;
13949 return NULL;
13950 }
13951 return pChannel;
13952}
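/* Illustrative call sketch (names hypothetical): a caller typically builds a
 * reprocess_config_t from the source stream and then does
 *
 *     QCamera3ReprocessChannel *reproc =
 *             addOfflineReprocChannel(reproc_cfg, pInputChannel);
 *     if (reproc == NULL) {
 *         // allocation or initialize(IS_TYPE_NONE) failed
 *     }
 *
 * The channel inherits HDR bracketing in pp_config when either
 * config.hdr_param.hdr_enable or mForceHdrSnapshot is set.
 */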
13953
13954/*===========================================================================
13955 * FUNCTION : getMobicatMask
13956 *
13957 * DESCRIPTION: returns mobicat mask
13958 *
13959 * PARAMETERS : none
13960 *
13961 * RETURN : mobicat mask
13962 *
13963 *==========================================================================*/
13964uint8_t QCamera3HardwareInterface::getMobicatMask()
13965{
13966 return m_MobicatMask;
13967}
13968
13969/*===========================================================================
13970 * FUNCTION : setMobicat
13971 *
13972 * DESCRIPTION: set Mobicat on/off.
13973 *
13974 * PARAMETERS :
13975 * @params : none
13976 *
13977 * RETURN : int32_t type of status
13978 * NO_ERROR -- success
13979 * none-zero failure code
13980 *==========================================================================*/
13981int32_t QCamera3HardwareInterface::setMobicat()
13982{
Thierry Strudel3d639192016-09-09 11:52:26 -070013983 int32_t ret = NO_ERROR;
Thierry Strudel3d639192016-09-09 11:52:26 -070013984
Shuzhen Wangb57ec912017-07-31 13:24:27 -070013985 if (m_MobicatMask) {
Thierry Strudel3d639192016-09-09 11:52:26 -070013986 tune_cmd_t tune_cmd;
13987 tune_cmd.type = SET_RELOAD_CHROMATIX;
13988 tune_cmd.module = MODULE_ALL;
13989 tune_cmd.value = TRUE;
13990 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
13991 CAM_INTF_PARM_SET_VFE_COMMAND,
13992 tune_cmd);
13993
13994 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
13995 CAM_INTF_PARM_SET_PP_COMMAND,
13996 tune_cmd);
13997 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013998
13999 return ret;
14000}
14001
14002/*===========================================================================
14003* FUNCTION : getLogLevel
14004*
14005* DESCRIPTION: Reads the log level property into a variable
14006*
14007* PARAMETERS :
14008* None
14009*
14010* RETURN :
14011* None
14012*==========================================================================*/
14013void QCamera3HardwareInterface::getLogLevel()
14014{
14015 char prop[PROPERTY_VALUE_MAX];
14016 uint32_t globalLogLevel = 0;
14017
14018 property_get("persist.camera.hal.debug", prop, "0");
14019 int val = atoi(prop);
14020 if (0 <= val) {
14021 gCamHal3LogLevel = (uint32_t)val;
14022 }
14023
Thierry Strudel9ec39c62016-12-28 11:30:05 -080014024 property_get("persist.camera.kpi.debug", prop, "0");
Thierry Strudel3d639192016-09-09 11:52:26 -070014025 gKpiDebugLevel = atoi(prop);
14026
14027 property_get("persist.camera.global.debug", prop, "0");
14028 val = atoi(prop);
14029 if (0 <= val) {
14030 globalLogLevel = (uint32_t)val;
14031 }
14032
14033 /* Highest log level among hal.logs and global.logs is selected */
14034 if (gCamHal3LogLevel < globalLogLevel)
14035 gCamHal3LogLevel = globalLogLevel;
14036
14037 return;
14038}
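/* Illustrative usage (hypothetical values): log verbosity can be raised at
 * runtime without restarting the media server, e.g.
 *
 *     adb shell setprop persist.camera.hal.debug 4
 *     adb shell dumpsys media.camera
 *
 * since dump() above calls getLogLevel() on every dumpsys invocation and the
 * higher of persist.camera.hal.debug and persist.camera.global.debug wins.
 */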
14039
14040/*===========================================================================
14041 * FUNCTION : validateStreamRotations
14042 *
14043 * DESCRIPTION: Check if the rotations requested are supported
14044 *
14045 * PARAMETERS :
14046 * @stream_list : streams to be configured
14047 *
14048 * RETURN : NO_ERROR on success
14049 * -EINVAL on failure
14050 *
14051 *==========================================================================*/
14052int QCamera3HardwareInterface::validateStreamRotations(
14053 camera3_stream_configuration_t *streamList)
14054{
14055 int rc = NO_ERROR;
14056
14057 /*
14058 * Loop through all streams requested in configuration
14059 * Check if unsupported rotations have been requested on any of them
14060 */
14061 for (size_t j = 0; j < streamList->num_streams; j++){
14062 camera3_stream_t *newStream = streamList->streams[j];
14063
Emilian Peev35ceeed2017-06-29 11:58:56 -070014064 switch(newStream->rotation) {
14065 case CAMERA3_STREAM_ROTATION_0:
14066 case CAMERA3_STREAM_ROTATION_90:
14067 case CAMERA3_STREAM_ROTATION_180:
14068 case CAMERA3_STREAM_ROTATION_270:
14069 //Expected values
14070 break;
14071 default:
14072 ALOGE("%s: Error: Unsupported rotation of %d requested for stream"
14073 " type:%d and stream format:%d", __func__,
14074 newStream->rotation, newStream->stream_type,
14075 newStream->format);
14076 return -EINVAL;
14077 }
14078
Thierry Strudel3d639192016-09-09 11:52:26 -070014079 bool isRotated = (newStream->rotation != CAMERA3_STREAM_ROTATION_0);
14080 bool isImplDef = (newStream->format ==
14081 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED);
14082 bool isZsl = (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
14083 isImplDef);
14084
14085 if (isRotated && (!isImplDef || isZsl)) {
14086 LOGE("Error: Unsupported rotation of %d requested for stream"
14087 " type:%d and stream format:%d",
14088 newStream->rotation, newStream->stream_type,
14089 newStream->format);
14090 rc = -EINVAL;
14091 break;
14092 }
14093 }
14094
14095 return rc;
14096}
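/* Illustrative example: a 90-degree rotation request is accepted only on an
 * IMPLEMENTATION_DEFINED, non-bidirectional (non-ZSL) stream; asking for
 * CAMERA3_STREAM_ROTATION_90 on, say, a YCbCr_420_888 output makes this
 * function return -EINVAL and the stream configuration is rejected.
 */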
14097
14098/*===========================================================================
14099* FUNCTION : getFlashInfo
14100*
14101* DESCRIPTION: Retrieve information about whether the device has a flash.
14102*
14103* PARAMETERS :
14104* @cameraId : Camera id to query
14105* @hasFlash : Boolean indicating whether there is a flash device
14106* associated with given camera
14107* @flashNode : If a flash device exists, this will be its device node.
14108*
14109* RETURN :
14110* None
14111*==========================================================================*/
14112void QCamera3HardwareInterface::getFlashInfo(const int cameraId,
14113 bool& hasFlash,
14114 char (&flashNode)[QCAMERA_MAX_FILEPATH_LENGTH])
14115{
14116 cam_capability_t* camCapability = gCamCapability[cameraId];
14117 if (NULL == camCapability) {
14118 hasFlash = false;
14119 flashNode[0] = '\0';
14120 } else {
14121 hasFlash = camCapability->flash_available;
14122 strlcpy(flashNode,
14123 (char*)camCapability->flash_dev_name,
14124 QCAMERA_MAX_FILEPATH_LENGTH);
14125 }
14126}
14127
14128/*===========================================================================
14129* FUNCTION : getEepromVersionInfo
14130*
14131* DESCRIPTION: Retrieve version info of the sensor EEPROM data
14132*
14133* PARAMETERS : None
14134*
14135* RETURN : string describing EEPROM version
14136* "\0" if no such info available
14137*==========================================================================*/
14138const char *QCamera3HardwareInterface::getEepromVersionInfo()
14139{
14140 return (const char *)&gCamCapability[mCameraId]->eeprom_version_info[0];
14141}
14142
14143/*===========================================================================
14144* FUNCTION : getLdafCalib
14145*
14146* DESCRIPTION: Retrieve Laser AF calibration data
14147*
14148* PARAMETERS : None
14149*
14150* RETURN : Two uint32_t describing laser AF calibration data
14151* NULL if none is available.
14152*==========================================================================*/
14153const uint32_t *QCamera3HardwareInterface::getLdafCalib()
14154{
14155 if (mLdafCalibExist) {
14156 return &mLdafCalib[0];
14157 } else {
14158 return NULL;
14159 }
14160}
14161
14162/*===========================================================================
Arnd Geis082a4d72017-08-24 10:33:07 -070014163* FUNCTION : getEaselFwVersion
14164*
14165* DESCRIPTION: Retrieve Easel firmware version
14166*
14167* PARAMETERS : None
14168*
14169* RETURN : string describing Firmware version
Arnd Geis8cbfc182017-09-07 14:46:41 -070014170* NULL if no updated firmware version is available
Arnd Geis082a4d72017-08-24 10:33:07 -070014171*==========================================================================*/
14172const char *QCamera3HardwareInterface::getEaselFwVersion()
14173{
Arnd Geis8cbfc182017-09-07 14:46:41 -070014174 if (mEaselFwUpdated) {
14175 return (const char *)&mEaselFwVersion[0];
14176 } else {
14177 return NULL;
Arnd Geis082a4d72017-08-24 10:33:07 -070014178 }
Arnd Geis082a4d72017-08-24 10:33:07 -070014179}
14180
14181/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070014182 * FUNCTION : dynamicUpdateMetaStreamInfo
14183 *
14184 * DESCRIPTION: This function:
14185 * (1) stops all the channels
14186 * (2) returns error on pending requests and buffers
14187 * (3) sends metastream_info in setparams
14188 * (4) starts all channels
14189 * This is useful when sensor has to be restarted to apply any
14190 * settings such as frame rate from a different sensor mode
14191 *
14192 * PARAMETERS : None
14193 *
14194 * RETURN : NO_ERROR on success
14195 * Error codes on failure
14196 *
14197 *==========================================================================*/
14198int32_t QCamera3HardwareInterface::dynamicUpdateMetaStreamInfo()
14199{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014200 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_DYN_UPDATE_META_STRM_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -070014201 int rc = NO_ERROR;
14202
14203 LOGD("E");
14204
14205 rc = stopAllChannels();
14206 if (rc < 0) {
14207 LOGE("stopAllChannels failed");
14208 return rc;
14209 }
14210
14211 rc = notifyErrorForPendingRequests();
14212 if (rc < 0) {
14213 LOGE("notifyErrorForPendingRequests failed");
14214 return rc;
14215 }
14216
14217 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
14218 LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%x"
14219 "Format:%d",
14220 mStreamConfigInfo.type[i],
14221 mStreamConfigInfo.stream_sizes[i].width,
14222 mStreamConfigInfo.stream_sizes[i].height,
14223 mStreamConfigInfo.postprocess_mask[i],
14224 mStreamConfigInfo.format[i]);
14225 }
14226
14227 /* Send meta stream info once again so that ISP can start */
14228 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
14229 CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
14230 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
14231 mParameters);
14232 if (rc < 0) {
14233 LOGE("set Metastreaminfo failed. Sensor mode does not change");
14234 }
14235
14236 rc = startAllChannels();
14237 if (rc < 0) {
14238 LOGE("startAllChannels failed");
14239 return rc;
14240 }
14241
14242 LOGD("X");
14243 return rc;
14244}
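
/* Note (summary of the sequence above, not additional behavior): the update is
 * effectively a full stream-off/stream-on cycle,
 *     stopAllChannels() -> notifyErrorForPendingRequests()
 *         -> set_parms(CAM_INTF_META_STREAM_INFO) -> startAllChannels(),
 * so callers should expect every in-flight request to be returned to the
 * framework with an error before the new sensor mode takes effect.
 */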
14245
14246/*===========================================================================
14247 * FUNCTION : stopAllChannels
14248 *
14249 * DESCRIPTION: This function stops (equivalent to stream-off) all channels
14250 *
14251 * PARAMETERS : None
14252 *
14253 * RETURN : NO_ERROR on success
14254 * Error codes on failure
14255 *
14256 *==========================================================================*/
14257int32_t QCamera3HardwareInterface::stopAllChannels()
14258{
14259 int32_t rc = NO_ERROR;
14260
14261 LOGD("Stopping all channels");
14262 // Stop the Streams/Channels
14263 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
14264 it != mStreamInfo.end(); it++) {
14265 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
14266 if (channel) {
14267 channel->stop();
14268 }
14269 (*it)->status = INVALID;
14270 }
14271
14272 if (mSupportChannel) {
14273 mSupportChannel->stop();
14274 }
14275 if (mAnalysisChannel) {
14276 mAnalysisChannel->stop();
14277 }
14278 if (mRawDumpChannel) {
14279 mRawDumpChannel->stop();
14280 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014281 if (mHdrPlusRawSrcChannel) {
14282 mHdrPlusRawSrcChannel->stop();
14283 }
Thierry Strudel3d639192016-09-09 11:52:26 -070014284 if (mMetadataChannel) {
        /* If mStreamInfo is not empty, a metadata stream exists and must be stopped */
14286 mMetadataChannel->stop();
14287 }
14288
14289 LOGD("All channels stopped");
14290 return rc;
14291}
14292
14293/*===========================================================================
14294 * FUNCTION : startAllChannels
14295 *
14296 * DESCRIPTION: This function starts (equivalent to stream-on) all channels
14297 *
14298 * PARAMETERS : None
14299 *
14300 * RETURN : NO_ERROR on success
14301 * Error codes on failure
14302 *
14303 *==========================================================================*/
14304int32_t QCamera3HardwareInterface::startAllChannels()
14305{
14306 int32_t rc = NO_ERROR;
14307
14308 LOGD("Start all channels ");
14309 // Start the Streams/Channels
14310 if (mMetadataChannel) {
        /* If mStreamInfo is not empty, a metadata stream exists and must be started */
14312 rc = mMetadataChannel->start();
14313 if (rc < 0) {
14314 LOGE("META channel start failed");
14315 return rc;
14316 }
14317 }
14318 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
14319 it != mStreamInfo.end(); it++) {
14320 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
14321 if (channel) {
14322 rc = channel->start();
14323 if (rc < 0) {
14324 LOGE("channel start failed");
14325 return rc;
14326 }
14327 }
14328 }
14329 if (mAnalysisChannel) {
14330 mAnalysisChannel->start();
14331 }
14332 if (mSupportChannel) {
14333 rc = mSupportChannel->start();
14334 if (rc < 0) {
14335 LOGE("Support channel start failed");
14336 return rc;
14337 }
14338 }
14339 if (mRawDumpChannel) {
14340 rc = mRawDumpChannel->start();
14341 if (rc < 0) {
14342 LOGE("RAW dump channel start failed");
14343 return rc;
14344 }
14345 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014346 if (mHdrPlusRawSrcChannel) {
14347 rc = mHdrPlusRawSrcChannel->start();
14348 if (rc < 0) {
14349 LOGE("HDR+ RAW channel start failed");
14350 return rc;
14351 }
14352 }
Thierry Strudel3d639192016-09-09 11:52:26 -070014353
14354 LOGD("All channels started");
14355 return rc;
14356}
14357
14358/*===========================================================================
14359 * FUNCTION : notifyErrorForPendingRequests
14360 *
 * DESCRIPTION: This function sends errors for all the pending requests and buffers
14362 *
14363 * PARAMETERS : None
14364 *
14365 * RETURN : Error codes
14366 * NO_ERROR on success
14367 *
14368 *==========================================================================*/
14369int32_t QCamera3HardwareInterface::notifyErrorForPendingRequests()
14370{
Emilian Peev7650c122017-01-19 08:24:33 -080014371 notifyErrorFoPendingDepthData(mDepthChannel);
14372
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014373 auto pendingRequest = mPendingRequestsList.begin();
14374 auto pendingBuffer = mPendingBuffersMap.mPendingBuffersInRequest.begin();
Thierry Strudel3d639192016-09-09 11:52:26 -070014375
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014376 // Iterate through pending requests (for which result metadata isn't sent yet) and pending
14377 // buffers (for which buffers aren't sent yet).
14378 while (pendingRequest != mPendingRequestsList.end() ||
14379 pendingBuffer != mPendingBuffersMap.mPendingBuffersInRequest.end()) {
14380 if (pendingRequest == mPendingRequestsList.end() ||
14381 pendingBuffer->frame_number < pendingRequest->frame_number) {
            // If metadata for this frame was already sent, notify about a buffer error and
            // return the buffers with an error status.
14384 for (auto &info : pendingBuffer->mPendingBufferList) {
14385 // Send a buffer error for this frame number.
Thierry Strudel3d639192016-09-09 11:52:26 -070014386 camera3_notify_msg_t notify_msg;
14387 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
14388 notify_msg.type = CAMERA3_MSG_ERROR;
14389 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014390 notify_msg.message.error.error_stream = info.stream;
14391 notify_msg.message.error.frame_number = pendingBuffer->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014392 orchestrateNotify(&notify_msg);
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014393
14394 camera3_stream_buffer_t buffer = {};
14395 buffer.acquire_fence = -1;
14396 buffer.release_fence = -1;
14397 buffer.buffer = info.buffer;
14398 buffer.status = CAMERA3_BUFFER_STATUS_ERROR;
14399 buffer.stream = info.stream;
14400 mOutputBufferDispatcher.markBufferReady(pendingBuffer->frame_number, buffer);
Thierry Strudel3d639192016-09-09 11:52:26 -070014401 }
14402
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014403 pendingBuffer = mPendingBuffersMap.mPendingBuffersInRequest.erase(pendingBuffer);
14404 } else if (pendingBuffer == mPendingBuffersMap.mPendingBuffersInRequest.end() ||
14405 pendingBuffer->frame_number > pendingRequest->frame_number) {
14406 // If the buffers for this frame were sent already, notify about a result error.
Thierry Strudel3d639192016-09-09 11:52:26 -070014407 camera3_notify_msg_t notify_msg;
14408 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
14409 notify_msg.type = CAMERA3_MSG_ERROR;
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014410 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_RESULT;
14411 notify_msg.message.error.error_stream = nullptr;
14412 notify_msg.message.error.frame_number = pendingRequest->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014413 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -070014414
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014415 if (pendingRequest->input_buffer != nullptr) {
14416 camera3_capture_result result = {};
14417 result.frame_number = pendingRequest->frame_number;
14418 result.result = nullptr;
14419 result.input_buffer = pendingRequest->input_buffer;
14420 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -070014421 }
14422
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014423 mShutterDispatcher.clear(pendingRequest->frame_number);
14424 pendingRequest = mPendingRequestsList.erase(pendingRequest);
14425 } else {
14426 // If both buffers and result metadata weren't sent yet, notify about a request error
14427 // and return buffers with error.
14428 for (auto &info : pendingBuffer->mPendingBufferList) {
14429 camera3_notify_msg_t notify_msg;
14430 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
14431 notify_msg.type = CAMERA3_MSG_ERROR;
14432 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_REQUEST;
14433 notify_msg.message.error.error_stream = info.stream;
14434 notify_msg.message.error.frame_number = pendingBuffer->frame_number;
14435 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -070014436
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014437 camera3_stream_buffer_t buffer = {};
14438 buffer.acquire_fence = -1;
14439 buffer.release_fence = -1;
14440 buffer.buffer = info.buffer;
14441 buffer.status = CAMERA3_BUFFER_STATUS_ERROR;
14442 buffer.stream = info.stream;
14443 mOutputBufferDispatcher.markBufferReady(pendingBuffer->frame_number, buffer);
14444 }
14445
14446 if (pendingRequest->input_buffer != nullptr) {
14447 camera3_capture_result result = {};
14448 result.frame_number = pendingRequest->frame_number;
14449 result.result = nullptr;
14450 result.input_buffer = pendingRequest->input_buffer;
14451 orchestrateResult(&result);
14452 }
14453
14454 mShutterDispatcher.clear(pendingRequest->frame_number);
14455 pendingBuffer = mPendingBuffersMap.mPendingBuffersInRequest.erase(pendingBuffer);
14456 pendingRequest = mPendingRequestsList.erase(pendingRequest);
Thierry Strudel3d639192016-09-09 11:52:26 -070014457 }
14458 }
14459
    /* Reset the pending frame drop list and the request/buffer trackers */
14461 mPendingFrameDropList.clear();
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014462 mShutterDispatcher.clear();
14463 mOutputBufferDispatcher.clear(/*clearConfiguredStreams*/false);
Thierry Strudel3d639192016-09-09 11:52:26 -070014464 mPendingBuffersMap.mPendingBuffersInRequest.clear();
Emilian Peev30522a12017-08-03 14:36:33 +010014465 mExpectedFrameDuration = 0;
14466 mExpectedInflightDuration = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -070014467 LOGH("Cleared all the pending buffers ");
14468
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014469 return NO_ERROR;
Thierry Strudel3d639192016-09-09 11:52:26 -070014470}
14471
14472bool QCamera3HardwareInterface::isOnEncoder(
14473 const cam_dimension_t max_viewfinder_size,
14474 uint32_t width, uint32_t height)
14475{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014476 return ((width > (uint32_t)max_viewfinder_size.width) ||
14477 (height > (uint32_t)max_viewfinder_size.height) ||
14478 (width > (uint32_t)VIDEO_4K_WIDTH) ||
14479 (height > (uint32_t)VIDEO_4K_HEIGHT));
Thierry Strudel3d639192016-09-09 11:52:26 -070014480}
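
/* Illustrative example (hypothetical sizes): with a max_viewfinder_size of
 * 1920x1080, a 3840x2160 stream exceeds the viewfinder bound and is treated
 * as an encoder stream (returns true), while a 1280x720 stream is not
 * (returns false). Any dimension beyond the VIDEO_4K_WIDTH/HEIGHT limits also
 * forces the encoder path.
 */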
14481
14482/*===========================================================================
14483 * FUNCTION : setBundleInfo
14484 *
 * DESCRIPTION: Set bundle info for all streams that are bundled.
14486 *
14487 * PARAMETERS : None
14488 *
14489 * RETURN : NO_ERROR on success
14490 * Error codes on failure
14491 *==========================================================================*/
14492int32_t QCamera3HardwareInterface::setBundleInfo()
14493{
14494 int32_t rc = NO_ERROR;
14495
14496 if (mChannelHandle) {
14497 cam_bundle_config_t bundleInfo;
14498 memset(&bundleInfo, 0, sizeof(bundleInfo));
14499 rc = mCameraHandle->ops->get_bundle_info(
14500 mCameraHandle->camera_handle, mChannelHandle, &bundleInfo);
14501 if (rc != NO_ERROR) {
14502 LOGE("get_bundle_info failed");
14503 return rc;
14504 }
14505 if (mAnalysisChannel) {
14506 mAnalysisChannel->setBundleInfo(bundleInfo);
14507 }
14508 if (mSupportChannel) {
14509 mSupportChannel->setBundleInfo(bundleInfo);
14510 }
14511 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
14512 it != mStreamInfo.end(); it++) {
14513 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
14514 channel->setBundleInfo(bundleInfo);
14515 }
14516 if (mRawDumpChannel) {
14517 mRawDumpChannel->setBundleInfo(bundleInfo);
14518 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014519 if (mHdrPlusRawSrcChannel) {
14520 mHdrPlusRawSrcChannel->setBundleInfo(bundleInfo);
14521 }
Thierry Strudel3d639192016-09-09 11:52:26 -070014522 }
14523
14524 return rc;
14525}
14526
14527/*===========================================================================
Thierry Strudel295a0ca2016-11-03 18:38:47 -070014528 * FUNCTION : setInstantAEC
14529 *
14530 * DESCRIPTION: Set Instant AEC related params.
14531 *
14532 * PARAMETERS :
14533 * @meta: CameraMetadata reference
14534 *
14535 * RETURN : NO_ERROR on success
14536 * Error codes on failure
14537 *==========================================================================*/
14538int32_t QCamera3HardwareInterface::setInstantAEC(const CameraMetadata &meta)
14539{
14540 int32_t rc = NO_ERROR;
14541 uint8_t val = 0;
14542 char prop[PROPERTY_VALUE_MAX];
14543
14544 // First try to configure instant AEC from framework metadata
14545 if (meta.exists(QCAMERA3_INSTANT_AEC_MODE)) {
14546 val = (uint8_t)meta.find(QCAMERA3_INSTANT_AEC_MODE).data.i32[0];
14547 }
14548
    // If the framework did not set this value, try to read it from the system property.
14550 if (val == 0) {
14551 memset(prop, 0, sizeof(prop));
14552 property_get("persist.camera.instant.aec", prop, "0");
14553 val = (uint8_t)atoi(prop);
14554 }
14555
14556 if ((val >= (uint8_t)CAM_AEC_NORMAL_CONVERGENCE) &&
14557 ( val < (uint8_t)CAM_AEC_CONVERGENCE_MAX)) {
14558 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_INSTANT_AEC, val);
14559 mInstantAEC = val;
14560 mInstantAECSettledFrameNumber = 0;
14561 mInstantAecFrameIdxCount = 0;
14562 LOGH("instantAEC value set %d",val);
14563 if (mInstantAEC) {
14564 memset(prop, 0, sizeof(prop));
14565 property_get("persist.camera.ae.instant.bound", prop, "10");
14566 int32_t aec_frame_skip_cnt = atoi(prop);
14567 if (aec_frame_skip_cnt >= 0) {
14568 mAecSkipDisplayFrameBound = (uint8_t)aec_frame_skip_cnt;
14569 } else {
14570 LOGE("Invalid prop for aec frame bound %d", aec_frame_skip_cnt);
14571 rc = BAD_VALUE;
14572 }
14573 }
14574 } else {
14575 LOGE("Bad instant aec value set %d", val);
14576 rc = BAD_VALUE;
14577 }
14578 return rc;
14579}
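
/* Debug/tuning sketch (hypothetical adb session; the property names are the
 * ones read above, and the value-to-mode mapping follows cam_aec_convergence_type):
 *
 *     adb shell setprop persist.camera.instant.aec 1        # select an instant AEC
 *                                                           # convergence mode
 *     adb shell setprop persist.camera.ae.instant.bound 10  # frames to skip on the
 *                                                           # display while AEC settles
 *
 * The property is only consulted when the framework does not provide
 * QCAMERA3_INSTANT_AEC_MODE in the request metadata.
 */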
14580
14581/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070014582 * FUNCTION : get_num_overall_buffers
14583 *
14584 * DESCRIPTION: Estimate number of pending buffers across all requests.
14585 *
14586 * PARAMETERS : None
14587 *
14588 * RETURN : Number of overall pending buffers
14589 *
14590 *==========================================================================*/
14591uint32_t PendingBuffersMap::get_num_overall_buffers()
14592{
14593 uint32_t sum_buffers = 0;
14594 for (auto &req : mPendingBuffersInRequest) {
14595 sum_buffers += req.mPendingBufferList.size();
14596 }
14597 return sum_buffers;
14598}
14599
14600/*===========================================================================
14601 * FUNCTION : removeBuf
14602 *
14603 * DESCRIPTION: Remove a matching buffer from tracker.
14604 *
 * PARAMETERS : @buffer: buffer handle to remove from the tracker
14606 *
14607 * RETURN : None
14608 *
14609 *==========================================================================*/
14610void PendingBuffersMap::removeBuf(buffer_handle_t *buffer)
14611{
14612 bool buffer_found = false;
14613 for (auto req = mPendingBuffersInRequest.begin();
14614 req != mPendingBuffersInRequest.end(); req++) {
14615 for (auto k = req->mPendingBufferList.begin();
14616 k != req->mPendingBufferList.end(); k++ ) {
14617 if (k->buffer == buffer) {
14618 LOGD("Frame %d: Found Frame buffer %p, take it out from mPendingBufferList",
14619 req->frame_number, buffer);
14620 k = req->mPendingBufferList.erase(k);
14621 if (req->mPendingBufferList.empty()) {
14622 // Remove this request from Map
14623 req = mPendingBuffersInRequest.erase(req);
14624 }
14625 buffer_found = true;
14626 break;
14627 }
14628 }
14629 if (buffer_found) {
14630 break;
14631 }
14632 }
14633 LOGD("mPendingBuffersMap.num_overall_buffers = %d",
14634 get_num_overall_buffers());
14635}
14636
14637/*===========================================================================
Thierry Strudelc2ee3302016-11-17 12:33:12 -080014638 * FUNCTION : getBufErrStatus
14639 *
14640 * DESCRIPTION: get buffer error status
14641 *
14642 * PARAMETERS : @buffer: buffer handle
14643 *
14644 * RETURN : Error status
14645 *
14646 *==========================================================================*/
14647int32_t PendingBuffersMap::getBufErrStatus(buffer_handle_t *buffer)
14648{
14649 for (auto& req : mPendingBuffersInRequest) {
14650 for (auto& k : req.mPendingBufferList) {
14651 if (k.buffer == buffer)
14652 return k.bufStatus;
14653 }
14654 }
14655 return CAMERA3_BUFFER_STATUS_OK;
14656}
14657
14658/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070014659 * FUNCTION : setPAAFSupport
14660 *
14661 * DESCRIPTION: Set the preview-assisted auto focus support bit in
14662 * feature mask according to stream type and filter
14663 * arrangement
14664 *
14665 * PARAMETERS : @feature_mask: current feature mask, which may be modified
14666 * @stream_type: stream type
14667 * @filter_arrangement: filter arrangement
14668 *
14669 * RETURN : None
14670 *==========================================================================*/
14671void QCamera3HardwareInterface::setPAAFSupport(
14672 cam_feature_mask_t& feature_mask,
14673 cam_stream_type_t stream_type,
14674 cam_color_filter_arrangement_t filter_arrangement)
14675{
Thierry Strudel3d639192016-09-09 11:52:26 -070014676 switch (filter_arrangement) {
14677 case CAM_FILTER_ARRANGEMENT_RGGB:
14678 case CAM_FILTER_ARRANGEMENT_GRBG:
14679 case CAM_FILTER_ARRANGEMENT_GBRG:
14680 case CAM_FILTER_ARRANGEMENT_BGGR:
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014681 if ((stream_type == CAM_STREAM_TYPE_PREVIEW) ||
14682 (stream_type == CAM_STREAM_TYPE_ANALYSIS) ||
Thierry Strudel3d639192016-09-09 11:52:26 -070014683 (stream_type == CAM_STREAM_TYPE_VIDEO)) {
Thierry Strudel2896d122017-02-23 19:18:03 -080014684 if (!(feature_mask & CAM_QTI_FEATURE_PPEISCORE))
14685 feature_mask |= CAM_QCOM_FEATURE_PAAF;
Thierry Strudel3d639192016-09-09 11:52:26 -070014686 }
14687 break;
14688 case CAM_FILTER_ARRANGEMENT_Y:
14689 if (stream_type == CAM_STREAM_TYPE_ANALYSIS) {
14690 feature_mask |= CAM_QCOM_FEATURE_PAAF;
14691 }
14692 break;
14693 default:
14694 break;
14695 }
Shuzhen Wang3b457d92016-08-03 08:46:59 -070014696 LOGD("feature_mask=0x%llx; stream_type=%d, filter_arrangement=%d",
14697 feature_mask, stream_type, filter_arrangement);
14698
14699
Thierry Strudel3d639192016-09-09 11:52:26 -070014700}
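
/* Illustrative example: for a preview stream on an RGGB Bayer sensor,
 *
 *     setPAAFSupport(mask, CAM_STREAM_TYPE_PREVIEW, CAM_FILTER_ARRANGEMENT_RGGB);
 *
 * ORs CAM_QCOM_FEATURE_PAAF into 'mask' unless CAM_QTI_FEATURE_PPEISCORE is
 * already set. For a mono sensor (CAM_FILTER_ARRANGEMENT_Y) only analysis
 * streams get the PAAF bit; other stream/filter combinations leave the mask
 * untouched.
 */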
14701
14702/*===========================================================================
14703* FUNCTION : getSensorMountAngle
14704*
14705* DESCRIPTION: Retrieve sensor mount angle
14706*
14707* PARAMETERS : None
14708*
14709* RETURN : sensor mount angle in uint32_t
14710*==========================================================================*/
14711uint32_t QCamera3HardwareInterface::getSensorMountAngle()
14712{
14713 return gCamCapability[mCameraId]->sensor_mount_angle;
14714}
14715
14716/*===========================================================================
14717* FUNCTION : getRelatedCalibrationData
14718*
14719* DESCRIPTION: Retrieve related system calibration data
14720*
14721* PARAMETERS : None
14722*
14723* RETURN : Pointer of related system calibration data
14724*==========================================================================*/
14725const cam_related_system_calibration_data_t *QCamera3HardwareInterface::getRelatedCalibrationData()
14726{
14727 return (const cam_related_system_calibration_data_t *)
14728 &(gCamCapability[mCameraId]->related_cam_calibration);
14729}
Shuzhen Wangf6890e02016-08-12 14:28:54 -070014730
14731/*===========================================================================
14732 * FUNCTION : is60HzZone
14733 *
 * DESCRIPTION: Whether the phone is in a zone with 60 Hz mains electricity frequency
14735 *
14736 * PARAMETERS : None
14737 *
14738 * RETURN : True if in 60Hz zone, False otherwise
14739 *==========================================================================*/
14740bool QCamera3HardwareInterface::is60HzZone()
14741{
14742 time_t t = time(NULL);
14743 struct tm lt;
14744
14745 struct tm* r = localtime_r(&t, &lt);
14746
14747 if (r == NULL || lt.tm_gmtoff <= -2*60*60 || lt.tm_gmtoff >= 8*60*60)
14748 return true;
14749 else
14750 return false;
14751}
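
/* Note on the heuristic above (a rough approximation, not an authoritative
 * region-to-frequency mapping): the UTC offset of the current local time is
 * used as a proxy for mains frequency. Offsets of -2 h or less (e.g. the
 * Americas) and +8 h or more are treated as 60 Hz zones; everything in between
 * defaults to 50 Hz. If localtime_r() fails, 60 Hz is assumed.
 */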
Shuzhen Wanga5da1022016-07-13 20:18:42 -070014752
14753/*===========================================================================
14754 * FUNCTION : adjustBlackLevelForCFA
14755 *
 * DESCRIPTION: Adjust the black level pattern given in RGGB order to the
 *              order of the sensor's Bayer CFA (Color Filter Array).
14758 *
14759 * PARAMETERS : @input: black level pattern in the order of RGGB
14760 * @output: black level pattern in the order of CFA
14761 * @color_arrangement: CFA color arrangement
14762 *
14763 * RETURN : None
14764 *==========================================================================*/
14765template<typename T>
14766void QCamera3HardwareInterface::adjustBlackLevelForCFA(
14767 T input[BLACK_LEVEL_PATTERN_CNT],
14768 T output[BLACK_LEVEL_PATTERN_CNT],
14769 cam_color_filter_arrangement_t color_arrangement)
14770{
14771 switch (color_arrangement) {
14772 case CAM_FILTER_ARRANGEMENT_GRBG:
14773 output[0] = input[1];
14774 output[1] = input[0];
14775 output[2] = input[3];
14776 output[3] = input[2];
14777 break;
14778 case CAM_FILTER_ARRANGEMENT_GBRG:
14779 output[0] = input[2];
14780 output[1] = input[3];
14781 output[2] = input[0];
14782 output[3] = input[1];
14783 break;
14784 case CAM_FILTER_ARRANGEMENT_BGGR:
14785 output[0] = input[3];
14786 output[1] = input[2];
14787 output[2] = input[1];
14788 output[3] = input[0];
14789 break;
14790 case CAM_FILTER_ARRANGEMENT_RGGB:
14791 output[0] = input[0];
14792 output[1] = input[1];
14793 output[2] = input[2];
14794 output[3] = input[3];
14795 break;
14796 default:
14797 LOGE("Invalid color arrangement to derive dynamic blacklevel");
14798 break;
14799 }
14800}
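
/* Illustrative example (hypothetical black-level values): with the RGGB-ordered
 * input {R, Gr, Gb, B} = {64, 65, 66, 67} and
 * color_arrangement = CAM_FILTER_ARRANGEMENT_GRBG, the output becomes
 * {65, 64, 67, 66}, i.e. the pattern re-indexed to the sensor's Gr, R, B, Gb
 * readout order.
 */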
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014801
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014802void QCamera3HardwareInterface::updateHdrPlusResultMetadata(
14803 CameraMetadata &resultMetadata,
14804 std::shared_ptr<metadata_buffer_t> settings)
14805{
14806 if (settings == nullptr) {
14807 ALOGE("%s: settings is nullptr.", __FUNCTION__);
14808 return;
14809 }
14810
14811 IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, settings) {
14812 resultMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
14813 }
14814
14815 IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, settings) {
14816 String8 str((const char *)gps_methods);
14817 resultMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
14818 }
14819
14820 IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, settings) {
14821 resultMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
14822 }
14823
14824 IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, settings) {
14825 resultMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
14826 }
14827
14828 IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, settings) {
14829 uint8_t fwk_jpeg_quality = static_cast<uint8_t>(*jpeg_quality);
14830 resultMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
14831 }
14832
14833 IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, settings) {
14834 uint8_t fwk_thumb_quality = static_cast<uint8_t>(*thumb_quality);
14835 resultMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
14836 }
14837
14838 IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, settings) {
14839 int32_t fwk_thumb_size[2];
14840 fwk_thumb_size[0] = thumb_size->width;
14841 fwk_thumb_size[1] = thumb_size->height;
14842 resultMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
14843 }
14844
14845 IF_META_AVAILABLE(uint32_t, intent, CAM_INTF_META_CAPTURE_INTENT, settings) {
14846 uint8_t fwk_intent = intent[0];
14847 resultMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &fwk_intent, 1);
14848 }
14849}
14850
Chien-Yu Chenad9b6632017-08-22 19:09:23 -070014851bool QCamera3HardwareInterface::isRequestHdrPlusCompatible(
14852 const camera3_capture_request_t &request, const CameraMetadata &metadata) {
Chien-Yu Chenec328c82017-08-30 16:41:08 -070014853 if (metadata.exists(NEXUS_EXPERIMENTAL_2017_DISABLE_HDRPLUS) &&
14854 metadata.find(NEXUS_EXPERIMENTAL_2017_DISABLE_HDRPLUS).data.i32[0] == 1) {
14855 ALOGV("%s: NEXUS_EXPERIMENTAL_2017_DISABLE_HDRPLUS is 1", __FUNCTION__);
14856 return false;
14857 }
14858
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014859 if (!metadata.exists(ANDROID_NOISE_REDUCTION_MODE) ||
14860 metadata.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0] !=
14861 ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY) {
Chien-Yu Chenad9b6632017-08-22 19:09:23 -070014862 ALOGV("%s: ANDROID_NOISE_REDUCTION_MODE is not HQ: %d", __FUNCTION__,
Chien-Yu Chenee335912017-02-09 17:53:20 -080014863 metadata.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0]);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014864 return false;
14865 }
14866
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014867 if (!metadata.exists(ANDROID_EDGE_MODE) ||
Chien-Yu Chenad9b6632017-08-22 19:09:23 -070014868 metadata.find(ANDROID_EDGE_MODE).data.u8[0] != ANDROID_EDGE_MODE_HIGH_QUALITY) {
14869 ALOGV("%s: ANDROID_EDGE_MODE is not HQ.", __FUNCTION__);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014870 return false;
14871 }
14872
Chien-Yu Chenad9b6632017-08-22 19:09:23 -070014873 if (!metadata.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE) ||
14874 metadata.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0] !=
14875 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY) {
14876 ALOGV("%s: ANDROID_COLOR_CORRECTION_ABERRATION_MODE is not HQ.", __FUNCTION__);
14877 return false;
14878 }
14879
14880 if (!metadata.exists(ANDROID_CONTROL_AE_MODE) ||
14881 (metadata.find(ANDROID_CONTROL_AE_MODE).data.u8[0] != ANDROID_CONTROL_AE_MODE_ON &&
14882 metadata.find(ANDROID_CONTROL_AE_MODE).data.u8[0] !=
14883 ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH)) {
14884 ALOGV("%s: ANDROID_CONTROL_AE_MODE is not ON or ON_AUTO_FLASH.", __FUNCTION__);
14885 return false;
14886 }
14887
14888 if (!metadata.exists(ANDROID_CONTROL_AWB_MODE) ||
14889 metadata.find(ANDROID_CONTROL_AWB_MODE).data.u8[0] != ANDROID_CONTROL_AWB_MODE_AUTO) {
14890 ALOGV("%s: ANDROID_CONTROL_AWB_MODE is not AUTO.", __FUNCTION__);
14891 return false;
14892 }
14893
14894 if (!metadata.exists(ANDROID_CONTROL_EFFECT_MODE) ||
14895 metadata.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0] !=
14896 ANDROID_CONTROL_EFFECT_MODE_OFF) {
14897 ALOGV("%s: ANDROID_CONTROL_EFFECT_MODE_OFF is not OFF.", __FUNCTION__);
14898 return false;
14899 }
14900
14901 if (!metadata.exists(ANDROID_CONTROL_MODE) ||
14902 (metadata.find(ANDROID_CONTROL_MODE).data.u8[0] != ANDROID_CONTROL_MODE_AUTO &&
14903 metadata.find(ANDROID_CONTROL_MODE).data.u8[0] !=
14904 ANDROID_CONTROL_MODE_USE_SCENE_MODE)) {
14905 ALOGV("%s: ANDROID_CONTROL_MODE is not AUTO or USE_SCENE_MODE.", __FUNCTION__);
14906 return false;
14907 }
14908
14909 // TODO (b/32585046): support non-ZSL.
14910 if (!metadata.exists(ANDROID_CONTROL_ENABLE_ZSL) ||
14911 metadata.find(ANDROID_CONTROL_ENABLE_ZSL).data.u8[0] != ANDROID_CONTROL_ENABLE_ZSL_TRUE) {
14912 ALOGV("%s: ANDROID_CONTROL_ENABLE_ZSL is not true.", __FUNCTION__);
14913 return false;
14914 }
14915
14916 // TODO (b/32586081): support flash.
14917 if (!metadata.exists(ANDROID_FLASH_MODE) ||
14918 metadata.find(ANDROID_FLASH_MODE).data.u8[0] != ANDROID_FLASH_MODE_OFF) {
14919 ALOGV("%s: ANDROID_FLASH_MODE is not OFF.", __FUNCTION__);
14920 return false;
14921 }
14922
Chien-Yu Chenad9b6632017-08-22 19:09:23 -070014923 if (!metadata.exists(ANDROID_TONEMAP_MODE) ||
14924 metadata.find(ANDROID_TONEMAP_MODE).data.u8[0] != ANDROID_TONEMAP_MODE_HIGH_QUALITY) {
14925 ALOGV("%s: ANDROID_TONEMAP_MODE is not HQ.", __FUNCTION__);
14926 return false;
14927 }
14928
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070014929
Chien-Yu Chenad9b6632017-08-22 19:09:23 -070014930 // TODO (b/36693254, b/36690506): support other outputs.
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070014931 if (!gEnableMultipleHdrplusOutputs && request.num_output_buffers != 1) {
14932 ALOGV("%s: Only support 1 output: %d", __FUNCTION__, request.num_output_buffers);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014933 return false;
14934 }
14935
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070014936 switch (request.output_buffers[0].stream->format) {
14937 case HAL_PIXEL_FORMAT_BLOB:
14938 break;
14939 case HAL_PIXEL_FORMAT_YCbCr_420_888:
14940 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
14941 // TODO (b/36693254): Only support full size.
14942 if (!gEnableMultipleHdrplusOutputs) {
14943 if (static_cast<int>(request.output_buffers[0].stream->width) !=
14944 gCamCapability[mCameraId]->picture_sizes_tbl[0].width ||
14945 static_cast<int>(request.output_buffers[0].stream->height) !=
14946 gCamCapability[mCameraId]->picture_sizes_tbl[0].height) {
14947 ALOGV("%s: Only full size is supported.", __FUNCTION__);
14948 return false;
14949 }
14950 }
14951 break;
14952 default:
14953 ALOGV("%s: Not an HDR+ request: Only Jpeg and YUV output is supported.", __FUNCTION__);
14954 for (uint32_t i = 0; i < request.num_output_buffers; i++) {
14955 ALOGV("%s: output_buffers[%u]: %dx%d format %d", __FUNCTION__, i,
14956 request.output_buffers[0].stream->width,
14957 request.output_buffers[0].stream->height,
14958 request.output_buffers[0].stream->format);
14959 }
14960 return false;
14961 }
14962
Chien-Yu Chenad9b6632017-08-22 19:09:23 -070014963 return true;
14964}
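
/* Summary of the gate above: an HDR+-compatible request has the per-request
 * HDR+ disable flag unset, uses high-quality noise reduction, edge, aberration
 * and tonemap modes, AE on (with or without auto flash), AWB auto, no color
 * effect, control mode AUTO or USE_SCENE_MODE, ZSL enabled, flash off, and
 * (unless multiple HDR+ outputs are enabled) exactly one output buffer that is
 * either JPEG or a full-size YUV/implementation-defined stream.
 */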
14965
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070014966void QCamera3HardwareInterface::abortPendingHdrplusRequest(HdrPlusPendingRequest *hdrPlusRequest) {
14967 if (hdrPlusRequest == nullptr) return;
14968
14969 for (auto & outputBufferIter : hdrPlusRequest->outputBuffers) {
14970 // Find the stream for this buffer.
14971 for (auto streamInfo : mStreamInfo) {
14972 if (streamInfo->id == outputBufferIter.first) {
14973 if (streamInfo->channel == mPictureChannel) {
14974 // For picture channel, this buffer is internally allocated so return this
14975 // buffer to picture channel.
14976 mPictureChannel->returnYuvBuffer(outputBufferIter.second.get());
14977 } else {
14978 // Unregister this buffer for other channels.
14979 streamInfo->channel->unregisterBuffer(outputBufferIter.second.get());
14980 }
14981 break;
14982 }
14983 }
14984 }
14985
14986 hdrPlusRequest->outputBuffers.clear();
14987 hdrPlusRequest->frameworkOutputBuffers.clear();
14988}
14989
Chien-Yu Chenad9b6632017-08-22 19:09:23 -070014990bool QCamera3HardwareInterface::trySubmittingHdrPlusRequestLocked(
14991 HdrPlusPendingRequest *hdrPlusRequest, const camera3_capture_request_t &request,
14992 const CameraMetadata &metadata)
14993{
14994 if (hdrPlusRequest == nullptr) return false;
14995 if (!isRequestHdrPlusCompatible(request, metadata)) return false;
14996
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070014997 status_t res = OK;
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014998 pbcamera::CaptureRequest pbRequest;
14999 pbRequest.id = request.frame_number;
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070015000 // Iterate through all requested output buffers and add them to an HDR+ request.
15001 for (uint32_t i = 0; i < request.num_output_buffers; i++) {
15002 // Find the index of the stream in mStreamInfo.
15003 uint32_t pbStreamId = 0;
15004 bool found = false;
15005 for (auto streamInfo : mStreamInfo) {
15006 if (streamInfo->stream == request.output_buffers[i].stream) {
15007 pbStreamId = streamInfo->id;
15008 found = true;
15009 break;
15010 }
15011 }
15012
15013 if (!found) {
15014 ALOGE("%s: requested stream was not configured.", __FUNCTION__);
15015 abortPendingHdrplusRequest(hdrPlusRequest);
15016 return false;
15017 }
15018 auto outBuffer = std::make_shared<mm_camera_buf_def_t>();
15019 switch (request.output_buffers[i].stream->format) {
15020 case HAL_PIXEL_FORMAT_BLOB:
15021 {
15022 // For jpeg output, get a YUV buffer from pic channel.
15023 QCamera3PicChannel *picChannel =
15024 (QCamera3PicChannel*)request.output_buffers[i].stream->priv;
15025 res = picChannel->getYuvBufferForRequest(outBuffer.get(), request.frame_number);
15026 if (res != OK) {
15027 ALOGE("%s: Getting an available YUV buffer from pic channel failed: %s (%d)",
15028 __FUNCTION__, strerror(-res), res);
15029 abortPendingHdrplusRequest(hdrPlusRequest);
15030 return false;
15031 }
15032 break;
15033 }
15034 case HAL_PIXEL_FORMAT_YCbCr_420_888:
15035 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
15036 {
15037 // For YUV output, register the buffer and get the buffer def from the channel.
15038 QCamera3ProcessingChannel *channel =
15039 (QCamera3ProcessingChannel*)request.output_buffers[i].stream->priv;
15040 res = channel->registerBufferAndGetBufDef(request.output_buffers[i].buffer,
15041 outBuffer.get());
15042 if (res != OK) {
15043 ALOGE("%s: Getting the buffer def failed: %s (%d)", __FUNCTION__,
15044 strerror(-res), res);
15045 abortPendingHdrplusRequest(hdrPlusRequest);
15046 return false;
15047 }
15048 break;
15049 }
15050 default:
15051 abortPendingHdrplusRequest(hdrPlusRequest);
15052 return false;
15053 }
15054
15055 pbcamera::StreamBuffer buffer;
15056 buffer.streamId = pbStreamId;
15057 buffer.dmaBufFd = outBuffer->fd;
15058 buffer.data = outBuffer->fd == -1 ? outBuffer->buffer : nullptr;
15059 buffer.dataSize = outBuffer->frame_len;
15060
15061 pbRequest.outputBuffers.push_back(buffer);
15062
15063 hdrPlusRequest->outputBuffers.emplace(pbStreamId, outBuffer);
15064 hdrPlusRequest->frameworkOutputBuffers.emplace(pbStreamId, request.output_buffers[i]);
15065 }
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080015066
15067 // Submit an HDR+ capture request to HDR+ service.
Chien-Yu Chen17cec362017-07-05 17:10:31 -070015068 res = gHdrPlusClient->submitCaptureRequest(&pbRequest, metadata);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080015069 if (res != OK) {
15070 ALOGE("%s: %d: Submitting a capture request failed: %s (%d)", __FUNCTION__, __LINE__,
15071 strerror(-res), res);
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070015072 abortPendingHdrplusRequest(hdrPlusRequest);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080015073 return false;
15074 }
15075
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080015076 return true;
15077}
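
/* Note on buffer ownership in the request built above: for BLOB (JPEG) outputs
 * the YUV buffer is borrowed from the picture channel and is later returned and
 * encoded in onCaptureResult(); for YUV/implementation-defined outputs the
 * framework buffer is registered with its channel here and unregistered again
 * when the HDR+ result (or a failure) arrives.
 */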
15078
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070015079status_t QCamera3HardwareInterface::openHdrPlusClientAsyncLocked()
15080{
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015081 if (gHdrPlusClientOpening || gHdrPlusClient != nullptr) {
15082 return OK;
15083 }
15084
Chien-Yu Chend77a5462017-06-02 18:00:38 -070015085 status_t res = gEaselManagerClient->openHdrPlusClientAsync(this);
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015086 if (res != OK) {
15087 ALOGE("%s: Opening HDR+ client asynchronously failed: %s (%d)", __FUNCTION__,
15088 strerror(-res), res);
15089 return res;
15090 }
15091 gHdrPlusClientOpening = true;
15092
15093 return OK;
15094}
15095
Chien-Yu Chenee335912017-02-09 17:53:20 -080015096status_t QCamera3HardwareInterface::enableHdrPlusModeLocked()
15097{
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070015098 status_t res;
Chien-Yu Chenee335912017-02-09 17:53:20 -080015099
Chien-Yu Chena6c99062017-05-23 13:45:06 -070015100 if (mHdrPlusModeEnabled) {
15101 return OK;
15102 }
15103
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015104 // Check if gHdrPlusClient is opened or being opened.
15105 if (gHdrPlusClient == nullptr) {
15106 if (gHdrPlusClientOpening) {
15107 // HDR+ client is being opened. HDR+ mode will be enabled when it's opened.
15108 return OK;
15109 }
15110
15111 res = openHdrPlusClientAsyncLocked();
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070015112 if (res != OK) {
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015113 ALOGE("%s: Failed to open HDR+ client asynchronously: %s (%d).", __FUNCTION__,
15114 strerror(-res), res);
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070015115 return res;
15116 }
15117
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015118 // When opening HDR+ client completes, HDR+ mode will be enabled.
15119 return OK;
15120
Chien-Yu Chenee335912017-02-09 17:53:20 -080015121 }
15122
15123 // Configure stream for HDR+.
15124 res = configureHdrPlusStreamsLocked();
15125 if (res != OK) {
15126 LOGE("%s: Failed to configure HDR+ streams: %s (%d)", __FUNCTION__, strerror(-res), res);
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070015127 return res;
15128 }
15129
15130 // Enable HDR+ mode so Easel will start capturing ZSL raw buffers.
15131 res = gHdrPlusClient->setZslHdrPlusMode(true);
15132 if (res != OK) {
15133 LOGE("%s: Failed to enable HDR+ mode: %s (%d)", __FUNCTION__, strerror(-res), res);
Chien-Yu Chenee335912017-02-09 17:53:20 -080015134 return res;
15135 }
15136
15137 mHdrPlusModeEnabled = true;
15138 ALOGD("%s: HDR+ mode enabled", __FUNCTION__);
15139
15140 return OK;
15141}
15142
Chien-Yu Chen77ccd022017-06-23 12:00:36 -070015143void QCamera3HardwareInterface::finishHdrPlusClientOpeningLocked(std::unique_lock<std::mutex> &lock)
15144{
15145 if (gHdrPlusClientOpening) {
15146 gHdrPlusClientOpenCond.wait(lock, [&] { return !gHdrPlusClientOpening; });
15147 }
15148}
15149
Chien-Yu Chenee335912017-02-09 17:53:20 -080015150void QCamera3HardwareInterface::disableHdrPlusModeLocked()
15151{
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070015152 // Disable HDR+ mode.
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080015153 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070015154 status_t res = gHdrPlusClient->setZslHdrPlusMode(false);
15155 if (res != OK) {
15156 ALOGE("%s: Failed to disable HDR+ mode: %s (%d)", __FUNCTION__, strerror(-res), res);
15157 }
Chien-Yu Chen3b17c672017-04-24 12:49:52 -070015158
15159 // Close HDR+ client so Easel can enter low power mode.
Chien-Yu Chend77a5462017-06-02 18:00:38 -070015160 gEaselManagerClient->closeHdrPlusClient(std::move(gHdrPlusClient));
Chien-Yu Chen3b17c672017-04-24 12:49:52 -070015161 gHdrPlusClient = nullptr;
Chien-Yu Chenee335912017-02-09 17:53:20 -080015162 }
15163
15164 mHdrPlusModeEnabled = false;
15165 ALOGD("%s: HDR+ mode disabled", __FUNCTION__);
15166}
15167
Chien-Yu Chendeaebad2017-06-30 11:46:34 -070015168bool QCamera3HardwareInterface::isSessionHdrPlusModeCompatible()
15169{
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070015170 // Check that at least one YUV or one JPEG output is configured.
15171 // TODO: Support RAW (b/36690506)
15172 for (auto streamInfo : mStreamInfo) {
15173 if (streamInfo != nullptr && streamInfo->stream != nullptr) {
15174 if (streamInfo->stream->stream_type == CAMERA3_STREAM_OUTPUT &&
15175 (streamInfo->stream->format == HAL_PIXEL_FORMAT_BLOB ||
15176 streamInfo->stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888 ||
15177 streamInfo->stream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED)) {
15178 return true;
15179 }
15180 }
Chien-Yu Chendeaebad2017-06-30 11:46:34 -070015181 }
15182
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070015183 return false;
Chien-Yu Chendeaebad2017-06-30 11:46:34 -070015184}
15185
Chien-Yu Chenee335912017-02-09 17:53:20 -080015186status_t QCamera3HardwareInterface::configureHdrPlusStreamsLocked()
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080015187{
15188 pbcamera::InputConfiguration inputConfig;
15189 std::vector<pbcamera::StreamConfiguration> outputStreamConfigs;
15190 status_t res = OK;
15191
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070015192 // Sensor MIPI will send data to Easel.
15193 inputConfig.isSensorInput = true;
15194 inputConfig.sensorMode.cameraId = mCameraId;
15195 inputConfig.sensorMode.pixelArrayWidth = mSensorModeInfo.pixel_array_size.width;
15196 inputConfig.sensorMode.pixelArrayHeight = mSensorModeInfo.pixel_array_size.height;
15197 inputConfig.sensorMode.activeArrayWidth = mSensorModeInfo.active_array_size.width;
15198 inputConfig.sensorMode.activeArrayHeight = mSensorModeInfo.active_array_size.height;
15199 inputConfig.sensorMode.outputPixelClkHz = mSensorModeInfo.op_pixel_clk;
15200 inputConfig.sensorMode.timestampOffsetNs = mSensorModeInfo.timestamp_offset;
15201 if (mSensorModeInfo.num_raw_bits != 10) {
15202 ALOGE("%s: Only RAW10 is supported but this sensor mode has %d raw bits.", __FUNCTION__,
15203 mSensorModeInfo.num_raw_bits);
15204 return BAD_VALUE;
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080015205 }
15206
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070015207 inputConfig.sensorMode.format = HAL_PIXEL_FORMAT_RAW10;
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080015208
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070015209 // Iterate through configured output streams in HAL and configure those streams in HDR+
15210 // service.
15211 for (auto streamInfo : mStreamInfo) {
15212 pbcamera::StreamConfiguration outputConfig;
15213 if (streamInfo->stream->stream_type == CAMERA3_STREAM_OUTPUT) {
15214 switch (streamInfo->stream->format) {
15215 case HAL_PIXEL_FORMAT_BLOB:
15216 case HAL_PIXEL_FORMAT_YCbCr_420_888:
15217 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
15218 res = fillPbStreamConfig(&outputConfig, streamInfo->id,
15219 streamInfo->channel, /*stream index*/0);
15220 if (res != OK) {
15221 LOGE("%s: Failed to get fill stream config for YUV stream: %s (%d)",
15222 __FUNCTION__, strerror(-res), res);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080015223
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070015224 return res;
15225 }
15226
15227 outputStreamConfigs.push_back(outputConfig);
15228 break;
15229 default:
15230 // TODO: handle RAW16 outputs if mRawChannel was created. (b/36690506)
15231 break;
15232 }
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080015233 }
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080015234 }
15235
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080015236 res = gHdrPlusClient->configureStreams(inputConfig, outputStreamConfigs);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080015237 if (res != OK) {
15238 LOGE("%d: Failed to configure streams with HDR+ client: %s (%d)", __FUNCTION__,
15239 strerror(-res), res);
15240 return res;
15241 }
15242
15243 return OK;
15244}
15245
Chien-Yu Chene80574b2017-09-08 19:05:20 -070015246void QCamera3HardwareInterface::handleEaselFatalError()
Chien-Yu Chen90f1fc12017-07-14 14:31:53 -070015247{
Chien-Yu Chen90f1fc12017-07-14 14:31:53 -070015248 pthread_mutex_lock(&mMutex);
15249 mState = ERROR;
15250 pthread_mutex_unlock(&mMutex);
15251
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -070015252 handleCameraDeviceError(/*stopChannelImmediately*/true);
Chien-Yu Chen90f1fc12017-07-14 14:31:53 -070015253}
15254
Chien-Yu Chene80574b2017-09-08 19:05:20 -070015255void QCamera3HardwareInterface::handleEaselFatalErrorAsync()
15256{
15257 if (mEaselErrorFuture.valid()) {
        // A fatal error handler has already been launched.
15259 return;
15260 }
15261
15262 // Launch a future to handle the fatal error.
15263 mEaselErrorFuture = std::async(std::launch::async,
15264 &QCamera3HardwareInterface::handleEaselFatalError, this);
15265}
15266
15267void QCamera3HardwareInterface::onEaselFatalError(std::string errMsg)
15268{
15269 ALOGE("%s: Got an Easel fatal error: %s", __FUNCTION__, errMsg.c_str());
15270 handleEaselFatalErrorAsync();
15271}
15272
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070015273void QCamera3HardwareInterface::onOpened(std::unique_ptr<HdrPlusClient> client)
15274{
Arnd Geis8cbfc182017-09-07 14:46:41 -070015275 int rc = NO_ERROR;
15276
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015277 if (client == nullptr) {
15278 ALOGE("%s: Opened client is null.", __FUNCTION__);
15279 return;
15280 }
15281
Chien-Yu Chene96475e2017-04-11 11:53:26 -070015282 logEaselEvent("EASEL_STARTUP_LATENCY", "HDR+ client opened.");
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015283 ALOGI("%s: HDR+ client opened.", __FUNCTION__);
15284
Chien-Yu Chen77ccd022017-06-23 12:00:36 -070015285 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070015286 if (!gHdrPlusClientOpening) {
15287 ALOGW("%s: HDR+ is disabled while HDR+ client is being opened.", __FUNCTION__);
15288 return;
15289 }
15290
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015291 gHdrPlusClient = std::move(client);
15292 gHdrPlusClientOpening = false;
Chien-Yu Chen77ccd022017-06-23 12:00:36 -070015293 gHdrPlusClientOpenCond.notify_one();
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015294
15295 // Set static metadata.
15296 status_t res = gHdrPlusClient->setStaticMetadata(*gStaticMetadata[mCameraId]);
15297 if (res != OK) {
15298 LOGE("%s: Failed to set static metadata in HDR+ client: %s (%d). Closing HDR+ client.",
15299 __FUNCTION__, strerror(-res), res);
Chien-Yu Chend77a5462017-06-02 18:00:38 -070015300 gEaselManagerClient->closeHdrPlusClient(std::move(gHdrPlusClient));
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015301 gHdrPlusClient = nullptr;
15302 return;
15303 }
15304
15305 // Enable HDR+ mode.
15306 res = enableHdrPlusModeLocked();
15307 if (res != OK) {
15308 LOGE("%s: Failed to configure HDR+ streams.", __FUNCTION__);
15309 }
Arnd Geis8cbfc182017-09-07 14:46:41 -070015310
15311 // Get Easel firmware version
15312 if (EaselManagerClientOpened) {
15313 rc = gEaselManagerClient->getFwVersion(mEaselFwVersion);
15314 if (rc != OK) {
15315 ALOGD("%s: Failed to query Easel firmware version", __FUNCTION__);
15316 } else {
15317 mEaselFwUpdated = true;
15318 }
15319 }
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015320}
15321
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070015322void QCamera3HardwareInterface::onOpenFailed(status_t err)
15323{
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015324 ALOGE("%s: Opening HDR+ client failed: %s (%d)", __FUNCTION__, strerror(-err), err);
Chien-Yu Chen77ccd022017-06-23 12:00:36 -070015325 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015326 gHdrPlusClientOpening = false;
Chien-Yu Chen77ccd022017-06-23 12:00:36 -070015327 gHdrPlusClientOpenCond.notify_one();
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015328}
15329
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070015330void QCamera3HardwareInterface::onFatalError()
15331{
Chien-Yu Chene80574b2017-09-08 19:05:20 -070015332 ALOGE("%s: HDR+ client encountered a fatal error.", __FUNCTION__);
15333 handleEaselFatalErrorAsync();
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070015334}
15335
Chien-Yu Chen29fd1d72017-04-27 18:42:09 -070015336void QCamera3HardwareInterface::onShutter(uint32_t requestId, int64_t apSensorTimestampNs)
15337{
15338 ALOGV("%s: %d: Received a shutter for HDR+ request %d timestamp %" PRId64, __FUNCTION__,
15339 __LINE__, requestId, apSensorTimestampNs);
15340
15341 mShutterDispatcher.markShutterReady(requestId, apSensorTimestampNs);
15342}
15343
Chien-Yu Chendaf68892017-08-25 12:56:40 -070015344void QCamera3HardwareInterface::onNextCaptureReady(uint32_t requestId)
15345{
15346 pthread_mutex_lock(&mMutex);
15347
15348 // Find the pending request for this result metadata.
15349 auto requestIter = mPendingRequestsList.begin();
15350 while (requestIter != mPendingRequestsList.end() && requestIter->frame_number != requestId) {
15351 requestIter++;
15352 }
15353
15354 if (requestIter == mPendingRequestsList.end()) {
15355 ALOGE("%s: Cannot find a pending request for frame number %u.", __FUNCTION__, requestId);
15356 pthread_mutex_unlock(&mMutex);
15357 return;
15358 }
15359
15360 requestIter->partial_result_cnt++;
15361
15362 CameraMetadata metadata;
15363 uint8_t ready = true;
15364 metadata.update(NEXUS_EXPERIMENTAL_2017_NEXT_STILL_INTENT_REQUEST_READY, &ready, 1);
15365
15366 // Send it to framework.
15367 camera3_capture_result_t result = {};
15368
15369 result.result = metadata.getAndLock();
15370 // Populate metadata result
15371 result.frame_number = requestId;
15372 result.num_output_buffers = 0;
15373 result.output_buffers = NULL;
15374 result.partial_result = requestIter->partial_result_cnt;
15375
15376 orchestrateResult(&result);
15377 metadata.unlock(result.result);
15378
15379 pthread_mutex_unlock(&mMutex);
15380}
15381
Chien-Yu Chen0a921f92017-08-27 17:25:33 -070015382void QCamera3HardwareInterface::onPostview(uint32_t requestId,
15383 std::unique_ptr<std::vector<uint8_t>> postview, uint32_t width, uint32_t height,
15384 uint32_t stride, int32_t format)
15385{
15386 if (property_get_bool("persist.camera.hdrplus.dump_postview", false)) {
15387 ALOGI("%s: %d: Received a postview %dx%d for HDR+ request %d", __FUNCTION__,
15388 __LINE__, width, height, requestId);
15389 char buf[FILENAME_MAX] = {};
15390 snprintf(buf, sizeof(buf), QCAMERA_DUMP_FRM_LOCATION"postview_%d_%dx%d.ppm",
15391 requestId, width, height);
15392
15393 pbcamera::StreamConfiguration config = {};
15394 config.image.width = width;
15395 config.image.height = height;
15396 config.image.format = format;
15397
15398 pbcamera::PlaneConfiguration plane = {};
15399 plane.stride = stride;
15400 plane.scanline = height;
15401
15402 config.image.planes.push_back(plane);
15403
15404 pbcamera::StreamBuffer buffer = {};
15405 buffer.streamId = 0;
15406 buffer.dmaBufFd = -1;
15407 buffer.data = postview->data();
15408 buffer.dataSize = postview->size();
15409
15410 hdrplus_client_utils::writePpm(buf, config, buffer);
15411 }
15412
15413 pthread_mutex_lock(&mMutex);
15414
15415 // Find the pending request for this result metadata.
15416 auto requestIter = mPendingRequestsList.begin();
15417 while (requestIter != mPendingRequestsList.end() && requestIter->frame_number != requestId) {
15418 requestIter++;
15419 }
15420
15421 if (requestIter == mPendingRequestsList.end()) {
15422 ALOGE("%s: Cannot find a pending request for frame number %u.", __FUNCTION__, requestId);
15423 pthread_mutex_unlock(&mMutex);
15424 return;
15425 }
15426
15427 requestIter->partial_result_cnt++;
15428
15429 CameraMetadata metadata;
15430 int32_t config[3] = {static_cast<int32_t>(width), static_cast<int32_t>(height),
15431 static_cast<int32_t>(stride)};
15432 metadata.update(NEXUS_EXPERIMENTAL_2017_POSTVIEW_CONFIG, config, 3);
15433 metadata.update(NEXUS_EXPERIMENTAL_2017_POSTVIEW_DATA, postview->data(), postview->size());
15434
15435 // Send it to framework.
15436 camera3_capture_result_t result = {};
15437
15438 result.result = metadata.getAndLock();
15439 // Populate metadata result
15440 result.frame_number = requestId;
15441 result.num_output_buffers = 0;
15442 result.output_buffers = NULL;
15443 result.partial_result = requestIter->partial_result_cnt;
15444
15445 orchestrateResult(&result);
15446 metadata.unlock(result.result);
15447
15448 pthread_mutex_unlock(&mMutex);
15449}
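
/* Debug note: the postview dump above is written as a PPM file under
 * QCAMERA_DUMP_FRM_LOCATION whenever the persist.camera.hdrplus.dump_postview
 * property evaluates to true (e.g. via a hypothetical
 * `adb shell setprop persist.camera.hdrplus.dump_postview 1`).
 */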
15450
Chien-Yu Chene687bd02016-12-07 18:30:26 -080015451void QCamera3HardwareInterface::onCaptureResult(pbcamera::CaptureResult *result,
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070015452 const camera_metadata_t &resultMetadata)
15453{
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070015454 if (result == nullptr) {
15455 ALOGE("%s: result is nullptr.", __FUNCTION__);
15456 return;
15457 }
15458
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070015459 // Find the pending HDR+ request.
15460 HdrPlusPendingRequest pendingRequest;
15461 {
15462 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
15463 auto req = mHdrPlusPendingRequests.find(result->requestId);
15464 pendingRequest = req->second;
15465 }
15466
15467 // Update the result metadata with the settings of the HDR+ still capture request because
15468 // the result metadata belongs to a ZSL buffer.
15469 CameraMetadata metadata;
15470 metadata = &resultMetadata;
15471 updateHdrPlusResultMetadata(metadata, pendingRequest.settings);
15472 camera_metadata_t* updatedResultMetadata = metadata.release();
15473
15474 uint32_t halSnapshotStreamId = 0;
15475 if (mPictureChannel != nullptr) {
15476 halSnapshotStreamId = mPictureChannel->getStreamID(mPictureChannel->getStreamTypeMask());
15477 }
15478
15479 auto halMetadata = std::make_shared<metadata_buffer_t>();
15480 clear_metadata_buffer(halMetadata.get());
15481
15482 // Convert updated result metadata to HAL metadata.
15483 status_t res = translateFwkMetadataToHalMetadata(updatedResultMetadata, halMetadata.get(),
15484 halSnapshotStreamId, /*minFrameDuration*/0);
15485 if (res != 0) {
15486 ALOGE("%s: Translating metadata failed: %s (%d)", __FUNCTION__, strerror(-res), res);
15487 }
15488
15489 for (auto &outputBuffer : result->outputBuffers) {
15490 uint32_t streamId = outputBuffer.streamId;
15491
15492 // Find the framework output buffer in the pending request.
        auto frameworkOutputBufferIter = pendingRequest.frameworkOutputBuffers.find(streamId);
        if (frameworkOutputBufferIter == pendingRequest.frameworkOutputBuffers.end()) {
            ALOGE("%s: Couldn't find framework output buffers for stream id %u", __FUNCTION__,
                    streamId);
            continue;
        }

        camera3_stream_buffer_t *frameworkOutputBuffer = &frameworkOutputBufferIter->second;

        // Find the channel for the output buffer.
        QCamera3ProcessingChannel *channel =
                (QCamera3ProcessingChannel*)frameworkOutputBuffer->stream->priv;

        // Find the output buffer def.
        auto outputBufferIter = pendingRequest.outputBuffers.find(streamId);
        if (outputBufferIter == pendingRequest.outputBuffers.end()) {
            ALOGE("%s: Cannot find output buffer", __FUNCTION__);
            continue;
        }

        std::shared_ptr<mm_camera_buf_def_t> outputBufferDef = outputBufferIter->second;

        // Check whether to dump the buffer.
        if (frameworkOutputBuffer->stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888 ||
                frameworkOutputBuffer->stream->format == HAL_PIXEL_FORMAT_BLOB) {
            // If the stream format is YUV or JPEG, check if dumping HDR+ YUV output is enabled.
            char prop[PROPERTY_VALUE_MAX];
            property_get("persist.camera.hdrplus.dump_yuv", prop, "0");
            bool dumpYuvOutput = atoi(prop);

            if (dumpYuvOutput) {
                // Dump yuv buffer to a ppm file.
                pbcamera::StreamConfiguration outputConfig;
                status_t rc = fillPbStreamConfig(&outputConfig, streamId,
                        channel, /*stream index*/0);
                if (rc == OK) {
                    char buf[FILENAME_MAX] = {};
                    snprintf(buf, sizeof(buf), QCAMERA_DUMP_FRM_LOCATION"s_%d_%d_%dx%d.ppm",
                            result->requestId, streamId,
                            outputConfig.image.width, outputConfig.image.height);

                    hdrplus_client_utils::writePpm(buf, outputConfig, outputBuffer);
                } else {
                    LOGW("%s: Couldn't dump YUV buffer because getting stream config failed: "
                            "%s (%d).", __FUNCTION__, strerror(-rc), rc);
                }
            }
        }

        if (channel == mPictureChannel) {
            // Return the buffer to pic channel for encoding.
            mPictureChannel->returnYuvBufferAndEncode(outputBufferDef.get(),
                    frameworkOutputBuffer->buffer, result->requestId,
                    halMetadata);
        } else {
            // Return the buffer to camera framework.
            pthread_mutex_lock(&mMutex);
            handleBufferWithLock(frameworkOutputBuffer, result->requestId);
            pthread_mutex_unlock(&mMutex);

            channel->unregisterBuffer(outputBufferDef.get());
        }
    }

    // Send HDR+ metadata to framework.
    {
        pthread_mutex_lock(&mMutex);

        // updatedResultMetadata will be freed in handlePendingResultMetadataWithLock.
        handlePendingResultMetadataWithLock(result->requestId, updatedResultMetadata);
        pthread_mutex_unlock(&mMutex);
    }

    // Remove the HDR+ pending request.
    {
        Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
        auto req = mHdrPlusPendingRequests.find(result->requestId);
        mHdrPlusPendingRequests.erase(req);
    }
}

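// Handle a failed HDR+ capture result: return the YUV output buffers to their
// channels, notify the framework of buffer errors for every pending buffer of
// that frame, and drop the pending HDR+ and HAL requests.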
void QCamera3HardwareInterface::onFailedCaptureResult(pbcamera::CaptureResult *failedResult)
{
    if (failedResult == nullptr) {
        ALOGE("%s: Got an empty failed result.", __FUNCTION__);
        return;
    }

    ALOGE("%s: Got a failed HDR+ result for request %d", __FUNCTION__, failedResult->requestId);

    // Find the pending HDR+ request.
    HdrPlusPendingRequest pendingRequest;
    {
        Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
        auto req = mHdrPlusPendingRequests.find(failedResult->requestId);
        if (req == mHdrPlusPendingRequests.end()) {
            ALOGE("%s: Couldn't find pending request %d", __FUNCTION__, failedResult->requestId);
            return;
        }
        pendingRequest = req->second;
    }

    for (auto &outputBuffer : failedResult->outputBuffers) {
        uint32_t streamId = outputBuffer.streamId;

        // Find the framework output buffer in the pending request.
        auto frameworkOutputBufferIter = pendingRequest.frameworkOutputBuffers.find(streamId);
        if (frameworkOutputBufferIter == pendingRequest.frameworkOutputBuffers.end()) {
            ALOGE("%s: Couldn't find framework output buffers for stream id %u", __FUNCTION__,
                    streamId);
            continue;
        }

        camera3_stream_buffer_t *frameworkOutputBuffer = &frameworkOutputBufferIter->second;

        // Find the channel for the output buffer.
        QCamera3ProcessingChannel *channel =
                (QCamera3ProcessingChannel*)frameworkOutputBuffer->stream->priv;

        // Find the output buffer def.
        auto outputBufferIter = pendingRequest.outputBuffers.find(streamId);
        if (outputBufferIter == pendingRequest.outputBuffers.end()) {
            ALOGE("%s: Cannot find output buffer", __FUNCTION__);
            continue;
        }

        std::shared_ptr<mm_camera_buf_def_t> outputBufferDef = outputBufferIter->second;

        if (channel == mPictureChannel) {
            // Return the buffer to pic channel.
            mPictureChannel->returnYuvBuffer(outputBufferDef.get());
        } else {
            channel->unregisterBuffer(outputBufferDef.get());
        }
    }

    // Remove the HDR+ pending request.
    {
        Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
        auto req = mHdrPlusPendingRequests.find(failedResult->requestId);
        mHdrPlusPendingRequests.erase(req);
    }

    pthread_mutex_lock(&mMutex);

    // Find the pending buffers.
    auto pendingBuffers = mPendingBuffersMap.mPendingBuffersInRequest.begin();
    while (pendingBuffers != mPendingBuffersMap.mPendingBuffersInRequest.end()) {
        if (pendingBuffers->frame_number == failedResult->requestId) {
            break;
        }
        pendingBuffers++;
    }

    // Send out buffer errors for the pending buffers.
    if (pendingBuffers != mPendingBuffersMap.mPendingBuffersInRequest.end()) {
        std::vector<camera3_stream_buffer_t> streamBuffers;
        for (auto &buffer : pendingBuffers->mPendingBufferList) {
            // Prepare a stream buffer.
            camera3_stream_buffer_t streamBuffer = {};
            streamBuffer.stream = buffer.stream;
            streamBuffer.buffer = buffer.buffer;
            streamBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
            streamBuffer.acquire_fence = -1;
            streamBuffer.release_fence = -1;

            streamBuffers.push_back(streamBuffer);

            // Send out error buffer event.
            camera3_notify_msg_t notify_msg = {};
            notify_msg.type = CAMERA3_MSG_ERROR;
            notify_msg.message.error.frame_number = pendingBuffers->frame_number;
            notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
            notify_msg.message.error.error_stream = buffer.stream;

            orchestrateNotify(&notify_msg);
        }

        camera3_capture_result_t result = {};
        result.frame_number = pendingBuffers->frame_number;
        result.num_output_buffers = streamBuffers.size();
        result.output_buffers = &streamBuffers[0];

        // Send out result with buffer errors.
        orchestrateResult(&result);

        // Remove pending buffers.
        mPendingBuffersMap.mPendingBuffersInRequest.erase(pendingBuffers);
    }

    // Remove pending request.
    auto halRequest = mPendingRequestsList.begin();
    while (halRequest != mPendingRequestsList.end()) {
        if (halRequest->frame_number == failedResult->requestId) {
            mPendingRequestsList.erase(halRequest);
            break;
        }
        halRequest++;
    }

    pthread_mutex_unlock(&mMutex);
}

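// ShutterDispatcher tracks expected shutter notifications per frame number and
// forwards CAMERA3_MSG_SHUTTER messages to the framework in frame-number order.
// Regular and reprocess requests are tracked in separate maps so one kind does
// not block the other.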
ShutterDispatcher::ShutterDispatcher(QCamera3HardwareInterface *parent) :
        mParent(parent) {}

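// Register a frame number whose shutter notification is expected later.
// Reprocess frames are tracked separately from regular frames.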
void ShutterDispatcher::expectShutter(uint32_t frameNumber, bool isReprocess)
{
    std::lock_guard<std::mutex> lock(mLock);

    if (isReprocess) {
        mReprocessShutters.emplace(frameNumber, Shutter());
    } else {
        mShutters.emplace(frameNumber, Shutter());
    }
}

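// Record the timestamp for a frame's shutter, then send out all consecutive
// ready shutters starting from the lowest pending frame number, stopping at the
// first entry that is not ready yet.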
void ShutterDispatcher::markShutterReady(uint32_t frameNumber, uint64_t timestamp)
{
    std::lock_guard<std::mutex> lock(mLock);

    std::map<uint32_t, Shutter> *shutters = nullptr;

    // Find the shutter entry.
    auto shutter = mShutters.find(frameNumber);
    if (shutter == mShutters.end()) {
        shutter = mReprocessShutters.find(frameNumber);
        if (shutter == mReprocessShutters.end()) {
            // Shutter was already sent.
            return;
        }
        shutters = &mReprocessShutters;
    } else {
        shutters = &mShutters;
    }

    // Make this frame's shutter ready.
    shutter->second.ready = true;
    shutter->second.timestamp = timestamp;

    // Iterate through the shutters and send them out until reaching one that is not ready yet.
    shutter = shutters->begin();
    while (shutter != shutters->end()) {
        if (!shutter->second.ready) {
            // If this shutter is not ready, the following shutters can't be sent.
            break;
        }

        camera3_notify_msg_t msg = {};
        msg.type = CAMERA3_MSG_SHUTTER;
        msg.message.shutter.frame_number = shutter->first;
        msg.message.shutter.timestamp = shutter->second.timestamp;
        mParent->orchestrateNotify(&msg);

        shutter = shutters->erase(shutter);
    }
}

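// Drop any pending shutter (regular or reprocess) for a single frame number.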
void ShutterDispatcher::clear(uint32_t frameNumber)
{
    std::lock_guard<std::mutex> lock(mLock);
    mShutters.erase(frameNumber);
    mReprocessShutters.erase(frameNumber);
}

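// Log any shutters that were never sent and drop all pending entries.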
void ShutterDispatcher::clear()
{
    std::lock_guard<std::mutex> lock(mLock);

    // Log errors for stale shutters.
    for (auto &shutter : mShutters) {
        ALOGE("%s: stale shutter: frame number %u, ready %d, timestamp %" PRId64,
                __FUNCTION__, shutter.first, shutter.second.ready,
                shutter.second.timestamp);
    }

    // Log errors for stale reprocess shutters.
    for (auto &shutter : mReprocessShutters) {
        ALOGE("%s: stale reprocess shutter: frame number %u, ready %d, timestamp %" PRId64,
                __FUNCTION__, shutter.first, shutter.second.ready,
                shutter.second.timestamp);
    }

    mShutters.clear();
    mReprocessShutters.clear();
}

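// OutputBufferDispatcher tracks expected output buffers per stream and returns
// them to the framework in frame-number order for each stream, so a buffer never
// reaches the framework before buffers of earlier frames on the same stream.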
OutputBufferDispatcher::OutputBufferDispatcher(QCamera3HardwareInterface *parent) :
        mParent(parent) {}

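// Reset the dispatcher for a new stream configuration: clear all pending buffers
// and create an empty frame-number -> buffer map for each configured stream.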
status_t OutputBufferDispatcher::configureStreams(camera3_stream_configuration_t *streamList)
{
    std::lock_guard<std::mutex> lock(mLock);
    mStreamBuffers.clear();
    if (!streamList) {
        ALOGE("%s: streamList is nullptr.", __FUNCTION__);
        return -EINVAL;
    }

    // Create a "frame-number -> buffer" map for each stream.
    for (uint32_t i = 0; i < streamList->num_streams; i++) {
        mStreamBuffers.emplace(streamList->streams[i], std::map<uint32_t, Buffer>());
    }

    return OK;
}

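// Register a frame number for which a buffer on the given stream will be returned
// later. The stream must have been part of the last configureStreams() call.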
status_t OutputBufferDispatcher::expectBuffer(uint32_t frameNumber, camera3_stream_t *stream)
{
    std::lock_guard<std::mutex> lock(mLock);

    // Find the "frame-number -> buffer" map for the stream.
    auto buffers = mStreamBuffers.find(stream);
    if (buffers == mStreamBuffers.end()) {
        ALOGE("%s: Stream %p was not configured.", __FUNCTION__, stream);
        return -EINVAL;
    }

    // Create an unready buffer for this frame number.
    buffers->second.emplace(frameNumber, Buffer());
    return OK;
}

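// Attach a returned buffer to its pending entry, then send capture results for
// all consecutive ready buffers of that stream, starting from the lowest frame
// number, stopping at the first entry that is not ready yet.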
void OutputBufferDispatcher::markBufferReady(uint32_t frameNumber,
        const camera3_stream_buffer_t &buffer)
{
    std::lock_guard<std::mutex> lock(mLock);

    // Find the "frame-number -> buffer" map for the stream.
    auto buffers = mStreamBuffers.find(buffer.stream);
    if (buffers == mStreamBuffers.end()) {
        ALOGE("%s: Cannot find pending buffers for stream %p.", __FUNCTION__, buffer.stream);
        return;
    }

    // Find the unready buffer for this frame number and mark it ready.
    auto pendingBuffer = buffers->second.find(frameNumber);
    if (pendingBuffer == buffers->second.end()) {
        ALOGE("%s: Cannot find the pending buffer for frame number %u.", __FUNCTION__, frameNumber);
        return;
    }

    pendingBuffer->second.ready = true;
    pendingBuffer->second.buffer = buffer;

    // Iterate through the buffers and send them out until reaching one that is not ready yet.
    pendingBuffer = buffers->second.begin();
    while (pendingBuffer != buffers->second.end()) {
        if (!pendingBuffer->second.ready) {
            // If this buffer is not ready, the following buffers can't be sent.
            break;
        }

        camera3_capture_result_t result = {};
        result.frame_number = pendingBuffer->first;
        result.num_output_buffers = 1;
        result.output_buffers = &pendingBuffer->second.buffer;

        // Send out the result with the ready buffer.
        mParent->orchestrateResult(&result);

        pendingBuffer = buffers->second.erase(pendingBuffer);
    }
}

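// Log and drop any buffers that were never returned. When clearConfiguredStreams
// is true, the per-stream maps are removed as well, so configureStreams() must be
// called again before new buffers can be expected.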
void OutputBufferDispatcher::clear(bool clearConfiguredStreams)
{
    std::lock_guard<std::mutex> lock(mLock);

    // Log errors for stale buffers.
    for (auto &buffers : mStreamBuffers) {
        for (auto &buffer : buffers.second) {
            ALOGE("%s: stale buffer: stream %p, frame number %u, ready %d",
                    __FUNCTION__, buffers.first, buffer.first, buffer.second.ready);
        }
        buffers.second.clear();
    }

    if (clearConfiguredStreams) {
        mStreamBuffers.clear();
    }
}

}; //end namespace qcamera