/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*     * Redistributions of source code must retain the above copyright
*       notice, this list of conditions and the following disclaimer.
*     * Redistributions in binary form must reproduce the above
*       copyright notice, this list of conditions and the following
*       disclaimer in the documentation and/or other materials provided
*       with the distribution.
*     * Neither the name of The Linux Foundation nor the names of its
*       contributors may be used to endorse or promote products derived
*       from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
*/

#define LOG_TAG "QCamera3HWI"
//#define LOG_NDEBUG 0

#define __STDC_LIMIT_MACROS

// To remove
#include <cutils/properties.h>

// System dependencies
#include <dlfcn.h>
#include <fcntl.h>
#include <stdio.h>
#include <stdlib.h>
#include "utils/Timers.h"
#include "sys/ioctl.h"
#include <time.h>
#include <sync/sync.h>
#include "gralloc_priv.h"
#include <map>

// Display dependencies
#include "qdMetaData.h"

// Camera dependencies
#include "android/QCamera3External.h"
#include "util/QCameraFlash.h"
#include "QCamera3HWI.h"
#include "QCamera3VendorTags.h"
#include "QCameraTrace.h"

#include "HdrPlusClientUtils.h"

extern "C" {
#include "mm_camera_dbg.h"
}
#include "cam_cond.h"

using ::android::hardware::camera::common::V1_0::helper::CameraMetadata;
using namespace android;

namespace qcamera {

#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )

#define EMPTY_PIPELINE_DELAY 2
// mm_camera has 2 partial results: 3A, and final result.
// HDR+ requests have 3 partial results: postview, next request ready, and final result.
#define PARTIAL_RESULT_COUNT 3
#define FRAME_SKIP_DELAY 0

#define MAX_VALUE_8BIT ((1<<8)-1)
#define MAX_VALUE_10BIT ((1<<10)-1)
#define MAX_VALUE_12BIT ((1<<12)-1)

#define VIDEO_4K_WIDTH 3840
#define VIDEO_4K_HEIGHT 2160

#define MAX_EIS_WIDTH 3840
#define MAX_EIS_HEIGHT 2160

#define MAX_RAW_STREAMS 1
#define MAX_STALLING_STREAMS 1
#define MAX_PROCESSED_STREAMS 3
/* Batch mode is enabled only if FPS set is equal to or greater than this */
#define MIN_FPS_FOR_BATCH_MODE (120)
#define PREVIEW_FPS_FOR_HFR (30)
#define DEFAULT_VIDEO_FPS (30.0)
#define TEMPLATE_MAX_PREVIEW_FPS (30.0)
#define MAX_HFR_BATCH_SIZE (8)
#define REGIONS_TUPLE_COUNT 5
// Set a threshold for detection of missing buffers //seconds
#define MISSING_REQUEST_BUF_TIMEOUT 5
#define MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT 30
#define FLUSH_TIMEOUT 3
#define METADATA_MAP_SIZE(MAP) (sizeof(MAP)/sizeof(MAP[0]))

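// Superset mask of post-processing features used for HAL3 processed streams.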
#define CAM_QCOM_FEATURE_PP_SUPERSET_HAL3  ( CAM_QCOM_FEATURE_DENOISE2D |\
                                             CAM_QCOM_FEATURE_CROP |\
                                             CAM_QCOM_FEATURE_ROTATION |\
                                             CAM_QCOM_FEATURE_SHARPNESS |\
                                             CAM_QCOM_FEATURE_SCALE |\
                                             CAM_QCOM_FEATURE_CAC |\
                                             CAM_QCOM_FEATURE_CDS )
/* Per configuration size for static metadata length*/
#define PER_CONFIGURATION_SIZE_3 (3)

#define TIMEOUT_NEVER -1

/* Face rect indices */
#define FACE_LEFT 0
#define FACE_TOP 1
#define FACE_RIGHT 2
#define FACE_BOTTOM 3
#define FACE_WEIGHT 4

/* Face landmarks indices */
#define LEFT_EYE_X 0
#define LEFT_EYE_Y 1
#define RIGHT_EYE_X 2
#define RIGHT_EYE_Y 3
#define MOUTH_X 4
#define MOUTH_Y 5
#define TOTAL_LANDMARK_INDICES 6

// Max preferred zoom
#define MAX_PREFERRED_ZOOM_RATIO 7.0

// Whether to check for the GPU stride padding, or use the default
//#define CHECK_GPU_PIXEL_ALIGNMENT

cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS];
extern pthread_mutex_t gCamLock;
volatile uint32_t gCamHal3LogLevel = 1;
extern uint8_t gNumCameraSessions;

// Note that this doesn't support concurrent front and back camera b/35960155.
// The following Easel related variables must be protected by gHdrPlusClientLock.
std::unique_ptr<EaselManagerClient> gEaselManagerClient;
bool EaselManagerClientOpened = false; // If gEaselManagerClient is opened.
std::unique_ptr<HdrPlusClient> gHdrPlusClient = nullptr;
bool gHdrPlusClientOpening = false; // If HDR+ client is being opened.
std::condition_variable gHdrPlusClientOpenCond; // Used to synchronize HDR+ client opening.
bool gEaselProfilingEnabled = false; // If Easel profiling is enabled.
bool gExposeEnableZslKey = false; // If HAL makes android.control.enableZsl available.
bool gEnableMultipleHdrplusOutputs = false; // Whether to enable multiple output from Easel HDR+.

// If Easel is in bypass only mode. If true, Easel HDR+ won't be enabled.
bool gEaselBypassOnly;

std::mutex gHdrPlusClientLock; // Protect above Easel related variables.


const QCamera3HardwareInterface::QCameraPropMap QCamera3HardwareInterface::CDS_MAP [] = {
    {"On", CAM_CDS_MODE_ON},
    {"Off", CAM_CDS_MODE_OFF},
    {"Auto",CAM_CDS_MODE_AUTO}
};
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_video_hdr_mode_t,
        cam_video_hdr_mode_t> QCamera3HardwareInterface::VIDEO_HDR_MODES_MAP[] = {
    { QCAMERA3_VIDEO_HDR_MODE_OFF, CAM_VIDEO_HDR_MODE_OFF },
    { QCAMERA3_VIDEO_HDR_MODE_ON, CAM_VIDEO_HDR_MODE_ON }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_binning_correction_mode_t,
        cam_binning_correction_mode_t> QCamera3HardwareInterface::BINNING_CORRECTION_MODES_MAP[] = {
    { QCAMERA3_BINNING_CORRECTION_MODE_OFF, CAM_BINNING_CORRECTION_MODE_OFF },
    { QCAMERA3_BINNING_CORRECTION_MODE_ON, CAM_BINNING_CORRECTION_MODE_ON }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_ir_mode_t,
        cam_ir_mode_type_t> QCamera3HardwareInterface::IR_MODES_MAP [] = {
    {QCAMERA3_IR_MODE_OFF, CAM_IR_MODE_OFF},
    {QCAMERA3_IR_MODE_ON, CAM_IR_MODE_ON},
    {QCAMERA3_IR_MODE_AUTO, CAM_IR_MODE_AUTO}
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_effect_mode_t,
        cam_effect_mode_type> QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
    { ANDROID_CONTROL_EFFECT_MODE_OFF, CAM_EFFECT_MODE_OFF },
    { ANDROID_CONTROL_EFFECT_MODE_MONO, CAM_EFFECT_MODE_MONO },
    { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE, CAM_EFFECT_MODE_NEGATIVE },
    { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE, CAM_EFFECT_MODE_SOLARIZE },
    { ANDROID_CONTROL_EFFECT_MODE_SEPIA, CAM_EFFECT_MODE_SEPIA },
    { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE, CAM_EFFECT_MODE_POSTERIZE },
    { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_AQUA, CAM_EFFECT_MODE_AQUA }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_awb_mode_t,
        cam_wb_mode_type> QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
    { ANDROID_CONTROL_AWB_MODE_OFF, CAM_WB_MODE_OFF },
    { ANDROID_CONTROL_AWB_MODE_AUTO, CAM_WB_MODE_AUTO },
    { ANDROID_CONTROL_AWB_MODE_INCANDESCENT, CAM_WB_MODE_INCANDESCENT },
    { ANDROID_CONTROL_AWB_MODE_FLUORESCENT, CAM_WB_MODE_FLUORESCENT },
    { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT,CAM_WB_MODE_WARM_FLUORESCENT},
    { ANDROID_CONTROL_AWB_MODE_DAYLIGHT, CAM_WB_MODE_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_TWILIGHT, CAM_WB_MODE_TWILIGHT },
    { ANDROID_CONTROL_AWB_MODE_SHADE, CAM_WB_MODE_SHADE }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_scene_mode_t,
        cam_scene_mode_type> QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
    { ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY, CAM_SCENE_MODE_FACE_PRIORITY },
    { ANDROID_CONTROL_SCENE_MODE_ACTION, CAM_SCENE_MODE_ACTION },
    { ANDROID_CONTROL_SCENE_MODE_PORTRAIT, CAM_SCENE_MODE_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE, CAM_SCENE_MODE_LANDSCAPE },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT, CAM_SCENE_MODE_NIGHT },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_THEATRE, CAM_SCENE_MODE_THEATRE },
    { ANDROID_CONTROL_SCENE_MODE_BEACH, CAM_SCENE_MODE_BEACH },
    { ANDROID_CONTROL_SCENE_MODE_SNOW, CAM_SCENE_MODE_SNOW },
    { ANDROID_CONTROL_SCENE_MODE_SUNSET, CAM_SCENE_MODE_SUNSET },
    { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO, CAM_SCENE_MODE_ANTISHAKE },
    { ANDROID_CONTROL_SCENE_MODE_FIREWORKS , CAM_SCENE_MODE_FIREWORKS },
    { ANDROID_CONTROL_SCENE_MODE_SPORTS , CAM_SCENE_MODE_SPORTS },
    { ANDROID_CONTROL_SCENE_MODE_PARTY, CAM_SCENE_MODE_PARTY },
    { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT, CAM_SCENE_MODE_CANDLELIGHT },
    { ANDROID_CONTROL_SCENE_MODE_BARCODE, CAM_SCENE_MODE_BARCODE},
    { ANDROID_CONTROL_SCENE_MODE_HDR, CAM_SCENE_MODE_HDR}
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_af_mode_t,
        cam_focus_mode_type> QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
    { ANDROID_CONTROL_AF_MODE_OFF, CAM_FOCUS_MODE_OFF },
    { ANDROID_CONTROL_AF_MODE_OFF, CAM_FOCUS_MODE_FIXED },
    { ANDROID_CONTROL_AF_MODE_AUTO, CAM_FOCUS_MODE_AUTO },
    { ANDROID_CONTROL_AF_MODE_MACRO, CAM_FOCUS_MODE_MACRO },
    { ANDROID_CONTROL_AF_MODE_EDOF, CAM_FOCUS_MODE_EDOF },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO, CAM_FOCUS_MODE_CONTINOUS_VIDEO }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_color_correction_aberration_mode_t,
        cam_aberration_mode_t> QCamera3HardwareInterface::COLOR_ABERRATION_MAP[] = {
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
            CAM_COLOR_CORRECTION_ABERRATION_OFF },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
            CAM_COLOR_CORRECTION_ABERRATION_FAST },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY,
            CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY },
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_antibanding_mode_t,
        cam_antibanding_mode_type> QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF, CAM_ANTIBANDING_MODE_OFF },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = {
    { ANDROID_CONTROL_AE_MODE_OFF, CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON, CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH, CAM_FLASH_MODE_AUTO},
    { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH, CAM_FLASH_MODE_ON },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO},
    { (camera_metadata_enum_android_control_ae_mode_t)
            NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH, CAM_FLASH_MODE_OFF }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_flash_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
    { ANDROID_FLASH_MODE_OFF, CAM_FLASH_MODE_OFF },
    { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_SINGLE },
    { ANDROID_FLASH_MODE_TORCH, CAM_FLASH_MODE_TORCH }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_statistics_face_detect_mode_t,
        cam_face_detect_mode_t> QCamera3HardwareInterface::FACEDETECT_MODES_MAP[] = {
    { ANDROID_STATISTICS_FACE_DETECT_MODE_OFF, CAM_FACE_DETECT_MODE_OFF },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE, CAM_FACE_DETECT_MODE_SIMPLE },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_FULL, CAM_FACE_DETECT_MODE_FULL }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_info_focus_distance_calibration_t,
        cam_focus_calibration_t> QCamera3HardwareInterface::FOCUS_CALIBRATION_MAP[] = {
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED,
            CAM_FOCUS_UNCALIBRATED },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_APPROXIMATE,
            CAM_FOCUS_APPROXIMATE },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED,
            CAM_FOCUS_CALIBRATED }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_state_t,
        cam_af_lens_state_t> QCamera3HardwareInterface::LENS_STATE_MAP[] = {
    { ANDROID_LENS_STATE_STATIONARY, CAM_AF_LENS_STATE_STATIONARY},
    { ANDROID_LENS_STATE_MOVING, CAM_AF_LENS_STATE_MOVING}
};

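// Supported JPEG thumbnail sizes, listed as flat (width, height) pairs;
// the leading (0, 0) entry denotes "no thumbnail".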
const int32_t available_thumbnail_sizes[] = {0, 0,
                                             176, 144,
                                             240, 144,
                                             256, 144,
                                             240, 160,
                                             256, 154,
                                             240, 240,
                                             320, 240};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_test_pattern_mode_t,
        cam_test_pattern_mode_t> QCamera3HardwareInterface::TEST_PATTERN_MAP[] = {
    { ANDROID_SENSOR_TEST_PATTERN_MODE_OFF, CAM_TEST_PATTERN_OFF },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR, CAM_TEST_PATTERN_SOLID_COLOR },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS, CAM_TEST_PATTERN_COLOR_BARS },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS_FADE_TO_GRAY, CAM_TEST_PATTERN_COLOR_BARS_FADE_TO_GRAY },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_PN9, CAM_TEST_PATTERN_PN9 },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_CUSTOM1, CAM_TEST_PATTERN_CUSTOM1},
};

/* Since there is no mapping for all the options, some Android enums are not listed.
 * Also, the order in this list is important because while mapping from HAL to Android it will
 * traverse from lower to higher index, which means that for HAL values that map to different
 * Android values, the traversal logic will select the first one found.
 */
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_reference_illuminant1_t,
        cam_illuminat_t> QCamera3HardwareInterface::REFERENCE_ILLUMINANT_MAP[] = {
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FLUORESCENT, CAM_AWB_WARM_FLO},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_COOL_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_A, CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D55, CAM_AWB_NOON },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D65, CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D75, CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D50, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_ISO_STUDIO_TUNGSTEN, CAM_AWB_CUSTOM_A},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_TUNGSTEN, CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FINE_WEATHER, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_CLOUDY_WEATHER, CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_SHADE, CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAY_WHITE_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO},
};

const QCamera3HardwareInterface::QCameraMap<
        int32_t, cam_hfr_mode_t> QCamera3HardwareInterface::HFR_MODE_MAP[] = {
    { 60, CAM_HFR_MODE_60FPS},
    { 90, CAM_HFR_MODE_90FPS},
    { 120, CAM_HFR_MODE_120FPS},
    { 150, CAM_HFR_MODE_150FPS},
    { 180, CAM_HFR_MODE_180FPS},
    { 210, CAM_HFR_MODE_210FPS},
    { 240, CAM_HFR_MODE_240FPS},
    { 480, CAM_HFR_MODE_480FPS},
};

const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_instant_aec_mode_t,
        cam_aec_convergence_type> QCamera3HardwareInterface::INSTANT_AEC_MODES_MAP[] = {
    { QCAMERA3_INSTANT_AEC_NORMAL_CONVERGENCE, CAM_AEC_NORMAL_CONVERGENCE},
    { QCAMERA3_INSTANT_AEC_AGGRESSIVE_CONVERGENCE, CAM_AEC_AGGRESSIVE_CONVERGENCE},
    { QCAMERA3_INSTANT_AEC_FAST_CONVERGENCE, CAM_AEC_FAST_CONVERGENCE},
};

const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_exposure_meter_mode_t,
        cam_auto_exposure_mode_type> QCamera3HardwareInterface::AEC_MODES_MAP[] = {
    { QCAMERA3_EXP_METER_MODE_FRAME_AVERAGE, CAM_AEC_MODE_FRAME_AVERAGE },
    { QCAMERA3_EXP_METER_MODE_CENTER_WEIGHTED, CAM_AEC_MODE_CENTER_WEIGHTED },
    { QCAMERA3_EXP_METER_MODE_SPOT_METERING, CAM_AEC_MODE_SPOT_METERING },
    { QCAMERA3_EXP_METER_MODE_SMART_METERING, CAM_AEC_MODE_SMART_METERING },
    { QCAMERA3_EXP_METER_MODE_USER_METERING, CAM_AEC_MODE_USER_METERING },
    { QCAMERA3_EXP_METER_MODE_SPOT_METERING_ADV, CAM_AEC_MODE_SPOT_METERING_ADV },
    { QCAMERA3_EXP_METER_MODE_CENTER_WEIGHTED_ADV, CAM_AEC_MODE_CENTER_WEIGHTED_ADV },
};

const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_iso_mode_t,
        cam_iso_mode_type> QCamera3HardwareInterface::ISO_MODES_MAP[] = {
    { QCAMERA3_ISO_MODE_AUTO, CAM_ISO_MODE_AUTO },
    { QCAMERA3_ISO_MODE_DEBLUR, CAM_ISO_MODE_DEBLUR },
    { QCAMERA3_ISO_MODE_100, CAM_ISO_MODE_100 },
    { QCAMERA3_ISO_MODE_200, CAM_ISO_MODE_200 },
    { QCAMERA3_ISO_MODE_400, CAM_ISO_MODE_400 },
    { QCAMERA3_ISO_MODE_800, CAM_ISO_MODE_800 },
    { QCAMERA3_ISO_MODE_1600, CAM_ISO_MODE_1600 },
    { QCAMERA3_ISO_MODE_3200, CAM_ISO_MODE_3200 },
};

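// camera3_device_ops_t dispatch table exposed to the camera framework.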
camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
    .initialize = QCamera3HardwareInterface::initialize,
    .configure_streams = QCamera3HardwareInterface::configure_streams,
    .register_stream_buffers = NULL,
    .construct_default_request_settings = QCamera3HardwareInterface::construct_default_request_settings,
    .process_capture_request = QCamera3HardwareInterface::process_capture_request,
    .get_metadata_vendor_tag_ops = NULL,
    .dump = QCamera3HardwareInterface::dump,
    .flush = QCamera3HardwareInterface::flush,
    .reserved = {0},
};

// initialise to some default value
uint32_t QCamera3HardwareInterface::sessionId[] = {0xDEADBEEF, 0xDEADBEEF, 0xDEADBEEF};

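// Log an Easel event with a CLOCK_BOOTTIME timestamp in milliseconds.
// This is a no-op unless Easel profiling (gEaselProfilingEnabled) is enabled.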
static inline void logEaselEvent(const char *tag, const char *event) {
    if (CC_UNLIKELY(gEaselProfilingEnabled)) {
        struct timespec ts = {};
        static int64_t kMsPerSec = 1000;
        static int64_t kNsPerMs = 1000000;
        status_t res = clock_gettime(CLOCK_BOOTTIME, &ts);
        if (res != OK) {
            ALOGE("[%s] Failed to get boot time for <%s>.", tag, event);
        } else {
            int64_t now = static_cast<int64_t>(ts.tv_sec) * kMsPerSec + ts.tv_nsec / kNsPerMs;
            ALOGI("[%s] %s at %" PRId64 " ms", tag, event, now);
        }
    }
}

/*===========================================================================
 * FUNCTION   : QCamera3HardwareInterface
 *
 * DESCRIPTION: constructor of QCamera3HardwareInterface
 *
 * PARAMETERS :
 *   @cameraId : camera ID
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::QCamera3HardwareInterface(uint32_t cameraId,
        const camera_module_callbacks_t *callbacks)
    : mCameraId(cameraId),
      mCameraHandle(NULL),
      mCameraInitialized(false),
      mCallbackOps(NULL),
      mMetadataChannel(NULL),
      mPictureChannel(NULL),
      mRawChannel(NULL),
      mSupportChannel(NULL),
      mAnalysisChannel(NULL),
      mRawDumpChannel(NULL),
      mHdrPlusRawSrcChannel(NULL),
      mDummyBatchChannel(NULL),
      mDepthChannel(NULL),
      mDepthCloudMode(CAM_PD_DATA_SKIP),
      mPerfLockMgr(),
      mChannelHandle(0),
      mFirstConfiguration(true),
      mFlush(false),
      mFlushPerf(false),
      mParamHeap(NULL),
      mParameters(NULL),
      mPrevParameters(NULL),
      m_bIsVideo(false),
      m_bIs4KVideo(false),
      m_bEisSupportedSize(false),
      m_bEisEnable(false),
      m_bEis3PropertyEnabled(false),
      m_bAVTimerEnabled(false),
      m_MobicatMask(0),
      mShutterDispatcher(this),
      mOutputBufferDispatcher(this),
      mMinProcessedFrameDuration(0),
      mMinJpegFrameDuration(0),
      mMinRawFrameDuration(0),
      mExpectedFrameDuration(0),
      mExpectedInflightDuration(0),
      mMetaFrameCount(0U),
      mUpdateDebugLevel(false),
      mCallbacks(callbacks),
      mCaptureIntent(0),
      mCacMode(0),
      /* DevCamDebug metadata internal m control*/
      mDevCamDebugMetaEnable(0),
      /* DevCamDebug metadata end */
      mBatchSize(0),
      mToBeQueuedVidBufs(0),
      mHFRVideoFps(DEFAULT_VIDEO_FPS),
      mOpMode(CAMERA3_STREAM_CONFIGURATION_NORMAL_MODE),
      mStreamConfig(false),
      mCommon(),
      mFirstFrameNumberInBatch(0),
      mNeedSensorRestart(false),
      mPreviewStarted(false),
      mMinInFlightRequests(MIN_INFLIGHT_REQUESTS),
      mMaxInFlightRequests(MAX_INFLIGHT_REQUESTS),
      mPDSupported(false),
      mPDIndex(0),
      mInstantAEC(false),
      mResetInstantAEC(false),
      mInstantAECSettledFrameNumber(0),
      mAecSkipDisplayFrameBound(0),
      mInstantAecFrameIdxCount(0),
      mLastRequestedLensShadingMapMode(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF),
      mCurrFeatureState(0),
      mLdafCalibExist(false),
      mLastCustIntentFrmNum(-1),
      mFirstMetadataCallback(true),
      mState(CLOSED),
      mIsDeviceLinked(false),
      mIsMainCamera(true),
      mLinkedCameraId(0),
      m_pDualCamCmdHeap(NULL),
      m_pDualCamCmdPtr(NULL),
      mHdrPlusModeEnabled(false),
      mZslEnabled(false),
      mIsApInputUsedForHdrPlus(false),
      mFirstPreviewIntentSeen(false),
      m_bSensorHDREnabled(false),
      mAfTrigger()
{
    getLogLevel();
    mCommon.init(gCamCapability[cameraId]);
    mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
#ifndef USE_HAL_3_3
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_4;
#else
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_3;
#endif
    mCameraDevice.common.close = close_camera_device;
    mCameraDevice.ops = &mCameraOps;
    mCameraDevice.priv = this;
    gCamCapability[cameraId]->version = CAM_HAL_V3;
    // TODO: hardcode for now until mctl adds support for min_num_pp_bufs
    // TBD - check whether this hardcoding is needed; verify (by printing) whether mctl fills this to 3
    gCamCapability[cameraId]->min_num_pp_bufs = 3;

    PTHREAD_COND_INIT(&mBuffersCond);

    PTHREAD_COND_INIT(&mRequestCond);
    mPendingLiveRequest = 0;
    mCurrentRequestId = -1;
    pthread_mutex_init(&mMutex, NULL);

    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        mDefaultMetadata[i] = NULL;

    // Getting system props of different kinds
    char prop[PROPERTY_VALUE_MAX];
    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.raw.dump", prop, "0");
    mEnableRawDump = atoi(prop);
    property_get("persist.camera.hal3.force.hdr", prop, "0");
    mForceHdrSnapshot = atoi(prop);

    if (mEnableRawDump)
        LOGD("Raw dump from Camera HAL enabled");

    memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
    memset(mLdafCalib, 0, sizeof(mLdafCalib));

    memset(mEaselFwVersion, 0, sizeof(mEaselFwVersion));
    mEaselFwUpdated = false;

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.preview", prop, "0");
    m_bTnrPreview = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.swtnr.preview", prop, "1");
    m_bSwTnrPreview = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.video", prop, "1");
    m_bTnrVideo = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.avtimer.debug", prop, "0");
    m_debug_avtimer = (uint8_t)atoi(prop);
    LOGI("AV timer enabled: %d", m_debug_avtimer);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.cacmode.disable", prop, "0");
    m_cacModeDisabled = (uint8_t)atoi(prop);

    m_bForceInfinityAf = property_get_bool("persist.camera.af.infinity", 0);
    m_MobicatMask = (uint8_t)property_get_int32("persist.camera.mobicat", 0);

    //Load and read GPU library.
    lib_surface_utils = NULL;
    LINK_get_surface_pixel_alignment = NULL;
    mSurfaceStridePadding = CAM_PAD_TO_64;
#ifdef CHECK_GPU_PIXEL_ALIGNMENT
    lib_surface_utils = dlopen("libadreno_utils.so", RTLD_NOW);
    if (lib_surface_utils) {
        *(void **)&LINK_get_surface_pixel_alignment =
                dlsym(lib_surface_utils, "get_gpu_pixel_alignment");
        if (LINK_get_surface_pixel_alignment) {
            mSurfaceStridePadding = LINK_get_surface_pixel_alignment();
        }
        dlclose(lib_surface_utils);
    }
#endif
    mPDIndex = getPDStatIndex(gCamCapability[cameraId]);
    mPDSupported = (0 <= mPDIndex) ? true : false;

    m60HzZone = is60HzZone();
}

/*===========================================================================
 * FUNCTION   : ~QCamera3HardwareInterface
 *
 * DESCRIPTION: destructor of QCamera3HardwareInterface
 *
 * PARAMETERS : none
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::~QCamera3HardwareInterface()
{
    LOGD("E");

    int32_t rc = 0;

    // Disable power hint and enable the perf lock for close camera
    mPerfLockMgr.releasePerfLock(PERF_LOCK_POWERHINT_ENCODE);
    mPerfLockMgr.acquirePerfLock(PERF_LOCK_CLOSE_CAMERA);

    // unlink of dualcam during close camera
    if (mIsDeviceLinked) {
        cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
                &m_pDualCamCmdPtr->bundle_info;
        m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
        m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
        pthread_mutex_lock(&gCamLock);

        if (mIsMainCamera == 1) {
            m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
            m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
            m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
            // related session id should be session id of linked session
            m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
        } else {
            m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
            m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
            m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
            m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
        }
        m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
        pthread_mutex_unlock(&gCamLock);

        rc = mCameraHandle->ops->set_dual_cam_cmd(
                mCameraHandle->camera_handle);
        if (rc < 0) {
            LOGE("Dualcam: Unlink failed, but still proceed to close");
        }
    }

    /* We need to stop all streams before deleting any stream */
    if (mRawDumpChannel) {
        mRawDumpChannel->stop();
    }

    if (mHdrPlusRawSrcChannel) {
        mHdrPlusRawSrcChannel->stop();
    }

    // NOTE: 'camera3_stream_t *' objects are already freed at
    // this stage by the framework
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel) {
            channel->stop();
        }
    }
    if (mSupportChannel)
        mSupportChannel->stop();

    if (mAnalysisChannel) {
        mAnalysisChannel->stop();
    }
    if (mMetadataChannel) {
        mMetadataChannel->stop();
    }
    if (mChannelHandle) {
        mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
                mChannelHandle, /*stop_immediately*/false);
        LOGD("stopping channel %d", mChannelHandle);
    }

    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel)
            delete channel;
        free (*it);
    }
    if (mSupportChannel) {
        delete mSupportChannel;
        mSupportChannel = NULL;
    }

    if (mAnalysisChannel) {
        delete mAnalysisChannel;
        mAnalysisChannel = NULL;
    }
    if (mRawDumpChannel) {
        delete mRawDumpChannel;
        mRawDumpChannel = NULL;
    }
    if (mHdrPlusRawSrcChannel) {
        delete mHdrPlusRawSrcChannel;
        mHdrPlusRawSrcChannel = NULL;
    }
    if (mDummyBatchChannel) {
        delete mDummyBatchChannel;
        mDummyBatchChannel = NULL;
    }

    mPictureChannel = NULL;
    mDepthChannel = NULL;

    if (mMetadataChannel) {
        delete mMetadataChannel;
        mMetadataChannel = NULL;
    }

    /* Clean up all channels */
    if (mCameraInitialized) {
        if(!mFirstConfiguration){
            //send the last unconfigure
            cam_stream_size_info_t stream_config_info;
            memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
            stream_config_info.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
            stream_config_info.buffer_info.max_buffers =
                    m_bIs4KVideo ? 0 :
                    m_bEis3PropertyEnabled && m_bIsVideo ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
            clear_metadata_buffer(mParameters);
            ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_INFO,
                    stream_config_info);
            int rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
            if (rc < 0) {
                LOGE("set_parms failed for unconfigure");
            }
        }
        deinitParameters();
    }

    if (mChannelHandle) {
        mCameraHandle->ops->delete_channel(mCameraHandle->camera_handle,
                mChannelHandle);
        LOGH("deleting channel %d", mChannelHandle);
        mChannelHandle = 0;
    }

    if (mState != CLOSED)
        closeCamera();

    for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
        req.mPendingBufferList.clear();
    }
    mPendingBuffersMap.mPendingBuffersInRequest.clear();
    for (pendingRequestIterator i = mPendingRequestsList.begin();
            i != mPendingRequestsList.end();) {
        i = erasePendingRequest(i);
    }
    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        if (mDefaultMetadata[i])
            free_camera_metadata(mDefaultMetadata[i]);

    mPerfLockMgr.releasePerfLock(PERF_LOCK_CLOSE_CAMERA);

    pthread_cond_destroy(&mRequestCond);

    pthread_cond_destroy(&mBuffersCond);

    pthread_mutex_destroy(&mMutex);
    LOGD("X");
}

/*===========================================================================
 * FUNCTION   : erasePendingRequest
 *
 * DESCRIPTION: function to erase a desired pending request after freeing any
 *              allocated memory
 *
 * PARAMETERS :
 *   @i : iterator pointing to pending request to be erased
 *
 * RETURN     : iterator pointing to the next request
 *==========================================================================*/
QCamera3HardwareInterface::pendingRequestIterator
        QCamera3HardwareInterface::erasePendingRequest (pendingRequestIterator i)
{
    if (i->input_buffer != NULL) {
        free(i->input_buffer);
        i->input_buffer = NULL;
    }
    if (i->settings != NULL)
        free_camera_metadata((camera_metadata_t*)i->settings);

    mExpectedInflightDuration -= i->expectedFrameDuration;
    if (mExpectedInflightDuration < 0) {
        LOGE("Negative expected in-flight duration!");
        mExpectedInflightDuration = 0;
    }

    return mPendingRequestsList.erase(i);
}

/*===========================================================================
 * FUNCTION   : camEvtHandle
 *
 * DESCRIPTION: Function registered to mm-camera-interface to handle events
 *
 * PARAMETERS :
 *   @camera_handle : interface layer camera handle
 *   @evt           : ptr to event
 *   @user_data     : user data ptr
 *
 * RETURN     : none
 *==========================================================================*/
void QCamera3HardwareInterface::camEvtHandle(uint32_t /*camera_handle*/,
        mm_camera_event_t *evt,
        void *user_data)
{
    QCamera3HardwareInterface *obj = (QCamera3HardwareInterface *)user_data;
    if (obj && evt) {
        switch(evt->server_event_type) {
            case CAM_EVENT_TYPE_DAEMON_DIED:
                pthread_mutex_lock(&obj->mMutex);
                obj->mState = ERROR;
                pthread_mutex_unlock(&obj->mMutex);
                LOGE("Fatal, camera daemon died");
                break;

            case CAM_EVENT_TYPE_DAEMON_PULL_REQ:
                LOGD("HAL got request pull from Daemon");
                pthread_mutex_lock(&obj->mMutex);
                obj->mWokenUpByDaemon = true;
                obj->unblockRequestIfNecessary();
                pthread_mutex_unlock(&obj->mMutex);
                break;

            default:
                LOGW("Warning: Unhandled event %d",
                        evt->server_event_type);
                break;
        }
    } else {
        LOGE("NULL user_data/evt");
    }
}

/*===========================================================================
 * FUNCTION   : openCamera
 *
 * DESCRIPTION: open camera
 *
 * PARAMETERS :
 *   @hw_device : double ptr for camera device struct
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR -- success
 *              non-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device)
{
    int rc = 0;
    if (mState != CLOSED) {
        *hw_device = NULL;
        return PERMISSION_DENIED;
    }

    logEaselEvent("EASEL_STARTUP_LATENCY", "Camera Open");
    mPerfLockMgr.acquirePerfLock(PERF_LOCK_OPEN_CAMERA);
    LOGI("[KPI Perf]: E PROFILE_OPEN_CAMERA camera id %d",
            mCameraId);

    if (mCameraHandle) {
        LOGE("Failure: Camera already opened");
        return ALREADY_EXISTS;
    }

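    // If Easel is present on this device, resume it before opening the camera.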
    {
        std::unique_lock<std::mutex> l(gHdrPlusClientLock);
        if (gEaselManagerClient != nullptr && gEaselManagerClient->isEaselPresentOnDevice()) {
            logEaselEvent("EASEL_STARTUP_LATENCY", "Resume");
            rc = gEaselManagerClient->resume(this);
            if (rc != 0) {
                ALOGE("%s: Resuming Easel failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
                return rc;
            }
            mEaselFwUpdated = false;
        }
    }

    rc = openCamera();
    if (rc == 0) {
        *hw_device = &mCameraDevice.common;
    } else {
        *hw_device = NULL;

        // Suspend Easel because opening camera failed.
        {
            std::unique_lock<std::mutex> l(gHdrPlusClientLock);
            if (gEaselManagerClient != nullptr && gEaselManagerClient->isEaselPresentOnDevice()) {
                status_t suspendErr = gEaselManagerClient->suspend();
                if (suspendErr != 0) {
                    ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__,
                            strerror(-suspendErr), suspendErr);
                }
            }
        }
    }

    LOGI("[KPI Perf]: X PROFILE_OPEN_CAMERA camera id %d, rc: %d",
            mCameraId, rc);

    if (rc == NO_ERROR) {
        mState = OPENED;
    }

    return rc;
}

/*===========================================================================
 * FUNCTION   : openCamera
 *
 * DESCRIPTION: open camera
 *
 * PARAMETERS : none
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR -- success
 *              non-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::openCamera()
{
    int rc = 0;
    char value[PROPERTY_VALUE_MAX];

    KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_OPENCAMERA);

    rc = QCameraFlash::getInstance().reserveFlashForCamera(mCameraId);
    if (rc < 0) {
        LOGE("Failed to reserve flash for camera id: %d",
                mCameraId);
        return UNKNOWN_ERROR;
    }

    rc = camera_open((uint8_t)mCameraId, &mCameraHandle);
    if (rc) {
        LOGE("camera_open failed. rc = %d, mCameraHandle = %p", rc, mCameraHandle);
        return rc;
    }

    if (!mCameraHandle) {
        LOGE("camera_open failed. mCameraHandle = %p", mCameraHandle);
        return -ENODEV;
    }

    rc = mCameraHandle->ops->register_event_notify(mCameraHandle->camera_handle,
            camEvtHandle, (void *)this);

    if (rc < 0) {
        LOGE("Error, failed to register event callback");
        /* Not closing camera here since it is already handled in destructor */
        return FAILED_TRANSACTION;
    }

    mExifParams.debug_params =
            (mm_jpeg_debug_exif_params_t *) malloc (sizeof(mm_jpeg_debug_exif_params_t));
    if (mExifParams.debug_params) {
        memset(mExifParams.debug_params, 0, sizeof(mm_jpeg_debug_exif_params_t));
    } else {
        LOGE("Out of Memory. Allocation failed for 3A debug exif params");
        return NO_MEMORY;
    }
    mFirstConfiguration = true;

    //Notify display HAL that a camera session is active.
    //But avoid calling the same during bootup because camera service might open/close
    //cameras at boot time during its initialization and display service will also internally
    //wait for camera service to initialize first while calling this display API, resulting in a
    //deadlock situation. Since boot time camera open/close calls are made only to fetch
    //capabilities, no need of this display bw optimization.
    //Use "service.bootanim.exit" property to know boot status.
    property_get("service.bootanim.exit", value, "0");
    if (atoi(value) == 1) {
        pthread_mutex_lock(&gCamLock);
        if (gNumCameraSessions++ == 0) {
            setCameraLaunchStatus(true);
        }
        pthread_mutex_unlock(&gCamLock);
    }

    //fill the session id needed while linking dual cam
    pthread_mutex_lock(&gCamLock);
    rc = mCameraHandle->ops->get_session_id(mCameraHandle->camera_handle,
            &sessionId[mCameraId]);
    pthread_mutex_unlock(&gCamLock);

    if (rc < 0) {
        LOGE("Error, failed to get session id");
        return UNKNOWN_ERROR;
    } else {
        //Allocate related cam sync buffer
        //this is needed for the payload that goes along with bundling cmd for related
        //camera use cases
        m_pDualCamCmdHeap = new QCamera3HeapMemory(1);
        rc = m_pDualCamCmdHeap->allocate(sizeof(cam_dual_camera_cmd_info_t));
        if(rc != OK) {
            rc = NO_MEMORY;
            LOGE("Dualcam: Failed to allocate Related cam sync Heap memory");
            return NO_MEMORY;
        }

        //Map memory for related cam sync buffer
        rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
                CAM_MAPPING_BUF_TYPE_DUAL_CAM_CMD_BUF,
                m_pDualCamCmdHeap->getFd(0),
                sizeof(cam_dual_camera_cmd_info_t),
                m_pDualCamCmdHeap->getPtr(0));
        if(rc < 0) {
            LOGE("Dualcam: failed to map Related cam sync buffer");
            rc = FAILED_TRANSACTION;
            return NO_MEMORY;
        }
        m_pDualCamCmdPtr =
                (cam_dual_camera_cmd_info_t*) DATA_PTR(m_pDualCamCmdHeap,0);
    }

    LOGH("mCameraId=%d",mCameraId);

    return NO_ERROR;
}

/*===========================================================================
 * FUNCTION   : closeCamera
 *
 * DESCRIPTION: close camera
 *
 * PARAMETERS : none
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR -- success
 *              non-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::closeCamera()
{
    KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CLOSECAMERA);
    int rc = NO_ERROR;
    char value[PROPERTY_VALUE_MAX];

    LOGI("[KPI Perf]: E PROFILE_CLOSE_CAMERA camera id %d",
            mCameraId);

    // unmap memory for related cam sync buffer
    mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
            CAM_MAPPING_BUF_TYPE_DUAL_CAM_CMD_BUF);
    if (NULL != m_pDualCamCmdHeap) {
        m_pDualCamCmdHeap->deallocate();
        delete m_pDualCamCmdHeap;
        m_pDualCamCmdHeap = NULL;
        m_pDualCamCmdPtr = NULL;
    }

    rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
    mCameraHandle = NULL;

    //reset session id to some invalid id
    pthread_mutex_lock(&gCamLock);
    sessionId[mCameraId] = 0xDEADBEEF;
    pthread_mutex_unlock(&gCamLock);

    //Notify display HAL that there is no active camera session
    //but avoid calling the same during bootup. Refer to openCamera
    //for more details.
    property_get("service.bootanim.exit", value, "0");
    if (atoi(value) == 1) {
        pthread_mutex_lock(&gCamLock);
        if (--gNumCameraSessions == 0) {
            setCameraLaunchStatus(false);
        }
        pthread_mutex_unlock(&gCamLock);
    }

    if (mExifParams.debug_params) {
        free(mExifParams.debug_params);
        mExifParams.debug_params = NULL;
    }
    if (QCameraFlash::getInstance().releaseFlashFromCamera(mCameraId) != 0) {
        LOGW("Failed to release flash for camera id: %d",
                mCameraId);
    }
    mState = CLOSED;
    LOGI("[KPI Perf]: X PROFILE_CLOSE_CAMERA camera id %d, rc: %d",
            mCameraId, rc);

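    // Tear down Easel: close any open HDR+ client and, if the Easel manager
    // client was opened, stop MIPI and suspend Easel.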
    {
        std::unique_lock<std::mutex> l(gHdrPlusClientLock);
        finishHdrPlusClientOpeningLocked(l);
        if (gHdrPlusClient != nullptr) {
            // Disable HDR+ mode.
            disableHdrPlusModeLocked();
            // Disconnect Easel if it's connected.
            gEaselManagerClient->closeHdrPlusClient(std::move(gHdrPlusClient));
            gHdrPlusClient = nullptr;
        }

        if (EaselManagerClientOpened) {
            rc = gEaselManagerClient->stopMipi(mCameraId);
            if (rc != 0) {
                ALOGE("%s: Stopping MIPI failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
            }

            rc = gEaselManagerClient->suspend();
            if (rc != 0) {
                ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
            }
        }
    }

    return rc;
}

/*===========================================================================
 * FUNCTION   : initialize
 *
 * DESCRIPTION: Initialize frameworks callback functions
 *
 * PARAMETERS :
 *   @callback_ops : callback function to frameworks
 *
 * RETURN     :
 *
 *==========================================================================*/
int QCamera3HardwareInterface::initialize(
        const struct camera3_callback_ops *callback_ops)
{
    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_INIT);
    int rc;

    LOGI("E :mCameraId = %d mState = %d", mCameraId, mState);
    pthread_mutex_lock(&mMutex);

    // Validate current state
    switch (mState) {
        case OPENED:
            /* valid state */
            break;
        default:
            LOGE("Invalid state %d", mState);
            rc = -ENODEV;
            goto err1;
    }

    rc = initParameters();
    if (rc < 0) {
        LOGE("initParameters failed %d", rc);
        goto err1;
    }
    mCallbackOps = callback_ops;

    mChannelHandle = mCameraHandle->ops->add_channel(
            mCameraHandle->camera_handle, NULL, NULL, this);
    if (mChannelHandle == 0) {
        LOGE("add_channel failed");
        rc = -ENOMEM;
        pthread_mutex_unlock(&mMutex);
        return rc;
    }

    pthread_mutex_unlock(&mMutex);
    mCameraInitialized = true;
    mState = INITIALIZED;
    LOGI("X");
    return 0;

err1:
    pthread_mutex_unlock(&mMutex);
    return rc;
}

/*===========================================================================
 * FUNCTION   : validateStreamDimensions
 *
 * DESCRIPTION: Check whether the requested stream configurations are among those advertised
 *
 * PARAMETERS :
 *   @stream_list : streams to be configured
 *
 * RETURN     :
 *
 *==========================================================================*/
int QCamera3HardwareInterface::validateStreamDimensions(
        camera3_stream_configuration_t *streamList)
{
    int rc = NO_ERROR;
    size_t count = 0;
    uint32_t depthWidth = 0;
    uint32_t depthHeight = 0;
    if (mPDSupported) {
        depthWidth = gCamCapability[mCameraId]->raw_meta_dim[mPDIndex].width;
        depthHeight = gCamCapability[mCameraId]->raw_meta_dim[mPDIndex].height;
    }

    camera3_stream_t *inputStream = NULL;
    /*
    * Loop through all streams to find input stream if it exists*
    */
    for (size_t i = 0; i< streamList->num_streams; i++) {
        if (streamList->streams[i]->stream_type == CAMERA3_STREAM_INPUT) {
            if (inputStream != NULL) {
                LOGE("Error, Multiple input streams requested");
                return -EINVAL;
            }
            inputStream = streamList->streams[i];
        }
    }
    /*
    * Loop through all streams requested in configuration
    * Check if unsupported sizes have been requested on any of them
    */
    for (size_t j = 0; j < streamList->num_streams; j++) {
        bool sizeFound = false;
        camera3_stream_t *newStream = streamList->streams[j];

        uint32_t rotatedHeight = newStream->height;
        uint32_t rotatedWidth = newStream->width;
        if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
                (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
            rotatedHeight = newStream->width;
            rotatedWidth = newStream->height;
        }

        /*
        * Sizes are different for each type of stream format check against
        * appropriate table.
        */
        switch (newStream->format) {
        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
        case HAL_PIXEL_FORMAT_RAW10:
            if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
                    (ANDROID_SCALER_AVAILABLE_FORMATS_RAW16 == newStream->format) &&
                    mPDSupported) {
                if ((depthWidth == newStream->width) &&
                        (depthHeight == newStream->height)) {
                    sizeFound = true;
                }
                break;
            }
            count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
            for (size_t i = 0; i < count; i++) {
                if ((gCamCapability[mCameraId]->raw_dim[i].width == (int32_t)rotatedWidth) &&
                        (gCamCapability[mCameraId]->raw_dim[i].height == (int32_t)rotatedHeight)) {
                    sizeFound = true;
                    break;
                }
            }
            break;
        case HAL_PIXEL_FORMAT_BLOB:
            if ((newStream->data_space == HAL_DATASPACE_DEPTH) &&
                    mPDSupported) {
                //As per spec. depth cloud should be sample count / 16
                uint32_t depthSamplesCount = (depthWidth * depthHeight * 2) / 16;
                if ((depthSamplesCount == newStream->width) &&
                        (1 == newStream->height)) {
                    sizeFound = true;
                }
                break;
            }
            count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
            /* Verify set size against generated sizes table */
            for (size_t i = 0; i < count; i++) {
                if (((int32_t)rotatedWidth ==
                        gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
                        ((int32_t)rotatedHeight ==
                        gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
                    sizeFound = true;
                    break;
                }
            }
            break;
        case HAL_PIXEL_FORMAT_YCbCr_420_888:
        case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
        default:
            if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
                    || newStream->stream_type == CAMERA3_STREAM_INPUT
                    || IS_USAGE_ZSL(newStream->usage)) {
                if (((int32_t)rotatedWidth ==
                        gCamCapability[mCameraId]->active_array_size.width) &&
                        ((int32_t)rotatedHeight ==
                        gCamCapability[mCameraId]->active_array_size.height)) {
                    sizeFound = true;
                    break;
                }
 1310                /* We could potentially break here to enforce that a ZSL stream
 1311                 * set by the framework is always full active array size,
 1312                 * but it is not clear from the spec whether the framework will
 1313                 * always follow that. We also have logic to override to full
 1314                 * array size, so keep the logic lenient for now.
1315 */
1316 }
1317 count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt,
1318 MAX_SIZES_CNT);
1319 for (size_t i = 0; i < count; i++) {
1320 if (((int32_t)rotatedWidth ==
1321 gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
1322 ((int32_t)rotatedHeight ==
1323 gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
1324 sizeFound = true;
1325 break;
1326 }
1327 }
1328 break;
1329 } /* End of switch(newStream->format) */
1330
 1331        /* Error out if even a single stream has an unsupported size */
1332 if (!sizeFound) {
1333 LOGE("Error: Unsupported size: %d x %d type: %d array size: %d x %d",
1334 rotatedWidth, rotatedHeight, newStream->format,
1335 gCamCapability[mCameraId]->active_array_size.width,
1336 gCamCapability[mCameraId]->active_array_size.height);
1337 rc = -EINVAL;
1338 break;
1339 }
1340 } /* End of for each stream */
1341 return rc;
1342}
1343
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001344/*===========================================================================
1345 * FUNCTION : validateUsageFlags
1346 *
 1347 * DESCRIPTION: Check if the configuration usage flags map to the same internal format.
1348 *
1349 * PARAMETERS :
1350 * @stream_list : streams to be configured
1351 *
1352 * RETURN :
1353 * NO_ERROR if the usage flags are supported
1354 * error code if usage flags are not supported
1355 *
1356 *==========================================================================*/
1357int QCamera3HardwareInterface::validateUsageFlags(
1358 const camera3_stream_configuration_t* streamList)
1359{
1360 for (size_t j = 0; j < streamList->num_streams; j++) {
1361 const camera3_stream_t *newStream = streamList->streams[j];
1362
1363 if (newStream->format != HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
1364 (newStream->stream_type != CAMERA3_STREAM_OUTPUT &&
1365 newStream->stream_type != CAMERA3_STREAM_BIDIRECTIONAL)) {
1366 continue;
1367 }
1368
Jason Leec4cf5032017-05-24 18:31:41 -07001369 // Here we only care whether it's EIS3 or not
1370 char is_type_value[PROPERTY_VALUE_MAX];
1371 property_get("persist.camera.is_type", is_type_value, "4");
1372 cam_is_type_t isType = atoi(is_type_value) == IS_TYPE_EIS_3_0 ? IS_TYPE_EIS_3_0 : IS_TYPE_NONE;
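        // EIS is never applied for the front camera or in constrained high-speed mode,
        // so treat those cases as IS_TYPE_NONE when picking the default stream formats.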
1373 if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
1374 mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
1375 isType = IS_TYPE_NONE;
1376
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001377 bool isVideo = IS_USAGE_VIDEO(newStream->usage);
1378 bool isPreview = IS_USAGE_PREVIEW(newStream->usage);
1379 bool isZSL = IS_USAGE_ZSL(newStream->usage);
1380 bool forcePreviewUBWC = true;
1381 if (isVideo && !QCameraCommon::isVideoUBWCEnabled()) {
1382 forcePreviewUBWC = false;
1383 }
1384 cam_format_t videoFormat = QCamera3Channel::getStreamDefaultFormat(
Jason Leec4cf5032017-05-24 18:31:41 -07001385 CAM_STREAM_TYPE_VIDEO, newStream->width, newStream->height, forcePreviewUBWC, isType);
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001386 cam_format_t previewFormat = QCamera3Channel::getStreamDefaultFormat(
Jason Leec4cf5032017-05-24 18:31:41 -07001387 CAM_STREAM_TYPE_PREVIEW, newStream->width, newStream->height, forcePreviewUBWC, isType);
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001388 cam_format_t zslFormat = QCamera3Channel::getStreamDefaultFormat(
Jason Leec4cf5032017-05-24 18:31:41 -07001389 CAM_STREAM_TYPE_SNAPSHOT, newStream->width, newStream->height, forcePreviewUBWC, isType);
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001390
1391 // Color space for this camera device is guaranteed to be ITU_R_601_FR.
1392 // So color spaces will always match.
1393
1394 // Check whether underlying formats of shared streams match.
1395 if (isVideo && isPreview && videoFormat != previewFormat) {
1396 LOGE("Combined video and preview usage flag is not supported");
1397 return -EINVAL;
1398 }
1399 if (isPreview && isZSL && previewFormat != zslFormat) {
1400 LOGE("Combined preview and zsl usage flag is not supported");
1401 return -EINVAL;
1402 }
1403 if (isVideo && isZSL && videoFormat != zslFormat) {
1404 LOGE("Combined video and zsl usage flag is not supported");
1405 return -EINVAL;
1406 }
1407 }
1408 return NO_ERROR;
1409}
1410
1411/*===========================================================================
1412 * FUNCTION : validateUsageFlagsForEis
1413 *
 1414 * DESCRIPTION: Check if the configuration usage flags conflict with EIS.
1415 *
1416 * PARAMETERS :
1417 * @stream_list : streams to be configured
1418 *
1419 * RETURN :
1420 * NO_ERROR if the usage flags are supported
1421 * error code if usage flags are not supported
1422 *
1423 *==========================================================================*/
1424int QCamera3HardwareInterface::validateUsageFlagsForEis(
1425 const camera3_stream_configuration_t* streamList)
1426{
1427 for (size_t j = 0; j < streamList->num_streams; j++) {
1428 const camera3_stream_t *newStream = streamList->streams[j];
1429
1430 bool isVideo = IS_USAGE_VIDEO(newStream->usage);
1431 bool isPreview = IS_USAGE_PREVIEW(newStream->usage);
1432
 1433        // Because EIS is "hard-coded" for certain use cases, and the current
 1434        // implementation doesn't support sharing preview and video on the same
 1435        // stream, return failure if EIS is forced on.
1436 if (isPreview && isVideo && m_bEisEnable && m_bEisSupportedSize) {
1437 LOGE("Combined video and preview usage flag is not supported due to EIS");
1438 return -EINVAL;
1439 }
1440 }
1441 return NO_ERROR;
1442}
1443
Thierry Strudel3d639192016-09-09 11:52:26 -07001444/*==============================================================================
1445 * FUNCTION : isSupportChannelNeeded
1446 *
 1447 * DESCRIPTION: Simple heuristic to determine if a support channel is needed
1448 *
1449 * PARAMETERS :
1450 * @stream_list : streams to be configured
1451 * @stream_config_info : the config info for streams to be configured
1452 *
 1453 * RETURN     : Boolean true/false decision
1454 *
1455 *==========================================================================*/
1456bool QCamera3HardwareInterface::isSupportChannelNeeded(
1457 camera3_stream_configuration_t *streamList,
1458 cam_stream_size_info_t stream_config_info)
1459{
1460 uint32_t i;
1461 bool pprocRequested = false;
 1462    /* Check for conditions where the PProc pipeline does not have any streams */
1463 for (i = 0; i < stream_config_info.num_streams; i++) {
1464 if (stream_config_info.type[i] != CAM_STREAM_TYPE_ANALYSIS &&
1465 stream_config_info.postprocess_mask[i] != CAM_QCOM_FEATURE_NONE) {
1466 pprocRequested = true;
1467 break;
1468 }
1469 }
1470
1471 if (pprocRequested == false )
1472 return true;
1473
 1474    /* Dummy stream needed if only raw or JPEG streams are present */
1475 for (i = 0; i < streamList->num_streams; i++) {
1476 switch(streamList->streams[i]->format) {
1477 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1478 case HAL_PIXEL_FORMAT_RAW10:
1479 case HAL_PIXEL_FORMAT_RAW16:
1480 case HAL_PIXEL_FORMAT_BLOB:
1481 break;
1482 default:
1483 return false;
1484 }
1485 }
1486 return true;
1487}
1488
1489/*==============================================================================
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001490 * FUNCTION : sensor_mode_info
Thierry Strudel3d639192016-09-09 11:52:26 -07001491 *
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001492 * DESCRIPTION: Get sensor mode information based on the current stream configuration
Thierry Strudel3d639192016-09-09 11:52:26 -07001493 *
1494 * PARAMETERS :
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001495 * @sensor_mode_info : sensor mode information (output)
Thierry Strudel3d639192016-09-09 11:52:26 -07001496 *
1497 * RETURN : int32_t type of status
1498 * NO_ERROR -- success
 1499 *              non-zero failure code
1500 *
1501 *==========================================================================*/
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001502int32_t QCamera3HardwareInterface::getSensorModeInfo(cam_sensor_mode_info_t &sensorModeInfo)
Thierry Strudel3d639192016-09-09 11:52:26 -07001503{
1504 int32_t rc = NO_ERROR;
1505
1506 cam_dimension_t max_dim = {0, 0};
1507 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
1508 if (mStreamConfigInfo.stream_sizes[i].width > max_dim.width)
1509 max_dim.width = mStreamConfigInfo.stream_sizes[i].width;
1510 if (mStreamConfigInfo.stream_sizes[i].height > max_dim.height)
1511 max_dim.height = mStreamConfigInfo.stream_sizes[i].height;
1512 }
1513
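    // Publish the largest configured stream dimension so the backend can select the
    // matching sensor mode, then query that mode's information below.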
1514 clear_metadata_buffer(mParameters);
1515
1516 rc = ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_MAX_DIMENSION,
1517 max_dim);
1518 if (rc != NO_ERROR) {
1519 LOGE("Failed to update table for CAM_INTF_PARM_MAX_DIMENSION");
1520 return rc;
1521 }
1522
1523 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
1524 if (rc != NO_ERROR) {
1525 LOGE("Failed to set CAM_INTF_PARM_MAX_DIMENSION");
1526 return rc;
1527 }
1528
1529 clear_metadata_buffer(mParameters);
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001530 ADD_GET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_SENSOR_MODE_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -07001531
1532 rc = mCameraHandle->ops->get_parms(mCameraHandle->camera_handle,
1533 mParameters);
1534 if (rc != NO_ERROR) {
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001535 LOGE("Failed to get CAM_INTF_PARM_SENSOR_MODE_INFO");
Thierry Strudel3d639192016-09-09 11:52:26 -07001536 return rc;
1537 }
1538
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001539 READ_PARAM_ENTRY(mParameters, CAM_INTF_PARM_SENSOR_MODE_INFO, sensorModeInfo);
Chien-Yu Chenee335912017-02-09 17:53:20 -08001540 LOGH("%s: active array size %dx%d, pixel array size %dx%d, output pixel clock %u, "
1541 "raw bits: %d", __FUNCTION__, sensorModeInfo.active_array_size.width,
1542 sensorModeInfo.active_array_size.height, sensorModeInfo.pixel_array_size.width,
1543 sensorModeInfo.pixel_array_size.height, sensorModeInfo.op_pixel_clk,
1544 sensorModeInfo.num_raw_bits);
Thierry Strudel3d639192016-09-09 11:52:26 -07001545
1546 return rc;
1547}
1548
1549/*==============================================================================
Chien-Yu Chen605c3872017-06-14 11:09:23 -07001550 * FUNCTION : getCurrentSensorModeInfo
1551 *
1552 * DESCRIPTION: Get sensor mode information that is currently selected.
1553 *
1554 * PARAMETERS :
1555 * @sensorModeInfo : sensor mode information (output)
1556 *
1557 * RETURN : int32_t type of status
1558 * NO_ERROR -- success
 1559 *              non-zero failure code
1560 *
1561 *==========================================================================*/
1562int32_t QCamera3HardwareInterface::getCurrentSensorModeInfo(cam_sensor_mode_info_t &sensorModeInfo)
1563{
1564 int32_t rc = NO_ERROR;
1565
1566 clear_metadata_buffer(mParameters);
1567 ADD_GET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_CURRENT_SENSOR_MODE_INFO);
1568
1569 rc = mCameraHandle->ops->get_parms(mCameraHandle->camera_handle,
1570 mParameters);
1571 if (rc != NO_ERROR) {
 1572        LOGE("Failed to get CAM_INTF_PARM_CURRENT_SENSOR_MODE_INFO");
1573 return rc;
1574 }
1575
1576 READ_PARAM_ENTRY(mParameters, CAM_INTF_PARM_CURRENT_SENSOR_MODE_INFO, sensorModeInfo);
1577 LOGH("%s: active array size %dx%d, pixel array size %dx%d, output pixel clock %u, "
1578 "raw bits: %d", __FUNCTION__, sensorModeInfo.active_array_size.width,
1579 sensorModeInfo.active_array_size.height, sensorModeInfo.pixel_array_size.width,
1580 sensorModeInfo.pixel_array_size.height, sensorModeInfo.op_pixel_clk,
1581 sensorModeInfo.num_raw_bits);
1582
1583 return rc;
1584}
1585
1586/*==============================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07001587 * FUNCTION : addToPPFeatureMask
1588 *
1589 * DESCRIPTION: add additional features to pp feature mask based on
1590 * stream type and usecase
1591 *
1592 * PARAMETERS :
1593 * @stream_format : stream type for feature mask
1594 * @stream_idx : stream idx within postprocess_mask list to change
1595 *
 1596 * RETURN     : None
1597 *
1598 *==========================================================================*/
1599void QCamera3HardwareInterface::addToPPFeatureMask(int stream_format,
1600 uint32_t stream_idx)
1601{
1602 char feature_mask_value[PROPERTY_VALUE_MAX];
1603 cam_feature_mask_t feature_mask;
1604 int args_converted;
1605 int property_len;
1606
1607 /* Get feature mask from property */
Thierry Strudel269c81a2016-10-12 12:13:59 -07001608#ifdef _LE_CAMERA_
1609 char swtnr_feature_mask_value[PROPERTY_VALUE_MAX];
1610 snprintf(swtnr_feature_mask_value, PROPERTY_VALUE_MAX, "%lld", CAM_QTI_FEATURE_SW_TNR);
1611 property_len = property_get("persist.camera.hal3.feature",
1612 feature_mask_value, swtnr_feature_mask_value);
1613#else
Thierry Strudel3d639192016-09-09 11:52:26 -07001614 property_len = property_get("persist.camera.hal3.feature",
1615 feature_mask_value, "0");
Thierry Strudel269c81a2016-10-12 12:13:59 -07001616#endif
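    // The property value may be given either in hex (with a "0x" prefix) or in decimal.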
Thierry Strudel3d639192016-09-09 11:52:26 -07001617 if ((property_len > 2) && (feature_mask_value[0] == '0') &&
1618 (feature_mask_value[1] == 'x')) {
1619 args_converted = sscanf(feature_mask_value, "0x%llx", &feature_mask);
1620 } else {
1621 args_converted = sscanf(feature_mask_value, "%lld", &feature_mask);
1622 }
1623 if (1 != args_converted) {
1624 feature_mask = 0;
1625 LOGE("Wrong feature mask %s", feature_mask_value);
1626 return;
1627 }
1628
1629 switch (stream_format) {
1630 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED: {
1631 /* Add LLVD to pp feature mask only if video hint is enabled */
1632 if ((m_bIsVideo) && (feature_mask & CAM_QTI_FEATURE_SW_TNR)) {
1633 mStreamConfigInfo.postprocess_mask[stream_idx]
1634 |= CAM_QTI_FEATURE_SW_TNR;
1635 LOGH("Added SW TNR to pp feature mask");
1636 } else if ((m_bIsVideo) && (feature_mask & CAM_QCOM_FEATURE_LLVD)) {
1637 mStreamConfigInfo.postprocess_mask[stream_idx]
1638 |= CAM_QCOM_FEATURE_LLVD;
1639 LOGH("Added LLVD SeeMore to pp feature mask");
1640 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001641 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
1642 CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) {
1643 mStreamConfigInfo.postprocess_mask[stream_idx] |= CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
1644 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08001645 if ((m_bIsVideo) && (gCamCapability[mCameraId]->qcom_supported_feature_mask &
1646 CAM_QTI_FEATURE_BINNING_CORRECTION)) {
1647 mStreamConfigInfo.postprocess_mask[stream_idx] |=
1648 CAM_QTI_FEATURE_BINNING_CORRECTION;
1649 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001650 break;
1651 }
1652 default:
1653 break;
1654 }
1655 LOGD("PP feature mask %llx",
1656 mStreamConfigInfo.postprocess_mask[stream_idx]);
1657}
1658
1659/*==============================================================================
1660 * FUNCTION : updateFpsInPreviewBuffer
1661 *
1662 * DESCRIPTION: update FPS information in preview buffer.
1663 *
1664 * PARAMETERS :
1665 * @metadata : pointer to metadata buffer
1666 * @frame_number: frame_number to look for in pending buffer list
1667 *
1668 * RETURN : None
1669 *
1670 *==========================================================================*/
1671void QCamera3HardwareInterface::updateFpsInPreviewBuffer(metadata_buffer_t *metadata,
1672 uint32_t frame_number)
1673{
1674 // Mark all pending buffers for this particular request
1675 // with corresponding framerate information
1676 for (List<PendingBuffersInRequest>::iterator req =
1677 mPendingBuffersMap.mPendingBuffersInRequest.begin();
1678 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
1679 for(List<PendingBufferInfo>::iterator j =
1680 req->mPendingBufferList.begin();
1681 j != req->mPendingBufferList.end(); j++) {
1682 QCamera3Channel *channel = (QCamera3Channel *)j->stream->priv;
1683 if ((req->frame_number == frame_number) &&
1684 (channel->getStreamTypeMask() &
1685 (1U << CAM_STREAM_TYPE_PREVIEW))) {
1686 IF_META_AVAILABLE(cam_fps_range_t, float_range,
1687 CAM_INTF_PARM_FPS_RANGE, metadata) {
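                    // Write the request's max fps into the buffer's display metadata
                    // (UPDATE_REFRESH_RATE) as a refresh-rate hint.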
1688 typeof (MetaData_t::refreshrate) cameraFps = float_range->max_fps;
1689 struct private_handle_t *priv_handle =
1690 (struct private_handle_t *)(*(j->buffer));
1691 setMetaData(priv_handle, UPDATE_REFRESH_RATE, &cameraFps);
1692 }
1693 }
1694 }
1695 }
1696}
1697
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07001698/*==============================================================================
1699 * FUNCTION : updateTimeStampInPendingBuffers
1700 *
1701 * DESCRIPTION: update timestamp in display metadata for all pending buffers
1702 * of a frame number
1703 *
1704 * PARAMETERS :
1705 * @frame_number: frame_number. Timestamp will be set on pending buffers of this frame number
1706 * @timestamp : timestamp to be set
1707 *
1708 * RETURN : None
1709 *
1710 *==========================================================================*/
1711void QCamera3HardwareInterface::updateTimeStampInPendingBuffers(
1712 uint32_t frameNumber, nsecs_t timestamp)
1713{
1714 for (auto req = mPendingBuffersMap.mPendingBuffersInRequest.begin();
1715 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
Binhao Lin09245482017-08-31 18:25:29 -07001716 // WAR: save the av_timestamp to the next frame
1717 if(req->frame_number == frameNumber + 1) {
1718 req->av_timestamp = timestamp;
1719 }
1720
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07001721 if (req->frame_number != frameNumber)
1722 continue;
1723
1724 for (auto k = req->mPendingBufferList.begin();
1725 k != req->mPendingBufferList.end(); k++ ) {
Binhao Lin09245482017-08-31 18:25:29 -07001726 // WAR: update timestamp when it's not VT usecase
1727 QCamera3Channel *channel = (QCamera3Channel *)k->stream->priv;
1728 if (!((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask() &&
1729 m_bAVTimerEnabled)) {
1730 struct private_handle_t *priv_handle =
1731 (struct private_handle_t *) (*(k->buffer));
1732 setMetaData(priv_handle, SET_VT_TIMESTAMP, &timestamp);
1733 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07001734 }
1735 }
1736 return;
1737}
1738
Thierry Strudel3d639192016-09-09 11:52:26 -07001739/*===========================================================================
1740 * FUNCTION : configureStreams
1741 *
1742 * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input
1743 * and output streams.
1744 *
1745 * PARAMETERS :
1746 * @stream_list : streams to be configured
1747 *
1748 * RETURN :
1749 *
1750 *==========================================================================*/
1751int QCamera3HardwareInterface::configureStreams(
1752 camera3_stream_configuration_t *streamList)
1753{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001754 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CFG_STRMS);
Thierry Strudel3d639192016-09-09 11:52:26 -07001755 int rc = 0;
1756
1757 // Acquire perfLock before configure streams
Thierry Strudelc2ee3302016-11-17 12:33:12 -08001758 mPerfLockMgr.acquirePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07001759 rc = configureStreamsPerfLocked(streamList);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08001760 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07001761
1762 return rc;
1763}
1764
1765/*===========================================================================
1766 * FUNCTION : configureStreamsPerfLocked
1767 *
1768 * DESCRIPTION: configureStreams while perfLock is held.
1769 *
1770 * PARAMETERS :
1771 * @stream_list : streams to be configured
1772 *
1773 * RETURN : int32_t type of status
1774 * NO_ERROR -- success
 1775 *              non-zero failure code
1776 *==========================================================================*/
1777int QCamera3HardwareInterface::configureStreamsPerfLocked(
1778 camera3_stream_configuration_t *streamList)
1779{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001780 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CFG_STRMS_PERF_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07001781 int rc = 0;
1782
1783 // Sanity check stream_list
1784 if (streamList == NULL) {
1785 LOGE("NULL stream configuration");
1786 return BAD_VALUE;
1787 }
1788 if (streamList->streams == NULL) {
1789 LOGE("NULL stream list");
1790 return BAD_VALUE;
1791 }
1792
1793 if (streamList->num_streams < 1) {
1794 LOGE("Bad number of streams requested: %d",
1795 streamList->num_streams);
1796 return BAD_VALUE;
1797 }
1798
1799 if (streamList->num_streams >= MAX_NUM_STREAMS) {
1800 LOGE("Maximum number of streams %d exceeded: %d",
1801 MAX_NUM_STREAMS, streamList->num_streams);
1802 return BAD_VALUE;
1803 }
1804
Jason Leec4cf5032017-05-24 18:31:41 -07001805 mOpMode = streamList->operation_mode;
1806 LOGD("mOpMode: %d", mOpMode);
1807
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001808 rc = validateUsageFlags(streamList);
1809 if (rc != NO_ERROR) {
1810 return rc;
1811 }
1812
Thierry Strudel3d639192016-09-09 11:52:26 -07001813    /* First invalidate all the streams in mStreamInfo;
 1814     * if they appear again, they will be revalidated */
1815 for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
1816 it != mStreamInfo.end(); it++) {
1817 QCamera3ProcessingChannel *channel = (QCamera3ProcessingChannel*)(*it)->stream->priv;
1818 if (channel) {
1819 channel->stop();
1820 }
1821 (*it)->status = INVALID;
1822 }
1823
1824 if (mRawDumpChannel) {
1825 mRawDumpChannel->stop();
1826 delete mRawDumpChannel;
1827 mRawDumpChannel = NULL;
1828 }
1829
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001830 if (mHdrPlusRawSrcChannel) {
1831 mHdrPlusRawSrcChannel->stop();
1832 delete mHdrPlusRawSrcChannel;
1833 mHdrPlusRawSrcChannel = NULL;
1834 }
1835
Thierry Strudel3d639192016-09-09 11:52:26 -07001836 if (mSupportChannel)
1837 mSupportChannel->stop();
1838
1839 if (mAnalysisChannel) {
1840 mAnalysisChannel->stop();
1841 }
1842 if (mMetadataChannel) {
 1843        /* If mStreamInfo is not empty, there is a metadata stream */
1844 mMetadataChannel->stop();
1845 }
1846 if (mChannelHandle) {
1847 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -07001848 mChannelHandle, /*stop_immediately*/false);
Thierry Strudel3d639192016-09-09 11:52:26 -07001849 LOGD("stopping channel %d", mChannelHandle);
1850 }
1851
1852 pthread_mutex_lock(&mMutex);
1853
Chien-Yu Chendeaebad2017-06-30 11:46:34 -07001854 mPictureChannel = NULL;
1855
Thierry Strudel3d639192016-09-09 11:52:26 -07001856 // Check state
1857 switch (mState) {
1858 case INITIALIZED:
1859 case CONFIGURED:
1860 case STARTED:
1861 /* valid state */
1862 break;
1863 default:
1864 LOGE("Invalid state %d", mState);
1865 pthread_mutex_unlock(&mMutex);
1866 return -ENODEV;
1867 }
1868
1869 /* Check whether we have video stream */
1870 m_bIs4KVideo = false;
1871 m_bIsVideo = false;
1872 m_bEisSupportedSize = false;
1873 m_bTnrEnabled = false;
Mansoor Aftab93a66e52017-01-26 14:58:25 -08001874 m_bVideoHdrEnabled = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07001875 bool isZsl = false;
Emilian Peev7650c122017-01-19 08:24:33 -08001876 bool depthPresent = false;
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001877 bool isPreview = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07001878 uint32_t videoWidth = 0U;
1879 uint32_t videoHeight = 0U;
1880 size_t rawStreamCnt = 0;
1881 size_t stallStreamCnt = 0;
1882 size_t processedStreamCnt = 0;
1883 // Number of streams on ISP encoder path
1884 size_t numStreamsOnEncoder = 0;
1885 size_t numYuv888OnEncoder = 0;
1886 bool bYuv888OverrideJpeg = false;
1887 cam_dimension_t largeYuv888Size = {0, 0};
1888 cam_dimension_t maxViewfinderSize = {0, 0};
1889 bool bJpegExceeds4K = false;
1890 bool bJpegOnEncoder = false;
1891 bool bUseCommonFeatureMask = false;
1892 cam_feature_mask_t commonFeatureMask = 0;
1893 bool bSmallJpegSize = false;
1894 uint32_t width_ratio;
1895 uint32_t height_ratio;
1896 maxViewfinderSize = gCamCapability[mCameraId]->max_viewfinder_size;
1897 camera3_stream_t *inputStream = NULL;
1898 bool isJpeg = false;
1899 cam_dimension_t jpegSize = {0, 0};
Thierry Strudel9ec39c62016-12-28 11:30:05 -08001900 cam_dimension_t previewSize = {0, 0};
Emilian Peev0f3c3162017-03-15 12:57:46 +00001901 size_t pdStatCount = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07001902
1903 cam_padding_info_t padding_info = gCamCapability[mCameraId]->padding_info;
1904
1905 /*EIS configuration*/
Thierry Strudel3d639192016-09-09 11:52:26 -07001906 uint8_t eis_prop_set;
1907 uint32_t maxEisWidth = 0;
1908 uint32_t maxEisHeight = 0;
1909
Thierry Strudel295a0ca2016-11-03 18:38:47 -07001910 // Initialize all instant AEC related variables
1911 mInstantAEC = false;
1912 mResetInstantAEC = false;
1913 mInstantAECSettledFrameNumber = 0;
1914 mAecSkipDisplayFrameBound = 0;
1915 mInstantAecFrameIdxCount = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -08001916 mCurrFeatureState = 0;
1917 mStreamConfig = true;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07001918
Binhao Lin09245482017-08-31 18:25:29 -07001919 m_bAVTimerEnabled = false;
1920
Thierry Strudel3d639192016-09-09 11:52:26 -07001921 memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
1922
1923 size_t count = IS_TYPE_MAX;
1924 count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
1925 for (size_t i = 0; i < count; i++) {
1926 if ((gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001927 (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0)) {
1928 m_bEisSupported = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07001929 break;
1930 }
1931 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001932
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001933 if (m_bEisSupported) {
Thierry Strudel3d639192016-09-09 11:52:26 -07001934 maxEisWidth = MAX_EIS_WIDTH;
1935 maxEisHeight = MAX_EIS_HEIGHT;
1936 }
1937
1938 /* EIS setprop control */
1939 char eis_prop[PROPERTY_VALUE_MAX];
1940 memset(eis_prop, 0, sizeof(eis_prop));
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001941 property_get("persist.camera.eis.enable", eis_prop, "1");
Thierry Strudel3d639192016-09-09 11:52:26 -07001942 eis_prop_set = (uint8_t)atoi(eis_prop);
1943
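    // EIS is enabled only when the property is set, the sensor advertises EIS support,
    // and the session is not constrained high-speed.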
Mansoor Aftabb9370df2017-03-15 17:09:34 -07001944 m_bEisEnable = eis_prop_set && m_bEisSupported &&
Thierry Strudel3d639192016-09-09 11:52:26 -07001945 (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE);
1946
Mansoor Aftabb9370df2017-03-15 17:09:34 -07001947 LOGD("m_bEisEnable: %d, eis_prop_set: %d, m_bEisSupported: %d",
1948 m_bEisEnable, eis_prop_set, m_bEisSupported);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001949
Thierry Strudel3d639192016-09-09 11:52:26 -07001950 /* stream configurations */
1951 for (size_t i = 0; i < streamList->num_streams; i++) {
1952 camera3_stream_t *newStream = streamList->streams[i];
1953 LOGI("stream[%d] type = %d, format = %d, width = %d, "
1954 "height = %d, rotation = %d, usage = 0x%x",
1955 i, newStream->stream_type, newStream->format,
1956 newStream->width, newStream->height, newStream->rotation,
1957 newStream->usage);
1958 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1959 newStream->stream_type == CAMERA3_STREAM_INPUT){
1960 isZsl = true;
1961 }
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001962 if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
1963 IS_USAGE_PREVIEW(newStream->usage)) {
1964 isPreview = true;
1965 }
1966
Thierry Strudel3d639192016-09-09 11:52:26 -07001967 if (newStream->stream_type == CAMERA3_STREAM_INPUT){
1968 inputStream = newStream;
1969 }
1970
Emilian Peev7650c122017-01-19 08:24:33 -08001971 if ((newStream->format == HAL_PIXEL_FORMAT_BLOB) &&
1972 (newStream->data_space != HAL_DATASPACE_DEPTH)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07001973 isJpeg = true;
1974 jpegSize.width = newStream->width;
1975 jpegSize.height = newStream->height;
1976 if (newStream->width > VIDEO_4K_WIDTH ||
1977 newStream->height > VIDEO_4K_HEIGHT)
1978 bJpegExceeds4K = true;
1979 }
1980
1981 if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
1982 (newStream->usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER)) {
1983 m_bIsVideo = true;
Thierry Strudel2896d122017-02-23 19:18:03 -08001984 // In HAL3 we can have multiple different video streams.
1985 // The variables video width and height are used below as
1986 // dimensions of the biggest of them
1987 if (videoWidth < newStream->width ||
1988 videoHeight < newStream->height) {
1989 videoWidth = newStream->width;
1990 videoHeight = newStream->height;
1991 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001992 if ((VIDEO_4K_WIDTH <= newStream->width) &&
1993 (VIDEO_4K_HEIGHT <= newStream->height)) {
1994 m_bIs4KVideo = true;
1995 }
1996 m_bEisSupportedSize = (newStream->width <= maxEisWidth) &&
1997 (newStream->height <= maxEisHeight);
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001998
Thierry Strudel3d639192016-09-09 11:52:26 -07001999 }
2000 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
2001 newStream->stream_type == CAMERA3_STREAM_OUTPUT) {
2002 switch (newStream->format) {
2003 case HAL_PIXEL_FORMAT_BLOB:
Emilian Peev7650c122017-01-19 08:24:33 -08002004 if (newStream->data_space == HAL_DATASPACE_DEPTH) {
2005 depthPresent = true;
2006 break;
2007 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002008 stallStreamCnt++;
2009 if (isOnEncoder(maxViewfinderSize, newStream->width,
2010 newStream->height)) {
2011 numStreamsOnEncoder++;
2012 bJpegOnEncoder = true;
2013 }
2014 width_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.width,
2015 newStream->width);
2016 height_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.height,
 2017                    newStream->height);
2018 FATAL_IF(gCamCapability[mCameraId]->max_downscale_factor == 0,
2019 "FATAL: max_downscale_factor cannot be zero and so assert");
2020 if ( (width_ratio > gCamCapability[mCameraId]->max_downscale_factor) ||
2021 (height_ratio > gCamCapability[mCameraId]->max_downscale_factor)) {
2022 LOGH("Setting small jpeg size flag to true");
2023 bSmallJpegSize = true;
2024 }
2025 break;
2026 case HAL_PIXEL_FORMAT_RAW10:
2027 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2028 case HAL_PIXEL_FORMAT_RAW16:
2029 rawStreamCnt++;
Emilian Peev0f3c3162017-03-15 12:57:46 +00002030 if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
2031 (HAL_PIXEL_FORMAT_RAW16 == newStream->format)) {
2032 pdStatCount++;
2033 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002034 break;
2035 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
2036 processedStreamCnt++;
2037 if (isOnEncoder(maxViewfinderSize, newStream->width,
2038 newStream->height)) {
2039 if (newStream->stream_type != CAMERA3_STREAM_BIDIRECTIONAL &&
2040 !IS_USAGE_ZSL(newStream->usage)) {
2041 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2042 }
2043 numStreamsOnEncoder++;
2044 }
2045 break;
2046 case HAL_PIXEL_FORMAT_YCbCr_420_888:
2047 processedStreamCnt++;
2048 if (isOnEncoder(maxViewfinderSize, newStream->width,
2049 newStream->height)) {
2050 // If Yuv888 size is not greater than 4K, set feature mask
 2051                // to SUPERSET so that it supports concurrent requests on
2052 // YUV and JPEG.
2053 if (newStream->width <= VIDEO_4K_WIDTH &&
2054 newStream->height <= VIDEO_4K_HEIGHT) {
2055 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2056 }
2057 numStreamsOnEncoder++;
2058 numYuv888OnEncoder++;
2059 largeYuv888Size.width = newStream->width;
2060 largeYuv888Size.height = newStream->height;
2061 }
2062 break;
2063 default:
2064 processedStreamCnt++;
2065 if (isOnEncoder(maxViewfinderSize, newStream->width,
2066 newStream->height)) {
2067 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2068 numStreamsOnEncoder++;
2069 }
2070 break;
2071 }
2072
2073 }
2074 }
2075
2076 if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
2077 gCamCapability[mCameraId]->position == CAM_POSITION_FRONT_AUX ||
2078 !m_bIsVideo) {
2079 m_bEisEnable = false;
2080 }
2081
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002082 if (validateUsageFlagsForEis(streamList) != NO_ERROR) {
2083 pthread_mutex_unlock(&mMutex);
2084 return -EINVAL;
2085 }
2086
Thierry Strudel54dc9782017-02-15 12:12:10 -08002087 uint8_t forceEnableTnr = 0;
2088 char tnr_prop[PROPERTY_VALUE_MAX];
2089 memset(tnr_prop, 0, sizeof(tnr_prop));
2090 property_get("debug.camera.tnr.forceenable", tnr_prop, "0");
2091 forceEnableTnr = (uint8_t)atoi(tnr_prop);
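    // A non-zero debug.camera.tnr.forceenable value forces TNR on, bypassing the
    // video/HFR checks below.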
2092
Thierry Strudel3d639192016-09-09 11:52:26 -07002093    /* Logic to enable/disable TNR based on specific config size, etc. */
2094 if ((m_bTnrPreview || m_bTnrVideo) && m_bIsVideo &&
Thierry Strudel3d639192016-09-09 11:52:26 -07002095 (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE))
2096 m_bTnrEnabled = true;
Thierry Strudel54dc9782017-02-15 12:12:10 -08002097 else if (forceEnableTnr)
2098 m_bTnrEnabled = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07002099
Mansoor Aftab93a66e52017-01-26 14:58:25 -08002100 char videoHdrProp[PROPERTY_VALUE_MAX];
2101 memset(videoHdrProp, 0, sizeof(videoHdrProp));
2102 property_get("persist.camera.hdr.video", videoHdrProp, "0");
2103 uint8_t hdr_mode_prop = (uint8_t)atoi(videoHdrProp);
2104
2105 if (hdr_mode_prop == 1 && m_bIsVideo &&
2106 mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2107 m_bVideoHdrEnabled = true;
2108 else
2109 m_bVideoHdrEnabled = false;
2110
2111
Thierry Strudel3d639192016-09-09 11:52:26 -07002112 /* Check if num_streams is sane */
2113 if (stallStreamCnt > MAX_STALLING_STREAMS ||
2114 rawStreamCnt > MAX_RAW_STREAMS ||
2115 processedStreamCnt > MAX_PROCESSED_STREAMS) {
 2116        LOGE("Invalid stream config: stall: %d, raw: %d, processed: %d",
2117 stallStreamCnt, rawStreamCnt, processedStreamCnt);
2118 pthread_mutex_unlock(&mMutex);
2119 return -EINVAL;
2120 }
2121 /* Check whether we have zsl stream or 4k video case */
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002122 if (isZsl && m_bIs4KVideo) {
2123 LOGE("Currently invalid configuration ZSL & 4K Video!");
Thierry Strudel3d639192016-09-09 11:52:26 -07002124 pthread_mutex_unlock(&mMutex);
2125 return -EINVAL;
2126 }
2127 /* Check if stream sizes are sane */
2128 if (numStreamsOnEncoder > 2) {
2129 LOGE("Number of streams on ISP encoder path exceeds limits of 2");
2130 pthread_mutex_unlock(&mMutex);
2131 return -EINVAL;
2132 } else if (1 < numStreamsOnEncoder){
2133 bUseCommonFeatureMask = true;
2134 LOGH("Multiple streams above max viewfinder size, common mask needed");
2135 }
2136
2137 /* Check if BLOB size is greater than 4k in 4k recording case */
2138 if (m_bIs4KVideo && bJpegExceeds4K) {
2139 LOGE("HAL doesn't support Blob size greater than 4k in 4k recording");
2140 pthread_mutex_unlock(&mMutex);
2141 return -EINVAL;
2142 }
2143
Emilian Peev7650c122017-01-19 08:24:33 -08002144 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2145 depthPresent) {
2146 LOGE("HAL doesn't support depth streams in HFR mode!");
2147 pthread_mutex_unlock(&mMutex);
2148 return -EINVAL;
2149 }
2150
Thierry Strudel3d639192016-09-09 11:52:26 -07002151 // When JPEG and preview streams share VFE output, CPP will not apply CAC2
2152 // on JPEG stream. So disable such configurations to ensure CAC2 is applied.
2153 // Don't fail for reprocess configurations. Also don't fail if bJpegExceeds4K
2154 // is not true. Otherwise testMandatoryOutputCombinations will fail with following
2155 // configurations:
2156 // {[PRIV, PREVIEW] [PRIV, RECORD] [JPEG, RECORD]}
2157 // {[PRIV, PREVIEW] [YUV, RECORD] [JPEG, RECORD]}
2158 // (These two configurations will not have CAC2 enabled even in HQ modes.)
2159 if (!isZsl && bJpegOnEncoder && bJpegExceeds4K && bUseCommonFeatureMask) {
2160 ALOGE("%s: Blob size greater than 4k and multiple streams are on encoder output",
2161 __func__);
2162 pthread_mutex_unlock(&mMutex);
2163 return -EINVAL;
2164 }
2165
2166 // If jpeg stream is available, and a YUV 888 stream is on Encoder path, and
2167 // the YUV stream's size is greater or equal to the JPEG size, set common
2168 // postprocess mask to NONE, so that we can take advantage of postproc bypass.
2169 if (numYuv888OnEncoder && isOnEncoder(maxViewfinderSize,
2170 jpegSize.width, jpegSize.height) &&
2171 largeYuv888Size.width > jpegSize.width &&
2172 largeYuv888Size.height > jpegSize.height) {
2173 bYuv888OverrideJpeg = true;
2174 } else if (!isJpeg && numStreamsOnEncoder > 1) {
2175 commonFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2176 }
2177
2178 LOGH("max viewfinder width %d height %d isZsl %d bUseCommonFeature %x commonFeatureMask %llx",
2179 maxViewfinderSize.width, maxViewfinderSize.height, isZsl, bUseCommonFeatureMask,
2180 commonFeatureMask);
2181 LOGH("numStreamsOnEncoder %d, processedStreamCnt %d, stallcnt %d bSmallJpegSize %d",
2182 numStreamsOnEncoder, processedStreamCnt, stallStreamCnt, bSmallJpegSize);
2183
2184 rc = validateStreamDimensions(streamList);
2185 if (rc == NO_ERROR) {
2186 rc = validateStreamRotations(streamList);
2187 }
2188 if (rc != NO_ERROR) {
2189 LOGE("Invalid stream configuration requested!");
2190 pthread_mutex_unlock(&mMutex);
2191 return rc;
2192 }
2193
Emilian Peev0f3c3162017-03-15 12:57:46 +00002194 if (1 < pdStatCount) {
2195 LOGE("HAL doesn't support multiple PD streams");
2196 pthread_mutex_unlock(&mMutex);
2197 return -EINVAL;
2198 }
2199
2200 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2201 (1 == pdStatCount)) {
2202 LOGE("HAL doesn't support PD streams in HFR mode!");
2203 pthread_mutex_unlock(&mMutex);
2204 return -EINVAL;
2205 }
2206
Thierry Strudel3d639192016-09-09 11:52:26 -07002207 camera3_stream_t *zslStream = NULL; //Only use this for size and not actual handle!
2208 for (size_t i = 0; i < streamList->num_streams; i++) {
2209 camera3_stream_t *newStream = streamList->streams[i];
2210 LOGH("newStream type = %d, stream format = %d "
2211 "stream size : %d x %d, stream rotation = %d",
2212 newStream->stream_type, newStream->format,
2213 newStream->width, newStream->height, newStream->rotation);
 2214        //if the stream is already in mStreamInfo, validate it
2215 bool stream_exists = false;
2216 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
2217 it != mStreamInfo.end(); it++) {
2218 if ((*it)->stream == newStream) {
2219 QCamera3ProcessingChannel *channel =
2220 (QCamera3ProcessingChannel*)(*it)->stream->priv;
2221 stream_exists = true;
2222 if (channel)
2223 delete channel;
2224 (*it)->status = VALID;
2225 (*it)->stream->priv = NULL;
2226 (*it)->channel = NULL;
2227 }
2228 }
2229 if (!stream_exists && newStream->stream_type != CAMERA3_STREAM_INPUT) {
2230 //new stream
2231 stream_info_t* stream_info;
2232 stream_info = (stream_info_t* )malloc(sizeof(stream_info_t));
2233 if (!stream_info) {
2234 LOGE("Could not allocate stream info");
2235 rc = -ENOMEM;
2236 pthread_mutex_unlock(&mMutex);
2237 return rc;
2238 }
2239 stream_info->stream = newStream;
2240 stream_info->status = VALID;
2241 stream_info->channel = NULL;
Chien-Yu Chen14d3e392017-07-10 18:27:05 -07002242 stream_info->id = i;
Thierry Strudel3d639192016-09-09 11:52:26 -07002243 mStreamInfo.push_back(stream_info);
2244 }
2245 /* Covers Opaque ZSL and API1 F/W ZSL */
2246 if (IS_USAGE_ZSL(newStream->usage)
2247 || newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ) {
2248 if (zslStream != NULL) {
2249 LOGE("Multiple input/reprocess streams requested!");
2250 pthread_mutex_unlock(&mMutex);
2251 return BAD_VALUE;
2252 }
2253 zslStream = newStream;
2254 }
2255 /* Covers YUV reprocess */
2256 if (inputStream != NULL) {
2257 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT
2258 && newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
2259 && inputStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
2260 && inputStream->width == newStream->width
2261 && inputStream->height == newStream->height) {
2262 if (zslStream != NULL) {
 2263                    /* This scenario indicates that multiple YUV streams with the same
 2264                     * size as the input stream have been requested. Since the zsl stream
 2265                     * handle is solely used to override the size of streams that share
 2266                     * h/w streams, we just make a guess here as to which stream is the
 2267                     * ZSL stream. This will be refactored once there is generic logic
 2268                     * for streams sharing encoder output.
2269 */
 2270                    LOGH("Warning, Multiple input/reprocess streams requested!");
2271 }
2272 zslStream = newStream;
2273 }
2274 }
2275 }
2276
2277 /* If a zsl stream is set, we know that we have configured at least one input or
2278 bidirectional stream */
2279 if (NULL != zslStream) {
2280 mInputStreamInfo.dim.width = (int32_t)zslStream->width;
2281 mInputStreamInfo.dim.height = (int32_t)zslStream->height;
2282 mInputStreamInfo.format = zslStream->format;
2283 mInputStreamInfo.usage = zslStream->usage;
2284 LOGD("Input stream configured! %d x %d, format %d, usage %d",
2285 mInputStreamInfo.dim.width,
2286 mInputStreamInfo.dim.height,
2287 mInputStreamInfo.format, mInputStreamInfo.usage);
2288 }
2289
2290 cleanAndSortStreamInfo();
2291 if (mMetadataChannel) {
2292 delete mMetadataChannel;
2293 mMetadataChannel = NULL;
2294 }
2295 if (mSupportChannel) {
2296 delete mSupportChannel;
2297 mSupportChannel = NULL;
2298 }
2299
2300 if (mAnalysisChannel) {
2301 delete mAnalysisChannel;
2302 mAnalysisChannel = NULL;
2303 }
2304
2305 if (mDummyBatchChannel) {
2306 delete mDummyBatchChannel;
2307 mDummyBatchChannel = NULL;
2308 }
2309
Emilian Peev7650c122017-01-19 08:24:33 -08002310 if (mDepthChannel) {
2311 mDepthChannel = NULL;
2312 }
Emilian Peev656e4fa2017-06-02 16:47:04 +01002313 mDepthCloudMode = CAM_PD_DATA_SKIP;
Emilian Peev7650c122017-01-19 08:24:33 -08002314
Chien-Yu Chen3f303522017-05-19 15:21:45 -07002315 mShutterDispatcher.clear();
2316 mOutputBufferDispatcher.clear();
2317
Thierry Strudel2896d122017-02-23 19:18:03 -08002318 char is_type_value[PROPERTY_VALUE_MAX];
2319 property_get("persist.camera.is_type", is_type_value, "4");
2320 m_bEis3PropertyEnabled = (atoi(is_type_value) == IS_TYPE_EIS_3_0);
2321
Binhao Line406f062017-05-03 14:39:44 -07002322 char property_value[PROPERTY_VALUE_MAX];
2323 property_get("persist.camera.gzoom.at", property_value, "0");
2324 int goog_zoom_at = atoi(property_value);
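    // persist.camera.gzoom.at is a bitmask: bit 0 enables Google zoom on the video
    // stream and bit 1 on the preview stream (back camera only).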
Jason Leec4cf5032017-05-24 18:31:41 -07002325 bool is_goog_zoom_video_enabled = ((goog_zoom_at & 1) > 0) &&
2326 gCamCapability[mCameraId]->position == CAM_POSITION_BACK;
2327 bool is_goog_zoom_preview_enabled = ((goog_zoom_at & 2) > 0) &&
2328 gCamCapability[mCameraId]->position == CAM_POSITION_BACK;
Binhao Line406f062017-05-03 14:39:44 -07002329
2330 property_get("persist.camera.gzoom.4k", property_value, "0");
2331 bool is_goog_zoom_4k_enabled = (atoi(property_value) > 0);
2332
Thierry Strudel3d639192016-09-09 11:52:26 -07002333 //Create metadata channel and initialize it
2334 cam_feature_mask_t metadataFeatureMask = CAM_QCOM_FEATURE_NONE;
2335 setPAAFSupport(metadataFeatureMask, CAM_STREAM_TYPE_METADATA,
2336 gCamCapability[mCameraId]->color_arrangement);
2337 mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
2338 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002339 setBufferErrorStatus, &padding_info, metadataFeatureMask, this);
Thierry Strudel3d639192016-09-09 11:52:26 -07002340 if (mMetadataChannel == NULL) {
2341 LOGE("failed to allocate metadata channel");
2342 rc = -ENOMEM;
2343 pthread_mutex_unlock(&mMutex);
2344 return rc;
2345 }
Emilian Peev662c05e2017-05-16 10:00:04 +01002346 mMetadataChannel->enableDepthData(depthPresent);
Thierry Strudel3d639192016-09-09 11:52:26 -07002347 rc = mMetadataChannel->initialize(IS_TYPE_NONE);
2348 if (rc < 0) {
2349 LOGE("metadata channel initialization failed");
2350 delete mMetadataChannel;
2351 mMetadataChannel = NULL;
2352 pthread_mutex_unlock(&mMutex);
2353 return rc;
2354 }
2355
Thierry Strudel2896d122017-02-23 19:18:03 -08002356 cam_feature_mask_t zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002357 bool isRawStreamRequested = false;
Thierry Strudel2896d122017-02-23 19:18:03 -08002358 bool onlyRaw = true;
Binhao Lincdb362a2017-04-20 13:31:54 -07002359 // Keep track of preview/video streams indices.
2360 // There could be more than one preview streams, but only one video stream.
2361 int32_t video_stream_idx = -1;
2362 int32_t preview_stream_idx[streamList->num_streams];
2363 size_t preview_stream_cnt = 0;
Jason Leea52b77e2017-06-27 16:16:17 -07002364 bool previewTnr[streamList->num_streams];
2365 memset(previewTnr, 0, sizeof(bool) * streamList->num_streams);
2366 bool isFront = gCamCapability[mCameraId]->position == CAM_POSITION_FRONT;
2367 // Loop through once to determine preview TNR conditions before creating channels.
2368 for (size_t i = 0; i < streamList->num_streams; i++) {
2369 camera3_stream_t *newStream = streamList->streams[i];
2370 uint32_t stream_usage = newStream->usage;
2371 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT &&
2372 newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {
2373 if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER)
2374 video_stream_idx = (int32_t)i;
2375 else
2376 preview_stream_idx[preview_stream_cnt++] = (int32_t)i;
2377 }
2378 }
2379 // By default, preview stream TNR is disabled.
2380 // Enable TNR to the preview stream if all conditions below are satisfied:
2381 // 1. preview resolution == video resolution.
2382 // 2. video stream TNR is enabled.
2383 // 3. EIS2.0 OR is front camera (which wouldn't use EIS3 even if it's set)
2384 for (size_t i = 0; i < preview_stream_cnt && video_stream_idx != -1; i++) {
2385 camera3_stream_t *video_stream = streamList->streams[video_stream_idx];
2386 camera3_stream_t *preview_stream = streamList->streams[preview_stream_idx[i]];
2387 if (m_bTnrEnabled && m_bTnrVideo &&
2388 (isFront || (atoi(is_type_value) == IS_TYPE_EIS_2_0)) &&
2389 video_stream->width == preview_stream->width &&
2390 video_stream->height == preview_stream->height) {
2391 previewTnr[preview_stream_idx[i]] = true;
2392 }
2393 }
2394
Thierry Strudel3d639192016-09-09 11:52:26 -07002395 memset(&mStreamConfigInfo, 0, sizeof(cam_stream_size_info_t));
2396 /* Allocate channel objects for the requested streams */
2397 for (size_t i = 0; i < streamList->num_streams; i++) {
Binhao Line406f062017-05-03 14:39:44 -07002398
Thierry Strudel3d639192016-09-09 11:52:26 -07002399 camera3_stream_t *newStream = streamList->streams[i];
2400 uint32_t stream_usage = newStream->usage;
2401 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)newStream->width;
2402 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)newStream->height;
2403 struct camera_info *p_info = NULL;
2404 pthread_mutex_lock(&gCamLock);
2405 p_info = get_cam_info(mCameraId, &mStreamConfigInfo.sync_type);
2406 pthread_mutex_unlock(&gCamLock);
2407 if ((newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
2408 || IS_USAGE_ZSL(newStream->usage)) &&
2409 newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED){
Thierry Strudel2896d122017-02-23 19:18:03 -08002410 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002411 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
Thierry Strudel2896d122017-02-23 19:18:03 -08002412 if (isOnEncoder(maxViewfinderSize, newStream->width, newStream->height)) {
2413 if (bUseCommonFeatureMask)
2414 zsl_ppmask = commonFeatureMask;
2415 else
2416 zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002417 } else {
Thierry Strudel2896d122017-02-23 19:18:03 -08002418 if (numStreamsOnEncoder > 0)
2419 zsl_ppmask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2420 else
2421 zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002422 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002423 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = zsl_ppmask;
Thierry Strudel3d639192016-09-09 11:52:26 -07002424 } else if(newStream->stream_type == CAMERA3_STREAM_INPUT) {
Thierry Strudel2896d122017-02-23 19:18:03 -08002425 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002426 LOGH("Input stream configured, reprocess config");
2427 } else {
2428 //for non zsl streams find out the format
2429 switch (newStream->format) {
2430 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED :
2431 {
Thierry Strudel2896d122017-02-23 19:18:03 -08002432 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002433 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2434 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2435 /* add additional features to pp feature mask */
2436 addToPPFeatureMask(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
2437 mStreamConfigInfo.num_streams);
2438
2439 if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) {
2440 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2441 CAM_STREAM_TYPE_VIDEO;
2442 if (m_bTnrEnabled && m_bTnrVideo) {
2443 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2444 CAM_QCOM_FEATURE_CPP_TNR;
2445 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
2446 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2447 ~CAM_QCOM_FEATURE_CDS;
2448 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002449 if (m_bEis3PropertyEnabled /* hint for EIS 3 needed here */) {
2450 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2451 CAM_QTI_FEATURE_PPEISCORE;
2452 }
Binhao Line406f062017-05-03 14:39:44 -07002453 if (is_goog_zoom_video_enabled && (is_goog_zoom_4k_enabled || !m_bIs4KVideo)) {
2454 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2455 CAM_QCOM_FEATURE_GOOG_ZOOM;
2456 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002457 } else {
2458 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2459 CAM_STREAM_TYPE_PREVIEW;
Jason Leea52b77e2017-06-27 16:16:17 -07002460 if (m_bTnrEnabled && (previewTnr[i] || m_bTnrPreview)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002461 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2462 CAM_QCOM_FEATURE_CPP_TNR;
2463 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
2464 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2465 ~CAM_QCOM_FEATURE_CDS;
2466 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002467 if(!m_bSwTnrPreview) {
2468 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2469 ~CAM_QTI_FEATURE_SW_TNR;
2470 }
Binhao Line406f062017-05-03 14:39:44 -07002471 if (is_goog_zoom_preview_enabled) {
2472 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2473 CAM_QCOM_FEATURE_GOOG_ZOOM;
2474 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002475 padding_info.width_padding = mSurfaceStridePadding;
2476 padding_info.height_padding = CAM_PAD_TO_2;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002477 previewSize.width = (int32_t)newStream->width;
2478 previewSize.height = (int32_t)newStream->height;
Thierry Strudel3d639192016-09-09 11:52:26 -07002479 }
2480 if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
2481 (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
2482 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2483 newStream->height;
2484 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2485 newStream->width;
2486 }
2487 }
2488 break;
2489 case HAL_PIXEL_FORMAT_YCbCr_420_888:
Thierry Strudel2896d122017-02-23 19:18:03 -08002490 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002491 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_CALLBACK;
2492 if (isOnEncoder(maxViewfinderSize, newStream->width, newStream->height)) {
2493 if (bUseCommonFeatureMask)
2494 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2495 commonFeatureMask;
2496 else
2497 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2498 CAM_QCOM_FEATURE_NONE;
2499 } else {
2500 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2501 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2502 }
2503 break;
2504 case HAL_PIXEL_FORMAT_BLOB:
Thierry Strudel2896d122017-02-23 19:18:03 -08002505 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002506 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
2507 // No need to check bSmallJpegSize if ZSL is present since JPEG uses ZSL stream
2508 if ((m_bIs4KVideo && !isZsl) || (bSmallJpegSize && !isZsl)) {
2509 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2510 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel2896d122017-02-23 19:18:03 -08002511 /* Remove rotation if it is not supported
2512 for 4K LiveVideo snapshot case (online processing) */
2513 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask &
2514 CAM_QCOM_FEATURE_ROTATION)) {
2515 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams]
2516 &= ~CAM_QCOM_FEATURE_ROTATION;
2517 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002518 } else {
2519 if (bUseCommonFeatureMask &&
2520 isOnEncoder(maxViewfinderSize, newStream->width,
2521 newStream->height)) {
2522 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = commonFeatureMask;
2523 } else {
2524 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2525 }
2526 }
2527 if (isZsl) {
2528 if (zslStream) {
2529 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2530 (int32_t)zslStream->width;
2531 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2532 (int32_t)zslStream->height;
Thierry Strudel2896d122017-02-23 19:18:03 -08002533 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2534 zsl_ppmask;
Thierry Strudel3d639192016-09-09 11:52:26 -07002535 } else {
2536 LOGE("Error, No ZSL stream identified");
2537 pthread_mutex_unlock(&mMutex);
2538 return -EINVAL;
2539 }
2540 } else if (m_bIs4KVideo) {
2541 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)videoWidth;
2542 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)videoHeight;
2543 } else if (bYuv888OverrideJpeg) {
2544 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2545 (int32_t)largeYuv888Size.width;
2546 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2547 (int32_t)largeYuv888Size.height;
2548 }
2549 break;
2550 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2551 case HAL_PIXEL_FORMAT_RAW16:
2552 case HAL_PIXEL_FORMAT_RAW10:
2553 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
2554 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2555 isRawStreamRequested = true;
Emilian Peev0f3c3162017-03-15 12:57:46 +00002556 if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
2557 (HAL_PIXEL_FORMAT_RAW16 == newStream->format)) {
2558 mStreamConfigInfo.sub_format_type[mStreamConfigInfo.num_streams] =
2559 gCamCapability[mCameraId]->sub_fmt[mPDIndex];
2560 mStreamConfigInfo.format[mStreamConfigInfo.num_streams] =
2561 gCamCapability[mCameraId]->supported_meta_raw_fmts[mPDIndex];
2562 mStreamConfigInfo.dt[mStreamConfigInfo.num_streams] =
2563 gCamCapability[mCameraId]->dt[mPDIndex];
2564 mStreamConfigInfo.vc[mStreamConfigInfo.num_streams] =
2565 gCamCapability[mCameraId]->vc[mPDIndex];
2566 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002567 break;
2568 default:
Thierry Strudel2896d122017-02-23 19:18:03 -08002569 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002570 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_DEFAULT;
2571 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2572 break;
2573 }
2574 }
2575
2576 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2577 (cam_stream_type_t) mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2578 gCamCapability[mCameraId]->color_arrangement);
2579
2580 if (newStream->priv == NULL) {
2581 //New stream, construct channel
2582 switch (newStream->stream_type) {
2583 case CAMERA3_STREAM_INPUT:
2584 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ;
2585 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE; //WR for in-place algos
2586 break;
2587 case CAMERA3_STREAM_BIDIRECTIONAL:
2588 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ |
2589 GRALLOC_USAGE_HW_CAMERA_WRITE;
2590 break;
2591 case CAMERA3_STREAM_OUTPUT:
2592 /* For video encoding stream, set read/write rarely
2593 * flags so that the buffers may be allocated un-cached */
2594 if (newStream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER)
2595 newStream->usage |=
2596 (GRALLOC_USAGE_SW_READ_RARELY |
2597 GRALLOC_USAGE_SW_WRITE_RARELY |
2598 GRALLOC_USAGE_HW_CAMERA_WRITE);
2599 else if (IS_USAGE_ZSL(newStream->usage))
2600 {
2601 LOGD("ZSL usage flag already set, skipping");
2602 }
2603 else if (newStream == zslStream
2604 || newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
2605 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_ZSL;
2606 } else
2607 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;
2608 break;
2609 default:
2610 LOGE("Invalid stream_type %d", newStream->stream_type);
2611 break;
2612 }
2613
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002614 bool forcePreviewUBWC = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07002615 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT ||
2616 newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
2617 QCamera3ProcessingChannel *channel = NULL;
2618 switch (newStream->format) {
2619 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
2620 if ((newStream->usage &
2621 private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) &&
2622 (streamList->operation_mode ==
2623 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2624 ) {
2625 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2626 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002627 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002628 this,
2629 newStream,
2630 (cam_stream_type_t)
2631 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2632 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2633 mMetadataChannel,
2634 0); //heap buffers are not required for HFR video channel
2635 if (channel == NULL) {
2636 LOGE("allocation of channel failed");
2637 pthread_mutex_unlock(&mMutex);
2638 return -ENOMEM;
2639 }
2640 //channel->getNumBuffers() will return 0 here so use
2641 //MAX_INFLIGHT_HFR_REQUESTS
2642 newStream->max_buffers = MAX_INFLIGHT_HFR_REQUESTS;
2643 newStream->priv = channel;
2644 LOGI("num video buffers in HFR mode: %d",
2645 MAX_INFLIGHT_HFR_REQUESTS);
2646 } else {
2647 /* Copy stream contents in HFR preview only case to create
2648 * dummy batch channel so that sensor streaming is in
2649 * HFR mode */
2650 if (!m_bIsVideo && (streamList->operation_mode ==
2651 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)) {
2652 mDummyBatchStream = *newStream;
2653 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002654 int bufferCount = MAX_INFLIGHT_REQUESTS;
2655 if (mStreamConfigInfo.type[mStreamConfigInfo.num_streams] ==
2656 CAM_STREAM_TYPE_VIDEO) {
Zhijun He6cdf6372017-07-15 14:59:58 -07002657 if (m_bEis3PropertyEnabled /* hint for EIS 3 needed here */) {
2658 // WAR: 4K video can only run <=30fps, reduce the buffer count.
2659 bufferCount = m_bIs4KVideo ?
2660 MAX_30FPS_VIDEO_BUFFERS : MAX_VIDEO_BUFFERS;
2661 }
2662
Thierry Strudel2896d122017-02-23 19:18:03 -08002663 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002664 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2665 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002666 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002667 this,
2668 newStream,
2669 (cam_stream_type_t)
2670 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2671 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2672 mMetadataChannel,
Thierry Strudel2896d122017-02-23 19:18:03 -08002673 bufferCount);
Thierry Strudel3d639192016-09-09 11:52:26 -07002674 if (channel == NULL) {
2675 LOGE("allocation of channel failed");
2676 pthread_mutex_unlock(&mMutex);
2677 return -ENOMEM;
2678 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002679 /* disable UBWC for preview, though supported,
2680 * to take advantage of CPP duplication */
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002681 if (m_bIsVideo && (!QCameraCommon::isVideoUBWCEnabled()) &&
Thierry Strudel2896d122017-02-23 19:18:03 -08002682 (previewSize.width == (int32_t)videoWidth)&&
2683 (previewSize.height == (int32_t)videoHeight)){
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002684 forcePreviewUBWC = false;
Thierry Strudel2896d122017-02-23 19:18:03 -08002685 }
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002686 channel->setUBWCEnabled(forcePreviewUBWC);
Binhao Line406f062017-05-03 14:39:44 -07002687 /* When goog_zoom is linked to the preview or video stream,
2688 * disable ubwc to the linked stream */
2689 if ((mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &
2690 CAM_QCOM_FEATURE_GOOG_ZOOM) != 0) {
2691 channel->setUBWCEnabled(false);
2692 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002693 newStream->max_buffers = channel->getNumBuffers();
2694 newStream->priv = channel;
2695 }
2696 break;
2697 case HAL_PIXEL_FORMAT_YCbCr_420_888: {
2698 channel = new QCamera3YUVChannel(mCameraHandle->camera_handle,
2699 mChannelHandle,
2700 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002701 setBufferErrorStatus, &padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002702 this,
2703 newStream,
2704 (cam_stream_type_t)
2705 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2706 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2707 mMetadataChannel);
2708 if (channel == NULL) {
2709 LOGE("allocation of YUV channel failed");
2710 pthread_mutex_unlock(&mMutex);
2711 return -ENOMEM;
2712 }
2713 newStream->max_buffers = channel->getNumBuffers();
2714 newStream->priv = channel;
2715 break;
2716 }
2717 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2718 case HAL_PIXEL_FORMAT_RAW16:
Emilian Peev0f3c3162017-03-15 12:57:46 +00002719 case HAL_PIXEL_FORMAT_RAW10: {
2720 bool isRAW16 = ((newStream->format == HAL_PIXEL_FORMAT_RAW16) &&
2721 (HAL_DATASPACE_DEPTH != newStream->data_space))
2722 ? true : false;
Thierry Strudel3d639192016-09-09 11:52:26 -07002723 mRawChannel = new QCamera3RawChannel(
2724 mCameraHandle->camera_handle, mChannelHandle,
2725 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002726 setBufferErrorStatus, &padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002727 this, newStream,
2728 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
Emilian Peev0f3c3162017-03-15 12:57:46 +00002729 mMetadataChannel, isRAW16);
Thierry Strudel3d639192016-09-09 11:52:26 -07002730 if (mRawChannel == NULL) {
2731 LOGE("allocation of raw channel failed");
2732 pthread_mutex_unlock(&mMutex);
2733 return -ENOMEM;
2734 }
2735 newStream->max_buffers = mRawChannel->getNumBuffers();
2736 newStream->priv = (QCamera3ProcessingChannel*)mRawChannel;
2737 break;
Emilian Peev0f3c3162017-03-15 12:57:46 +00002738 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002739 case HAL_PIXEL_FORMAT_BLOB:
Emilian Peev7650c122017-01-19 08:24:33 -08002740 if (newStream->data_space == HAL_DATASPACE_DEPTH) {
2741 mDepthChannel = new QCamera3DepthChannel(
2742 mCameraHandle->camera_handle, mChannelHandle,
2743 mCameraHandle->ops, NULL, NULL, &padding_info,
2744 0, this, MAX_INFLIGHT_REQUESTS, newStream,
2745 mMetadataChannel);
2746 if (NULL == mDepthChannel) {
2747 LOGE("Allocation of depth channel failed");
2748 pthread_mutex_unlock(&mMutex);
2749 return NO_MEMORY;
2750 }
2751 newStream->priv = mDepthChannel;
2752 newStream->max_buffers = MAX_INFLIGHT_REQUESTS;
2753 } else {
2754 // Max live snapshot inflight buffer is 1. This is to mitigate
2755 // frame drop issues for video snapshot. The more buffers being
2756 // allocated, the more frame drops there are.
2757 mPictureChannel = new QCamera3PicChannel(
2758 mCameraHandle->camera_handle, mChannelHandle,
2759 mCameraHandle->ops, captureResultCb,
2760 setBufferErrorStatus, &padding_info, this, newStream,
2761 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2762 m_bIs4KVideo, isZsl, mMetadataChannel,
2763 (m_bIsVideo ? 1 : MAX_INFLIGHT_BLOB));
2764 if (mPictureChannel == NULL) {
2765 LOGE("allocation of channel failed");
2766 pthread_mutex_unlock(&mMutex);
2767 return -ENOMEM;
2768 }
2769 newStream->priv = (QCamera3ProcessingChannel*)mPictureChannel;
2770 newStream->max_buffers = mPictureChannel->getNumBuffers();
2771 mPictureChannel->overrideYuvSize(
2772 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width,
2773 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height);
Thierry Strudel3d639192016-09-09 11:52:26 -07002774 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002775 break;
2776
2777 default:
2778 LOGE("not a supported format 0x%x", newStream->format);
Thierry Strudel73e91562017-05-15 09:16:18 -07002779 pthread_mutex_unlock(&mMutex);
2780 return -EINVAL;
Thierry Strudel3d639192016-09-09 11:52:26 -07002781 }
2782 } else if (newStream->stream_type == CAMERA3_STREAM_INPUT) {
2783 newStream->max_buffers = MAX_INFLIGHT_REPROCESS_REQUESTS;
2784 } else {
2785 LOGE("Error, Unknown stream type");
2786 pthread_mutex_unlock(&mMutex);
2787 return -EINVAL;
2788 }
2789
2790 QCamera3Channel *channel = (QCamera3Channel*) newStream->priv;
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002791 if (channel != NULL && QCamera3Channel::isUBWCEnabled()) {
Jason Leec4cf5032017-05-24 18:31:41 -07002792 // Here we only care whether it's EIS3 or not
2793 cam_is_type_t isType = m_bEis3PropertyEnabled ? IS_TYPE_EIS_3_0 : IS_TYPE_NONE;
2794 if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
2795 mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2796 isType = IS_TYPE_NONE;
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002797 cam_format_t fmt = QCamera3Channel::getStreamDefaultFormat(
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07002798 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
Jason Leec4cf5032017-05-24 18:31:41 -07002799 newStream->width, newStream->height, forcePreviewUBWC, isType);
Thierry Strudel3d639192016-09-09 11:52:26 -07002800 if(fmt == CAM_FORMAT_YUV_420_NV12_UBWC) {
2801 newStream->usage |= GRALLOC_USAGE_PRIVATE_ALLOC_UBWC;
2802 }
2803 }
2804
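            /* Link the newly constructed channel back to the matching stream_info entry. */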
2805 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
2806 it != mStreamInfo.end(); it++) {
2807 if ((*it)->stream == newStream) {
2808 (*it)->channel = (QCamera3ProcessingChannel*) newStream->priv;
2809 break;
2810 }
2811 }
2812 } else {
2813 // Channel already exists for this stream
2814 // Do nothing for now
2815 }
2816 padding_info = gCamCapability[mCameraId]->padding_info;
2817
Emilian Peev7650c122017-01-19 08:24:33 -08002818 /* Do not add entries for input & depth streams in metastream info
Thierry Strudel3d639192016-09-09 11:52:26 -07002819 * since there is no real stream associated with it
2820 */
Emilian Peev7650c122017-01-19 08:24:33 -08002821 if ((newStream->stream_type != CAMERA3_STREAM_INPUT) &&
Emilian Peev0f3c3162017-03-15 12:57:46 +00002822 !((newStream->data_space == HAL_DATASPACE_DEPTH) &&
2823 (newStream->format == HAL_PIXEL_FORMAT_BLOB))) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002824 mStreamConfigInfo.num_streams++;
Emilian Peev7650c122017-01-19 08:24:33 -08002825 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002826 }
2827
Chien-Yu Chen3f303522017-05-19 15:21:45 -07002828 // Let buffer dispatcher know the configured streams.
2829 mOutputBufferDispatcher.configureStreams(streamList);
2830
Thierry Strudel2896d122017-02-23 19:18:03 -08002831 if (mOpMode != QCAMERA3_VENDOR_STREAM_CONFIGURATION_RAW_ONLY_MODE) {
2832 onlyRaw = false;
2833 }
2834
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002835 // Create analysis stream all the time, even when h/w support is not available
Thierry Strudel2896d122017-02-23 19:18:03 -08002836 if (!onlyRaw) {
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002837 cam_feature_mask_t analysisFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002838 cam_analysis_info_t analysisInfo;
2839 int32_t ret = NO_ERROR;
2840 ret = mCommon.getAnalysisInfo(
2841 FALSE,
2842 analysisFeatureMask,
2843 &analysisInfo);
2844 if (ret == NO_ERROR) {
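            /* When the analysis format is Y-only, report a Y filter arrangement to
             * setPAAFSupport() instead of the sensor's native color arrangement. */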
Shuzhen Wang3b457d92016-08-03 08:46:59 -07002845 cam_color_filter_arrangement_t analysis_color_arrangement =
2846 (analysisInfo.analysis_format == CAM_FORMAT_Y_ONLY ?
2847 CAM_FILTER_ARRANGEMENT_Y :
2848 gCamCapability[mCameraId]->color_arrangement);
2849 setPAAFSupport(analysisFeatureMask, CAM_STREAM_TYPE_ANALYSIS,
2850 analysis_color_arrangement);
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002851 cam_dimension_t analysisDim;
2852 analysisDim = mCommon.getMatchingDimension(previewSize,
2853 analysisInfo.analysis_recommended_res);
2854
2855 mAnalysisChannel = new QCamera3SupportChannel(
2856 mCameraHandle->camera_handle,
2857 mChannelHandle,
2858 mCameraHandle->ops,
2859 &analysisInfo.analysis_padding_info,
2860 analysisFeatureMask,
2861 CAM_STREAM_TYPE_ANALYSIS,
2862 &analysisDim,
2863 (analysisInfo.analysis_format
2864 == CAM_FORMAT_Y_ONLY ? CAM_FORMAT_Y_ONLY
2865 : CAM_FORMAT_YUV_420_NV21),
2866 analysisInfo.hw_analysis_supported,
2867 gCamCapability[mCameraId]->color_arrangement,
2868 this,
2869 0); // force buffer count to 0
2870 } else {
2871 LOGW("getAnalysisInfo failed, ret = %d", ret);
2872 }
2873 if (!mAnalysisChannel) {
2874 LOGW("Analysis channel cannot be created");
2875 }
2876 }
2877
Thierry Strudel3d639192016-09-09 11:52:26 -07002878 //RAW DUMP channel
2879 if (mEnableRawDump && isRawStreamRequested == false){
2880 cam_dimension_t rawDumpSize;
2881 rawDumpSize = getMaxRawSize(mCameraId);
2882 cam_feature_mask_t rawDumpFeatureMask = CAM_QCOM_FEATURE_NONE;
2883 setPAAFSupport(rawDumpFeatureMask,
2884 CAM_STREAM_TYPE_RAW,
2885 gCamCapability[mCameraId]->color_arrangement);
2886 mRawDumpChannel = new QCamera3RawDumpChannel(mCameraHandle->camera_handle,
2887 mChannelHandle,
2888 mCameraHandle->ops,
2889 rawDumpSize,
2890 &padding_info,
2891 this, rawDumpFeatureMask);
2892 if (!mRawDumpChannel) {
2893 LOGE("Raw Dump channel cannot be created");
2894 pthread_mutex_unlock(&mMutex);
2895 return -ENOMEM;
2896 }
2897 }
2898
Thierry Strudel3d639192016-09-09 11:52:26 -07002899 if (mAnalysisChannel) {
2900 cam_analysis_info_t analysisInfo;
2901 memset(&analysisInfo, 0, sizeof(cam_analysis_info_t));
2902 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2903 CAM_STREAM_TYPE_ANALYSIS;
2904 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2905 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002906 rc = mCommon.getAnalysisInfo(FALSE,
Thierry Strudel3d639192016-09-09 11:52:26 -07002907 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2908 &analysisInfo);
2909 if (rc != NO_ERROR) {
2910 LOGE("getAnalysisInfo failed, ret = %d", rc);
2911 pthread_mutex_unlock(&mMutex);
2912 return rc;
2913 }
Shuzhen Wang3b457d92016-08-03 08:46:59 -07002914 cam_color_filter_arrangement_t analysis_color_arrangement =
2915 (analysisInfo.analysis_format == CAM_FORMAT_Y_ONLY ?
2916 CAM_FILTER_ARRANGEMENT_Y :
2917 gCamCapability[mCameraId]->color_arrangement);
2918 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2919 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2920 analysis_color_arrangement);
2921
Thierry Strudel3d639192016-09-09 11:52:26 -07002922 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002923 mCommon.getMatchingDimension(previewSize,
2924 analysisInfo.analysis_recommended_res);
Thierry Strudel3d639192016-09-09 11:52:26 -07002925 mStreamConfigInfo.num_streams++;
2926 }
2927
Thierry Strudel2896d122017-02-23 19:18:03 -08002928 if (!onlyRaw && isSupportChannelNeeded(streamList, mStreamConfigInfo)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002929 cam_analysis_info_t supportInfo;
2930 memset(&supportInfo, 0, sizeof(cam_analysis_info_t));
2931 cam_feature_mask_t callbackFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2932 setPAAFSupport(callbackFeatureMask,
2933 CAM_STREAM_TYPE_CALLBACK,
2934 gCamCapability[mCameraId]->color_arrangement);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002935 int32_t ret = NO_ERROR;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002936 ret = mCommon.getAnalysisInfo(FALSE, callbackFeatureMask, &supportInfo);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002937 if (ret != NO_ERROR) {
2938 /* Ignore the error for Mono camera
2939 * because the PAAF bit mask is only set
2940 * for CAM_STREAM_TYPE_ANALYSIS stream type
2941 */
2942 if (gCamCapability[mCameraId]->color_arrangement != CAM_FILTER_ARRANGEMENT_Y) {
2943 LOGW("getAnalysisInfo failed, ret = %d", ret);
2944 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002945 }
2946 mSupportChannel = new QCamera3SupportChannel(
2947 mCameraHandle->camera_handle,
2948 mChannelHandle,
2949 mCameraHandle->ops,
2950 &gCamCapability[mCameraId]->padding_info,
2951 callbackFeatureMask,
2952 CAM_STREAM_TYPE_CALLBACK,
2953 &QCamera3SupportChannel::kDim,
2954 CAM_FORMAT_YUV_420_NV21,
2955 supportInfo.hw_analysis_supported,
2956 gCamCapability[mCameraId]->color_arrangement,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002957 this, 0);
Thierry Strudel3d639192016-09-09 11:52:26 -07002958 if (!mSupportChannel) {
2959 LOGE("dummy channel cannot be created");
2960 pthread_mutex_unlock(&mMutex);
2961 return -ENOMEM;
2962 }
2963 }
2964
2965 if (mSupportChannel) {
2966 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2967 QCamera3SupportChannel::kDim;
2968 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2969 CAM_STREAM_TYPE_CALLBACK;
2970 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2971 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2972 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2973 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2974 gCamCapability[mCameraId]->color_arrangement);
2975 mStreamConfigInfo.num_streams++;
2976 }
2977
2978 if (mRawDumpChannel) {
2979 cam_dimension_t rawSize;
2980 rawSize = getMaxRawSize(mCameraId);
2981 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2982 rawSize;
2983 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2984 CAM_STREAM_TYPE_RAW;
2985 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2986 CAM_QCOM_FEATURE_NONE;
2987 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2988 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2989 gCamCapability[mCameraId]->color_arrangement);
2990 mStreamConfigInfo.num_streams++;
2991 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07002992
2993 if (mHdrPlusRawSrcChannel) {
2994 cam_dimension_t rawSize;
2995 rawSize = getMaxRawSize(mCameraId);
2996 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] = rawSize;
2997 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
2998 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2999 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
3000 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
3001 gCamCapability[mCameraId]->color_arrangement);
3002 mStreamConfigInfo.num_streams++;
3003 }
3004
Thierry Strudel3d639192016-09-09 11:52:26 -07003005 /* In HFR mode, if video stream is not added, create a dummy channel so that
3006 * ISP can create a batch mode even for preview only case. This channel is
3007 * never 'start'ed (no stream-on), it is only 'initialized' */
3008 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
3009 !m_bIsVideo) {
3010 cam_feature_mask_t dummyFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
3011 setPAAFSupport(dummyFeatureMask,
3012 CAM_STREAM_TYPE_VIDEO,
3013 gCamCapability[mCameraId]->color_arrangement);
3014 mDummyBatchChannel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
3015 mChannelHandle,
3016 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003017 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07003018 this,
3019 &mDummyBatchStream,
3020 CAM_STREAM_TYPE_VIDEO,
3021 dummyFeatureMask,
3022 mMetadataChannel);
3023 if (NULL == mDummyBatchChannel) {
3024 LOGE("creation of mDummyBatchChannel failed. "
3025 "Preview will use non-HFR sensor mode");
3026 }
3027 }
3028 if (mDummyBatchChannel) {
3029 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
3030 mDummyBatchStream.width;
3031 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
3032 mDummyBatchStream.height;
3033 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
3034 CAM_STREAM_TYPE_VIDEO;
3035 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
3036 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
3037 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
3038 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
3039 gCamCapability[mCameraId]->color_arrangement);
3040 mStreamConfigInfo.num_streams++;
3041 }
3042
3043 mStreamConfigInfo.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
3044 mStreamConfigInfo.buffer_info.max_buffers =
Thierry Strudel2896d122017-02-23 19:18:03 -08003045 m_bIs4KVideo ? 0 :
Jason Leea46ad5e2017-07-07 15:20:56 -07003046 m_bEis3PropertyEnabled && m_bIsVideo ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07003047
3048 /* Initialize mPendingRequestInfo and mPendingBuffersMap */
3049 for (pendingRequestIterator i = mPendingRequestsList.begin();
3050 i != mPendingRequestsList.end();) {
3051 i = erasePendingRequest(i);
3052 }
3053 mPendingFrameDropList.clear();
3054 // Initialize/Reset the pending buffers list
3055 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
3056 req.mPendingBufferList.clear();
3057 }
3058 mPendingBuffersMap.mPendingBuffersInRequest.clear();
Emilian Peev30522a12017-08-03 14:36:33 +01003059 mExpectedInflightDuration = 0;
3060 mExpectedFrameDuration = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07003061
Thierry Strudel3d639192016-09-09 11:52:26 -07003062 mCurJpegMeta.clear();
3063 //Get min frame duration for this streams configuration
3064 deriveMinFrameDuration();
3065
Chien-Yu Chenee335912017-02-09 17:53:20 -08003066 mFirstPreviewIntentSeen = false;
3067
3068 // Disable HRD+ if it's enabled;
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07003069 {
Chien-Yu Chen77ccd022017-06-23 12:00:36 -07003070 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
3071 finishHdrPlusClientOpeningLocked(l);
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07003072 disableHdrPlusModeLocked();
3073 }
Chien-Yu Chenee335912017-02-09 17:53:20 -08003074
Thierry Strudel3d639192016-09-09 11:52:26 -07003075 // Update state
3076 mState = CONFIGURED;
3077
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003078 mFirstMetadataCallback = true;
3079
Thierry Strudel3d639192016-09-09 11:52:26 -07003080 pthread_mutex_unlock(&mMutex);
3081
3082 return rc;
3083}
3084
3085/*===========================================================================
3086 * FUNCTION : validateCaptureRequest
3087 *
3088 * DESCRIPTION: validate a capture request from camera service
3089 *
3090 * PARAMETERS :
3091 * @request : request from framework to process
3092 *
3093 * RETURN : NO_ERROR on success; BAD_VALUE if the request is invalid
3094 *
3095 *==========================================================================*/
3096int QCamera3HardwareInterface::validateCaptureRequest(
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003097 camera3_capture_request_t *request,
3098 List<InternalRequest> &internallyRequestedStreams)
Thierry Strudel3d639192016-09-09 11:52:26 -07003099{
3100 ssize_t idx = 0;
3101 const camera3_stream_buffer_t *b;
3102 CameraMetadata meta;
3103
3104 /* Sanity check the request */
3105 if (request == NULL) {
3106 LOGE("NULL capture request");
3107 return BAD_VALUE;
3108 }
3109
3110 if ((request->settings == NULL) && (mState == CONFIGURED)) {
3111 /*settings cannot be null for the first request*/
3112 return BAD_VALUE;
3113 }
3114
3115 uint32_t frameNumber = request->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003116 if ((request->num_output_buffers < 1 || request->output_buffers == NULL)
3117 && (internallyRequestedStreams.size() == 0)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003118 LOGE("Request %d: No output buffers provided!",
3119 frameNumber);
3120 return BAD_VALUE;
3121 }
3122 if (request->num_output_buffers >= MAX_NUM_STREAMS) {
3123 LOGE("Number of buffers %d equals or is greater than maximum number of streams %d!",
3124 request->num_output_buffers, MAX_NUM_STREAMS);
3125 return BAD_VALUE;
3126 }
3127 if (request->input_buffer != NULL) {
3128 b = request->input_buffer;
3129 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
3130 LOGE("Request %d: Buffer %ld: Status not OK!",
3131 frameNumber, (long)idx);
3132 return BAD_VALUE;
3133 }
3134 if (b->release_fence != -1) {
3135 LOGE("Request %d: Buffer %ld: Has a release fence!",
3136 frameNumber, (long)idx);
3137 return BAD_VALUE;
3138 }
3139 if (b->buffer == NULL) {
3140 LOGE("Request %d: Buffer %ld: NULL buffer handle!",
3141 frameNumber, (long)idx);
3142 return BAD_VALUE;
3143 }
3144 }
3145
3146 // Validate all buffers
3147 b = request->output_buffers;
Thierry Strudel54dc9782017-02-15 12:12:10 -08003148 if (b == NULL) {
3149 return BAD_VALUE;
3150 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003151 while (idx < (ssize_t)request->num_output_buffers) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003152 QCamera3ProcessingChannel *channel =
3153 static_cast<QCamera3ProcessingChannel*>(b->stream->priv);
3154 if (channel == NULL) {
3155 LOGE("Request %d: Buffer %ld: Unconfigured stream!",
3156 frameNumber, (long)idx);
3157 return BAD_VALUE;
3158 }
3159 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
3160 LOGE("Request %d: Buffer %ld: Status not OK!",
3161 frameNumber, (long)idx);
3162 return BAD_VALUE;
3163 }
3164 if (b->release_fence != -1) {
3165 LOGE("Request %d: Buffer %ld: Has a release fence!",
3166 frameNumber, (long)idx);
3167 return BAD_VALUE;
3168 }
3169 if (b->buffer == NULL) {
3170 LOGE("Request %d: Buffer %ld: NULL buffer handle!",
3171 frameNumber, (long)idx);
3172 return BAD_VALUE;
3173 }
3174 if (*(b->buffer) == NULL) {
3175 LOGE("Request %d: Buffer %ld: NULL private handle!",
3176 frameNumber, (long)idx);
3177 return BAD_VALUE;
3178 }
3179 idx++;
3180 b = request->output_buffers + idx;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003181 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003182 return NO_ERROR;
3183}
3184
3185/*===========================================================================
3186 * FUNCTION : deriveMinFrameDuration
3187 *
3188 * DESCRIPTION: derive minimum processed, jpeg, and raw frame durations based
3189 * on currently configured streams.
3190 *
3191 * PARAMETERS : NONE
3192 *
3193 * RETURN : NONE
3194 *
3195 *==========================================================================*/
3196void QCamera3HardwareInterface::deriveMinFrameDuration()
3197{
3198 int32_t maxJpegDim, maxProcessedDim, maxRawDim;
Jason Lee2d0ab112017-06-21 18:03:05 -07003199 bool hasRaw = false;
3200
3201 mMinRawFrameDuration = 0;
3202 mMinJpegFrameDuration = 0;
3203 mMinProcessedFrameDuration = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07003204
3205 maxJpegDim = 0;
3206 maxProcessedDim = 0;
3207 maxRawDim = 0;
3208
3209 // Figure out maximum jpeg, processed, and raw dimensions
3210 for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
3211 it != mStreamInfo.end(); it++) {
3212
3213 // Input stream doesn't have valid stream_type
3214 if ((*it)->stream->stream_type == CAMERA3_STREAM_INPUT)
3215 continue;
3216
3217 int32_t dimension = (int32_t)((*it)->stream->width * (*it)->stream->height);
3218 if ((*it)->stream->format == HAL_PIXEL_FORMAT_BLOB) {
3219 if (dimension > maxJpegDim)
3220 maxJpegDim = dimension;
3221 } else if ((*it)->stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
3222 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW10 ||
3223 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW16) {
Jason Lee2d0ab112017-06-21 18:03:05 -07003224 hasRaw = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07003225 if (dimension > maxRawDim)
3226 maxRawDim = dimension;
3227 } else {
3228 if (dimension > maxProcessedDim)
3229 maxProcessedDim = dimension;
3230 }
3231 }
3232
3233 size_t count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt,
3234 MAX_SIZES_CNT);
3235
3236 //Assume all jpeg dimensions are in processed dimensions.
3237 if (maxJpegDim > maxProcessedDim)
3238 maxProcessedDim = maxJpegDim;
3239 //Find the smallest raw dimension that is greater or equal to jpeg dimension
Jason Lee2d0ab112017-06-21 18:03:05 -07003240 if (hasRaw && maxProcessedDim > maxRawDim) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003241 maxRawDim = INT32_MAX;
3242
3243 for (size_t i = 0; i < count; i++) {
3244 int32_t dimension = gCamCapability[mCameraId]->raw_dim[i].width *
3245 gCamCapability[mCameraId]->raw_dim[i].height;
3246 if (dimension >= maxProcessedDim && dimension < maxRawDim)
3247 maxRawDim = dimension;
3248 }
3249 }
3250
3251 //Find minimum durations for processed, jpeg, and raw
3252 for (size_t i = 0; i < count; i++) {
3253 if (maxRawDim == gCamCapability[mCameraId]->raw_dim[i].width *
3254 gCamCapability[mCameraId]->raw_dim[i].height) {
3255 mMinRawFrameDuration = gCamCapability[mCameraId]->raw_min_duration[i];
3256 break;
3257 }
3258 }
3259 count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
3260 for (size_t i = 0; i < count; i++) {
3261 if (maxProcessedDim ==
3262 gCamCapability[mCameraId]->picture_sizes_tbl[i].width *
3263 gCamCapability[mCameraId]->picture_sizes_tbl[i].height) {
3264 mMinProcessedFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
3265 mMinJpegFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
3266 break;
3267 }
3268 }
3269}
3270
3271/*===========================================================================
3272 * FUNCTION : getMinFrameDuration
3273 *
3274 * DESCRIPTION: get minimum frame duration based on the currently derived per-stream
3275 * minimum frame durations and the current request configuration.
3276 *
3277 * PARAMETERS : @request: request sent by the framework
3278 *
3279 * RETURN : minimum frame duration for a particular request
3280 *
3281 *==========================================================================*/
3282int64_t QCamera3HardwareInterface::getMinFrameDuration(const camera3_capture_request_t *request)
3283{
3284 bool hasJpegStream = false;
3285 bool hasRawStream = false;
3286 for (uint32_t i = 0; i < request->num_output_buffers; i ++) {
3287 const camera3_stream_t *stream = request->output_buffers[i].stream;
3288 if (stream->format == HAL_PIXEL_FORMAT_BLOB)
3289 hasJpegStream = true;
3290 else if (stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
3291 stream->format == HAL_PIXEL_FORMAT_RAW10 ||
3292 stream->format == HAL_PIXEL_FORMAT_RAW16)
3293 hasRawStream = true;
3294 }
3295
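    // A request carrying a JPEG (BLOB) stream is additionally bounded by the JPEG
    // minimum duration; otherwise only the raw/processed minimums apply.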
3296 if (!hasJpegStream)
3297 return MAX(mMinRawFrameDuration, mMinProcessedFrameDuration);
3298 else
3299 return MAX(MAX(mMinRawFrameDuration, mMinProcessedFrameDuration), mMinJpegFrameDuration);
3300}
3301
3302/*===========================================================================
3303 * FUNCTION : handleBuffersDuringFlushLock
3304 *
3305 * DESCRIPTION: Account for buffers returned from back-end during flush
3306 * This function is executed while mMutex is held by the caller.
3307 *
3308 * PARAMETERS :
3309 * @buffer: image buffer for the callback
3310 *
3311 * RETURN :
3312 *==========================================================================*/
3313void QCamera3HardwareInterface::handleBuffersDuringFlushLock(camera3_stream_buffer_t *buffer)
3314{
3315 bool buffer_found = false;
3316 for (List<PendingBuffersInRequest>::iterator req =
3317 mPendingBuffersMap.mPendingBuffersInRequest.begin();
3318 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
3319 for (List<PendingBufferInfo>::iterator i =
3320 req->mPendingBufferList.begin();
3321 i != req->mPendingBufferList.end(); i++) {
3322 if (i->buffer == buffer->buffer) {
3323 mPendingBuffersMap.numPendingBufsAtFlush--;
3324 LOGD("Found buffer %p for Frame %d, numPendingBufsAtFlush = %d",
3325 buffer->buffer, req->frame_number,
3326 mPendingBuffersMap.numPendingBufsAtFlush);
3327 buffer_found = true;
3328 break;
3329 }
3330 }
3331 if (buffer_found) {
3332 break;
3333 }
3334 }
3335 if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
3336 //signal the flush()
3337 LOGD("All buffers returned to HAL. Continue flush");
3338 pthread_cond_signal(&mBuffersCond);
3339 }
3340}
3341
Thierry Strudel3d639192016-09-09 11:52:26 -07003342/*===========================================================================
3343 * FUNCTION : handleBatchMetadata
3344 *
3345 * DESCRIPTION: Handles metadata buffer callback in batch mode
3346 *
3347 * PARAMETERS : @metadata_buf: metadata buffer
3348 * @free_and_bufdone_meta_buf: Buf done on the meta buf and free
3349 * the meta buf in this method
3350 *
3351 * RETURN :
3352 *
3353 *==========================================================================*/
3354void QCamera3HardwareInterface::handleBatchMetadata(
3355 mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf)
3356{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003357 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_BATCH_METADATA);
Thierry Strudel3d639192016-09-09 11:52:26 -07003358
3359 if (NULL == metadata_buf) {
3360 LOGE("metadata_buf is NULL");
3361 return;
3362 }
3363 /* In batch mode, the metadata will contain the frame number and timestamp of
3364 * the last frame in the batch. Eg: a batch containing buffers from request
3365 * 5,6,7 and 8 will have frame number and timestamp corresponding to 8.
3366 * multiple process_capture_requests => 1 set_param => 1 handleBatchMetadata =>
3367 * multiple process_capture_results */
3368 metadata_buffer_t *metadata =
3369 (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3370 int32_t frame_number_valid = 0, urgent_frame_number_valid = 0;
3371 uint32_t last_frame_number = 0, last_urgent_frame_number = 0;
3372 uint32_t first_frame_number = 0, first_urgent_frame_number = 0;
3373 uint32_t frame_number = 0, urgent_frame_number = 0;
3374 int64_t last_frame_capture_time = 0, first_frame_capture_time, capture_time;
3375 bool invalid_metadata = false;
3376 size_t urgentFrameNumDiff = 0, frameNumDiff = 0;
3377 size_t loopCount = 1;
Thierry Strudel54dc9782017-02-15 12:12:10 -08003378 bool is_metabuf_queued = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07003379
3380 int32_t *p_frame_number_valid =
3381 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3382 uint32_t *p_frame_number =
3383 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3384 int64_t *p_capture_time =
3385 POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
3386 int32_t *p_urgent_frame_number_valid =
3387 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
3388 uint32_t *p_urgent_frame_number =
3389 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
3390
3391 if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) ||
3392 (NULL == p_capture_time) || (NULL == p_urgent_frame_number_valid) ||
3393 (NULL == p_urgent_frame_number)) {
3394 LOGE("Invalid metadata");
3395 invalid_metadata = true;
3396 } else {
3397 frame_number_valid = *p_frame_number_valid;
3398 last_frame_number = *p_frame_number;
3399 last_frame_capture_time = *p_capture_time;
3400 urgent_frame_number_valid = *p_urgent_frame_number_valid;
3401 last_urgent_frame_number = *p_urgent_frame_number;
3402 }
3403
Thierry Strudel3d639192016-09-09 11:52:26 -07003404 /* In batch mode, when no video buffers are requested, set_parms are sent
3405 * for every capture_request. The difference between consecutive urgent
3406 * frame numbers and frame numbers should be used to interpolate the
3407 * corresponding frame numbers and time stamps */
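    /* Example: if this batch metadata reports last_frame_number = 8 for a batch that
     * started at frame 5, frameNumDiff is 4 and frames 5..8 are synthesized in the
     * loop below, with timestamps spaced by NSEC_PER_SEC / mHFRVideoFps. */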
3408 pthread_mutex_lock(&mMutex);
3409 if (urgent_frame_number_valid) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003410 ssize_t idx = mPendingBatchMap.indexOfKey(last_urgent_frame_number);
3411 if(idx < 0) {
3412 LOGE("Invalid urgent frame number received: %d. Irrecoverable error",
3413 last_urgent_frame_number);
3414 mState = ERROR;
3415 pthread_mutex_unlock(&mMutex);
3416 return;
3417 }
3418 first_urgent_frame_number = mPendingBatchMap.valueAt(idx);
Thierry Strudel3d639192016-09-09 11:52:26 -07003419 urgentFrameNumDiff = last_urgent_frame_number + 1 -
3420 first_urgent_frame_number;
3421
3422 LOGD("urgent_frm: valid: %d frm_num: %d - %d",
3423 urgent_frame_number_valid,
3424 first_urgent_frame_number, last_urgent_frame_number);
3425 }
3426
3427 if (frame_number_valid) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003428 ssize_t idx = mPendingBatchMap.indexOfKey(last_frame_number);
3429 if(idx < 0) {
3430 LOGE("Invalid frame number received: %d. Irrecoverable error",
3431 last_frame_number);
3432 mState = ERROR;
3433 pthread_mutex_unlock(&mMutex);
3434 return;
3435 }
3436 first_frame_number = mPendingBatchMap.valueAt(idx);
Thierry Strudel3d639192016-09-09 11:52:26 -07003437 frameNumDiff = last_frame_number + 1 -
3438 first_frame_number;
3439 mPendingBatchMap.removeItem(last_frame_number);
3440
3441 LOGD("frm: valid: %d frm_num: %d - %d",
3442 frame_number_valid,
3443 first_frame_number, last_frame_number);
3444
3445 }
3446 pthread_mutex_unlock(&mMutex);
3447
3448 if (urgent_frame_number_valid || frame_number_valid) {
3449 loopCount = MAX(urgentFrameNumDiff, frameNumDiff);
3450 if (urgentFrameNumDiff > MAX_HFR_BATCH_SIZE)
3451 LOGE("urgentFrameNumDiff: %d urgentFrameNum: %d",
3452 urgentFrameNumDiff, last_urgent_frame_number);
3453 if (frameNumDiff > MAX_HFR_BATCH_SIZE)
3454 LOGE("frameNumDiff: %d frameNum: %d",
3455 frameNumDiff, last_frame_number);
3456 }
3457
3458 for (size_t i = 0; i < loopCount; i++) {
3459 /* handleMetadataWithLock is called even for invalid_metadata for
3460 * pipeline depth calculation */
3461 if (!invalid_metadata) {
3462 /* Infer frame number. Batch metadata contains frame number of the
3463 * last frame */
3464 if (urgent_frame_number_valid) {
3465 if (i < urgentFrameNumDiff) {
3466 urgent_frame_number =
3467 first_urgent_frame_number + i;
3468 LOGD("inferred urgent frame_number: %d",
3469 urgent_frame_number);
3470 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3471 CAM_INTF_META_URGENT_FRAME_NUMBER, urgent_frame_number);
3472 } else {
3473 /* This is to handle when urgentFrameNumDiff < frameNumDiff */
3474 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3475 CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, 0);
3476 }
3477 }
3478
3479 /* Infer frame number. Batch metadata contains frame number of the
3480 * last frame */
3481 if (frame_number_valid) {
3482 if (i < frameNumDiff) {
3483 frame_number = first_frame_number + i;
3484 LOGD("inferred frame_number: %d", frame_number);
3485 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3486 CAM_INTF_META_FRAME_NUMBER, frame_number);
3487 } else {
3488 /* This is to handle when urgentFrameNumDiff > frameNumDiff */
3489 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3490 CAM_INTF_META_FRAME_NUMBER_VALID, 0);
3491 }
3492 }
3493
3494 if (last_frame_capture_time) {
3495 //Infer timestamp
3496 first_frame_capture_time = last_frame_capture_time -
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003497 (((loopCount - 1) * NSEC_PER_SEC) / (double) mHFRVideoFps);
Thierry Strudel3d639192016-09-09 11:52:26 -07003498 capture_time =
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003499 first_frame_capture_time + (i * NSEC_PER_SEC / (double) mHFRVideoFps);
Thierry Strudel3d639192016-09-09 11:52:26 -07003500 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3501 CAM_INTF_META_SENSOR_TIMESTAMP, capture_time);
3502 LOGD("batch capture_time: %lld, capture_time: %lld",
3503 last_frame_capture_time, capture_time);
3504 }
3505 }
3506 pthread_mutex_lock(&mMutex);
3507 handleMetadataWithLock(metadata_buf,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003508 false /* free_and_bufdone_meta_buf */,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003509 (i == urgentFrameNumDiff-1), /* last urgent metadata in the batch */
3510 (i == frameNumDiff-1), /* last metadata in the batch metadata */
Thierry Strudel54dc9782017-02-15 12:12:10 -08003511 &is_metabuf_queued /* if metabuf isqueued or not */);
Thierry Strudel3d639192016-09-09 11:52:26 -07003512 pthread_mutex_unlock(&mMutex);
3513 }
3514
3515 /* BufDone metadata buffer */
Thierry Strudel54dc9782017-02-15 12:12:10 -08003516 if (free_and_bufdone_meta_buf && !is_metabuf_queued) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003517 mMetadataChannel->bufDone(metadata_buf);
3518 free(metadata_buf);
Thierry Strudel54dc9782017-02-15 12:12:10 -08003519 metadata_buf = NULL;
Thierry Strudel3d639192016-09-09 11:52:26 -07003520 }
3521}
3522
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003523void QCamera3HardwareInterface::notifyError(uint32_t frameNumber,
3524 camera3_error_msg_code_t errorCode)
3525{
3526 camera3_notify_msg_t notify_msg;
3527 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
3528 notify_msg.type = CAMERA3_MSG_ERROR;
3529 notify_msg.message.error.error_code = errorCode;
3530 notify_msg.message.error.error_stream = NULL;
3531 notify_msg.message.error.frame_number = frameNumber;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003532 orchestrateNotify(&notify_msg);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003533
3534 return;
3535}
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003536
3537/*===========================================================================
3538 * FUNCTION : sendPartialMetadataWithLock
3539 *
3540 * DESCRIPTION: Send partial capture result callback with mMutex lock held.
3541 *
3542 * PARAMETERS : @metadata: metadata buffer
3543 * @requestIter: The iterator for the pending capture request for
3544 * which the partial result is being sent
3545 * @lastUrgentMetadataInBatch: Boolean to indicate whether this is the
3546 * last urgent metadata in a batch. Always true for non-batch mode
Shuzhen Wang485e2442017-08-02 12:21:08 -07003547 * @isJumpstartMetadata: Whether this is a partial metadata for
3548 * jumpstart, i.e. even though it doesn't map to a valid partial
3549 * frame number, its metadata entries should be kept.
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003550 *
3551 * RETURN :
3552 *
3553 *==========================================================================*/
3554
3555void QCamera3HardwareInterface::sendPartialMetadataWithLock(
3556 metadata_buffer_t *metadata,
3557 const pendingRequestIterator requestIter,
Shuzhen Wang485e2442017-08-02 12:21:08 -07003558 bool lastUrgentMetadataInBatch,
3559 bool isJumpstartMetadata)
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003560{
3561 camera3_capture_result_t result;
3562 memset(&result, 0, sizeof(camera3_capture_result_t));
3563
3564 requestIter->partial_result_cnt++;
3565
3566 // Extract 3A metadata
3567 result.result = translateCbUrgentMetadataToResultMetadata(
Shuzhen Wang485e2442017-08-02 12:21:08 -07003568 metadata, lastUrgentMetadataInBatch, requestIter->frame_number,
3569 isJumpstartMetadata);
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003570 // Populate metadata result
3571 result.frame_number = requestIter->frame_number;
3572 result.num_output_buffers = 0;
3573 result.output_buffers = NULL;
3574 result.partial_result = requestIter->partial_result_cnt;
3575
3576 {
Chien-Yu Chen77ccd022017-06-23 12:00:36 -07003577 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003578 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
3579 // Notify HDR+ client about the partial metadata.
3580 gHdrPlusClient->notifyFrameMetadata(result.frame_number, *result.result,
3581 result.partial_result == PARTIAL_RESULT_COUNT);
3582 }
3583 }
3584
3585 orchestrateResult(&result);
3586 LOGD("urgent frame_number = %u", result.frame_number);
3587 free_camera_metadata((camera_metadata_t *)result.result);
3588}
3589
Thierry Strudel3d639192016-09-09 11:52:26 -07003590/*===========================================================================
3591 * FUNCTION : handleMetadataWithLock
3592 *
3593 * DESCRIPTION: Handles metadata buffer callback with mMutex lock held.
3594 *
3595 * PARAMETERS : @metadata_buf: metadata buffer
3596 * @free_and_bufdone_meta_buf: Buf done on the meta buf and free
3597 * the meta buf in this method
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003598 * @lastUrgentMetadataInBatch: Boolean to indicate whether this is the
3599 * last urgent metadata in a batch. Always true for non-batch mode
3600 * @lastMetadataInBatch: Boolean to indicate whether this is the
3601 * last metadata in a batch. Always true for non-batch mode
Thierry Strudel54dc9782017-02-15 12:12:10 -08003602 * @p_is_metabuf_queued: Pointer to Boolean to check if metadata
3603 * buffer is enqueued or not.
Thierry Strudel3d639192016-09-09 11:52:26 -07003604 *
3605 * RETURN :
3606 *
3607 *==========================================================================*/
3608void QCamera3HardwareInterface::handleMetadataWithLock(
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003609 mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003610 bool lastUrgentMetadataInBatch, bool lastMetadataInBatch,
3611 bool *p_is_metabuf_queued)
Thierry Strudel3d639192016-09-09 11:52:26 -07003612{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003613 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_METADATA_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07003614 if ((mFlushPerf) || (ERROR == mState) || (DEINIT == mState)) {
3615 //during flush do not send metadata from this thread
3616 LOGD("not sending metadata during flush or when mState is error");
3617 if (free_and_bufdone_meta_buf) {
3618 mMetadataChannel->bufDone(metadata_buf);
3619 free(metadata_buf);
3620 }
3621 return;
3622 }
3623
3624 //not in flush
3625 metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3626 int32_t frame_number_valid, urgent_frame_number_valid;
3627 uint32_t frame_number, urgent_frame_number;
Jason Lee603176d2017-05-31 11:43:27 -07003628 int64_t capture_time, capture_time_av;
Thierry Strudel3d639192016-09-09 11:52:26 -07003629 nsecs_t currentSysTime;
3630
3631 int32_t *p_frame_number_valid =
3632 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3633 uint32_t *p_frame_number = POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3634 int64_t *p_capture_time = POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
Jason Lee603176d2017-05-31 11:43:27 -07003635 int64_t *p_capture_time_av = POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP_AV, metadata);
Thierry Strudel3d639192016-09-09 11:52:26 -07003636 int32_t *p_urgent_frame_number_valid =
3637 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
3638 uint32_t *p_urgent_frame_number =
3639 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
3640 IF_META_AVAILABLE(cam_stream_ID_t, p_cam_frame_drop, CAM_INTF_META_FRAME_DROPPED,
3641 metadata) {
3642 LOGD("Dropped frame info for frame_number_valid %d, frame_number %d",
3643 *p_frame_number_valid, *p_frame_number);
3644 }
3645
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003646 camera_metadata_t *resultMetadata = nullptr;
3647
Thierry Strudel3d639192016-09-09 11:52:26 -07003648 if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) || (NULL == p_capture_time) ||
3649 (NULL == p_urgent_frame_number_valid) || (NULL == p_urgent_frame_number)) {
3650 LOGE("Invalid metadata");
3651 if (free_and_bufdone_meta_buf) {
3652 mMetadataChannel->bufDone(metadata_buf);
3653 free(metadata_buf);
3654 }
3655 goto done_metadata;
3656 }
3657 frame_number_valid = *p_frame_number_valid;
3658 frame_number = *p_frame_number;
3659 capture_time = *p_capture_time;
Jason Lee603176d2017-05-31 11:43:27 -07003660 capture_time_av = *p_capture_time_av;
Thierry Strudel3d639192016-09-09 11:52:26 -07003661 urgent_frame_number_valid = *p_urgent_frame_number_valid;
3662 urgent_frame_number = *p_urgent_frame_number;
3663 currentSysTime = systemTime(CLOCK_MONOTONIC);
3664
Jason Lee603176d2017-05-31 11:43:27 -07003665 if (!gCamCapability[mCameraId]->timestamp_calibrated) {
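        // Estimate the CLOCK_BOOTTIME - CLOCK_MONOTONIC offset by bracketing one
        // BOOTTIME read between two MONOTONIC reads; the attempt with the smallest
        // bracket gives the most accurate midpoint, and that offset is then
        // subtracted from the sensor capture time.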
3666 const int tries = 3;
3667 nsecs_t bestGap, measured;
3668 for (int i = 0; i < tries; ++i) {
3669 const nsecs_t tmono = systemTime(SYSTEM_TIME_MONOTONIC);
3670 const nsecs_t tbase = systemTime(SYSTEM_TIME_BOOTTIME);
3671 const nsecs_t tmono2 = systemTime(SYSTEM_TIME_MONOTONIC);
3672 const nsecs_t gap = tmono2 - tmono;
3673 if (i == 0 || gap < bestGap) {
3674 bestGap = gap;
3675 measured = tbase - ((tmono + tmono2) >> 1);
3676 }
3677 }
3678 capture_time -= measured;
3679 }
3680
Thierry Strudel3d639192016-09-09 11:52:26 -07003681 // Detect if buffers from any requests are overdue
3682 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003683 int64_t timeout;
3684 {
3685 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
3686 // If there is a pending HDR+ request, the following requests may be blocked until the
3687 // HDR+ request is done. So allow a longer timeout.
3688 timeout = (mHdrPlusPendingRequests.size() > 0) ?
3689 MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT : MISSING_REQUEST_BUF_TIMEOUT;
Emilian Peev30522a12017-08-03 14:36:33 +01003690 if (timeout < mExpectedInflightDuration) {
3691 timeout = mExpectedInflightDuration;
3692 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003693 }
3694
3695 if ( (currentSysTime - req.timestamp) > s2ns(timeout) ) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003696 for (auto &missed : req.mPendingBufferList) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003697 assert(missed.stream->priv);
3698 if (missed.stream->priv) {
3699 QCamera3Channel *ch = (QCamera3Channel *)(missed.stream->priv);
3700 assert(ch->mStreams[0]);
3701 if (ch->mStreams[0]) {
3702 LOGE("Cancel missing frame = %d, buffer = %p,"
3703 "stream type = %d, stream format = %d",
3704 req.frame_number, missed.buffer,
3705 ch->mStreams[0]->getMyType(), missed.stream->format);
3706 ch->timeoutFrame(req.frame_number);
3707 }
3708 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003709 }
3710 }
3711 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003712 //For the very first metadata callback, regardless of whether it contains a valid
3713 //frame number, send the partial metadata for the jumpstarting requests.
3714 //Note that this has to be done even if the metadata doesn't contain valid
3715 //urgent frame number, because in the case only 1 request is ever submitted
3716 //to HAL, there won't be subsequent valid urgent frame number.
3717 if (mFirstMetadataCallback) {
3718 for (pendingRequestIterator i =
3719 mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
3720 if (i->bUseFirstPartial) {
Shuzhen Wang485e2442017-08-02 12:21:08 -07003721 sendPartialMetadataWithLock(metadata, i, lastUrgentMetadataInBatch,
3722 true /*isJumpstartMetadata*/);
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003723 }
3724 }
3725 mFirstMetadataCallback = false;
3726 }
3727
Thierry Strudel3d639192016-09-09 11:52:26 -07003728 //Partial result on process_capture_result for timestamp
3729 if (urgent_frame_number_valid) {
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003730 LOGD("valid urgent frame_number = %u", urgent_frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003731
3732 //Received an urgent Frame Number, handle it
3733 //using partial results
3734 for (pendingRequestIterator i =
3735 mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
3736 LOGD("Iterator Frame = %d urgent frame = %d",
3737 i->frame_number, urgent_frame_number);
3738
Chien-Yu Chen29fd1d72017-04-27 18:42:09 -07003739 if ((!i->input_buffer) && (!i->hdrplus) && (i->frame_number < urgent_frame_number) &&
Thierry Strudel3d639192016-09-09 11:52:26 -07003740 (i->partial_result_cnt == 0)) {
3741 LOGE("Error: HAL missed urgent metadata for frame number %d",
3742 i->frame_number);
Shuzhen Wang1ee712a2017-03-22 17:51:26 -07003743 i->partial_result_cnt++;
Thierry Strudel3d639192016-09-09 11:52:26 -07003744 }
3745
3746 if (i->frame_number == urgent_frame_number &&
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003747 i->partial_result_cnt == 0) {
Shuzhen Wang485e2442017-08-02 12:21:08 -07003748 sendPartialMetadataWithLock(metadata, i, lastUrgentMetadataInBatch,
3749 false /*isJumpstartMetadata*/);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003750 if (mResetInstantAEC && mInstantAECSettledFrameNumber == 0) {
3751 // Instant AEC settled for this frame.
3752 LOGH("instant AEC settled for frame number %d", urgent_frame_number);
3753 mInstantAECSettledFrameNumber = urgent_frame_number;
3754 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003755 break;
3756 }
3757 }
3758 }
3759
3760 if (!frame_number_valid) {
3761 LOGD("Not a valid normal frame number, used as SOF only");
3762 if (free_and_bufdone_meta_buf) {
3763 mMetadataChannel->bufDone(metadata_buf);
3764 free(metadata_buf);
3765 }
3766 goto done_metadata;
3767 }
3768 LOGH("valid frame_number = %u, capture_time = %lld",
3769 frame_number, capture_time);
3770
Emilian Peev4e0fe952017-06-30 12:40:09 -07003771 handleDepthDataLocked(metadata->depth_data, frame_number,
3772 metadata->is_depth_data_valid);
Emilian Peev7650c122017-01-19 08:24:33 -08003773
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003774 // Check whether any stream buffer corresponding to this is dropped or not
3775 // If dropped, then send the ERROR_BUFFER for the corresponding stream
3776 // OR check if instant AEC is enabled, then frames need to be dropped until AEC is settled.
3777 for (auto & pendingRequest : mPendingRequestsList) {
3778 if (p_cam_frame_drop || (mInstantAEC || pendingRequest.frame_number <
3779 mInstantAECSettledFrameNumber)) {
3780 camera3_notify_msg_t notify_msg = {};
3781 for (auto & buffer : pendingRequest.buffers) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003782 bool dropFrame = false;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003783 QCamera3ProcessingChannel *channel =
3784 (QCamera3ProcessingChannel *)buffer.stream->priv;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003785 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003786 if (p_cam_frame_drop) {
3787 for (uint32_t k = 0; k < p_cam_frame_drop->num_streams; k++) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003788 if (streamID == p_cam_frame_drop->stream_request[k].streamID) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003789 // Got the stream ID for drop frame.
3790 dropFrame = true;
3791 break;
3792 }
3793 }
3794 } else {
3795 // This is instant AEC case.
3796 // For instant AEC drop the stream untill AEC is settled.
3797 // For instant AEC drop the stream until AEC is settled.
3798 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003799
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003800 if (dropFrame) {
3801 // Send Error notify to frameworks with CAMERA3_MSG_ERROR_BUFFER
3802 if (p_cam_frame_drop) {
3803 // Treat msg as error for system buffer drops
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003804 LOGE("Start of reporting error frame#=%u, streamID=%u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003805 pendingRequest.frame_number, streamID);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003806 } else {
3807 // For instant AEC, inform frame drop and frame number
3808 LOGH("Start of reporting error frame#=%u for instant AEC, streamID=%u, "
3809 "AEC settled frame number = %u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003810 pendingRequest.frame_number, streamID,
3811 mInstantAECSettledFrameNumber);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003812 }
3813 notify_msg.type = CAMERA3_MSG_ERROR;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003814 notify_msg.message.error.frame_number = pendingRequest.frame_number;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003815 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER ;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003816 notify_msg.message.error.error_stream = buffer.stream;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003817 orchestrateNotify(&notify_msg);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003818 if (p_cam_frame_drop) {
3819 // Treat msg as error for system buffer drops
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003820 LOGE("End of reporting error frame#=%u, streamID=%u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003821 pendingRequest.frame_number, streamID);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003822 } else {
3823 // For instant AEC, inform frame drop and frame number
3824 LOGH("End of reporting error frame#=%u for instant AEC, streamID=%u, "
3825 "AEC settled frame number = %u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003826 pendingRequest.frame_number, streamID,
3827 mInstantAECSettledFrameNumber);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003828 }
3829 PendingFrameDropInfo PendingFrameDrop;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003830 PendingFrameDrop.frame_number = pendingRequest.frame_number;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003831 PendingFrameDrop.stream_ID = streamID;
3832 // Add the Frame drop info to mPendingFrameDropList
3833 mPendingFrameDropList.push_back(PendingFrameDrop);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003834 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003835 }
3836 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003837 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003838
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003839 for (auto & pendingRequest : mPendingRequestsList) {
3840 // Find the pending request with the frame number.
3841 if (pendingRequest.frame_number == frame_number) {
3842 // Update the sensor timestamp.
3843 pendingRequest.timestamp = capture_time;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003844
Thierry Strudel3d639192016-09-09 11:52:26 -07003845
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07003846 /* Set the timestamp in the display metadata so that clients aware of
3847 private_handle, such as VT, can use this unmodified timestamp.
3848 The camera framework is unaware of this timestamp and cannot change it. */
Jason Lee603176d2017-05-31 11:43:27 -07003849 updateTimeStampInPendingBuffers(pendingRequest.frame_number, capture_time_av);
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07003850
Thierry Strudel3d639192016-09-09 11:52:26 -07003851 // Find channel requiring metadata, meaning internal offline postprocess
3852 // is needed.
3853 // TODO: for now, we don't support two streams requiring metadata at the same time
3854 // (because we are not making copies, and the metadata buffer is not reference counted).
3855 bool internalPproc = false;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003856 for (pendingBufferIterator iter = pendingRequest.buffers.begin();
3857 iter != pendingRequest.buffers.end(); iter++) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003858 if (iter->need_metadata) {
3859 internalPproc = true;
3860 QCamera3ProcessingChannel *channel =
3861 (QCamera3ProcessingChannel *)iter->stream->priv;
3862 channel->queueReprocMetadata(metadata_buf);
Thierry Strudel54dc9782017-02-15 12:12:10 -08003863 if(p_is_metabuf_queued != NULL) {
3864 *p_is_metabuf_queued = true;
3865 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003866 break;
3867 }
3868 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003869 for (auto itr = pendingRequest.internalRequestList.begin();
3870 itr != pendingRequest.internalRequestList.end(); itr++) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003871 if (itr->need_metadata) {
3872 internalPproc = true;
3873 QCamera3ProcessingChannel *channel =
3874 (QCamera3ProcessingChannel *)itr->stream->priv;
3875 channel->queueReprocMetadata(metadata_buf);
3876 break;
3877 }
3878 }
3879
Thierry Strudel54dc9782017-02-15 12:12:10 -08003880 saveExifParams(metadata);
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07003881
3882 bool *enableZsl = nullptr;
3883 if (gExposeEnableZslKey) {
3884 enableZsl = &pendingRequest.enableZsl;
3885 }
3886
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003887 resultMetadata = translateFromHalMetadata(metadata,
Shuzhen Wang181c57b2017-07-21 11:39:44 -07003888 pendingRequest, internalPproc,
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07003889 lastMetadataInBatch, enableZsl);
Thierry Strudel3d639192016-09-09 11:52:26 -07003890
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003891 updateFpsInPreviewBuffer(metadata, pendingRequest.frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003892
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003893 if (pendingRequest.blob_request) {
3894 //Dump tuning metadata if enabled and available
3895 char prop[PROPERTY_VALUE_MAX];
3896 memset(prop, 0, sizeof(prop));
3897 property_get("persist.camera.dumpmetadata", prop, "0");
3898 int32_t enabled = atoi(prop);
3899 if (enabled && metadata->is_tuning_params_valid) {
3900 dumpMetadataToFile(metadata->tuning_params,
3901 mMetaFrameCount,
3902 enabled,
3903 "Snapshot",
3904 frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003905 }
3906 }
3907
3908 if (!internalPproc) {
3909 LOGD("couldn't find need_metadata for this metadata");
3910 // Return metadata buffer
3911 if (free_and_bufdone_meta_buf) {
3912 mMetadataChannel->bufDone(metadata_buf);
3913 free(metadata_buf);
3914 }
3915 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003916
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003917 break;
Thierry Strudel3d639192016-09-09 11:52:26 -07003918 }
3919 }
3920
Chien-Yu Chen3f303522017-05-19 15:21:45 -07003921 mShutterDispatcher.markShutterReady(frame_number, capture_time);
3922
3923 // Try to send out capture result metadata.
3924 handlePendingResultMetadataWithLock(frame_number, resultMetadata);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003925 return;
3926
Thierry Strudel3d639192016-09-09 11:52:26 -07003927done_metadata:
3928 for (pendingRequestIterator i = mPendingRequestsList.begin();
3929 i != mPendingRequestsList.end() ;i++) {
3930 i->pipeline_depth++;
3931 }
3932 LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
3933 unblockRequestIfNecessary();
3934}
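// Illustrative sketch: how the PendingFrameDropInfo entries queued above are
// consumed once the corresponding image buffer arrives (see handleBufferWithLock
// below). This is a condensed paraphrase of logic already in this file:
//
//     for (auto m = mPendingFrameDropList.begin(); m != mPendingFrameDropList.end(); m++) {
//         if (m->stream_ID == streamID && m->frame_number == frame_number) {
//             buffer->status = CAMERA3_BUFFER_STATUS_ERROR;  // pairs with the
//             m = mPendingFrameDropList.erase(m);            // ERROR_BUFFER notify
//             break;                                         // sent above
//         }
//     }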
3935
3936/*===========================================================================
Emilian Peev7650c122017-01-19 08:24:33 -08003937 * FUNCTION : handleDepthDataLocked
3938 *
3939 * DESCRIPTION: Handles incoming depth data
3940 *
3941 * PARAMETERS : @depthData : Depth data
3942 * @frameNumber: Frame number of the incoming depth data
Emilian Peev4e0fe952017-06-30 12:40:09 -07003943 * @valid : Valid flag for the incoming data
Emilian Peev7650c122017-01-19 08:24:33 -08003944 *
3945 * RETURN :
3946 *
3947 *==========================================================================*/
3948void QCamera3HardwareInterface::handleDepthDataLocked(
Emilian Peev4e0fe952017-06-30 12:40:09 -07003949 const cam_depth_data_t &depthData, uint32_t frameNumber, uint8_t valid) {
Emilian Peev7650c122017-01-19 08:24:33 -08003950 uint32_t currentFrameNumber;
3951 buffer_handle_t *depthBuffer;
3952
3953 if (nullptr == mDepthChannel) {
Emilian Peev7650c122017-01-19 08:24:33 -08003954 return;
3955 }
3956
3957 camera3_stream_buffer_t resultBuffer =
3958 {.acquire_fence = -1,
3959 .release_fence = -1,
3960 .status = CAMERA3_BUFFER_STATUS_OK,
3961 .buffer = nullptr,
3962 .stream = mDepthChannel->getStream()};
Emilian Peev7650c122017-01-19 08:24:33 -08003963 do {
3964 depthBuffer = mDepthChannel->getOldestFrame(currentFrameNumber);
3965 if (nullptr == depthBuffer) {
3966 break;
3967 }
3968
Emilian Peev7650c122017-01-19 08:24:33 -08003969 resultBuffer.buffer = depthBuffer;
3970 if (currentFrameNumber == frameNumber) {
Emilian Peev4e0fe952017-06-30 12:40:09 -07003971 if (valid) {
3972 int32_t rc = mDepthChannel->populateDepthData(depthData,
3973 frameNumber);
3974 if (NO_ERROR != rc) {
3975 resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
3976 } else {
3977 resultBuffer.status = CAMERA3_BUFFER_STATUS_OK;
3978 }
Emilian Peev7650c122017-01-19 08:24:33 -08003979 } else {
Emilian Peev4e0fe952017-06-30 12:40:09 -07003980 resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
Emilian Peev7650c122017-01-19 08:24:33 -08003981 }
3982 } else if (currentFrameNumber > frameNumber) {
3983 break;
3984 } else {
3985 camera3_notify_msg_t notify_msg = {.type = CAMERA3_MSG_ERROR,
3986 {{currentFrameNumber, mDepthChannel->getStream(),
3987 CAMERA3_MSG_ERROR_BUFFER}}};
3988 orchestrateNotify(&notify_msg);
3989
3990 LOGE("Depth buffer for frame number: %d is missing "
3991 "returning back!", currentFrameNumber);
3992 resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
3993 }
3994 mDepthChannel->unmapBuffer(currentFrameNumber);
Chien-Yu Chen3f303522017-05-19 15:21:45 -07003995 mOutputBufferDispatcher.markBufferReady(currentFrameNumber, resultBuffer);
Emilian Peev7650c122017-01-19 08:24:33 -08003996 } while (currentFrameNumber < frameNumber);
3997}
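// Worked example of the catch-up loop above (frame numbers are hypothetical):
// suppose depth buffers for frames 10 and 12 are queued and depth metadata
// arrives for frame 12. The loop first pops frame 10, sends a
// CAMERA3_MSG_ERROR_BUFFER notify for it and returns its buffer with
// CAMERA3_BUFFER_STATUS_ERROR, then pops frame 12, fills it via
// populateDepthData() and returns it with CAMERA3_BUFFER_STATUS_OK. A queued
// buffer whose frame number is greater than the incoming one is left in place
// for a later metadata callback.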
3998
3999/*===========================================================================
4000 * FUNCTION : notifyErrorFoPendingDepthData
4001 *
4002 * DESCRIPTION: Returns error for any pending depth buffers
4003 *
4004 * PARAMETERS : depthCh - depth channel that needs to get flushed
4005 *
4006 * RETURN :
4007 *
4008 *==========================================================================*/
4009void QCamera3HardwareInterface::notifyErrorFoPendingDepthData(
4010 QCamera3DepthChannel *depthCh) {
4011 uint32_t currentFrameNumber;
4012 buffer_handle_t *depthBuffer;
4013
4014 if (nullptr == depthCh) {
4015 return;
4016 }
4017
4018 camera3_notify_msg_t notify_msg =
4019 {.type = CAMERA3_MSG_ERROR,
4020 {{0, depthCh->getStream(), CAMERA3_MSG_ERROR_BUFFER}}};
4021 camera3_stream_buffer_t resultBuffer =
4022 {.acquire_fence = -1,
4023 .release_fence = -1,
4024 .buffer = nullptr,
4025 .stream = depthCh->getStream(),
4026 .status = CAMERA3_BUFFER_STATUS_ERROR};
Emilian Peev7650c122017-01-19 08:24:33 -08004027
4028 while (nullptr !=
4029 (depthBuffer = depthCh->getOldestFrame(currentFrameNumber))) {
4030 depthCh->unmapBuffer(currentFrameNumber);
4031
4032 notify_msg.message.error.frame_number = currentFrameNumber;
4033 orchestrateNotify(&notify_msg);
4034
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004035 mOutputBufferDispatcher.markBufferReady(currentFrameNumber, resultBuffer);
Emilian Peev7650c122017-01-19 08:24:33 -08004036 };
4037}
4038
4039/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07004040 * FUNCTION : hdrPlusPerfLock
4041 *
4042 * DESCRIPTION: perf lock for HDR+ using custom intent
4043 *
4044 * PARAMETERS : @metadata_buf: Metadata super_buf pointer
4045 *
4046 * RETURN : None
4047 *
4048 *==========================================================================*/
4049void QCamera3HardwareInterface::hdrPlusPerfLock(
4050 mm_camera_super_buf_t *metadata_buf)
4051{
4052 if (NULL == metadata_buf) {
4053 LOGE("metadata_buf is NULL");
4054 return;
4055 }
4056 metadata_buffer_t *metadata =
4057 (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
4058 int32_t *p_frame_number_valid =
4059 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
4060 uint32_t *p_frame_number =
4061 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
4062
4063 if (p_frame_number_valid == NULL || p_frame_number == NULL) {
4064 LOGE("%s: Invalid metadata", __func__);
4065 return;
4066 }
4067
Wei Wang01385482017-08-03 10:49:34 -07004068 // Acquire a perf lock for 2 seconds after the last HDR frame is captured
4069 constexpr uint32_t HDR_PLUS_PERF_TIME_OUT = 2000;
Thierry Strudel3d639192016-09-09 11:52:26 -07004070 if ((p_frame_number_valid != NULL) && *p_frame_number_valid) {
4071 if ((p_frame_number != NULL) &&
4072 (mLastCustIntentFrmNum == (int32_t)*p_frame_number)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004073 mPerfLockMgr.acquirePerfLock(PERF_LOCK_TAKE_SNAPSHOT, HDR_PLUS_PERF_TIME_OUT);
Thierry Strudel3d639192016-09-09 11:52:26 -07004074 }
4075 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004076}
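// A minimal sketch of the timed perf-lock pattern used above; the timeout is
// in milliseconds and is expected to let the lock lapse on its own, so no
// explicit releasePerfLock() is issued on this path:
//
//     constexpr uint32_t HDR_PLUS_PERF_TIME_OUT = 2000;  // ms
//     mPerfLockMgr.acquirePerfLock(PERF_LOCK_TAKE_SNAPSHOT, HDR_PLUS_PERF_TIME_OUT);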
4077
4078/*===========================================================================
4079 * FUNCTION : handleInputBufferWithLock
4080 *
4081 * DESCRIPTION: Handles input buffer and shutter callback with mMutex lock held.
4082 *
4083 * PARAMETERS : @frame_number: frame number of the input buffer
4084 *
4085 * RETURN :
4086 *
4087 *==========================================================================*/
4088void QCamera3HardwareInterface::handleInputBufferWithLock(uint32_t frame_number)
4089{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004090 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_IN_BUF_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07004091 pendingRequestIterator i = mPendingRequestsList.begin();
4092 while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
4093 i++;
4094 }
4095 if (i != mPendingRequestsList.end() && i->input_buffer) {
4096 //found the right request
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004097 CameraMetadata settings;
4098 nsecs_t capture_time = systemTime(CLOCK_MONOTONIC);
4099 if(i->settings) {
4100 settings = i->settings;
4101 if (settings.exists(ANDROID_SENSOR_TIMESTAMP)) {
4102 capture_time = settings.find(ANDROID_SENSOR_TIMESTAMP).data.i64[0];
Thierry Strudel3d639192016-09-09 11:52:26 -07004103 } else {
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004104 LOGE("No timestamp in input settings! Using current one.");
Thierry Strudel3d639192016-09-09 11:52:26 -07004105 }
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004106 } else {
4107 LOGE("Input settings missing!");
Thierry Strudel3d639192016-09-09 11:52:26 -07004108 }
4109
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004110 mShutterDispatcher.markShutterReady(frame_number, capture_time);
4111 LOGD("Input request metadata notify frame_number = %u, capture_time = %llu",
4112 i->frame_number, capture_time);
Thierry Strudel3d639192016-09-09 11:52:26 -07004113
4114 camera3_capture_result result;
4115 memset(&result, 0, sizeof(camera3_capture_result));
4116 result.frame_number = frame_number;
4117 result.result = i->settings;
4118 result.input_buffer = i->input_buffer;
4119 result.partial_result = PARTIAL_RESULT_COUNT;
4120
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004121 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -07004122 LOGD("Input request metadata and input buffer frame_number = %u",
4123 i->frame_number);
4124 i = erasePendingRequest(i);
Chien-Yu Chen588cc852017-06-23 18:39:51 -07004125
4126 // Dispatch result metadata that may be just unblocked by this reprocess result.
4127 dispatchResultMetadataWithLock(frame_number, /*isLiveRequest*/false);
Thierry Strudel3d639192016-09-09 11:52:26 -07004128 } else {
4129 LOGE("Could not find input request for frame number %d", frame_number);
4130 }
4131}
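// A minimal sketch of how a reprocess (input-buffer) request is completed
// above: the shutter timestamp is taken from ANDROID_SENSOR_TIMESTAMP in the
// request settings when present, and the settings themselves are echoed back
// as the result metadata.
//
//     CameraMetadata s;
//     nsecs_t ts = systemTime(CLOCK_MONOTONIC);              // fallback
//     if (i->settings) {
//         s = i->settings;                                   // assignment copies the raw buffer
//         if (s.exists(ANDROID_SENSOR_TIMESTAMP))
//             ts = s.find(ANDROID_SENSOR_TIMESTAMP).data.i64[0];
//     }
//     mShutterDispatcher.markShutterReady(frame_number, ts);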
4132
4133/*===========================================================================
4134 * FUNCTION : handleBufferWithLock
4135 *
4136 * DESCRIPTION: Handles image buffer callback with mMutex lock held.
4137 *
4138 * PARAMETERS : @buffer: image buffer for the callback
4139 * @frame_number: frame number of the image buffer
4140 *
4141 * RETURN :
4142 *
4143 *==========================================================================*/
4144void QCamera3HardwareInterface::handleBufferWithLock(
4145 camera3_stream_buffer_t *buffer, uint32_t frame_number)
4146{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004147 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_BUF_LKD);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004148
4149 if (buffer->stream->format == HAL_PIXEL_FORMAT_BLOB) {
4150 mPerfLockMgr.releasePerfLock(PERF_LOCK_TAKE_SNAPSHOT);
4151 }
4152
Thierry Strudel3d639192016-09-09 11:52:26 -07004153 /* Nothing to be done during error state */
4154 if ((ERROR == mState) || (DEINIT == mState)) {
4155 return;
4156 }
4157 if (mFlushPerf) {
4158 handleBuffersDuringFlushLock(buffer);
4159 return;
4160 }
4161 //not in flush
4162 // If the frame number doesn't exist in the pending request list,
4163 // directly send the buffer to the frameworks, and update pending buffers map
4164 // Otherwise, book-keep the buffer.
4165 pendingRequestIterator i = mPendingRequestsList.begin();
4166 while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
4167 i++;
4168 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004169
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004170 if (i != mPendingRequestsList.end()) {
Thierry Strudel3d639192016-09-09 11:52:26 -07004171 if (i->input_buffer) {
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004172 // For a reprocessing request, try to send out result metadata.
4173 handlePendingResultMetadataWithLock(frame_number, nullptr);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004174 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004175 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004176
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004177 // Check if this frame was dropped.
4178 for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
4179 m != mPendingFrameDropList.end(); m++) {
4180 QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
4181 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
4182 if((m->stream_ID == streamID) && (m->frame_number==frame_number) ) {
4183 buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
4184 LOGD("Stream STATUS_ERROR frame_number=%d, streamID=%d",
4185 frame_number, streamID);
4186 m = mPendingFrameDropList.erase(m);
4187 break;
4188 }
4189 }
4190
Binhao Lin09245482017-08-31 18:25:29 -07004191 // WAR for encoder avtimer timestamp issue
4192 QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
4193 if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask() &&
4194 m_bAVTimerEnabled) {
4195 for (auto req = mPendingBuffersMap.mPendingBuffersInRequest.begin();
4196 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
4197 if (req->frame_number != frame_number)
4198 continue;
4199 if(req->av_timestamp == 0) {
4200 buffer->status |= CAMERA3_BUFFER_STATUS_ERROR;
4201 }
4202 else {
4203 struct private_handle_t *priv_handle =
4204 (struct private_handle_t *) (*(buffer->buffer));
4205 setMetaData(priv_handle, SET_VT_TIMESTAMP, &(req->av_timestamp));
4206 }
4207 }
4208 }
4209
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004210 buffer->status |= mPendingBuffersMap.getBufErrStatus(buffer->buffer);
4211 LOGH("result frame_number = %d, buffer = %p",
4212 frame_number, buffer->buffer);
4213
4214 mPendingBuffersMap.removeBuf(buffer->buffer);
4215 mOutputBufferDispatcher.markBufferReady(frame_number, *buffer);
4216
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004217 if (mPreviewStarted == false) {
4218 QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
4219 if ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask()) {
Chien-Yu Chen509314b2017-04-07 15:27:55 -07004220 logEaselEvent("EASEL_STARTUP_LATENCY", "Preview Started");
4221
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004222 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
4223 mPerfLockMgr.releasePerfLock(PERF_LOCK_OPEN_CAMERA);
4224 mPreviewStarted = true;
4225
4226 // Set power hint for preview
4227 mPerfLockMgr.acquirePerfLock(PERF_LOCK_POWERHINT_ENCODE, 0);
4228 }
4229 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004230}
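// Sketch of the AV-timer workaround applied above for video buffers: when the
// AV timer is enabled, the unmodified timestamp recorded at request time
// (req->av_timestamp) is stamped into the buffer's private handle so clients
// such as VT can read it; a missing timestamp marks the buffer as bad.
//
//     struct private_handle_t *ph = (struct private_handle_t *)(*(buffer->buffer));
//     if (req->av_timestamp == 0)
//         buffer->status |= CAMERA3_BUFFER_STATUS_ERROR;
//     else
//         setMetaData(ph, SET_VT_TIMESTAMP, &(req->av_timestamp));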
4231
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004232void QCamera3HardwareInterface::handlePendingResultMetadataWithLock(uint32_t frameNumber,
Chien-Yu Chenbc730232017-07-12 14:49:55 -07004233 camera_metadata_t *resultMetadata)
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004234{
4235 // Find the pending request for this result metadata.
4236 auto requestIter = mPendingRequestsList.begin();
4237 while (requestIter != mPendingRequestsList.end() && requestIter->frame_number != frameNumber) {
4238 requestIter++;
4239 }
4240
4241 if (requestIter == mPendingRequestsList.end()) {
4242 ALOGE("%s: Cannot find a pending request for frame number %u.", __FUNCTION__, frameNumber);
4243 return;
4244 }
4245
4246 // Update the result metadata
4247 requestIter->resultMetadata = resultMetadata;
4248
4249 // Check what type of request this is.
4250 bool liveRequest = false;
4251 if (requestIter->hdrplus) {
Chien-Yu Chen9264fe92017-04-29 03:28:46 +00004252 // HDR+ request doesn't have partial results.
4253 requestIter->partial_result_cnt = PARTIAL_RESULT_COUNT;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004254 } else if (requestIter->input_buffer != nullptr) {
4255 // Reprocessing request result is the same as settings.
4256 requestIter->resultMetadata = requestIter->settings;
4257 // Reprocessing request doesn't have partial results.
4258 requestIter->partial_result_cnt = PARTIAL_RESULT_COUNT;
4259 } else {
4260 liveRequest = true;
Chien-Yu Chen0a921f92017-08-27 17:25:33 -07004261 requestIter->partial_result_cnt = PARTIAL_RESULT_COUNT;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004262 mPendingLiveRequest--;
4263
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07004264 {
Chien-Yu Chen77ccd022017-06-23 12:00:36 -07004265 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07004266 // For a live request, send the metadata to HDR+ client.
4267 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
4268 gHdrPlusClient->notifyFrameMetadata(frameNumber, *resultMetadata,
4269 requestIter->partial_result_cnt == PARTIAL_RESULT_COUNT);
4270 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004271 }
4272 }
4273
Chien-Yu Chenbc730232017-07-12 14:49:55 -07004274 // Remove the lens shading map if it's not requested.
4275 if (requestIter->requestedLensShadingMapMode == ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF) {
4276 CameraMetadata metadata;
4277 metadata.acquire(resultMetadata);
4278 metadata.erase(ANDROID_STATISTICS_LENS_SHADING_MAP);
4279 metadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE,
4280 &requestIter->requestedLensShadingMapMode, 1);
4281
4282 requestIter->resultMetadata = metadata.release();
4283 }
4284
Chien-Yu Chen588cc852017-06-23 18:39:51 -07004285 dispatchResultMetadataWithLock(frameNumber, liveRequest);
4286}
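// A minimal sketch of the acquire/erase/update/release pattern used above to
// strip the lens shading map. acquire() takes ownership of the raw buffer, so
// the original pointer must not be freed separately; release() hands back a
// (possibly reallocated) raw camera_metadata_t that replaces it.
//
//     CameraMetadata m;
//     m.acquire(resultMetadata);                      // owns the raw buffer now
//     m.erase(ANDROID_STATISTICS_LENS_SHADING_MAP);
//     uint8_t off = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
//     m.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &off, 1);
//     requestIter->resultMetadata = m.release();      // raw pointer handed back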
4287
4288void QCamera3HardwareInterface::dispatchResultMetadataWithLock(uint32_t frameNumber,
4289 bool isLiveRequest) {
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004290 // The pending requests are ordered by increasing frame numbers. The result metadata are ready
4291 // to be sent if all previous pending requests are ready to be sent.
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004292 bool readyToSend = true;
4293
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004294 // Iterate through the pending requests to send out result metadata that are ready. Also if
4295 // this result metadata belongs to a live request, notify errors for previous live requests
4296 // that don't have result metadata yet.
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004297 auto iter = mPendingRequestsList.begin();
4298 while (iter != mPendingRequestsList.end()) {
4299 // Check if current pending request is ready. If it's not ready, the following pending
4300 // requests are also not ready.
4301 if (readyToSend && iter->resultMetadata == nullptr) {
4302 readyToSend = false;
4303 }
4304
4305 bool thisLiveRequest = iter->hdrplus == false && iter->input_buffer == nullptr;
4306
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004307 camera3_capture_result_t result = {};
4308 result.frame_number = iter->frame_number;
4309 result.result = iter->resultMetadata;
4310 result.partial_result = iter->partial_result_cnt;
4311
4312 // If this pending buffer has result metadata, we may be able to send out shutter callback
4313 // and result metadata.
4314 if (iter->resultMetadata != nullptr) {
4315 if (!readyToSend) {
4316 // If any of the previous pending request is not ready, this pending request is
4317 // also not ready to send in order to keep shutter callbacks and result metadata
4318 // in order.
4319 iter++;
4320 continue;
4321 }
Chien-Yu Chen588cc852017-06-23 18:39:51 -07004322 } else if (iter->frame_number < frameNumber && isLiveRequest && thisLiveRequest) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004323 // If the result metadata belongs to a live request, notify errors for previous pending
4324 // live requests.
4325 mPendingLiveRequest--;
4326
4327 CameraMetadata dummyMetadata;
4328 dummyMetadata.update(ANDROID_REQUEST_ID, &(iter->request_id), 1);
4329 result.result = dummyMetadata.release();
4330
4331 notifyError(iter->frame_number, CAMERA3_MSG_ERROR_RESULT);
Shuzhen Wang1ee712a2017-03-22 17:51:26 -07004332
4333 // partial_result should be PARTIAL_RESULT_COUNT in case of
4334 // ERROR_RESULT.
4335 iter->partial_result_cnt = PARTIAL_RESULT_COUNT;
4336 result.partial_result = PARTIAL_RESULT_COUNT;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004337 } else {
4338 iter++;
4339 continue;
4340 }
4341
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004342 result.output_buffers = nullptr;
4343 result.num_output_buffers = 0;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004344 orchestrateResult(&result);
4345
4346 // For reprocessing, result metadata is the same as settings so do not free it here to
4347 // avoid double free.
4348 if (result.result != iter->settings) {
4349 free_camera_metadata((camera_metadata_t *)result.result);
4350 }
4351 iter->resultMetadata = nullptr;
4352 iter = erasePendingRequest(iter);
4353 }
4354
Chien-Yu Chen588cc852017-06-23 18:39:51 -07004355 if (isLiveRequest) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004356 for (auto &iter : mPendingRequestsList) {
4357 // Increment pipeline depth for the following pending requests.
4358 if (iter.frame_number > frameNumber) {
4359 iter.pipeline_depth++;
4360 }
4361 }
4362 }
4363
4364 unblockRequestIfNecessary();
4365}
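// Worked example of the in-order dispatch rule above (frame numbers are
// hypothetical): with pending requests {10, 11, 12}, if 11 and 12 already have
// result metadata but 10 does not, nothing is sent because readyToSend turns
// false at 10. Once 10's metadata arrives, a later call walks the list and
// emits 10, 11 and 12 in order, keeping shutter callbacks and result metadata
// monotonic with frame number.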
4366
Thierry Strudel3d639192016-09-09 11:52:26 -07004367/*===========================================================================
4368 * FUNCTION : unblockRequestIfNecessary
4369 *
4370 * DESCRIPTION: Unblock capture_request if max_buffer hasn't been reached. Note
4371 * that mMutex is held when this function is called.
4372 *
4373 * PARAMETERS :
4374 *
4375 * RETURN :
4376 *
4377 *==========================================================================*/
4378void QCamera3HardwareInterface::unblockRequestIfNecessary()
4379{
4380 // Unblock process_capture_request
4381 pthread_cond_signal(&mRequestCond);
4382}
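// Sketch of the consumer side this signal pairs with; the exact predicate is
// an assumption (the real check lives in processCaptureRequest), but the shape
// is the usual condition-variable wait under mMutex:
//
//     pthread_mutex_lock(&mMutex);
//     while (mPendingLiveRequest >= maxInFlightRequests)   // hypothetical bound
//         pthread_cond_wait(&mRequestCond, &mMutex);       // woken by the signal above
//     /* ... enqueue the new capture request ... */
//     pthread_mutex_unlock(&mMutex);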
4383
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004384/*===========================================================================
4385 * FUNCTION : isHdrSnapshotRequest
4386 *
4387 * DESCRIPTION: Function to determine if the request is for a HDR snapshot
4388 *
4389 * PARAMETERS : camera3 request structure
4390 *
4391 * RETURN : boolean decision variable
4392 *
4393 *==========================================================================*/
4394bool QCamera3HardwareInterface::isHdrSnapshotRequest(camera3_capture_request *request)
4395{
4396 if (request == NULL) {
4397 LOGE("Invalid request handle");
4398 assert(0);
4399 return false;
4400 }
4401
4402 if (!mForceHdrSnapshot) {
4403 CameraMetadata frame_settings;
4404 frame_settings = request->settings;
4405
4406 if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
4407 uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
4408 if (metaMode != ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
4409 return false;
4410 }
4411 } else {
4412 return false;
4413 }
4414
4415 if (frame_settings.exists(ANDROID_CONTROL_SCENE_MODE)) {
4416 uint8_t fwk_sceneMode = frame_settings.find(ANDROID_CONTROL_SCENE_MODE).data.u8[0];
4417 if (fwk_sceneMode != ANDROID_CONTROL_SCENE_MODE_HDR) {
4418 return false;
4419 }
4420 } else {
4421 return false;
4422 }
4423 }
4424
4425 for (uint32_t i = 0; i < request->num_output_buffers; i++) {
4426 if (request->output_buffers[i].stream->format
4427 == HAL_PIXEL_FORMAT_BLOB) {
4428 return true;
4429 }
4430 }
4431
4432 return false;
4433}
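// Minimal sketch of the settings a request needs (absent mForceHdrSnapshot) to
// be treated as an HDR snapshot by the check above; the CameraMetadata object
// here stands in for the framework-supplied settings:
//
//     uint8_t mode  = ANDROID_CONTROL_MODE_USE_SCENE_MODE;
//     uint8_t scene = ANDROID_CONTROL_SCENE_MODE_HDR;
//     settings.update(ANDROID_CONTROL_MODE, &mode, 1);
//     settings.update(ANDROID_CONTROL_SCENE_MODE, &scene, 1);
//     // ...plus at least one HAL_PIXEL_FORMAT_BLOB output buffer in the request.
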
4434/*===========================================================================
4435 * FUNCTION : orchestrateRequest
4436 *
4437 * DESCRIPTION: Orchestrates a capture request from camera service
4438 *
4439 * PARAMETERS :
4440 * @request : request from framework to process
4441 *
4442 * RETURN : Error status codes
4443 *
4444 *==========================================================================*/
4445int32_t QCamera3HardwareInterface::orchestrateRequest(
4446 camera3_capture_request_t *request)
4447{
4448
4449 uint32_t originalFrameNumber = request->frame_number;
4450 uint32_t originalOutputCount = request->num_output_buffers;
4451 const camera_metadata_t *original_settings = request->settings;
4452 List<InternalRequest> internallyRequestedStreams;
4453 List<InternalRequest> emptyInternalList;
4454
4455 if (isHdrSnapshotRequest(request) && request->input_buffer == NULL) {
4456 LOGD("Framework requested:%d buffers in HDR snapshot", request->num_output_buffers);
4457 uint32_t internalFrameNumber;
4458 CameraMetadata modified_meta;
4459
4460
4461 /* Add Blob channel to list of internally requested streams */
4462 for (uint32_t i = 0; i < request->num_output_buffers; i++) {
4463 if (request->output_buffers[i].stream->format
4464 == HAL_PIXEL_FORMAT_BLOB) {
4465 InternalRequest streamRequested;
4466 streamRequested.meteringOnly = 1;
4467 streamRequested.need_metadata = 0;
4468 streamRequested.stream = request->output_buffers[i].stream;
4469 internallyRequestedStreams.push_back(streamRequested);
4470 }
4471 }
4472 request->num_output_buffers = 0;
4473 auto itr = internallyRequestedStreams.begin();
4474
4475 /* Modify setting to set compensation */
4476 modified_meta = request->settings;
4477 int32_t expCompensation = GB_HDR_HALF_STEP_EV;
4478 uint8_t aeLock = 1;
4479 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4480 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4481 camera_metadata_t *modified_settings = modified_meta.release();
4482 request->settings = modified_settings;
4483
4484 /* Capture Settling & -2x frame */
4485 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4486 request->frame_number = internalFrameNumber;
4487 processCaptureRequest(request, internallyRequestedStreams);
4488
4489 request->num_output_buffers = originalOutputCount;
4490 _orchestrationDb.allocStoreInternalFrameNumber(originalFrameNumber, internalFrameNumber);
4491 request->frame_number = internalFrameNumber;
4492 processCaptureRequest(request, emptyInternalList);
4493 request->num_output_buffers = 0;
4494
4495 modified_meta = modified_settings;
4496 expCompensation = 0;
4497 aeLock = 1;
4498 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4499 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4500 modified_settings = modified_meta.release();
4501 request->settings = modified_settings;
4502
4503 /* Capture Settling & 0X frame */
4504
4505 itr = internallyRequestedStreams.begin();
4506 if (itr == internallyRequestedStreams.end()) {
4507 LOGE("Error Internally Requested Stream list is empty");
4508 assert(0);
4509 } else {
4510 itr->need_metadata = 0;
4511 itr->meteringOnly = 1;
4512 }
4513
4514 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4515 request->frame_number = internalFrameNumber;
4516 processCaptureRequest(request, internallyRequestedStreams);
4517
4518 itr = internallyRequestedStreams.begin();
4519 if (itr == internallyRequestedStreams.end()) {
4520 ALOGE("Error Internally Requested Stream list is empty");
4521 assert(0);
4522 } else {
4523 itr->need_metadata = 1;
4524 itr->meteringOnly = 0;
4525 }
4526
4527 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4528 request->frame_number = internalFrameNumber;
4529 processCaptureRequest(request, internallyRequestedStreams);
4530
4531 /* Capture 2X frame*/
4532 modified_meta = modified_settings;
4533 expCompensation = GB_HDR_2X_STEP_EV;
4534 aeLock = 1;
4535 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4536 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4537 modified_settings = modified_meta.release();
4538 request->settings = modified_settings;
4539
4540 itr = internallyRequestedStreams.begin();
4541 if (itr == internallyRequestedStreams.end()) {
4542 ALOGE("Error Internally Requested Stream list is empty");
4543 assert(0);
4544 } else {
4545 itr->need_metadata = 0;
4546 itr->meteringOnly = 1;
4547 }
4548 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4549 request->frame_number = internalFrameNumber;
4550 processCaptureRequest(request, internallyRequestedStreams);
4551
4552 itr = internallyRequestedStreams.begin();
4553 if (itr == internallyRequestedStreams.end()) {
4554 ALOGE("Error Internally Requested Stream list is empty");
4555 assert(0);
4556 } else {
4557 itr->need_metadata = 1;
4558 itr->meteringOnly = 0;
4559 }
4560
4561 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4562 request->frame_number = internalFrameNumber;
4563 processCaptureRequest(request, internallyRequestedStreams);
4564
4565
4566 /* Capture 2X on original streaming config*/
4567 internallyRequestedStreams.clear();
4568
4569 /* Restore original settings pointer */
4570 request->settings = original_settings;
4571 } else {
4572 uint32_t internalFrameNumber;
4573 _orchestrationDb.allocStoreInternalFrameNumber(request->frame_number, internalFrameNumber);
4574 request->frame_number = internalFrameNumber;
4575 return processCaptureRequest(request, internallyRequestedStreams);
4576 }
4577
4578 return NO_ERROR;
4579}
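// Condensed view of the bracketing sequence issued above for an HDR snapshot
// (each step is one internal processCaptureRequest call; AE stays locked and
// the EV values come from the GB_HDR_* constants):
//
//     1. metering-only settling frame at GB_HDR_HALF_STEP_EV
//     2. the framework-visible capture at the same EV, mapped to the original
//        frame number via allocStoreInternalFrameNumber()
//     3. settling frame at EV 0, then an internal BLOB capture needing metadata
//     4. settling frame at GB_HDR_2X_STEP_EV, then an internal BLOB capture
//        needing metadata; finally the original settings pointer is restored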
4580
4581/*===========================================================================
4582 * FUNCTION : orchestrateResult
4583 *
4584 * DESCRIPTION: Orchestrates a capture result to camera service
4585 *
4586 * PARAMETERS :
4587 * @result : capture result to send to the framework
4588 *
4589 * RETURN :
4590 *
4591 *==========================================================================*/
4592void QCamera3HardwareInterface::orchestrateResult(
4593 camera3_capture_result_t *result)
4594{
4595 uint32_t frameworkFrameNumber;
4596 int32_t rc = _orchestrationDb.getFrameworkFrameNumber(result->frame_number,
4597 frameworkFrameNumber);
4598 if (rc != NO_ERROR) {
4599 LOGE("Cannot find translated frameworkFrameNumber");
4600 assert(0);
4601 } else {
4602 if (frameworkFrameNumber == EMPTY_FRAMEWORK_FRAME_NUMBER) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08004603 LOGD("Internal Request drop the result");
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004604 } else {
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004605 if (result->result != NULL) {
Binhao Lin299ffc92017-04-27 11:22:47 -07004606 camera_metadata_t *metadata = const_cast<camera_metadata_t*>(result->result);
4607 camera_metadata_entry_t entry;
4608 int ret = find_camera_metadata_entry(metadata, ANDROID_SYNC_FRAME_NUMBER, &entry);
4609 if (ret == OK) {
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004610 int64_t sync_frame_number = frameworkFrameNumber;
Binhao Lin299ffc92017-04-27 11:22:47 -07004611 ret = update_camera_metadata_entry(metadata, entry.index, &sync_frame_number, 1, &entry);
4612 if (ret != OK)
4613 LOGE("Update ANDROID_SYNC_FRAME_NUMBER Error!");
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004614 }
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004615 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004616 result->frame_number = frameworkFrameNumber;
4617 mCallbackOps->process_capture_result(mCallbackOps, result);
4618 }
4619 }
4620}
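// Sketch of the translation performed above: results always carry the internal
// frame number, which is mapped back to the framework's number before the
// callback; if the metadata exposes ANDROID_SYNC_FRAME_NUMBER it is rewritten
// to the framework number as well, so apps never observe internal numbering.
//
//     uint32_t fwkFrame;
//     if (_orchestrationDb.getFrameworkFrameNumber(result->frame_number, fwkFrame) == NO_ERROR
//             && fwkFrame != EMPTY_FRAMEWORK_FRAME_NUMBER) {
//         result->frame_number = fwkFrame;
//         mCallbackOps->process_capture_result(mCallbackOps, result);
//     }   // EMPTY_FRAMEWORK_FRAME_NUMBER marks purely internal frames, which are dropped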
4621
4622/*===========================================================================
4623 * FUNCTION : orchestrateNotify
4624 *
4625 * DESCRIPTION: Orchestrates a notify to camera service
4626 *
4627 * PARAMETERS :
4628 * @notify_msg : notify message to send to the framework
4629 *
4630 * RETURN :
4631 *
4632 *==========================================================================*/
4633void QCamera3HardwareInterface::orchestrateNotify(camera3_notify_msg_t *notify_msg)
4634{
4635 uint32_t frameworkFrameNumber;
4636 uint32_t internalFrameNumber = notify_msg->message.shutter.frame_number;
Thierry Strudel2896d122017-02-23 19:18:03 -08004637 int32_t rc = NO_ERROR;
4638
4639 rc = _orchestrationDb.getFrameworkFrameNumber(internalFrameNumber,
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004640 frameworkFrameNumber);
Thierry Strudel2896d122017-02-23 19:18:03 -08004641
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004642 if (rc != NO_ERROR) {
Thierry Strudel2896d122017-02-23 19:18:03 -08004643 if (notify_msg->message.error.error_code == CAMERA3_MSG_ERROR_DEVICE) {
4644 LOGD("Sending CAMERA3_MSG_ERROR_DEVICE to framework");
4645 frameworkFrameNumber = 0;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004646 } else {
Thierry Strudel2896d122017-02-23 19:18:03 -08004647 LOGE("Cannot find translated frameworkFrameNumber");
4648 assert(0);
4649 return;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004650 }
4651 }
Thierry Strudel2896d122017-02-23 19:18:03 -08004652
4653 if (frameworkFrameNumber == EMPTY_FRAMEWORK_FRAME_NUMBER) {
4654 LOGD("Internal Request drop the notifyCb");
4655 } else {
4656 notify_msg->message.shutter.frame_number = frameworkFrameNumber;
4657 mCallbackOps->notify(mCallbackOps, notify_msg);
4658 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004659}
4660
4661/*===========================================================================
4662 * FUNCTION : FrameNumberRegistry
4663 *
4664 * DESCRIPTION: Constructor
4665 *
4666 * PARAMETERS :
4667 *
4668 * RETURN :
4669 *
4670 *==========================================================================*/
4671FrameNumberRegistry::FrameNumberRegistry()
4672{
4673 _nextFreeInternalNumber = INTERNAL_FRAME_STARTING_NUMBER;
4674}
4675
4676/*===========================================================================
4677 * FUNCTION : ~FrameNumberRegistry
4678 *
4679 * DESCRIPTION: Destructor
4680 *
4681 * PARAMETERS :
4682 *
4683 * RETURN :
4684 *
4685 *==========================================================================*/
4686FrameNumberRegistry::~FrameNumberRegistry()
4687{
4688}
4689
4690/*===========================================================================
4691 * FUNCTION : PurgeOldEntriesLocked
4692 *
4693 * DESCRIPTION: Maintenance function that triggers the LRU cleanup mechanism
4694 *
4695 * PARAMETERS :
4696 *
4697 * RETURN : NONE
4698 *
4699 *==========================================================================*/
4700void FrameNumberRegistry::purgeOldEntriesLocked()
4701{
4702 while (_register.begin() != _register.end()) {
4703 auto itr = _register.begin();
4704 if (itr->first < (_nextFreeInternalNumber - FRAME_REGISTER_LRU_SIZE)) {
4705 _register.erase(itr);
4706 } else {
4707 return;
4708 }
4709 }
4710}
4711
4712/*===========================================================================
4713 * FUNCTION : allocStoreInternalFrameNumber
4714 *
4715 * DESCRIPTION: Method to record a framework request and associate a newly
4716 * generated internal frame number with it
4717 *
4718 * PARAMETERS :
4719 * @frameworkFrameNumber: Identifier given by the framework
4720 * @internalFrameNumber : Output parameter holding the newly generated
4721 * internal frame number
4722 *
4723 * RETURN : Error code
4724 *
4725 *==========================================================================*/
4726int32_t FrameNumberRegistry::allocStoreInternalFrameNumber(uint32_t frameworkFrameNumber,
4727 uint32_t &internalFrameNumber)
4728{
4729 Mutex::Autolock lock(mRegistryLock);
4730 internalFrameNumber = _nextFreeInternalNumber++;
4731 LOGD("Storing ff#:%d, with internal:%d", frameworkFrameNumber, internalFrameNumber);
4732 _register.insert(std::pair<uint32_t,uint32_t>(internalFrameNumber, frameworkFrameNumber));
4733 purgeOldEntriesLocked();
4734 return NO_ERROR;
4735}
4736
4737/*===========================================================================
4738 * FUNCTION : generateStoreInternalFrameNumber
4739 *
4740 * DESCRIPTION: Method to associate a new internal request number independent
4741 * of any association with framework requests
4742 *
4743 * PARAMETERS :
4744 * @internalFrameNumber: Output parameter holding the newly generated internal frame number
4745 *
4746 *
4747 * RETURN : Error code
4748 *
4749 *==========================================================================*/
4750int32_t FrameNumberRegistry::generateStoreInternalFrameNumber(uint32_t &internalFrameNumber)
4751{
4752 Mutex::Autolock lock(mRegistryLock);
4753 internalFrameNumber = _nextFreeInternalNumber++;
4754 LOGD("Generated internal framenumber:%d", internalFrameNumber);
4755 _register.insert(std::pair<uint32_t,uint32_t>(internalFrameNumber, EMPTY_FRAMEWORK_FRAME_NUMBER));
4756 purgeOldEntriesLocked();
4757 return NO_ERROR;
4758}
4759
4760/*===========================================================================
4761 * FUNCTION : getFrameworkFrameNumber
4762 *
4763 * DESCRIPTION: Method to query the framework frame number given an internal one
4764 *
4765 * PARAMETERS :
4766 * @internalFrame#: Internal reference
4767 * @frameworkframenumber: Output parameter holding framework frame entry
4768 *
4769 * RETURN : Error code
4770 *
4771 *==========================================================================*/
4772int32_t FrameNumberRegistry::getFrameworkFrameNumber(uint32_t internalFrameNumber,
4773 uint32_t &frameworkFrameNumber)
4774{
4775 Mutex::Autolock lock(mRegistryLock);
4776 auto itr = _register.find(internalFrameNumber);
4777 if (itr == _register.end()) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08004778 LOGE("Cannot find internal#: %d", internalFrameNumber);
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004779 return -ENOENT;
4780 }
4781
4782 frameworkFrameNumber = itr->second;
4783 purgeOldEntriesLocked();
4784 return NO_ERROR;
4785}
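// Worked example of the registry round-trip (numbers are hypothetical): a
// framework request 37 is stored via allocStoreInternalFrameNumber(), yielding
// e.g. internal 1024; HDR bracketing frames use generateStoreInternalFrameNumber(),
// which maps to EMPTY_FRAMEWORK_FRAME_NUMBER so their results are dropped.
// getFrameworkFrameNumber(1024) later returns 37 for the callback path, and
// purgeOldEntriesLocked() trims entries older than
// _nextFreeInternalNumber - FRAME_REGISTER_LRU_SIZE on every access.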
Thierry Strudel3d639192016-09-09 11:52:26 -07004786
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004787status_t QCamera3HardwareInterface::fillPbStreamConfig(
Chien-Yu Chen14d3e392017-07-10 18:27:05 -07004788 pbcamera::StreamConfiguration *config, uint32_t pbStreamId, QCamera3Channel *channel,
4789 uint32_t streamIndex) {
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004790 if (config == nullptr) {
4791 LOGE("%s: config is null", __FUNCTION__);
4792 return BAD_VALUE;
4793 }
4794
4795 if (channel == nullptr) {
4796 LOGE("%s: channel is null", __FUNCTION__);
4797 return BAD_VALUE;
4798 }
4799
4800 QCamera3Stream *stream = channel->getStreamByIndex(streamIndex);
4801 if (stream == nullptr) {
4802 LOGE("%s: Failed to get stream %d in channel.", __FUNCTION__, streamIndex);
4803 return NAME_NOT_FOUND;
4804 }
4805
4806 const cam_stream_info_t* streamInfo = stream->getStreamInfo();
4807 if (streamInfo == nullptr) {
4808 LOGE("%s: Failed to get stream info for stream %d in channel.", __FUNCTION__, streamIndex);
4809 return NAME_NOT_FOUND;
4810 }
4811
4812 config->id = pbStreamId;
4813 config->image.width = streamInfo->dim.width;
4814 config->image.height = streamInfo->dim.height;
4815 config->image.padding = 0;
Chien-Yu Chen14d3e392017-07-10 18:27:05 -07004816
4817 int bytesPerPixel = 0;
4818
4819 switch (streamInfo->fmt) {
4820 case CAM_FORMAT_YUV_420_NV21:
4821 config->image.format = HAL_PIXEL_FORMAT_YCrCb_420_SP;
4822 bytesPerPixel = 1;
4823 break;
4824 case CAM_FORMAT_YUV_420_NV12:
4825 case CAM_FORMAT_YUV_420_NV12_VENUS:
4826 config->image.format = HAL_PIXEL_FORMAT_YCbCr_420_SP;
4827 bytesPerPixel = 1;
4828 break;
4829 default:
4830 ALOGE("%s: Stream format %d not supported.", __FUNCTION__, streamInfo->fmt);
4831 return BAD_VALUE;
4832 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004833
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004834 uint32_t totalPlaneSize = 0;
4835
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004836 // Fill plane information.
4837 for (uint32_t i = 0; i < streamInfo->buf_planes.plane_info.num_planes; i++) {
4838 pbcamera::PlaneConfiguration plane;
Chien-Yu Chen14d3e392017-07-10 18:27:05 -07004839 plane.stride = streamInfo->buf_planes.plane_info.mp[i].stride * bytesPerPixel;
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004840 plane.scanline = streamInfo->buf_planes.plane_info.mp[i].scanline;
4841 config->image.planes.push_back(plane);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004842
4843 totalPlaneSize += (plane.stride * plane.scanline);
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004844 }
4845
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004846 config->image.padding = streamInfo->buf_planes.plane_info.frame_len - totalPlaneSize;
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004847 return OK;
4848}
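// Worked example of the padding computation above, with hypothetical NV21
// numbers: a 1920x1080 stream with a Y plane of stride 1920 / scanline 1088
// and a chroma plane of stride 1920 / scanline 544 gives
//     totalPlaneSize = 1920*1088 + 1920*544 = 3,133,440 bytes,
// so config->image.padding = frame_len - 3,133,440; a frame_len equal to that
// sum yields zero padding, while any backend alignment slack shows up here.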
4849
Thierry Strudel3d639192016-09-09 11:52:26 -07004850/*===========================================================================
4851 * FUNCTION : processCaptureRequest
4852 *
4853 * DESCRIPTION: process a capture request from camera service
4854 *
4855 * PARAMETERS :
4856 * @request : request from framework to process
4857 *
4858 * RETURN :
4859 *
4860 *==========================================================================*/
4861int QCamera3HardwareInterface::processCaptureRequest(
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004862 camera3_capture_request_t *request,
4863 List<InternalRequest> &internallyRequestedStreams)
Thierry Strudel3d639192016-09-09 11:52:26 -07004864{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004865 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_PROC_CAP_REQ);
Thierry Strudel3d639192016-09-09 11:52:26 -07004866 int rc = NO_ERROR;
4867 int32_t request_id;
4868 CameraMetadata meta;
Thierry Strudel3d639192016-09-09 11:52:26 -07004869 bool isVidBufRequested = false;
4870 camera3_stream_buffer_t *pInputBuffer = NULL;
Thierry Strudel54dc9782017-02-15 12:12:10 -08004871 char prop[PROPERTY_VALUE_MAX];
Thierry Strudel3d639192016-09-09 11:52:26 -07004872
4873 pthread_mutex_lock(&mMutex);
4874
4875 // Validate current state
4876 switch (mState) {
4877 case CONFIGURED:
4878 case STARTED:
4879 /* valid state */
4880 break;
4881
4882 case ERROR:
4883 pthread_mutex_unlock(&mMutex);
4884 handleCameraDeviceError();
4885 return -ENODEV;
4886
4887 default:
4888 LOGE("Invalid state %d", mState);
4889 pthread_mutex_unlock(&mMutex);
4890 return -ENODEV;
4891 }
4892
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004893 rc = validateCaptureRequest(request, internallyRequestedStreams);
Thierry Strudel3d639192016-09-09 11:52:26 -07004894 if (rc != NO_ERROR) {
4895 LOGE("incoming request is not valid");
4896 pthread_mutex_unlock(&mMutex);
4897 return rc;
4898 }
4899
4900 meta = request->settings;
4901
4902 // For first capture request, send capture intent, and
4903 // stream on all streams
4904 if (mState == CONFIGURED) {
Chien-Yu Chene96475e2017-04-11 11:53:26 -07004905 logEaselEvent("EASEL_STARTUP_LATENCY", "First request");
Thierry Strudel3d639192016-09-09 11:52:26 -07004906 // send an unconfigure to the backend so that the isp
4907 // resources are deallocated
4908 if (!mFirstConfiguration) {
4909 cam_stream_size_info_t stream_config_info;
4910 int32_t hal_version = CAM_HAL_V3;
4911 memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
4912 stream_config_info.buffer_info.min_buffers =
4913 MIN_INFLIGHT_REQUESTS;
4914 stream_config_info.buffer_info.max_buffers =
Thierry Strudel2896d122017-02-23 19:18:03 -08004915 m_bIs4KVideo ? 0 :
Jason Leea46ad5e2017-07-07 15:20:56 -07004916 m_bEis3PropertyEnabled && m_bIsVideo ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07004917 clear_metadata_buffer(mParameters);
4918 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4919 CAM_INTF_PARM_HAL_VERSION, hal_version);
4920 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4921 CAM_INTF_META_STREAM_INFO, stream_config_info);
4922 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
4923 mParameters);
4924 if (rc < 0) {
4925 LOGE("set_parms for unconfigure failed");
4926 pthread_mutex_unlock(&mMutex);
4927 return rc;
4928 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07004929
Thierry Strudel3d639192016-09-09 11:52:26 -07004930 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004931 mPerfLockMgr.acquirePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07004932 /* get eis information for stream configuration */
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004933 cam_is_type_t isTypeVideo, isTypePreview, is_type=IS_TYPE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07004934 char is_type_value[PROPERTY_VALUE_MAX];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004935 property_get("persist.camera.is_type", is_type_value, "4");
4936 isTypeVideo = static_cast<cam_is_type_t>(atoi(is_type_value));
4937 // Make default value for preview IS_TYPE as IS_TYPE_EIS_2_0
4938 property_get("persist.camera.is_type_preview", is_type_value, "4");
4939 isTypePreview = static_cast<cam_is_type_t>(atoi(is_type_value));
4940 LOGD("isTypeVideo: %d isTypePreview: %d", isTypeVideo, isTypePreview);
Thierry Strudel3d639192016-09-09 11:52:26 -07004941
4942 if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
4943 int32_t hal_version = CAM_HAL_V3;
4944 uint8_t captureIntent =
4945 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
4946 mCaptureIntent = captureIntent;
4947 clear_metadata_buffer(mParameters);
4948 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version);
4949 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_CAPTURE_INTENT, captureIntent);
4950 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07004951 if (mFirstConfiguration) {
4952 // configure instant AEC
4953 // Instant AEC is a session based parameter and it is needed only
4954 // once per complete session after open camera.
4955 // i.e. This is set only once for the first capture request, after open camera.
4956 setInstantAEC(meta);
4957 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004958 uint8_t fwkVideoStabMode=0;
4959 if (meta.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
4960 fwkVideoStabMode = meta.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
4961 }
4962
Xue Tuecac74e2017-04-17 13:58:15 -07004963 // If the EIS setprop is enabled, turn EIS on only for video/preview streams
4964 bool setEis = m_bEisEnable && m_bEisSupportedSize &&
Jason Lee603176d2017-05-31 11:43:27 -07004965 (isTypeVideo >= IS_TYPE_EIS_2_0) && !meta.exists(QCAMERA3_USE_AV_TIMER);
Thierry Strudel3d639192016-09-09 11:52:26 -07004966 int32_t vsMode;
4967 vsMode = (setEis)? DIS_ENABLE: DIS_DISABLE;
4968 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_DIS_ENABLE, vsMode)) {
4969 rc = BAD_VALUE;
4970 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004971 LOGD("setEis %d", setEis);
4972 bool eis3Supported = false;
4973 size_t count = IS_TYPE_MAX;
4974 count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
4975 for (size_t i = 0; i < count; i++) {
4976 if (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0) {
4977 eis3Supported = true;
4978 break;
4979 }
4980 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004981
4982 //IS type will be 0 unless EIS is supported. If EIS is supported
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004983 //it could either be 4 or 5 depending on the stream and video size
Thierry Strudel3d639192016-09-09 11:52:26 -07004984 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
4985 if (setEis) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004986 if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_PREVIEW) {
4987 is_type = isTypePreview;
4988 } else if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_VIDEO ) {
4989 if ( (isTypeVideo == IS_TYPE_EIS_3_0) && (eis3Supported == FALSE) ) {
4990 LOGW(" EIS_3.0 is not supported and so setting EIS_2.0");
Thierry Strudel3d639192016-09-09 11:52:26 -07004991 is_type = IS_TYPE_EIS_2_0;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004992 } else {
4993 is_type = isTypeVideo;
Thierry Strudel3d639192016-09-09 11:52:26 -07004994 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004995 } else {
4996 is_type = IS_TYPE_NONE;
4997 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004998 mStreamConfigInfo.is_type[i] = is_type;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004999 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07005000 mStreamConfigInfo.is_type[i] = IS_TYPE_NONE;
5001 }
5002 }
5003
5004 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5005 CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
5006
Thierry Strudel54dc9782017-02-15 12:12:10 -08005007 //Disable tintless only if the property is set to 0
5008 memset(prop, 0, sizeof(prop));
5009 property_get("persist.camera.tintless.enable", prop, "1");
5010 int32_t tintless_value = atoi(prop);
5011
Thierry Strudel3d639192016-09-09 11:52:26 -07005012 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5013 CAM_INTF_PARM_TINTLESS, tintless_value);
Thierry Strudel54dc9782017-02-15 12:12:10 -08005014
Thierry Strudel3d639192016-09-09 11:52:26 -07005015 //Disable CDS for HFR mode or if DIS/EIS is on.
5016 //CDS is a session parameter in the backend/ISP, so need to be set/reset
5017 //after every configure_stream
5018 if ((CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) ||
5019 (m_bIsVideo)) {
5020 int32_t cds = CAM_CDS_MODE_OFF;
5021 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5022 CAM_INTF_PARM_CDS_MODE, cds))
5023 LOGE("Failed to disable CDS for HFR mode");
5024
5025 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005026
5027 if (m_debug_avtimer || meta.exists(QCAMERA3_USE_AV_TIMER)) {
5028 uint8_t* use_av_timer = NULL;
5029
5030 if (m_debug_avtimer){
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005031 LOGI(" Enabling AV timer through setprop");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005032 use_av_timer = &m_debug_avtimer;
Binhao Lin09245482017-08-31 18:25:29 -07005033 m_bAVTimerEnabled = true;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005034 }
5035 else{
5036 use_av_timer =
5037 meta.find(QCAMERA3_USE_AV_TIMER).data.u8;
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005038 if (use_av_timer) {
Binhao Lin09245482017-08-31 18:25:29 -07005039 m_bAVTimerEnabled = true;
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005040 LOGI("Enabling AV timer through Metadata: use_av_timer: %d", *use_av_timer);
5041 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005042 }
5043
5044 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_USE_AV_TIMER, *use_av_timer)) {
5045 rc = BAD_VALUE;
5046 }
5047 }
5048
Thierry Strudel3d639192016-09-09 11:52:26 -07005049 setMobicat();
5050
Emilian Peev49c4c6b2017-04-24 10:21:34 +01005051 uint8_t nrMode = 0;
5052 if (meta.exists(ANDROID_NOISE_REDUCTION_MODE)) {
5053 nrMode = meta.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
5054 }
5055
Thierry Strudel3d639192016-09-09 11:52:26 -07005056 /* Set fps and hfr mode while sending meta stream info so that sensor
5057 * can configure appropriate streaming mode */
5058 mHFRVideoFps = DEFAULT_VIDEO_FPS;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005059 mMinInFlightRequests = MIN_INFLIGHT_REQUESTS;
5060 mMaxInFlightRequests = MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07005061 if (meta.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
5062 rc = setHalFpsRange(meta, mParameters);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005063 if (rc == NO_ERROR) {
5064 int32_t max_fps =
5065 (int32_t) meta.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
Zhijun He21b864a2016-06-24 13:41:19 -07005066 if (max_fps == 60 || mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005067 mMinInFlightRequests = MIN_INFLIGHT_60FPS_REQUESTS;
5068 }
5069 /* For HFR, more buffers are dequeued upfront to improve the performance */
5070 if (mBatchSize) {
5071 mMinInFlightRequests = MIN_INFLIGHT_HFR_REQUESTS;
5072 mMaxInFlightRequests = MAX_INFLIGHT_HFR_REQUESTS;
5073 }
5074 }
5075 else {
Thierry Strudel3d639192016-09-09 11:52:26 -07005076 LOGE("setHalFpsRange failed");
5077 }
5078 }
5079 if (meta.exists(ANDROID_CONTROL_MODE)) {
5080 uint8_t metaMode = meta.find(ANDROID_CONTROL_MODE).data.u8[0];
5081 rc = extractSceneMode(meta, metaMode, mParameters);
5082 if (rc != NO_ERROR) {
5083 LOGE("extractSceneMode failed");
5084 }
5085 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005086 memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07005087
Thierry Strudel04e026f2016-10-10 11:27:36 -07005088 if (meta.exists(QCAMERA3_VIDEO_HDR_MODE)) {
5089 cam_video_hdr_mode_t vhdr = (cam_video_hdr_mode_t)
5090 meta.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
5091 rc = setVideoHdrMode(mParameters, vhdr);
5092 if (rc != NO_ERROR) {
5093 LOGE("setVideoHDR is failed");
5094 }
5095 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005096
Ranjith Kagathi Anandae5df3752017-04-28 11:22:51 -07005097 if (meta.exists(TANGO_MODE_DATA_SENSOR_FULLFOV)) {
Ranjith Kagathi Ananda0533b682017-03-24 17:52:46 -07005098 uint8_t sensorModeFullFov =
Ranjith Kagathi Anandae5df3752017-04-28 11:22:51 -07005099 meta.find(TANGO_MODE_DATA_SENSOR_FULLFOV).data.u8[0];
Ranjith Kagathi Ananda0533b682017-03-24 17:52:46 -07005100 LOGD("SENSOR_MODE_FULLFOV %d" , sensorModeFullFov);
5101 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_SENSOR_MODE_FULLFOV,
5102 sensorModeFullFov)) {
5103 rc = BAD_VALUE;
5104 }
5105 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005106 //TODO: validate the arguments, HSV scenemode should have only the
5107 //advertised fps ranges
5108
5109 /* Set the capture intent, HAL version, tintless, stream info,
5110 * and DIS-enable parameters to the backend */
5111 LOGD("set_parms META_STREAM_INFO " );
5112 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
Thierry Strudel2896d122017-02-23 19:18:03 -08005113 LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%" PRIx64
5114 ", Format:%d is_type: %d",
Thierry Strudel3d639192016-09-09 11:52:26 -07005115 mStreamConfigInfo.type[i],
5116 mStreamConfigInfo.stream_sizes[i].width,
5117 mStreamConfigInfo.stream_sizes[i].height,
5118 mStreamConfigInfo.postprocess_mask[i],
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005119 mStreamConfigInfo.format[i],
5120 mStreamConfigInfo.is_type[i]);
Thierry Strudel3d639192016-09-09 11:52:26 -07005121 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005122
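// Push the accumulated meta stream info and capture parameters to the backend in a single set_parms call.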
Thierry Strudel3d639192016-09-09 11:52:26 -07005123 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
5124 mParameters);
5125 if (rc < 0) {
5126 LOGE("set_parms failed for hal version, stream info");
5127 }
5128
Chien-Yu Chen605c3872017-06-14 11:09:23 -07005129 cam_sensor_mode_info_t sensorModeInfo = {};
5130 rc = getSensorModeInfo(sensorModeInfo);
Thierry Strudel3d639192016-09-09 11:52:26 -07005131 if (rc != NO_ERROR) {
5132 LOGE("Failed to get sensor output size");
5133 pthread_mutex_unlock(&mMutex);
5134 goto error_exit;
5135 }
5136
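// Map crop regions from the full active-array coordinate space to the coordinate space of the selected sensor mode.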
5137 mCropRegionMapper.update(gCamCapability[mCameraId]->active_array_size.width,
5138 gCamCapability[mCameraId]->active_array_size.height,
Chien-Yu Chen605c3872017-06-14 11:09:23 -07005139 sensorModeInfo.active_array_size.width,
5140 sensorModeInfo.active_array_size.height);
Thierry Strudel3d639192016-09-09 11:52:26 -07005141
5142 /* Set batch mode before initializing channels. Since registerBuffer
5143 * internally initializes some of the channels, batch mode must be set
5144 * even before the first registerBuffer call */
5145 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5146 it != mStreamInfo.end(); it++) {
5147 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
5148 if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
5149 && mBatchSize) {
5150 rc = channel->setBatchSize(mBatchSize);
5151 //Disable per frame map unmap for HFR/batchmode case
5152 rc |= channel->setPerFrameMapUnmap(false);
5153 if (NO_ERROR != rc) {
5154 LOGE("Channel init failed %d", rc);
5155 pthread_mutex_unlock(&mMutex);
5156 goto error_exit;
5157 }
5158 }
5159 }
5160
5161 //First initialize all streams
5162 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5163 it != mStreamInfo.end(); it++) {
5164 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
Emilian Peev49c4c6b2017-04-24 10:21:34 +01005165
5166 /* Initial value of NR mode is needed before stream on */
5167 channel->setNRMode(nrMode);
Thierry Strudel3d639192016-09-09 11:52:26 -07005168 if ((((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) ||
5169 ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask())) &&
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005170 setEis) {
5171 for (size_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
5172 if ( (1U << mStreamConfigInfo.type[i]) == channel->getStreamTypeMask() ) {
5173 is_type = mStreamConfigInfo.is_type[i];
5174 break;
5175 }
5176 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005177 rc = channel->initialize(is_type);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005178 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07005179 rc = channel->initialize(IS_TYPE_NONE);
5180 }
5181 if (NO_ERROR != rc) {
5182 LOGE("Channel initialization failed %d", rc);
5183 pthread_mutex_unlock(&mMutex);
5184 goto error_exit;
5185 }
5186 }
5187
5188 if (mRawDumpChannel) {
5189 rc = mRawDumpChannel->initialize(IS_TYPE_NONE);
5190 if (rc != NO_ERROR) {
5191 LOGE("Error: Raw Dump Channel init failed");
5192 pthread_mutex_unlock(&mMutex);
5193 goto error_exit;
5194 }
5195 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07005196 if (mHdrPlusRawSrcChannel) {
5197 rc = mHdrPlusRawSrcChannel->initialize(IS_TYPE_NONE);
5198 if (rc != NO_ERROR) {
5199 LOGE("Error: HDR+ RAW Source Channel init failed");
5200 pthread_mutex_unlock(&mMutex);
5201 goto error_exit;
5202 }
5203 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005204 if (mSupportChannel) {
5205 rc = mSupportChannel->initialize(IS_TYPE_NONE);
5206 if (rc < 0) {
5207 LOGE("Support channel initialization failed");
5208 pthread_mutex_unlock(&mMutex);
5209 goto error_exit;
5210 }
5211 }
5212 if (mAnalysisChannel) {
5213 rc = mAnalysisChannel->initialize(IS_TYPE_NONE);
5214 if (rc < 0) {
5215 LOGE("Analysis channel initialization failed");
5216 pthread_mutex_unlock(&mMutex);
5217 goto error_exit;
5218 }
5219 }
5220 if (mDummyBatchChannel) {
5221 rc = mDummyBatchChannel->setBatchSize(mBatchSize);
5222 if (rc < 0) {
5223 LOGE("mDummyBatchChannel setBatchSize failed");
5224 pthread_mutex_unlock(&mMutex);
5225 goto error_exit;
5226 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005227 rc = mDummyBatchChannel->initialize(IS_TYPE_NONE);
Thierry Strudel3d639192016-09-09 11:52:26 -07005228 if (rc < 0) {
5229 LOGE("mDummyBatchChannel initialization failed");
5230 pthread_mutex_unlock(&mMutex);
5231 goto error_exit;
5232 }
5233 }
5234
5235 // Set bundle info
5236 rc = setBundleInfo();
5237 if (rc < 0) {
5238 LOGE("setBundleInfo failed %d", rc);
5239 pthread_mutex_unlock(&mMutex);
5240 goto error_exit;
5241 }
5242
5243 //update settings from app here
5244 if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
5245 mIsDeviceLinked = meta.find(QCAMERA3_DUALCAM_LINK_ENABLE).data.u8[0];
5246 LOGH("Dualcam: setting On=%d id =%d", mIsDeviceLinked, mCameraId);
5247 }
5248 if (meta.exists(QCAMERA3_DUALCAM_LINK_IS_MAIN)) {
5249 mIsMainCamera = meta.find(QCAMERA3_DUALCAM_LINK_IS_MAIN).data.u8[0];
5250 LOGH("Dualcam: Is this main camera = %d id =%d", mIsMainCamera, mCameraId);
5251 }
5252 if (meta.exists(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID)) {
5253 mLinkedCameraId = meta.find(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID).data.u8[0];
5254 LOGH("Dualcam: Linked camera Id %d id =%d", mLinkedCameraId, mCameraId);
5255
5256 if ( (mLinkedCameraId >= MM_CAMERA_MAX_NUM_SENSORS) &&
5257 (mLinkedCameraId != mCameraId) ) {
5258 LOGE("Dualcam: mLinkedCameraId %d is invalid, current cam id = %d",
5259 mLinkedCameraId, mCameraId);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005260 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005261 goto error_exit;
5262 }
5263 }
5264
5265 // add bundle related cameras
5266 LOGH("%s: Dualcam: id =%d, mIsDeviceLinked=%d", __func__,mCameraId, mIsDeviceLinked);
5267 if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005268 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
5269 &m_pDualCamCmdPtr->bundle_info;
5270 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
Thierry Strudel3d639192016-09-09 11:52:26 -07005271 if (mIsDeviceLinked)
5272 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_ON;
5273 else
5274 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
5275
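// sessionId[] is shared across camera instances, so guard the lookup with the global camera lock.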
5276 pthread_mutex_lock(&gCamLock);
5277
5278 if (sessionId[mLinkedCameraId] == 0xDEADBEEF) {
5279 LOGE("Dualcam: Invalid Session Id ");
5280 pthread_mutex_unlock(&gCamLock);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005281 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005282 goto error_exit;
5283 }
5284
5285 if (mIsMainCamera == 1) {
5286 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
5287 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
Thierry Strudel269c81a2016-10-12 12:13:59 -07005288 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005289 m_pRelCamSyncBuf->cam_role = CAM_ROLE_BAYER;
Thierry Strudel3d639192016-09-09 11:52:26 -07005290 // related session id should be session id of linked session
5291 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5292 } else {
5293 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
5294 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
Thierry Strudel269c81a2016-10-12 12:13:59 -07005295 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005296 m_pRelCamSyncBuf->cam_role = CAM_ROLE_MONO;
Thierry Strudel3d639192016-09-09 11:52:26 -07005297 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5298 }
Thierry Strudel2896d122017-02-23 19:18:03 -08005299 m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
Thierry Strudel3d639192016-09-09 11:52:26 -07005300 pthread_mutex_unlock(&gCamLock);
5301
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005302 rc = mCameraHandle->ops->set_dual_cam_cmd(
5303 mCameraHandle->camera_handle);
Thierry Strudel3d639192016-09-09 11:52:26 -07005304 if (rc < 0) {
5305 LOGE("Dualcam: link failed");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005306 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005307 goto error_exit;
5308 }
5309 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005310 goto no_error;
5311error_exit:
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005312 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07005313 return rc;
5314no_error:
Thierry Strudel3d639192016-09-09 11:52:26 -07005315 mWokenUpByDaemon = false;
5316 mPendingLiveRequest = 0;
5317 mFirstConfiguration = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07005318 }
5319
5320 uint32_t frameNumber = request->frame_number;
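// streamsArray collects the stream IDs (and buffer indices) requested in this capture; it is later sent to the backend through CAM_INTF_META_STREAM_ID.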
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005321 cam_stream_ID_t streamsArray;
Thierry Strudel3d639192016-09-09 11:52:26 -07005322
5323 if (mFlushPerf) {
5324 //we cannot accept any requests during flush
5325 LOGE("process_capture_request cannot proceed during flush");
5326 pthread_mutex_unlock(&mMutex);
5327 return NO_ERROR; //should return an error
5328 }
5329
5330 if (meta.exists(ANDROID_REQUEST_ID)) {
5331 request_id = meta.find(ANDROID_REQUEST_ID).data.i32[0];
5332 mCurrentRequestId = request_id;
5333 LOGD("Received request with id: %d", request_id);
5334 } else if (mState == CONFIGURED || mCurrentRequestId == -1){
5335 LOGE("Unable to find request id field, \
5336 & no previous id available");
5337 pthread_mutex_unlock(&mMutex);
5338 return NAME_NOT_FOUND;
5339 } else {
5340 LOGD("Re-using old request id");
5341 request_id = mCurrentRequestId;
5342 }
5343
5344 LOGH("num_output_buffers = %d input_buffer = %p frame_number = %d",
5345 request->num_output_buffers,
5346 request->input_buffer,
5347 frameNumber);
5348 // Acquire all request buffers first
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005349 streamsArray.num_streams = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07005350 int blob_request = 0;
Emilian Peev7650c122017-01-19 08:24:33 -08005351 bool depthRequestPresent = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07005352 uint32_t snapshotStreamId = 0;
5353 for (size_t i = 0; i < request->num_output_buffers; i++) {
5354 const camera3_stream_buffer_t& output = request->output_buffers[i];
5355 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
5356
Emilian Peev7650c122017-01-19 08:24:33 -08005357 if ((output.stream->format == HAL_PIXEL_FORMAT_BLOB) &&
5358 (output.stream->data_space != HAL_DATASPACE_DEPTH)) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005359 //FIXME??:Call function to store local copy of jpeg data for encode params.
Thierry Strudel3d639192016-09-09 11:52:26 -07005360 blob_request = 1;
5361 snapshotStreamId = channel->getStreamID(channel->getStreamTypeMask());
5362 }
5363
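// Wait for the acquire fence (if any) to signal before the output buffer may be used, then close it.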
5364 if (output.acquire_fence != -1) {
5365 rc = sync_wait(output.acquire_fence, TIMEOUT_NEVER);
5366 close(output.acquire_fence);
5367 if (rc != OK) {
5368 LOGE("sync wait failed %d", rc);
5369 pthread_mutex_unlock(&mMutex);
5370 return rc;
5371 }
5372 }
5373
Emilian Peev0f3c3162017-03-15 12:57:46 +00005374 if ((output.stream->format == HAL_PIXEL_FORMAT_BLOB) &&
5375 (output.stream->data_space == HAL_DATASPACE_DEPTH)) {
Emilian Peev7650c122017-01-19 08:24:33 -08005376 depthRequestPresent = true;
5377 continue;
5378 }
5379
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005380 streamsArray.stream_request[streamsArray.num_streams++].streamID =
Thierry Strudel3d639192016-09-09 11:52:26 -07005381 channel->getStreamID(channel->getStreamTypeMask());
Thierry Strudel3d639192016-09-09 11:52:26 -07005382
5383 if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
5384 isVidBufRequested = true;
5385 }
5386 }
5387
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005388 //FIXME: Add checks in validateCaptureRequest to ensure there are no duplicate streams
5389 for (auto itr = internallyRequestedStreams.begin(); itr != internallyRequestedStreams.end();
5390 itr++) {
5391 QCamera3Channel *channel = (QCamera3Channel *)(*itr).stream->priv;
5392 streamsArray.stream_request[streamsArray.num_streams++].streamID =
5393 channel->getStreamID(channel->getStreamTypeMask());
5394
5395 if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
5396 isVidBufRequested = true;
5397 }
5398 }
5399
Thierry Strudel3d639192016-09-09 11:52:26 -07005400 if (blob_request) {
Shuzhen Wang850a7c22017-05-02 14:48:23 -07005401 ATRACE_ASYNC_BEGIN("SNAPSHOT", frameNumber);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005402 mPerfLockMgr.acquirePerfLock(PERF_LOCK_TAKE_SNAPSHOT);
Thierry Strudel3d639192016-09-09 11:52:26 -07005403 }
5404 if (blob_request && mRawDumpChannel) {
5405 LOGD("Trigger Raw based on blob request if Raw dump is enabled");
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005406 streamsArray.stream_request[streamsArray.num_streams].streamID =
Thierry Strudel3d639192016-09-09 11:52:26 -07005407 mRawDumpChannel->getStreamID(mRawDumpChannel->getStreamTypeMask());
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005408 streamsArray.stream_request[streamsArray.num_streams++].buf_index = CAM_FREERUN_IDX;
Thierry Strudel3d639192016-09-09 11:52:26 -07005409 }
5410
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005411 {
5412 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5413 // Request a RAW buffer if
5414 // 1. mHdrPlusRawSrcChannel is valid.
5415 // 2. frameNumber is multiples of kHdrPlusRawPeriod (in order to limit RAW capture rate.)
5416 // 3. There is no pending HDR+ request.
5417 if (mHdrPlusRawSrcChannel && frameNumber % kHdrPlusRawPeriod == 0 &&
5418 mHdrPlusPendingRequests.size() == 0) {
5419 streamsArray.stream_request[streamsArray.num_streams].streamID =
5420 mHdrPlusRawSrcChannel->getStreamID(mHdrPlusRawSrcChannel->getStreamTypeMask());
5421 streamsArray.stream_request[streamsArray.num_streams++].buf_index = CAM_FREERUN_IDX;
5422 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07005423 }
5424
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005425 //extract capture intent
5426 if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
5427 mCaptureIntent =
5428 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
5429 }
5430
5431 if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
5432 mCacMode =
5433 meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
5434 }
5435
Chien-Yu Chenbc730232017-07-12 14:49:55 -07005436 uint8_t requestedLensShadingMapMode;
5437 // Get the shading map mode.
5438 if (meta.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
5439 mLastRequestedLensShadingMapMode = requestedLensShadingMapMode =
5440 meta.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
5441 } else {
5442 requestedLensShadingMapMode = mLastRequestedLensShadingMapMode;
5443 }
5444
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005445 bool hdrPlusRequest = false;
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005446 HdrPlusPendingRequest pendingHdrPlusRequest = {};
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005447
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07005448 {
Chien-Yu Chen77ccd022017-06-23 12:00:36 -07005449 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07005450 // If this request has a still capture intent, try to submit an HDR+ request.
5451 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled &&
5452 mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE) {
5453 hdrPlusRequest = trySubmittingHdrPlusRequestLocked(&pendingHdrPlusRequest, *request, meta);
5454 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005455 }
5456
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005457 if (hdrPlusRequest) {
5458 // For a HDR+ request, just set the frame parameters.
5459 rc = setFrameParameters(request, streamsArray, blob_request, snapshotStreamId);
5460 if (rc < 0) {
5461 LOGE("fail to set frame parameters");
5462 pthread_mutex_unlock(&mMutex);
5463 return rc;
5464 }
5465 } else if(request->input_buffer == NULL) {
Thierry Strudel3d639192016-09-09 11:52:26 -07005466 /* Parse the settings:
5467 * - For every request in NORMAL MODE
5468 * - For every request in HFR mode during preview only case
5469 * - For first request of every batch in HFR mode during video
5470 * recording. In batchmode the same settings except frame number is
5471 * repeated in each request of the batch.
5472 */
5473 if (!mBatchSize ||
5474 (mBatchSize && !isVidBufRequested) ||
5475 (mBatchSize && isVidBufRequested && !mToBeQueuedVidBufs)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005476 rc = setFrameParameters(request, streamsArray, blob_request, snapshotStreamId);
Thierry Strudel3d639192016-09-09 11:52:26 -07005477 if (rc < 0) {
5478 LOGE("fail to set frame parameters");
5479 pthread_mutex_unlock(&mMutex);
5480 return rc;
5481 }
Chien-Yu Chenbc730232017-07-12 14:49:55 -07005482
5483 {
5484 // If HDR+ mode is enabled, override lens shading mode to ON so lens shading map
5485 // will be reported in result metadata.
5486 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
5487 if (mHdrPlusModeEnabled) {
5488 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_LENS_SHADING_MAP_MODE,
5489 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON);
5490 }
5491 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005492 }
5493 /* For batch mode HFR, setFrameParameters is not called for every
5494 * request; only the frame number of the latest request is parsed.
5495 * Keep track of the first and last frame numbers in a batch so that
5496 * metadata for all frame numbers of the batch can be duplicated in
5497 * handleBatchMetadata */
5498 if (mBatchSize) {
5499 if (!mToBeQueuedVidBufs) {
5500 //start of the batch
5501 mFirstFrameNumberInBatch = request->frame_number;
5502 }
5503 if(ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5504 CAM_INTF_META_FRAME_NUMBER, request->frame_number)) {
5505 LOGE("Failed to set the frame number in the parameters");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005506 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005507 return BAD_VALUE;
5508 }
5509 }
5510 if (mNeedSensorRestart) {
5511 /* Unlock the mutex as restartSensor waits on the channels to be
5512 * stopped, which in turn calls stream callback functions -
5513 * handleBufferWithLock and handleMetadataWithLock */
5514 pthread_mutex_unlock(&mMutex);
5515 rc = dynamicUpdateMetaStreamInfo();
5516 if (rc != NO_ERROR) {
5517 LOGE("Restarting the sensor failed");
5518 return BAD_VALUE;
5519 }
5520 mNeedSensorRestart = false;
5521 pthread_mutex_lock(&mMutex);
5522 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005523 if(mResetInstantAEC) {
5524 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5525 CAM_INTF_PARM_INSTANT_AEC, (uint8_t)CAM_AEC_NORMAL_CONVERGENCE);
5526 mResetInstantAEC = false;
5527 }
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005528 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07005529 if (request->input_buffer->acquire_fence != -1) {
5530 rc = sync_wait(request->input_buffer->acquire_fence, TIMEOUT_NEVER);
5531 close(request->input_buffer->acquire_fence);
5532 if (rc != OK) {
5533 LOGE("input buffer sync wait failed %d", rc);
5534 pthread_mutex_unlock(&mMutex);
5535 return rc;
5536 }
5537 }
5538 }
5539
5540 if (mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM) {
5541 mLastCustIntentFrmNum = frameNumber;
5542 }
5543 /* Update pending request list and pending buffers map */
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005544 PendingRequestInfo pendingRequest = {};
Thierry Strudel3d639192016-09-09 11:52:26 -07005545 pendingRequestIterator latestRequest;
5546 pendingRequest.frame_number = frameNumber;
Emilian Peev7650c122017-01-19 08:24:33 -08005547 pendingRequest.num_buffers = depthRequestPresent ?
5548 (request->num_output_buffers - 1 ) : request->num_output_buffers;
Thierry Strudel3d639192016-09-09 11:52:26 -07005549 pendingRequest.request_id = request_id;
5550 pendingRequest.blob_request = blob_request;
5551 pendingRequest.timestamp = 0;
Chien-Yu Chenbc730232017-07-12 14:49:55 -07005552 pendingRequest.requestedLensShadingMapMode = requestedLensShadingMapMode;
Thierry Strudel3d639192016-09-09 11:52:26 -07005553 if (request->input_buffer) {
5554 pendingRequest.input_buffer =
5555 (camera3_stream_buffer_t*)malloc(sizeof(camera3_stream_buffer_t));
5556 *(pendingRequest.input_buffer) = *(request->input_buffer);
5557 pInputBuffer = pendingRequest.input_buffer;
5558 } else {
5559 pendingRequest.input_buffer = NULL;
5560 pInputBuffer = NULL;
5561 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005562 pendingRequest.bUseFirstPartial = (mState == CONFIGURED && !request->input_buffer);
Thierry Strudel3d639192016-09-09 11:52:26 -07005563
5564 pendingRequest.pipeline_depth = 0;
5565 pendingRequest.partial_result_cnt = 0;
5566 extractJpegMetadata(mCurJpegMeta, request);
5567 pendingRequest.jpegMetadata = mCurJpegMeta;
5568 pendingRequest.settings = saveRequestSettings(mCurJpegMeta, request);
Thierry Strudel3d639192016-09-09 11:52:26 -07005569 pendingRequest.capture_intent = mCaptureIntent;
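// Record the per-request hybrid AE setting (vendor tag) in the pending request.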
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07005570 if (meta.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
Shuzhen Wang77b049a2017-08-30 12:24:36 -07005571 pendingRequest.hybrid_ae_enable =
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07005572 meta.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8[0];
5573 }
Chien-Yu Chen98b126c2017-03-14 14:55:32 -07005574
Samuel Ha68ba5172016-12-15 18:41:12 -08005575 /* DevCamDebug metadata processCaptureRequest */
5576 if (meta.exists(DEVCAMDEBUG_META_ENABLE)) {
5577 mDevCamDebugMetaEnable =
5578 meta.find(DEVCAMDEBUG_META_ENABLE).data.u8[0];
5579 }
5580 pendingRequest.DevCamDebug_meta_enable = mDevCamDebugMetaEnable;
5581 /* DevCamDebug metadata end */
Thierry Strudel3d639192016-09-09 11:52:26 -07005582
5583 //extract CAC info
5584 if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
5585 mCacMode =
5586 meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
5587 }
5588 pendingRequest.fwkCacMode = mCacMode;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005589 pendingRequest.hdrplus = hdrPlusRequest;
Emilian Peev30522a12017-08-03 14:36:33 +01005590 pendingRequest.expectedFrameDuration = mExpectedFrameDuration;
5591 mExpectedInflightDuration += mExpectedFrameDuration;
Thierry Strudel3d639192016-09-09 11:52:26 -07005592
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07005593 // extract enableZsl info
5594 if (gExposeEnableZslKey) {
5595 if (meta.exists(ANDROID_CONTROL_ENABLE_ZSL)) {
5596 pendingRequest.enableZsl = meta.find(ANDROID_CONTROL_ENABLE_ZSL).data.u8[0];
5597 mZslEnabled = pendingRequest.enableZsl;
5598 } else {
5599 pendingRequest.enableZsl = mZslEnabled;
5600 }
5601 }
5602
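// Track every output buffer of this request so it can be returned to the framework on completion, flush or error.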
Thierry Strudel3d639192016-09-09 11:52:26 -07005603 PendingBuffersInRequest bufsForCurRequest;
5604 bufsForCurRequest.frame_number = frameNumber;
5605 // Mark current timestamp for the new request
5606 bufsForCurRequest.timestamp = systemTime(CLOCK_MONOTONIC);
Binhao Lin09245482017-08-31 18:25:29 -07005607 bufsForCurRequest.av_timestamp = 0;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005608 bufsForCurRequest.hdrplus = hdrPlusRequest;
Thierry Strudel3d639192016-09-09 11:52:26 -07005609
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005610 if (hdrPlusRequest) {
5611 // Save settings for this request.
5612 pendingHdrPlusRequest.settings = std::make_shared<metadata_buffer_t>();
5613 memcpy(pendingHdrPlusRequest.settings.get(), mParameters, sizeof(metadata_buffer_t));
5614
5615 // Add to pending HDR+ request queue.
5616 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5617 mHdrPlusPendingRequests.emplace(frameNumber, pendingHdrPlusRequest);
5618
5619 ALOGD("%s: frame number %u is an HDR+ request.", __FUNCTION__, frameNumber);
5620 }
5621
Thierry Strudel3d639192016-09-09 11:52:26 -07005622 for (size_t i = 0; i < request->num_output_buffers; i++) {
Emilian Peev0f3c3162017-03-15 12:57:46 +00005623 if ((request->output_buffers[i].stream->data_space ==
5624 HAL_DATASPACE_DEPTH) &&
5625 (HAL_PIXEL_FORMAT_BLOB ==
5626 request->output_buffers[i].stream->format)) {
Emilian Peev7650c122017-01-19 08:24:33 -08005627 continue;
5628 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005629 RequestedBufferInfo requestedBuf;
5630 memset(&requestedBuf, 0, sizeof(requestedBuf));
5631 requestedBuf.stream = request->output_buffers[i].stream;
5632 requestedBuf.buffer = NULL;
5633 pendingRequest.buffers.push_back(requestedBuf);
5634
5635 // Add to buffer handle the pending buffers list
5636 PendingBufferInfo bufferInfo;
5637 bufferInfo.buffer = request->output_buffers[i].buffer;
5638 bufferInfo.stream = request->output_buffers[i].stream;
5639 bufsForCurRequest.mPendingBufferList.push_back(bufferInfo);
5640 QCamera3Channel *channel = (QCamera3Channel *)bufferInfo.stream->priv;
5641 LOGD("frame = %d, buffer = %p, streamTypeMask = %d, stream format = %d",
5642 frameNumber, bufferInfo.buffer,
5643 channel->getStreamTypeMask(), bufferInfo.stream->format);
5644 }
5645 // Add this request packet into mPendingBuffersMap
5646 mPendingBuffersMap.mPendingBuffersInRequest.push_back(bufsForCurRequest);
5647 LOGD("mPendingBuffersMap.num_overall_buffers = %d",
5648 mPendingBuffersMap.get_num_overall_buffers());
5649
5650 latestRequest = mPendingRequestsList.insert(
5651 mPendingRequestsList.end(), pendingRequest);
Chien-Yu Chen3f303522017-05-19 15:21:45 -07005652
5653 // Let shutter dispatcher and buffer dispatcher know shutter and output buffers are expected
5654 // for the frame number.
Chien-Yu Chena7f98612017-06-20 16:54:10 -07005655 mShutterDispatcher.expectShutter(frameNumber, request->input_buffer != nullptr);
Chien-Yu Chen3f303522017-05-19 15:21:45 -07005656 for (size_t i = 0; i < request->num_output_buffers; i++) {
5657 mOutputBufferDispatcher.expectBuffer(frameNumber, request->output_buffers[i].stream);
5658 }
5659
Thierry Strudel3d639192016-09-09 11:52:26 -07005660 if(mFlush) {
5661 LOGI("mFlush is true");
5662 pthread_mutex_unlock(&mMutex);
5663 return NO_ERROR;
5664 }
5665
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005666 // If this is not an HDR+ request, send the request to metadata and each output buffer's
5667 // channel.
5668 if (!hdrPlusRequest) {
5669 int indexUsed;
5670 // Notify metadata channel we receive a request
5671 mMetadataChannel->request(NULL, frameNumber, indexUsed);
Thierry Strudel3d639192016-09-09 11:52:26 -07005672
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005673 if(request->input_buffer != NULL){
5674 LOGD("Input request, frame_number %d", frameNumber);
5675 rc = setReprocParameters(request, &mReprocMeta, snapshotStreamId);
5676 if (NO_ERROR != rc) {
5677 LOGE("fail to set reproc parameters");
5678 pthread_mutex_unlock(&mMutex);
5679 return rc;
5680 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005681 }
5682
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005683 // Call request on other streams
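// streams_need_metadata counts output streams that need the HAL metadata buffer for reprocessing; only one such stream per request is supported (checked after this loop).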
5684 uint32_t streams_need_metadata = 0;
5685 pendingBufferIterator pendingBufferIter = latestRequest->buffers.begin();
5686 for (size_t i = 0; i < request->num_output_buffers; i++) {
5687 const camera3_stream_buffer_t& output = request->output_buffers[i];
5688 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
5689
5690 if (channel == NULL) {
5691 LOGW("invalid channel pointer for stream");
5692 continue;
5693 }
5694
5695 if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
5696 LOGD("snapshot request with output buffer %p, input buffer %p, frame_number %d",
5697 output.buffer, request->input_buffer, frameNumber);
5698 if(request->input_buffer != NULL){
Thierry Strudel3d639192016-09-09 11:52:26 -07005699 rc = channel->request(output.buffer, frameNumber,
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005700 pInputBuffer, &mReprocMeta, indexUsed, false, false);
5701 if (rc < 0) {
5702 LOGE("Fail to request on picture channel");
5703 pthread_mutex_unlock(&mMutex);
5704 return rc;
5705 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005706 } else {
Emilian Peev7650c122017-01-19 08:24:33 -08005707 if (HAL_DATASPACE_DEPTH == output.stream->data_space) {
5708 assert(NULL != mDepthChannel);
5709 assert(mDepthChannel == output.stream->priv);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005710
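// Depth output does not go through the regular backend stream request path; just map the buffer and skip the stream bookkeeping below.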
Emilian Peev7650c122017-01-19 08:24:33 -08005711 rc = mDepthChannel->mapBuffer(output.buffer, request->frame_number);
5712 if (rc < 0) {
5713 LOGE("Fail to map on depth buffer");
5714 pthread_mutex_unlock(&mMutex);
5715 return rc;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005716 }
Emilian Peev4e0fe952017-06-30 12:40:09 -07005717 continue;
Emilian Peev7650c122017-01-19 08:24:33 -08005718 } else {
5719 LOGD("snapshot request with buffer %p, frame_number %d",
5720 output.buffer, frameNumber);
5721 if (!request->settings) {
5722 rc = channel->request(output.buffer, frameNumber,
5723 NULL, mPrevParameters, indexUsed);
5724 } else {
5725 rc = channel->request(output.buffer, frameNumber,
5726 NULL, mParameters, indexUsed);
5727 }
5728 if (rc < 0) {
5729 LOGE("Fail to request on picture channel");
5730 pthread_mutex_unlock(&mMutex);
5731 return rc;
5732 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005733
Emilian Peev7650c122017-01-19 08:24:33 -08005734 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5735 uint32_t j = 0;
5736 for (j = 0; j < streamsArray.num_streams; j++) {
5737 if (streamsArray.stream_request[j].streamID == streamId) {
5738 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5739 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5740 else
5741 streamsArray.stream_request[j].buf_index = indexUsed;
5742 break;
5743 }
5744 }
5745 if (j == streamsArray.num_streams) {
5746 LOGE("Did not find matching stream to update index");
5747 assert(0);
5748 }
5749
5750 pendingBufferIter->need_metadata = true;
5751 streams_need_metadata++;
5752 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005753 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005754 } else if (output.stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
5755 bool needMetadata = false;
5756 QCamera3YUVChannel *yuvChannel = (QCamera3YUVChannel *)channel;
5757 rc = yuvChannel->request(output.buffer, frameNumber,
5758 pInputBuffer, (pInputBuffer ? &mReprocMeta : mParameters),
5759 needMetadata, indexUsed, false, false);
Thierry Strudel3d639192016-09-09 11:52:26 -07005760 if (rc < 0) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005761 LOGE("Fail to request on YUV channel");
Thierry Strudel3d639192016-09-09 11:52:26 -07005762 pthread_mutex_unlock(&mMutex);
5763 return rc;
5764 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005765
5766 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5767 uint32_t j = 0;
5768 for (j = 0; j < streamsArray.num_streams; j++) {
5769 if (streamsArray.stream_request[j].streamID == streamId) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005770 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5771 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5772 else
5773 streamsArray.stream_request[j].buf_index = indexUsed;
5774 break;
5775 }
5776 }
5777 if (j == streamsArray.num_streams) {
5778 LOGE("Did not find matching stream to update index");
5779 assert(0);
5780 }
5781
5782 pendingBufferIter->need_metadata = needMetadata;
5783 if (needMetadata)
5784 streams_need_metadata += 1;
5785 LOGD("calling YUV channel request, need_metadata is %d",
5786 needMetadata);
5787 } else {
5788 LOGD("request with buffer %p, frame_number %d",
5789 output.buffer, frameNumber);
5790
5791 rc = channel->request(output.buffer, frameNumber, indexUsed);
5792
5793 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5794 uint32_t j = 0;
5795 for (j = 0; j < streamsArray.num_streams; j++) {
5796 if (streamsArray.stream_request[j].streamID == streamId) {
5797 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5798 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5799 else
5800 streamsArray.stream_request[j].buf_index = indexUsed;
5801 break;
5802 }
5803 }
5804 if (j == streamsArray.num_streams) {
5805 LOGE("Did not find matching stream to update index");
5806 assert(0);
5807 }
5808
5809 if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
5810 && mBatchSize) {
5811 mToBeQueuedVidBufs++;
5812 if (mToBeQueuedVidBufs == mBatchSize) {
5813 channel->queueBatchBuf();
5814 }
5815 }
5816 if (rc < 0) {
5817 LOGE("request failed");
5818 pthread_mutex_unlock(&mMutex);
5819 return rc;
5820 }
5821 }
5822 pendingBufferIter++;
5823 }
5824
5825 for (auto itr = internallyRequestedStreams.begin(); itr != internallyRequestedStreams.end();
5826 itr++) {
5827 QCamera3Channel *channel = (QCamera3Channel *)(*itr).stream->priv;
5828
5829 if (channel == NULL) {
5830 LOGE("invalid channel pointer for stream");
5831 assert(0);
Shuzhen Wang3a1b92d2017-08-09 13:39:47 -07005832 pthread_mutex_unlock(&mMutex);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005833 return BAD_VALUE;
5834 }
5835
5836 InternalRequest requestedStream;
5837 requestedStream = (*itr);
5838
5839
5840 if ((*itr).stream->format == HAL_PIXEL_FORMAT_BLOB) {
5841 LOGD("snapshot request internally input buffer %p, frame_number %d",
5842 request->input_buffer, frameNumber);
5843 if(request->input_buffer != NULL){
5844 rc = channel->request(NULL, frameNumber,
5845 pInputBuffer, &mReprocMeta, indexUsed, true,
5846 requestedStream.meteringOnly);
5847 if (rc < 0) {
5848 LOGE("Fail to request on picture channel");
5849 pthread_mutex_unlock(&mMutex);
5850 return rc;
5851 }
5852 } else {
5853 LOGD("snapshot request with frame_number %d", frameNumber);
5854 if (!request->settings) {
5855 rc = channel->request(NULL, frameNumber,
5856 NULL, mPrevParameters, indexUsed, true,
5857 requestedStream.meteringOnly);
5858 } else {
5859 rc = channel->request(NULL, frameNumber,
5860 NULL, mParameters, indexUsed, true, requestedStream.meteringOnly);
5861 }
5862 if (rc < 0) {
5863 LOGE("Fail to request on picture channel");
5864 pthread_mutex_unlock(&mMutex);
5865 return rc;
5866 }
5867
5868 if ((*itr).meteringOnly != 1) {
5869 requestedStream.need_metadata = 1;
5870 streams_need_metadata++;
5871 }
5872 }
5873
5874 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5875 uint32_t j = 0;
5876 for (j = 0; j < streamsArray.num_streams; j++) {
5877 if (streamsArray.stream_request[j].streamID == streamId) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005878 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5879 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5880 else
5881 streamsArray.stream_request[j].buf_index = indexUsed;
5882 break;
5883 }
5884 }
5885 if (j == streamsArray.num_streams) {
5886 LOGE("Did not find matching stream to update index");
5887 assert(0);
5888 }
5889
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005890 } else {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005891 LOGE("Internal requests not supported on this stream type");
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005892 assert(0);
Shuzhen Wang3a1b92d2017-08-09 13:39:47 -07005893 pthread_mutex_unlock(&mMutex);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005894 return INVALID_OPERATION;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005895 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005896 latestRequest->internalRequestList.push_back(requestedStream);
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005897 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005898
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005899 // If two streams have need_metadata set to true, fail the request; this
5900 // cannot be supported unless the metadata buffer is copied or reference counted
5901 if (streams_need_metadata > 1) {
5902 LOGE("not supporting request in which two streams requires"
5903 " 2 HAL metadata for reprocessing");
5904 pthread_mutex_unlock(&mMutex);
5905 return -EINVAL;
5906 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005907
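// Decide whether PDAF data should be generated for this request: skip by default when a depth channel is configured, and honor the per-request PD_DATA_ENABLE setting when a depth buffer was actually requested.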
Emilian Peev656e4fa2017-06-02 16:47:04 +01005908 cam_sensor_pd_data_t pdafEnable = (nullptr != mDepthChannel) ?
5909 CAM_PD_DATA_SKIP : CAM_PD_DATA_DISABLED;
5910 if (depthRequestPresent && mDepthChannel) {
5911 if (request->settings) {
5912 camera_metadata_ro_entry entry;
5913 if (find_camera_metadata_ro_entry(request->settings,
5914 NEXUS_EXPERIMENTAL_2017_PD_DATA_ENABLE, &entry) == 0) {
5915 if (entry.data.u8[0]) {
5916 pdafEnable = CAM_PD_DATA_ENABLED;
5917 } else {
5918 pdafEnable = CAM_PD_DATA_SKIP;
5919 }
5920 mDepthCloudMode = pdafEnable;
5921 } else {
5922 pdafEnable = mDepthCloudMode;
5923 }
5924 } else {
5925 pdafEnable = mDepthCloudMode;
5926 }
5927 }
5928
Emilian Peev7650c122017-01-19 08:24:33 -08005929 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5930 CAM_INTF_META_PDAF_DATA_ENABLE, pdafEnable)) {
5931 LOGE("%s: Failed to enable PDAF data in parameters!", __func__);
5932 pthread_mutex_unlock(&mMutex);
5933 return BAD_VALUE;
5934 }
Emilian Peev656e4fa2017-06-02 16:47:04 +01005935
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005936 if (request->input_buffer == NULL) {
5937 /* Set the parameters to backend:
5938 * - For every request in NORMAL MODE
5939 * - For every request in HFR mode during preview only case
5940 * - Once every batch in HFR mode during video recording
5941 */
5942 if (!mBatchSize ||
5943 (mBatchSize && !isVidBufRequested) ||
5944 (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize))) {
5945 LOGD("set_parms batchSz: %d IsVidBufReq: %d vidBufTobeQd: %d ",
5946 mBatchSize, isVidBufRequested,
5947 mToBeQueuedVidBufs);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005948
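// When the batch is complete, merge this request's stream IDs into mBatchedStreamsArray so the batched set_parms below covers every stream touched during the batch.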
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005949 if(mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize)) {
5950 for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
5951 uint32_t m = 0;
5952 for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
5953 if (streamsArray.stream_request[k].streamID ==
5954 mBatchedStreamsArray.stream_request[m].streamID)
5955 break;
5956 }
5957 if (m == mBatchedStreamsArray.num_streams) {
5958 mBatchedStreamsArray.stream_request\
5959 [mBatchedStreamsArray.num_streams].streamID =
5960 streamsArray.stream_request[k].streamID;
5961 mBatchedStreamsArray.stream_request\
5962 [mBatchedStreamsArray.num_streams].buf_index =
5963 streamsArray.stream_request[k].buf_index;
5964 mBatchedStreamsArray.num_streams =
5965 mBatchedStreamsArray.num_streams + 1;
5966 }
5967 }
5968 streamsArray = mBatchedStreamsArray;
5969 }
5970 /* Update stream id of all the requested buffers */
5971 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID,
5972 streamsArray)) {
5973 LOGE("Failed to set stream type mask in the parameters");
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005974 pthread_mutex_unlock(&mMutex);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005975 return BAD_VALUE;
5976 }
5977
5978 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
5979 mParameters);
5980 if (rc < 0) {
5981 LOGE("set_parms failed");
5982 }
5983 /* Reset to zero because the batch has been queued */
5984 mToBeQueuedVidBufs = 0;
5985 mPendingBatchMap.add(frameNumber, mFirstFrameNumberInBatch);
5986 memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
5987 } else if (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs != mBatchSize)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005988 for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
5989 uint32_t m = 0;
5990 for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
5991 if (streamsArray.stream_request[k].streamID ==
5992 mBatchedStreamsArray.stream_request[m].streamID)
5993 break;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005994 }
5995 if (m == mBatchedStreamsArray.num_streams) {
5996 mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].
5997 streamID = streamsArray.stream_request[k].streamID;
5998 mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].
5999 buf_index = streamsArray.stream_request[k].buf_index;
6000 mBatchedStreamsArray.num_streams = mBatchedStreamsArray.num_streams + 1;
6001 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08006002 }
6003 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08006004 mPendingLiveRequest++;
Shuzhen Wang3c077d72017-04-20 22:48:59 -07006005
6006 // Start all streams after the first setting is sent, so that the
6007 // setting can be applied sooner: (0 + apply_delay)th frame.
6008 if (mState == CONFIGURED && mChannelHandle) {
6009 //Then start them.
6010 LOGH("Start META Channel");
6011 rc = mMetadataChannel->start();
6012 if (rc < 0) {
6013 LOGE("META channel start failed");
6014 pthread_mutex_unlock(&mMutex);
6015 return rc;
6016 }
6017
6018 if (mAnalysisChannel) {
6019 rc = mAnalysisChannel->start();
6020 if (rc < 0) {
6021 LOGE("Analysis channel start failed");
6022 mMetadataChannel->stop();
6023 pthread_mutex_unlock(&mMutex);
6024 return rc;
6025 }
6026 }
6027
6028 if (mSupportChannel) {
6029 rc = mSupportChannel->start();
6030 if (rc < 0) {
6031 LOGE("Support channel start failed");
6032 mMetadataChannel->stop();
6033 /* Although support and analysis channels are mutually exclusive today,
6034 stop the analysis channel in any case for future proofing */
6035 if (mAnalysisChannel) {
6036 mAnalysisChannel->stop();
6037 }
6038 pthread_mutex_unlock(&mMutex);
6039 return rc;
6040 }
6041 }
6042 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6043 it != mStreamInfo.end(); it++) {
6044 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
6045 LOGH("Start Processing Channel mask=%d",
6046 channel->getStreamTypeMask());
6047 rc = channel->start();
6048 if (rc < 0) {
6049 LOGE("channel start failed");
6050 pthread_mutex_unlock(&mMutex);
6051 return rc;
6052 }
6053 }
6054
6055 if (mRawDumpChannel) {
6056 LOGD("Starting raw dump stream");
6057 rc = mRawDumpChannel->start();
6058 if (rc != NO_ERROR) {
6059 LOGE("Error Starting Raw Dump Channel");
6060 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6061 it != mStreamInfo.end(); it++) {
6062 QCamera3Channel *channel =
6063 (QCamera3Channel *)(*it)->stream->priv;
6064 LOGH("Stopping Processing Channel mask=%d",
6065 channel->getStreamTypeMask());
6066 channel->stop();
6067 }
6068 if (mSupportChannel)
6069 mSupportChannel->stop();
6070 if (mAnalysisChannel) {
6071 mAnalysisChannel->stop();
6072 }
6073 mMetadataChannel->stop();
6074 pthread_mutex_unlock(&mMutex);
6075 return rc;
6076 }
6077 }
6078
Chien-Yu Chence5b8662017-05-09 17:17:17 -07006079 // Configure modules for stream on.
Shuzhen Wang3c077d72017-04-20 22:48:59 -07006080 rc = mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
Chien-Yu Chence5b8662017-05-09 17:17:17 -07006081 mChannelHandle, /*start_sensor_streaming*/false);
Shuzhen Wang3c077d72017-04-20 22:48:59 -07006082 if (rc != NO_ERROR) {
6083 LOGE("start_channel failed %d", rc);
6084 pthread_mutex_unlock(&mMutex);
6085 return rc;
6086 }
Chien-Yu Chence5b8662017-05-09 17:17:17 -07006087
6088 {
6089 // Configure Easel for stream on.
Chien-Yu Chen77ccd022017-06-23 12:00:36 -07006090 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
Chien-Yu Chen605c3872017-06-14 11:09:23 -07006091
6092 // Now that sensor mode should have been selected, get the selected sensor mode
6093 // info.
6094 memset(&mSensorModeInfo, 0, sizeof(mSensorModeInfo));
6095 getCurrentSensorModeInfo(mSensorModeInfo);
6096
Chien-Yu Chence5b8662017-05-09 17:17:17 -07006097 if (EaselManagerClientOpened) {
6098 logEaselEvent("EASEL_STARTUP_LATENCY", "Starting MIPI");
Chien-Yu Chend77a5462017-06-02 18:00:38 -07006099 rc = gEaselManagerClient->startMipi(mCameraId, mSensorModeInfo.op_pixel_clk,
6100 /*enableCapture*/true);
Chien-Yu Chence5b8662017-05-09 17:17:17 -07006101 if (rc != OK) {
6102 ALOGE("%s: Failed to start MIPI rate for camera %u to %u", __FUNCTION__,
6103 mCameraId, mSensorModeInfo.op_pixel_clk);
6104 pthread_mutex_unlock(&mMutex);
6105 return rc;
6106 }
Chien-Yu Chene96475e2017-04-11 11:53:26 -07006107 logEaselEvent("EASEL_STARTUP_LATENCY", "Starting MIPI done");
Chien-Yu Chence5b8662017-05-09 17:17:17 -07006108 }
6109 }
6110
6111 // Start sensor streaming.
6112 rc = mCameraHandle->ops->start_sensor_streaming(mCameraHandle->camera_handle,
6113 mChannelHandle);
6114 if (rc != NO_ERROR) {
6115 LOGE("start_sensor_stream_on failed %d", rc);
6116 pthread_mutex_unlock(&mMutex);
6117 return rc;
6118 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07006119 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006120 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006121 }
6122
Chien-Yu Chenfe023bc2017-05-12 17:19:26 -07006123 // Enable HDR+ mode for the first PREVIEW_INTENT request.
Chenjie Luo4a761802017-06-13 17:35:54 +00006124 {
Chien-Yu Chen77ccd022017-06-23 12:00:36 -07006125 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
Chien-Yu Chend77a5462017-06-02 18:00:38 -07006126 if (gEaselManagerClient != nullptr && gEaselManagerClient->isEaselPresentOnDevice() &&
Chien-Yu Chenfe023bc2017-05-12 17:19:26 -07006127 !gEaselBypassOnly && !mFirstPreviewIntentSeen &&
6128 meta.exists(ANDROID_CONTROL_CAPTURE_INTENT) &&
6129 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0] ==
6130 ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW) {
Chien-Yu Chendeaebad2017-06-30 11:46:34 -07006131
6132 if (isSessionHdrPlusModeCompatible()) {
6133 rc = enableHdrPlusModeLocked();
6134 if (rc != OK) {
6135 LOGE("%s: Failed to open HDR+ asynchronously", __FUNCTION__);
6136 pthread_mutex_unlock(&mMutex);
6137 return rc;
6138 }
Chien-Yu Chenfe023bc2017-05-12 17:19:26 -07006139 }
6140
6141 mFirstPreviewIntentSeen = true;
6142 }
6143 }
6144
Thierry Strudel3d639192016-09-09 11:52:26 -07006145 LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
6146
6147 mState = STARTED;
6148 // Use a timed condition wait so a stalled backend cannot block this thread indefinitely
6149 struct timespec ts;
6150 uint8_t isValidTimeout = 1;
Shuzhen Wangfb961e52016-11-28 11:48:02 -08006151 rc = clock_gettime(CLOCK_MONOTONIC, &ts);
Thierry Strudel3d639192016-09-09 11:52:26 -07006152 if (rc < 0) {
6153 isValidTimeout = 0;
6154 LOGE("Error reading the real time clock!!");
6155 }
6156 else {
6157 // Use a 5 second timeout for the request to be honored
Chien-Yu Chene687bd02016-12-07 18:30:26 -08006158 int64_t timeout = 5;
6159 {
6160 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
6161 // If there is a pending HDR+ request, the following requests may be blocked until the
6162 // HDR+ request is done. So allow a longer timeout.
6163 if (mHdrPlusPendingRequests.size() > 0) {
6164 timeout = MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT;
6165 }
6166 }
6167 ts.tv_sec += timeout;
Thierry Strudel3d639192016-09-09 11:52:26 -07006168 }
6169 // Block on the condition variable until the number of in-flight requests drops below the minimum
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006170 while ((mPendingLiveRequest >= mMinInFlightRequests) && !pInputBuffer &&
Thierry Strudel3d639192016-09-09 11:52:26 -07006171 (mState != ERROR) && (mState != DEINIT)) {
6172 if (!isValidTimeout) {
6173 LOGD("Blocking on conditional wait");
6174 pthread_cond_wait(&mRequestCond, &mMutex);
6175 }
6176 else {
6177 LOGD("Blocking on timed conditional wait");
6178 rc = pthread_cond_timedwait(&mRequestCond, &mMutex, &ts);
6179 if (rc == ETIMEDOUT) {
6180 rc = -ENODEV;
6181 LOGE("Unblocked on timeout!!!!");
6182 break;
6183 }
6184 }
6185 LOGD("Unblocked");
6186 if (mWokenUpByDaemon) {
6187 mWokenUpByDaemon = false;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006188 if (mPendingLiveRequest < mMaxInFlightRequests)
Thierry Strudel3d639192016-09-09 11:52:26 -07006189 break;
6190 }
6191 }
6192 pthread_mutex_unlock(&mMutex);
6193
6194 return rc;
6195}
6196
6197/*===========================================================================
6198 * FUNCTION : dump
6199 *
6200 * DESCRIPTION: Dump HAL state (pending requests, pending buffers and the
6201 *              pending frame drop list) to the given file descriptor
6202 * PARAMETERS :
6203 *   @fd : file descriptor to write the dump output to (e.g. from dumpsys)
6204 *
6205 * RETURN : None
6206 *==========================================================================*/
6207void QCamera3HardwareInterface::dump(int fd)
6208{
6209 pthread_mutex_lock(&mMutex);
6210 dprintf(fd, "\n Camera HAL3 information Begin \n");
6211
6212 dprintf(fd, "\nNumber of pending requests: %zu \n",
6213 mPendingRequestsList.size());
6214 dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
6215 dprintf(fd, " Frame | Number of Buffers | Req Id: | Blob Req | Input buffer present\n");
6216 dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
6217 for(pendingRequestIterator i = mPendingRequestsList.begin();
6218 i != mPendingRequestsList.end(); i++) {
6219 dprintf(fd, " %5d | %17d | %11d | %8d | %p \n",
6220 i->frame_number, i->num_buffers, i->request_id, i->blob_request,
6221 i->input_buffer);
6222 }
6223 dprintf(fd, "\nPending buffer map: Number of buffers: %u\n",
6224 mPendingBuffersMap.get_num_overall_buffers());
6225 dprintf(fd, "-------+------------------\n");
6226 dprintf(fd, " Frame | Stream type mask \n");
6227 dprintf(fd, "-------+------------------\n");
6228 for(auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
6229 for(auto &j : req.mPendingBufferList) {
6230 QCamera3Channel *channel = (QCamera3Channel *)(j.stream->priv);
6231 dprintf(fd, " %5d | %11d \n",
6232 req.frame_number, channel->getStreamTypeMask());
6233 }
6234 }
6235 dprintf(fd, "-------+------------------\n");
6236
6237 dprintf(fd, "\nPending frame drop list: %zu\n",
6238 mPendingFrameDropList.size());
6239 dprintf(fd, "-------+-----------\n");
6240 dprintf(fd, " Frame | Stream ID \n");
6241 dprintf(fd, "-------+-----------\n");
6242 for(List<PendingFrameDropInfo>::iterator i = mPendingFrameDropList.begin();
6243 i != mPendingFrameDropList.end(); i++) {
6244 dprintf(fd, " %5d | %9d \n",
6245 i->frame_number, i->stream_ID);
6246 }
6247 dprintf(fd, "-------+-----------\n");
6248
6249 dprintf(fd, "\n Camera HAL3 information End \n");
6250
6251 /* use dumpsys media.camera as trigger to send update debug level event */
6252 mUpdateDebugLevel = true;
6253 pthread_mutex_unlock(&mMutex);
6254 return;
6255}
6256
6257/*===========================================================================
6258 * FUNCTION : flush
6259 *
6260 * DESCRIPTION: Calls stopAllChannels, notifyErrorForPendingRequests and
6261 * conditionally restarts channels
6262 *
6263 * PARAMETERS :
6264 * @ restartChannels: re-start all channels
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -07006265 * @ stopChannelImmediately: stop the channels immediately. This should be used
6266 * when the device has encountered an error and MIPI may
6267 * have been stopped.
Thierry Strudel3d639192016-09-09 11:52:26 -07006268 *
6269 * RETURN :
6270 * 0 on success
6271 * Error code on failure
6272 *==========================================================================*/
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -07006273int QCamera3HardwareInterface::flush(bool restartChannels, bool stopChannelImmediately)
Thierry Strudel3d639192016-09-09 11:52:26 -07006274{
Thierry Strudel9ec39c62016-12-28 11:30:05 -08006275 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_STOP_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07006276 int32_t rc = NO_ERROR;
6277
6278 LOGD("Unblocking Process Capture Request");
6279 pthread_mutex_lock(&mMutex);
6280 mFlush = true;
6281 pthread_mutex_unlock(&mMutex);
6282
6283 rc = stopAllChannels();
6284 // unlink of dualcam
6285 if (mIsDeviceLinked) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006286 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
6287 &m_pDualCamCmdPtr->bundle_info;
6288 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
Thierry Strudel3d639192016-09-09 11:52:26 -07006289 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
6290 pthread_mutex_lock(&gCamLock);
6291
6292 if (mIsMainCamera == 1) {
6293 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
6294 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
Thierry Strudel269c81a2016-10-12 12:13:59 -07006295 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel3d639192016-09-09 11:52:26 -07006296 // related session id should be session id of linked session
6297 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
6298 } else {
6299 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
6300 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
Thierry Strudel269c81a2016-10-12 12:13:59 -07006301 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel3d639192016-09-09 11:52:26 -07006302 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
6303 }
Thierry Strudel2896d122017-02-23 19:18:03 -08006304 m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
Thierry Strudel3d639192016-09-09 11:52:26 -07006305 pthread_mutex_unlock(&gCamLock);
6306
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006307 rc = mCameraHandle->ops->set_dual_cam_cmd(
6308 mCameraHandle->camera_handle);
Thierry Strudel3d639192016-09-09 11:52:26 -07006309 if (rc < 0) {
6310 LOGE("Dualcam: Unlink failed, but still proceed to close");
6311 }
6312 }
6313
6314 if (rc < 0) {
6315 LOGE("stopAllChannels failed");
6316 return rc;
6317 }
6318 if (mChannelHandle) {
6319 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -07006320 mChannelHandle, stopChannelImmediately);
Thierry Strudel3d639192016-09-09 11:52:26 -07006321 }
6322
6323 // Reset bundle info
6324 rc = setBundleInfo();
6325 if (rc < 0) {
6326 LOGE("setBundleInfo failed %d", rc);
6327 return rc;
6328 }
6329
6330 // Mutex Lock
6331 pthread_mutex_lock(&mMutex);
6332
6333 // Unblock process_capture_request
6334 mPendingLiveRequest = 0;
6335 pthread_cond_signal(&mRequestCond);
6336
6337 rc = notifyErrorForPendingRequests();
6338 if (rc < 0) {
6339 LOGE("notifyErrorForPendingRequests failed");
6340 pthread_mutex_unlock(&mMutex);
6341 return rc;
6342 }
6343
6344 mFlush = false;
6345
6346 // Start the Streams/Channels
6347 if (restartChannels) {
6348 rc = startAllChannels();
6349 if (rc < 0) {
6350 LOGE("startAllChannels failed");
6351 pthread_mutex_unlock(&mMutex);
6352 return rc;
6353 }
Thierry Strudel2896d122017-02-23 19:18:03 -08006354 if (mChannelHandle) {
6355 mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
Chien-Yu Chence5b8662017-05-09 17:17:17 -07006356 mChannelHandle, /*start_sensor_streaming*/true);
Thierry Strudel2896d122017-02-23 19:18:03 -08006357 if (rc < 0) {
6358 LOGE("start_channel failed");
6359 pthread_mutex_unlock(&mMutex);
6360 return rc;
6361 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006362 }
6363 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006364 pthread_mutex_unlock(&mMutex);
6365
6366 return 0;
6367}
6368
6369/*===========================================================================
6370 * FUNCTION : flushPerf
6371 *
6372 * DESCRIPTION: This is the performance-optimized version of flush that does
6373 * not use stream off; instead it flushes the backend directly
6374 *
6375 * PARAMETERS :
6376 *
6377 *
6378 * RETURN : 0 : success
6379 * -EINVAL: input is malformed (device is not valid)
6380 * -ENODEV: if the device has encountered a serious error
6381 *==========================================================================*/
6382int QCamera3HardwareInterface::flushPerf()
6383{
Thierry Strudel9ec39c62016-12-28 11:30:05 -08006384 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_STOP_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07006385 int32_t rc = 0;
6386 struct timespec timeout;
6387 bool timed_wait = false;
6388
6389 pthread_mutex_lock(&mMutex);
6390 mFlushPerf = true;
6391 mPendingBuffersMap.numPendingBufsAtFlush =
6392 mPendingBuffersMap.get_num_overall_buffers();
6393 LOGD("Calling flush. Wait for %d buffers to return",
6394 mPendingBuffersMap.numPendingBufsAtFlush);
6395
6396 /* send the flush event to the backend */
6397 rc = mCameraHandle->ops->flush(mCameraHandle->camera_handle);
6398 if (rc < 0) {
6399 LOGE("Error in flush: IOCTL failure");
6400 mFlushPerf = false;
6401 pthread_mutex_unlock(&mMutex);
6402 return -ENODEV;
6403 }
6404
6405 if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
6406 LOGD("No pending buffers in HAL, return flush");
6407 mFlushPerf = false;
6408 pthread_mutex_unlock(&mMutex);
6409 return rc;
6410 }
6411
6412 /* wait on a signal that buffers were received */
Shuzhen Wangfb961e52016-11-28 11:48:02 -08006413 rc = clock_gettime(CLOCK_MONOTONIC, &timeout);
Thierry Strudel3d639192016-09-09 11:52:26 -07006414 if (rc < 0) {
6415 LOGE("Error reading the real time clock, cannot use timed wait");
6416 } else {
6417 timeout.tv_sec += FLUSH_TIMEOUT;
6418 timed_wait = true;
6419 }
6420
6421 //Block on conditional variable
6422 while (mPendingBuffersMap.numPendingBufsAtFlush != 0) {
6423 LOGD("Waiting on mBuffersCond");
6424 if (!timed_wait) {
6425 rc = pthread_cond_wait(&mBuffersCond, &mMutex);
6426 if (rc != 0) {
6427 LOGE("pthread_cond_wait failed due to rc = %s",
6428 strerror(rc));
6429 break;
6430 }
6431 } else {
6432 rc = pthread_cond_timedwait(&mBuffersCond, &mMutex, &timeout);
6433 if (rc != 0) {
6434 LOGE("pthread_cond_timedwait failed due to rc = %s",
6435 strerror(rc));
6436 break;
6437 }
6438 }
6439 }
6440 if (rc != 0) {
6441 mFlushPerf = false;
6442 pthread_mutex_unlock(&mMutex);
6443 return -ENODEV;
6444 }
6445
6446 LOGD("Received buffers, now safe to return them");
6447
6448 //make sure the channels handle flush
6449 //currently only required for the picture channel to release snapshot resources
6450 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6451 it != mStreamInfo.end(); it++) {
6452 QCamera3Channel *channel = (*it)->channel;
6453 if (channel) {
6454 rc = channel->flush();
6455 if (rc) {
6456 LOGE("Flushing the channels failed with error %d", rc);
6457                // Even though the channel flush failed, continue and return
6458                // the buffers we hold to the framework; the overall return
6459                // value will still indicate an error
6460 rc = -ENODEV;
6461 }
6462 }
6463 }
6464
6465 /* notify the frameworks and send errored results */
6466 rc = notifyErrorForPendingRequests();
6467 if (rc < 0) {
6468 LOGE("notifyErrorForPendingRequests failed");
6469 pthread_mutex_unlock(&mMutex);
6470 return rc;
6471 }
6472
6473 //unblock process_capture_request
6474 mPendingLiveRequest = 0;
6475 unblockRequestIfNecessary();
6476
6477 mFlushPerf = false;
6478 pthread_mutex_unlock(&mMutex);
6479 LOGD ("Flush Operation complete. rc = %d", rc);
6480 return rc;
6481}
6482
6483/*===========================================================================
6484 * FUNCTION : handleCameraDeviceError
6485 *
6486 * DESCRIPTION: This function calls internal flush and notifies the error to
6487 * DESCRIPTION: This function performs an internal flush, notifies the
6488 *              framework of the device error, and updates the state variable.
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -07006489 * PARAMETERS :
6490 * @stopChannelImmediately : stop channels immediately without waiting for
6491 * frame boundary.
Thierry Strudel3d639192016-09-09 11:52:26 -07006492 *
6493 * RETURN : NO_ERROR on Success
6494 * Error code on failure
6495 *==========================================================================*/
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -07006496int32_t QCamera3HardwareInterface::handleCameraDeviceError(bool stopChannelImmediately)
Thierry Strudel3d639192016-09-09 11:52:26 -07006497{
6498 int32_t rc = NO_ERROR;
6499
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006500 {
6501 Mutex::Autolock lock(mFlushLock);
6502 pthread_mutex_lock(&mMutex);
6503 if (mState != ERROR) {
6504 //if mState != ERROR, nothing to be done
6505 pthread_mutex_unlock(&mMutex);
6506 return NO_ERROR;
6507 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006508 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07006509
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -07006510 rc = flush(false /* restart channels */, stopChannelImmediately);
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006511 if (NO_ERROR != rc) {
6512 LOGE("internal flush to handle mState = ERROR failed");
6513 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006514
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006515 pthread_mutex_lock(&mMutex);
6516 mState = DEINIT;
6517 pthread_mutex_unlock(&mMutex);
6518 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006519
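    // The device-error notification is sent only after mFlushLock and mMutex
    // have been released above, so the framework callback is not invoked while
    // HAL locks are held.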
6520 camera3_notify_msg_t notify_msg;
6521 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
6522 notify_msg.type = CAMERA3_MSG_ERROR;
6523 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_DEVICE;
6524 notify_msg.message.error.error_stream = NULL;
6525 notify_msg.message.error.frame_number = 0;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006526 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -07006527
6528 return rc;
6529}
6530
6531/*===========================================================================
6532 * FUNCTION : captureResultCb
6533 *
6534 * DESCRIPTION: Callback handler for all capture results
6535 *              (stream buffers as well as metadata)
6536 *
6537 * PARAMETERS :
6538 * @metadata : metadata information
6539 * @buffer : actual gralloc buffer to be returned to frameworks.
6540 * NULL if metadata.
6541 *
6542 * RETURN : NONE
6543 *==========================================================================*/
6544void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf,
6545 camera3_stream_buffer_t *buffer, uint32_t frame_number, bool isInputBuffer)
6546{
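    // Dispatch on result type: metadata goes through the batch or non-batch
    // metadata path, while input-buffer completions and output buffers are
    // handled under mMutex by their respective handlers below.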
6547 if (metadata_buf) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006548 pthread_mutex_lock(&mMutex);
6549 uint8_t batchSize = mBatchSize;
6550 pthread_mutex_unlock(&mMutex);
6551 if (batchSize) {
Thierry Strudel3d639192016-09-09 11:52:26 -07006552 handleBatchMetadata(metadata_buf,
6553 true /* free_and_bufdone_meta_buf */);
6554 } else { /* mBatchSize = 0 */
6555 hdrPlusPerfLock(metadata_buf);
6556 pthread_mutex_lock(&mMutex);
6557 handleMetadataWithLock(metadata_buf,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006558 true /* free_and_bufdone_meta_buf */,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006559 true /* last urgent frame of batch metadata */,
6560 true /* last frame of batch metadata */,
Thierry Strudel54dc9782017-02-15 12:12:10 -08006561 NULL);
Thierry Strudel3d639192016-09-09 11:52:26 -07006562 pthread_mutex_unlock(&mMutex);
6563 }
6564 } else if (isInputBuffer) {
6565 pthread_mutex_lock(&mMutex);
6566 handleInputBufferWithLock(frame_number);
6567 pthread_mutex_unlock(&mMutex);
6568 } else {
6569 pthread_mutex_lock(&mMutex);
6570 handleBufferWithLock(buffer, frame_number);
6571 pthread_mutex_unlock(&mMutex);
6572 }
6573 return;
6574}
6575
6576/*===========================================================================
6577 * FUNCTION : getReprocessibleOutputStreamId
6578 *
6579 * DESCRIPTION: Get source output stream id for the input reprocess stream
6580 * based on size and format, which would be the largest
6581 * output stream if an input stream exists.
6582 *
6583 * PARAMETERS :
6584 * @id : return the stream id if found
6585 *
6586 * RETURN : int32_t type of status
6587 * NO_ERROR -- success
6588 *              non-zero failure code
6589 *==========================================================================*/
6590int32_t QCamera3HardwareInterface::getReprocessibleOutputStreamId(uint32_t &id)
6591{
6592    /* Check if there is any output or bidirectional stream with the same size
6593       and format, and return that stream */
6594 if ((mInputStreamInfo.dim.width > 0) &&
6595 (mInputStreamInfo.dim.height > 0)) {
6596 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6597 it != mStreamInfo.end(); it++) {
6598
6599 camera3_stream_t *stream = (*it)->stream;
6600 if ((stream->width == (uint32_t)mInputStreamInfo.dim.width) &&
6601 (stream->height == (uint32_t)mInputStreamInfo.dim.height) &&
6602 (stream->format == mInputStreamInfo.format)) {
6603 // Usage flag for an input stream and the source output stream
6604 // may be different.
6605 LOGD("Found reprocessible output stream! %p", *it);
6606 LOGD("input stream usage 0x%x, current stream usage 0x%x",
6607 stream->usage, mInputStreamInfo.usage);
6608
6609 QCamera3Channel *channel = (QCamera3Channel *)stream->priv;
6610 if (channel != NULL && channel->mStreams[0]) {
6611 id = channel->mStreams[0]->getMyServerID();
6612 return NO_ERROR;
6613 }
6614 }
6615 }
6616 } else {
6617 LOGD("No input stream, so no reprocessible output stream");
6618 }
6619 return NAME_NOT_FOUND;
6620}
6621
6622/*===========================================================================
6623 * FUNCTION : lookupFwkName
6624 *
6625 * DESCRIPTION: In case the enum is not the same in the framework and backend,
6626 *              make sure the parameter is correctly propagated
6627 *
6628 * PARAMETERS :
6629 * @arr : map between the two enums
6630 * @len : len of the map
6631 * @hal_name : name of the hal_parm to map
6632 *
6633 * RETURN : int type of status
6634 * fwk_name -- success
6635 * none-zero failure code
6636 *              non-zero failure code
6637template <typename halType, class mapType> int lookupFwkName(const mapType *arr,
6638 size_t len, halType hal_name)
6639{
6640
6641 for (size_t i = 0; i < len; i++) {
6642 if (arr[i].hal_name == hal_name) {
6643 return arr[i].fwk_name;
6644 }
6645 }
6646
6647    /* Failing to find a matching framework type is not necessarily an error.
6648     * This happens when mm-camera supports more attributes than the
6649     * framework does */
6650 LOGH("Cannot find matching framework type");
6651 return NAME_NOT_FOUND;
6652}
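/* Illustrative usage (sketch, not part of the original flow): map a backend
 * scene mode to its framework counterpart with one of the HAL<->framework
 * maps, as done later in translateFromHalMetadata():
 *   int val = lookupFwkName(SCENE_MODES_MAP,
 *           METADATA_MAP_SIZE(SCENE_MODES_MAP), *sceneMode);
 *   if (NAME_NOT_FOUND != val) { uint8_t fwkSceneMode = (uint8_t)val; }
 */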
6653
6654/*===========================================================================
6655 * FUNCTION : lookupHalName
6656 *
6657 * DESCRIPTION: In case the enum is not the same in the framework and backend,
6658 *              make sure the parameter is correctly propagated
6659 *
6660 * PARAMETERS :
6661 * @arr : map between the two enums
6662 * @len : len of the map
6663 * @fwk_name : name of the hal_parm to map
6664 *   @fwk_name : name of the framework parameter to map
6665 * RETURN : int32_t type of status
6666 * hal_name -- success
6667 * none-zero failure code
6668 *              non-zero failure code
6669template <typename fwkType, class mapType> int lookupHalName(const mapType *arr,
6670 size_t len, fwkType fwk_name)
6671{
6672 for (size_t i = 0; i < len; i++) {
6673 if (arr[i].fwk_name == fwk_name) {
6674 return arr[i].hal_name;
6675 }
6676 }
6677
6678 LOGE("Cannot find matching hal type fwk_name=%d", fwk_name);
6679 return NAME_NOT_FOUND;
6680}
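/* Illustrative usage (sketch, mirroring lookupFwkName above): map a framework
 * enum from a capture request back to the backend value, e.g.
 *   int val = lookupHalName(SCENE_MODES_MAP,
 *           METADATA_MAP_SIZE(SCENE_MODES_MAP), fwkSceneMode);
 *   if (NAME_NOT_FOUND != val) { uint8_t halSceneMode = (uint8_t)val; }
 * where halSceneMode is only a placeholder name for the mapped backend value.
 */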
6681
6682/*===========================================================================
6683 * FUNCTION : lookupProp
6684 *
6685 * DESCRIPTION: lookup a value by its name
6686 *
6687 * PARAMETERS :
6688 * @arr : map between the two enums
6689 * @len : size of the map
6690 * @name : name to be looked up
6691 *
6692 * RETURN : Value if found
6693 * CAM_CDS_MODE_MAX if not found
6694 *==========================================================================*/
6695template <class mapType> cam_cds_mode_type_t lookupProp(const mapType *arr,
6696 size_t len, const char *name)
6697{
6698 if (name) {
6699 for (size_t i = 0; i < len; i++) {
6700 if (!strcmp(arr[i].desc, name)) {
6701 return arr[i].val;
6702 }
6703 }
6704 }
6705 return CAM_CDS_MODE_MAX;
6706}
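/* Illustrative usage (sketch; the property string and map name below are
 * assumptions for illustration only):
 *   char prop[PROPERTY_VALUE_MAX];
 *   property_get("persist.camera.CDS", prop, "Auto");
 *   cam_cds_mode_type_t cds_mode = lookupProp(CDS_MAP,
 *           METADATA_MAP_SIZE(CDS_MAP), prop);
 */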
6707
6708/*===========================================================================
6709 * FUNCTION   : translateFromHalMetadata
6710 * DESCRIPTION: Translate the HAL metadata buffer into the camera_metadata_t format expected by the framework
6711 *
6712 * PARAMETERS :
6713 * @metadata : metadata information from callback
Shuzhen Wang181c57b2017-07-21 11:39:44 -07006714 * @pendingRequest: pending request for this metadata
Thierry Strudel3d639192016-09-09 11:52:26 -07006715 * @pprocDone: whether internal offline postprocessing is done
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006716 * @lastMetadataInBatch: Boolean to indicate whether this is the last metadata
6717 * in a batch. Always true for non-batch mode.
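 * @enableZsl : pointer to the ZSL-enable setting requested for this capture,
 *                  when available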
Thierry Strudel3d639192016-09-09 11:52:26 -07006718 *
6719 * RETURN : camera_metadata_t*
6720 * metadata in a format specified by fwk
6721 *==========================================================================*/
6722camera_metadata_t*
6723QCamera3HardwareInterface::translateFromHalMetadata(
6724 metadata_buffer_t *metadata,
Shuzhen Wang181c57b2017-07-21 11:39:44 -07006725 const PendingRequestInfo& pendingRequest,
Thierry Strudel3d639192016-09-09 11:52:26 -07006726 bool pprocDone,
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07006727 bool lastMetadataInBatch,
6728 const bool *enableZsl)
Thierry Strudel3d639192016-09-09 11:52:26 -07006729{
6730 CameraMetadata camMetadata;
6731 camera_metadata_t *resultMetadata;
6732
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006733 if (!lastMetadataInBatch) {
Shuzhen Wang8fe62792017-03-20 16:10:25 -07006734        /* In batch mode, when this is not the last metadata in the batch, populate
6735         * only SENSOR_TIMESTAMP. The timestamp is needed for the shutter
6736         * notification calculation. */
Shuzhen Wang181c57b2017-07-21 11:39:44 -07006737 camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &pendingRequest.timestamp, 1);
Shuzhen Wang8fe62792017-03-20 16:10:25 -07006738 resultMetadata = camMetadata.release();
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006739 return resultMetadata;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006740 }
6741
Shuzhen Wang181c57b2017-07-21 11:39:44 -07006742 if (pendingRequest.jpegMetadata.entryCount())
6743 camMetadata.append(pendingRequest.jpegMetadata);
Thierry Strudel3d639192016-09-09 11:52:26 -07006744
Shuzhen Wang181c57b2017-07-21 11:39:44 -07006745 camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &pendingRequest.timestamp, 1);
6746 camMetadata.update(ANDROID_REQUEST_ID, &pendingRequest.request_id, 1);
6747 camMetadata.update(ANDROID_REQUEST_PIPELINE_DEPTH, &pendingRequest.pipeline_depth, 1);
6748 camMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &pendingRequest.capture_intent, 1);
6749 camMetadata.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &pendingRequest.hybrid_ae_enable, 1);
Samuel Ha68ba5172016-12-15 18:41:12 -08006750 if (mBatchSize == 0) {
6751 // DevCamDebug metadata translateFromHalMetadata. Only update this one for non-HFR mode
Shuzhen Wang181c57b2017-07-21 11:39:44 -07006752 camMetadata.update(DEVCAMDEBUG_META_ENABLE, &pendingRequest.DevCamDebug_meta_enable, 1);
Samuel Ha68ba5172016-12-15 18:41:12 -08006753 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006754
Samuel Ha68ba5172016-12-15 18:41:12 -08006755 // atrace_begin(ATRACE_TAG_ALWAYS, "DevCamDebugInfo");
6756    // Only update DevCamDebug metadata conditionally: non-HFR mode and it is enabled.
Shuzhen Wang181c57b2017-07-21 11:39:44 -07006757 if (mBatchSize == 0 && pendingRequest.DevCamDebug_meta_enable != 0) {
Samuel Ha68ba5172016-12-15 18:41:12 -08006758 // DevCamDebug metadata translateFromHalMetadata AF
6759 IF_META_AVAILABLE(int32_t, DevCamDebug_af_lens_position,
6760 CAM_INTF_META_DEV_CAM_AF_LENS_POSITION, metadata) {
6761 int32_t fwk_DevCamDebug_af_lens_position = *DevCamDebug_af_lens_position;
6762 camMetadata.update(DEVCAMDEBUG_AF_LENS_POSITION, &fwk_DevCamDebug_af_lens_position, 1);
6763 }
6764 IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_confidence,
6765 CAM_INTF_META_DEV_CAM_AF_TOF_CONFIDENCE, metadata) {
6766 int32_t fwk_DevCamDebug_af_tof_confidence = *DevCamDebug_af_tof_confidence;
6767 camMetadata.update(DEVCAMDEBUG_AF_TOF_CONFIDENCE, &fwk_DevCamDebug_af_tof_confidence, 1);
6768 }
6769 IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_distance,
6770 CAM_INTF_META_DEV_CAM_AF_TOF_DISTANCE, metadata) {
6771 int32_t fwk_DevCamDebug_af_tof_distance = *DevCamDebug_af_tof_distance;
6772 camMetadata.update(DEVCAMDEBUG_AF_TOF_DISTANCE, &fwk_DevCamDebug_af_tof_distance, 1);
6773 }
6774 IF_META_AVAILABLE(int32_t, DevCamDebug_af_luma,
6775 CAM_INTF_META_DEV_CAM_AF_LUMA, metadata) {
6776 int32_t fwk_DevCamDebug_af_luma = *DevCamDebug_af_luma;
6777 camMetadata.update(DEVCAMDEBUG_AF_LUMA, &fwk_DevCamDebug_af_luma, 1);
6778 }
6779 IF_META_AVAILABLE(int32_t, DevCamDebug_af_haf_state,
6780 CAM_INTF_META_DEV_CAM_AF_HAF_STATE, metadata) {
6781 int32_t fwk_DevCamDebug_af_haf_state = *DevCamDebug_af_haf_state;
6782 camMetadata.update(DEVCAMDEBUG_AF_HAF_STATE, &fwk_DevCamDebug_af_haf_state, 1);
6783 }
6784 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_target_pos,
6785 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_TARGET_POS, metadata) {
6786 int32_t fwk_DevCamDebug_af_monitor_pdaf_target_pos =
6787 *DevCamDebug_af_monitor_pdaf_target_pos;
6788 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
6789 &fwk_DevCamDebug_af_monitor_pdaf_target_pos, 1);
6790 }
6791 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_confidence,
6792 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_CONFIDENCE, metadata) {
6793 int32_t fwk_DevCamDebug_af_monitor_pdaf_confidence =
6794 *DevCamDebug_af_monitor_pdaf_confidence;
6795 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
6796 &fwk_DevCamDebug_af_monitor_pdaf_confidence, 1);
6797 }
6798 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_refocus,
6799 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_REFOCUS, metadata) {
6800 int32_t fwk_DevCamDebug_af_monitor_pdaf_refocus = *DevCamDebug_af_monitor_pdaf_refocus;
6801 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
6802 &fwk_DevCamDebug_af_monitor_pdaf_refocus, 1);
6803 }
6804 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_target_pos,
6805 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_TARGET_POS, metadata) {
6806 int32_t fwk_DevCamDebug_af_monitor_tof_target_pos =
6807 *DevCamDebug_af_monitor_tof_target_pos;
6808 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
6809 &fwk_DevCamDebug_af_monitor_tof_target_pos, 1);
6810 }
6811 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_confidence,
6812 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_CONFIDENCE, metadata) {
6813 int32_t fwk_DevCamDebug_af_monitor_tof_confidence =
6814 *DevCamDebug_af_monitor_tof_confidence;
6815 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
6816 &fwk_DevCamDebug_af_monitor_tof_confidence, 1);
6817 }
6818 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_refocus,
6819 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_REFOCUS, metadata) {
6820 int32_t fwk_DevCamDebug_af_monitor_tof_refocus = *DevCamDebug_af_monitor_tof_refocus;
6821 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
6822 &fwk_DevCamDebug_af_monitor_tof_refocus, 1);
6823 }
6824 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_type_select,
6825 CAM_INTF_META_DEV_CAM_AF_MONITOR_TYPE_SELECT, metadata) {
6826 int32_t fwk_DevCamDebug_af_monitor_type_select = *DevCamDebug_af_monitor_type_select;
6827 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
6828 &fwk_DevCamDebug_af_monitor_type_select, 1);
6829 }
6830 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_refocus,
6831 CAM_INTF_META_DEV_CAM_AF_MONITOR_REFOCUS, metadata) {
6832 int32_t fwk_DevCamDebug_af_monitor_refocus = *DevCamDebug_af_monitor_refocus;
6833 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_REFOCUS,
6834 &fwk_DevCamDebug_af_monitor_refocus, 1);
6835 }
6836 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_target_pos,
6837 CAM_INTF_META_DEV_CAM_AF_MONITOR_TARGET_POS, metadata) {
6838 int32_t fwk_DevCamDebug_af_monitor_target_pos = *DevCamDebug_af_monitor_target_pos;
6839 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
6840 &fwk_DevCamDebug_af_monitor_target_pos, 1);
6841 }
6842 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_target_pos,
6843 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_TARGET_POS, metadata) {
6844 int32_t fwk_DevCamDebug_af_search_pdaf_target_pos =
6845 *DevCamDebug_af_search_pdaf_target_pos;
6846 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
6847 &fwk_DevCamDebug_af_search_pdaf_target_pos, 1);
6848 }
6849 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_next_pos,
6850 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEXT_POS, metadata) {
6851 int32_t fwk_DevCamDebug_af_search_pdaf_next_pos = *DevCamDebug_af_search_pdaf_next_pos;
6852 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
6853 &fwk_DevCamDebug_af_search_pdaf_next_pos, 1);
6854 }
6855 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_near_pos,
6856 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEAR_POS, metadata) {
6857 int32_t fwk_DevCamDebug_af_search_pdaf_near_pos = *DevCamDebug_af_search_pdaf_near_pos;
6858 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
6859 &fwk_DevCamDebug_af_search_pdaf_near_pos, 1);
6860 }
6861 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_far_pos,
6862 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_FAR_POS, metadata) {
6863 int32_t fwk_DevCamDebug_af_search_pdaf_far_pos = *DevCamDebug_af_search_pdaf_far_pos;
6864 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
6865 &fwk_DevCamDebug_af_search_pdaf_far_pos, 1);
6866 }
6867 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_confidence,
6868 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_CONFIDENCE, metadata) {
6869 int32_t fwk_DevCamDebug_af_search_pdaf_confidence = *DevCamDebug_af_search_pdaf_confidence;
6870 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
6871 &fwk_DevCamDebug_af_search_pdaf_confidence, 1);
6872 }
6873 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_target_pos,
6874 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_TARGET_POS, metadata) {
6875 int32_t fwk_DevCamDebug_af_search_tof_target_pos =
6876 *DevCamDebug_af_search_tof_target_pos;
6877 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
6878 &fwk_DevCamDebug_af_search_tof_target_pos, 1);
6879 }
6880 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_next_pos,
6881 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEXT_POS, metadata) {
6882 int32_t fwk_DevCamDebug_af_search_tof_next_pos = *DevCamDebug_af_search_tof_next_pos;
6883 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
6884 &fwk_DevCamDebug_af_search_tof_next_pos, 1);
6885 }
6886 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_near_pos,
6887 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEAR_POS, metadata) {
6888 int32_t fwk_DevCamDebug_af_search_tof_near_pos = *DevCamDebug_af_search_tof_near_pos;
6889 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
6890 &fwk_DevCamDebug_af_search_tof_near_pos, 1);
6891 }
6892 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_far_pos,
6893 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_FAR_POS, metadata) {
6894 int32_t fwk_DevCamDebug_af_search_tof_far_pos = *DevCamDebug_af_search_tof_far_pos;
6895 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
6896 &fwk_DevCamDebug_af_search_tof_far_pos, 1);
6897 }
6898 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_confidence,
6899 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_CONFIDENCE, metadata) {
6900 int32_t fwk_DevCamDebug_af_search_tof_confidence = *DevCamDebug_af_search_tof_confidence;
6901 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
6902 &fwk_DevCamDebug_af_search_tof_confidence, 1);
6903 }
6904 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_type_select,
6905 CAM_INTF_META_DEV_CAM_AF_SEARCH_TYPE_SELECT, metadata) {
6906 int32_t fwk_DevCamDebug_af_search_type_select = *DevCamDebug_af_search_type_select;
6907 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
6908 &fwk_DevCamDebug_af_search_type_select, 1);
6909 }
6910 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_next_pos,
6911 CAM_INTF_META_DEV_CAM_AF_SEARCH_NEXT_POS, metadata) {
6912 int32_t fwk_DevCamDebug_af_search_next_pos = *DevCamDebug_af_search_next_pos;
6913 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
6914 &fwk_DevCamDebug_af_search_next_pos, 1);
6915 }
6916 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_target_pos,
6917 CAM_INTF_META_DEV_CAM_AF_SEARCH_TARGET_POS, metadata) {
6918 int32_t fwk_DevCamDebug_af_search_target_pos = *DevCamDebug_af_search_target_pos;
6919 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
6920 &fwk_DevCamDebug_af_search_target_pos, 1);
6921 }
6922 // DevCamDebug metadata translateFromHalMetadata AEC
6923 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_target_luma,
6924 CAM_INTF_META_DEV_CAM_AEC_TARGET_LUMA, metadata) {
6925 int32_t fwk_DevCamDebug_aec_target_luma = *DevCamDebug_aec_target_luma;
6926 camMetadata.update(DEVCAMDEBUG_AEC_TARGET_LUMA, &fwk_DevCamDebug_aec_target_luma, 1);
6927 }
6928 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_comp_luma,
6929 CAM_INTF_META_DEV_CAM_AEC_COMP_LUMA, metadata) {
6930 int32_t fwk_DevCamDebug_aec_comp_luma = *DevCamDebug_aec_comp_luma;
6931 camMetadata.update(DEVCAMDEBUG_AEC_COMP_LUMA, &fwk_DevCamDebug_aec_comp_luma, 1);
6932 }
6933 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_avg_luma,
6934 CAM_INTF_META_DEV_CAM_AEC_AVG_LUMA, metadata) {
6935 int32_t fwk_DevCamDebug_aec_avg_luma = *DevCamDebug_aec_avg_luma;
6936 camMetadata.update(DEVCAMDEBUG_AEC_AVG_LUMA, &fwk_DevCamDebug_aec_avg_luma, 1);
6937 }
6938 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_cur_luma,
6939 CAM_INTF_META_DEV_CAM_AEC_CUR_LUMA, metadata) {
6940 int32_t fwk_DevCamDebug_aec_cur_luma = *DevCamDebug_aec_cur_luma;
6941 camMetadata.update(DEVCAMDEBUG_AEC_CUR_LUMA, &fwk_DevCamDebug_aec_cur_luma, 1);
6942 }
6943 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_linecount,
6944 CAM_INTF_META_DEV_CAM_AEC_LINECOUNT, metadata) {
6945 int32_t fwk_DevCamDebug_aec_linecount = *DevCamDebug_aec_linecount;
6946 camMetadata.update(DEVCAMDEBUG_AEC_LINECOUNT, &fwk_DevCamDebug_aec_linecount, 1);
6947 }
6948 IF_META_AVAILABLE(float, DevCamDebug_aec_real_gain,
6949 CAM_INTF_META_DEV_CAM_AEC_REAL_GAIN, metadata) {
6950 float fwk_DevCamDebug_aec_real_gain = *DevCamDebug_aec_real_gain;
6951 camMetadata.update(DEVCAMDEBUG_AEC_REAL_GAIN, &fwk_DevCamDebug_aec_real_gain, 1);
6952 }
6953 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_exp_index,
6954 CAM_INTF_META_DEV_CAM_AEC_EXP_INDEX, metadata) {
6955 int32_t fwk_DevCamDebug_aec_exp_index = *DevCamDebug_aec_exp_index;
6956 camMetadata.update(DEVCAMDEBUG_AEC_EXP_INDEX, &fwk_DevCamDebug_aec_exp_index, 1);
6957 }
6958 IF_META_AVAILABLE(float, DevCamDebug_aec_lux_idx,
6959 CAM_INTF_META_DEV_CAM_AEC_LUX_IDX, metadata) {
6960 float fwk_DevCamDebug_aec_lux_idx = *DevCamDebug_aec_lux_idx;
6961 camMetadata.update(DEVCAMDEBUG_AEC_LUX_IDX, &fwk_DevCamDebug_aec_lux_idx, 1);
6962 }
Samuel Ha34229982017-02-17 13:51:11 -08006963 // DevCamDebug metadata translateFromHalMetadata zzHDR
6964 IF_META_AVAILABLE(float, DevCamDebug_aec_l_real_gain,
6965 CAM_INTF_META_DEV_CAM_AEC_L_REAL_GAIN, metadata) {
6966 float fwk_DevCamDebug_aec_l_real_gain = *DevCamDebug_aec_l_real_gain;
6967 camMetadata.update(DEVCAMDEBUG_AEC_L_REAL_GAIN, &fwk_DevCamDebug_aec_l_real_gain, 1);
6968 }
6969 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_l_linecount,
6970 CAM_INTF_META_DEV_CAM_AEC_L_LINECOUNT, metadata) {
Samuel Hae80fbd52017-03-29 16:14:46 -07006971 int32_t fwk_DevCamDebug_aec_l_linecount = *DevCamDebug_aec_l_linecount;
Samuel Ha34229982017-02-17 13:51:11 -08006972 camMetadata.update(DEVCAMDEBUG_AEC_L_LINECOUNT, &fwk_DevCamDebug_aec_l_linecount, 1);
6973 }
6974 IF_META_AVAILABLE(float, DevCamDebug_aec_s_real_gain,
6975 CAM_INTF_META_DEV_CAM_AEC_S_REAL_GAIN, metadata) {
6976 float fwk_DevCamDebug_aec_s_real_gain = *DevCamDebug_aec_s_real_gain;
6977 camMetadata.update(DEVCAMDEBUG_AEC_S_REAL_GAIN, &fwk_DevCamDebug_aec_s_real_gain, 1);
6978 }
6979 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_s_linecount,
6980 CAM_INTF_META_DEV_CAM_AEC_S_LINECOUNT, metadata) {
Samuel Hae80fbd52017-03-29 16:14:46 -07006981 int32_t fwk_DevCamDebug_aec_s_linecount = *DevCamDebug_aec_s_linecount;
Samuel Ha34229982017-02-17 13:51:11 -08006982 camMetadata.update(DEVCAMDEBUG_AEC_S_LINECOUNT, &fwk_DevCamDebug_aec_s_linecount, 1);
6983 }
6984 IF_META_AVAILABLE(float, DevCamDebug_aec_hdr_sensitivity_ratio,
6985 CAM_INTF_META_DEV_CAM_AEC_HDR_SENSITIVITY_RATIO, metadata) {
6986 float fwk_DevCamDebug_aec_hdr_sensitivity_ratio =
6987 *DevCamDebug_aec_hdr_sensitivity_ratio;
6988 camMetadata.update(DEVCAMDEBUG_AEC_HDR_SENSITIVITY_RATIO,
6989 &fwk_DevCamDebug_aec_hdr_sensitivity_ratio, 1);
6990 }
6991 IF_META_AVAILABLE(float, DevCamDebug_aec_hdr_exp_time_ratio,
6992 CAM_INTF_META_DEV_CAM_AEC_HDR_EXP_TIME_RATIO, metadata) {
6993 float fwk_DevCamDebug_aec_hdr_exp_time_ratio = *DevCamDebug_aec_hdr_exp_time_ratio;
6994 camMetadata.update(DEVCAMDEBUG_AEC_HDR_EXP_TIME_RATIO,
6995 &fwk_DevCamDebug_aec_hdr_exp_time_ratio, 1);
6996 }
6997 // DevCamDebug metadata translateFromHalMetadata ADRC
6998 IF_META_AVAILABLE(float, DevCamDebug_aec_total_drc_gain,
6999 CAM_INTF_META_DEV_CAM_AEC_TOTAL_DRC_GAIN, metadata) {
7000 float fwk_DevCamDebug_aec_total_drc_gain = *DevCamDebug_aec_total_drc_gain;
7001 camMetadata.update(DEVCAMDEBUG_AEC_TOTAL_DRC_GAIN,
7002 &fwk_DevCamDebug_aec_total_drc_gain, 1);
7003 }
7004 IF_META_AVAILABLE(float, DevCamDebug_aec_color_drc_gain,
7005 CAM_INTF_META_DEV_CAM_AEC_COLOR_DRC_GAIN, metadata) {
7006 float fwk_DevCamDebug_aec_color_drc_gain = *DevCamDebug_aec_color_drc_gain;
7007 camMetadata.update(DEVCAMDEBUG_AEC_COLOR_DRC_GAIN,
7008 &fwk_DevCamDebug_aec_color_drc_gain, 1);
7009 }
7010 IF_META_AVAILABLE(float, DevCamDebug_aec_gtm_ratio,
7011 CAM_INTF_META_DEV_CAM_AEC_GTM_RATIO, metadata) {
7012 float fwk_DevCamDebug_aec_gtm_ratio = *DevCamDebug_aec_gtm_ratio;
7013 camMetadata.update(DEVCAMDEBUG_AEC_GTM_RATIO, &fwk_DevCamDebug_aec_gtm_ratio, 1);
7014 }
7015 IF_META_AVAILABLE(float, DevCamDebug_aec_ltm_ratio,
7016 CAM_INTF_META_DEV_CAM_AEC_LTM_RATIO, metadata) {
7017 float fwk_DevCamDebug_aec_ltm_ratio = *DevCamDebug_aec_ltm_ratio;
7018 camMetadata.update(DEVCAMDEBUG_AEC_LTM_RATIO, &fwk_DevCamDebug_aec_ltm_ratio, 1);
7019 }
7020 IF_META_AVAILABLE(float, DevCamDebug_aec_la_ratio,
7021 CAM_INTF_META_DEV_CAM_AEC_LA_RATIO, metadata) {
7022 float fwk_DevCamDebug_aec_la_ratio = *DevCamDebug_aec_la_ratio;
7023 camMetadata.update(DEVCAMDEBUG_AEC_LA_RATIO, &fwk_DevCamDebug_aec_la_ratio, 1);
7024 }
7025 IF_META_AVAILABLE(float, DevCamDebug_aec_gamma_ratio,
7026 CAM_INTF_META_DEV_CAM_AEC_GAMMA_RATIO, metadata) {
7027 float fwk_DevCamDebug_aec_gamma_ratio = *DevCamDebug_aec_gamma_ratio;
7028 camMetadata.update(DEVCAMDEBUG_AEC_GAMMA_RATIO, &fwk_DevCamDebug_aec_gamma_ratio, 1);
7029 }
Samuel Habdf4fac2017-07-28 17:21:18 -07007030 // DevCamDebug metadata translateFromHalMetadata AEC MOTION
7031 IF_META_AVAILABLE(float, DevCamDebug_aec_camera_motion_dx,
7032 CAM_INTF_META_DEV_CAM_AEC_CAMERA_MOTION_DX, metadata) {
7033 float fwk_DevCamDebug_aec_camera_motion_dx = *DevCamDebug_aec_camera_motion_dx;
7034 camMetadata.update(DEVCAMDEBUG_AEC_CAMERA_MOTION_DX,
7035 &fwk_DevCamDebug_aec_camera_motion_dx, 1);
7036 }
7037 IF_META_AVAILABLE(float, DevCamDebug_aec_camera_motion_dy,
7038 CAM_INTF_META_DEV_CAM_AEC_CAMERA_MOTION_DY, metadata) {
7039 float fwk_DevCamDebug_aec_camera_motion_dy = *DevCamDebug_aec_camera_motion_dy;
7040 camMetadata.update(DEVCAMDEBUG_AEC_CAMERA_MOTION_DY,
7041 &fwk_DevCamDebug_aec_camera_motion_dy, 1);
7042 }
7043 IF_META_AVAILABLE(float, DevCamDebug_aec_subject_motion,
7044 CAM_INTF_META_DEV_CAM_AEC_SUBJECT_MOTION, metadata) {
7045 float fwk_DevCamDebug_aec_subject_motion = *DevCamDebug_aec_subject_motion;
7046 camMetadata.update(DEVCAMDEBUG_AEC_SUBJECT_MOTION,
7047 &fwk_DevCamDebug_aec_subject_motion, 1);
7048 }
Samuel Ha68ba5172016-12-15 18:41:12 -08007049 // DevCamDebug metadata translateFromHalMetadata AWB
7050 IF_META_AVAILABLE(float, DevCamDebug_awb_r_gain,
7051 CAM_INTF_META_DEV_CAM_AWB_R_GAIN, metadata) {
7052 float fwk_DevCamDebug_awb_r_gain = *DevCamDebug_awb_r_gain;
7053 camMetadata.update(DEVCAMDEBUG_AWB_R_GAIN, &fwk_DevCamDebug_awb_r_gain, 1);
7054 }
7055 IF_META_AVAILABLE(float, DevCamDebug_awb_g_gain,
7056 CAM_INTF_META_DEV_CAM_AWB_G_GAIN, metadata) {
7057 float fwk_DevCamDebug_awb_g_gain = *DevCamDebug_awb_g_gain;
7058 camMetadata.update(DEVCAMDEBUG_AWB_G_GAIN, &fwk_DevCamDebug_awb_g_gain, 1);
7059 }
7060 IF_META_AVAILABLE(float, DevCamDebug_awb_b_gain,
7061 CAM_INTF_META_DEV_CAM_AWB_B_GAIN, metadata) {
7062 float fwk_DevCamDebug_awb_b_gain = *DevCamDebug_awb_b_gain;
7063 camMetadata.update(DEVCAMDEBUG_AWB_B_GAIN, &fwk_DevCamDebug_awb_b_gain, 1);
7064 }
7065 IF_META_AVAILABLE(int32_t, DevCamDebug_awb_cct,
7066 CAM_INTF_META_DEV_CAM_AWB_CCT, metadata) {
7067 int32_t fwk_DevCamDebug_awb_cct = *DevCamDebug_awb_cct;
7068 camMetadata.update(DEVCAMDEBUG_AWB_CCT, &fwk_DevCamDebug_awb_cct, 1);
7069 }
7070 IF_META_AVAILABLE(int32_t, DevCamDebug_awb_decision,
7071 CAM_INTF_META_DEV_CAM_AWB_DECISION, metadata) {
7072 int32_t fwk_DevCamDebug_awb_decision = *DevCamDebug_awb_decision;
7073 camMetadata.update(DEVCAMDEBUG_AWB_DECISION, &fwk_DevCamDebug_awb_decision, 1);
7074 }
7075 }
7076 // atrace_end(ATRACE_TAG_ALWAYS);
7077
Thierry Strudel3d639192016-09-09 11:52:26 -07007078 IF_META_AVAILABLE(uint32_t, frame_number, CAM_INTF_META_FRAME_NUMBER, metadata) {
7079 int64_t fwk_frame_number = *frame_number;
7080 camMetadata.update(ANDROID_SYNC_FRAME_NUMBER, &fwk_frame_number, 1);
7081 }
7082
7083 IF_META_AVAILABLE(cam_fps_range_t, float_range, CAM_INTF_PARM_FPS_RANGE, metadata) {
7084 int32_t fps_range[2];
7085 fps_range[0] = (int32_t)float_range->min_fps;
7086 fps_range[1] = (int32_t)float_range->max_fps;
7087 camMetadata.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
7088 fps_range, 2);
7089 LOGD("urgent Metadata : ANDROID_CONTROL_AE_TARGET_FPS_RANGE [%d, %d]",
7090 fps_range[0], fps_range[1]);
7091 }
7092
7093 IF_META_AVAILABLE(int32_t, expCompensation, CAM_INTF_PARM_EXPOSURE_COMPENSATION, metadata) {
7094 camMetadata.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, expCompensation, 1);
7095 }
7096
7097 IF_META_AVAILABLE(uint32_t, sceneMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
7098 int val = (uint8_t)lookupFwkName(SCENE_MODES_MAP,
7099 METADATA_MAP_SIZE(SCENE_MODES_MAP),
7100 *sceneMode);
7101 if (NAME_NOT_FOUND != val) {
7102 uint8_t fwkSceneMode = (uint8_t)val;
7103 camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkSceneMode, 1);
7104 LOGD("urgent Metadata : ANDROID_CONTROL_SCENE_MODE: %d",
7105 fwkSceneMode);
7106 }
7107 }
7108
7109 IF_META_AVAILABLE(uint32_t, ae_lock, CAM_INTF_PARM_AEC_LOCK, metadata) {
7110 uint8_t fwk_ae_lock = (uint8_t) *ae_lock;
7111 camMetadata.update(ANDROID_CONTROL_AE_LOCK, &fwk_ae_lock, 1);
7112 }
7113
7114 IF_META_AVAILABLE(uint32_t, awb_lock, CAM_INTF_PARM_AWB_LOCK, metadata) {
7115 uint8_t fwk_awb_lock = (uint8_t) *awb_lock;
7116 camMetadata.update(ANDROID_CONTROL_AWB_LOCK, &fwk_awb_lock, 1);
7117 }
7118
7119 IF_META_AVAILABLE(uint32_t, color_correct_mode, CAM_INTF_META_COLOR_CORRECT_MODE, metadata) {
7120 uint8_t fwk_color_correct_mode = (uint8_t) *color_correct_mode;
7121 camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, &fwk_color_correct_mode, 1);
7122 }
7123
7124 IF_META_AVAILABLE(cam_edge_application_t, edgeApplication,
7125 CAM_INTF_META_EDGE_MODE, metadata) {
7126 camMetadata.update(ANDROID_EDGE_MODE, &(edgeApplication->edge_mode), 1);
7127 }
7128
7129 IF_META_AVAILABLE(uint32_t, flashPower, CAM_INTF_META_FLASH_POWER, metadata) {
7130 uint8_t fwk_flashPower = (uint8_t) *flashPower;
7131 camMetadata.update(ANDROID_FLASH_FIRING_POWER, &fwk_flashPower, 1);
7132 }
7133
7134 IF_META_AVAILABLE(int64_t, flashFiringTime, CAM_INTF_META_FLASH_FIRING_TIME, metadata) {
7135 camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1);
7136 }
7137
7138 IF_META_AVAILABLE(int32_t, flashState, CAM_INTF_META_FLASH_STATE, metadata) {
7139 if (0 <= *flashState) {
7140 uint8_t fwk_flashState = (uint8_t) *flashState;
7141 if (!gCamCapability[mCameraId]->flash_available) {
7142 fwk_flashState = ANDROID_FLASH_STATE_UNAVAILABLE;
7143 }
7144 camMetadata.update(ANDROID_FLASH_STATE, &fwk_flashState, 1);
7145 }
7146 }
7147
7148 IF_META_AVAILABLE(uint32_t, flashMode, CAM_INTF_META_FLASH_MODE, metadata) {
7149 int val = lookupFwkName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP), *flashMode);
7150 if (NAME_NOT_FOUND != val) {
7151 uint8_t fwk_flashMode = (uint8_t)val;
7152 camMetadata.update(ANDROID_FLASH_MODE, &fwk_flashMode, 1);
7153 }
7154 }
7155
7156 IF_META_AVAILABLE(uint32_t, hotPixelMode, CAM_INTF_META_HOTPIXEL_MODE, metadata) {
7157 uint8_t fwk_hotPixelMode = (uint8_t) *hotPixelMode;
7158 camMetadata.update(ANDROID_HOT_PIXEL_MODE, &fwk_hotPixelMode, 1);
7159 }
7160
7161 IF_META_AVAILABLE(float, lensAperture, CAM_INTF_META_LENS_APERTURE, metadata) {
7162 camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1);
7163 }
7164
7165 IF_META_AVAILABLE(float, filterDensity, CAM_INTF_META_LENS_FILTERDENSITY, metadata) {
7166 camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1);
7167 }
7168
7169 IF_META_AVAILABLE(float, focalLength, CAM_INTF_META_LENS_FOCAL_LENGTH, metadata) {
7170 camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1);
7171 }
7172
7173 IF_META_AVAILABLE(uint32_t, opticalStab, CAM_INTF_META_LENS_OPT_STAB_MODE, metadata) {
7174 uint8_t fwk_opticalStab = (uint8_t) *opticalStab;
7175 camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &fwk_opticalStab, 1);
7176 }
7177
7178 IF_META_AVAILABLE(uint32_t, videoStab, CAM_INTF_META_VIDEO_STAB_MODE, metadata) {
7179 uint8_t fwk_videoStab = (uint8_t) *videoStab;
7180 LOGD("fwk_videoStab = %d", fwk_videoStab);
7181 camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwk_videoStab, 1);
7182 } else {
7183        // Regardless of whether video stabilization is supported, CTS expects the
7184        // EIS result to be non-NULL, so hardcode the video stabilization result to OFF mode.
7185 uint8_t fwkVideoStabMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
7186 camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwkVideoStabMode, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007187 LOGD("EIS result default to OFF mode");
Thierry Strudel3d639192016-09-09 11:52:26 -07007188 }
7189
7190 IF_META_AVAILABLE(uint32_t, noiseRedMode, CAM_INTF_META_NOISE_REDUCTION_MODE, metadata) {
7191 uint8_t fwk_noiseRedMode = (uint8_t) *noiseRedMode;
7192 camMetadata.update(ANDROID_NOISE_REDUCTION_MODE, &fwk_noiseRedMode, 1);
7193 }
7194
7195 IF_META_AVAILABLE(float, effectiveExposureFactor, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR, metadata) {
7196 camMetadata.update(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR, effectiveExposureFactor, 1);
7197 }
7198
Thierry Strudel3d639192016-09-09 11:52:26 -07007199 IF_META_AVAILABLE(cam_black_level_metadata_t, blackLevelAppliedPattern,
7200 CAM_INTF_META_BLACK_LEVEL_APPLIED_PATTERN, metadata) {
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007201 float fwk_blackLevelInd[BLACK_LEVEL_PATTERN_CNT];
Thierry Strudel3d639192016-09-09 11:52:26 -07007202
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007203 adjustBlackLevelForCFA(blackLevelAppliedPattern->cam_black_level, fwk_blackLevelInd,
7204 gCamCapability[mCameraId]->color_arrangement);
Thierry Strudel3d639192016-09-09 11:52:26 -07007205
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007206        LOGD("applied dynamic black level in RGGB order = %f %f %f %f",
Thierry Strudel3d639192016-09-09 11:52:26 -07007207 blackLevelAppliedPattern->cam_black_level[0],
7208 blackLevelAppliedPattern->cam_black_level[1],
7209 blackLevelAppliedPattern->cam_black_level[2],
7210 blackLevelAppliedPattern->cam_black_level[3]);
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007211 camMetadata.update(QCAMERA3_SENSOR_DYNAMIC_BLACK_LEVEL_PATTERN, fwk_blackLevelInd,
7212 BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007213
7214#ifndef USE_HAL_3_3
7215 // Update the ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL
Jason Lee4f3d96e2017-02-28 19:24:14 +05307216        // Need to convert from the internal 14-bit depth to the sensor's 10-bit raw
Zhijun Heb753c672016-06-15 14:50:48 -07007217        // depth space, i.e. divide by 16 (e.g. a black level of 1024 maps to 64).
Jason Lee4f3d96e2017-02-28 19:24:14 +05307218 fwk_blackLevelInd[0] /= 16.0;
7219 fwk_blackLevelInd[1] /= 16.0;
7220 fwk_blackLevelInd[2] /= 16.0;
7221 fwk_blackLevelInd[3] /= 16.0;
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007222 camMetadata.update(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL, fwk_blackLevelInd,
7223 BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007224#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07007225 }
7226
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007227#ifndef USE_HAL_3_3
7228 // Fixed whitelevel is used by ISP/Sensor
7229 camMetadata.update(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL,
7230 &gCamCapability[mCameraId]->white_level, 1);
7231#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07007232
7233 IF_META_AVAILABLE(cam_crop_region_t, hScalerCropRegion,
7234 CAM_INTF_META_SCALER_CROP_REGION, metadata) {
7235 int32_t scalerCropRegion[4];
7236 scalerCropRegion[0] = hScalerCropRegion->left;
7237 scalerCropRegion[1] = hScalerCropRegion->top;
7238 scalerCropRegion[2] = hScalerCropRegion->width;
7239 scalerCropRegion[3] = hScalerCropRegion->height;
7240
7241 // Adjust crop region from sensor output coordinate system to active
7242 // array coordinate system.
7243 mCropRegionMapper.toActiveArray(scalerCropRegion[0], scalerCropRegion[1],
7244 scalerCropRegion[2], scalerCropRegion[3]);
7245
7246 camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 4);
7247 }
7248
7249 IF_META_AVAILABLE(int64_t, sensorExpTime, CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata) {
7250 LOGD("sensorExpTime = %lld", *sensorExpTime);
7251 camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1);
7252 }
7253
Shuzhen Wang6a1dd612017-08-05 15:03:53 -07007254 IF_META_AVAILABLE(float, expTimeBoost, CAM_INTF_META_EXP_TIME_BOOST, metadata) {
7255 LOGD("expTimeBoost = %f", *expTimeBoost);
7256 camMetadata.update(NEXUS_EXPERIMENTAL_2017_EXP_TIME_BOOST, expTimeBoost, 1);
7257 }
7258
Thierry Strudel3d639192016-09-09 11:52:26 -07007259 IF_META_AVAILABLE(int64_t, sensorFameDuration,
7260 CAM_INTF_META_SENSOR_FRAME_DURATION, metadata) {
7261 LOGD("sensorFameDuration = %lld", *sensorFameDuration);
7262 camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFameDuration, 1);
7263 }
7264
7265 IF_META_AVAILABLE(int64_t, sensorRollingShutterSkew,
7266 CAM_INTF_META_SENSOR_ROLLING_SHUTTER_SKEW, metadata) {
7267 LOGD("sensorRollingShutterSkew = %lld", *sensorRollingShutterSkew);
7268 camMetadata.update(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW,
7269 sensorRollingShutterSkew, 1);
7270 }
7271
7272 IF_META_AVAILABLE(int32_t, sensorSensitivity, CAM_INTF_META_SENSOR_SENSITIVITY, metadata) {
7273 LOGD("sensorSensitivity = %d", *sensorSensitivity);
7274 camMetadata.update(ANDROID_SENSOR_SENSITIVITY, sensorSensitivity, 1);
7275
7276 //calculate the noise profile based on sensitivity
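        // (S, O) are the per-channel coefficients of the Android/DNG noise
        // model, where the pixel variance is approximately S * x + O for a
        // normalized pixel value x; the same pair is repeated for every color
        // channel below.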
7277 double noise_profile_S = computeNoiseModelEntryS(*sensorSensitivity);
7278 double noise_profile_O = computeNoiseModelEntryO(*sensorSensitivity);
7279 double noise_profile[2 * gCamCapability[mCameraId]->num_color_channels];
7280 for (int i = 0; i < 2 * gCamCapability[mCameraId]->num_color_channels; i += 2) {
7281 noise_profile[i] = noise_profile_S;
7282 noise_profile[i+1] = noise_profile_O;
7283 }
7284 LOGD("noise model entry (S, O) is (%f, %f)",
7285 noise_profile_S, noise_profile_O);
7286 camMetadata.update(ANDROID_SENSOR_NOISE_PROFILE, noise_profile,
7287 (size_t) (2 * gCamCapability[mCameraId]->num_color_channels));
7288 }
7289
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007290#ifndef USE_HAL_3_3
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07007291 int32_t fwk_ispSensitivity = 100;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007292 IF_META_AVAILABLE(int32_t, ispSensitivity, CAM_INTF_META_ISP_SENSITIVITY, metadata) {
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07007293 fwk_ispSensitivity = (int32_t) *ispSensitivity;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007294 }
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07007295 IF_META_AVAILABLE(float, postStatsSensitivity, CAM_INTF_META_ISP_POST_STATS_SENSITIVITY, metadata) {
7296 fwk_ispSensitivity = (int32_t) (*postStatsSensitivity * fwk_ispSensitivity);
7297 }
7298 camMetadata.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &fwk_ispSensitivity, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007299#endif
7300
Thierry Strudel3d639192016-09-09 11:52:26 -07007301 IF_META_AVAILABLE(uint32_t, shadingMode, CAM_INTF_META_SHADING_MODE, metadata) {
7302 uint8_t fwk_shadingMode = (uint8_t) *shadingMode;
7303 camMetadata.update(ANDROID_SHADING_MODE, &fwk_shadingMode, 1);
7304 }
7305
7306 IF_META_AVAILABLE(uint32_t, faceDetectMode, CAM_INTF_META_STATS_FACEDETECT_MODE, metadata) {
7307 int val = lookupFwkName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
7308 *faceDetectMode);
7309 if (NAME_NOT_FOUND != val) {
7310 uint8_t fwk_faceDetectMode = (uint8_t)val;
7311 camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &fwk_faceDetectMode, 1);
7312
7313 if (fwk_faceDetectMode != ANDROID_STATISTICS_FACE_DETECT_MODE_OFF) {
7314 IF_META_AVAILABLE(cam_face_detection_data_t, faceDetectionInfo,
7315 CAM_INTF_META_FACE_DETECTION, metadata) {
7316 uint8_t numFaces = MIN(
7317 faceDetectionInfo->num_faces_detected, MAX_ROI);
7318 int32_t faceIds[MAX_ROI];
7319 uint8_t faceScores[MAX_ROI];
7320 int32_t faceRectangles[MAX_ROI * 4];
7321 int32_t faceLandmarks[MAX_ROI * 6];
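                    // Each face contributes 4 ints to faceRectangles
                    // (left, top, right, bottom) and 6 ints to faceLandmarks
                    // (left eye x/y, right eye x/y, mouth x/y), hence the
                    // j += 4 and k += TOTAL_LANDMARK_INDICES strides below.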
7322 size_t j = 0, k = 0;
7323
7324 for (size_t i = 0; i < numFaces; i++) {
7325 faceScores[i] = (uint8_t)faceDetectionInfo->faces[i].score;
7326 // Adjust crop region from sensor output coordinate system to active
7327 // array coordinate system.
Shuzhen Wang91c14e82017-08-21 17:56:57 -07007328 cam_rect_t rect = faceDetectionInfo->faces[i].face_boundary;
Thierry Strudel3d639192016-09-09 11:52:26 -07007329 mCropRegionMapper.toActiveArray(rect.left, rect.top,
7330 rect.width, rect.height);
7331
Shuzhen Wang91c14e82017-08-21 17:56:57 -07007332 convertToRegions(rect, faceRectangles+j, -1);
Thierry Strudel3d639192016-09-09 11:52:26 -07007333
Jason Lee8ce36fa2017-04-19 19:40:37 -07007334 LOGL("FD_DEBUG : Frame[%d] Face[%d] : top-left (%d, %d), "
7335 "bottom-right (%d, %d)",
7336 faceDetectionInfo->frame_id, i,
7337 faceRectangles[j + FACE_LEFT], faceRectangles[j + FACE_TOP],
7338 faceRectangles[j + FACE_RIGHT], faceRectangles[j + FACE_BOTTOM]);
7339
Thierry Strudel3d639192016-09-09 11:52:26 -07007340 j+= 4;
7341 }
7342 if (numFaces <= 0) {
7343 memset(faceIds, 0, sizeof(int32_t) * MAX_ROI);
7344 memset(faceScores, 0, sizeof(uint8_t) * MAX_ROI);
7345 memset(faceRectangles, 0, sizeof(int32_t) * MAX_ROI * 4);
7346 memset(faceLandmarks, 0, sizeof(int32_t) * MAX_ROI * 6);
7347 }
7348
7349 camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores,
7350 numFaces);
7351 camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES,
7352 faceRectangles, numFaces * 4U);
7353 if (fwk_faceDetectMode ==
7354 ANDROID_STATISTICS_FACE_DETECT_MODE_FULL) {
7355 IF_META_AVAILABLE(cam_face_landmarks_data_t, landmarks,
7356 CAM_INTF_META_FACE_LANDMARK, metadata) {
7357
7358 for (size_t i = 0; i < numFaces; i++) {
Shuzhen Wang91c14e82017-08-21 17:56:57 -07007359 cam_face_landmarks_info_t face_landmarks = landmarks->face_landmarks[i];
Thierry Strudel3d639192016-09-09 11:52:26 -07007360 // Map the co-ordinate sensor output coordinate system to active
7361 // array coordinate system.
7362 mCropRegionMapper.toActiveArray(
Shuzhen Wang91c14e82017-08-21 17:56:57 -07007363 face_landmarks.left_eye_center.x,
7364 face_landmarks.left_eye_center.y);
Thierry Strudel3d639192016-09-09 11:52:26 -07007365 mCropRegionMapper.toActiveArray(
Shuzhen Wang91c14e82017-08-21 17:56:57 -07007366 face_landmarks.right_eye_center.x,
7367 face_landmarks.right_eye_center.y);
Thierry Strudel3d639192016-09-09 11:52:26 -07007368 mCropRegionMapper.toActiveArray(
Shuzhen Wang91c14e82017-08-21 17:56:57 -07007369 face_landmarks.mouth_center.x,
7370 face_landmarks.mouth_center.y);
Thierry Strudel3d639192016-09-09 11:52:26 -07007371
Shuzhen Wang91c14e82017-08-21 17:56:57 -07007372 convertLandmarks(face_landmarks, faceLandmarks+k);
Jason Lee8ce36fa2017-04-19 19:40:37 -07007373
7374 LOGL("FD_DEBUG LANDMARK : Frame[%d] Face[%d] : "
7375 "left-eye (%d, %d), right-eye (%d, %d), mouth (%d, %d)",
7376 faceDetectionInfo->frame_id, i,
7377 faceLandmarks[k + LEFT_EYE_X],
7378 faceLandmarks[k + LEFT_EYE_Y],
7379 faceLandmarks[k + RIGHT_EYE_X],
7380 faceLandmarks[k + RIGHT_EYE_Y],
7381 faceLandmarks[k + MOUTH_X],
7382 faceLandmarks[k + MOUTH_Y]);
7383
Thierry Strudel04e026f2016-10-10 11:27:36 -07007384 k+= TOTAL_LANDMARK_INDICES;
7385 }
7386 } else {
7387 for (size_t i = 0; i < numFaces; i++) {
7388 setInvalidLandmarks(faceLandmarks+k);
7389 k+= TOTAL_LANDMARK_INDICES;
Thierry Strudel3d639192016-09-09 11:52:26 -07007390 }
7391 }
7392
Jason Lee49619db2017-04-13 12:07:22 -07007393 for (size_t i = 0; i < numFaces; i++) {
7394 faceIds[i] = faceDetectionInfo->faces[i].face_id;
7395
7396 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : faceIds=%d",
7397 faceDetectionInfo->frame_id, i, faceIds[i]);
7398 }
7399
Thierry Strudel3d639192016-09-09 11:52:26 -07007400 camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces);
7401 camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS,
7402 faceLandmarks, numFaces * 6U);
Jason Lee49619db2017-04-13 12:07:22 -07007403 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08007404 IF_META_AVAILABLE(cam_face_blink_data_t, blinks,
7405 CAM_INTF_META_FACE_BLINK, metadata) {
7406 uint8_t detected[MAX_ROI];
7407 uint8_t degree[MAX_ROI * 2];
7408 for (size_t i = 0; i < numFaces; i++) {
7409 detected[i] = blinks->blink[i].blink_detected;
7410 degree[2 * i] = blinks->blink[i].left_blink;
7411 degree[2 * i + 1] = blinks->blink[i].right_blink;
Jason Lee8ce36fa2017-04-19 19:40:37 -07007412
Jason Lee49619db2017-04-13 12:07:22 -07007413 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : "
7414 "blink_detected=%d, leye_blink=%d, reye_blink=%d",
7415 faceDetectionInfo->frame_id, i, detected[i], degree[2 * i],
7416 degree[2 * i + 1]);
Thierry Strudel54dc9782017-02-15 12:12:10 -08007417 }
7418 camMetadata.update(QCAMERA3_STATS_BLINK_DETECTED,
7419 detected, numFaces);
7420 camMetadata.update(QCAMERA3_STATS_BLINK_DEGREE,
7421 degree, numFaces * 2);
7422 }
7423 IF_META_AVAILABLE(cam_face_smile_data_t, smiles,
7424 CAM_INTF_META_FACE_SMILE, metadata) {
7425 uint8_t degree[MAX_ROI];
7426 uint8_t confidence[MAX_ROI];
7427 for (size_t i = 0; i < numFaces; i++) {
7428 degree[i] = smiles->smile[i].smile_degree;
7429 confidence[i] = smiles->smile[i].smile_confidence;
Jason Lee8ce36fa2017-04-19 19:40:37 -07007430
Jason Lee49619db2017-04-13 12:07:22 -07007431 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : "
7432 "smile_degree=%d, smile_score=%d",
7433 faceDetectionInfo->frame_id, i, degree[i], confidence[i]);
Thierry Strudel54dc9782017-02-15 12:12:10 -08007434 }
7435 camMetadata.update(QCAMERA3_STATS_SMILE_DEGREE,
7436 degree, numFaces);
7437 camMetadata.update(QCAMERA3_STATS_SMILE_CONFIDENCE,
7438 confidence, numFaces);
7439 }
7440 IF_META_AVAILABLE(cam_face_gaze_data_t, gazes,
7441 CAM_INTF_META_FACE_GAZE, metadata) {
7442 int8_t angle[MAX_ROI];
7443 int32_t direction[MAX_ROI * 3];
7444 int8_t degree[MAX_ROI * 2];
7445 for (size_t i = 0; i < numFaces; i++) {
7446 angle[i] = gazes->gaze[i].gaze_angle;
7447 direction[3 * i] = gazes->gaze[i].updown_dir;
7448 direction[3 * i + 1] = gazes->gaze[i].leftright_dir;
7449 direction[3 * i + 2] = gazes->gaze[i].roll_dir;
7450 degree[2 * i] = gazes->gaze[i].left_right_gaze;
7451 degree[2 * i + 1] = gazes->gaze[i].top_bottom_gaze;
Jason Lee8ce36fa2017-04-19 19:40:37 -07007452
7453 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : gaze_angle=%d, "
7454 "updown_dir=%d, leftright_dir=%d,, roll_dir=%d, "
7455 "left_right_gaze=%d, top_bottom_gaze=%d",
7456 faceDetectionInfo->frame_id, i, angle[i],
7457 direction[3 * i], direction[3 * i + 1],
7458 direction[3 * i + 2],
7459 degree[2 * i], degree[2 * i + 1]);
Thierry Strudel54dc9782017-02-15 12:12:10 -08007460 }
7461 camMetadata.update(QCAMERA3_STATS_GAZE_ANGLE,
7462 (uint8_t *)angle, numFaces);
7463 camMetadata.update(QCAMERA3_STATS_GAZE_DIRECTION,
7464 direction, numFaces * 3);
7465 camMetadata.update(QCAMERA3_STATS_GAZE_DEGREE,
7466 (uint8_t *)degree, numFaces * 2);
7467 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007468 }
7469 }
7470 }
7471 }
7472
7473 IF_META_AVAILABLE(uint32_t, histogramMode, CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata) {
7474 uint8_t fwk_histogramMode = (uint8_t) *histogramMode;
Shuzhen Wang14415f52016-11-16 18:26:18 -08007475 int32_t histogramBins = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007476 camMetadata.update(QCAMERA3_HISTOGRAM_MODE, &fwk_histogramMode, 1);
Shuzhen Wang14415f52016-11-16 18:26:18 -08007477 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE, &fwk_histogramMode, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007478
Shuzhen Wang14415f52016-11-16 18:26:18 -08007479 IF_META_AVAILABLE(int32_t, histBins, CAM_INTF_META_STATS_HISTOGRAM_BINS, metadata) {
7480 histogramBins = *histBins;
7481 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS, &histogramBins, 1);
7482 }
7483
7484 if (fwk_histogramMode == QCAMERA3_HISTOGRAM_MODE_ON && histogramBins > 0) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007485 IF_META_AVAILABLE(cam_hist_stats_t, stats_data, CAM_INTF_META_HISTOGRAM, metadata) {
7486 // process histogram statistics info
Shuzhen Wang14415f52016-11-16 18:26:18 -08007487 int32_t* histogramData = NULL;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007488
7489 switch (stats_data->type) {
7490 case CAM_HISTOGRAM_TYPE_BAYER:
7491 switch (stats_data->bayer_stats.data_type) {
7492 case CAM_STATS_CHANNEL_GR:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007493 histogramData = (int32_t *)stats_data->bayer_stats.gr_stats.hist_buf;
7494 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007495 case CAM_STATS_CHANNEL_GB:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007496 histogramData = (int32_t *)stats_data->bayer_stats.gb_stats.hist_buf;
7497 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007498 case CAM_STATS_CHANNEL_B:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007499 histogramData = (int32_t *)stats_data->bayer_stats.b_stats.hist_buf;
7500 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007501 case CAM_STATS_CHANNEL_Y:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007502 case CAM_STATS_CHANNEL_ALL:
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007503 case CAM_STATS_CHANNEL_R:
7504 default:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007505 histogramData = (int32_t *)stats_data->bayer_stats.r_stats.hist_buf;
7506 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007507 }
7508 break;
7509 case CAM_HISTOGRAM_TYPE_YUV:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007510 histogramData = (int32_t *)stats_data->yuv_stats.hist_buf;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007511 break;
7512 }
7513
Shuzhen Wang14415f52016-11-16 18:26:18 -08007514 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM, histogramData, histogramBins);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007515 }
7516 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007517 }
7518
7519 IF_META_AVAILABLE(uint32_t, sharpnessMapMode,
7520 CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata) {
7521 uint8_t fwk_sharpnessMapMode = (uint8_t) *sharpnessMapMode;
7522 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &fwk_sharpnessMapMode, 1);
7523 }
7524
7525 IF_META_AVAILABLE(cam_sharpness_map_t, sharpnessMap,
7526 CAM_INTF_META_STATS_SHARPNESS_MAP, metadata) {
7527 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP, (int32_t *)sharpnessMap->sharpness,
7528 CAM_MAX_MAP_WIDTH * CAM_MAX_MAP_HEIGHT * 3);
7529 }
7530
7531 IF_META_AVAILABLE(cam_lens_shading_map_t, lensShadingMap,
7532 CAM_INTF_META_LENS_SHADING_MAP, metadata) {
7533 size_t map_height = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.height,
7534 CAM_MAX_SHADING_MAP_HEIGHT);
7535 size_t map_width = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.width,
7536 CAM_MAX_SHADING_MAP_WIDTH);
7537 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP,
7538 lensShadingMap->lens_shading, 4U * map_width * map_height);
7539 }
7540
7541 IF_META_AVAILABLE(uint32_t, toneMapMode, CAM_INTF_META_TONEMAP_MODE, metadata) {
7542 uint8_t fwk_toneMapMode = (uint8_t) *toneMapMode;
7543 camMetadata.update(ANDROID_TONEMAP_MODE, &fwk_toneMapMode, 1);
7544 }
7545
7546 IF_META_AVAILABLE(cam_rgb_tonemap_curves, tonemap, CAM_INTF_META_TONEMAP_CURVES, metadata) {
7547 //Populate CAM_INTF_META_TONEMAP_CURVES
7548 /* ch0 = G, ch 1 = B, ch 2 = R*/
7549 if (tonemap->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
7550 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
7551 tonemap->tonemap_points_cnt,
7552 CAM_MAX_TONEMAP_CURVE_SIZE);
7553 tonemap->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
7554 }
7555
7556 camMetadata.update(ANDROID_TONEMAP_CURVE_GREEN,
7557 &tonemap->curves[0].tonemap_points[0][0],
7558 tonemap->tonemap_points_cnt * 2);
7559
7560 camMetadata.update(ANDROID_TONEMAP_CURVE_BLUE,
7561 &tonemap->curves[1].tonemap_points[0][0],
7562 tonemap->tonemap_points_cnt * 2);
7563
7564 camMetadata.update(ANDROID_TONEMAP_CURVE_RED,
7565 &tonemap->curves[2].tonemap_points[0][0],
7566 tonemap->tonemap_points_cnt * 2);
7567 }
7568
7569 IF_META_AVAILABLE(cam_color_correct_gains_t, colorCorrectionGains,
7570 CAM_INTF_META_COLOR_CORRECT_GAINS, metadata) {
7571 camMetadata.update(ANDROID_COLOR_CORRECTION_GAINS, colorCorrectionGains->gains,
7572 CC_GAIN_MAX);
7573 }
7574
7575 IF_META_AVAILABLE(cam_color_correct_matrix_t, colorCorrectionMatrix,
7576 CAM_INTF_META_COLOR_CORRECT_TRANSFORM, metadata) {
7577 camMetadata.update(ANDROID_COLOR_CORRECTION_TRANSFORM,
7578 (camera_metadata_rational_t *)(void *)colorCorrectionMatrix->transform_matrix,
7579 CC_MATRIX_COLS * CC_MATRIX_ROWS);
7580 }
7581
7582 IF_META_AVAILABLE(cam_profile_tone_curve, toneCurve,
7583 CAM_INTF_META_PROFILE_TONE_CURVE, metadata) {
7584 if (toneCurve->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
7585 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
7586 toneCurve->tonemap_points_cnt,
7587 CAM_MAX_TONEMAP_CURVE_SIZE);
7588 toneCurve->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
7589 }
7590 camMetadata.update(ANDROID_SENSOR_PROFILE_TONE_CURVE,
7591 (float*)toneCurve->curve.tonemap_points,
7592 toneCurve->tonemap_points_cnt * 2);
7593 }
7594
7595 IF_META_AVAILABLE(cam_color_correct_gains_t, predColorCorrectionGains,
7596 CAM_INTF_META_PRED_COLOR_CORRECT_GAINS, metadata) {
7597 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_GAINS,
7598 predColorCorrectionGains->gains, 4);
7599 }
7600
7601 IF_META_AVAILABLE(cam_color_correct_matrix_t, predColorCorrectionMatrix,
7602 CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM, metadata) {
7603 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
7604 (camera_metadata_rational_t *)(void *)predColorCorrectionMatrix->transform_matrix,
7605 CC_MATRIX_ROWS * CC_MATRIX_COLS);
7606 }
7607
7608 IF_META_AVAILABLE(float, otpWbGrGb, CAM_INTF_META_OTP_WB_GRGB, metadata) {
7609 camMetadata.update(ANDROID_SENSOR_GREEN_SPLIT, otpWbGrGb, 1);
7610 }
7611
7612 IF_META_AVAILABLE(uint32_t, blackLevelLock, CAM_INTF_META_BLACK_LEVEL_LOCK, metadata) {
7613 uint8_t fwk_blackLevelLock = (uint8_t) *blackLevelLock;
7614 camMetadata.update(ANDROID_BLACK_LEVEL_LOCK, &fwk_blackLevelLock, 1);
7615 }
7616
7617 IF_META_AVAILABLE(uint32_t, sceneFlicker, CAM_INTF_META_SCENE_FLICKER, metadata) {
7618 uint8_t fwk_sceneFlicker = (uint8_t) *sceneFlicker;
7619 camMetadata.update(ANDROID_STATISTICS_SCENE_FLICKER, &fwk_sceneFlicker, 1);
7620 }
7621
7622 IF_META_AVAILABLE(uint32_t, effectMode, CAM_INTF_PARM_EFFECT, metadata) {
7623 int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
7624 *effectMode);
7625 if (NAME_NOT_FOUND != val) {
7626 uint8_t fwk_effectMode = (uint8_t)val;
7627 camMetadata.update(ANDROID_CONTROL_EFFECT_MODE, &fwk_effectMode, 1);
7628 }
7629 }
7630
7631 IF_META_AVAILABLE(cam_test_pattern_data_t, testPatternData,
7632 CAM_INTF_META_TEST_PATTERN_DATA, metadata) {
7633 int32_t fwk_testPatternMode = lookupFwkName(TEST_PATTERN_MAP,
7634 METADATA_MAP_SIZE(TEST_PATTERN_MAP), testPatternData->mode);
7635 if (NAME_NOT_FOUND != fwk_testPatternMode) {
7636 camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &fwk_testPatternMode, 1);
7637 }
7638 int32_t fwk_testPatternData[4];
7639 fwk_testPatternData[0] = testPatternData->r;
7640 fwk_testPatternData[3] = testPatternData->b;
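        // Map the HAL green channel values into the framework's four-entry layout
        // (R first, B last, greens in between); which HAL green field lands at
        // index 1 vs 2 depends on the sensor's color filter arrangement.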
7641 switch (gCamCapability[mCameraId]->color_arrangement) {
7642 case CAM_FILTER_ARRANGEMENT_RGGB:
7643 case CAM_FILTER_ARRANGEMENT_GRBG:
7644 fwk_testPatternData[1] = testPatternData->gr;
7645 fwk_testPatternData[2] = testPatternData->gb;
7646 break;
7647 case CAM_FILTER_ARRANGEMENT_GBRG:
7648 case CAM_FILTER_ARRANGEMENT_BGGR:
7649 fwk_testPatternData[2] = testPatternData->gr;
7650 fwk_testPatternData[1] = testPatternData->gb;
7651 break;
7652 default:
7653 LOGE("color arrangement %d is not supported",
7654 gCamCapability[mCameraId]->color_arrangement);
7655 break;
7656 }
7657 camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_DATA, fwk_testPatternData, 4);
7658 }
7659
7660 IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, metadata) {
7661 camMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
7662 }
7663
7664 IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, metadata) {
7665 String8 str((const char *)gps_methods);
7666 camMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
7667 }
7668
7669 IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, metadata) {
7670 camMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
7671 }
7672
7673 IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, metadata) {
7674 camMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
7675 }
7676
7677 IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, metadata) {
7678 uint8_t fwk_jpeg_quality = (uint8_t) *jpeg_quality;
7679 camMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
7680 }
7681
7682 IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, metadata) {
7683 uint8_t fwk_thumb_quality = (uint8_t) *thumb_quality;
7684 camMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
7685 }
7686
7687 IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, metadata) {
7688 int32_t fwk_thumb_size[2];
7689 fwk_thumb_size[0] = thumb_size->width;
7690 fwk_thumb_size[1] = thumb_size->height;
7691 camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
7692 }
7693
Shuzhen Wang2fea89e2017-05-08 17:02:15 -07007694 // Skip reprocess metadata if there is no input stream.
7695 if (mInputStreamInfo.dim.width > 0 && mInputStreamInfo.dim.height > 0) {
7696 IF_META_AVAILABLE(int32_t, privateData, CAM_INTF_META_PRIVATE_DATA, metadata) {
7697 camMetadata.update(QCAMERA3_PRIVATEDATA_REPROCESS,
7698 privateData,
7699 MAX_METADATA_PRIVATE_PAYLOAD_SIZE_IN_BYTES / sizeof(int32_t));
7700 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007701 }
7702
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007703 IF_META_AVAILABLE(int32_t, meteringMode, CAM_INTF_PARM_AEC_ALGO_TYPE, metadata) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08007704 camMetadata.update(QCAMERA3_EXPOSURE_METER,
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007705 meteringMode, 1);
7706 }
7707
Thierry Strudel54dc9782017-02-15 12:12:10 -08007708 IF_META_AVAILABLE(cam_asd_hdr_scene_data_t, hdr_scene_data,
7709 CAM_INTF_META_ASD_HDR_SCENE_DATA, metadata) {
7710 LOGD("hdr_scene_data: %d %f\n",
7711 hdr_scene_data->is_hdr_scene, hdr_scene_data->hdr_confidence);
7712 uint8_t isHdr = hdr_scene_data->is_hdr_scene;
7713 float isHdrConfidence = hdr_scene_data->hdr_confidence;
7714 camMetadata.update(QCAMERA3_STATS_IS_HDR_SCENE,
7715 &isHdr, 1);
7716 camMetadata.update(QCAMERA3_STATS_IS_HDR_SCENE_CONFIDENCE,
7717 &isHdrConfidence, 1);
7718 }
7719
7720
7721
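    // Pack tuning metadata into a flat byte blob: data version, the per-module
    // payload sizes (sensor, VFE, CPP, CAC, mod3), then the payloads themselves,
    // published through QCAMERA3_TUNING_META_DATA_BLOB.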
Thierry Strudel3d639192016-09-09 11:52:26 -07007722 if (metadata->is_tuning_params_valid) {
7723 uint8_t tuning_meta_data_blob[sizeof(tuning_params_t)];
7724 uint8_t *data = (uint8_t *)&tuning_meta_data_blob[0];
7725 metadata->tuning_params.tuning_data_version = TUNING_DATA_VERSION;
7726
7727
7728 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_data_version),
7729 sizeof(uint32_t));
7730 data += sizeof(uint32_t);
7731
7732 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_sensor_data_size),
7733 sizeof(uint32_t));
7734 LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
7735 data += sizeof(uint32_t);
7736
7737 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_vfe_data_size),
7738 sizeof(uint32_t));
7739 LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
7740 data += sizeof(uint32_t);
7741
7742 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cpp_data_size),
7743 sizeof(uint32_t));
7744 LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
7745 data += sizeof(uint32_t);
7746
7747 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cac_data_size),
7748 sizeof(uint32_t));
7749 LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
7750 data += sizeof(uint32_t);
7751
7752 metadata->tuning_params.tuning_mod3_data_size = 0;
7753 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_mod3_data_size),
7754 sizeof(uint32_t));
7755 LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
7756 data += sizeof(uint32_t);
7757
7758 size_t count = MIN(metadata->tuning_params.tuning_sensor_data_size,
7759 TUNING_SENSOR_DATA_MAX);
7760 memcpy(data, ((uint8_t *)&metadata->tuning_params.data),
7761 count);
7762 data += count;
7763
7764 count = MIN(metadata->tuning_params.tuning_vfe_data_size,
7765 TUNING_VFE_DATA_MAX);
7766 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_VFE_DATA_OFFSET]),
7767 count);
7768 data += count;
7769
7770 count = MIN(metadata->tuning_params.tuning_cpp_data_size,
7771 TUNING_CPP_DATA_MAX);
7772 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CPP_DATA_OFFSET]),
7773 count);
7774 data += count;
7775
7776 count = MIN(metadata->tuning_params.tuning_cac_data_size,
7777 TUNING_CAC_DATA_MAX);
7778 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CAC_DATA_OFFSET]),
7779 count);
7780 data += count;
7781
7782 camMetadata.update(QCAMERA3_TUNING_META_DATA_BLOB,
7783 (int32_t *)(void *)tuning_meta_data_blob,
7784 (size_t)(data-tuning_meta_data_blob) / sizeof(uint32_t));
7785 }
7786
7787 IF_META_AVAILABLE(cam_neutral_col_point_t, neuColPoint,
7788 CAM_INTF_META_NEUTRAL_COL_POINT, metadata) {
7789 camMetadata.update(ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
7790 (camera_metadata_rational_t *)(void *)neuColPoint->neutral_col_point,
7791 NEUTRAL_COL_POINTS);
7792 }
7793
7794 IF_META_AVAILABLE(uint32_t, shadingMapMode, CAM_INTF_META_LENS_SHADING_MAP_MODE, metadata) {
7795 uint8_t fwk_shadingMapMode = (uint8_t) *shadingMapMode;
7796 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &fwk_shadingMapMode, 1);
7797 }
7798
7799 IF_META_AVAILABLE(cam_area_t, hAeRegions, CAM_INTF_META_AEC_ROI, metadata) {
7800 int32_t aeRegions[REGIONS_TUPLE_COUNT];
7801        // Adjust the AE region from the sensor output coordinate system to the
7802        // active array coordinate system.
Shuzhen Wang91c14e82017-08-21 17:56:57 -07007803 cam_rect_t hAeRect = hAeRegions->rect;
7804 mCropRegionMapper.toActiveArray(hAeRect.left, hAeRect.top,
7805 hAeRect.width, hAeRect.height);
Thierry Strudel3d639192016-09-09 11:52:26 -07007806
Shuzhen Wang91c14e82017-08-21 17:56:57 -07007807 convertToRegions(hAeRect, aeRegions, hAeRegions->weight);
Thierry Strudel3d639192016-09-09 11:52:26 -07007808 camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions,
7809 REGIONS_TUPLE_COUNT);
7810 LOGD("Metadata : ANDROID_CONTROL_AE_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
7811 aeRegions[0], aeRegions[1], aeRegions[2], aeRegions[3],
Shuzhen Wang91c14e82017-08-21 17:56:57 -07007812 hAeRect.left, hAeRect.top, hAeRect.width,
7813 hAeRect.height);
Thierry Strudel3d639192016-09-09 11:52:26 -07007814 }
7815
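    // Skip reporting the AF state here if it was already delivered as an early
    // partial result for this request.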
Shuzhen Wang181c57b2017-07-21 11:39:44 -07007816 if (!pendingRequest.focusStateSent) {
7817 if (pendingRequest.focusStateValid) {
7818 camMetadata.update(ANDROID_CONTROL_AF_STATE, &pendingRequest.focusState, 1);
7819 LOGD("Metadata : ANDROID_CONTROL_AF_STATE %u", pendingRequest.focusState);
Shuzhen Wang0cb8cdf2016-07-14 11:56:49 -07007820 } else {
Shuzhen Wang181c57b2017-07-21 11:39:44 -07007821 IF_META_AVAILABLE(uint32_t, afState, CAM_INTF_META_AF_STATE, metadata) {
7822 uint8_t fwk_afState = (uint8_t) *afState;
7823 camMetadata.update(ANDROID_CONTROL_AF_STATE, &fwk_afState, 1);
7824 LOGD("Metadata : ANDROID_CONTROL_AF_STATE %u", *afState);
7825 }
Shuzhen Wang0cb8cdf2016-07-14 11:56:49 -07007826 }
7827 }
7828
Thierry Strudel3d639192016-09-09 11:52:26 -07007829 IF_META_AVAILABLE(float, focusDistance, CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata) {
7830 camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1);
7831 }
7832
7833 IF_META_AVAILABLE(float, focusRange, CAM_INTF_META_LENS_FOCUS_RANGE, metadata) {
7834 camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 2);
7835 }
7836
7837 IF_META_AVAILABLE(cam_af_lens_state_t, lensState, CAM_INTF_META_LENS_STATE, metadata) {
7838 uint8_t fwk_lensState = *lensState;
7839 camMetadata.update(ANDROID_LENS_STATE , &fwk_lensState, 1);
7840 }
7841
Thierry Strudel3d639192016-09-09 11:52:26 -07007842 IF_META_AVAILABLE(uint32_t, hal_ab_mode, CAM_INTF_PARM_ANTIBANDING, metadata) {
Shuzhen Wangf6890e02016-08-12 14:28:54 -07007843 uint32_t ab_mode = *hal_ab_mode;
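        // The framework antibanding enum has no 50Hz/60Hz auto variants, so fold
        // those HAL modes into plain AUTO before the lookup.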
7844 if (ab_mode == CAM_ANTIBANDING_MODE_AUTO_60HZ ||
7845 ab_mode == CAM_ANTIBANDING_MODE_AUTO_50HZ) {
7846 ab_mode = CAM_ANTIBANDING_MODE_AUTO;
7847 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007848 int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
Shuzhen Wangf6890e02016-08-12 14:28:54 -07007849 ab_mode);
Thierry Strudel3d639192016-09-09 11:52:26 -07007850 if (NAME_NOT_FOUND != val) {
7851 uint8_t fwk_ab_mode = (uint8_t)val;
7852 camMetadata.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &fwk_ab_mode, 1);
7853 }
7854 }
7855
7856 IF_META_AVAILABLE(uint32_t, bestshotMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
7857 int val = lookupFwkName(SCENE_MODES_MAP,
7858 METADATA_MAP_SIZE(SCENE_MODES_MAP), *bestshotMode);
7859 if (NAME_NOT_FOUND != val) {
7860 uint8_t fwkBestshotMode = (uint8_t)val;
7861 camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkBestshotMode, 1);
7862 LOGD("Metadata : ANDROID_CONTROL_SCENE_MODE");
7863 } else {
7864 LOGH("Metadata not found : ANDROID_CONTROL_SCENE_MODE");
7865 }
7866 }
7867
7868 IF_META_AVAILABLE(uint32_t, mode, CAM_INTF_META_MODE, metadata) {
7869 uint8_t fwk_mode = (uint8_t) *mode;
7870 camMetadata.update(ANDROID_CONTROL_MODE, &fwk_mode, 1);
7871 }
7872
7873    /* Constant metadata values to be updated */
7874 uint8_t hotPixelModeFast = ANDROID_HOT_PIXEL_MODE_FAST;
7875 camMetadata.update(ANDROID_HOT_PIXEL_MODE, &hotPixelModeFast, 1);
7876
7877 uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
7878 camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
7879
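    // Hot pixel map mode is reported as OFF above, so publish an empty
    // hot pixel map (zero entries).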
7880 int32_t hotPixelMap[2];
7881 camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP, &hotPixelMap[0], 0);
7882
7883 // CDS
7884 IF_META_AVAILABLE(int32_t, cds, CAM_INTF_PARM_CDS_MODE, metadata) {
7885 camMetadata.update(QCAMERA3_CDS_MODE, cds, 1);
7886 }
7887
Thierry Strudel04e026f2016-10-10 11:27:36 -07007888 IF_META_AVAILABLE(cam_sensor_hdr_type_t, vhdr, CAM_INTF_PARM_SENSOR_HDR, metadata) {
7889 int32_t fwk_hdr;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007890 int8_t curr_hdr_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) != 0);
Thierry Strudel04e026f2016-10-10 11:27:36 -07007891 if(*vhdr == CAM_SENSOR_HDR_OFF) {
7892 fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_OFF;
7893 } else {
7894 fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_ON;
7895 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08007896
7897 if(fwk_hdr != curr_hdr_state) {
7898 LOGH("PROFILE_META_HDR_TOGGLED value=%d", fwk_hdr);
7899 if(fwk_hdr)
7900 mCurrFeatureState |= CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
7901 else
7902 mCurrFeatureState &= ~CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
7903 }
Thierry Strudel04e026f2016-10-10 11:27:36 -07007904 camMetadata.update(QCAMERA3_VIDEO_HDR_MODE, &fwk_hdr, 1);
7905 }
7906
Thierry Strudel54dc9782017-02-15 12:12:10 -08007907 //binning correction
7908 IF_META_AVAILABLE(cam_binning_correction_mode_t, bin_correction,
7909 CAM_INTF_META_BINNING_CORRECTION_MODE, metadata) {
7910 int32_t fwk_bin_mode = (int32_t) *bin_correction;
7911 camMetadata.update(QCAMERA3_BINNING_CORRECTION_MODE, &fwk_bin_mode, 1);
7912 }
7913
Thierry Strudel04e026f2016-10-10 11:27:36 -07007914 IF_META_AVAILABLE(cam_ir_mode_type_t, ir, CAM_INTF_META_IR_MODE, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007915 int32_t fwk_ir = (int32_t) *ir;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007916 int8_t curr_ir_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_IR ) != 0);
7917 int8_t is_ir_on = 0;
7918
7919        is_ir_on = (fwk_ir > 0) ? 1 : 0;
7920 if(is_ir_on != curr_ir_state) {
7921 LOGH("PROFILE_META_IR_TOGGLED value=%d", fwk_ir);
7922 if(is_ir_on)
7923 mCurrFeatureState |= CAM_QCOM_FEATURE_IR;
7924 else
7925 mCurrFeatureState &= ~CAM_QCOM_FEATURE_IR;
7926 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007927 camMetadata.update(QCAMERA3_IR_MODE, &fwk_ir, 1);
Thierry Strudel04e026f2016-10-10 11:27:36 -07007928 }
7929
Thierry Strudel269c81a2016-10-12 12:13:59 -07007930 // AEC SPEED
7931 IF_META_AVAILABLE(float, aec, CAM_INTF_META_AEC_CONVERGENCE_SPEED, metadata) {
7932 camMetadata.update(QCAMERA3_AEC_CONVERGENCE_SPEED, aec, 1);
7933 }
7934
7935 // AWB SPEED
7936 IF_META_AVAILABLE(float, awb, CAM_INTF_META_AWB_CONVERGENCE_SPEED, metadata) {
7937 camMetadata.update(QCAMERA3_AWB_CONVERGENCE_SPEED, awb, 1);
7938 }
7939
Thierry Strudel3d639192016-09-09 11:52:26 -07007940 // TNR
7941 IF_META_AVAILABLE(cam_denoise_param_t, tnr, CAM_INTF_PARM_TEMPORAL_DENOISE, metadata) {
7942 uint8_t tnr_enable = tnr->denoise_enable;
7943 int32_t tnr_process_type = (int32_t)tnr->process_plates;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007944 int8_t curr_tnr_state = ((mCurrFeatureState & CAM_QTI_FEATURE_SW_TNR) != 0) ;
7945 int8_t is_tnr_on = 0;
7946
7947        is_tnr_on = (tnr_enable > 0) ? 1 : 0;
7948 if(is_tnr_on != curr_tnr_state) {
7949 LOGH("PROFILE_META_TNR_TOGGLED value=%d", tnr_enable);
7950 if(is_tnr_on)
7951 mCurrFeatureState |= CAM_QTI_FEATURE_SW_TNR;
7952 else
7953 mCurrFeatureState &= ~CAM_QTI_FEATURE_SW_TNR;
7954 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007955
7956 camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
7957 camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
7958 }
7959
7960 // Reprocess crop data
7961 IF_META_AVAILABLE(cam_crop_data_t, crop_data, CAM_INTF_META_CROP_DATA, metadata) {
7962 uint8_t cnt = crop_data->num_of_streams;
7963 if ( (0 >= cnt) || (cnt > MAX_NUM_STREAMS)) {
7964 // mm-qcamera-daemon only posts crop_data for streams
7965            // not linked to pproc, so the absence of valid crop metadata
7966            // is not necessarily an error.
7967 LOGD("No valid crop metadata entries");
7968 } else {
7969 uint32_t reproc_stream_id;
7970 if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
7971 LOGD("No reprocessible stream found, ignore crop data");
7972 } else {
7973 int rc = NO_ERROR;
7974 Vector<int32_t> roi_map;
7975 int32_t *crop = new int32_t[cnt*4];
7976 if (NULL == crop) {
7977 rc = NO_MEMORY;
7978 }
7979 if (NO_ERROR == rc) {
7980 int32_t streams_found = 0;
7981 for (size_t i = 0; i < cnt; i++) {
7982 if (crop_data->crop_info[i].stream_id == reproc_stream_id) {
7983 if (pprocDone) {
7984 // HAL already does internal reprocessing,
7985 // either via reprocessing before JPEG encoding,
7986 // or offline postprocessing for pproc bypass case.
7987 crop[0] = 0;
7988 crop[1] = 0;
7989 crop[2] = mInputStreamInfo.dim.width;
7990 crop[3] = mInputStreamInfo.dim.height;
7991 } else {
7992 crop[0] = crop_data->crop_info[i].crop.left;
7993 crop[1] = crop_data->crop_info[i].crop.top;
7994 crop[2] = crop_data->crop_info[i].crop.width;
7995 crop[3] = crop_data->crop_info[i].crop.height;
7996 }
7997 roi_map.add(crop_data->crop_info[i].roi_map.left);
7998 roi_map.add(crop_data->crop_info[i].roi_map.top);
7999 roi_map.add(crop_data->crop_info[i].roi_map.width);
8000 roi_map.add(crop_data->crop_info[i].roi_map.height);
8001 streams_found++;
8002 LOGD("Adding reprocess crop data for stream %dx%d, %dx%d",
8003 crop[0], crop[1], crop[2], crop[3]);
8004 LOGD("Adding reprocess crop roi map for stream %dx%d, %dx%d",
8005 crop_data->crop_info[i].roi_map.left,
8006 crop_data->crop_info[i].roi_map.top,
8007 crop_data->crop_info[i].roi_map.width,
8008 crop_data->crop_info[i].roi_map.height);
8009 break;
8010
8011 }
8012 }
8013 camMetadata.update(QCAMERA3_CROP_COUNT_REPROCESS,
8014 &streams_found, 1);
8015 camMetadata.update(QCAMERA3_CROP_REPROCESS,
8016 crop, (size_t)(streams_found * 4));
8017 if (roi_map.array()) {
8018 camMetadata.update(QCAMERA3_CROP_ROI_MAP_REPROCESS,
8019 roi_map.array(), roi_map.size());
8020 }
8021 }
8022 if (crop) {
8023 delete [] crop;
8024 }
8025 }
8026 }
8027 }
8028
8029 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
8030        // Regardless of whether CAC is supported, CTS expects the CAC result to be
8031        // non-NULL, so hardcode the CAC result to OFF mode.
8032 uint8_t fwkCacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
8033 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &fwkCacMode, 1);
8034 } else {
8035 IF_META_AVAILABLE(cam_aberration_mode_t, cacMode, CAM_INTF_PARM_CAC, metadata) {
8036 int val = lookupFwkName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
8037 *cacMode);
8038 if (NAME_NOT_FOUND != val) {
8039 uint8_t resultCacMode = (uint8_t)val;
8040                // Check whether the CAC result from the callback matches the framework-set CAC mode.
8041                // If not, report the CAC mode that came in the corresponding request.
Shuzhen Wang181c57b2017-07-21 11:39:44 -07008042 if (pendingRequest.fwkCacMode != resultCacMode) {
8043 resultCacMode = pendingRequest.fwkCacMode;
Thierry Strudel3d639192016-09-09 11:52:26 -07008044 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08008045 //Check if CAC is disabled by property
8046 if (m_cacModeDisabled) {
8047 resultCacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
8048 }
8049
Shuzhen Wang181c57b2017-07-21 11:39:44 -07008050 LOGD("fwk_cacMode=%d resultCacMode=%d", pendingRequest.fwkCacMode, resultCacMode);
Thierry Strudel3d639192016-09-09 11:52:26 -07008051 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &resultCacMode, 1);
8052 } else {
8053 LOGE("Invalid CAC camera parameter: %d", *cacMode);
8054 }
8055 }
8056 }
8057
8058 // Post blob of cam_cds_data through vendor tag.
8059 IF_META_AVAILABLE(cam_cds_data_t, cdsInfo, CAM_INTF_META_CDS_DATA, metadata) {
8060 uint8_t cnt = cdsInfo->num_of_streams;
8061 cam_cds_data_t cdsDataOverride;
8062 memset(&cdsDataOverride, 0, sizeof(cdsDataOverride));
8063 cdsDataOverride.session_cds_enable = cdsInfo->session_cds_enable;
8064 cdsDataOverride.num_of_streams = 1;
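        // Only the CDS setting of the reprocessible stream is reported through
        // this tag; num_of_streams is forced to 1.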
8065 if ((0 < cnt) && (cnt <= MAX_NUM_STREAMS)) {
8066 uint32_t reproc_stream_id;
8067 if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
8068 LOGD("No reprocessible stream found, ignore cds data");
8069 } else {
8070 for (size_t i = 0; i < cnt; i++) {
8071 if (cdsInfo->cds_info[i].stream_id ==
8072 reproc_stream_id) {
8073 cdsDataOverride.cds_info[0].cds_enable =
8074 cdsInfo->cds_info[i].cds_enable;
8075 break;
8076 }
8077 }
8078 }
8079 } else {
8080 LOGD("Invalid stream count %d in CDS_DATA", cnt);
8081 }
8082 camMetadata.update(QCAMERA3_CDS_INFO,
8083 (uint8_t *)&cdsDataOverride,
8084 sizeof(cam_cds_data_t));
8085 }
8086
8087 // Ldaf calibration data
8088 if (!mLdafCalibExist) {
8089 IF_META_AVAILABLE(uint32_t, ldafCalib,
8090 CAM_INTF_META_LDAF_EXIF, metadata) {
8091 mLdafCalibExist = true;
8092 mLdafCalib[0] = ldafCalib[0];
8093 mLdafCalib[1] = ldafCalib[1];
8094 LOGD("ldafCalib[0] is %d, ldafCalib[1] is %d",
8095 ldafCalib[0], ldafCalib[1]);
8096 }
8097 }
8098
Thierry Strudel54dc9782017-02-15 12:12:10 -08008099 // EXIF debug data through vendor tag
8100 /*
8101 * Mobicat Mask can assume 3 values:
8102 * 1 refers to Mobicat data,
8103 * 2 refers to Stats Debug and Exif Debug Data
8104 * 3 refers to Mobicat and Stats Debug Data
8105 * We want to make sure that we are sending Exif debug data
8106 * only when Mobicat Mask is 2.
8107 */
8108 if ((mExifParams.debug_params != NULL) && (getMobicatMask() == 2)) {
8109 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB,
8110 (uint8_t *)(void *)mExifParams.debug_params,
8111 sizeof(mm_jpeg_debug_exif_params_t));
8112 }
8113
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008114 // Reprocess and DDM debug data through vendor tag
8115 cam_reprocess_info_t repro_info;
8116 memset(&repro_info, 0, sizeof(cam_reprocess_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008117 IF_META_AVAILABLE(cam_stream_crop_info_t, sensorCropInfo,
8118 CAM_INTF_META_SNAP_CROP_INFO_SENSOR, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008119 memcpy(&(repro_info.sensor_crop_info), sensorCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008120 }
8121 IF_META_AVAILABLE(cam_stream_crop_info_t, camifCropInfo,
8122 CAM_INTF_META_SNAP_CROP_INFO_CAMIF, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008123 memcpy(&(repro_info.camif_crop_info), camifCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008124 }
8125 IF_META_AVAILABLE(cam_stream_crop_info_t, ispCropInfo,
8126 CAM_INTF_META_SNAP_CROP_INFO_ISP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008127 memcpy(&(repro_info.isp_crop_info), ispCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008128 }
8129 IF_META_AVAILABLE(cam_stream_crop_info_t, cppCropInfo,
8130 CAM_INTF_META_SNAP_CROP_INFO_CPP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008131 memcpy(&(repro_info.cpp_crop_info), cppCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008132 }
8133 IF_META_AVAILABLE(cam_focal_length_ratio_t, ratio,
8134 CAM_INTF_META_AF_FOCAL_LENGTH_RATIO, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008135 memcpy(&(repro_info.af_focal_length_ratio), ratio, sizeof(cam_focal_length_ratio_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008136 }
8137 IF_META_AVAILABLE(int32_t, flip, CAM_INTF_PARM_FLIP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008138 memcpy(&(repro_info.pipeline_flip), flip, sizeof(int32_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008139 }
8140 IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
8141 CAM_INTF_PARM_ROTATION, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008142 memcpy(&(repro_info.rotation_info), rotationInfo, sizeof(cam_rotation_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008143 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008144 IF_META_AVAILABLE(cam_area_t, afRoi, CAM_INTF_META_AF_ROI, metadata) {
8145 memcpy(&(repro_info.af_roi), afRoi, sizeof(cam_area_t));
8146 }
8147 IF_META_AVAILABLE(cam_dyn_img_data_t, dynMask, CAM_INTF_META_IMG_DYN_FEAT, metadata) {
8148 memcpy(&(repro_info.dyn_mask), dynMask, sizeof(cam_dyn_img_data_t));
8149 }
8150 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB,
8151 (uint8_t *)&repro_info, sizeof(cam_reprocess_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008152
Thierry Strudel295a0ca2016-11-03 18:38:47 -07008153 // INSTANT AEC MODE
8154 IF_META_AVAILABLE(uint8_t, instant_aec_mode,
8155 CAM_INTF_PARM_INSTANT_AEC, metadata) {
8156 camMetadata.update(QCAMERA3_INSTANT_AEC_MODE, instant_aec_mode, 1);
8157 }
8158
Shuzhen Wange763e802016-03-31 10:24:29 -07008159 // AF scene change
8160 IF_META_AVAILABLE(uint8_t, afSceneChange, CAM_INTF_META_AF_SCENE_CHANGE, metadata) {
8161 camMetadata.update(NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE, afSceneChange, 1);
8162 }
8163
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07008164 // Enable ZSL
8165 if (enableZsl != nullptr) {
8166 uint8_t value = *enableZsl ?
8167 ANDROID_CONTROL_ENABLE_ZSL_TRUE : ANDROID_CONTROL_ENABLE_ZSL_FALSE;
8168 camMetadata.update(ANDROID_CONTROL_ENABLE_ZSL, &value, 1);
8169 }
8170
Xu Han821ea9c2017-05-23 09:00:40 -07008171 // OIS Data
8172 IF_META_AVAILABLE(cam_frame_ois_info_t, frame_ois_data, CAM_INTF_META_FRAME_OIS_DATA, metadata) {
8173 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_FRAME_TIMESTAMP_VSYNC,
8174 &(frame_ois_data->frame_sof_timestamp_vsync), 1);
8175 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_FRAME_TIMESTAMP_BOOTTIME,
8176 &(frame_ois_data->frame_sof_timestamp_boottime), 1);
8177 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_TIMESTAMPS_BOOTTIME,
8178 frame_ois_data->ois_sample_timestamp_boottime, frame_ois_data->num_ois_sample);
8179 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_SHIFT_X,
8180 frame_ois_data->ois_sample_shift_x, frame_ois_data->num_ois_sample);
8181 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_SHIFT_Y,
8182 frame_ois_data->ois_sample_shift_y, frame_ois_data->num_ois_sample);
Xue Tu2c3e9142017-08-18 16:23:52 -07008183 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_SHIFT_PIXEL_X,
8184 frame_ois_data->ois_sample_shift_pixel_x, frame_ois_data->num_ois_sample);
8185 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_SHIFT_PIXEL_Y,
8186 frame_ois_data->ois_sample_shift_pixel_y, frame_ois_data->num_ois_sample);
Xu Han821ea9c2017-05-23 09:00:40 -07008187 }
8188
Thierry Strudel3d639192016-09-09 11:52:26 -07008189 resultMetadata = camMetadata.release();
8190 return resultMetadata;
8191}
8192
8193/*===========================================================================
8194 * FUNCTION : saveExifParams
8195 *
8196 * DESCRIPTION: cache 3A and stats EXIF debug parameters from the metadata
 *              callback into mExifParams
8197 *
8198 * PARAMETERS :
8199 * @metadata : metadata information from callback
8200 *
8201 * RETURN : none
8202 *
8203 *==========================================================================*/
8204void QCamera3HardwareInterface::saveExifParams(metadata_buffer_t *metadata)
8205{
8206 IF_META_AVAILABLE(cam_ae_exif_debug_t, ae_exif_debug_params,
8207 CAM_INTF_META_EXIF_DEBUG_AE, metadata) {
8208 if (mExifParams.debug_params) {
8209 mExifParams.debug_params->ae_debug_params = *ae_exif_debug_params;
8210 mExifParams.debug_params->ae_debug_params_valid = TRUE;
8211 }
8212 }
8213 IF_META_AVAILABLE(cam_awb_exif_debug_t,awb_exif_debug_params,
8214 CAM_INTF_META_EXIF_DEBUG_AWB, metadata) {
8215 if (mExifParams.debug_params) {
8216 mExifParams.debug_params->awb_debug_params = *awb_exif_debug_params;
8217 mExifParams.debug_params->awb_debug_params_valid = TRUE;
8218 }
8219 }
8220 IF_META_AVAILABLE(cam_af_exif_debug_t,af_exif_debug_params,
8221 CAM_INTF_META_EXIF_DEBUG_AF, metadata) {
8222 if (mExifParams.debug_params) {
8223 mExifParams.debug_params->af_debug_params = *af_exif_debug_params;
8224 mExifParams.debug_params->af_debug_params_valid = TRUE;
8225 }
8226 }
8227 IF_META_AVAILABLE(cam_asd_exif_debug_t, asd_exif_debug_params,
8228 CAM_INTF_META_EXIF_DEBUG_ASD, metadata) {
8229 if (mExifParams.debug_params) {
8230 mExifParams.debug_params->asd_debug_params = *asd_exif_debug_params;
8231 mExifParams.debug_params->asd_debug_params_valid = TRUE;
8232 }
8233 }
8234 IF_META_AVAILABLE(cam_stats_buffer_exif_debug_t,stats_exif_debug_params,
8235 CAM_INTF_META_EXIF_DEBUG_STATS, metadata) {
8236 if (mExifParams.debug_params) {
8237 mExifParams.debug_params->stats_debug_params = *stats_exif_debug_params;
8238 mExifParams.debug_params->stats_debug_params_valid = TRUE;
8239 }
8240 }
8241 IF_META_AVAILABLE(cam_bestats_buffer_exif_debug_t,bestats_exif_debug_params,
8242 CAM_INTF_META_EXIF_DEBUG_BESTATS, metadata) {
8243 if (mExifParams.debug_params) {
8244 mExifParams.debug_params->bestats_debug_params = *bestats_exif_debug_params;
8245 mExifParams.debug_params->bestats_debug_params_valid = TRUE;
8246 }
8247 }
8248 IF_META_AVAILABLE(cam_bhist_buffer_exif_debug_t, bhist_exif_debug_params,
8249 CAM_INTF_META_EXIF_DEBUG_BHIST, metadata) {
8250 if (mExifParams.debug_params) {
8251 mExifParams.debug_params->bhist_debug_params = *bhist_exif_debug_params;
8252 mExifParams.debug_params->bhist_debug_params_valid = TRUE;
8253 }
8254 }
8255 IF_META_AVAILABLE(cam_q3a_tuning_info_t, q3a_tuning_exif_debug_params,
8256 CAM_INTF_META_EXIF_DEBUG_3A_TUNING, metadata) {
8257 if (mExifParams.debug_params) {
8258 mExifParams.debug_params->q3a_tuning_debug_params = *q3a_tuning_exif_debug_params;
8259 mExifParams.debug_params->q3a_tuning_debug_params_valid = TRUE;
8260 }
8261 }
8262}
8263
8264/*===========================================================================
8265 * FUNCTION : get3AExifParams
8266 *
8267 * DESCRIPTION: return the cached EXIF parameters (mExifParams)
8268 *
8269 * PARAMETERS : none
8270 *
8271 *
8272 * RETURN : mm_jpeg_exif_params_t
8273 *
8274 *==========================================================================*/
8275mm_jpeg_exif_params_t QCamera3HardwareInterface::get3AExifParams()
8276{
8277 return mExifParams;
8278}
8279
8280/*===========================================================================
8281 * FUNCTION : translateCbUrgentMetadataToResultMetadata
8282 *
8283 * DESCRIPTION: translate urgent (partial result) metadata from the backend
 *              format into framework result metadata
8284 *
8285 * PARAMETERS :
8286 * @metadata : metadata information from callback
Shuzhen Wang94ddf072017-03-12 19:47:23 -07008287 * @lastUrgentMetadataInBatch: Boolean to indicate whether this is the last
8288 * urgent metadata in a batch. Always true for
8289 * non-batch mode.
Shuzhen Wang181c57b2017-07-21 11:39:44 -07008290 * @frame_number : frame number for this urgent metadata
Shuzhen Wang485e2442017-08-02 12:21:08 -07008291 * @isJumpstartMetadata: Whether this is a partial metadata for jumpstart,
8292 * i.e. even though it doesn't map to a valid partial
8293 * frame number, its metadata entries should be kept.
Thierry Strudel3d639192016-09-09 11:52:26 -07008294 * RETURN : camera_metadata_t*
8295 * metadata in a format specified by fwk
8296 *==========================================================================*/
8297camera_metadata_t*
8298QCamera3HardwareInterface::translateCbUrgentMetadataToResultMetadata
Shuzhen Wang181c57b2017-07-21 11:39:44 -07008299 (metadata_buffer_t *metadata, bool lastUrgentMetadataInBatch,
Shuzhen Wang485e2442017-08-02 12:21:08 -07008300 uint32_t frame_number, bool isJumpstartMetadata)
Thierry Strudel3d639192016-09-09 11:52:26 -07008301{
8302 CameraMetadata camMetadata;
8303 camera_metadata_t *resultMetadata;
8304
Shuzhen Wang485e2442017-08-02 12:21:08 -07008305 if (!lastUrgentMetadataInBatch && !isJumpstartMetadata) {
Shuzhen Wang94ddf072017-03-12 19:47:23 -07008306 /* In batch mode, use empty metadata if this is not the last in batch
8307 */
8308 resultMetadata = allocate_camera_metadata(0, 0);
8309 return resultMetadata;
8310 }
Thierry Strudel3d639192016-09-09 11:52:26 -07008311
8312 IF_META_AVAILABLE(uint32_t, whiteBalanceState, CAM_INTF_META_AWB_STATE, metadata) {
8313 uint8_t fwk_whiteBalanceState = (uint8_t) *whiteBalanceState;
8314 camMetadata.update(ANDROID_CONTROL_AWB_STATE, &fwk_whiteBalanceState, 1);
8315 LOGD("urgent Metadata : ANDROID_CONTROL_AWB_STATE %u", *whiteBalanceState);
8316 }
8317
8318 IF_META_AVAILABLE(cam_trigger_t, aecTrigger, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER, metadata) {
8319 camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
8320 &aecTrigger->trigger, 1);
8321 camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID,
8322 &aecTrigger->trigger_id, 1);
8323 LOGD("urgent Metadata : CAM_INTF_META_AEC_PRECAPTURE_TRIGGER: %d",
8324 aecTrigger->trigger);
8325 LOGD("urgent Metadata : ANDROID_CONTROL_AE_PRECAPTURE_ID: %d",
8326 aecTrigger->trigger_id);
8327 }
8328
8329 IF_META_AVAILABLE(uint32_t, ae_state, CAM_INTF_META_AEC_STATE, metadata) {
8330 uint8_t fwk_ae_state = (uint8_t) *ae_state;
8331 camMetadata.update(ANDROID_CONTROL_AE_STATE, &fwk_ae_state, 1);
8332 LOGD("urgent Metadata : ANDROID_CONTROL_AE_STATE %u", *ae_state);
8333 }
8334
Shuzhen Wang181c57b2017-07-21 11:39:44 -07008335 IF_META_AVAILABLE(uint32_t, focusMode, CAM_INTF_PARM_FOCUS_MODE, metadata) {
8336 int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP), *focusMode);
8337 if (NAME_NOT_FOUND != val) {
8338 uint8_t fwkAfMode = (uint8_t)val;
8339 camMetadata.update(ANDROID_CONTROL_AF_MODE, &fwkAfMode, 1);
8340 LOGD("urgent Metadata : ANDROID_CONTROL_AF_MODE %d", val);
8341 } else {
8342 LOGH("urgent Metadata not found : ANDROID_CONTROL_AF_MODE %d",
8343 val);
8344 }
Thierry Strudel3d639192016-09-09 11:52:26 -07008345 }
8346
Shuzhen Wang181c57b2017-07-21 11:39:44 -07008347 IF_META_AVAILABLE(cam_trigger_t, af_trigger, CAM_INTF_META_AF_TRIGGER, metadata) {
8348 LOGD("urgent Metadata : CAM_INTF_META_AF_TRIGGER = %d",
8349 af_trigger->trigger);
8350 LOGD("urgent Metadata : ANDROID_CONTROL_AF_TRIGGER_ID = %d",
8351 af_trigger->trigger_id);
8352
8353 IF_META_AVAILABLE(uint32_t, afState, CAM_INTF_META_AF_STATE, metadata) {
8354 mAfTrigger = *af_trigger;
8355 uint32_t fwk_AfState = (uint32_t) *afState;
8356
8357 // If this is the result for a new trigger, check if there is new early
8358 // af state. If there is, use the last af state for all results
8359 // preceding current partial frame number.
8360 for (auto & pendingRequest : mPendingRequestsList) {
8361 if (pendingRequest.frame_number < frame_number) {
8362 pendingRequest.focusStateValid = true;
8363 pendingRequest.focusState = fwk_AfState;
8364 } else if (pendingRequest.frame_number == frame_number) {
8365 IF_META_AVAILABLE(uint32_t, earlyAfState, CAM_INTF_META_EARLY_AF_STATE, metadata) {
8366 // Check if early AF state for trigger exists. If yes, send AF state as
8367 // partial result for better latency.
8368 uint8_t fwkEarlyAfState = (uint8_t) *earlyAfState;
8369 pendingRequest.focusStateSent = true;
8370 camMetadata.update(ANDROID_CONTROL_AF_STATE, &fwkEarlyAfState, 1);
8371 LOGD("urgent Metadata(%d) : ANDROID_CONTROL_AF_STATE %u",
8372 frame_number, fwkEarlyAfState);
8373 }
8374 }
8375 }
8376 }
8377 }
8378 camMetadata.update(ANDROID_CONTROL_AF_TRIGGER,
8379 &mAfTrigger.trigger, 1);
8380 camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, &mAfTrigger.trigger_id, 1);
8381
Shuzhen Wang6ce35e62017-03-27 18:00:41 -07008382 IF_META_AVAILABLE(cam_area_t, hAfRegions, CAM_INTF_META_AF_ROI, metadata) {
8383 /*af regions*/
Shuzhen Wang91c14e82017-08-21 17:56:57 -07008384 cam_rect_t hAfRect = hAfRegions->rect;
Shuzhen Wang6ce35e62017-03-27 18:00:41 -07008385 int32_t afRegions[REGIONS_TUPLE_COUNT];
8386        // Adjust the AF region from the sensor output coordinate system to the
8387        // active array coordinate system.
Shuzhen Wang91c14e82017-08-21 17:56:57 -07008388 mCropRegionMapper.toActiveArray(hAfRect.left, hAfRect.top,
8389 hAfRect.width, hAfRect.height);
Shuzhen Wang6ce35e62017-03-27 18:00:41 -07008390
Shuzhen Wang91c14e82017-08-21 17:56:57 -07008391 convertToRegions(hAfRect, afRegions, hAfRegions->weight);
Shuzhen Wang6ce35e62017-03-27 18:00:41 -07008392 camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions,
8393 REGIONS_TUPLE_COUNT);
8394 LOGD("Metadata : ANDROID_CONTROL_AF_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
8395 afRegions[0], afRegions[1], afRegions[2], afRegions[3],
Shuzhen Wang91c14e82017-08-21 17:56:57 -07008396 hAfRect.left, hAfRect.top, hAfRect.width,
8397 hAfRect.height);
Shuzhen Wang6ce35e62017-03-27 18:00:41 -07008398 }
8399
Shuzhen Wangcc386c52017-03-29 09:28:08 -07008400 // AF region confidence
8401 IF_META_AVAILABLE(int32_t, afRegionConfidence, CAM_INTF_META_AF_REGIONS_CONFIDENCE, metadata) {
8402 camMetadata.update(NEXUS_EXPERIMENTAL_2017_AF_REGIONS_CONFIDENCE, afRegionConfidence, 1);
8403 }
8404
Thierry Strudel3d639192016-09-09 11:52:26 -07008405 IF_META_AVAILABLE(int32_t, whiteBalance, CAM_INTF_PARM_WHITE_BALANCE, metadata) {
8406 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
8407 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP), *whiteBalance);
8408 if (NAME_NOT_FOUND != val) {
8409 uint8_t fwkWhiteBalanceMode = (uint8_t)val;
8410 camMetadata.update(ANDROID_CONTROL_AWB_MODE, &fwkWhiteBalanceMode, 1);
8411 LOGD("urgent Metadata : ANDROID_CONTROL_AWB_MODE %d", val);
8412 } else {
8413 LOGH("urgent Metadata not found : ANDROID_CONTROL_AWB_MODE");
8414 }
8415 }
8416
8417 uint8_t fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
8418 uint32_t aeMode = CAM_AE_MODE_MAX;
8419 int32_t flashMode = CAM_FLASH_MODE_MAX;
8420 int32_t redeye = -1;
8421 IF_META_AVAILABLE(uint32_t, pAeMode, CAM_INTF_META_AEC_MODE, metadata) {
8422 aeMode = *pAeMode;
8423 }
8424 IF_META_AVAILABLE(int32_t, pFlashMode, CAM_INTF_PARM_LED_MODE, metadata) {
8425 flashMode = *pFlashMode;
8426 }
8427 IF_META_AVAILABLE(int32_t, pRedeye, CAM_INTF_PARM_REDEYE_REDUCTION, metadata) {
8428 redeye = *pRedeye;
8429 }
8430
8431 if (1 == redeye) {
8432 fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
8433 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8434 } else if ((CAM_FLASH_MODE_AUTO == flashMode) || (CAM_FLASH_MODE_ON == flashMode)) {
8435 int val = lookupFwkName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
8436 flashMode);
8437 if (NAME_NOT_FOUND != val) {
8438 fwk_aeMode = (uint8_t)val;
8439 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8440 } else {
8441 LOGE("Unsupported flash mode %d", flashMode);
8442 }
8443 } else if (aeMode == CAM_AE_MODE_ON) {
8444 fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON;
8445 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8446 } else if (aeMode == CAM_AE_MODE_OFF) {
8447 fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
8448 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -08008449 } else if (aeMode == CAM_AE_MODE_ON_EXTERNAL_FLASH) {
8450 fwk_aeMode = NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH;
8451 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -07008452 } else {
8453 LOGE("Not enough info to deduce ANDROID_CONTROL_AE_MODE redeye:%d, "
8454 "flashMode:%d, aeMode:%u!!!",
8455 redeye, flashMode, aeMode);
8456 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07008457 if (mInstantAEC) {
8458        // Increment the frame index count until a bound is reached for instant AEC.
8459 mInstantAecFrameIdxCount++;
8460 IF_META_AVAILABLE(cam_3a_params_t, ae_params,
8461 CAM_INTF_META_AEC_INFO, metadata) {
8462 LOGH("ae_params->settled = %d",ae_params->settled);
8463 // If AEC settled, or if number of frames reached bound value,
8464 // should reset instant AEC.
8465 if (ae_params->settled ||
8466 (mInstantAecFrameIdxCount > mAecSkipDisplayFrameBound)) {
8467 LOGH("AEC settled or Frames reached instantAEC bound, resetting instantAEC");
8468 mInstantAEC = false;
8469 mResetInstantAEC = true;
8470 mInstantAecFrameIdxCount = 0;
8471 }
8472 }
8473 }
Thierry Strudel3d639192016-09-09 11:52:26 -07008474 resultMetadata = camMetadata.release();
8475 return resultMetadata;
8476}
8477
8478/*===========================================================================
8479 * FUNCTION : dumpMetadataToFile
8480 *
8481 * DESCRIPTION: Dumps tuning metadata to file system
8482 *
8483 * PARAMETERS :
8484 * @meta : tuning metadata
8485 * @dumpFrameCount : current dump frame count
8486 * @enabled : Enable mask
8487 *
8488 *==========================================================================*/
8489void QCamera3HardwareInterface::dumpMetadataToFile(tuning_params_t &meta,
8490 uint32_t &dumpFrameCount,
8491 bool enabled,
8492 const char *type,
8493 uint32_t frameNumber)
8494{
8495 //Some sanity checks
8496 if (meta.tuning_sensor_data_size > TUNING_SENSOR_DATA_MAX) {
8497 LOGE("Tuning sensor data size bigger than expected %d: %d",
8498 meta.tuning_sensor_data_size,
8499 TUNING_SENSOR_DATA_MAX);
8500 return;
8501 }
8502
8503 if (meta.tuning_vfe_data_size > TUNING_VFE_DATA_MAX) {
8504 LOGE("Tuning VFE data size bigger than expected %d: %d",
8505 meta.tuning_vfe_data_size,
8506 TUNING_VFE_DATA_MAX);
8507 return;
8508 }
8509
8510 if (meta.tuning_cpp_data_size > TUNING_CPP_DATA_MAX) {
8511 LOGE("Tuning CPP data size bigger than expected %d: %d",
8512 meta.tuning_cpp_data_size,
8513 TUNING_CPP_DATA_MAX);
8514 return;
8515 }
8516
8517 if (meta.tuning_cac_data_size > TUNING_CAC_DATA_MAX) {
8518 LOGE("Tuning CAC data size bigger than expected %d: %d",
8519 meta.tuning_cac_data_size,
8520 TUNING_CAC_DATA_MAX);
8521 return;
8522 }
8523 //
8524
8525 if(enabled){
8526 char timeBuf[FILENAME_MAX];
8527 char buf[FILENAME_MAX];
8528 memset(buf, 0, sizeof(buf));
8529 memset(timeBuf, 0, sizeof(timeBuf));
8530 time_t current_time;
8531 struct tm * timeinfo;
8532 time (&current_time);
8533 timeinfo = localtime (&current_time);
8534 if (timeinfo != NULL) {
8535 strftime (timeBuf, sizeof(timeBuf),
8536 QCAMERA_DUMP_FRM_LOCATION"%Y%m%d%H%M%S", timeinfo);
8537 }
8538 String8 filePath(timeBuf);
8539 snprintf(buf,
8540 sizeof(buf),
8541 "%dm_%s_%d.bin",
8542 dumpFrameCount,
8543 type,
8544 frameNumber);
8545 filePath.append(buf);
8546 int file_fd = open(filePath.string(), O_RDWR | O_CREAT, 0777);
8547 if (file_fd >= 0) {
8548 ssize_t written_len = 0;
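            // The dump file layout mirrors the tuning blob packing: data version,
            // per-module sizes, then the sensor/VFE/CPP/CAC payloads.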
8549 meta.tuning_data_version = TUNING_DATA_VERSION;
8550 void *data = (void *)((uint8_t *)&meta.tuning_data_version);
8551 written_len += write(file_fd, data, sizeof(uint32_t));
8552 data = (void *)((uint8_t *)&meta.tuning_sensor_data_size);
8553 LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
8554 written_len += write(file_fd, data, sizeof(uint32_t));
8555 data = (void *)((uint8_t *)&meta.tuning_vfe_data_size);
8556 LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
8557 written_len += write(file_fd, data, sizeof(uint32_t));
8558 data = (void *)((uint8_t *)&meta.tuning_cpp_data_size);
8559 LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
8560 written_len += write(file_fd, data, sizeof(uint32_t));
8561 data = (void *)((uint8_t *)&meta.tuning_cac_data_size);
8562 LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
8563 written_len += write(file_fd, data, sizeof(uint32_t));
8564 meta.tuning_mod3_data_size = 0;
8565 data = (void *)((uint8_t *)&meta.tuning_mod3_data_size);
8566 LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
8567 written_len += write(file_fd, data, sizeof(uint32_t));
8568 size_t total_size = meta.tuning_sensor_data_size;
8569 data = (void *)((uint8_t *)&meta.data);
8570 written_len += write(file_fd, data, total_size);
8571 total_size = meta.tuning_vfe_data_size;
8572 data = (void *)((uint8_t *)&meta.data[TUNING_VFE_DATA_OFFSET]);
8573 written_len += write(file_fd, data, total_size);
8574 total_size = meta.tuning_cpp_data_size;
8575 data = (void *)((uint8_t *)&meta.data[TUNING_CPP_DATA_OFFSET]);
8576 written_len += write(file_fd, data, total_size);
8577 total_size = meta.tuning_cac_data_size;
8578 data = (void *)((uint8_t *)&meta.data[TUNING_CAC_DATA_OFFSET]);
8579 written_len += write(file_fd, data, total_size);
8580 close(file_fd);
8581        } else {
8582 LOGE("fail to open file for metadata dumping");
8583 }
8584 }
8585}
8586
8587/*===========================================================================
8588 * FUNCTION : cleanAndSortStreamInfo
8589 *
8590 * DESCRIPTION: helper method to clean up invalid streams in stream_info,
8591 * and sort them such that raw stream is at the end of the list
8592 * This is a workaround for camera daemon constraint.
8593 *
8594 * PARAMETERS : None
8595 *
8596 *==========================================================================*/
8597void QCamera3HardwareInterface::cleanAndSortStreamInfo()
8598{
8599 List<stream_info_t *> newStreamInfo;
8600
8601 /*clean up invalid streams*/
8602 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
8603 it != mStreamInfo.end();) {
8604 if(((*it)->status) == INVALID){
8605 QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
8606 delete channel;
8607 free(*it);
8608 it = mStreamInfo.erase(it);
8609 } else {
8610 it++;
8611 }
8612 }
8613
8614 // Move preview/video/callback/snapshot streams into newList
8615 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
8616 it != mStreamInfo.end();) {
8617 if ((*it)->stream->format != HAL_PIXEL_FORMAT_RAW_OPAQUE &&
8618 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW10 &&
8619 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW16) {
8620 newStreamInfo.push_back(*it);
8621 it = mStreamInfo.erase(it);
8622 } else
8623 it++;
8624 }
8625 // Move raw streams into newList
8626 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
8627 it != mStreamInfo.end();) {
8628 newStreamInfo.push_back(*it);
8629 it = mStreamInfo.erase(it);
8630 }
8631
8632 mStreamInfo = newStreamInfo;
8633}
8634
8635/*===========================================================================
8636 * FUNCTION : extractJpegMetadata
8637 *
8638 * DESCRIPTION: helper method to extract Jpeg metadata from capture request.
8639 * JPEG metadata is cached in HAL, and return as part of capture
8640 * result when metadata is returned from camera daemon.
8641 *
8642 * PARAMETERS : @jpegMetadata: jpeg metadata to be extracted
8643 * @request: capture request
8644 *
8645 *==========================================================================*/
8646void QCamera3HardwareInterface::extractJpegMetadata(
8647 CameraMetadata& jpegMetadata,
8648 const camera3_capture_request_t *request)
8649{
8650 CameraMetadata frame_settings;
8651 frame_settings = request->settings;
8652
8653 if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES))
8654 jpegMetadata.update(ANDROID_JPEG_GPS_COORDINATES,
8655 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d,
8656 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).count);
8657
8658 if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD))
8659 jpegMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD,
8660 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8,
8661 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).count);
8662
8663 if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP))
8664 jpegMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP,
8665 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64,
8666 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).count);
8667
8668 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION))
8669 jpegMetadata.update(ANDROID_JPEG_ORIENTATION,
8670 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32,
8671 frame_settings.find(ANDROID_JPEG_ORIENTATION).count);
8672
8673 if (frame_settings.exists(ANDROID_JPEG_QUALITY))
8674 jpegMetadata.update(ANDROID_JPEG_QUALITY,
8675 frame_settings.find(ANDROID_JPEG_QUALITY).data.u8,
8676 frame_settings.find(ANDROID_JPEG_QUALITY).count);
8677
8678 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY))
8679 jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY,
8680 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8,
8681 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).count);
8682
8683 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
8684 int32_t thumbnail_size[2];
8685 thumbnail_size[0] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
8686 thumbnail_size[1] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
8687 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
8688 int32_t orientation =
8689 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008690 if ((!needJpegExifRotation()) && ((orientation == 90) || (orientation == 270))) {
Thierry Strudel3d639192016-09-09 11:52:26 -07008691 //swap thumbnail dimensions for rotations 90 and 270 in jpeg metadata.
8692 int32_t temp;
8693 temp = thumbnail_size[0];
8694 thumbnail_size[0] = thumbnail_size[1];
8695 thumbnail_size[1] = temp;
8696 }
8697 }
8698 jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE,
8699 thumbnail_size,
8700 frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
8701 }
8702
8703}
8704
8705/*===========================================================================
8706 * FUNCTION : convertToRegions
8707 *
8708 * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array
8709 *
8710 * PARAMETERS :
8711 * @rect : cam_rect_t struct to convert
8712 * @region : int32_t destination array
8713 * @weight : if we are converting from cam_area_t, weight is valid
8714 * else weight = -1
8715 *
8716 *==========================================================================*/
8717void QCamera3HardwareInterface::convertToRegions(cam_rect_t rect,
8718 int32_t *region, int weight)
8719{
Jason Lee8ce36fa2017-04-19 19:40:37 -07008720 region[FACE_LEFT] = rect.left;
8721 region[FACE_TOP] = rect.top;
8722 region[FACE_RIGHT] = rect.left + rect.width;
8723 region[FACE_BOTTOM] = rect.top + rect.height;
Thierry Strudel3d639192016-09-09 11:52:26 -07008724 if (weight > -1) {
Jason Lee8ce36fa2017-04-19 19:40:37 -07008725 region[FACE_WEIGHT] = weight;
Thierry Strudel3d639192016-09-09 11:52:26 -07008726 }
8727}
8728
8729/*===========================================================================
8730 * FUNCTION : convertFromRegions
8731 *
8732 * DESCRIPTION: helper method to convert a framework region array into cam_area_t
8733 *
8734 * PARAMETERS :
8735 *   @roi            : cam_area_t destination to fill
8736 *   @frame_settings : request settings containing the region entry
8737 *   @tag            : metadata tag whose data is [x_min, y_min, x_max, y_max, weight]
8739 *
8740 *==========================================================================*/
8741void QCamera3HardwareInterface::convertFromRegions(cam_area_t &roi,
Chien-Yu Chen92724a82017-01-06 11:50:30 -08008742 const CameraMetadata &frame_settings, uint32_t tag)
Thierry Strudel3d639192016-09-09 11:52:26 -07008743{
Thierry Strudel3d639192016-09-09 11:52:26 -07008744 int32_t x_min = frame_settings.find(tag).data.i32[0];
8745 int32_t y_min = frame_settings.find(tag).data.i32[1];
8746 int32_t x_max = frame_settings.find(tag).data.i32[2];
8747 int32_t y_max = frame_settings.find(tag).data.i32[3];
8748 roi.weight = frame_settings.find(tag).data.i32[4];
8749 roi.rect.left = x_min;
8750 roi.rect.top = y_min;
8751 roi.rect.width = x_max - x_min;
8752 roi.rect.height = y_max - y_min;
8753}
8754
8755/*===========================================================================
8756 * FUNCTION : resetIfNeededROI
8757 *
8758 * DESCRIPTION: helper method to clip the roi to the scaler crop region;
8759 *              returns false if the roi lies entirely outside the crop region
8760 *
8761 * PARAMETERS :
8762 * @roi : cam_area_t struct to resize
8763 * @scalerCropRegion : cam_crop_region_t region to compare against
8764 *
8765 *
8766 *==========================================================================*/
8767bool QCamera3HardwareInterface::resetIfNeededROI(cam_area_t* roi,
8768 const cam_crop_region_t* scalerCropRegion)
8769{
8770 int32_t roi_x_max = roi->rect.width + roi->rect.left;
8771 int32_t roi_y_max = roi->rect.height + roi->rect.top;
8772 int32_t crop_x_max = scalerCropRegion->width + scalerCropRegion->left;
8773 int32_t crop_y_max = scalerCropRegion->height + scalerCropRegion->top;
8774
8775    /* According to the spec, weight = 0 indicates the roi should be disabled.
8776     * Without this check, the validation below (whether the roi lies inside the
8777     * scaler crop region) would fail, the roi would not be reset, and the
8778     * algorithm would keep using a stale roi window.
8779 */
8780 if (roi->weight == 0) {
8781 return true;
8782 }
8783
8784 if ((roi_x_max < scalerCropRegion->left) ||
8785        // right edge of roi window is left of scaler crop's left edge
8786        (roi_y_max < scalerCropRegion->top) ||
8787        // bottom edge of roi window is above scaler crop's top edge
8788        (roi->rect.left > crop_x_max) ||
8789        // left edge of roi window is beyond (right of) scaler crop's right edge
8790        (roi->rect.top > crop_y_max)){
8791        // top edge of roi window is below scaler crop's bottom edge
8792 return false;
8793 }
8794 if (roi->rect.left < scalerCropRegion->left) {
8795 roi->rect.left = scalerCropRegion->left;
8796 }
8797 if (roi->rect.top < scalerCropRegion->top) {
8798 roi->rect.top = scalerCropRegion->top;
8799 }
8800 if (roi_x_max > crop_x_max) {
8801 roi_x_max = crop_x_max;
8802 }
8803 if (roi_y_max > crop_y_max) {
8804 roi_y_max = crop_y_max;
8805 }
8806 roi->rect.width = roi_x_max - roi->rect.left;
8807 roi->rect.height = roi_y_max - roi->rect.top;
8808 return true;
8809}
8810
8811/*===========================================================================
8812 * FUNCTION : convertLandmarks
8813 *
8814 * DESCRIPTION: helper method to extract the landmarks from face detection info
8815 *
8816 * PARAMETERS :
8817 * @landmark_data : input landmark data to be converted
8818 * @landmarks : int32_t destination array
8819 *
8820 *
8821 *==========================================================================*/
8822void QCamera3HardwareInterface::convertLandmarks(
8823 cam_face_landmarks_info_t landmark_data,
8824 int32_t *landmarks)
8825{
Thierry Strudel04e026f2016-10-10 11:27:36 -07008826 if (landmark_data.is_left_eye_valid) {
8827 landmarks[LEFT_EYE_X] = (int32_t)landmark_data.left_eye_center.x;
8828 landmarks[LEFT_EYE_Y] = (int32_t)landmark_data.left_eye_center.y;
8829 } else {
8830 landmarks[LEFT_EYE_X] = FACE_INVALID_POINT;
8831 landmarks[LEFT_EYE_Y] = FACE_INVALID_POINT;
8832 }
8833
8834 if (landmark_data.is_right_eye_valid) {
8835 landmarks[RIGHT_EYE_X] = (int32_t)landmark_data.right_eye_center.x;
8836 landmarks[RIGHT_EYE_Y] = (int32_t)landmark_data.right_eye_center.y;
8837 } else {
8838 landmarks[RIGHT_EYE_X] = FACE_INVALID_POINT;
8839 landmarks[RIGHT_EYE_Y] = FACE_INVALID_POINT;
8840 }
8841
8842 if (landmark_data.is_mouth_valid) {
8843 landmarks[MOUTH_X] = (int32_t)landmark_data.mouth_center.x;
8844 landmarks[MOUTH_Y] = (int32_t)landmark_data.mouth_center.y;
8845 } else {
8846 landmarks[MOUTH_X] = FACE_INVALID_POINT;
8847 landmarks[MOUTH_Y] = FACE_INVALID_POINT;
8848 }
8849}
8850
8851/*===========================================================================
8852 * FUNCTION : setInvalidLandmarks
8853 *
8854 * DESCRIPTION: helper method to set invalid landmarks
8855 *
8856 * PARAMETERS :
8857 * @landmarks : int32_t destination array
8858 *
8859 *
8860 *==========================================================================*/
8861void QCamera3HardwareInterface::setInvalidLandmarks(
8862 int32_t *landmarks)
8863{
8864 landmarks[LEFT_EYE_X] = FACE_INVALID_POINT;
8865 landmarks[LEFT_EYE_Y] = FACE_INVALID_POINT;
8866 landmarks[RIGHT_EYE_X] = FACE_INVALID_POINT;
8867 landmarks[RIGHT_EYE_Y] = FACE_INVALID_POINT;
8868 landmarks[MOUTH_X] = FACE_INVALID_POINT;
8869 landmarks[MOUTH_Y] = FACE_INVALID_POINT;
Thierry Strudel3d639192016-09-09 11:52:26 -07008870}
8871
8872#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008873
8874/*===========================================================================
8875 * FUNCTION : getCapabilities
8876 *
8877 * DESCRIPTION: query camera capability from back-end
8878 *
8879 * PARAMETERS :
8880 * @ops : mm-interface ops structure
8881 * @cam_handle : camera handle for which we need capability
8882 *
8883 * RETURN : ptr type of capability structure
8884 * capability for success
8885 * NULL for failure
8886 *==========================================================================*/
8887cam_capability_t *QCamera3HardwareInterface::getCapabilities(mm_camera_ops_t *ops,
8888 uint32_t cam_handle)
8889{
8890 int rc = NO_ERROR;
8891 QCamera3HeapMemory *capabilityHeap = NULL;
8892 cam_capability_t *cap_ptr = NULL;
8893
8894 if (ops == NULL) {
8895 LOGE("Invalid arguments");
8896 return NULL;
8897 }
8898
8899 capabilityHeap = new QCamera3HeapMemory(1);
8900 if (capabilityHeap == NULL) {
8901 LOGE("creation of capabilityHeap failed");
8902 return NULL;
8903 }
8904
8905 /* Allocate memory for capability buffer */
8906 rc = capabilityHeap->allocate(sizeof(cam_capability_t));
8907 if(rc != OK) {
8908 LOGE("No memory for cappability");
8909 goto allocate_failed;
8910 }
8911
8912 /* Map memory for capability buffer */
8913 memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t));
8914
8915 rc = ops->map_buf(cam_handle,
8916 CAM_MAPPING_BUF_TYPE_CAPABILITY, capabilityHeap->getFd(0),
8917 sizeof(cam_capability_t), capabilityHeap->getPtr(0));
8918 if(rc < 0) {
8919 LOGE("failed to map capability buffer");
8920 rc = FAILED_TRANSACTION;
8921 goto map_failed;
8922 }
8923
8924 /* Query Capability */
8925 rc = ops->query_capability(cam_handle);
8926 if(rc < 0) {
8927 LOGE("failed to query capability");
8928 rc = FAILED_TRANSACTION;
8929 goto query_failed;
8930 }
8931
8932 cap_ptr = (cam_capability_t *)malloc(sizeof(cam_capability_t));
8933 if (cap_ptr == NULL) {
8934 LOGE("out of memory");
8935 rc = NO_MEMORY;
8936 goto query_failed;
8937 }
8938
8939 memset(cap_ptr, 0, sizeof(cam_capability_t));
8940 memcpy(cap_ptr, DATA_PTR(capabilityHeap, 0), sizeof(cam_capability_t));
8941
8942 int index;
8943 for (index = 0; index < CAM_ANALYSIS_INFO_MAX; index++) {
8944 cam_analysis_info_t *p_analysis_info = &cap_ptr->analysis_info[index];
8945 p_analysis_info->analysis_padding_info.offset_info.offset_x = 0;
8946 p_analysis_info->analysis_padding_info.offset_info.offset_y = 0;
8947 }
8948
8949query_failed:
8950 ops->unmap_buf(cam_handle, CAM_MAPPING_BUF_TYPE_CAPABILITY);
8951map_failed:
8952 capabilityHeap->deallocate();
8953allocate_failed:
8954 delete capabilityHeap;
8955
8956 if (rc != NO_ERROR) {
8957 return NULL;
8958 } else {
8959 return cap_ptr;
8960 }
8961}
8962
Thierry Strudel3d639192016-09-09 11:52:26 -07008963/*===========================================================================
8964 * FUNCTION : initCapabilities
8965 *
8966 * DESCRIPTION: initialize camera capabilities in static data struct
8967 *
8968 * PARAMETERS :
8969 * @cameraId : camera Id
8970 *
8971 * RETURN : int32_t type of status
8972 * NO_ERROR -- success
8973 * non-zero failure code
8974 *==========================================================================*/
8975int QCamera3HardwareInterface::initCapabilities(uint32_t cameraId)
8976{
8977 int rc = 0;
8978 mm_camera_vtbl_t *cameraHandle = NULL;
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008979 uint32_t handle = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07008980
8981 rc = camera_open((uint8_t)cameraId, &cameraHandle);
8982 if (rc) {
8983 LOGE("camera_open failed. rc = %d", rc);
8984 goto open_failed;
8985 }
8986 if (!cameraHandle) {
8987 LOGE("camera_open failed. cameraHandle = %p", cameraHandle);
8988 goto open_failed;
8989 }
8990
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008991 handle = get_main_camera_handle(cameraHandle->camera_handle);
8992 gCamCapability[cameraId] = getCapabilities(cameraHandle->ops, handle);
8993 if (gCamCapability[cameraId] == NULL) {
8994 rc = FAILED_TRANSACTION;
8995 goto failed_op;
Thierry Strudel3d639192016-09-09 11:52:26 -07008996 }
8997
Thierry Strudel295a0ca2016-11-03 18:38:47 -07008998 gCamCapability[cameraId]->camera_index = cameraId;
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008999 if (is_dual_camera_by_idx(cameraId)) {
9000 handle = get_aux_camera_handle(cameraHandle->camera_handle);
9001 gCamCapability[cameraId]->aux_cam_cap =
9002 getCapabilities(cameraHandle->ops, handle);
9003 if (gCamCapability[cameraId]->aux_cam_cap == NULL) {
9004 rc = FAILED_TRANSACTION;
9005 free(gCamCapability[cameraId]);
9006 goto failed_op;
9007 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08009008
9009 // Copy the main camera capability to main_cam_cap struct
9010 gCamCapability[cameraId]->main_cam_cap =
9011 (cam_capability_t *)malloc(sizeof(cam_capability_t));
9012 if (gCamCapability[cameraId]->main_cam_cap == NULL) {
9013 LOGE("out of memory");
9014 rc = NO_MEMORY;
9015 goto failed_op;
9016 }
9017 memcpy(gCamCapability[cameraId]->main_cam_cap, gCamCapability[cameraId],
9018 sizeof(cam_capability_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07009019 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07009020failed_op:
Thierry Strudel3d639192016-09-09 11:52:26 -07009021 cameraHandle->ops->close_camera(cameraHandle->camera_handle);
9022 cameraHandle = NULL;
9023open_failed:
9024 return rc;
9025}
9026
9027/*==========================================================================
9028 * FUNCTION : get3AVersion
9029 *
9030 * DESCRIPTION: get the Q3A S/W version
9031 *
9032 * PARAMETERS :
9033 * @sw_version: Reference of Q3A structure which will hold version info upon
9034 * return
9035 *
9036 * RETURN : None
9037 *
9038 *==========================================================================*/
9039void QCamera3HardwareInterface::get3AVersion(cam_q3a_version_t &sw_version)
9040{
9041 if(gCamCapability[mCameraId])
9042 sw_version = gCamCapability[mCameraId]->q3a_version;
9043 else
9044 LOGE("Capability structure NULL!");
9045}
9046
9047
9048/*===========================================================================
9049 * FUNCTION : initParameters
9050 *
9051 * DESCRIPTION: initialize camera parameters
9052 *
9053 * PARAMETERS :
9054 *
9055 * RETURN : int32_t type of status
9056 * NO_ERROR -- success
9057 * non-zero failure code
9058 *==========================================================================*/
9059int QCamera3HardwareInterface::initParameters()
9060{
9061 int rc = 0;
9062
9063 //Allocate Set Param Buffer
9064 mParamHeap = new QCamera3HeapMemory(1);
9065 rc = mParamHeap->allocate(sizeof(metadata_buffer_t));
9066 if(rc != OK) {
9067 rc = NO_MEMORY;
9068 LOGE("Failed to allocate SETPARM Heap memory");
9069 delete mParamHeap;
9070 mParamHeap = NULL;
9071 return rc;
9072 }
9073
9074 //Map memory for parameters buffer
9075 rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
9076 CAM_MAPPING_BUF_TYPE_PARM_BUF,
9077 mParamHeap->getFd(0),
9078 sizeof(metadata_buffer_t),
9079 (metadata_buffer_t *) DATA_PTR(mParamHeap,0));
9080 if(rc < 0) {
9081 LOGE("failed to map SETPARM buffer");
9082 rc = FAILED_TRANSACTION;
9083 mParamHeap->deallocate();
9084 delete mParamHeap;
9085 mParamHeap = NULL;
9086 return rc;
9087 }
9088
9089 mParameters = (metadata_buffer_t *) DATA_PTR(mParamHeap,0);
9090
9091 mPrevParameters = (metadata_buffer_t *)malloc(sizeof(metadata_buffer_t));
9092 return rc;
9093}
9094
9095/*===========================================================================
9096 * FUNCTION : deinitParameters
9097 *
9098 * DESCRIPTION: de-initialize camera parameters
9099 *
9100 * PARAMETERS :
9101 *
9102 * RETURN : NONE
9103 *==========================================================================*/
9104void QCamera3HardwareInterface::deinitParameters()
9105{
9106 mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
9107 CAM_MAPPING_BUF_TYPE_PARM_BUF);
9108
9109 mParamHeap->deallocate();
9110 delete mParamHeap;
9111 mParamHeap = NULL;
9112
9113 mParameters = NULL;
9114
9115 free(mPrevParameters);
9116 mPrevParameters = NULL;
9117}
9118
9119/*===========================================================================
9120 * FUNCTION : calcMaxJpegSize
9121 *
9122 * DESCRIPTION: Calculates maximum jpeg size supported by the cameraId
9123 *
9124 * PARAMETERS :
9125 * @camera_id : camera Id
9126 * RETURN : max_jpeg_size
9127 *==========================================================================*/
9128size_t QCamera3HardwareInterface::calcMaxJpegSize(uint32_t camera_id)
9129{
9130 size_t max_jpeg_size = 0;
9131 size_t temp_width, temp_height;
9132 size_t count = MIN(gCamCapability[camera_id]->picture_sizes_tbl_cnt,
9133 MAX_SIZES_CNT);
9134 for (size_t i = 0; i < count; i++) {
9135 temp_width = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].width;
9136 temp_height = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].height;
9137 if (temp_width * temp_height > max_jpeg_size ) {
9138 max_jpeg_size = temp_width * temp_height;
9139 }
9140 }
9141 max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
9142 return max_jpeg_size;
9143}
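/*
 * Worked example of the sizing above (illustrative only; the 4608x3456
 * resolution is an assumed entry in picture_sizes_tbl, not a claim about any
 * particular sensor):
 *
 *   4608 * 3456      = 15925248 pixels
 *   15925248 * 3 / 2 = 23887872 bytes (1.5 bytes-per-pixel budget)
 *   + sizeof(camera3_jpeg_blob_t) for the trailing JPEG blob header
 *
 * This is the value reported later via ANDROID_JPEG_MAX_SIZE.
 */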
9144
9145/*===========================================================================
9146 * FUNCTION : getMaxRawSize
9147 *
9148 * DESCRIPTION: Fetches maximum raw size supported by the cameraId
9149 *
9150 * PARAMETERS :
9151 * @camera_id : camera Id
9152 * RETURN : Largest supported Raw Dimension
9153 *==========================================================================*/
9154cam_dimension_t QCamera3HardwareInterface::getMaxRawSize(uint32_t camera_id)
9155{
9156 int max_width = 0;
9157 cam_dimension_t maxRawSize;
9158
9159 memset(&maxRawSize, 0, sizeof(cam_dimension_t));
9160 for (size_t i = 0; i < gCamCapability[camera_id]->supported_raw_dim_cnt; i++) {
9161 if (max_width < gCamCapability[camera_id]->raw_dim[i].width) {
9162 max_width = gCamCapability[camera_id]->raw_dim[i].width;
9163 maxRawSize = gCamCapability[camera_id]->raw_dim[i];
9164 }
9165 }
9166 return maxRawSize;
9167}
9168
9169
9170/*===========================================================================
9171 * FUNCTION : calcMaxJpegDim
9172 *
9173 * DESCRIPTION: Calculates maximum jpeg dimension supported by the cameraId
9174 *
9175 * PARAMETERS :
9176 *
9177 * RETURN : max_jpeg_dim
9178 *==========================================================================*/
9179cam_dimension_t QCamera3HardwareInterface::calcMaxJpegDim()
9180{
9181 cam_dimension_t max_jpeg_dim;
9182 cam_dimension_t curr_jpeg_dim;
9183 max_jpeg_dim.width = 0;
9184 max_jpeg_dim.height = 0;
9185 curr_jpeg_dim.width = 0;
9186 curr_jpeg_dim.height = 0;
9187 for (size_t i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
9188 curr_jpeg_dim.width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width;
9189 curr_jpeg_dim.height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height;
9190 if (curr_jpeg_dim.width * curr_jpeg_dim.height >
9191 max_jpeg_dim.width * max_jpeg_dim.height ) {
9192 max_jpeg_dim.width = curr_jpeg_dim.width;
9193 max_jpeg_dim.height = curr_jpeg_dim.height;
9194 }
9195 }
9196 return max_jpeg_dim;
9197}
9198
9199/*===========================================================================
9200 * FUNCTION : addStreamConfig
9201 *
9202 * DESCRIPTION: adds the stream configuration to the array
9203 *
9204 * PARAMETERS :
9205 * @available_stream_configs : pointer to stream configuration array
9206 * @scalar_format : scalar format
9207 * @dim : configuration dimension
9208 * @config_type : input or output configuration type
9209 *
9210 * RETURN : NONE
9211 *==========================================================================*/
9212void QCamera3HardwareInterface::addStreamConfig(Vector<int32_t> &available_stream_configs,
9213 int32_t scalar_format, const cam_dimension_t &dim, int32_t config_type)
9214{
9215 available_stream_configs.add(scalar_format);
9216 available_stream_configs.add(dim.width);
9217 available_stream_configs.add(dim.height);
9218 available_stream_configs.add(config_type);
9219}
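/*
 * Minimal usage sketch for addStreamConfig() (called from class context; the
 * 1920x1080 size is an example value, not a capability claim):
 *
 *   Vector<int32_t> cfgs;
 *   cam_dimension_t dim;
 *   dim.width  = 1920;
 *   dim.height = 1080;
 *   addStreamConfig(cfgs, HAL_PIXEL_FORMAT_YCbCr_420_888, dim,
 *           ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
 *
 *   // cfgs now ends with the flattened quadruple
 *   //   format, 1920, 1080, OUTPUT
 *   // which is the per-entry layout consumed by
 *   // ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS.
 */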
9220
9221/*===========================================================================
9222 * FUNCTION : supportBurstCapture
9223 *
9224 * DESCRIPTION: Whether a particular camera supports BURST_CAPTURE
9225 *
9226 * PARAMETERS :
9227 * @cameraId : camera Id
9228 *
9229 * RETURN : true if camera supports BURST_CAPTURE
9230 * false otherwise
9231 *==========================================================================*/
9232bool QCamera3HardwareInterface::supportBurstCapture(uint32_t cameraId)
9233{
9234 const int64_t highResDurationBound = 50000000; // 50 ms, 20 fps
9235 const int64_t fullResDurationBound = 100000000; // 100 ms, 10 fps
9236 const int32_t highResWidth = 3264;
9237 const int32_t highResHeight = 2448;
9238
9239 if (gCamCapability[cameraId]->picture_min_duration[0] > fullResDurationBound) {
9240 // Maximum resolution images cannot be captured at >= 10fps
9241 // -> not supporting BURST_CAPTURE
9242 return false;
9243 }
9244
9245 if (gCamCapability[cameraId]->picture_min_duration[0] <= highResDurationBound) {
9246 // Maximum resolution images can be captured at >= 20fps
9247 // --> supporting BURST_CAPTURE
9248 return true;
9249 }
9250
9251 // Find the smallest highRes resolution, or largest resolution if there is none
9252 size_t totalCnt = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt,
9253 MAX_SIZES_CNT);
9254 size_t highRes = 0;
9255 while ((highRes + 1 < totalCnt) &&
9256 (gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].width *
9257 gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].height >=
9258 highResWidth * highResHeight)) {
9259 highRes++;
9260 }
9261 if (gCamCapability[cameraId]->picture_min_duration[highRes] <= highResDurationBound) {
9262 return true;
9263 } else {
9264 return false;
9265 }
9266}
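/*
 * Note on the bounds used above (a sketch of the arithmetic, not new policy):
 * picture_min_duration[] holds per-size minimum frame durations in
 * nanoseconds, so the achievable frame rate is fps = 1e9 / duration.
 *
 *   50000000 ns  -> 1e9 / 50000000  = 20 fps  (highResDurationBound)
 *   100000000 ns -> 1e9 / 100000000 = 10 fps  (fullResDurationBound)
 *
 * 3264x2448 (~8MP) is the smallest size treated as "high resolution" when
 * scanning picture_sizes_tbl for the BURST_CAPTURE decision.
 */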
9267
9268/*===========================================================================
Emilian Peev0f3c3162017-03-15 12:57:46 +00009269 * FUNCTION : getPDStatIndex
9270 *
9271 * DESCRIPTION: Return the meta raw phase detection statistics index if present
9272 *
9273 * PARAMETERS :
9274 * @caps : camera capabilities
9275 *
9276 * RETURN : int32_t type
9277 * non-negative - on success
9278 * -1 - on failure
9279 *==========================================================================*/
9280int32_t QCamera3HardwareInterface::getPDStatIndex(cam_capability_t *caps) {
9281 if (nullptr == caps) {
9282 return -1;
9283 }
9284
9285 uint32_t metaRawCount = caps->meta_raw_channel_count;
9286 int32_t ret = -1;
9287 for (size_t i = 0; i < metaRawCount; i++) {
9288 if (CAM_FORMAT_SUBTYPE_PDAF_STATS == caps->sub_fmt[i]) {
9289 ret = i;
9290 break;
9291 }
9292 }
9293
9294 return ret;
9295}
9296
9297/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07009298 * FUNCTION : initStaticMetadata
9299 *
9300 * DESCRIPTION: initialize the static metadata
9301 *
9302 * PARAMETERS :
9303 * @cameraId : camera Id
9304 *
9305 * RETURN : int32_t type of status
9306 * 0 -- success
9307 * non-zero failure code
9308 *==========================================================================*/
9309int QCamera3HardwareInterface::initStaticMetadata(uint32_t cameraId)
9310{
9311 int rc = 0;
9312 CameraMetadata staticInfo;
9313 size_t count = 0;
9314 bool limitedDevice = false;
9315 char prop[PROPERTY_VALUE_MAX];
9316 bool supportBurst = false;
9317
9318 supportBurst = supportBurstCapture(cameraId);
9319
9320 /* If the sensor is a YUV sensor (no raw support), if per-frame control is not
9321 * guaranteed, or if the min fps of the max resolution is less than 20 fps, the
9322 * device is advertised as LIMITED */
9323 limitedDevice = gCamCapability[cameraId]->no_per_frame_control_support ||
9324 (CAM_SENSOR_YUV == gCamCapability[cameraId]->sensor_type.sens_type) ||
9325 (CAM_SENSOR_MONO == gCamCapability[cameraId]->sensor_type.sens_type) ||
9326 !supportBurst;
9327
9328 uint8_t supportedHwLvl = limitedDevice ?
9329 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED :
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009330#ifndef USE_HAL_3_3
9331 // LEVEL_3 - This device will support level 3.
9332 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_3;
9333#else
Thierry Strudel3d639192016-09-09 11:52:26 -07009334 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009335#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07009336
9337 staticInfo.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
9338 &supportedHwLvl, 1);
9339
9340 bool facingBack = false;
9341 if ((gCamCapability[cameraId]->position == CAM_POSITION_BACK) ||
9342 (gCamCapability[cameraId]->position == CAM_POSITION_BACK_AUX)) {
9343 facingBack = true;
9344 }
9345 /*HAL 3 only*/
9346 staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
9347 &gCamCapability[cameraId]->min_focus_distance, 1);
9348
9349 staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
9350 &gCamCapability[cameraId]->hyper_focal_distance, 1);
9351
9352 /*should be using focal lengths but sensor doesn't provide that info now*/
9353 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
9354 &gCamCapability[cameraId]->focal_length,
9355 1);
9356
9357 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
9358 gCamCapability[cameraId]->apertures,
9359 MIN(CAM_APERTURES_MAX, gCamCapability[cameraId]->apertures_count));
9360
9361 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
9362 gCamCapability[cameraId]->filter_densities,
9363 MIN(CAM_FILTER_DENSITIES_MAX, gCamCapability[cameraId]->filter_densities_count));
9364
9365
Zhijun Hea6ea1d32017-03-10 13:30:00 -08009366 uint8_t available_opt_stab_modes[CAM_OPT_STAB_MAX];
9367 size_t mode_count =
9368 MIN((size_t)CAM_OPT_STAB_MAX, gCamCapability[cameraId]->optical_stab_modes_count);
9369 for (size_t i = 0; i < mode_count; i++) {
9370 available_opt_stab_modes[i] = gCamCapability[cameraId]->optical_stab_modes[i];
9371 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009372 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
Zhijun Hea6ea1d32017-03-10 13:30:00 -08009373 available_opt_stab_modes, mode_count);
Thierry Strudel3d639192016-09-09 11:52:26 -07009374
9375 int32_t lens_shading_map_size[] = {
9376 MIN(CAM_MAX_SHADING_MAP_WIDTH, gCamCapability[cameraId]->lens_shading_map_size.width),
9377 MIN(CAM_MAX_SHADING_MAP_HEIGHT, gCamCapability[cameraId]->lens_shading_map_size.height)};
9378 staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE,
9379 lens_shading_map_size,
9380 sizeof(lens_shading_map_size)/sizeof(int32_t));
9381
9382 staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
9383 gCamCapability[cameraId]->sensor_physical_size, SENSOR_PHYSICAL_SIZE_CNT);
9384
9385 staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
9386 gCamCapability[cameraId]->exposure_time_range, EXPOSURE_TIME_RANGE_CNT);
9387
9388 staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
9389 &gCamCapability[cameraId]->max_frame_duration, 1);
9390
9391 camera_metadata_rational baseGainFactor = {
9392 gCamCapability[cameraId]->base_gain_factor.numerator,
9393 gCamCapability[cameraId]->base_gain_factor.denominator};
9394 staticInfo.update(ANDROID_SENSOR_BASE_GAIN_FACTOR,
9395 &baseGainFactor, 1);
9396
9397 staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
9398 (uint8_t *)&gCamCapability[cameraId]->color_arrangement, 1);
9399
9400 int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width,
9401 gCamCapability[cameraId]->pixel_array_size.height};
9402 staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
9403 pixel_array_size, sizeof(pixel_array_size)/sizeof(pixel_array_size[0]));
9404
9405 int32_t active_array_size[] = {gCamCapability[cameraId]->active_array_size.left,
9406 gCamCapability[cameraId]->active_array_size.top,
9407 gCamCapability[cameraId]->active_array_size.width,
9408 gCamCapability[cameraId]->active_array_size.height};
9409 staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
9410 active_array_size, sizeof(active_array_size)/sizeof(active_array_size[0]));
9411
9412 staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
9413 &gCamCapability[cameraId]->white_level, 1);
9414
Shuzhen Wanga5da1022016-07-13 20:18:42 -07009415 int32_t adjusted_bl_per_cfa[BLACK_LEVEL_PATTERN_CNT];
9416 adjustBlackLevelForCFA(gCamCapability[cameraId]->black_level_pattern, adjusted_bl_per_cfa,
9417 gCamCapability[cameraId]->color_arrangement);
Thierry Strudel3d639192016-09-09 11:52:26 -07009418 staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
Shuzhen Wanga5da1022016-07-13 20:18:42 -07009419 adjusted_bl_per_cfa, BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel3d639192016-09-09 11:52:26 -07009420
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009421#ifndef USE_HAL_3_3
9422 bool hasBlackRegions = false;
9423 if (gCamCapability[cameraId]->optical_black_region_count > MAX_OPTICAL_BLACK_REGIONS) {
9424 LOGW("black_region_count: %d is bounded to %d",
9425 gCamCapability[cameraId]->optical_black_region_count, MAX_OPTICAL_BLACK_REGIONS);
9426 gCamCapability[cameraId]->optical_black_region_count = MAX_OPTICAL_BLACK_REGIONS;
9427 }
9428 if (gCamCapability[cameraId]->optical_black_region_count != 0) {
9429 int32_t opticalBlackRegions[MAX_OPTICAL_BLACK_REGIONS * 4];
9430 for (size_t i = 0; i < gCamCapability[cameraId]->optical_black_region_count * 4; i++) {
9431 opticalBlackRegions[i] = gCamCapability[cameraId]->optical_black_regions[i];
9432 }
9433 staticInfo.update(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS,
9434 opticalBlackRegions, gCamCapability[cameraId]->optical_black_region_count * 4);
9435 hasBlackRegions = true;
9436 }
9437#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07009438 staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION,
9439 &gCamCapability[cameraId]->flash_charge_duration, 1);
9440
9441 staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS,
9442 &gCamCapability[cameraId]->max_tone_map_curve_points, 1);
9443
Shuzhen Wang98d5efb2016-09-07 18:08:22 -07009444 uint8_t timestampSource = (gCamCapability[cameraId]->timestamp_calibrated ?
9445 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME :
9446 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_UNKNOWN);
Thierry Strudel3d639192016-09-09 11:52:26 -07009447 staticInfo.update(ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
9448 &timestampSource, 1);
9449
Thierry Strudel54dc9782017-02-15 12:12:10 -08009450 //update histogram vendor data
9451 staticInfo.update(QCAMERA3_HISTOGRAM_BUCKETS,
Thierry Strudel3d639192016-09-09 11:52:26 -07009452 &gCamCapability[cameraId]->histogram_size, 1);
9453
Thierry Strudel54dc9782017-02-15 12:12:10 -08009454 staticInfo.update(QCAMERA3_HISTOGRAM_MAX_COUNT,
Thierry Strudel3d639192016-09-09 11:52:26 -07009455 &gCamCapability[cameraId]->max_histogram_count, 1);
9456
Shuzhen Wang14415f52016-11-16 18:26:18 -08009457 //Set supported bins to be {max_bins, max_bins/2, max_bins/4, ...}
9458 //so that the app can request fewer bins than the maximum supported.
9459 std::vector<int32_t> histBins;
9460 int32_t maxHistBins = gCamCapability[cameraId]->max_histogram_count;
9461 histBins.push_back(maxHistBins);
9462 while ((maxHistBins >> 1) >= MIN_CAM_HISTOGRAM_STATS_SIZE &&
9463 (maxHistBins & 0x1) == 0) {
9464 histBins.push_back(maxHistBins >> 1);
9465 maxHistBins >>= 1;
9466 }
9467 staticInfo.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_SUPPORTED_BINS,
9468 histBins.data(), histBins.size());
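    // Example of the halving scheme above, with assumed values (not device
    // data): max_histogram_count = 256 and MIN_CAM_HISTOGRAM_STATS_SIZE = 64
    // would advertise supported bins {256, 128, 64}.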
9469
Thierry Strudel3d639192016-09-09 11:52:26 -07009470 int32_t sharpness_map_size[] = {
9471 gCamCapability[cameraId]->sharpness_map_size.width,
9472 gCamCapability[cameraId]->sharpness_map_size.height};
9473
9474 staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
9475 sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t));
9476
9477 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
9478 &gCamCapability[cameraId]->max_sharpness_map_value, 1);
9479
Emilian Peev0f3c3162017-03-15 12:57:46 +00009480 int32_t indexPD = getPDStatIndex(gCamCapability[cameraId]);
9481 if (0 <= indexPD) {
9482 // Advertise PD stats data as part of the Depth capabilities
9483 int32_t depthWidth =
9484 gCamCapability[cameraId]->raw_meta_dim[indexPD].width;
9485 int32_t depthHeight =
9486 gCamCapability[cameraId]->raw_meta_dim[indexPD].height;
Emilian Peev656e4fa2017-06-02 16:47:04 +01009487 int32_t depthStride =
9488 gCamCapability[cameraId]->raw_meta_dim[indexPD].width * 2;
Emilian Peev0f3c3162017-03-15 12:57:46 +00009489 int32_t depthSamplesCount = (depthWidth * depthHeight * 2) / 16;
9490 assert(0 < depthSamplesCount);
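    // Worked example of the sample count above (PD map dimensions assumed for
    // illustration): a 504x384 phase-detection stats map yields
    // (504 * 384 * 2) / 16 = 24192, the value advertised through
    // ANDROID_DEPTH_MAX_DEPTH_SAMPLES below.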
9491 staticInfo.update(ANDROID_DEPTH_MAX_DEPTH_SAMPLES,
9492 &depthSamplesCount, 1);
9493
9494 int32_t depthConfigs[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9495 depthHeight,
9496 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
9497 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1,
9498 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT};
9499 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
9500 depthConfigs, sizeof(depthConfigs)/sizeof(depthConfigs[0]));
9501
9502 int64_t depthMinDuration[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9503 depthHeight, 33333333,
9504 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1, 33333333};
9505 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS,
9506 depthMinDuration,
9507 sizeof(depthMinDuration) / sizeof(depthMinDuration[0]));
9508
9509 int64_t depthStallDuration[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9510 depthHeight, 0,
9511 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1, 0};
9512 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS,
9513 depthStallDuration,
9514 sizeof(depthStallDuration) / sizeof(depthStallDuration[0]));
9515
9516 uint8_t depthExclusive = ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE_FALSE;
9517 staticInfo.update(ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE, &depthExclusive, 1);
Emilian Peev656e4fa2017-06-02 16:47:04 +01009518
9519 int32_t pd_dimensions [] = {depthWidth, depthHeight, depthStride};
9520 staticInfo.update(NEXUS_EXPERIMENTAL_2017_PD_DATA_DIMENSIONS,
9521 pd_dimensions, sizeof(pd_dimensions) / sizeof(pd_dimensions[0]));
Emilian Peev0f3c3162017-03-15 12:57:46 +00009522 }
9523
Thierry Strudel3d639192016-09-09 11:52:26 -07009524 int32_t scalar_formats[] = {
9525 ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE,
9526 ANDROID_SCALER_AVAILABLE_FORMATS_RAW16,
9527 ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888,
9528 ANDROID_SCALER_AVAILABLE_FORMATS_BLOB,
9529 HAL_PIXEL_FORMAT_RAW10,
9530 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED};
Emilian Peev0f3c3162017-03-15 12:57:46 +00009531 size_t scalar_formats_count = sizeof(scalar_formats) / sizeof(scalar_formats[0]);
9532 staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS, scalar_formats,
9533 scalar_formats_count);
Thierry Strudel3d639192016-09-09 11:52:26 -07009534
9535 int32_t available_processed_sizes[MAX_SIZES_CNT * 2];
9536 count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
9537 makeTable(gCamCapability[cameraId]->picture_sizes_tbl,
9538 count, MAX_SIZES_CNT, available_processed_sizes);
9539 staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
9540 available_processed_sizes, count * 2);
9541
9542 int32_t available_raw_sizes[MAX_SIZES_CNT * 2];
9543 count = MIN(gCamCapability[cameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
9544 makeTable(gCamCapability[cameraId]->raw_dim,
9545 count, MAX_SIZES_CNT, available_raw_sizes);
9546 staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_SIZES,
9547 available_raw_sizes, count * 2);
9548
9549 int32_t available_fps_ranges[MAX_SIZES_CNT * 2];
9550 count = MIN(gCamCapability[cameraId]->fps_ranges_tbl_cnt, MAX_SIZES_CNT);
9551 makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl,
9552 count, MAX_SIZES_CNT, available_fps_ranges);
9553 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
9554 available_fps_ranges, count * 2);
9555
9556 camera_metadata_rational exposureCompensationStep = {
9557 gCamCapability[cameraId]->exp_compensation_step.numerator,
9558 gCamCapability[cameraId]->exp_compensation_step.denominator};
9559 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
9560 &exposureCompensationStep, 1);
9561
9562 Vector<uint8_t> availableVstabModes;
9563 availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF);
9564 char eis_prop[PROPERTY_VALUE_MAX];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009565 bool eisSupported = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07009566 memset(eis_prop, 0, sizeof(eis_prop));
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009567 property_get("persist.camera.eis.enable", eis_prop, "1");
Thierry Strudel3d639192016-09-09 11:52:26 -07009568 uint8_t eis_prop_set = (uint8_t)atoi(eis_prop);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009569 count = IS_TYPE_MAX;
9570 count = MIN(gCamCapability[cameraId]->supported_is_types_cnt, count);
9571 for (size_t i = 0; i < count; i++) {
9572 if ((gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
9573 (gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0)) {
9574 eisSupported = true;
9575 break;
9576 }
9577 }
9578 if (facingBack && eis_prop_set && eisSupported) {
Thierry Strudel3d639192016-09-09 11:52:26 -07009579 availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON);
9580 }
9581 staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
9582 availableVstabModes.array(), availableVstabModes.size());
9583
9584 /*HAL 1 and HAL 3 common*/
9585 uint32_t zoomSteps = gCamCapability[cameraId]->zoom_ratio_tbl_cnt;
9586 uint32_t maxZoomStep = gCamCapability[cameraId]->zoom_ratio_tbl[zoomSteps - 1];
9587 uint32_t minZoomStep = 100; //as per HAL1/API1 spec
Zhijun He2a5df222017-04-04 18:20:38 -07009588 // Cap the max zoom to the max preferred value
9589 float maxZoom = MIN(maxZoomStep/minZoomStep, MAX_PREFERRED_ZOOM_RATIO);
Thierry Strudel3d639192016-09-09 11:52:26 -07009590 staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
9591 &maxZoom, 1);
9592
9593 uint8_t croppingType = ANDROID_SCALER_CROPPING_TYPE_CENTER_ONLY;
9594 staticInfo.update(ANDROID_SCALER_CROPPING_TYPE, &croppingType, 1);
9595
9596 int32_t max3aRegions[3] = {/*AE*/1,/*AWB*/ 0,/*AF*/ 1};
9597 if (gCamCapability[cameraId]->supported_focus_modes_cnt == 1)
9598 max3aRegions[2] = 0; /* AF not supported */
9599 staticInfo.update(ANDROID_CONTROL_MAX_REGIONS,
9600 max3aRegions, 3);
9601
9602 /* 0: OFF, 1: OFF+SIMPLE, 2: OFF+FULL, 3: OFF+SIMPLE+FULL */
9603 memset(prop, 0, sizeof(prop));
9604 property_get("persist.camera.facedetect", prop, "1");
9605 uint8_t supportedFaceDetectMode = (uint8_t)atoi(prop);
9606 LOGD("Support face detection mode: %d",
9607 supportedFaceDetectMode);
9608
9609 int32_t maxFaces = gCamCapability[cameraId]->max_num_roi;
Thierry Strudel04e026f2016-10-10 11:27:36 -07009610 /* supported face detect mode should be OFF if the max number of faces is 0 */
9611 if (maxFaces <= 0) {
9612 supportedFaceDetectMode = 0;
9613 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009614 Vector<uint8_t> availableFaceDetectModes;
9615 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_OFF);
9616 if (supportedFaceDetectMode == 1) {
9617 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
9618 } else if (supportedFaceDetectMode == 2) {
9619 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
9620 } else if (supportedFaceDetectMode == 3) {
9621 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
9622 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
9623 } else {
9624 maxFaces = 0;
9625 }
9626 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
9627 availableFaceDetectModes.array(),
9628 availableFaceDetectModes.size());
9629 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
9630 (int32_t *)&maxFaces, 1);
Thierry Strudel54dc9782017-02-15 12:12:10 -08009631 uint8_t face_bsgc = gCamCapability[cameraId]->face_bsgc;
9632 staticInfo.update(QCAMERA3_STATS_BSGC_AVAILABLE,
9633 &face_bsgc, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -07009634
9635 int32_t exposureCompensationRange[] = {
9636 gCamCapability[cameraId]->exposure_compensation_min,
9637 gCamCapability[cameraId]->exposure_compensation_max};
9638 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
9639 exposureCompensationRange,
9640 sizeof(exposureCompensationRange)/sizeof(int32_t));
9641
9642 uint8_t lensFacing = (facingBack) ?
9643 ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
9644 staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1);
9645
9646 staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
9647 available_thumbnail_sizes,
9648 sizeof(available_thumbnail_sizes)/sizeof(int32_t));
9649
9650 /*all sizes will be clubbed into this tag*/
9651 count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
9652 /*android.scaler.availableStreamConfigurations*/
9653 Vector<int32_t> available_stream_configs;
9654 cam_dimension_t active_array_dim;
9655 active_array_dim.width = gCamCapability[cameraId]->active_array_size.width;
9656 active_array_dim.height = gCamCapability[cameraId]->active_array_size.height;
Thierry Strudel2896d122017-02-23 19:18:03 -08009657
9658 /*advertise list of input dimensions supported based on below property.
9659 By default all sizes upto 5MP will be advertised.
9660 Note that the setprop resolution format should be WxH.
9661 e.g: adb shell setprop persist.camera.input.minsize 1280x720
9662 To list all supported sizes, setprop needs to be set with "0x0" */
9663 cam_dimension_t minInputSize = {2592,1944}; //5MP
9664 memset(prop, 0, sizeof(prop));
9665 property_get("persist.camera.input.minsize", prop, "2592x1944");
9666 if (strlen(prop) > 0) {
9667 char *saveptr = NULL;
9668 char *token = strtok_r(prop, "x", &saveptr);
9669 if (token != NULL) {
9670 minInputSize.width = atoi(token);
9671 }
9672 token = strtok_r(NULL, "x", &saveptr);
9673 if (token != NULL) {
9674 minInputSize.height = atoi(token);
9675 }
9676 }
9677
Thierry Strudel3d639192016-09-09 11:52:26 -07009678 /* Add input/output stream configurations for each scalar formats*/
9679 for (size_t j = 0; j < scalar_formats_count; j++) {
9680 switch (scalar_formats[j]) {
9681 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
9682 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
9683 case HAL_PIXEL_FORMAT_RAW10:
9684 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9685 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
9686 addStreamConfig(available_stream_configs, scalar_formats[j],
9687 gCamCapability[cameraId]->raw_dim[i],
9688 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
9689 }
9690 break;
9691 case HAL_PIXEL_FORMAT_BLOB:
9692 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9693 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9694 addStreamConfig(available_stream_configs, scalar_formats[j],
9695 gCamCapability[cameraId]->picture_sizes_tbl[i],
9696 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
9697 }
9698 break;
9699 case HAL_PIXEL_FORMAT_YCbCr_420_888:
9700 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
9701 default:
9702 cam_dimension_t largest_picture_size;
9703 memset(&largest_picture_size, 0, sizeof(cam_dimension_t));
9704 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9705 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9706 addStreamConfig(available_stream_configs, scalar_formats[j],
9707 gCamCapability[cameraId]->picture_sizes_tbl[i],
9708 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
Thierry Strudel2896d122017-02-23 19:18:03 -08009709 /* For the 2 formats below we also support input streams for reprocessing; advertise those */
Zhijun Hee0cc0ae2017-05-19 22:19:27 -07009710 if ((scalar_formats[j] == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
9711 scalar_formats[j] == HAL_PIXEL_FORMAT_YCbCr_420_888) && i == 0) {
Thierry Strudel2896d122017-02-23 19:18:03 -08009712 if ((gCamCapability[cameraId]->picture_sizes_tbl[i].width
9713 >= minInputSize.width) || (gCamCapability[cameraId]->
9714 picture_sizes_tbl[i].height >= minInputSize.height)) {
9715 addStreamConfig(available_stream_configs, scalar_formats[j],
9716 gCamCapability[cameraId]->picture_sizes_tbl[i],
9717 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_INPUT);
9718 }
9719 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009720 }
Thierry Strudel2896d122017-02-23 19:18:03 -08009721
Thierry Strudel3d639192016-09-09 11:52:26 -07009722 break;
9723 }
9724 }
9725
9726 staticInfo.update(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
9727 available_stream_configs.array(), available_stream_configs.size());
9728 static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
9729 staticInfo.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
9730
9731 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
9732 staticInfo.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
9733
9734 /* android.scaler.availableMinFrameDurations */
9735 Vector<int64_t> available_min_durations;
9736 for (size_t j = 0; j < scalar_formats_count; j++) {
9737 switch (scalar_formats[j]) {
9738 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
9739 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
9740 case HAL_PIXEL_FORMAT_RAW10:
9741 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9742 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
9743 available_min_durations.add(scalar_formats[j]);
9744 available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
9745 available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
9746 available_min_durations.add(gCamCapability[cameraId]->raw_min_duration[i]);
9747 }
9748 break;
9749 default:
9750 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9751 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9752 available_min_durations.add(scalar_formats[j]);
9753 available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
9754 available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
9755 available_min_durations.add(gCamCapability[cameraId]->picture_min_duration[i]);
9756 }
9757 break;
9758 }
9759 }
9760 staticInfo.update(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
9761 available_min_durations.array(), available_min_durations.size());
9762
9763 Vector<int32_t> available_hfr_configs;
9764 for (size_t i = 0; i < gCamCapability[cameraId]->hfr_tbl_cnt; i++) {
9765 int32_t fps = 0;
9766 switch (gCamCapability[cameraId]->hfr_tbl[i].mode) {
9767 case CAM_HFR_MODE_60FPS:
9768 fps = 60;
9769 break;
9770 case CAM_HFR_MODE_90FPS:
9771 fps = 90;
9772 break;
9773 case CAM_HFR_MODE_120FPS:
9774 fps = 120;
9775 break;
9776 case CAM_HFR_MODE_150FPS:
9777 fps = 150;
9778 break;
9779 case CAM_HFR_MODE_180FPS:
9780 fps = 180;
9781 break;
9782 case CAM_HFR_MODE_210FPS:
9783 fps = 210;
9784 break;
9785 case CAM_HFR_MODE_240FPS:
9786 fps = 240;
9787 break;
9788 case CAM_HFR_MODE_480FPS:
9789 fps = 480;
9790 break;
9791 case CAM_HFR_MODE_OFF:
9792 case CAM_HFR_MODE_MAX:
9793 default:
9794 break;
9795 }
9796
9797 /* Advertise only MIN_FPS_FOR_BATCH_MODE or above as HIGH_SPEED_CONFIGS */
9798 if (fps >= MIN_FPS_FOR_BATCH_MODE) {
9799 /* For each HFR frame rate, need to advertise one variable fps range
9800 * and one fixed fps range per dimension. Eg: for 120 FPS, advertise [30, 120]
9801 * and [120, 120]. While camcorder preview alone is running [30, 120] is
9802 * set by the app. When video recording is started, [120, 120] is
9803 * set. This way sensor configuration does not change when recording
9804 * is started */
9805
9806 /* (width, height, fps_min, fps_max, batch_size_max) */
9807 for (size_t j = 0; j < gCamCapability[cameraId]->hfr_tbl[i].dim_cnt &&
9808 j < MAX_SIZES_CNT; j++) {
9809 available_hfr_configs.add(
9810 gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
9811 available_hfr_configs.add(
9812 gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
9813 available_hfr_configs.add(PREVIEW_FPS_FOR_HFR);
9814 available_hfr_configs.add(fps);
9815 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
9816
9817 /* (width, height, fps_min, fps_max, batch_size_max) */
9818 available_hfr_configs.add(
9819 gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
9820 available_hfr_configs.add(
9821 gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
9822 available_hfr_configs.add(fps);
9823 available_hfr_configs.add(fps);
9824 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
9825 }
9826 }
9827 }
9828 //Advertise HFR capability only if the property is set
9829 memset(prop, 0, sizeof(prop));
9830 property_get("persist.camera.hal3hfr.enable", prop, "1");
9831 uint8_t hfrEnable = (uint8_t)atoi(prop);
9832
9833 if(hfrEnable && available_hfr_configs.array()) {
9834 staticInfo.update(
9835 ANDROID_CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS,
9836 available_hfr_configs.array(), available_hfr_configs.size());
9837 }
9838
9839 int32_t max_jpeg_size = (int32_t)calcMaxJpegSize(cameraId);
9840 staticInfo.update(ANDROID_JPEG_MAX_SIZE,
9841 &max_jpeg_size, 1);
9842
9843 uint8_t avail_effects[CAM_EFFECT_MODE_MAX];
9844 size_t size = 0;
9845 count = CAM_EFFECT_MODE_MAX;
9846 count = MIN(gCamCapability[cameraId]->supported_effects_cnt, count);
9847 for (size_t i = 0; i < count; i++) {
9848 int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
9849 gCamCapability[cameraId]->supported_effects[i]);
9850 if (NAME_NOT_FOUND != val) {
9851 avail_effects[size] = (uint8_t)val;
9852 size++;
9853 }
9854 }
9855 staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
9856 avail_effects,
9857 size);
9858
9859 uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX];
9860 uint8_t supported_indexes[CAM_SCENE_MODE_MAX];
9861 size_t supported_scene_modes_cnt = 0;
9862 count = CAM_SCENE_MODE_MAX;
9863 count = MIN(gCamCapability[cameraId]->supported_scene_modes_cnt, count);
9864 for (size_t i = 0; i < count; i++) {
9865 if (gCamCapability[cameraId]->supported_scene_modes[i] !=
9866 CAM_SCENE_MODE_OFF) {
9867 int val = lookupFwkName(SCENE_MODES_MAP,
9868 METADATA_MAP_SIZE(SCENE_MODES_MAP),
9869 gCamCapability[cameraId]->supported_scene_modes[i]);
Mansoor Aftab58465fa2017-01-26 15:02:44 -08009870
Thierry Strudel3d639192016-09-09 11:52:26 -07009871 if (NAME_NOT_FOUND != val) {
9872 avail_scene_modes[supported_scene_modes_cnt] = (uint8_t)val;
9873 supported_indexes[supported_scene_modes_cnt] = (uint8_t)i;
9874 supported_scene_modes_cnt++;
9875 }
9876 }
9877 }
9878 staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
9879 avail_scene_modes,
9880 supported_scene_modes_cnt);
9881
9882 uint8_t scene_mode_overrides[CAM_SCENE_MODE_MAX * 3];
9883 makeOverridesList(gCamCapability[cameraId]->scene_mode_overrides,
9884 supported_scene_modes_cnt,
9885 CAM_SCENE_MODE_MAX,
9886 scene_mode_overrides,
9887 supported_indexes,
9888 cameraId);
9889
9890 if (supported_scene_modes_cnt == 0) {
9891 supported_scene_modes_cnt = 1;
9892 avail_scene_modes[0] = ANDROID_CONTROL_SCENE_MODE_DISABLED;
9893 }
9894
9895 staticInfo.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
9896 scene_mode_overrides, supported_scene_modes_cnt * 3);
9897
9898 uint8_t available_control_modes[] = {ANDROID_CONTROL_MODE_OFF,
9899 ANDROID_CONTROL_MODE_AUTO,
9900 ANDROID_CONTROL_MODE_USE_SCENE_MODE};
9901 staticInfo.update(ANDROID_CONTROL_AVAILABLE_MODES,
9902 available_control_modes,
9903 3);
9904
9905 uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX];
9906 size = 0;
9907 count = CAM_ANTIBANDING_MODE_MAX;
9908 count = MIN(gCamCapability[cameraId]->supported_antibandings_cnt, count);
9909 for (size_t i = 0; i < count; i++) {
9910 int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
9911 gCamCapability[cameraId]->supported_antibandings[i]);
9912 if (NAME_NOT_FOUND != val) {
9913 avail_antibanding_modes[size] = (uint8_t)val;
9914 size++;
9915 }
9916
9917 }
9918 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
9919 avail_antibanding_modes,
9920 size);
9921
9922 uint8_t avail_abberation_modes[] = {
9923 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
9924 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
9925 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY};
9926 count = CAM_COLOR_CORRECTION_ABERRATION_MAX;
9927 count = MIN(gCamCapability[cameraId]->aberration_modes_count, count);
9928 if (0 == count) {
9929 // If no aberration correction modes are available for a device, advertise only the OFF mode
9930 size = 1;
9931 } else {
9932 // If count is not zero then at least one of FAST or HIGH_QUALITY is supported.
9933 // So, advertise all 3 modes if at least one mode is supported, as per the
9934 // Android M requirement.
9935 size = 3;
9936 }
9937 staticInfo.update(ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
9938 avail_abberation_modes,
9939 size);
9940
9941 uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX];
9942 size = 0;
9943 count = CAM_FOCUS_MODE_MAX;
9944 count = MIN(gCamCapability[cameraId]->supported_focus_modes_cnt, count);
9945 for (size_t i = 0; i < count; i++) {
9946 int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
9947 gCamCapability[cameraId]->supported_focus_modes[i]);
9948 if (NAME_NOT_FOUND != val) {
9949 avail_af_modes[size] = (uint8_t)val;
9950 size++;
9951 }
9952 }
9953 staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
9954 avail_af_modes,
9955 size);
9956
9957 uint8_t avail_awb_modes[CAM_WB_MODE_MAX];
9958 size = 0;
9959 count = CAM_WB_MODE_MAX;
9960 count = MIN(gCamCapability[cameraId]->supported_white_balances_cnt, count);
9961 for (size_t i = 0; i < count; i++) {
9962 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
9963 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
9964 gCamCapability[cameraId]->supported_white_balances[i]);
9965 if (NAME_NOT_FOUND != val) {
9966 avail_awb_modes[size] = (uint8_t)val;
9967 size++;
9968 }
9969 }
9970 staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
9971 avail_awb_modes,
9972 size);
9973
9974 uint8_t available_flash_levels[CAM_FLASH_FIRING_LEVEL_MAX];
9975 count = CAM_FLASH_FIRING_LEVEL_MAX;
9976 count = MIN(gCamCapability[cameraId]->supported_flash_firing_level_cnt,
9977 count);
9978 for (size_t i = 0; i < count; i++) {
9979 available_flash_levels[i] =
9980 gCamCapability[cameraId]->supported_firing_levels[i];
9981 }
9982 staticInfo.update(ANDROID_FLASH_FIRING_POWER,
9983 available_flash_levels, count);
9984
9985 uint8_t flashAvailable;
9986 if (gCamCapability[cameraId]->flash_available)
9987 flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_TRUE;
9988 else
9989 flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_FALSE;
9990 staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE,
9991 &flashAvailable, 1);
9992
9993 Vector<uint8_t> avail_ae_modes;
9994 count = CAM_AE_MODE_MAX;
9995 count = MIN(gCamCapability[cameraId]->supported_ae_modes_cnt, count);
9996 for (size_t i = 0; i < count; i++) {
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -08009997 uint8_t aeMode = gCamCapability[cameraId]->supported_ae_modes[i];
9998 if (aeMode == CAM_AE_MODE_ON_EXTERNAL_FLASH) {
9999 aeMode = NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH;
10000 }
10001 avail_ae_modes.add(aeMode);
Thierry Strudel3d639192016-09-09 11:52:26 -070010002 }
10003 if (flashAvailable) {
10004 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH);
10005 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH);
10006 }
10007 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
10008 avail_ae_modes.array(),
10009 avail_ae_modes.size());
10010
10011 int32_t sensitivity_range[2];
10012 sensitivity_range[0] = gCamCapability[cameraId]->sensitivity_range.min_sensitivity;
10013 sensitivity_range[1] = gCamCapability[cameraId]->sensitivity_range.max_sensitivity;
10014 staticInfo.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
10015 sensitivity_range,
10016 sizeof(sensitivity_range) / sizeof(int32_t));
10017
10018 staticInfo.update(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
10019 &gCamCapability[cameraId]->max_analog_sensitivity,
10020 1);
10021
10022 int32_t sensor_orientation = (int32_t)gCamCapability[cameraId]->sensor_mount_angle;
10023 staticInfo.update(ANDROID_SENSOR_ORIENTATION,
10024 &sensor_orientation,
10025 1);
10026
10027 int32_t max_output_streams[] = {
10028 MAX_STALLING_STREAMS,
10029 MAX_PROCESSED_STREAMS,
10030 MAX_RAW_STREAMS};
10031 staticInfo.update(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
10032 max_output_streams,
10033 sizeof(max_output_streams)/sizeof(max_output_streams[0]));
10034
10035 uint8_t avail_leds = 0;
10036 staticInfo.update(ANDROID_LED_AVAILABLE_LEDS,
10037 &avail_leds, 0);
10038
10039 uint8_t focus_dist_calibrated;
10040 int val = lookupFwkName(FOCUS_CALIBRATION_MAP, METADATA_MAP_SIZE(FOCUS_CALIBRATION_MAP),
10041 gCamCapability[cameraId]->focus_dist_calibrated);
10042 if (NAME_NOT_FOUND != val) {
10043 focus_dist_calibrated = (uint8_t)val;
10044 staticInfo.update(ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
10045 &focus_dist_calibrated, 1);
10046 }
10047
10048 int32_t avail_testpattern_modes[MAX_TEST_PATTERN_CNT];
10049 size = 0;
10050 count = MIN(gCamCapability[cameraId]->supported_test_pattern_modes_cnt,
10051 MAX_TEST_PATTERN_CNT);
10052 for (size_t i = 0; i < count; i++) {
10053 int testpatternMode = lookupFwkName(TEST_PATTERN_MAP, METADATA_MAP_SIZE(TEST_PATTERN_MAP),
10054 gCamCapability[cameraId]->supported_test_pattern_modes[i]);
10055 if (NAME_NOT_FOUND != testpatternMode) {
10056 avail_testpattern_modes[size] = testpatternMode;
10057 size++;
10058 }
10059 }
10060 staticInfo.update(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
10061 avail_testpattern_modes,
10062 size);
10063
10064 uint8_t max_pipeline_depth = (uint8_t)(MAX_INFLIGHT_REQUESTS + EMPTY_PIPELINE_DELAY + FRAME_SKIP_DELAY);
10065 staticInfo.update(ANDROID_REQUEST_PIPELINE_MAX_DEPTH,
10066 &max_pipeline_depth,
10067 1);
10068
10069 int32_t partial_result_count = PARTIAL_RESULT_COUNT;
10070 staticInfo.update(ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
10071 &partial_result_count,
10072 1);
10073
10074 int32_t max_stall_duration = MAX_REPROCESS_STALL;
10075 staticInfo.update(ANDROID_REPROCESS_MAX_CAPTURE_STALL, &max_stall_duration, 1);
10076
10077 Vector<uint8_t> available_capabilities;
10078 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE);
10079 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR);
10080 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_POST_PROCESSING);
10081 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS);
10082 if (supportBurst) {
10083 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE);
10084 }
10085 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING);
10086 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING);
10087 if (hfrEnable && available_hfr_configs.array()) {
10088 available_capabilities.add(
10089 ANDROID_REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO);
10090 }
10091
10092 if (CAM_SENSOR_YUV != gCamCapability[cameraId]->sensor_type.sens_type) {
10093 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_RAW);
10094 }
10095 staticInfo.update(ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
10096 available_capabilities.array(),
10097 available_capabilities.size());
10098
10099 //aeLockAvailable is set to true if the capabilities include MANUAL_SENSOR or BURST_CAPTURE.
10100 //Assumption is that all bayer cameras support MANUAL_SENSOR.
10101 uint8_t aeLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
10102 ANDROID_CONTROL_AE_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AE_LOCK_AVAILABLE_FALSE;
10103
10104 staticInfo.update(ANDROID_CONTROL_AE_LOCK_AVAILABLE,
10105 &aeLockAvailable, 1);
10106
10107 //awbLockAvailable is set to true if the capabilities include MANUAL_POST_PROCESSING or
10108 //BURST_CAPTURE. Assumption is that all bayer cameras support MANUAL_POST_PROCESSING.
10109 uint8_t awbLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
10110 ANDROID_CONTROL_AWB_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AWB_LOCK_AVAILABLE_FALSE;
10111
10112 staticInfo.update(ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
10113 &awbLockAvailable, 1);
10114
10115 int32_t max_input_streams = 1;
10116 staticInfo.update(ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
10117 &max_input_streams,
10118 1);
10119
10120 /* format of the map is : input format, num_output_formats, outputFormat1,..,outputFormatN */
10121 int32_t io_format_map[] = {HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 2,
10122 HAL_PIXEL_FORMAT_BLOB, HAL_PIXEL_FORMAT_YCbCr_420_888,
10123 HAL_PIXEL_FORMAT_YCbCr_420_888, 2, HAL_PIXEL_FORMAT_BLOB,
10124 HAL_PIXEL_FORMAT_YCbCr_420_888};
10125 staticInfo.update(ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
10126 io_format_map, sizeof(io_format_map)/sizeof(io_format_map[0]));
10127
10128 int32_t max_latency = ANDROID_SYNC_MAX_LATENCY_PER_FRAME_CONTROL;
10129 staticInfo.update(ANDROID_SYNC_MAX_LATENCY,
10130 &max_latency,
10131 1);
10132
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010133#ifndef USE_HAL_3_3
10134 int32_t isp_sensitivity_range[2];
10135 isp_sensitivity_range[0] =
10136 gCamCapability[cameraId]->isp_sensitivity_range.min_sensitivity;
10137 isp_sensitivity_range[1] =
10138 gCamCapability[cameraId]->isp_sensitivity_range.max_sensitivity;
10139 staticInfo.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
10140 isp_sensitivity_range,
10141 sizeof(isp_sensitivity_range) / sizeof(isp_sensitivity_range[0]));
10142#endif
10143
Thierry Strudel3d639192016-09-09 11:52:26 -070010144 uint8_t available_hot_pixel_modes[] = {ANDROID_HOT_PIXEL_MODE_FAST,
10145 ANDROID_HOT_PIXEL_MODE_HIGH_QUALITY};
10146 staticInfo.update(ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
10147 available_hot_pixel_modes,
10148 sizeof(available_hot_pixel_modes)/sizeof(available_hot_pixel_modes[0]));
10149
10150 uint8_t available_shading_modes[] = {ANDROID_SHADING_MODE_OFF,
10151 ANDROID_SHADING_MODE_FAST,
10152 ANDROID_SHADING_MODE_HIGH_QUALITY};
10153 staticInfo.update(ANDROID_SHADING_AVAILABLE_MODES,
10154 available_shading_modes,
10155 3);
10156
10157 uint8_t available_lens_shading_map_modes[] = {ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF,
10158 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON};
10159 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
10160 available_lens_shading_map_modes,
10161 2);
10162
10163 uint8_t available_edge_modes[] = {ANDROID_EDGE_MODE_OFF,
10164 ANDROID_EDGE_MODE_FAST,
10165 ANDROID_EDGE_MODE_HIGH_QUALITY,
10166 ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG};
10167 staticInfo.update(ANDROID_EDGE_AVAILABLE_EDGE_MODES,
10168 available_edge_modes,
10169 sizeof(available_edge_modes)/sizeof(available_edge_modes[0]));
10170
10171 uint8_t available_noise_red_modes[] = {ANDROID_NOISE_REDUCTION_MODE_OFF,
10172 ANDROID_NOISE_REDUCTION_MODE_FAST,
10173 ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY,
10174 ANDROID_NOISE_REDUCTION_MODE_MINIMAL,
10175 ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG};
10176 staticInfo.update(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
10177 available_noise_red_modes,
10178 sizeof(available_noise_red_modes)/sizeof(available_noise_red_modes[0]));
10179
10180 uint8_t available_tonemap_modes[] = {ANDROID_TONEMAP_MODE_CONTRAST_CURVE,
10181 ANDROID_TONEMAP_MODE_FAST,
10182 ANDROID_TONEMAP_MODE_HIGH_QUALITY};
10183 staticInfo.update(ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
10184 available_tonemap_modes,
10185 sizeof(available_tonemap_modes)/sizeof(available_tonemap_modes[0]));
10186
10187 uint8_t available_hot_pixel_map_modes[] = {ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF};
10188 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
10189 available_hot_pixel_map_modes,
10190 sizeof(available_hot_pixel_map_modes)/sizeof(available_hot_pixel_map_modes[0]));
10191
10192 val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
10193 gCamCapability[cameraId]->reference_illuminant1);
10194 if (NAME_NOT_FOUND != val) {
10195 uint8_t fwkReferenceIlluminant = (uint8_t)val;
10196 staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT1, &fwkReferenceIlluminant, 1);
10197 }
10198
10199 val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
10200 gCamCapability[cameraId]->reference_illuminant2);
10201 if (NAME_NOT_FOUND != val) {
10202 uint8_t fwkReferenceIlluminant = (uint8_t)val;
10203 staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT2, &fwkReferenceIlluminant, 1);
10204 }
10205
10206 staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX1, (camera_metadata_rational_t *)
10207 (void *)gCamCapability[cameraId]->forward_matrix1,
10208 FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
10209
10210 staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX2, (camera_metadata_rational_t *)
10211 (void *)gCamCapability[cameraId]->forward_matrix2,
10212 FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
10213
10214 staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM1, (camera_metadata_rational_t *)
10215 (void *)gCamCapability[cameraId]->color_transform1,
10216 COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
10217
10218 staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM2, (camera_metadata_rational_t *)
10219 (void *)gCamCapability[cameraId]->color_transform2,
10220 COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
10221
10222 staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM1, (camera_metadata_rational_t *)
10223 (void *)gCamCapability[cameraId]->calibration_transform1,
10224 CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
10225
10226 staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM2, (camera_metadata_rational_t *)
10227 (void *)gCamCapability[cameraId]->calibration_transform2,
10228 CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
10229
10230 int32_t request_keys_basic[] = {ANDROID_COLOR_CORRECTION_MODE,
10231 ANDROID_COLOR_CORRECTION_TRANSFORM, ANDROID_COLOR_CORRECTION_GAINS,
10232 ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
10233 ANDROID_CONTROL_AE_ANTIBANDING_MODE, ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
10234 ANDROID_CONTROL_AE_LOCK, ANDROID_CONTROL_AE_MODE,
10235 ANDROID_CONTROL_AE_REGIONS, ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
10236 ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, ANDROID_CONTROL_AF_MODE,
10237 ANDROID_CONTROL_AF_TRIGGER, ANDROID_CONTROL_AWB_LOCK,
10238 ANDROID_CONTROL_AWB_MODE, ANDROID_CONTROL_CAPTURE_INTENT,
10239 ANDROID_CONTROL_EFFECT_MODE, ANDROID_CONTROL_MODE,
10240 ANDROID_CONTROL_SCENE_MODE, ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
10241 ANDROID_DEMOSAIC_MODE, ANDROID_EDGE_MODE,
10242 ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
10243 ANDROID_JPEG_GPS_COORDINATES,
10244 ANDROID_JPEG_GPS_PROCESSING_METHOD, ANDROID_JPEG_GPS_TIMESTAMP,
10245 ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY, ANDROID_JPEG_THUMBNAIL_QUALITY,
10246 ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE, ANDROID_LENS_FILTER_DENSITY,
10247 ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
10248 ANDROID_LENS_OPTICAL_STABILIZATION_MODE, ANDROID_NOISE_REDUCTION_MODE,
10249 ANDROID_REQUEST_ID, ANDROID_REQUEST_TYPE,
10250 ANDROID_SCALER_CROP_REGION, ANDROID_SENSOR_EXPOSURE_TIME,
10251 ANDROID_SENSOR_FRAME_DURATION, ANDROID_HOT_PIXEL_MODE,
10252 ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE,
10253 ANDROID_SENSOR_SENSITIVITY, ANDROID_SHADING_MODE,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010254#ifndef USE_HAL_3_3
10255 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
10256#endif
Thierry Strudel3d639192016-09-09 11:52:26 -070010257 ANDROID_STATISTICS_FACE_DETECT_MODE,
Thierry Strudel54dc9782017-02-15 12:12:10 -080010258 ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
Thierry Strudel3d639192016-09-09 11:52:26 -070010259 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, ANDROID_TONEMAP_CURVE_BLUE,
10260 ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
Shuzhen Wang2abea3d2016-03-31 11:09:27 -070010261 ANDROID_BLACK_LEVEL_LOCK, NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE,
Samuel Ha68ba5172016-12-15 18:41:12 -080010262 /* DevCamDebug metadata request_keys_basic */
10263 DEVCAMDEBUG_META_ENABLE,
10264 /* DevCamDebug metadata end */
Shuzhen Wang14415f52016-11-16 18:26:18 -080010265 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE,
Ranjith Kagathi Ananda0533b682017-03-24 17:52:46 -070010266 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS,
Ranjith Kagathi Anandae5df3752017-04-28 11:22:51 -070010267 TANGO_MODE_DATA_SENSOR_FULLFOV,
Shuzhen Wangf1b4ddc2017-04-10 18:22:11 -070010268 NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER,
Emilian Peev656e4fa2017-06-02 16:47:04 +010010269 NEXUS_EXPERIMENTAL_2017_PD_DATA_ENABLE,
Samuel Ha68ba5172016-12-15 18:41:12 -080010270 };
Thierry Strudel3d639192016-09-09 11:52:26 -070010271
10272 size_t request_keys_cnt =
10273 sizeof(request_keys_basic)/sizeof(request_keys_basic[0]);
10274 Vector<int32_t> available_request_keys;
10275 available_request_keys.appendArray(request_keys_basic, request_keys_cnt);
10276 if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
10277 available_request_keys.add(ANDROID_CONTROL_AF_REGIONS);
10278 }
10279
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010280 if (gExposeEnableZslKey) {
Chenjie Luo4a761802017-06-13 17:35:54 +000010281 available_request_keys.add(ANDROID_CONTROL_ENABLE_ZSL);
Chien-Yu Chen0a921f92017-08-27 17:25:33 -070010282 available_request_keys.add(NEXUS_EXPERIMENTAL_2017_POSTVIEW);
Chien-Yu Chenb0981e32017-08-28 19:27:35 -070010283 available_request_keys.add(NEXUS_EXPERIMENTAL_2017_CONTINUOUS_ZSL_CAPTURE);
Chien-Yu Chenec328c82017-08-30 16:41:08 -070010284 available_request_keys.add(NEXUS_EXPERIMENTAL_2017_DISABLE_HDRPLUS);
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010285 }
10286
Thierry Strudel3d639192016-09-09 11:52:26 -070010287 staticInfo.update(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS,
10288 available_request_keys.array(), available_request_keys.size());
10289
10290 int32_t result_keys_basic[] = {ANDROID_COLOR_CORRECTION_TRANSFORM,
10291 ANDROID_COLOR_CORRECTION_GAINS, ANDROID_CONTROL_AE_MODE, ANDROID_CONTROL_AE_REGIONS,
10292 ANDROID_CONTROL_AE_STATE, ANDROID_CONTROL_AF_MODE,
10293 ANDROID_CONTROL_AF_STATE, ANDROID_CONTROL_AWB_MODE,
10294 ANDROID_CONTROL_AWB_STATE, ANDROID_CONTROL_MODE, ANDROID_EDGE_MODE,
10295 ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
10296 ANDROID_FLASH_STATE, ANDROID_JPEG_GPS_COORDINATES, ANDROID_JPEG_GPS_PROCESSING_METHOD,
10297 ANDROID_JPEG_GPS_TIMESTAMP, ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY,
10298 ANDROID_JPEG_THUMBNAIL_QUALITY, ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE,
10299 ANDROID_LENS_FILTER_DENSITY, ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
10300 ANDROID_LENS_FOCUS_RANGE, ANDROID_LENS_STATE, ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
10301 ANDROID_NOISE_REDUCTION_MODE, ANDROID_REQUEST_ID,
10302 ANDROID_SCALER_CROP_REGION, ANDROID_SHADING_MODE, ANDROID_SENSOR_EXPOSURE_TIME,
10303 ANDROID_SENSOR_FRAME_DURATION, ANDROID_SENSOR_SENSITIVITY,
10304 ANDROID_SENSOR_TIMESTAMP, ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
10305 ANDROID_SENSOR_PROFILE_TONE_CURVE, ANDROID_BLACK_LEVEL_LOCK, ANDROID_TONEMAP_CURVE_BLUE,
10306 ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
Thierry Strudel54dc9782017-02-15 12:12:10 -080010307 ANDROID_STATISTICS_FACE_DETECT_MODE,
Thierry Strudel3d639192016-09-09 11:52:26 -070010308 ANDROID_STATISTICS_SHARPNESS_MAP, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
10309 ANDROID_STATISTICS_PREDICTED_COLOR_GAINS, ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
10310 ANDROID_STATISTICS_SCENE_FLICKER, ANDROID_STATISTICS_FACE_RECTANGLES,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010311 ANDROID_STATISTICS_FACE_SCORES,
10312#ifndef USE_HAL_3_3
10313 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
10314#endif
Shuzhen Wang2abea3d2016-03-31 11:09:27 -070010315 NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE,
Shuzhen Wange763e802016-03-31 10:24:29 -070010316 NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE,
Samuel Ha68ba5172016-12-15 18:41:12 -080010317 // DevCamDebug metadata result_keys_basic
10318 DEVCAMDEBUG_META_ENABLE,
10319 // DevCamDebug metadata result_keys AF
10320 DEVCAMDEBUG_AF_LENS_POSITION,
10321 DEVCAMDEBUG_AF_TOF_CONFIDENCE,
10322 DEVCAMDEBUG_AF_TOF_DISTANCE,
10323 DEVCAMDEBUG_AF_LUMA,
10324 DEVCAMDEBUG_AF_HAF_STATE,
10325 DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
10326 DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
10327 DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
10328 DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
10329 DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
10330 DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
10331 DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
10332 DEVCAMDEBUG_AF_MONITOR_REFOCUS,
10333 DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
10334 DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
10335 DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
10336 DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
10337 DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
10338 DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
10339 DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
10340 DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
10341 DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
10342 DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
10343 DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
10344 DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
10345 DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
10346 DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
10347 // DevCamDebug metadata result_keys AEC
10348 DEVCAMDEBUG_AEC_TARGET_LUMA,
10349 DEVCAMDEBUG_AEC_COMP_LUMA,
10350 DEVCAMDEBUG_AEC_AVG_LUMA,
10351 DEVCAMDEBUG_AEC_CUR_LUMA,
10352 DEVCAMDEBUG_AEC_LINECOUNT,
10353 DEVCAMDEBUG_AEC_REAL_GAIN,
10354 DEVCAMDEBUG_AEC_EXP_INDEX,
10355 DEVCAMDEBUG_AEC_LUX_IDX,
Samuel Ha34229982017-02-17 13:51:11 -080010356 // DevCamDebug metadata result_keys zzHDR
10357 DEVCAMDEBUG_AEC_L_REAL_GAIN,
10358 DEVCAMDEBUG_AEC_L_LINECOUNT,
10359 DEVCAMDEBUG_AEC_S_REAL_GAIN,
10360 DEVCAMDEBUG_AEC_S_LINECOUNT,
10361 DEVCAMDEBUG_AEC_HDR_SENSITIVITY_RATIO,
10362 DEVCAMDEBUG_AEC_HDR_EXP_TIME_RATIO,
10363 // DevCamDebug metadata result_keys ADRC
10364 DEVCAMDEBUG_AEC_TOTAL_DRC_GAIN,
10365 DEVCAMDEBUG_AEC_COLOR_DRC_GAIN,
10366 DEVCAMDEBUG_AEC_GTM_RATIO,
10367 DEVCAMDEBUG_AEC_LTM_RATIO,
10368 DEVCAMDEBUG_AEC_LA_RATIO,
10369 DEVCAMDEBUG_AEC_GAMMA_RATIO,
Samuel Habdf4fac2017-07-28 17:21:18 -070010370 // DevCamDebug metadata result_keys AEC MOTION
10371 DEVCAMDEBUG_AEC_CAMERA_MOTION_DX,
10372 DEVCAMDEBUG_AEC_CAMERA_MOTION_DY,
10373 DEVCAMDEBUG_AEC_SUBJECT_MOTION,
Samuel Ha68ba5172016-12-15 18:41:12 -080010374 // DevCamDebug metadata result_keys AWB
10375 DEVCAMDEBUG_AWB_R_GAIN,
10376 DEVCAMDEBUG_AWB_G_GAIN,
10377 DEVCAMDEBUG_AWB_B_GAIN,
10378 DEVCAMDEBUG_AWB_CCT,
10379 DEVCAMDEBUG_AWB_DECISION,
10380 /* DevCamDebug metadata end */
Shuzhen Wang14415f52016-11-16 18:26:18 -080010381 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE,
10382 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS,
10383 NEXUS_EXPERIMENTAL_2017_HISTOGRAM,
Shuzhen Wangf1b4ddc2017-04-10 18:22:11 -070010384 NEXUS_EXPERIMENTAL_2017_AF_REGIONS_CONFIDENCE,
Shuzhen Wangc89c77e2017-08-07 15:50:12 -070010385 NEXUS_EXPERIMENTAL_2017_EXP_TIME_BOOST,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010386 };
10387
Thierry Strudel3d639192016-09-09 11:52:26 -070010388 size_t result_keys_cnt =
10389 sizeof(result_keys_basic)/sizeof(result_keys_basic[0]);
10390
10391 Vector<int32_t> available_result_keys;
10392 available_result_keys.appendArray(result_keys_basic, result_keys_cnt);
10393 if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
10394 available_result_keys.add(ANDROID_CONTROL_AF_REGIONS);
10395 }
10396 if (CAM_SENSOR_RAW == gCamCapability[cameraId]->sensor_type.sens_type) {
10397 available_result_keys.add(ANDROID_SENSOR_NOISE_PROFILE);
10398 available_result_keys.add(ANDROID_SENSOR_GREEN_SPLIT);
10399 }
10400 if (supportedFaceDetectMode == 1) {
10401 available_result_keys.add(ANDROID_STATISTICS_FACE_RECTANGLES);
10402 available_result_keys.add(ANDROID_STATISTICS_FACE_SCORES);
10403 } else if ((supportedFaceDetectMode == 2) ||
10404 (supportedFaceDetectMode == 3)) {
10405 available_result_keys.add(ANDROID_STATISTICS_FACE_IDS);
10406 available_result_keys.add(ANDROID_STATISTICS_FACE_LANDMARKS);
10407 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010408#ifndef USE_HAL_3_3
10409 if (hasBlackRegions) {
10410 available_result_keys.add(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL);
10411 available_result_keys.add(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL);
10412 }
10413#endif
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010414
10415 if (gExposeEnableZslKey) {
10416 available_result_keys.add(ANDROID_CONTROL_ENABLE_ZSL);
Chien-Yu Chendaf68892017-08-25 12:56:40 -070010417 available_result_keys.add(NEXUS_EXPERIMENTAL_2017_NEXT_STILL_INTENT_REQUEST_READY);
Chien-Yu Chen0a921f92017-08-27 17:25:33 -070010418 available_result_keys.add(NEXUS_EXPERIMENTAL_2017_POSTVIEW_CONFIG);
10419 available_result_keys.add(NEXUS_EXPERIMENTAL_2017_POSTVIEW_DATA);
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010420 }
10421
Thierry Strudel3d639192016-09-09 11:52:26 -070010422 staticInfo.update(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
10423 available_result_keys.array(), available_result_keys.size());
10424
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010425 int32_t characteristics_keys_basic[] = {ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
Thierry Strudel3d639192016-09-09 11:52:26 -070010426 ANDROID_CONTROL_AE_AVAILABLE_MODES, ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
10427 ANDROID_CONTROL_AE_COMPENSATION_RANGE, ANDROID_CONTROL_AE_COMPENSATION_STEP,
10428 ANDROID_CONTROL_AF_AVAILABLE_MODES, ANDROID_CONTROL_AVAILABLE_EFFECTS,
10429 ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
10430 ANDROID_SCALER_CROPPING_TYPE,
10431 ANDROID_SYNC_MAX_LATENCY,
10432 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
10433 ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
10434 ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
10435 ANDROID_CONTROL_AWB_AVAILABLE_MODES, ANDROID_CONTROL_MAX_REGIONS,
10436 ANDROID_CONTROL_SCENE_MODE_OVERRIDES,ANDROID_FLASH_INFO_AVAILABLE,
10437 ANDROID_FLASH_INFO_CHARGE_DURATION, ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
10438 ANDROID_JPEG_MAX_SIZE, ANDROID_LENS_INFO_AVAILABLE_APERTURES,
10439 ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
10440 ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
10441 ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
10442 ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE, ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
10443 ANDROID_LENS_INFO_SHADING_MAP_SIZE, ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
10444 ANDROID_LENS_FACING,
10445 ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS, ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
10446 ANDROID_REQUEST_PIPELINE_MAX_DEPTH, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
10447 ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
10448 ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
10449 ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
10450 ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
10451 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
10452 /*ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,*/
10453 ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS, ANDROID_SENSOR_FORWARD_MATRIX1,
10454 ANDROID_SENSOR_REFERENCE_ILLUMINANT1, ANDROID_SENSOR_REFERENCE_ILLUMINANT2,
10455 ANDROID_SENSOR_FORWARD_MATRIX2, ANDROID_SENSOR_COLOR_TRANSFORM1,
10456 ANDROID_SENSOR_COLOR_TRANSFORM2, ANDROID_SENSOR_CALIBRATION_TRANSFORM1,
10457 ANDROID_SENSOR_CALIBRATION_TRANSFORM2, ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
10458 ANDROID_SENSOR_INFO_SENSITIVITY_RANGE, ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
10459 ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE, ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
10460 ANDROID_SENSOR_INFO_PHYSICAL_SIZE, ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
10461 ANDROID_SENSOR_INFO_WHITE_LEVEL, ANDROID_SENSOR_BASE_GAIN_FACTOR,
10462 ANDROID_SENSOR_BLACK_LEVEL_PATTERN, ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
10463 ANDROID_SENSOR_ORIENTATION, ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
10464 ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
Thierry Strudel54dc9782017-02-15 12:12:10 -080010465 ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
Thierry Strudel3d639192016-09-09 11:52:26 -070010466 ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
10467 ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE, ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
10468 ANDROID_EDGE_AVAILABLE_EDGE_MODES,
10469 ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
10470 ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
10471 ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
10472 ANDROID_TONEMAP_MAX_CURVE_POINTS,
10473 ANDROID_CONTROL_AVAILABLE_MODES,
10474 ANDROID_CONTROL_AE_LOCK_AVAILABLE,
10475 ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
10476 ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
10477 ANDROID_SHADING_AVAILABLE_MODES,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010478 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
10479#ifndef USE_HAL_3_3
10480 ANDROID_SENSOR_OPAQUE_RAW_SIZE,
10481 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
10482#endif
10483 };
10484
10485 Vector<int32_t> available_characteristics_keys;
10486 available_characteristics_keys.appendArray(characteristics_keys_basic,
10487 sizeof(characteristics_keys_basic)/sizeof(int32_t));
10488#ifndef USE_HAL_3_3
10489 if (hasBlackRegions) {
10490 available_characteristics_keys.add(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS);
10491 }
10492#endif
Emilian Peev0f3c3162017-03-15 12:57:46 +000010493
10494 if (0 <= indexPD) {
10495 int32_t depthKeys[] = {
10496 ANDROID_DEPTH_MAX_DEPTH_SAMPLES,
10497 ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
10498 ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS,
10499 ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS,
10500 ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE
10501 };
10502 available_characteristics_keys.appendArray(depthKeys,
10503 sizeof(depthKeys) / sizeof(depthKeys[0]));
10504 }
10505
Thierry Strudel3d639192016-09-09 11:52:26 -070010506 staticInfo.update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010507 available_characteristics_keys.array(),
10508 available_characteristics_keys.size());
Thierry Strudel3d639192016-09-09 11:52:26 -070010509
10510    /* Available stall durations depend on the HW + SW and will differ across devices */
10511    /* Have to add entries for RAW after implementation */
10512 int32_t stall_formats[] = {HAL_PIXEL_FORMAT_BLOB, ANDROID_SCALER_AVAILABLE_FORMATS_RAW16};
10513 size_t stall_formats_count = sizeof(stall_formats)/sizeof(int32_t);
10514
10515 Vector<int64_t> available_stall_durations;
10516 for (uint32_t j = 0; j < stall_formats_count; j++) {
10517 if (stall_formats[j] == HAL_PIXEL_FORMAT_BLOB) {
10518 for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
10519 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
10520 available_stall_durations.add(stall_formats[j]);
10521 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
10522 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
10523 available_stall_durations.add(gCamCapability[cameraId]->jpeg_stall_durations[i]);
10524 }
10525 } else {
10526 for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
10527 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10528 available_stall_durations.add(stall_formats[j]);
10529 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
10530 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
10531 available_stall_durations.add(gCamCapability[cameraId]->raw16_stall_durations[i]);
10532 }
10533 }
10534 }
10535 staticInfo.update(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,
10536 available_stall_durations.array(),
10537 available_stall_durations.size());
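    // Illustrative layout sketch (hypothetical size/duration, not taken from a
    // real sensor): every entry appended above is a (format, width, height,
    // stall duration in ns) quad, so a single 12MP JPEG size with a ~33 ms
    // stall would be published as:
    //
    //   int64_t example_stall[] = { HAL_PIXEL_FORMAT_BLOB, 4032, 3024, 33333333LL };
    //   staticInfo.update(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,
    //           example_stall, sizeof(example_stall) / sizeof(example_stall[0]));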
10538
10539 //QCAMERA3_OPAQUE_RAW
10540 uint8_t raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
10541 cam_format_t fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
10542 switch (gCamCapability[cameraId]->opaque_raw_fmt) {
10543 case LEGACY_RAW:
10544 if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
10545 fmt = CAM_FORMAT_BAYER_QCOM_RAW_8BPP_GBRG;
10546 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
10547 fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
10548 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
10549 fmt = CAM_FORMAT_BAYER_QCOM_RAW_12BPP_GBRG;
10550 raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
10551 break;
10552 case MIPI_RAW:
10553 if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
10554 fmt = CAM_FORMAT_BAYER_MIPI_RAW_8BPP_GBRG;
10555 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
10556 fmt = CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG;
10557 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
10558 fmt = CAM_FORMAT_BAYER_MIPI_RAW_12BPP_GBRG;
10559 raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_MIPI;
10560 break;
10561 default:
10562 LOGE("unknown opaque_raw_format %d",
10563 gCamCapability[cameraId]->opaque_raw_fmt);
10564 break;
10565 }
10566 staticInfo.update(QCAMERA3_OPAQUE_RAW_FORMAT, &raw_format, 1);
10567
10568 Vector<int32_t> strides;
10569 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
10570 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10571 cam_stream_buf_plane_info_t buf_planes;
10572 strides.add(gCamCapability[cameraId]->raw_dim[i].width);
10573 strides.add(gCamCapability[cameraId]->raw_dim[i].height);
10574 mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
10575 &gCamCapability[cameraId]->padding_info, &buf_planes);
10576 strides.add(buf_planes.plane_info.mp[0].stride);
10577 }
10578 staticInfo.update(QCAMERA3_OPAQUE_RAW_STRIDES, strides.array(),
10579 strides.size());
10580
Mansoor Aftab58465fa2017-01-26 15:02:44 -080010581 //TBD: remove the following line once backend advertises zzHDR in feature mask
10582 gCamCapability[cameraId]->qcom_supported_feature_mask |= CAM_QCOM_FEATURE_ZIGZAG_HDR;
Thierry Strudel04e026f2016-10-10 11:27:36 -070010583 //Video HDR default
10584 if ((gCamCapability[cameraId]->qcom_supported_feature_mask) &
10585 (CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR |
Mansoor Aftab58465fa2017-01-26 15:02:44 -080010586 CAM_QCOM_FEATURE_ZIGZAG_HDR | CAM_QCOM_FEATURE_SENSOR_HDR)) {
Thierry Strudel04e026f2016-10-10 11:27:36 -070010587 int32_t vhdr_mode[] = {
10588 QCAMERA3_VIDEO_HDR_MODE_OFF,
10589 QCAMERA3_VIDEO_HDR_MODE_ON};
10590
10591 size_t vhdr_mode_count = sizeof(vhdr_mode) / sizeof(int32_t);
10592 staticInfo.update(QCAMERA3_AVAILABLE_VIDEO_HDR_MODES,
10593 vhdr_mode, vhdr_mode_count);
10594 }
10595
Thierry Strudel3d639192016-09-09 11:52:26 -070010596 staticInfo.update(QCAMERA3_DUALCAM_CALIB_META_DATA_BLOB,
10597 (const uint8_t*)&gCamCapability[cameraId]->related_cam_calibration,
10598 sizeof(gCamCapability[cameraId]->related_cam_calibration));
10599
10600 uint8_t isMonoOnly =
10601 (gCamCapability[cameraId]->color_arrangement == CAM_FILTER_ARRANGEMENT_Y);
10602 staticInfo.update(QCAMERA3_SENSOR_IS_MONO_ONLY,
10603 &isMonoOnly, 1);
10604
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010605#ifndef USE_HAL_3_3
10606 Vector<int32_t> opaque_size;
10607 for (size_t j = 0; j < scalar_formats_count; j++) {
10608 if (scalar_formats[j] == ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE) {
10609 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
10610 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10611 cam_stream_buf_plane_info_t buf_planes;
10612
10613 rc = mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
10614 &gCamCapability[cameraId]->padding_info, &buf_planes);
10615
10616 if (rc == 0) {
10617 opaque_size.add(gCamCapability[cameraId]->raw_dim[i].width);
10618 opaque_size.add(gCamCapability[cameraId]->raw_dim[i].height);
10619 opaque_size.add(buf_planes.plane_info.frame_len);
10620                } else {
10621 LOGE("raw frame calculation failed!");
10622 }
10623 }
10624 }
10625 }
10626
10627 if ((opaque_size.size() > 0) &&
10628 (opaque_size.size() % PER_CONFIGURATION_SIZE_3 == 0))
10629 staticInfo.update(ANDROID_SENSOR_OPAQUE_RAW_SIZE, opaque_size.array(), opaque_size.size());
10630 else
10631        LOGW("Warning: ANDROID_SENSOR_OPAQUE_RAW_SIZE is using rough estimation (2 bytes/pixel)");
10632#endif
10633
Thierry Strudel04e026f2016-10-10 11:27:36 -070010634 if (gCamCapability[cameraId]->supported_ir_mode_cnt > 0) {
10635 int32_t avail_ir_modes[CAM_IR_MODE_MAX];
10636 size = 0;
10637 count = CAM_IR_MODE_MAX;
10638 count = MIN(gCamCapability[cameraId]->supported_ir_mode_cnt, count);
10639 for (size_t i = 0; i < count; i++) {
10640 int val = lookupFwkName(IR_MODES_MAP, METADATA_MAP_SIZE(IR_MODES_MAP),
10641 gCamCapability[cameraId]->supported_ir_modes[i]);
10642 if (NAME_NOT_FOUND != val) {
10643 avail_ir_modes[size] = (int32_t)val;
10644 size++;
10645 }
10646 }
10647 staticInfo.update(QCAMERA3_IR_AVAILABLE_MODES,
10648 avail_ir_modes, size);
10649 }
10650
Thierry Strudel295a0ca2016-11-03 18:38:47 -070010651 if (gCamCapability[cameraId]->supported_instant_aec_modes_cnt > 0) {
10652 int32_t available_instant_aec_modes[CAM_AEC_CONVERGENCE_MAX];
10653 size = 0;
10654 count = CAM_AEC_CONVERGENCE_MAX;
10655 count = MIN(gCamCapability[cameraId]->supported_instant_aec_modes_cnt, count);
10656 for (size_t i = 0; i < count; i++) {
10657 int val = lookupFwkName(INSTANT_AEC_MODES_MAP, METADATA_MAP_SIZE(INSTANT_AEC_MODES_MAP),
10658 gCamCapability[cameraId]->supported_instant_aec_modes[i]);
10659 if (NAME_NOT_FOUND != val) {
10660 available_instant_aec_modes[size] = (int32_t)val;
10661 size++;
10662 }
10663 }
10664 staticInfo.update(QCAMERA3_INSTANT_AEC_AVAILABLE_MODES,
10665 available_instant_aec_modes, size);
10666 }
10667
Thierry Strudel54dc9782017-02-15 12:12:10 -080010668 int32_t sharpness_range[] = {
10669 gCamCapability[cameraId]->sharpness_ctrl.min_value,
10670 gCamCapability[cameraId]->sharpness_ctrl.max_value};
10671 staticInfo.update(QCAMERA3_SHARPNESS_RANGE, sharpness_range, 2);
10672
10673 if (gCamCapability[cameraId]->supported_binning_correction_mode_cnt > 0) {
10674 int32_t avail_binning_modes[CAM_BINNING_CORRECTION_MODE_MAX];
10675 size = 0;
10676 count = CAM_BINNING_CORRECTION_MODE_MAX;
10677 count = MIN(gCamCapability[cameraId]->supported_binning_correction_mode_cnt, count);
10678 for (size_t i = 0; i < count; i++) {
10679 int val = lookupFwkName(BINNING_CORRECTION_MODES_MAP,
10680 METADATA_MAP_SIZE(BINNING_CORRECTION_MODES_MAP),
10681 gCamCapability[cameraId]->supported_binning_modes[i]);
10682 if (NAME_NOT_FOUND != val) {
10683 avail_binning_modes[size] = (int32_t)val;
10684 size++;
10685 }
10686 }
10687 staticInfo.update(QCAMERA3_AVAILABLE_BINNING_CORRECTION_MODES,
10688 avail_binning_modes, size);
10689 }
10690
10691 if (gCamCapability[cameraId]->supported_aec_modes_cnt > 0) {
10692 int32_t available_aec_modes[CAM_AEC_MODE_MAX];
10693 size = 0;
10694 count = MIN(gCamCapability[cameraId]->supported_aec_modes_cnt, CAM_AEC_MODE_MAX);
10695 for (size_t i = 0; i < count; i++) {
10696 int32_t val = lookupFwkName(AEC_MODES_MAP, METADATA_MAP_SIZE(AEC_MODES_MAP),
10697 gCamCapability[cameraId]->supported_aec_modes[i]);
10698 if (NAME_NOT_FOUND != val)
10699 available_aec_modes[size++] = val;
10700 }
10701 staticInfo.update(QCAMERA3_EXPOSURE_METER_AVAILABLE_MODES,
10702 available_aec_modes, size);
10703 }
10704
10705 if (gCamCapability[cameraId]->supported_iso_modes_cnt > 0) {
10706 int32_t available_iso_modes[CAM_ISO_MODE_MAX];
10707 size = 0;
10708 count = MIN(gCamCapability[cameraId]->supported_iso_modes_cnt, CAM_ISO_MODE_MAX);
10709 for (size_t i = 0; i < count; i++) {
10710 int32_t val = lookupFwkName(ISO_MODES_MAP, METADATA_MAP_SIZE(ISO_MODES_MAP),
10711 gCamCapability[cameraId]->supported_iso_modes[i]);
10712 if (NAME_NOT_FOUND != val)
10713 available_iso_modes[size++] = val;
10714 }
10715 staticInfo.update(QCAMERA3_ISO_AVAILABLE_MODES,
10716 available_iso_modes, size);
10717 }
10718
10719 int64_t available_exp_time_range[EXPOSURE_TIME_RANGE_CNT];
Jason Lee805955a2017-05-04 10:29:14 -070010720 for (size_t i = 0; i < EXPOSURE_TIME_RANGE_CNT; i++)
Thierry Strudel54dc9782017-02-15 12:12:10 -080010721 available_exp_time_range[i] = gCamCapability[cameraId]->exposure_time_range[i];
10722 staticInfo.update(QCAMERA3_EXP_TIME_RANGE,
10723 available_exp_time_range, EXPOSURE_TIME_RANGE_CNT);
10724
10725 int32_t available_saturation_range[4];
10726 available_saturation_range[0] = gCamCapability[cameraId]->saturation_ctrl.min_value;
10727 available_saturation_range[1] = gCamCapability[cameraId]->saturation_ctrl.max_value;
10728 available_saturation_range[2] = gCamCapability[cameraId]->saturation_ctrl.def_value;
10729 available_saturation_range[3] = gCamCapability[cameraId]->saturation_ctrl.step;
10730 staticInfo.update(QCAMERA3_SATURATION_RANGE,
10731 available_saturation_range, 4);
10732
10733 uint8_t is_hdr_values[2];
10734 is_hdr_values[0] = 0;
10735 is_hdr_values[1] = 1;
10736 staticInfo.update(QCAMERA3_STATS_IS_HDR_SCENE_VALUES,
10737 is_hdr_values, 2);
10738
10739 float is_hdr_confidence_range[2];
10740 is_hdr_confidence_range[0] = 0.0;
10741 is_hdr_confidence_range[1] = 1.0;
10742 staticInfo.update(QCAMERA3_STATS_IS_HDR_SCENE_CONFIDENCE_RANGE,
10743 is_hdr_confidence_range, 2);
10744
Emilian Peev0a972ef2017-03-16 10:25:53 +000010745 size_t eepromLength = strnlen(
10746 reinterpret_cast<const char *>(
10747 gCamCapability[cameraId]->eeprom_version_info),
10748 sizeof(gCamCapability[cameraId]->eeprom_version_info));
10749 if (0 < eepromLength) {
Zhijun Hea557c4c2017-03-16 18:37:53 -070010750 char easelInfo[] = ",E:N";
10751 char *eepromInfo = reinterpret_cast<char *>(gCamCapability[cameraId]->eeprom_version_info);
10752 if (eepromLength + sizeof(easelInfo) < MAX_EEPROM_VERSION_INFO_LEN) {
10753 eepromLength += sizeof(easelInfo);
Chien-Yu Chend77a5462017-06-02 18:00:38 -070010754 strlcat(eepromInfo, ((gEaselManagerClient != nullptr &&
Arnd Geis082a4d72017-08-24 10:33:07 -070010755 gEaselManagerClient->isEaselPresentOnDevice()) ? ",E-ver" : ",E:N"),
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010756 MAX_EEPROM_VERSION_INFO_LEN);
Zhijun Hea557c4c2017-03-16 18:37:53 -070010757 }
Emilian Peev0a972ef2017-03-16 10:25:53 +000010758 staticInfo.update(NEXUS_EXPERIMENTAL_2017_EEPROM_VERSION_INFO,
10759 gCamCapability[cameraId]->eeprom_version_info, eepromLength);
10760 }
10761
Thierry Strudel3d639192016-09-09 11:52:26 -070010762 gStaticMetadata[cameraId] = staticInfo.release();
10763 return rc;
10764}
10765
10766/*===========================================================================
10767 * FUNCTION : makeTable
10768 *
10769 * DESCRIPTION: make a table of sizes
10770 *
10771 * PARAMETERS :
10772 *
10773 *
10774 *==========================================================================*/
10775void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, size_t size,
10776 size_t max_size, int32_t *sizeTable)
10777{
10778 size_t j = 0;
10779 if (size > max_size) {
10780 size = max_size;
10781 }
10782 for (size_t i = 0; i < size; i++) {
10783 sizeTable[j] = dimTable[i].width;
10784 sizeTable[j+1] = dimTable[i].height;
10785 j+=2;
10786 }
10787}
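
/* Illustrative usage sketch (not part of the build): makeTable() flattens a
 * cam_dimension_t array into the interleaved {width, height} int32_t pairs
 * that the framework size tags expect. With hypothetical dimensions:
 *
 *   cam_dimension_t dims[2];
 *   dims[0].width = 4032; dims[0].height = 3024;
 *   dims[1].width = 1920; dims[1].height = 1080;
 *   int32_t sizeTable[2 * 2];
 *   makeTable(dims, 2, 2, sizeTable);
 *   // sizeTable now holds {4032, 3024, 1920, 1080}
 */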
10788
10789/*===========================================================================
10790 * FUNCTION : makeFPSTable
10791 *
10792 * DESCRIPTION: make a table of fps ranges
10793 *
10794 * PARAMETERS :
10795 *
10796 *==========================================================================*/
10797void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, size_t size,
10798 size_t max_size, int32_t *fpsRangesTable)
10799{
10800 size_t j = 0;
10801 if (size > max_size) {
10802 size = max_size;
10803 }
10804 for (size_t i = 0; i < size; i++) {
10805 fpsRangesTable[j] = (int32_t)fpsTable[i].min_fps;
10806 fpsRangesTable[j+1] = (int32_t)fpsTable[i].max_fps;
10807 j+=2;
10808 }
10809}
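
/* Illustrative usage sketch (not part of the build, field names as used in the
 * loop above): makeFPSTable() converts the backend float fps ranges into the
 * interleaved {min, max} int32_t pairs used for the AE target fps range tag.
 *
 *   cam_fps_range_t fps[2];
 *   fps[0].min_fps = 15.0f; fps[0].max_fps = 30.0f;
 *   fps[1].min_fps = 30.0f; fps[1].max_fps = 30.0f;
 *   int32_t fpsRangesTable[2 * 2];
 *   makeFPSTable(fps, 2, 2, fpsRangesTable);
 *   // fpsRangesTable now holds {15, 30, 30, 30}
 */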
10810
10811/*===========================================================================
10812 * FUNCTION : makeOverridesList
10813 *
10814 * DESCRIPTION: make a list of scene mode overrides
10815 *
10816 * PARAMETERS :
10817 *
10818 *
10819 *==========================================================================*/
10820void QCamera3HardwareInterface::makeOverridesList(
10821 cam_scene_mode_overrides_t* overridesTable, size_t size, size_t max_size,
10822 uint8_t *overridesList, uint8_t *supported_indexes, uint32_t camera_id)
10823{
10824 /*daemon will give a list of overrides for all scene modes.
10825    /* The daemon gives a list of overrides for all scene modes. However,
10826       we should send the framework only the overrides for the scene modes
10827       that it supports. */
10828 if (size > max_size) {
10829 size = max_size;
10830 }
10831 size_t focus_count = CAM_FOCUS_MODE_MAX;
10832 focus_count = MIN(gCamCapability[camera_id]->supported_focus_modes_cnt,
10833 focus_count);
10834 for (size_t i = 0; i < size; i++) {
10835 bool supt = false;
10836 size_t index = supported_indexes[i];
10837 overridesList[j] = gCamCapability[camera_id]->flash_available ?
10838 ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH : ANDROID_CONTROL_AE_MODE_ON;
10839 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
10840 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
10841 overridesTable[index].awb_mode);
10842 if (NAME_NOT_FOUND != val) {
10843 overridesList[j+1] = (uint8_t)val;
10844 }
10845 uint8_t focus_override = overridesTable[index].af_mode;
10846 for (size_t k = 0; k < focus_count; k++) {
10847 if (gCamCapability[camera_id]->supported_focus_modes[k] == focus_override) {
10848 supt = true;
10849 break;
10850 }
10851 }
10852 if (supt) {
10853 val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
10854 focus_override);
10855 if (NAME_NOT_FOUND != val) {
10856 overridesList[j+2] = (uint8_t)val;
10857 }
10858 } else {
10859 overridesList[j+2] = ANDROID_CONTROL_AF_MODE_OFF;
10860 }
10861 j+=3;
10862 }
10863}
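
/* Output layout sketch (hypothetical values): for every supported scene mode,
 * makeOverridesList() emits one {ae, awb, af} triple into overridesList, which
 * is what ANDROID_CONTROL_SCENE_MODE_OVERRIDES expects. For a flash-capable
 * camera whose daemon override maps to auto white balance and a supported
 * continuous-picture focus mode, one triple would read:
 *
 *   { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH,
 *     ANDROID_CONTROL_AWB_MODE_AUTO,
 *     ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE }
 */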
10864
10865/*===========================================================================
10866 * FUNCTION : filterJpegSizes
10867 *
10868 * DESCRIPTION: Returns the supported JPEG sizes, i.e. the processed sizes that
10869 *              are no smaller than the active array downscaled by downscale_factor
10870 *
10871 * PARAMETERS :
10872 *
10873 * RETURN : length of jpegSizes array
10874 *==========================================================================*/
10875
10876size_t QCamera3HardwareInterface::filterJpegSizes(int32_t *jpegSizes, int32_t *processedSizes,
10877 size_t processedSizesCnt, size_t maxCount, cam_rect_t active_array_size,
10878 uint8_t downscale_factor)
10879{
10880 if (0 == downscale_factor) {
10881 downscale_factor = 1;
10882 }
10883
10884 int32_t min_width = active_array_size.width / downscale_factor;
10885 int32_t min_height = active_array_size.height / downscale_factor;
10886 size_t jpegSizesCnt = 0;
10887 if (processedSizesCnt > maxCount) {
10888 processedSizesCnt = maxCount;
10889 }
10890 for (size_t i = 0; i < processedSizesCnt; i+=2) {
10891 if (processedSizes[i] >= min_width && processedSizes[i+1] >= min_height) {
10892 jpegSizes[jpegSizesCnt] = processedSizes[i];
10893 jpegSizes[jpegSizesCnt+1] = processedSizes[i+1];
10894 jpegSizesCnt += 2;
10895 }
10896 }
10897 return jpegSizesCnt;
10898}
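
/* Worked example (hypothetical sizes): with a 4032x3024 active array and
 * downscale_factor = 2, the minimum accepted size is 2016x1512, so
 *
 *   processedSizes = {4032, 3024, 2560, 1920, 1280, 720}
 *
 * is filtered to
 *
 *   jpegSizes = {4032, 3024, 2560, 1920}
 *
 * and filterJpegSizes() returns 4, the number of int32_t values written.
 */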
10899
10900/*===========================================================================
10901 * FUNCTION : computeNoiseModelEntryS
10902 *
10903 * DESCRIPTION: function to map a given sensitivity to the S noise
10904 * model parameters in the DNG noise model.
10905 *
10906 * PARAMETERS : sens : the sensor sensitivity
10907 *
10908 * RETURN     : S (sensor amplification) noise
10909 *
10910 *==========================================================================*/
10911double QCamera3HardwareInterface::computeNoiseModelEntryS(int32_t sens) {
10912 double s = gCamCapability[mCameraId]->gradient_S * sens +
10913 gCamCapability[mCameraId]->offset_S;
10914 return ((s < 0.0) ? 0.0 : s);
10915}
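
/* Worked example (hypothetical calibration values): the Camera2/DNG noise
 * profile models per-channel noise as sqrt(S * x + O). With gradient_S =
 * 3.0e-06 and offset_S = 4.0e-06, a sensitivity of 100 yields
 *
 *   S = 3.0e-06 * 100 + 4.0e-06 = 3.04e-04
 *
 * Negative results are clamped to 0.0 since a noise coefficient cannot be
 * negative.
 */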
10916
10917/*===========================================================================
10918 * FUNCTION : computeNoiseModelEntryO
10919 *
10920 * DESCRIPTION: function to map a given sensitivity to the O noise
10921 * model parameters in the DNG noise model.
10922 *
10923 * PARAMETERS : sens : the sensor sensitivity
10924 *
10925 * RETURN     : O (sensor readout) noise
10926 *
10927 *==========================================================================*/
10928double QCamera3HardwareInterface::computeNoiseModelEntryO(int32_t sens) {
10929 int32_t max_analog_sens = gCamCapability[mCameraId]->max_analog_sensitivity;
10930 double digital_gain = (1.0 * sens / max_analog_sens) < 1.0 ?
10931 1.0 : (1.0 * sens / max_analog_sens);
10932 double o = gCamCapability[mCameraId]->gradient_O * sens * sens +
10933 gCamCapability[mCameraId]->offset_O * digital_gain * digital_gain;
10934 return ((o < 0.0) ? 0.0 : o);
10935}
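
/* Worked example (hypothetical calibration values): with gradient_O = 4.0e-12,
 * offset_O = 1.0e-08 and max_analog_sensitivity = 800, a sensitivity of 1600
 * implies digital_gain = 1600 / 800 = 2.0, so
 *
 *   O = 4.0e-12 * 1600 * 1600 + 1.0e-08 * 2.0 * 2.0
 *     = 1.024e-05 + 4.0e-08 = 1.028e-05
 *
 * As with S, negative results are clamped to 0.0.
 */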
10936
10937/*===========================================================================
10938 * FUNCTION : getSensorSensitivity
10939 *
10940 * DESCRIPTION: convert iso_mode to an integer value
10941 *
10942 * PARAMETERS : iso_mode : the iso_mode supported by sensor
10943 *
10944 * RETURN     : sensitivity supported by sensor
10945 *
10946 *==========================================================================*/
10947int32_t QCamera3HardwareInterface::getSensorSensitivity(int32_t iso_mode)
10948{
10949 int32_t sensitivity;
10950
10951 switch (iso_mode) {
10952 case CAM_ISO_MODE_100:
10953 sensitivity = 100;
10954 break;
10955 case CAM_ISO_MODE_200:
10956 sensitivity = 200;
10957 break;
10958 case CAM_ISO_MODE_400:
10959 sensitivity = 400;
10960 break;
10961 case CAM_ISO_MODE_800:
10962 sensitivity = 800;
10963 break;
10964 case CAM_ISO_MODE_1600:
10965 sensitivity = 1600;
10966 break;
10967 default:
10968 sensitivity = -1;
10969 break;
10970 }
10971 return sensitivity;
10972}
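
/* Illustrative usage (not part of the build):
 *
 *   getSensorSensitivity(CAM_ISO_MODE_400);  // returns 400
 *   // Any iso_mode outside the fixed 100..1600 modes returns -1.
 */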
10973
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010974int QCamera3HardwareInterface::initHdrPlusClientLocked() {
Chien-Yu Chend77a5462017-06-02 18:00:38 -070010975 if (gEaselManagerClient == nullptr) {
10976 gEaselManagerClient = EaselManagerClient::create();
10977 if (gEaselManagerClient == nullptr) {
10978 ALOGE("%s: Failed to create Easel manager client.", __FUNCTION__);
10979 return -ENODEV;
10980 }
10981 }
10982
10983 if (!EaselManagerClientOpened && gEaselManagerClient->isEaselPresentOnDevice()) {
Chien-Yu Chen08309b32017-03-13 17:41:32 -070010984 // Check if HAL should not power on Easel even if it's present. This is to allow HDR+ tests
10985 // to connect to Easel.
10986        bool doNotPowerOnEasel =
10987                property_get_bool("camera.hdrplus.donotpoweroneasel", false);
10988
10989        if (doNotPowerOnEasel) {
Chien-Yu Chen08309b32017-03-13 17:41:32 -070010990 ALOGI("%s: Easel is present but not powered on.", __FUNCTION__);
10991 return OK;
10992 }
10993
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010994 // If Easel is present, power on Easel and suspend it immediately.
Chien-Yu Chend77a5462017-06-02 18:00:38 -070010995 status_t res = gEaselManagerClient->open();
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010996 if (res != OK) {
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070010997 ALOGE("%s: Opening Easel manager client failed: %s (%d)", __FUNCTION__, strerror(-res),
10998 res);
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010999 return res;
11000 }
11001
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070011002 EaselManagerClientOpened = true;
11003
Chien-Yu Chend77a5462017-06-02 18:00:38 -070011004 res = gEaselManagerClient->suspend();
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080011005 if (res != OK) {
11006 ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__, strerror(-res), res);
11007 }
11008
Chien-Yu Chen4d752e32017-06-07 12:13:24 -070011009 gEaselBypassOnly = !property_get_bool("persist.camera.hdrplus.enable", true);
Chien-Yu Chen509314b2017-04-07 15:27:55 -070011010 gEaselProfilingEnabled = property_get_bool("persist.camera.hdrplus.profiling", false);
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070011011 gEnableMultipleHdrplusOutputs =
11012 property_get_bool("persist.camera.hdrplus.multiple_outputs", false);
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070011013
11014 // Expose enableZsl key only when HDR+ mode is enabled.
11015 gExposeEnableZslKey = !gEaselBypassOnly;
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080011016 }
11017
11018 return OK;
11019}
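
/* Quick reference for the system properties read above (an illustrative
 * bring-up aid, not an exhaustive list). For example, forcing HDR+ bypass-only
 * mode can be done with:
 *
 *   adb shell setprop persist.camera.hdrplus.enable 0
 *
 * Other knobs consulted in initHdrPlusClientLocked():
 *   camera.hdrplus.donotpoweroneasel        - keep Easel powered off so
 *                                             external HDR+ tests can connect
 *   persist.camera.hdrplus.profiling        - enable HDR+ profiling
 *   persist.camera.hdrplus.multiple_outputs - allow multiple HDR+ outputs
 */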
11020
Thierry Strudel3d639192016-09-09 11:52:26 -070011021/*===========================================================================
11022 * FUNCTION : getCamInfo
11023 *
11024 * DESCRIPTION: query camera capabilities
11025 *
11026 * PARAMETERS :
11027 * @cameraId : camera Id
11028 * @info : camera info struct to be filled in with camera capabilities
11029 *
11030 * RETURN : int type of status
11031 * NO_ERROR -- success
11032 *              non-zero failure code
11033 *==========================================================================*/
11034int QCamera3HardwareInterface::getCamInfo(uint32_t cameraId,
11035 struct camera_info *info)
11036{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080011037 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_GET_CAM_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -070011038 int rc = 0;
11039
11040 pthread_mutex_lock(&gCamLock);
Zhijun Hea557c4c2017-03-16 18:37:53 -070011041
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070011042 {
Chien-Yu Chen77ccd022017-06-23 12:00:36 -070011043 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070011044 rc = initHdrPlusClientLocked();
11045 if (rc != OK) {
11046 ALOGE("%s: initHdrPlusClientLocked failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
11047 pthread_mutex_unlock(&gCamLock);
11048 return rc;
11049 }
Zhijun Hea557c4c2017-03-16 18:37:53 -070011050 }
11051
Thierry Strudel3d639192016-09-09 11:52:26 -070011052 if (NULL == gCamCapability[cameraId]) {
11053 rc = initCapabilities(cameraId);
11054 if (rc < 0) {
11055 pthread_mutex_unlock(&gCamLock);
11056 return rc;
11057 }
11058 }
11059
11060 if (NULL == gStaticMetadata[cameraId]) {
11061 rc = initStaticMetadata(cameraId);
11062 if (rc < 0) {
11063 pthread_mutex_unlock(&gCamLock);
11064 return rc;
11065 }
11066 }
11067
11068 switch(gCamCapability[cameraId]->position) {
11069 case CAM_POSITION_BACK:
11070 case CAM_POSITION_BACK_AUX:
11071 info->facing = CAMERA_FACING_BACK;
11072 break;
11073
11074 case CAM_POSITION_FRONT:
11075 case CAM_POSITION_FRONT_AUX:
11076 info->facing = CAMERA_FACING_FRONT;
11077 break;
11078
11079 default:
11080 LOGE("Unknown position type %d for camera id:%d",
11081 gCamCapability[cameraId]->position, cameraId);
11082 rc = -1;
11083 break;
11084 }
11085
11086
11087 info->orientation = (int)gCamCapability[cameraId]->sensor_mount_angle;
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011088#ifndef USE_HAL_3_3
11089 info->device_version = CAMERA_DEVICE_API_VERSION_3_4;
11090#else
Thierry Strudel3d639192016-09-09 11:52:26 -070011091 info->device_version = CAMERA_DEVICE_API_VERSION_3_3;
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011092#endif
Thierry Strudel3d639192016-09-09 11:52:26 -070011093 info->static_camera_characteristics = gStaticMetadata[cameraId];
11094
11095 //For now assume both cameras can operate independently.
11096 info->conflicting_devices = NULL;
11097 info->conflicting_devices_length = 0;
11098
11099 //resource cost is 100 * MIN(1.0, m/M),
11100 //where m is throughput requirement with maximum stream configuration
11101 //and M is CPP maximum throughput.
11102 float max_fps = 0.0;
11103 for (uint32_t i = 0;
11104 i < gCamCapability[cameraId]->fps_ranges_tbl_cnt; i++) {
11105 if (max_fps < gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps)
11106 max_fps = gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps;
11107 }
11108 float ratio = 1.0 * MAX_PROCESSED_STREAMS *
11109 gCamCapability[cameraId]->active_array_size.width *
11110 gCamCapability[cameraId]->active_array_size.height * max_fps /
11111 gCamCapability[cameraId]->max_pixel_bandwidth;
11112 info->resource_cost = 100 * MIN(1.0, ratio);
11113 LOGI("camera %d resource cost is %d", cameraId,
11114 info->resource_cost);
11115
11116 pthread_mutex_unlock(&gCamLock);
11117 return rc;
11118}
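
/* Worked example of the resource cost computed above (hypothetical numbers):
 * with MAX_PROCESSED_STREAMS = 3, a 4032x3024 active array, max_fps = 30 and
 * max_pixel_bandwidth = 1.2e9 pixels/s,
 *
 *   ratio         = 3 * 4032 * 3024 * 30 / 1.2e9 = 0.914
 *   resource_cost = 100 * MIN(1.0, 0.914)        = 91
 *
 * i.e. this camera would consume most, but not all, of the CPP throughput
 * budget.
 */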
11119
11120/*===========================================================================
11121 * FUNCTION : translateCapabilityToMetadata
11122 *
11123 * DESCRIPTION: translate the capability into camera_metadata_t
11124 *
11125 * PARAMETERS : type of the request
11126 *
11127 *
11128 * RETURN : success: camera_metadata_t*
11129 * failure: NULL
11130 *
11131 *==========================================================================*/
11132camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type)
11133{
11134 if (mDefaultMetadata[type] != NULL) {
11135 return mDefaultMetadata[type];
11136 }
11137 //first time we are handling this request
11138 //fill up the metadata structure using the wrapper class
11139 CameraMetadata settings;
11140 //translate from cam_capability_t to camera_metadata_tag_t
11141 static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
11142 settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
11143 int32_t defaultRequestID = 0;
11144 settings.update(ANDROID_REQUEST_ID, &defaultRequestID, 1);
11145
11146 /* OIS disable */
11147 char ois_prop[PROPERTY_VALUE_MAX];
11148 memset(ois_prop, 0, sizeof(ois_prop));
11149 property_get("persist.camera.ois.disable", ois_prop, "0");
11150 uint8_t ois_disable = (uint8_t)atoi(ois_prop);
11151
11152 /* Force video to use OIS */
11153 char videoOisProp[PROPERTY_VALUE_MAX];
11154 memset(videoOisProp, 0, sizeof(videoOisProp));
11155 property_get("persist.camera.ois.video", videoOisProp, "1");
11156 uint8_t forceVideoOis = (uint8_t)atoi(videoOisProp);
Shuzhen Wang19463d72016-03-08 11:09:52 -080011157
11158 // Hybrid AE enable/disable
11159 char hybrid_ae_prop[PROPERTY_VALUE_MAX];
11160 memset(hybrid_ae_prop, 0, sizeof(hybrid_ae_prop));
11161 property_get("persist.camera.hybrid_ae.enable", hybrid_ae_prop, "0");
Shuzhen Wang77b049a2017-08-30 12:24:36 -070011162 uint8_t hybrid_ae = (uint8_t)atoi(hybrid_ae_prop);
Shuzhen Wang19463d72016-03-08 11:09:52 -080011163
Thierry Strudel3d639192016-09-09 11:52:26 -070011164 uint8_t controlIntent = 0;
11165 uint8_t focusMode;
11166 uint8_t vsMode;
11167 uint8_t optStabMode;
11168 uint8_t cacMode;
11169 uint8_t edge_mode;
11170 uint8_t noise_red_mode;
11171 uint8_t tonemap_mode;
11172 bool highQualityModeEntryAvailable = FALSE;
11173 bool fastModeEntryAvailable = FALSE;
Shuzhen Wang14415f52016-11-16 18:26:18 -080011174 uint8_t histogramEnable = false;
Thierry Strudel3d639192016-09-09 11:52:26 -070011175 vsMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
11176 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Shuzhen Wang8f66c042016-08-17 14:50:26 -070011177 uint8_t shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
Shuzhen Wangcc386c52017-03-29 09:28:08 -070011178 uint8_t trackingAfTrigger = NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER_IDLE;
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070011179 uint8_t enableZsl = ANDROID_CONTROL_ENABLE_ZSL_FALSE;
Mansoor Aftabea39eba2017-01-26 14:58:25 -080011180
Thierry Strudel3d639192016-09-09 11:52:26 -070011181 switch (type) {
11182 case CAMERA3_TEMPLATE_PREVIEW:
11183 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
11184 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
11185 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11186 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11187 edge_mode = ANDROID_EDGE_MODE_FAST;
11188 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11189 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11190 break;
11191 case CAMERA3_TEMPLATE_STILL_CAPTURE:
11192 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
11193 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
11194 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11195 edge_mode = ANDROID_EDGE_MODE_HIGH_QUALITY;
11196 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY;
11197 tonemap_mode = ANDROID_TONEMAP_MODE_HIGH_QUALITY;
11198 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
11199 // Order of priority for default CAC is HIGH Quality -> FAST -> OFF
11200 for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
11201 if (gCamCapability[mCameraId]->aberration_modes[i] ==
11202 CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
11203 highQualityModeEntryAvailable = TRUE;
11204 } else if (gCamCapability[mCameraId]->aberration_modes[i] ==
11205 CAM_COLOR_CORRECTION_ABERRATION_FAST) {
11206 fastModeEntryAvailable = TRUE;
11207 }
11208 }
11209 if (highQualityModeEntryAvailable) {
11210 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY;
11211 } else if (fastModeEntryAvailable) {
11212 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11213 }
Shuzhen Wang8f66c042016-08-17 14:50:26 -070011214 if (CAM_SENSOR_RAW == gCamCapability[mCameraId]->sensor_type.sens_type) {
11215 shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON;
11216 }
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070011217 enableZsl = ANDROID_CONTROL_ENABLE_ZSL_TRUE;
Thierry Strudel3d639192016-09-09 11:52:26 -070011218 break;
11219 case CAMERA3_TEMPLATE_VIDEO_RECORD:
11220 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
11221 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
11222 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Thierry Strudel3d639192016-09-09 11:52:26 -070011223 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11224 edge_mode = ANDROID_EDGE_MODE_FAST;
11225 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11226 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11227 if (forceVideoOis)
11228 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11229 break;
11230 case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
11231 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
11232 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
11233 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Thierry Strudel3d639192016-09-09 11:52:26 -070011234 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11235 edge_mode = ANDROID_EDGE_MODE_FAST;
11236 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11237 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11238 if (forceVideoOis)
11239 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11240 break;
11241 case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
11242 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
11243 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
11244 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11245 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11246 edge_mode = ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG;
11247 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG;
11248 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11249 break;
11250 case CAMERA3_TEMPLATE_MANUAL:
11251 edge_mode = ANDROID_EDGE_MODE_FAST;
11252 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11253 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11254 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11255 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_MANUAL;
11256 focusMode = ANDROID_CONTROL_AF_MODE_OFF;
11257 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
11258 break;
11259 default:
11260 edge_mode = ANDROID_EDGE_MODE_FAST;
11261 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11262 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11263 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11264 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
11265 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
11266 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
11267 break;
11268 }
Thierry Strudel04e026f2016-10-10 11:27:36 -070011269    // Set CAC to OFF if the underlying device doesn't support it
11270 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
11271 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
11272 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011273 settings.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &cacMode, 1);
11274 settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
11275 settings.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vsMode, 1);
11276 if (gCamCapability[mCameraId]->supported_focus_modes_cnt == 1) {
11277 focusMode = ANDROID_CONTROL_AF_MODE_OFF;
11278 }
11279 settings.update(ANDROID_CONTROL_AF_MODE, &focusMode, 1);
Shuzhen Wang14415f52016-11-16 18:26:18 -080011280 settings.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE, &histogramEnable, 1);
Shuzhen Wangcc386c52017-03-29 09:28:08 -070011281 settings.update(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER, &trackingAfTrigger, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011282
11283 if (gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
11284 gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_ON)
11285 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11286 else if ((gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
11287 gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_OFF)
11288 || ois_disable)
11289 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
11290 settings.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &optStabMode, 1);
Shuzhen Wang8f66c042016-08-17 14:50:26 -070011291 settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &shadingmap_mode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011292
11293 settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
11294 &gCamCapability[mCameraId]->exposure_compensation_default, 1);
11295
11296 static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
11297 settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
11298
11299 static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
11300 settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
11301
11302 static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
11303 settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
11304
11305 static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
11306 settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
11307
11308 static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
11309 settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
11310
11311 static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;
11312 settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
11313
11314 static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
11315 settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
11316
11317 /*flash*/
11318 static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
11319 settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
11320
11321 static const uint8_t flashFiringLevel = CAM_FLASH_FIRING_LEVEL_4;
11322 settings.update(ANDROID_FLASH_FIRING_POWER,
11323 &flashFiringLevel, 1);
11324
11325 /* lens */
11326 float default_aperture = gCamCapability[mCameraId]->apertures[0];
11327 settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1);
11328
11329 if (gCamCapability[mCameraId]->filter_densities_count) {
11330 float default_filter_density = gCamCapability[mCameraId]->filter_densities[0];
11331 settings.update(ANDROID_LENS_FILTER_DENSITY, &default_filter_density,
11332 gCamCapability[mCameraId]->filter_densities_count);
11333 }
11334
11335 float default_focal_length = gCamCapability[mCameraId]->focal_length;
11336 settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1);
11337
Thierry Strudel3d639192016-09-09 11:52:26 -070011338 static const uint8_t demosaicMode = ANDROID_DEMOSAIC_MODE_FAST;
11339 settings.update(ANDROID_DEMOSAIC_MODE, &demosaicMode, 1);
11340
11341 static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
11342 settings.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
11343
11344 static const int32_t testpatternMode = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;
11345 settings.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &testpatternMode, 1);
11346
11347 /* face detection (default to OFF) */
11348 static const uint8_t faceDetectMode = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
11349 settings.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &faceDetectMode, 1);
11350
Thierry Strudel54dc9782017-02-15 12:12:10 -080011351 static const uint8_t histogramMode = QCAMERA3_HISTOGRAM_MODE_OFF;
11352 settings.update(QCAMERA3_HISTOGRAM_MODE, &histogramMode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011353
11354 static const uint8_t sharpnessMapMode = ANDROID_STATISTICS_SHARPNESS_MAP_MODE_OFF;
11355 settings.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &sharpnessMapMode, 1);
11356
11357 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
11358 settings.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
11359
Thierry Strudel3d639192016-09-09 11:52:26 -070011360
11361 static const uint8_t blackLevelLock = ANDROID_BLACK_LEVEL_LOCK_OFF;
11362 settings.update(ANDROID_BLACK_LEVEL_LOCK, &blackLevelLock, 1);
11363
11364 /* Exposure time(Update the Min Exposure Time)*/
11365 int64_t default_exposure_time = gCamCapability[mCameraId]->exposure_time_range[0];
11366 settings.update(ANDROID_SENSOR_EXPOSURE_TIME, &default_exposure_time, 1);
11367
11368 /* frame duration */
11369 static const int64_t default_frame_duration = NSEC_PER_33MSEC;
11370 settings.update(ANDROID_SENSOR_FRAME_DURATION, &default_frame_duration, 1);
11371
11372 /* sensitivity */
11373 static const int32_t default_sensitivity = 100;
11374 settings.update(ANDROID_SENSOR_SENSITIVITY, &default_sensitivity, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011375#ifndef USE_HAL_3_3
11376 static const int32_t default_isp_sensitivity =
11377 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
11378 settings.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &default_isp_sensitivity, 1);
11379#endif
Thierry Strudel3d639192016-09-09 11:52:26 -070011380
11381 /*edge mode*/
11382 settings.update(ANDROID_EDGE_MODE, &edge_mode, 1);
11383
11384 /*noise reduction mode*/
11385 settings.update(ANDROID_NOISE_REDUCTION_MODE, &noise_red_mode, 1);
11386
11387 /*color correction mode*/
11388 static const uint8_t color_correct_mode = ANDROID_COLOR_CORRECTION_MODE_FAST;
11389 settings.update(ANDROID_COLOR_CORRECTION_MODE, &color_correct_mode, 1);
11390
11391 /*transform matrix mode*/
11392 settings.update(ANDROID_TONEMAP_MODE, &tonemap_mode, 1);
11393
11394 int32_t scaler_crop_region[4];
11395 scaler_crop_region[0] = 0;
11396 scaler_crop_region[1] = 0;
11397 scaler_crop_region[2] = gCamCapability[mCameraId]->active_array_size.width;
11398 scaler_crop_region[3] = gCamCapability[mCameraId]->active_array_size.height;
11399 settings.update(ANDROID_SCALER_CROP_REGION, scaler_crop_region, 4);
11400
11401 static const uint8_t antibanding_mode = ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
11402 settings.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &antibanding_mode, 1);
11403
11404 /*focus distance*/
11405 float focus_distance = 0.0;
11406 settings.update(ANDROID_LENS_FOCUS_DISTANCE, &focus_distance, 1);
11407
11408 /*target fps range: use maximum range for picture, and maximum fixed range for video*/
Thierry Strudele80ad7c2016-12-06 10:16:27 -080011409 /* Restrict template max_fps to 30 */
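    /* For PREVIEW / STILL_CAPTURE / ZSL templates pick the widest advertised
     * range (largest max - min) whose max fps does not exceed
     * TEMPLATE_MAX_PREVIEW_FPS; for all other templates pick the highest
     * fixed-rate range (min == max). */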
Thierry Strudel3d639192016-09-09 11:52:26 -070011410 float max_range = 0.0;
11411 float max_fixed_fps = 0.0;
11412 int32_t fps_range[2] = {0, 0};
11413 for (uint32_t i = 0; i < gCamCapability[mCameraId]->fps_ranges_tbl_cnt;
11414 i++) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -080011415 if (gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps >
11416 TEMPLATE_MAX_PREVIEW_FPS) {
11417 continue;
11418 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011419 float range = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps -
11420 gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
11421 if (type == CAMERA3_TEMPLATE_PREVIEW ||
11422 type == CAMERA3_TEMPLATE_STILL_CAPTURE ||
11423 type == CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG) {
11424 if (range > max_range) {
11425 fps_range[0] =
11426 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
11427 fps_range[1] =
11428 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
11429 max_range = range;
11430 }
11431 } else {
11432 if (range < 0.01 && max_fixed_fps <
11433 gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps) {
11434 fps_range[0] =
11435 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
11436 fps_range[1] =
11437 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
11438 max_fixed_fps = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
11439 }
11440 }
11441 }
11442 settings.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, fps_range, 2);
11443
11444 /*precapture trigger*/
11445 uint8_t precapture_trigger = ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE;
11446 settings.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, &precapture_trigger, 1);
11447
11448 /*af trigger*/
11449 uint8_t af_trigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
11450 settings.update(ANDROID_CONTROL_AF_TRIGGER, &af_trigger, 1);
11451
11452 /* ae & af regions */
11453 int32_t active_region[] = {
11454 gCamCapability[mCameraId]->active_array_size.left,
11455 gCamCapability[mCameraId]->active_array_size.top,
11456 gCamCapability[mCameraId]->active_array_size.left +
11457 gCamCapability[mCameraId]->active_array_size.width,
11458 gCamCapability[mCameraId]->active_array_size.top +
11459 gCamCapability[mCameraId]->active_array_size.height,
11460 0};
11461 settings.update(ANDROID_CONTROL_AE_REGIONS, active_region,
11462 sizeof(active_region) / sizeof(active_region[0]));
11463 settings.update(ANDROID_CONTROL_AF_REGIONS, active_region,
11464 sizeof(active_region) / sizeof(active_region[0]));
11465
11466 /* black level lock */
11467 uint8_t blacklevel_lock = ANDROID_BLACK_LEVEL_LOCK_OFF;
11468 settings.update(ANDROID_BLACK_LEVEL_LOCK, &blacklevel_lock, 1);
11469
Thierry Strudel3d639192016-09-09 11:52:26 -070011470 //special defaults for manual template
11471 if (type == CAMERA3_TEMPLATE_MANUAL) {
11472 static const uint8_t manualControlMode = ANDROID_CONTROL_MODE_OFF;
11473 settings.update(ANDROID_CONTROL_MODE, &manualControlMode, 1);
11474
11475 static const uint8_t manualFocusMode = ANDROID_CONTROL_AF_MODE_OFF;
11476 settings.update(ANDROID_CONTROL_AF_MODE, &manualFocusMode, 1);
11477
11478 static const uint8_t manualAeMode = ANDROID_CONTROL_AE_MODE_OFF;
11479 settings.update(ANDROID_CONTROL_AE_MODE, &manualAeMode, 1);
11480
11481 static const uint8_t manualAwbMode = ANDROID_CONTROL_AWB_MODE_OFF;
11482 settings.update(ANDROID_CONTROL_AWB_MODE, &manualAwbMode, 1);
11483
11484 static const uint8_t manualTonemapMode = ANDROID_TONEMAP_MODE_FAST;
11485 settings.update(ANDROID_TONEMAP_MODE, &manualTonemapMode, 1);
11486
11487 static const uint8_t manualColorCorrectMode = ANDROID_COLOR_CORRECTION_MODE_TRANSFORM_MATRIX;
11488 settings.update(ANDROID_COLOR_CORRECTION_MODE, &manualColorCorrectMode, 1);
11489 }
11490
11491
11492 /* TNR
11493 * This is where we decide for which templates TNR will be enabled.
11494 * TNR is turned on if either the preview or the video stream requires it.
11495 * This is not to be confused with per-stream linking; that decision is
11496 * still made on a per-session basis and is handled as part of stream configuration.
11497 */
11498 uint8_t tnr_enable = 0;
11499
11500 if (m_bTnrPreview || m_bTnrVideo) {
11501
11502 switch (type) {
11503 case CAMERA3_TEMPLATE_VIDEO_RECORD:
11504 tnr_enable = 1;
11505 break;
11506
11507 default:
11508 tnr_enable = 0;
11509 break;
11510 }
11511
11512 int32_t tnr_process_type = (int32_t)getTemporalDenoiseProcessPlate();
11513 settings.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
11514 settings.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
11515
11516 LOGD("TNR:%d with process plate %d for template:%d",
11517 tnr_enable, tnr_process_type, type);
11518 }
11519
11520 //Update Link tags to default
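    // The defaults describe a standalone (non-linked) sensor: link disabled,
    // reported as the main camera, and the related camera id pointing to itself.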
Shuzhen Wang920ea402017-05-03 08:49:39 -070011521 uint8_t sync_type = CAM_TYPE_STANDALONE;
Thierry Strudel3d639192016-09-09 11:52:26 -070011522 settings.update(QCAMERA3_DUALCAM_LINK_ENABLE, &sync_type, 1);
11523
Chien-Yu Chena3bbdc02017-05-05 11:31:47 -070011524 uint8_t is_main = 1;
Thierry Strudel3d639192016-09-09 11:52:26 -070011525 settings.update(QCAMERA3_DUALCAM_LINK_IS_MAIN, &is_main, 1);
11526
Shuzhen Wang920ea402017-05-03 08:49:39 -070011527 uint8_t related_camera_id = mCameraId;
11528 settings.update(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID, &related_camera_id, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011529
11530 /* CDS default */
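    // The default CDS mode is taken from the persist.camera.CDS property and
    // falls back to AUTO when the value is missing or unrecognized; it is forced
    // OFF below for templates that enable TNR.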
11531 char prop[PROPERTY_VALUE_MAX];
11532 memset(prop, 0, sizeof(prop));
11533 property_get("persist.camera.CDS", prop, "Auto");
11534 cam_cds_mode_type_t cds_mode = CAM_CDS_MODE_AUTO;
11535 cds_mode = lookupProp(CDS_MAP, METADATA_MAP_SIZE(CDS_MAP), prop);
11536 if (CAM_CDS_MODE_MAX == cds_mode) {
11537 cds_mode = CAM_CDS_MODE_AUTO;
11538 }
11539
11540 /* Disabling CDS in templates which have TNR enabled*/
11541 if (tnr_enable)
11542 cds_mode = CAM_CDS_MODE_OFF;
11543
11544 int32_t mode = cds_mode;
11545 settings.update(QCAMERA3_CDS_MODE, &mode, 1);
Thierry Strudel04e026f2016-10-10 11:27:36 -070011546
Thierry Strudel269c81a2016-10-12 12:13:59 -070011547 /* Manual Convergence AEC Speed is disabled by default*/
11548 float default_aec_speed = 0;
11549 settings.update(QCAMERA3_AEC_CONVERGENCE_SPEED, &default_aec_speed, 1);
11550
11551 /* Manual Convergence AWB Speed is disabled by default*/
11552 float default_awb_speed = 0;
11553 settings.update(QCAMERA3_AWB_CONVERGENCE_SPEED, &default_awb_speed, 1);
11554
Thierry Strudel295a0ca2016-11-03 18:38:47 -070011555 // Set instant AEC to normal convergence by default
11556 int32_t instant_aec_mode = (int32_t)QCAMERA3_INSTANT_AEC_NORMAL_CONVERGENCE;
11557 settings.update(QCAMERA3_INSTANT_AEC_MODE, &instant_aec_mode, 1);
11558
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070011559 if (gExposeEnableZslKey) {
11560 settings.update(ANDROID_CONTROL_ENABLE_ZSL, &enableZsl, 1);
Chien-Yu Chen0a921f92017-08-27 17:25:33 -070011561 int32_t postview = 0;
11562 settings.update(NEXUS_EXPERIMENTAL_2017_POSTVIEW, &postview, 1);
Chien-Yu Chenb0981e32017-08-28 19:27:35 -070011563 int32_t continuousZslCapture = 0;
11564 settings.update(NEXUS_EXPERIMENTAL_2017_CONTINUOUS_ZSL_CAPTURE, &continuousZslCapture, 1);
Chien-Yu Chenec328c82017-08-30 16:41:08 -070011565 // Disable HDR+ for templates other than CAMERA3_TEMPLATE_STILL_CAPTURE.
11566 int32_t disableHdrplus = (type == CAMERA3_TEMPLATE_STILL_CAPTURE) ? 0 : 1;
11567 settings.update(NEXUS_EXPERIMENTAL_2017_DISABLE_HDRPLUS, &disableHdrplus, 1);
11568
Shuzhen Wang77b049a2017-08-30 12:24:36 -070011569 // Set hybrid_ae tag in PREVIEW and STILL_CAPTURE templates to 1 so that
11570 // hybrid ae is enabled for 3rd party app HDR+.
11571 if (type == CAMERA3_TEMPLATE_PREVIEW ||
11572 type == CAMERA3_TEMPLATE_STILL_CAPTURE) {
11573 hybrid_ae = 1;
11574 }
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070011575 }
Shuzhen Wang77b049a2017-08-30 12:24:36 -070011576 /* hybrid ae */
11577 settings.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &hybrid_ae, 1);
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070011578
Thierry Strudel3d639192016-09-09 11:52:26 -070011579 mDefaultMetadata[type] = settings.release();
11580
11581 return mDefaultMetadata[type];
11582}
11583
11584/*===========================================================================
Emilian Peev30522a12017-08-03 14:36:33 +010011585 * FUNCTION : getExpectedFrameDuration
11586 *
11587 * DESCRIPTION: Extract the maximum frame duration from either exposure or frame
11588 * duration
11589 *
11590 * PARAMETERS :
11591 * @request : request settings
11592 * @frameDuration : The maximum frame duration in nanoseconds
11593 *
11594 * RETURN : None
11595 *==========================================================================*/
11596void QCamera3HardwareInterface::getExpectedFrameDuration(
11597 const camera_metadata_t *request, nsecs_t *frameDuration /*out*/) {
11598 if (nullptr == frameDuration) {
11599 return;
11600 }
11601
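    // The expected duration is the larger of the requested exposure time and
    // frame duration, whichever of the two tags is present in the request.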
11602 camera_metadata_ro_entry_t e = camera_metadata_ro_entry_t();
11603 find_camera_metadata_ro_entry(request,
11604 ANDROID_SENSOR_EXPOSURE_TIME,
11605 &e);
11606 if (e.count > 0) {
11607 *frameDuration = e.data.i64[0];
11608 }
11609 find_camera_metadata_ro_entry(request,
11610 ANDROID_SENSOR_FRAME_DURATION,
11611 &e);
11612 if (e.count > 0) {
11613 *frameDuration = std::max(e.data.i64[0], *frameDuration);
11614 }
11615}
11616
11617/*===========================================================================
11618 * FUNCTION : calculateMaxExpectedDuration
11619 *
11620 * DESCRIPTION: Calculate the expected frame duration in nanoseconds given the
11621 * current camera settings.
11622 *
11623 * PARAMETERS :
11624 * @request : request settings
11625 *
11626 * RETURN : Expected frame duration in nanoseconds.
11627 *==========================================================================*/
11628nsecs_t QCamera3HardwareInterface::calculateMaxExpectedDuration(
11629 const camera_metadata_t *request) {
11630 nsecs_t maxExpectedDuration = kDefaultExpectedDuration;
11631 camera_metadata_ro_entry_t e = camera_metadata_ro_entry_t();
11632 find_camera_metadata_ro_entry(request, ANDROID_CONTROL_MODE, &e);
11633 if (e.count == 0) {
11634 return maxExpectedDuration;
11635 }
11636
11637 if (e.data.u8[0] == ANDROID_CONTROL_MODE_OFF) {
11638 getExpectedFrameDuration(request, &maxExpectedDuration /*out*/);
11639 }
11640
11641 if (e.data.u8[0] != ANDROID_CONTROL_MODE_AUTO) {
11642 return maxExpectedDuration;
11643 }
11644
11645 find_camera_metadata_ro_entry(request, ANDROID_CONTROL_AE_MODE, &e);
11646 if (e.count == 0) {
11647 return maxExpectedDuration;
11648 }
11649
11650 switch (e.data.u8[0]) {
11651 case ANDROID_CONTROL_AE_MODE_OFF:
11652 getExpectedFrameDuration(request, &maxExpectedDuration /*out*/);
11653 break;
11654 default:
11655 find_camera_metadata_ro_entry(request,
11656 ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
11657 &e);
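            // ANDROID_CONTROL_AE_TARGET_FPS_RANGE is an int32 [min, max] pair;
            // the longest expected frame duration follows from the minimum fps
            // (e.g. a [15, 30] range would give 1e9 / 15 ns, roughly 66.7 ms).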
11658 if (e.count > 1) {
11659 maxExpectedDuration = 1e9 / e.data.i32[0];
11660 }
11661 break;
11662 }
11663
11664 return maxExpectedDuration;
11665}
11666
11667/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070011668 * FUNCTION : setFrameParameters
11669 *
11670 * DESCRIPTION: set parameters per frame as requested in the metadata from
11671 * framework
11672 *
11673 * PARAMETERS :
11674 * @request : request that needs to be serviced
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011675 * @streamsArray : Stream ID of all the requested streams
Thierry Strudel3d639192016-09-09 11:52:26 -070011676 * @blob_request: Whether this request is a blob request or not
11677 *
11678 * RETURN : success: NO_ERROR
11679 * failure:
11680 *==========================================================================*/
11681int QCamera3HardwareInterface::setFrameParameters(
11682 camera3_capture_request_t *request,
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011683 cam_stream_ID_t streamsArray,
Thierry Strudel3d639192016-09-09 11:52:26 -070011684 int blob_request,
11685 uint32_t snapshotStreamId)
11686{
11687 /*translate from camera_metadata_t type to parm_type_t*/
11688 int rc = 0;
11689 int32_t hal_version = CAM_HAL_V3;
11690
11691 clear_metadata_buffer(mParameters);
11692 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version)) {
11693 LOGE("Failed to set hal version in the parameters");
11694 return BAD_VALUE;
11695 }
11696
11697 /*we need to update the frame number in the parameters*/
11698 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_FRAME_NUMBER,
11699 request->frame_number)) {
11700 LOGE("Failed to set the frame number in the parameters");
11701 return BAD_VALUE;
11702 }
11703
11704 /* Update stream id of all the requested buffers */
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011705 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID, streamsArray)) {
Thierry Strudel3d639192016-09-09 11:52:26 -070011706 LOGE("Failed to set stream type mask in the parameters");
11707 return BAD_VALUE;
11708 }
11709
11710 if (mUpdateDebugLevel) {
11711 uint32_t dummyDebugLevel = 0;
11712 /* The value of dummyDebugLevel is irrelevant. On
11713 * CAM_INTF_PARM_UPDATE_DEBUG_LEVEL the debug property is read again. */
11714 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_UPDATE_DEBUG_LEVEL,
11715 dummyDebugLevel)) {
11716 LOGE("Failed to set UPDATE_DEBUG_LEVEL");
11717 return BAD_VALUE;
11718 }
11719 mUpdateDebugLevel = false;
11720 }
11721
11722 if(request->settings != NULL){
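        // Cache the worst-case frame duration implied by the incoming settings
        // before translating them to HAL metadata.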
Emilian Peev30522a12017-08-03 14:36:33 +010011723 mExpectedFrameDuration = calculateMaxExpectedDuration(request->settings);
Thierry Strudel3d639192016-09-09 11:52:26 -070011724 rc = translateToHalMetadata(request, mParameters, snapshotStreamId);
11725 if (blob_request)
11726 memcpy(mPrevParameters, mParameters, sizeof(metadata_buffer_t));
11727 }
11728
11729 return rc;
11730}
11731
11732/*===========================================================================
11733 * FUNCTION : setReprocParameters
11734 *
11735 * DESCRIPTION: Translate frameworks metadata to HAL metadata structure, and
11736 * return it.
11737 *
11738 * PARAMETERS :
11739 * @request : request that needs to be serviced
11740 *
11741 * RETURN : success: NO_ERROR
11742 * failure:
11743 *==========================================================================*/
11744int32_t QCamera3HardwareInterface::setReprocParameters(
11745 camera3_capture_request_t *request, metadata_buffer_t *reprocParam,
11746 uint32_t snapshotStreamId)
11747{
11748 /*translate from camera_metadata_t type to parm_type_t*/
11749 int rc = 0;
11750
11751 if (NULL == request->settings){
11752 LOGE("Reprocess settings cannot be NULL");
11753 return BAD_VALUE;
11754 }
11755
11756 if (NULL == reprocParam) {
11757 LOGE("Invalid reprocessing metadata buffer");
11758 return BAD_VALUE;
11759 }
11760 clear_metadata_buffer(reprocParam);
11761
11762 /*we need to update the frame number in the parameters*/
11763 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FRAME_NUMBER,
11764 request->frame_number)) {
11765 LOGE("Failed to set the frame number in the parameters");
11766 return BAD_VALUE;
11767 }
11768
11769 rc = translateToHalMetadata(request, reprocParam, snapshotStreamId);
11770 if (rc < 0) {
11771 LOGE("Failed to translate reproc request");
11772 return rc;
11773 }
11774
11775 CameraMetadata frame_settings;
11776 frame_settings = request->settings;
11777 if (frame_settings.exists(QCAMERA3_CROP_COUNT_REPROCESS) &&
11778 frame_settings.exists(QCAMERA3_CROP_REPROCESS)) {
11779 int32_t *crop_count =
11780 frame_settings.find(QCAMERA3_CROP_COUNT_REPROCESS).data.i32;
11781 int32_t *crop_data =
11782 frame_settings.find(QCAMERA3_CROP_REPROCESS).data.i32;
11783 int32_t *roi_map =
11784 frame_settings.find(QCAMERA3_CROP_ROI_MAP_REPROCESS).data.i32;
11785 if ((0 < *crop_count) && (*crop_count < MAX_NUM_STREAMS)) {
11786 cam_crop_data_t crop_meta;
11787 memset(&crop_meta, 0, sizeof(cam_crop_data_t));
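            // Reprocess carries crop info for a single stream, so only index 0 of
            // the crop and ROI-map vendor tag arrays is consumed here.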
11788 crop_meta.num_of_streams = 1;
11789 crop_meta.crop_info[0].crop.left = crop_data[0];
11790 crop_meta.crop_info[0].crop.top = crop_data[1];
11791 crop_meta.crop_info[0].crop.width = crop_data[2];
11792 crop_meta.crop_info[0].crop.height = crop_data[3];
11793
11794 crop_meta.crop_info[0].roi_map.left =
11795 roi_map[0];
11796 crop_meta.crop_info[0].roi_map.top =
11797 roi_map[1];
11798 crop_meta.crop_info[0].roi_map.width =
11799 roi_map[2];
11800 crop_meta.crop_info[0].roi_map.height =
11801 roi_map[3];
11802
11803 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_CROP_DATA, crop_meta)) {
11804 rc = BAD_VALUE;
11805 }
11806 LOGD("Found reprocess crop data for stream %p %dx%d, %dx%d",
11807 request->input_buffer->stream,
11808 crop_meta.crop_info[0].crop.left,
11809 crop_meta.crop_info[0].crop.top,
11810 crop_meta.crop_info[0].crop.width,
11811 crop_meta.crop_info[0].crop.height);
11812 LOGD("Found reprocess roi map data for stream %p %dx%d, %dx%d",
11813 request->input_buffer->stream,
11814 crop_meta.crop_info[0].roi_map.left,
11815 crop_meta.crop_info[0].roi_map.top,
11816 crop_meta.crop_info[0].roi_map.width,
11817 crop_meta.crop_info[0].roi_map.height);
11818 } else {
11819 LOGE("Invalid reprocess crop count %d!", *crop_count);
11820 }
11821 } else {
11822 LOGE("No crop data from matching output stream");
11823 }
11824
11825 /* These settings are not needed for regular requests so handle them specially for
11826 reprocess requests; information needed for EXIF tags */
11827 if (frame_settings.exists(ANDROID_FLASH_MODE)) {
11828 int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
11829 (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
11830 if (NAME_NOT_FOUND != val) {
11831 uint32_t flashMode = (uint32_t)val;
11832 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_MODE, flashMode)) {
11833 rc = BAD_VALUE;
11834 }
11835 } else {
11836 LOGE("Could not map fwk flash mode %d to correct hal flash mode",
11837 frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
11838 }
11839 } else {
11840 LOGH("No flash mode in reprocess settings");
11841 }
11842
11843 if (frame_settings.exists(ANDROID_FLASH_STATE)) {
11844 int32_t flashState = (int32_t)frame_settings.find(ANDROID_FLASH_STATE).data.u8[0];
11845 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_STATE, flashState)) {
11846 rc = BAD_VALUE;
11847 }
11848 } else {
11849 LOGH("No flash state in reprocess settings");
11850 }
11851
11852 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS)) {
11853 uint8_t *reprocessFlags =
11854 frame_settings.find(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS).data.u8;
11855 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_REPROCESS_FLAGS,
11856 *reprocessFlags)) {
11857 rc = BAD_VALUE;
11858 }
11859 }
11860
Thierry Strudel54dc9782017-02-15 12:12:10 -080011861 // Add exif debug data to internal metadata
11862 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB)) {
11863 mm_jpeg_debug_exif_params_t *debug_params =
11864 (mm_jpeg_debug_exif_params_t *)frame_settings.find
11865 (QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB).data.u8;
11866 // AE
11867 if (debug_params->ae_debug_params_valid == TRUE) {
11868 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AE,
11869 debug_params->ae_debug_params);
11870 }
11871 // AWB
11872 if (debug_params->awb_debug_params_valid == TRUE) {
11873 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AWB,
11874 debug_params->awb_debug_params);
11875 }
11876 // AF
11877 if (debug_params->af_debug_params_valid == TRUE) {
11878 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AF,
11879 debug_params->af_debug_params);
11880 }
11881 // ASD
11882 if (debug_params->asd_debug_params_valid == TRUE) {
11883 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_ASD,
11884 debug_params->asd_debug_params);
11885 }
11886 // Stats
11887 if (debug_params->stats_debug_params_valid == TRUE) {
11888 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_STATS,
11889 debug_params->stats_debug_params);
11890 }
11891 // BE Stats
11892 if (debug_params->bestats_debug_params_valid == TRUE) {
11893 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_BESTATS,
11894 debug_params->bestats_debug_params);
11895 }
11896 // BHIST
11897 if (debug_params->bhist_debug_params_valid == TRUE) {
11898 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_BHIST,
11899 debug_params->bhist_debug_params);
11900 }
11901 // 3A Tuning
11902 if (debug_params->q3a_tuning_debug_params_valid == TRUE) {
11903 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_3A_TUNING,
11904 debug_params->q3a_tuning_debug_params);
11905 }
11906 }
11907
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011908 // Add metadata which reprocess needs
11909 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB)) {
11910 cam_reprocess_info_t *repro_info =
11911 (cam_reprocess_info_t *)frame_settings.find
11912 (QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB).data.u8;
Thierry Strudel3d639192016-09-09 11:52:26 -070011913 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_SENSOR,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011914 repro_info->sensor_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011915 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CAMIF,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011916 repro_info->camif_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011917 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_ISP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011918 repro_info->isp_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011919 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CPP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011920 repro_info->cpp_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011921 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_AF_FOCAL_LENGTH_RATIO,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011922 repro_info->af_focal_length_ratio);
Thierry Strudel3d639192016-09-09 11:52:26 -070011923 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_FLIP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011924 repro_info->pipeline_flip);
11925 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_AF_ROI,
11926 repro_info->af_roi);
11927 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_IMG_DYN_FEAT,
11928 repro_info->dyn_mask);
Thierry Strudel3d639192016-09-09 11:52:26 -070011929 /* If there is ANDROID_JPEG_ORIENTATION in frame setting,
11930 CAM_INTF_PARM_ROTATION has already been added in
11931 translateToHalMetadata and the HAL needs to keep that new rotation
11932 metadata. Otherwise, the old rotation info saved in the vendor tag
11933 is used. */
11934 IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
11935 CAM_INTF_PARM_ROTATION, reprocParam) {
11936 LOGD("CAM_INTF_PARM_ROTATION metadata is added in translateToHalMetadata");
11937 } else {
11938 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_ROTATION,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011939 repro_info->rotation_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011940 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011941 }
11942
11943 /* Add additional JPEG cropping information. The app sets QCAMERA3_JPEG_ENCODE_CROP_RECT
11944 to request cropping and uses the ROI for downscale/upscale during HW JPEG encoding.
11945 roi.width and roi.height give the final JPEG size.
11946 For now, the HAL only checks this for reprocess requests. */
11947 if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ENABLE) &&
11948 frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_RECT)) {
11949 uint8_t *enable =
11950 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ENABLE).data.u8;
11951 if (*enable == TRUE) {
11952 int32_t *crop_data =
11953 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_RECT).data.i32;
11954 cam_stream_crop_info_t crop_meta;
11955 memset(&crop_meta, 0, sizeof(cam_stream_crop_info_t));
11956 crop_meta.stream_id = 0;
11957 crop_meta.crop.left = crop_data[0];
11958 crop_meta.crop.top = crop_data[1];
11959 crop_meta.crop.width = crop_data[2];
11960 crop_meta.crop.height = crop_data[3];
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011961 // The JPEG crop roi should match cpp output size
11962 IF_META_AVAILABLE(cam_stream_crop_info_t, cpp_crop,
11963 CAM_INTF_META_SNAP_CROP_INFO_CPP, reprocParam) {
11964 crop_meta.roi_map.left = 0;
11965 crop_meta.roi_map.top = 0;
11966 crop_meta.roi_map.width = cpp_crop->crop.width;
11967 crop_meta.roi_map.height = cpp_crop->crop.height;
Thierry Strudel3d639192016-09-09 11:52:26 -070011968 }
11969 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_JPEG_ENCODE_CROP,
11970 crop_meta);
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011971 LOGH("Add JPEG encode crop left %d, top %d, width %d, height %d, mCameraId %d",
Thierry Strudel3d639192016-09-09 11:52:26 -070011972 crop_meta.crop.left, crop_meta.crop.top,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011973 crop_meta.crop.width, crop_meta.crop.height, mCameraId);
11974 LOGH("Add JPEG encode crop ROI left %d, top %d, width %d, height %d, mCameraId %d",
Thierry Strudel3d639192016-09-09 11:52:26 -070011975 crop_meta.roi_map.left, crop_meta.roi_map.top,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011976 crop_meta.roi_map.width, crop_meta.roi_map.height, mCameraId);
11977
11978 // Add JPEG scale information
11979 cam_dimension_t scale_dim;
11980 memset(&scale_dim, 0, sizeof(cam_dimension_t));
11981 if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ROI)) {
11982 int32_t *roi =
11983 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ROI).data.i32;
11984 scale_dim.width = roi[2];
11985 scale_dim.height = roi[3];
11986 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_JPEG_SCALE_DIMENSION,
11987 scale_dim);
11988 LOGH("Add JPEG encode scale width %d, height %d, mCameraId %d",
11989 scale_dim.width, scale_dim.height, mCameraId);
11990 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011991 }
11992 }
11993
11994 return rc;
11995}
11996
11997/*===========================================================================
11998 * FUNCTION : saveRequestSettings
11999 *
12000 * DESCRIPTION: Add any settings that might have changed to the request settings
12001 * and save the settings to be applied on the frame
12002 *
12003 * PARAMETERS :
12004 * @jpegMetadata : the extracted and/or modified jpeg metadata
12005 * @request : request with initial settings
12006 *
12007 * RETURN :
12008 * camera_metadata_t* : pointer to the saved request settings
12009 *==========================================================================*/
12010camera_metadata_t* QCamera3HardwareInterface::saveRequestSettings(
12011 const CameraMetadata &jpegMetadata,
12012 camera3_capture_request_t *request)
12013{
12014 camera_metadata_t *resultMetadata;
12015 CameraMetadata camMetadata;
12016 camMetadata = request->settings;
12017
12018 if (jpegMetadata.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
12019 int32_t thumbnail_size[2];
12020 thumbnail_size[0] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
12021 thumbnail_size[1] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
12022 camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, thumbnail_size,
12023 jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
12024 }
12025
12026 if (request->input_buffer != NULL) {
12027 uint8_t reprocessFlags = 1;
12028 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS,
12029 (uint8_t*)&reprocessFlags,
12030 sizeof(reprocessFlags));
12031 }
12032
12033 resultMetadata = camMetadata.release();
12034 return resultMetadata;
12035}
12036
12037/*===========================================================================
12038 * FUNCTION : setHalFpsRange
12039 *
12040 * DESCRIPTION: set FPS range parameter
12041 *
12042 *
12043 * PARAMETERS :
12044 * @settings : Metadata from framework
12045 * @hal_metadata: Metadata buffer
12046 *
12047 *
12048 * RETURN : success: NO_ERROR
12049 * failure:
12050 *==========================================================================*/
12051int32_t QCamera3HardwareInterface::setHalFpsRange(const CameraMetadata &settings,
12052 metadata_buffer_t *hal_metadata)
12053{
12054 int32_t rc = NO_ERROR;
12055 cam_fps_range_t fps_range;
12056 fps_range.min_fps = (float)
12057 settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0];
12058 fps_range.max_fps = (float)
12059 settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
12060 fps_range.video_min_fps = fps_range.min_fps;
12061 fps_range.video_max_fps = fps_range.max_fps;
12062
12063 LOGD("aeTargetFpsRange fps: [%f %f]",
12064 fps_range.min_fps, fps_range.max_fps);
12065 /* In CONSTRAINED_HFR_MODE, sensor_fps is derived from aeTargetFpsRange as
12066 * follows:
12067 * ---------------------------------------------------------------|
12068 * Video stream is absent in configure_streams |
12069 * (Camcorder preview before the first video record |
12070 * ---------------------------------------------------------------|
12071 * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
12072 * | | | vid_min/max_fps|
12073 * ---------------------------------------------------------------|
12074 * NO | [ 30, 240] | 240 | [240, 240] |
12075 * |-------------|-------------|----------------|
12076 * | [240, 240] | 240 | [240, 240] |
12077 * ---------------------------------------------------------------|
12078 * Video stream is present in configure_streams |
12079 * ---------------------------------------------------------------|
12080 * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
12081 * | | | vid_min/max_fps|
12082 * ---------------------------------------------------------------|
12083 * NO | [ 30, 240] | 240 | [240, 240] |
12084 * (camcorder prev |-------------|-------------|----------------|
12085 * after video rec | [240, 240] | 240 | [240, 240] |
12086 * is stopped) | | | |
12087 * ---------------------------------------------------------------|
12088 * YES | [ 30, 240] | 240 | [240, 240] |
12089 * |-------------|-------------|----------------|
12090 * | [240, 240] | 240 | [240, 240] |
12091 * ---------------------------------------------------------------|
12092 * When Video stream is absent in configure_streams,
12093 * preview fps = sensor_fps / batchsize
12094 * Eg: for 240fps at batchSize 4, preview = 60fps
12095 * for 120fps at batchSize 4, preview = 30fps
12096 *
12097 * When video stream is present in configure_streams, preview fps is as per
12098 * the ratio of preview buffers to video buffers requested in process
12099 * capture request
12100 */
12101 mBatchSize = 0;
12102 if (CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) {
12103 fps_range.min_fps = fps_range.video_max_fps;
12104 fps_range.video_min_fps = fps_range.video_max_fps;
12105 int val = lookupHalName(HFR_MODE_MAP, METADATA_MAP_SIZE(HFR_MODE_MAP),
12106 fps_range.max_fps);
12107 if (NAME_NOT_FOUND != val) {
12108 cam_hfr_mode_t hfrMode = (cam_hfr_mode_t)val;
12109 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
12110 return BAD_VALUE;
12111 }
12112
12113 if (fps_range.max_fps >= MIN_FPS_FOR_BATCH_MODE) {
12114 /* If batchmode is currently in progress and the fps changes,
12115 * set the flag to restart the sensor */
12116 if((mHFRVideoFps >= MIN_FPS_FOR_BATCH_MODE) &&
12117 (mHFRVideoFps != fps_range.max_fps)) {
12118 mNeedSensorRestart = true;
12119 }
12120 mHFRVideoFps = fps_range.max_fps;
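                // Batch size is the HFR rate divided by PREVIEW_FPS_FOR_HFR (e.g.
                // 240 fps with a 30 fps preview rate would give a batch of 8),
                // clamped to MAX_HFR_BATCH_SIZE below.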
12121 mBatchSize = mHFRVideoFps / PREVIEW_FPS_FOR_HFR;
12122 if (mBatchSize > MAX_HFR_BATCH_SIZE) {
12123 mBatchSize = MAX_HFR_BATCH_SIZE;
12124 }
12125 }
12126 LOGD("hfrMode: %d batchSize: %d", hfrMode, mBatchSize);
12127
12128 }
12129 } else {
12130 /* HFR mode is session param in backend/ISP. This should be reset when
12131 * in non-HFR mode */
12132 cam_hfr_mode_t hfrMode = CAM_HFR_MODE_OFF;
12133 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
12134 return BAD_VALUE;
12135 }
12136 }
12137 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FPS_RANGE, fps_range)) {
12138 return BAD_VALUE;
12139 }
12140 LOGD("fps: [%f %f] vid_fps: [%f %f]", fps_range.min_fps,
12141 fps_range.max_fps, fps_range.video_min_fps, fps_range.video_max_fps);
12142 return rc;
12143}
12144
12145/*===========================================================================
12146 * FUNCTION : translateToHalMetadata
12147 *
12148 * DESCRIPTION: read from the camera_metadata_t and change to parm_type_t
12149 *
12150 *
12151 * PARAMETERS :
12152 * @request : request sent from framework
12153 *
12154 *
12155 * RETURN : success: NO_ERROR
12156 * failure:
12157 *==========================================================================*/
12158int QCamera3HardwareInterface::translateToHalMetadata
12159 (const camera3_capture_request_t *request,
12160 metadata_buffer_t *hal_metadata,
Chien-Yu Chen92724a82017-01-06 11:50:30 -080012161 uint32_t snapshotStreamId) {
12162 if (request == nullptr || hal_metadata == nullptr) {
12163 return BAD_VALUE;
12164 }
12165
12166 int64_t minFrameDuration = getMinFrameDuration(request);
12167
12168 return translateFwkMetadataToHalMetadata(request->settings, hal_metadata, snapshotStreamId,
12169 minFrameDuration);
12170}
12171
12172int QCamera3HardwareInterface::translateFwkMetadataToHalMetadata(
12173 const camera_metadata_t *frameworkMetadata, metadata_buffer_t *hal_metadata,
12174 uint32_t snapshotStreamId, int64_t minFrameDuration) {
12175
Thierry Strudel3d639192016-09-09 11:52:26 -070012176 int rc = 0;
12177 CameraMetadata frame_settings;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080012178 frame_settings = frameworkMetadata;
Thierry Strudel3d639192016-09-09 11:52:26 -070012179
12180 /* Do not change the order of the following list unless you know what you are
12181 * doing.
12182 * The order is laid out in such a way that parameters in the front of the table
12183 * may be used to override the parameters later in the table. Examples are:
12184 * 1. META_MODE should precede AEC/AWB/AF MODE
12185 * 2. AEC MODE should precede EXPOSURE_TIME/SENSITIVITY/FRAME_DURATION
12186 * 3. AWB_MODE should precede COLOR_CORRECTION_MODE
12187 * 4. Any mode should precede its corresponding settings
12188 */
12189 if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
12190 uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
12191 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_MODE, metaMode)) {
12192 rc = BAD_VALUE;
12193 }
12194 rc = extractSceneMode(frame_settings, metaMode, hal_metadata);
12195 if (rc != NO_ERROR) {
12196 LOGE("extractSceneMode failed");
12197 }
12198 }
12199
12200 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
12201 uint8_t fwk_aeMode =
12202 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
12203 uint8_t aeMode;
12204 int32_t redeye;
12205
12206 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_OFF ) {
12207 aeMode = CAM_AE_MODE_OFF;
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -080012208 } else if (fwk_aeMode == NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH) {
12209 aeMode = CAM_AE_MODE_ON_EXTERNAL_FLASH;
Thierry Strudel3d639192016-09-09 11:52:26 -070012210 } else {
12211 aeMode = CAM_AE_MODE_ON;
12212 }
12213 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
12214 redeye = 1;
12215 } else {
12216 redeye = 0;
12217 }
12218
12219 int val = lookupHalName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
12220 fwk_aeMode);
12221 if (NAME_NOT_FOUND != val) {
12222 int32_t flashMode = (int32_t)val;
12223 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode);
12224 }
12225
12226 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_MODE, aeMode);
12227 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_REDEYE_REDUCTION, redeye)) {
12228 rc = BAD_VALUE;
12229 }
12230 }
12231
12232 if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) {
12233 uint8_t fwk_whiteLevel = frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0];
12234 int val = lookupHalName(WHITE_BALANCE_MODES_MAP, METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
12235 fwk_whiteLevel);
12236 if (NAME_NOT_FOUND != val) {
12237 uint8_t whiteLevel = (uint8_t)val;
12238 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_WHITE_BALANCE, whiteLevel)) {
12239 rc = BAD_VALUE;
12240 }
12241 }
12242 }
12243
12244 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
12245 uint8_t fwk_cacMode =
12246 frame_settings.find(
12247 ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
12248 int val = lookupHalName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
12249 fwk_cacMode);
12250 if (NAME_NOT_FOUND != val) {
12251 cam_aberration_mode_t cacMode = (cam_aberration_mode_t) val;
12252 bool entryAvailable = FALSE;
12253 // Check whether Frameworks set CAC mode is supported in device or not
12254 for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
12255 if (gCamCapability[mCameraId]->aberration_modes[i] == cacMode) {
12256 entryAvailable = TRUE;
12257 break;
12258 }
12259 }
12260 LOGD("FrameworksCacMode=%d entryAvailable=%d", cacMode, entryAvailable);
12261 // If the entry is not found, set a device-supported mode instead of the framework mode, i.e.:
12262 // Only HW ISP CAC + NO SW CAC : Advertise all 3 with High doing same as fast by ISP
12263 // NO HW ISP CAC + Only SW CAC : Advertise all 3 with Fast doing the same as OFF
12264 if (entryAvailable == FALSE) {
12265 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
12266 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
12267 } else {
12268 if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
12269 // HIGH is not supported, so fall back to FAST since the spec says the
12270 // underlying device implementation can be the same for both modes.
12271 cacMode = CAM_COLOR_CORRECTION_ABERRATION_FAST;
12272 } else if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_FAST) {
12273 // Fast is not supported and so we cannot set HIGH or FAST but choose OFF
12274 // in order to avoid the fps drop due to high quality
12275 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
12276 } else {
12277 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
12278 }
12279 }
12280 }
12281 LOGD("Final cacMode is %d", cacMode);
12282 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_CAC, cacMode)) {
12283 rc = BAD_VALUE;
12284 }
12285 } else {
12286 LOGE("Invalid framework CAC mode: %d", fwk_cacMode);
12287 }
12288 }
12289
Jason Lee84ae9972017-02-24 13:24:24 -080012290 uint8_t fwk_focusMode = 0;
Shuzhen Wangb57ec912017-07-31 13:24:27 -070012291 if (m_bForceInfinityAf == 0) {
Thierry Strudel2896d122017-02-23 19:18:03 -080012292 if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) {
Jason Lee84ae9972017-02-24 13:24:24 -080012293 fwk_focusMode = frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0];
Thierry Strudel2896d122017-02-23 19:18:03 -080012294 int val = lookupHalName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
12295 fwk_focusMode);
12296 if (NAME_NOT_FOUND != val) {
12297 uint8_t focusMode = (uint8_t)val;
12298 LOGD("set focus mode %d", focusMode);
12299 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12300 CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
12301 rc = BAD_VALUE;
12302 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012303 }
12304 }
Thierry Strudel2896d122017-02-23 19:18:03 -080012305 } else {
12306 uint8_t focusMode = (uint8_t)CAM_FOCUS_MODE_INFINITY;
12307 LOGE("Focus forced to infinity %d", focusMode);
12308 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
12309 rc = BAD_VALUE;
12310 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012311 }
12312
Jason Lee84ae9972017-02-24 13:24:24 -080012313 if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE) &&
12314 fwk_focusMode == ANDROID_CONTROL_AF_MODE_OFF) {
Thierry Strudel3d639192016-09-09 11:52:26 -070012315 float focalDistance = frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0];
12316 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCUS_DISTANCE,
12317 focalDistance)) {
12318 rc = BAD_VALUE;
12319 }
12320 }
12321
12322 if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) {
12323 uint8_t fwk_antibandingMode =
12324 frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.u8[0];
12325 int val = lookupHalName(ANTIBANDING_MODES_MAP,
12326 METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP), fwk_antibandingMode);
12327 if (NAME_NOT_FOUND != val) {
12328 uint32_t hal_antibandingMode = (uint32_t)val;
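            // AUTO antibanding is narrowed to the 50Hz or 60Hz variant based on
            // the detected power-line frequency zone.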
Shuzhen Wangf6890e02016-08-12 14:28:54 -070012329 if (hal_antibandingMode == CAM_ANTIBANDING_MODE_AUTO) {
12330 if (m60HzZone) {
12331 hal_antibandingMode = CAM_ANTIBANDING_MODE_AUTO_60HZ;
12332 } else {
12333 hal_antibandingMode = CAM_ANTIBANDING_MODE_AUTO_50HZ;
12334 }
12335 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012336 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ANTIBANDING,
12337 hal_antibandingMode)) {
12338 rc = BAD_VALUE;
12339 }
12340 }
12341 }
12342
12343 if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
12344 int32_t expCompensation = frame_settings.find(
12345 ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
12346 if (expCompensation < gCamCapability[mCameraId]->exposure_compensation_min)
12347 expCompensation = gCamCapability[mCameraId]->exposure_compensation_min;
12348 if (expCompensation > gCamCapability[mCameraId]->exposure_compensation_max)
12349 expCompensation = gCamCapability[mCameraId]->exposure_compensation_max;
Thierry Strudel54dc9782017-02-15 12:12:10 -080012350 LOGD("Setting compensation:%d", expCompensation);
Thierry Strudel3d639192016-09-09 11:52:26 -070012351 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_COMPENSATION,
12352 expCompensation)) {
12353 rc = BAD_VALUE;
12354 }
12355 }
12356
12357 if (frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) {
12358 uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0];
12359 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_LOCK, aeLock)) {
12360 rc = BAD_VALUE;
12361 }
12362 }
12363 if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
12364 rc = setHalFpsRange(frame_settings, hal_metadata);
12365 if (rc != NO_ERROR) {
12366 LOGE("setHalFpsRange failed");
12367 }
12368 }
12369
12370 if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) {
12371 uint8_t awbLock = frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0];
12372 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AWB_LOCK, awbLock)) {
12373 rc = BAD_VALUE;
12374 }
12375 }
12376
12377 if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) {
12378 uint8_t fwk_effectMode = frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0];
12379 int val = lookupHalName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
12380 fwk_effectMode);
12381 if (NAME_NOT_FOUND != val) {
12382 uint8_t effectMode = (uint8_t)val;
12383 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EFFECT, effectMode)) {
12384 rc = BAD_VALUE;
12385 }
12386 }
12387 }
12388
12389 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) {
12390 uint8_t colorCorrectMode = frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0];
12391 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_MODE,
12392 colorCorrectMode)) {
12393 rc = BAD_VALUE;
12394 }
12395 }
12396
12397 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_GAINS)) {
12398 cam_color_correct_gains_t colorCorrectGains;
12399 for (size_t i = 0; i < CC_GAIN_MAX; i++) {
12400 colorCorrectGains.gains[i] =
12401 frame_settings.find(ANDROID_COLOR_CORRECTION_GAINS).data.f[i];
12402 }
12403 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_GAINS,
12404 colorCorrectGains)) {
12405 rc = BAD_VALUE;
12406 }
12407 }
12408
12409 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_TRANSFORM)) {
12410 cam_color_correct_matrix_t colorCorrectTransform;
12411 cam_rational_type_t transform_elem;
12412 size_t num = 0;
12413 for (size_t i = 0; i < CC_MATRIX_ROWS; i++) {
12414 for (size_t j = 0; j < CC_MATRIX_COLS; j++) {
12415 transform_elem.numerator =
12416 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].numerator;
12417 transform_elem.denominator =
12418 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].denominator;
12419 colorCorrectTransform.transform_matrix[i][j] = transform_elem;
12420 num++;
12421 }
12422 }
12423 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_TRANSFORM,
12424 colorCorrectTransform)) {
12425 rc = BAD_VALUE;
12426 }
12427 }
12428
12429 cam_trigger_t aecTrigger;
12430 aecTrigger.trigger = CAM_AEC_TRIGGER_IDLE;
12431 aecTrigger.trigger_id = -1;
12432 if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)&&
12433 frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_ID)) {
12434 aecTrigger.trigger =
12435 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0];
12436 aecTrigger.trigger_id =
12437 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID).data.i32[0];
12438 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER,
12439 aecTrigger)) {
12440 rc = BAD_VALUE;
12441 }
12442 LOGD("precaptureTrigger: %d precaptureTriggerID: %d",
12443 aecTrigger.trigger, aecTrigger.trigger_id);
12444 }
12445
12446 /*af_trigger must come with a trigger id*/
12447 if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER) &&
12448 frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER_ID)) {
12449 cam_trigger_t af_trigger;
12450 af_trigger.trigger =
12451 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0];
12452 af_trigger.trigger_id =
12453 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER_ID).data.i32[0];
12454 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_TRIGGER, af_trigger)) {
12455 rc = BAD_VALUE;
12456 }
12457 LOGD("AfTrigger: %d AfTriggerID: %d",
12458 af_trigger.trigger, af_trigger.trigger_id);
12459 }
12460
12461 if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) {
12462 int32_t demosaic = frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0];
12463 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_DEMOSAIC, demosaic)) {
12464 rc = BAD_VALUE;
12465 }
12466 }
12467 if (frame_settings.exists(ANDROID_EDGE_MODE)) {
12468 cam_edge_application_t edge_application;
12469 edge_application.edge_mode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0];
Thierry Strudel54dc9782017-02-15 12:12:10 -080012470
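        // Sharpness is zeroed when edge mode is OFF; otherwise it starts at the
        // capability default and may be overridden by QCAMERA3_SHARPNESS_STRENGTH
        // when the requested value falls within the advertised range.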
Thierry Strudel3d639192016-09-09 11:52:26 -070012471 if (edge_application.edge_mode == CAM_EDGE_MODE_OFF) {
12472 edge_application.sharpness = 0;
12473 } else {
Thierry Strudel54dc9782017-02-15 12:12:10 -080012474 edge_application.sharpness =
12475 gCamCapability[mCameraId]->sharpness_ctrl.def_value; //default
12476 if (frame_settings.exists(QCAMERA3_SHARPNESS_STRENGTH)) {
12477 int32_t sharpness =
12478 frame_settings.find(QCAMERA3_SHARPNESS_STRENGTH).data.i32[0];
12479 if (sharpness >= gCamCapability[mCameraId]->sharpness_ctrl.min_value &&
12480 sharpness <= gCamCapability[mCameraId]->sharpness_ctrl.max_value) {
12481 LOGD("Setting edge mode sharpness %d", sharpness);
12482 edge_application.sharpness = sharpness;
12483 }
12484 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012485 }
12486 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EDGE_MODE, edge_application)) {
12487 rc = BAD_VALUE;
12488 }
12489 }
12490
12491 if (frame_settings.exists(ANDROID_FLASH_MODE)) {
12492 int32_t respectFlashMode = 1;
12493 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
12494 uint8_t fwk_aeMode =
12495 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -080012496 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH ||
12497 fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH ||
12498 fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
Thierry Strudel3d639192016-09-09 11:52:26 -070012499 respectFlashMode = 0;
12500 LOGH("AE Mode controls flash, ignore android.flash.mode");
12501 }
12502 }
12503 if (respectFlashMode) {
12504 int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
12505 (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
12506 LOGH("flash mode after mapping %d", val);
12507 // To check: CAM_INTF_META_FLASH_MODE usage
12508 if (NAME_NOT_FOUND != val) {
12509 uint8_t flashMode = (uint8_t)val;
12510 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode)) {
12511 rc = BAD_VALUE;
12512 }
12513 }
12514 }
12515 }
12516
12517 if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) {
12518 uint8_t flashPower = frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0];
12519 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_POWER, flashPower)) {
12520 rc = BAD_VALUE;
12521 }
12522 }
12523
12524 if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) {
12525 int64_t flashFiringTime = frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0];
12526 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_FIRING_TIME,
12527 flashFiringTime)) {
12528 rc = BAD_VALUE;
12529 }
12530 }
12531
12532 if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) {
12533 uint8_t hotPixelMode = frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0];
12534 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_HOTPIXEL_MODE,
12535 hotPixelMode)) {
12536 rc = BAD_VALUE;
12537 }
12538 }
12539
12540 if (frame_settings.exists(ANDROID_LENS_APERTURE)) {
12541 float lensAperture = frame_settings.find( ANDROID_LENS_APERTURE).data.f[0];
12542 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_APERTURE,
12543 lensAperture)) {
12544 rc = BAD_VALUE;
12545 }
12546 }
12547
12548 if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) {
12549 float filterDensity = frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0];
12550 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FILTERDENSITY,
12551 filterDensity)) {
12552 rc = BAD_VALUE;
12553 }
12554 }
12555
12556 if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
12557 float focalLength = frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
12558 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCAL_LENGTH,
12559 focalLength)) {
12560 rc = BAD_VALUE;
12561 }
12562 }
12563
12564 if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) {
12565 uint8_t optStabMode =
12566 frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0];
12567 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_OPT_STAB_MODE,
12568 optStabMode)) {
12569 rc = BAD_VALUE;
12570 }
12571 }
12572
12573 if (frame_settings.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
12574 uint8_t videoStabMode =
12575 frame_settings.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
12576 LOGD("videoStabMode from APP = %d", videoStabMode);
12577 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_VIDEO_STAB_MODE,
12578 videoStabMode)) {
12579 rc = BAD_VALUE;
12580 }
12581 }
12582
12583
12584 if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) {
12585 uint8_t noiseRedMode = frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
12586 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_NOISE_REDUCTION_MODE,
12587 noiseRedMode)) {
12588 rc = BAD_VALUE;
12589 }
12590 }
12591
12592 if (frame_settings.exists(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR)) {
12593 float reprocessEffectiveExposureFactor =
12594 frame_settings.find(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR).data.f[0];
12595 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR,
12596 reprocessEffectiveExposureFactor)) {
12597 rc = BAD_VALUE;
12598 }
12599 }
12600
12601 cam_crop_region_t scalerCropRegion;
12602 bool scalerCropSet = false;
12603 if (frame_settings.exists(ANDROID_SCALER_CROP_REGION)) {
12604 scalerCropRegion.left = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0];
12605 scalerCropRegion.top = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1];
12606 scalerCropRegion.width = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2];
12607 scalerCropRegion.height = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[3];
12608
12609 // Map coordinate system from active array to sensor output.
12610 mCropRegionMapper.toSensor(scalerCropRegion.left, scalerCropRegion.top,
12611 scalerCropRegion.width, scalerCropRegion.height);
12612
12613 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SCALER_CROP_REGION,
12614 scalerCropRegion)) {
12615 rc = BAD_VALUE;
12616 }
12617 scalerCropSet = true;
12618 }
12619
12620 if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
12621 int64_t sensorExpTime =
12622 frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
12623 LOGD("setting sensorExpTime %lld", sensorExpTime);
12624 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_EXPOSURE_TIME,
12625 sensorExpTime)) {
12626 rc = BAD_VALUE;
12627 }
12628 }
12629
12630 if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) {
12631 int64_t sensorFrameDuration =
12632 frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
Thierry Strudel3d639192016-09-09 11:52:26 -070012633 sensorFrameDuration = MAX(sensorFrameDuration, minFrameDuration);
12634 if (sensorFrameDuration > gCamCapability[mCameraId]->max_frame_duration)
12635 sensorFrameDuration = gCamCapability[mCameraId]->max_frame_duration;
12636 LOGD("clamp sensorFrameDuration to %lld", sensorFrameDuration);
12637 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_FRAME_DURATION,
12638 sensorFrameDuration)) {
12639 rc = BAD_VALUE;
12640 }
12641 }
12642
12643 if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
12644 int32_t sensorSensitivity = frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
12645 if (sensorSensitivity < gCamCapability[mCameraId]->sensitivity_range.min_sensitivity)
12646 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.min_sensitivity;
12647 if (sensorSensitivity > gCamCapability[mCameraId]->sensitivity_range.max_sensitivity)
12648 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.max_sensitivity;
12649 LOGD("clamp sensorSensitivity to %d", sensorSensitivity);
12650 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_SENSITIVITY,
12651 sensorSensitivity)) {
12652 rc = BAD_VALUE;
12653 }
12654 }
12655
Thierry Strudel9e74aae2016-09-22 17:10:18 -070012656#ifndef USE_HAL_3_3
12657 if (frame_settings.exists(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST)) {
12658 int32_t ispSensitivity =
12659 frame_settings.find(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST).data.i32[0];
12660 if (ispSensitivity <
12661 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity) {
12662 ispSensitivity =
12663 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
12664 LOGD("clamp ispSensitivity to %d", ispSensitivity);
12665 }
12666 if (ispSensitivity >
12667 gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity) {
12668 ispSensitivity =
12669 gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity;
12670 LOGD("clamp ispSensitivity to %d", ispSensitivity);
12671 }
12672 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_ISP_SENSITIVITY,
12673 ispSensitivity)) {
12674 rc = BAD_VALUE;
12675 }
12676 }
12677#endif
12678
Thierry Strudel3d639192016-09-09 11:52:26 -070012679 if (frame_settings.exists(ANDROID_SHADING_MODE)) {
12680 uint8_t shadingMode = frame_settings.find(ANDROID_SHADING_MODE).data.u8[0];
12681 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SHADING_MODE, shadingMode)) {
12682 rc = BAD_VALUE;
12683 }
12684 }
12685
12686 if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
12687 uint8_t fwk_facedetectMode =
12688 frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];
12689
12690 int val = lookupHalName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
12691 fwk_facedetectMode);
12692
12693 if (NAME_NOT_FOUND != val) {
12694 uint8_t facedetectMode = (uint8_t)val;
12695 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_FACEDETECT_MODE,
12696 facedetectMode)) {
12697 rc = BAD_VALUE;
12698 }
12699 }
12700 }
12701
Thierry Strudel54dc9782017-02-15 12:12:10 -080012702 if (frame_settings.exists(QCAMERA3_HISTOGRAM_MODE)) {
Thierry Strudel3d639192016-09-09 11:52:26 -070012703 uint8_t histogramMode =
Thierry Strudel54dc9782017-02-15 12:12:10 -080012704 frame_settings.find(QCAMERA3_HISTOGRAM_MODE).data.u8[0];
Thierry Strudel3d639192016-09-09 11:52:26 -070012705 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
12706 histogramMode)) {
12707 rc = BAD_VALUE;
12708 }
12709 }
12710
12711 if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) {
12712 uint8_t sharpnessMapMode =
12713 frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0];
12714 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,
12715 sharpnessMapMode)) {
12716 rc = BAD_VALUE;
12717 }
12718 }
12719
12720 if (frame_settings.exists(ANDROID_TONEMAP_MODE)) {
12721 uint8_t tonemapMode =
12722 frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0];
12723 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_MODE, tonemapMode)) {
12724 rc = BAD_VALUE;
12725 }
12726 }
12727 /* Tonemap curve channels: ch0 = G, ch1 = B, ch2 = R */
12728 /* All tonemap channels will have the same number of points */
12729 if (frame_settings.exists(ANDROID_TONEMAP_CURVE_GREEN) &&
12730 frame_settings.exists(ANDROID_TONEMAP_CURVE_BLUE) &&
12731 frame_settings.exists(ANDROID_TONEMAP_CURVE_RED)) {
12732 cam_rgb_tonemap_curves tonemapCurves;
12733 tonemapCurves.tonemap_points_cnt = frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).count/2;
12734 if (tonemapCurves.tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
12735 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
12736 tonemapCurves.tonemap_points_cnt,
12737 CAM_MAX_TONEMAP_CURVE_SIZE);
12738 tonemapCurves.tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
12739 }
12740
12741 /* ch0 = G*/
12742 size_t point = 0;
12743 cam_tonemap_curve_t tonemapCurveGreen;
12744 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12745 for (size_t j = 0; j < 2; j++) {
12746 tonemapCurveGreen.tonemap_points[i][j] =
12747 frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).data.f[point];
12748 point++;
12749 }
12750 }
12751 tonemapCurves.curves[0] = tonemapCurveGreen;
12752
12753 /* ch 1 = B */
12754 point = 0;
12755 cam_tonemap_curve_t tonemapCurveBlue;
12756 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12757 for (size_t j = 0; j < 2; j++) {
12758 tonemapCurveBlue.tonemap_points[i][j] =
12759 frame_settings.find(ANDROID_TONEMAP_CURVE_BLUE).data.f[point];
12760 point++;
12761 }
12762 }
12763 tonemapCurves.curves[1] = tonemapCurveBlue;
12764
12765 /* ch 2 = R */
12766 point = 0;
12767 cam_tonemap_curve_t tonemapCurveRed;
12768 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12769 for (size_t j = 0; j < 2; j++) {
12770 tonemapCurveRed.tonemap_points[i][j] =
12771 frame_settings.find(ANDROID_TONEMAP_CURVE_RED).data.f[point];
12772 point++;
12773 }
12774 }
12775 tonemapCurves.curves[2] = tonemapCurveRed;
12776
12777 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_CURVES,
12778 tonemapCurves)) {
12779 rc = BAD_VALUE;
12780 }
12781 }
12782
12783 if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
12784 uint8_t captureIntent = frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
12785 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_CAPTURE_INTENT,
12786 captureIntent)) {
12787 rc = BAD_VALUE;
12788 }
12789 }
12790
12791 if (frame_settings.exists(ANDROID_BLACK_LEVEL_LOCK)) {
12792 uint8_t blackLevelLock = frame_settings.find(ANDROID_BLACK_LEVEL_LOCK).data.u8[0];
12793 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_BLACK_LEVEL_LOCK,
12794 blackLevelLock)) {
12795 rc = BAD_VALUE;
12796 }
12797 }
12798
12799 if (frame_settings.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
12800 uint8_t lensShadingMapMode =
12801 frame_settings.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
12802 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_SHADING_MAP_MODE,
12803 lensShadingMapMode)) {
12804 rc = BAD_VALUE;
12805 }
12806 }
12807
12808 if (frame_settings.exists(ANDROID_CONTROL_AE_REGIONS)) {
12809 cam_area_t roi;
12810 bool reset = true;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080012811 convertFromRegions(roi, frame_settings, ANDROID_CONTROL_AE_REGIONS);
Thierry Strudel3d639192016-09-09 11:52:26 -070012812
12813 // Map coordinate system from active array to sensor output.
12814 mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
12815 roi.rect.height);
12816
12817 if (scalerCropSet) {
12818 reset = resetIfNeededROI(&roi, &scalerCropRegion);
12819 }
12820 if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_ROI, roi)) {
12821 rc = BAD_VALUE;
12822 }
12823 }
12824
12825 if (frame_settings.exists(ANDROID_CONTROL_AF_REGIONS)) {
12826 cam_area_t roi;
12827 bool reset = true;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080012828 convertFromRegions(roi, frame_settings, ANDROID_CONTROL_AF_REGIONS);
Thierry Strudel3d639192016-09-09 11:52:26 -070012829
12830 // Map coordinate system from active array to sensor output.
12831 mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
12832 roi.rect.height);
12833
12834 if (scalerCropSet) {
12835 reset = resetIfNeededROI(&roi, &scalerCropRegion);
12836 }
12837 if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_ROI, roi)) {
12838 rc = BAD_VALUE;
12839 }
12840 }
12841
12842 // CDS for non-HFR non-video mode
12843 if ((mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
12844 !(m_bIsVideo) && frame_settings.exists(QCAMERA3_CDS_MODE)) {
12845 int32_t *fwk_cds = frame_settings.find(QCAMERA3_CDS_MODE).data.i32;
12846 if ((CAM_CDS_MODE_MAX <= *fwk_cds) || (0 > *fwk_cds)) {
12847 LOGE("Invalid CDS mode %d!", *fwk_cds);
12848 } else {
12849 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12850 CAM_INTF_PARM_CDS_MODE, *fwk_cds)) {
12851 rc = BAD_VALUE;
12852 }
12853 }
12854 }
12855
Thierry Strudel04e026f2016-10-10 11:27:36 -070012856 // Video HDR
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012857 cam_video_hdr_mode_t vhdr = CAM_VIDEO_HDR_MODE_OFF;
Thierry Strudel04e026f2016-10-10 11:27:36 -070012858 if (frame_settings.exists(QCAMERA3_VIDEO_HDR_MODE)) {
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012859 vhdr = (cam_video_hdr_mode_t) frame_settings.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
12860 }
12861 if (m_bVideoHdrEnabled)
12862 vhdr = CAM_VIDEO_HDR_MODE_ON;
12863
Thierry Strudel54dc9782017-02-15 12:12:10 -080012864 int8_t curr_hdr_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) != 0);
12865
12866 if(vhdr != curr_hdr_state)
12867 LOGH("PROFILE_SET_HDR_MODE %d" ,vhdr);
12868
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012869 rc = setVideoHdrMode(mParameters, vhdr);
12870 if (rc != NO_ERROR) {
12871 LOGE("setVideoHDR is failed");
Thierry Strudel04e026f2016-10-10 11:27:36 -070012872 }
12873
12874 //IR
12875 if(frame_settings.exists(QCAMERA3_IR_MODE)) {
12876 cam_ir_mode_type_t fwk_ir = (cam_ir_mode_type_t)
12877 frame_settings.find(QCAMERA3_IR_MODE).data.i32[0];
Thierry Strudel54dc9782017-02-15 12:12:10 -080012878 uint8_t curr_ir_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_IR) != 0);
12879 uint8_t isIRon = 0;
12880
12881 isIRon = (fwk_ir > 0) ? 1 : 0;
Thierry Strudel04e026f2016-10-10 11:27:36 -070012882 if ((CAM_IR_MODE_MAX <= fwk_ir) || (0 > fwk_ir)) {
12883 LOGE("Invalid IR mode %d!", fwk_ir);
12884 } else {
Thierry Strudel54dc9782017-02-15 12:12:10 -080012885 if(isIRon != curr_ir_state )
12886 LOGH("PROFILE_SET_IR_MODE %d" ,isIRon);
12887
Thierry Strudel04e026f2016-10-10 11:27:36 -070012888 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12889 CAM_INTF_META_IR_MODE, fwk_ir)) {
12890 rc = BAD_VALUE;
12891 }
12892 }
12893 }
12894
Thierry Strudel54dc9782017-02-15 12:12:10 -080012895 //Binning Correction Mode
12896 if(frame_settings.exists(QCAMERA3_BINNING_CORRECTION_MODE)) {
12897 cam_binning_correction_mode_t fwk_binning_correction = (cam_binning_correction_mode_t)
12898 frame_settings.find(QCAMERA3_BINNING_CORRECTION_MODE).data.i32[0];
12899 if ((CAM_BINNING_CORRECTION_MODE_MAX <= fwk_binning_correction)
12900 || (0 > fwk_binning_correction)) {
12901 LOGE("Invalid binning correction mode %d!", fwk_binning_correction);
12902 } else {
12903 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12904 CAM_INTF_META_BINNING_CORRECTION_MODE, fwk_binning_correction)) {
12905 rc = BAD_VALUE;
12906 }
12907 }
12908 }
12909
Thierry Strudel269c81a2016-10-12 12:13:59 -070012910 if (frame_settings.exists(QCAMERA3_AEC_CONVERGENCE_SPEED)) {
12911 float aec_speed;
12912 aec_speed = frame_settings.find(QCAMERA3_AEC_CONVERGENCE_SPEED).data.f[0];
12913 LOGD("AEC Speed :%f", aec_speed);
12914 if ( aec_speed < 0 ) {
12915 LOGE("Invalid AEC mode %f!", aec_speed);
12916 } else {
12917 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_CONVERGENCE_SPEED,
12918 aec_speed)) {
12919 rc = BAD_VALUE;
12920 }
12921 }
12922 }
12923
12924 if (frame_settings.exists(QCAMERA3_AWB_CONVERGENCE_SPEED)) {
12925 float awb_speed;
12926 awb_speed = frame_settings.find(QCAMERA3_AWB_CONVERGENCE_SPEED).data.f[0];
12927 LOGD("AWB Speed :%f", awb_speed);
12928 if ( awb_speed < 0 ) {
12929 LOGE("Invalid AWB mode %f!", awb_speed);
12930 } else {
12931 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AWB_CONVERGENCE_SPEED,
12932 awb_speed)) {
12933 rc = BAD_VALUE;
12934 }
12935 }
12936 }
12937
Thierry Strudel3d639192016-09-09 11:52:26 -070012938 // TNR
12939 if (frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_ENABLE) &&
12940 frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE)) {
12941 uint8_t b_TnrRequested = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -080012942 uint8_t curr_tnr_state = ((mCurrFeatureState & CAM_QTI_FEATURE_SW_TNR) != 0);
Thierry Strudel3d639192016-09-09 11:52:26 -070012943 cam_denoise_param_t tnr;
12944 tnr.denoise_enable = frame_settings.find(QCAMERA3_TEMPORAL_DENOISE_ENABLE).data.u8[0];
12945 tnr.process_plates =
12946 (cam_denoise_process_type_t)frame_settings.find(
12947 QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE).data.i32[0];
12948 b_TnrRequested = tnr.denoise_enable;
Thierry Strudel54dc9782017-02-15 12:12:10 -080012949
12950 if(b_TnrRequested != curr_tnr_state)
12951 LOGH("PROFILE_SET_TNR_MODE %d" ,b_TnrRequested);
12952
Thierry Strudel3d639192016-09-09 11:52:26 -070012953 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_TEMPORAL_DENOISE, tnr)) {
12954 rc = BAD_VALUE;
12955 }
12956 }
12957
Thierry Strudel54dc9782017-02-15 12:12:10 -080012958 if (frame_settings.exists(QCAMERA3_EXPOSURE_METER)) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012959 int32_t* exposure_metering_mode =
Thierry Strudel54dc9782017-02-15 12:12:10 -080012960 frame_settings.find(QCAMERA3_EXPOSURE_METER).data.i32;
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012961 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_ALGO_TYPE,
12962 *exposure_metering_mode)) {
12963 rc = BAD_VALUE;
12964 }
12965 }
12966
Thierry Strudel3d639192016-09-09 11:52:26 -070012967 if (frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_MODE)) {
12968 int32_t fwk_testPatternMode =
12969 frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_MODE).data.i32[0];
12970 int testPatternMode = lookupHalName(TEST_PATTERN_MAP,
12971 METADATA_MAP_SIZE(TEST_PATTERN_MAP), fwk_testPatternMode);
12972
12973 if (NAME_NOT_FOUND != testPatternMode) {
12974 cam_test_pattern_data_t testPatternData;
12975 memset(&testPatternData, 0, sizeof(testPatternData));
12976 testPatternData.mode = (cam_test_pattern_mode_t)testPatternMode;
12977 if (testPatternMode == CAM_TEST_PATTERN_SOLID_COLOR &&
12978 frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_DATA)) {
12979 int32_t *fwk_testPatternData =
12980 frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_DATA).data.i32;
12981 testPatternData.r = fwk_testPatternData[0];
12982 testPatternData.b = fwk_testPatternData[3];
12983 switch (gCamCapability[mCameraId]->color_arrangement) {
12984 case CAM_FILTER_ARRANGEMENT_RGGB:
12985 case CAM_FILTER_ARRANGEMENT_GRBG:
12986 testPatternData.gr = fwk_testPatternData[1];
12987 testPatternData.gb = fwk_testPatternData[2];
12988 break;
12989 case CAM_FILTER_ARRANGEMENT_GBRG:
12990 case CAM_FILTER_ARRANGEMENT_BGGR:
12991 testPatternData.gr = fwk_testPatternData[2];
12992 testPatternData.gb = fwk_testPatternData[1];
12993 break;
12994 default:
12995 LOGE("color arrangement %d is not supported",
12996 gCamCapability[mCameraId]->color_arrangement);
12997 break;
12998 }
12999 }
13000 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TEST_PATTERN_DATA,
13001 testPatternData)) {
13002 rc = BAD_VALUE;
13003 }
13004 } else {
13005 LOGE("Invalid framework sensor test pattern mode %d",
13006 fwk_testPatternMode);
13007 }
13008 }
13009
13010 if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
13011 size_t count = 0;
13012 camera_metadata_entry_t gps_coords = frame_settings.find(ANDROID_JPEG_GPS_COORDINATES);
13013 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_COORDINATES,
13014 gps_coords.data.d, gps_coords.count, count);
13015 if (gps_coords.count != count) {
13016 rc = BAD_VALUE;
13017 }
13018 }
13019
13020 if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
13021 char gps_methods[GPS_PROCESSING_METHOD_SIZE];
13022 size_t count = 0;
13023 const char *gps_methods_src = (const char *)
13024 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8;
13025 memset(gps_methods, '\0', sizeof(gps_methods));
13026 strlcpy(gps_methods, gps_methods_src, sizeof(gps_methods));
13027 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_PROC_METHODS,
13028 gps_methods, GPS_PROCESSING_METHOD_SIZE, count);
13029 if (GPS_PROCESSING_METHOD_SIZE != count) {
13030 rc = BAD_VALUE;
13031 }
13032 }
13033
13034 if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
13035 int64_t gps_timestamp = frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
13036 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_TIMESTAMP,
13037 gps_timestamp)) {
13038 rc = BAD_VALUE;
13039 }
13040 }
13041
13042 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
13043 int32_t orientation = frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
13044 cam_rotation_info_t rotation_info;
13045 if (orientation == 0) {
13046 rotation_info.rotation = ROTATE_0;
13047 } else if (orientation == 90) {
13048 rotation_info.rotation = ROTATE_90;
13049 } else if (orientation == 180) {
13050 rotation_info.rotation = ROTATE_180;
13051 } else if (orientation == 270) {
13052 rotation_info.rotation = ROTATE_270;
13053 }
Shuzhen Wang6ec8eac2016-07-28 23:09:23 -070013054 rotation_info.device_rotation = ROTATE_0;
Thierry Strudel3d639192016-09-09 11:52:26 -070013055 rotation_info.streamId = snapshotStreamId;
13056 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_ORIENTATION, orientation);
13057 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ROTATION, rotation_info)) {
13058 rc = BAD_VALUE;
13059 }
13060 }
13061
13062 if (frame_settings.exists(ANDROID_JPEG_QUALITY)) {
13063 uint32_t quality = (uint32_t) frame_settings.find(ANDROID_JPEG_QUALITY).data.u8[0];
13064 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_QUALITY, quality)) {
13065 rc = BAD_VALUE;
13066 }
13067 }
13068
13069 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY)) {
13070 uint32_t thumb_quality = (uint32_t)
13071 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8[0];
13072 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_QUALITY,
13073 thumb_quality)) {
13074 rc = BAD_VALUE;
13075 }
13076 }
13077
13078 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
13079 cam_dimension_t dim;
13080 dim.width = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
13081 dim.height = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
13082 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_SIZE, dim)) {
13083 rc = BAD_VALUE;
13084 }
13085 }
13086
13087 // Internal metadata
13088 if (frame_settings.exists(QCAMERA3_PRIVATEDATA_REPROCESS)) {
13089 size_t count = 0;
13090 camera_metadata_entry_t privatedata = frame_settings.find(QCAMERA3_PRIVATEDATA_REPROCESS);
13091 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_PRIVATE_DATA,
13092 privatedata.data.i32, privatedata.count, count);
13093 if (privatedata.count != count) {
13094 rc = BAD_VALUE;
13095 }
13096 }
13097
Thierry Strudel295a0ca2016-11-03 18:38:47 -070013098 // ISO/Exposure Priority
13099 if (frame_settings.exists(QCAMERA3_USE_ISO_EXP_PRIORITY) &&
13100 frame_settings.exists(QCAMERA3_SELECT_PRIORITY)) {
13101 cam_priority_mode_t mode =
13102 (cam_priority_mode_t)frame_settings.find(QCAMERA3_SELECT_PRIORITY).data.i32[0];
13103 if((CAM_ISO_PRIORITY == mode) || (CAM_EXP_PRIORITY == mode)) {
13104 cam_intf_parm_manual_3a_t use_iso_exp_pty;
13105 use_iso_exp_pty.previewOnly = FALSE;
13106 uint64_t* ptr = (uint64_t*)frame_settings.find(QCAMERA3_USE_ISO_EXP_PRIORITY).data.i64;
13107 use_iso_exp_pty.value = *ptr;
13108
13109 if(CAM_ISO_PRIORITY == mode) {
13110 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ISO,
13111 use_iso_exp_pty)) {
13112 rc = BAD_VALUE;
13113 }
13114 }
13115 else {
13116 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_TIME,
13117 use_iso_exp_pty)) {
13118 rc = BAD_VALUE;
13119 }
13120 }
Thierry Strudel54dc9782017-02-15 12:12:10 -080013121
13122 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ZSL_MODE, 1)) {
13123 rc = BAD_VALUE;
13124 }
13125 }
13126 } else {
13127 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ZSL_MODE, 0)) {
13128 rc = BAD_VALUE;
Thierry Strudel295a0ca2016-11-03 18:38:47 -070013129 }
13130 }
13131
13132 // Saturation
13133 if (frame_settings.exists(QCAMERA3_USE_SATURATION)) {
13134 int32_t* use_saturation =
13135 frame_settings.find(QCAMERA3_USE_SATURATION).data.i32;
13136 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_SATURATION, *use_saturation)) {
13137 rc = BAD_VALUE;
13138 }
13139 }
13140
Thierry Strudel3d639192016-09-09 11:52:26 -070013141 // EV step
13142 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EV_STEP,
13143 gCamCapability[mCameraId]->exp_compensation_step)) {
13144 rc = BAD_VALUE;
13145 }
13146
13147 // CDS info
13148 if (frame_settings.exists(QCAMERA3_CDS_INFO)) {
13149 cam_cds_data_t *cdsData = (cam_cds_data_t *)
13150 frame_settings.find(QCAMERA3_CDS_INFO).data.u8;
13151
13152 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13153 CAM_INTF_META_CDS_DATA, *cdsData)) {
13154 rc = BAD_VALUE;
13155 }
13156 }
13157
Shuzhen Wang19463d72016-03-08 11:09:52 -080013158 // Hybrid AE
13159 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
13160 uint8_t *hybrid_ae = (uint8_t *)
13161 frame_settings.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8;
Shuzhen Wang77b049a2017-08-30 12:24:36 -070013162 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_HYBRID_AE, *hybrid_ae)) {
13163 rc = BAD_VALUE;
13164 }
Shuzhen Wang19463d72016-03-08 11:09:52 -080013165 }
13166
Shuzhen Wang14415f52016-11-16 18:26:18 -080013167 // Histogram
13168 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE)) {
13169 uint8_t histogramMode =
13170 frame_settings.find(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE).data.u8[0];
13171 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
13172 histogramMode)) {
13173 rc = BAD_VALUE;
13174 }
13175 }
13176
13177 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS)) {
13178 int32_t histogramBins =
13179 frame_settings.find(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS).data.i32[0];
13180 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_BINS,
13181 histogramBins)) {
13182 rc = BAD_VALUE;
13183 }
13184 }
13185
Shuzhen Wangcc386c52017-03-29 09:28:08 -070013186 // Tracking AF
13187 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER)) {
13188 uint8_t trackingAfTrigger =
13189 frame_settings.find(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER).data.u8[0];
13190 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TRACKING_AF_TRIGGER,
13191 trackingAfTrigger)) {
13192 rc = BAD_VALUE;
13193 }
13194 }
13195
Chien-Yu Chendbd619b2017-08-04 17:50:11 -070013196 // Makernote
13197 camera_metadata_entry entry = frame_settings.find(NEXUS_EXPERIMENTAL_2017_EXIF_MAKERNOTE);
13198 if (entry.count != 0) {
13199 if (entry.count <= MAX_MAKERNOTE_LENGTH) {
13200 cam_makernote_t makernote;
13201 makernote.length = entry.count;
13202 memcpy(makernote.data, entry.data.u8, makernote.length);
13203 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_MAKERNOTE, makernote)) {
13204 rc = BAD_VALUE;
13205 }
13206 } else {
13207 ALOGE("%s: Makernote length %u is larger than %d", __FUNCTION__, entry.count,
13208 MAX_MAKERNOTE_LENGTH);
13209 rc = BAD_VALUE;
13210 }
13211 }
13212
Thierry Strudel3d639192016-09-09 11:52:26 -070013213 return rc;
13214}
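/*
 * Illustrative sketch (not part of the HAL build): the tonemap handling above
 * copies each framework curve, supplied as interleaved (Pin, Pout) floats,
 * into per-point [2] arrays, one cam_tonemap_curve_t per colour channel
 * (ch0 = G, ch1 = B, ch2 = R). The helper below is hypothetical and only
 * restates that copy loop in isolation.
 */
static void exampleUnpackTonemapCurve(const float *interleaved, size_t pointCount,
        float (*outPoints)[2])
{
    size_t src = 0;
    for (size_t i = 0; i < pointCount; i++) {
        outPoints[i][0] = interleaved[src++]; // Pin
        outPoints[i][1] = interleaved[src++]; // Pout
    }
}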
13215
13216/*===========================================================================
13217 * FUNCTION : captureResultCb
13218 *
13219 * DESCRIPTION: Callback handler for all channels (streams, as well as metadata)
13220 *
13221 * PARAMETERS :
13222 * @metadata : metadata buffer from mm-camera-interface
13223 * @buffer : actual gralloc buffer to be returned to frameworks. NULL if metadata.
13224 * @userdata: userdata
13225 *
13226 * RETURN : NONE
13227 *==========================================================================*/
13228void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata,
13229 camera3_stream_buffer_t *buffer,
13230 uint32_t frame_number, bool isInputBuffer, void *userdata)
13231{
13232 QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
13233 if (hw == NULL) {
13234 LOGE("Invalid hw %p", hw);
13235 return;
13236 }
13237
13238 hw->captureResultCb(metadata, buffer, frame_number, isInputBuffer);
13239 return;
13240}
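/*
 * Illustrative sketch (hypothetical, self-contained): captureResultCb() above,
 * like the other static callbacks registered with the channels, is a plain
 * trampoline -- recover the instance from the opaque userdata pointer,
 * validate it, then forward to the member function.
 */
namespace {
struct ExampleListener {
    void onEvent(uint32_t frameNumber) { (void)frameNumber; }
};

void exampleTrampoline(uint32_t frameNumber, void *userdata)
{
    ExampleListener *self = static_cast<ExampleListener *>(userdata);
    if (self == NULL) {
        return; // mirrors the NULL check in captureResultCb()
    }
    self->onEvent(frameNumber);
}
} // namespace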
13241
Thierry Strudelc2ee3302016-11-17 12:33:12 -080013242/*===========================================================================
13243 * FUNCTION : setBufferErrorStatus
13244 *
13245 * DESCRIPTION: Callback handler for channels to report any buffer errors
13246 *
13247 * PARAMETERS :
13248 * @ch : Channel on which buffer error is reported from
13249 * @frame_number : frame number on which buffer error is reported on
13250 * @buffer_status : buffer error status
13251 * @userdata: userdata
13252 *
13253 * RETURN : NONE
13254 *==========================================================================*/
13255void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
13256 uint32_t frame_number, camera3_buffer_status_t err, void *userdata)
13257{
13258 QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
13259 if (hw == NULL) {
13260 LOGE("Invalid hw %p", hw);
13261 return;
13262 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013263
Thierry Strudelc2ee3302016-11-17 12:33:12 -080013264 hw->setBufferErrorStatus(ch, frame_number, err);
13265 return;
13266}
13267
13268void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
13269 uint32_t frameNumber, camera3_buffer_status_t err)
13270{
13271 LOGD("channel: %p, frame# %d, buf err: %d", ch, frameNumber, err);
13272 pthread_mutex_lock(&mMutex);
13273
13274 for (auto& req : mPendingBuffersMap.mPendingBuffersInRequest) {
13275 if (req.frame_number != frameNumber)
13276 continue;
13277 for (auto& k : req.mPendingBufferList) {
13278 if(k.stream->priv == ch) {
13279 k.bufStatus = CAMERA3_BUFFER_STATUS_ERROR;
13280 }
13281 }
13282 }
13283
13284 pthread_mutex_unlock(&mMutex);
13285 return;
13286}
Thierry Strudel3d639192016-09-09 11:52:26 -070013287/*===========================================================================
13288 * FUNCTION : initialize
13289 *
13290 * DESCRIPTION: Pass framework callback pointers to HAL
13291 *
13292 * PARAMETERS :
13293 *
13294 *
13295 * RETURN : Success : 0
13296 * Failure: -ENODEV
13297 *==========================================================================*/
13298
13299int QCamera3HardwareInterface::initialize(const struct camera3_device *device,
13300 const camera3_callback_ops_t *callback_ops)
13301{
13302 LOGD("E");
13303 QCamera3HardwareInterface *hw =
13304 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13305 if (!hw) {
13306 LOGE("NULL camera device");
13307 return -ENODEV;
13308 }
13309
13310 int rc = hw->initialize(callback_ops);
13311 LOGD("X");
13312 return rc;
13313}
13314
13315/*===========================================================================
13316 * FUNCTION : configure_streams
13317 *
13318 * DESCRIPTION:
13319 *
13320 * PARAMETERS :
13321 *
13322 *
13323 * RETURN : Success: 0
13324 * Failure: -EINVAL (if stream configuration is invalid)
13325 * -ENODEV (fatal error)
13326 *==========================================================================*/
13327
13328int QCamera3HardwareInterface::configure_streams(
13329 const struct camera3_device *device,
13330 camera3_stream_configuration_t *stream_list)
13331{
13332 LOGD("E");
13333 QCamera3HardwareInterface *hw =
13334 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13335 if (!hw) {
13336 LOGE("NULL camera device");
13337 return -ENODEV;
13338 }
13339 int rc = hw->configureStreams(stream_list);
13340 LOGD("X");
13341 return rc;
13342}
13343
13344/*===========================================================================
13345 * FUNCTION : construct_default_request_settings
13346 *
13347 * DESCRIPTION: Configure a settings buffer to meet the required use case
13348 *
13349 * PARAMETERS :
13350 *
13351 *
13352 * RETURN : Success: Return valid metadata
13353 * Failure: Return NULL
13354 *==========================================================================*/
13355const camera_metadata_t* QCamera3HardwareInterface::
13356 construct_default_request_settings(const struct camera3_device *device,
13357 int type)
13358{
13359
13360 LOGD("E");
13361 camera_metadata_t* fwk_metadata = NULL;
13362 QCamera3HardwareInterface *hw =
13363 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13364 if (!hw) {
13365 LOGE("NULL camera device");
13366 return NULL;
13367 }
13368
13369 fwk_metadata = hw->translateCapabilityToMetadata(type);
13370
13371 LOGD("X");
13372 return fwk_metadata;
13373}
13374
13375/*===========================================================================
13376 * FUNCTION : process_capture_request
13377 *
13378 * DESCRIPTION:
13379 *
13380 * PARAMETERS :
13381 *
13382 *
13383 * RETURN :
13384 *==========================================================================*/
13385int QCamera3HardwareInterface::process_capture_request(
13386 const struct camera3_device *device,
13387 camera3_capture_request_t *request)
13388{
13389 LOGD("E");
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013390 CAMSCOPE_UPDATE_FLAGS(CAMSCOPE_SECTION_HAL, kpi_camscope_flags);
Thierry Strudel3d639192016-09-09 11:52:26 -070013391 QCamera3HardwareInterface *hw =
13392 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13393 if (!hw) {
13394 LOGE("NULL camera device");
13395 return -EINVAL;
13396 }
13397
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013398 int rc = hw->orchestrateRequest(request);
Thierry Strudel3d639192016-09-09 11:52:26 -070013399 LOGD("X");
13400 return rc;
13401}
13402
13403/*===========================================================================
13404 * FUNCTION : dump
13405 *
13406 * DESCRIPTION:
13407 *
13408 * PARAMETERS :
13409 *
13410 *
13411 * RETURN :
13412 *==========================================================================*/
13413
13414void QCamera3HardwareInterface::dump(
13415 const struct camera3_device *device, int fd)
13416{
13417 /* Log level property is read when "adb shell dumpsys media.camera" is
13418 called so that the log level can be controlled without restarting
13419 the media server */
13420 getLogLevel();
13421
13422 LOGD("E");
13423 QCamera3HardwareInterface *hw =
13424 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13425 if (!hw) {
13426 LOGE("NULL camera device");
13427 return;
13428 }
13429
13430 hw->dump(fd);
13431 LOGD("X");
13432 return;
13433}
13434
13435/*===========================================================================
13436 * FUNCTION : flush
13437 *
13438 * DESCRIPTION:
13439 *
13440 * PARAMETERS :
13441 *
13442 *
13443 * RETURN :
13444 *==========================================================================*/
13445
13446int QCamera3HardwareInterface::flush(
13447 const struct camera3_device *device)
13448{
13449 int rc;
13450 LOGD("E");
13451 QCamera3HardwareInterface *hw =
13452 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13453 if (!hw) {
13454 LOGE("NULL camera device");
13455 return -EINVAL;
13456 }
13457
13458 pthread_mutex_lock(&hw->mMutex);
13459 // Validate current state
13460 switch (hw->mState) {
13461 case STARTED:
13462 /* valid state */
13463 break;
13464
13465 case ERROR:
13466 pthread_mutex_unlock(&hw->mMutex);
13467 hw->handleCameraDeviceError();
13468 return -ENODEV;
13469
13470 default:
13471 LOGI("Flush returned during state %d", hw->mState);
13472 pthread_mutex_unlock(&hw->mMutex);
13473 return 0;
13474 }
13475 pthread_mutex_unlock(&hw->mMutex);
13476
13477 rc = hw->flush(true /* restart channels */ );
13478 LOGD("X");
13479 return rc;
13480}
13481
13482/*===========================================================================
13483 * FUNCTION : close_camera_device
13484 *
13485 * DESCRIPTION:
13486 *
13487 * PARAMETERS :
13488 *
13489 *
13490 * RETURN :
13491 *==========================================================================*/
13492int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device)
13493{
13494 int ret = NO_ERROR;
13495 QCamera3HardwareInterface *hw =
13496 reinterpret_cast<QCamera3HardwareInterface *>(
13497 reinterpret_cast<camera3_device_t *>(device)->priv);
13498 if (!hw) {
13499 LOGE("NULL camera device");
13500 return BAD_VALUE;
13501 }
13502
13503 LOGI("[KPI Perf]: E camera id %d", hw->mCameraId);
13504 delete hw;
13505 LOGI("[KPI Perf]: X");
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013506 CAMSCOPE_DESTROY(CAMSCOPE_SECTION_HAL);
Thierry Strudel3d639192016-09-09 11:52:26 -070013507 return ret;
13508}
13509
13510/*===========================================================================
13511 * FUNCTION : getWaveletDenoiseProcessPlate
13512 *
13513 * DESCRIPTION: query wavelet denoise process plate
13514 *
13515 * PARAMETERS : None
13516 *
13517 * RETURN : WNR process plate value
13518 *==========================================================================*/
13519cam_denoise_process_type_t QCamera3HardwareInterface::getWaveletDenoiseProcessPlate()
13520{
13521 char prop[PROPERTY_VALUE_MAX];
13522 memset(prop, 0, sizeof(prop));
13523 property_get("persist.denoise.process.plates", prop, "0");
13524 int processPlate = atoi(prop);
13525 switch(processPlate) {
13526 case 0:
13527 return CAM_WAVELET_DENOISE_YCBCR_PLANE;
13528 case 1:
13529 return CAM_WAVELET_DENOISE_CBCR_ONLY;
13530 case 2:
13531 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13532 case 3:
13533 return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
13534 default:
13535 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13536 }
13537}
13538
13539
13540/*===========================================================================
13541 * FUNCTION : getTemporalDenoiseProcessPlate
13542 *
13543 * DESCRIPTION: query temporal denoise process plate
13544 *
13545 * PARAMETERS : None
13546 *
13547 * RETURN : TNR process plate value
13548 *==========================================================================*/
13549cam_denoise_process_type_t QCamera3HardwareInterface::getTemporalDenoiseProcessPlate()
13550{
13551 char prop[PROPERTY_VALUE_MAX];
13552 memset(prop, 0, sizeof(prop));
13553 property_get("persist.tnr.process.plates", prop, "0");
13554 int processPlate = atoi(prop);
13555 switch(processPlate) {
13556 case 0:
13557 return CAM_WAVELET_DENOISE_YCBCR_PLANE;
13558 case 1:
13559 return CAM_WAVELET_DENOISE_CBCR_ONLY;
13560 case 2:
13561 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13562 case 3:
13563 return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
13564 default:
13565 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13566 }
13567}
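/*
 * Usage sketch: both plate queries above share the same property-driven
 * 0..3 mapping (0 = YCbCr plane, 1 = CbCr only, 2 = streamlined YCbCr,
 * 3 = streamlined CbCr), with unknown values falling back to streamlined
 * YCbCr. On a build where setprop is permitted, for example:
 *
 *   adb shell setprop persist.denoise.process.plates 2
 *   adb shell setprop persist.tnr.process.plates 2
 */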
13568
13569
13570/*===========================================================================
13571 * FUNCTION : extractSceneMode
13572 *
13573 * DESCRIPTION: Extract scene mode from frameworks set metadata
13574 *
13575 * PARAMETERS :
13576 * @frame_settings: CameraMetadata reference
13577 * @metaMode: ANDROID_CONTROL_MODE
13578 * @hal_metadata: hal metadata structure
13579 *
13580 * RETURN : int32_t type of status (NO_ERROR on success)
13581 *==========================================================================*/
13582int32_t QCamera3HardwareInterface::extractSceneMode(
13583 const CameraMetadata &frame_settings, uint8_t metaMode,
13584 metadata_buffer_t *hal_metadata)
13585{
13586 int32_t rc = NO_ERROR;
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013587 uint8_t sceneMode = CAM_SCENE_MODE_OFF;
13588
13589 if (ANDROID_CONTROL_MODE_OFF_KEEP_STATE == metaMode) {
13590 LOGD("Ignoring control mode OFF_KEEP_STATE");
13591 return NO_ERROR;
13592 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013593
13594 if (metaMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
13595 camera_metadata_ro_entry entry =
13596 frame_settings.find(ANDROID_CONTROL_SCENE_MODE);
13597 if (0 == entry.count)
13598 return rc;
13599
13600 uint8_t fwk_sceneMode = entry.data.u8[0];
13601
13602 int val = lookupHalName(SCENE_MODES_MAP,
13603 sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
13604 fwk_sceneMode);
13605 if (NAME_NOT_FOUND != val) {
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013606 sceneMode = (uint8_t)val;
Thierry Strudel3d639192016-09-09 11:52:26 -070013607 LOGD("sceneMode: %d", sceneMode);
Thierry Strudel3d639192016-09-09 11:52:26 -070013608 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013609 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013610
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013611 if ((sceneMode == CAM_SCENE_MODE_HDR) || m_bSensorHDREnabled) {
13612 rc = setSensorHDR(hal_metadata, (sceneMode == CAM_SCENE_MODE_HDR));
13613 }
13614
13615 if ((rc == NO_ERROR) && !m_bSensorHDREnabled) {
13616 if (sceneMode == ANDROID_CONTROL_SCENE_MODE_HDR) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013617 cam_hdr_param_t hdr_params;
13618 hdr_params.hdr_enable = 1;
13619 hdr_params.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13620 hdr_params.hdr_need_1x = false;
13621 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13622 CAM_INTF_PARM_HAL_BRACKETING_HDR, hdr_params)) {
13623 rc = BAD_VALUE;
13624 }
13625 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013626
Thierry Strudel3d639192016-09-09 11:52:26 -070013627 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13628 CAM_INTF_PARM_BESTSHOT_MODE, sceneMode)) {
13629 rc = BAD_VALUE;
13630 }
13631 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013632
13633 if (mForceHdrSnapshot) {
13634 cam_hdr_param_t hdr_params;
13635 hdr_params.hdr_enable = 1;
13636 hdr_params.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13637 hdr_params.hdr_need_1x = false;
13638 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13639 CAM_INTF_PARM_HAL_BRACKETING_HDR, hdr_params)) {
13640 rc = BAD_VALUE;
13641 }
13642 }
13643
Thierry Strudel3d639192016-09-09 11:52:26 -070013644 return rc;
13645}
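/*
 * Illustrative sketch (hypothetical, self-contained): extractSceneMode() and
 * the other translation paths above use lookupHalName() to map a framework
 * enum onto its HAL counterpart through a pair table, returning
 * NAME_NOT_FOUND when no entry matches. The pattern reduces to a linear scan:
 */
namespace {
struct ExampleEnumPair {
    int fwkValue;
    int halValue;
};

int exampleLookupHalValue(const ExampleEnumPair *table, size_t len, int fwkValue)
{
    for (size_t i = 0; i < len; i++) {
        if (table[i].fwkValue == fwkValue) {
            return table[i].halValue;
        }
    }
    return -1; // callers treat this like NAME_NOT_FOUND
}
} // namespace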
13646
13647/*===========================================================================
Thierry Strudel04e026f2016-10-10 11:27:36 -070013648 * FUNCTION : setVideoHdrMode
13649 *
13650 * DESCRIPTION: Set Video HDR mode from frameworks set metadata
13651 *
13652 * PARAMETERS :
13653 * @hal_metadata: hal metadata structure
13654 * @vhdr: requested video HDR mode (QCAMERA3_VIDEO_HDR_MODE)
13655 *
13656 * RETURN : int32_t type of status (NO_ERROR on success)
13657 *==========================================================================*/
13658int32_t QCamera3HardwareInterface::setVideoHdrMode(
13659 metadata_buffer_t *hal_metadata, cam_video_hdr_mode_t vhdr)
13660{
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013661 if ( (vhdr >= CAM_VIDEO_HDR_MODE_OFF) && (vhdr < CAM_VIDEO_HDR_MODE_MAX)) {
13662 return setSensorHDR(hal_metadata, (vhdr == CAM_VIDEO_HDR_MODE_ON), true);
13663 }
13664
13665 LOGE("Invalid Video HDR mode %d!", vhdr);
13666 return BAD_VALUE;
13667}
13668
13669/*===========================================================================
13670 * FUNCTION : setSensorHDR
13671 *
13672 * DESCRIPTION: Enable/disable sensor HDR.
13673 *
13674 * PARAMETERS :
13675 * @hal_metadata: hal metadata structure
13676 * @enable: boolean whether to enable/disable sensor HDR
13677 * @isVideoHdrEnable: true when invoked from the video HDR path
13678 * RETURN : int32_t type of status (NO_ERROR on success)
13679 *==========================================================================*/
13680int32_t QCamera3HardwareInterface::setSensorHDR(
13681 metadata_buffer_t *hal_metadata, bool enable, bool isVideoHdrEnable)
13682{
Thierry Strudel04e026f2016-10-10 11:27:36 -070013683 int32_t rc = NO_ERROR;
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013684 cam_sensor_hdr_type_t sensor_hdr = CAM_SENSOR_HDR_OFF;
13685
13686 if (enable) {
13687 char sensor_hdr_prop[PROPERTY_VALUE_MAX];
13688 memset(sensor_hdr_prop, 0, sizeof(sensor_hdr_prop));
13689 #ifdef _LE_CAMERA_
13690 //Default to staggered HDR for IOT
13691 property_get("persist.camera.sensor.hdr", sensor_hdr_prop, "3");
13692 #else
13693 property_get("persist.camera.sensor.hdr", sensor_hdr_prop, "0");
13694 #endif
13695 sensor_hdr = (cam_sensor_hdr_type_t) atoi(sensor_hdr_prop);
13696 }
13697
13698 bool isSupported = false;
13699 switch (sensor_hdr) {
13700 case CAM_SENSOR_HDR_IN_SENSOR:
13701 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13702 CAM_QCOM_FEATURE_SENSOR_HDR) {
13703 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070013704 LOGD("Setting HDR mode In Sensor");
Thierry Strudel04e026f2016-10-10 11:27:36 -070013705 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013706 break;
13707 case CAM_SENSOR_HDR_ZIGZAG:
13708 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13709 CAM_QCOM_FEATURE_ZIGZAG_HDR) {
13710 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070013711 LOGD("Setting HDR mode Zigzag");
Thierry Strudel04e026f2016-10-10 11:27:36 -070013712 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013713 break;
13714 case CAM_SENSOR_HDR_STAGGERED:
13715 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13716 CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) {
13717 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070013718 LOGD("Setting HDR mode Staggered");
Thierry Strudel04e026f2016-10-10 11:27:36 -070013719 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013720 break;
13721 case CAM_SENSOR_HDR_OFF:
13722 isSupported = true;
13723 LOGD("Turning off sensor HDR");
13724 break;
13725 default:
13726 LOGE("HDR mode %d not supported", sensor_hdr);
13727 rc = BAD_VALUE;
13728 break;
13729 }
13730
13731 if(isSupported) {
13732 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13733 CAM_INTF_PARM_SENSOR_HDR, sensor_hdr)) {
13734 rc = BAD_VALUE;
13735 } else {
13736 if(!isVideoHdrEnable)
13737 m_bSensorHDREnabled = (sensor_hdr != CAM_SENSOR_HDR_OFF);
Thierry Strudel04e026f2016-10-10 11:27:36 -070013738 }
13739 }
13740 return rc;
13741}
13742
13743/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070013744 * FUNCTION : needRotationReprocess
13745 *
13746 * DESCRIPTION: if rotation needs to be done by reprocess in pp
13747 *
13748 * PARAMETERS : none
13749 *
13750 * RETURN : true: needed
13751 * false: no need
13752 *==========================================================================*/
13753bool QCamera3HardwareInterface::needRotationReprocess()
13754{
13755 if ((gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION) > 0) {
13756 // current rotation is not zero, and pp has the capability to process rotation
13757 LOGH("need do reprocess for rotation");
13758 return true;
13759 }
13760
13761 return false;
13762}
13763
13764/*===========================================================================
13765 * FUNCTION : needReprocess
13766 *
13767 * DESCRIPTION: if reprocess is needed
13768 *
13769 * PARAMETERS : none
13770 *
13771 * RETURN : true: needed
13772 * false: no need
13773 *==========================================================================*/
13774bool QCamera3HardwareInterface::needReprocess(cam_feature_mask_t postprocess_mask)
13775{
13776 if (gCamCapability[mCameraId]->qcom_supported_feature_mask > 0) {
13777 // TODO: add for ZSL HDR later
13778 // pp module has min requirement for zsl reprocess, or WNR in ZSL mode
13779 if(postprocess_mask == CAM_QCOM_FEATURE_NONE){
13780 LOGH("need do reprocess for ZSL WNR or min PP reprocess");
13781 return true;
13782 } else {
13783 LOGH("already post processed frame");
13784 return false;
13785 }
13786 }
13787 return needRotationReprocess();
13788}
13789
13790/*===========================================================================
13791 * FUNCTION : needJpegExifRotation
13792 *
13793 * DESCRIPTION: if rotation via JPEG EXIF metadata is needed
13794 *
13795 * PARAMETERS : none
13796 *
13797 * RETURN : true: needed
13798 * false: no need
13799 *==========================================================================*/
13800bool QCamera3HardwareInterface::needJpegExifRotation()
13801{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013802 /*If the pp does not have the ability to do rotation, enable jpeg rotation*/
Thierry Strudel3d639192016-09-09 11:52:26 -070013803 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
13804 LOGD("Need use Jpeg EXIF Rotation");
13805 return true;
13806 }
13807 return false;
13808}
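/*
 * Illustrative sketch (hypothetical helper): needRotationReprocess(),
 * needReprocess() and needJpegExifRotation() above all hinge on a single
 * capability bit -- rotate in the reprocess (PP) path when
 * CAM_QCOM_FEATURE_ROTATION is advertised, otherwise fall back to JPEG EXIF
 * rotation.
 */
namespace {
enum class ExampleRotationPath { REPROCESS, JPEG_EXIF };

ExampleRotationPath exampleChooseRotationPath(uint64_t ppFeatureMask)
{
    return (ppFeatureMask & CAM_QCOM_FEATURE_ROTATION) ?
            ExampleRotationPath::REPROCESS : ExampleRotationPath::JPEG_EXIF;
}
} // namespace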
13809
13810/*===========================================================================
13811 * FUNCTION : addOfflineReprocChannel
13812 *
13813 * DESCRIPTION: add a reprocess channel that will do reprocess on frames
13814 * coming from input channel
13815 *
13816 * PARAMETERS :
13817 * @config : reprocess configuration
13818 * @inputChHandle : pointer to the input (source) channel
13819 *
13820 *
13821 * RETURN : Ptr to the newly created channel obj. NULL if failed.
13822 *==========================================================================*/
13823QCamera3ReprocessChannel *QCamera3HardwareInterface::addOfflineReprocChannel(
13824 const reprocess_config_t &config, QCamera3ProcessingChannel *inputChHandle)
13825{
13826 int32_t rc = NO_ERROR;
13827 QCamera3ReprocessChannel *pChannel = NULL;
13828
13829 pChannel = new QCamera3ReprocessChannel(mCameraHandle->camera_handle,
Thierry Strudelc2ee3302016-11-17 12:33:12 -080013830 mChannelHandle, mCameraHandle->ops, captureResultCb, setBufferErrorStatus,
13831 config.padding, CAM_QCOM_FEATURE_NONE, this, inputChHandle);
Thierry Strudel3d639192016-09-09 11:52:26 -070013832 if (NULL == pChannel) {
13833 LOGE("no mem for reprocess channel");
13834 return NULL;
13835 }
13836
13837 rc = pChannel->initialize(IS_TYPE_NONE);
13838 if (rc != NO_ERROR) {
13839 LOGE("init reprocess channel failed, ret = %d", rc);
13840 delete pChannel;
13841 return NULL;
13842 }
13843
13844 // pp feature config
13845 cam_pp_feature_config_t pp_config;
13846 memset(&pp_config, 0, sizeof(cam_pp_feature_config_t));
13847
13848 pp_config.feature_mask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
13849 if (gCamCapability[mCameraId]->qcom_supported_feature_mask
13850 & CAM_QCOM_FEATURE_DSDN) {
13851 // Use CPP DSDN in place of CDS when the h/w supports it.
13852 pp_config.feature_mask &= ~CAM_QCOM_FEATURE_CDS;
13853 pp_config.feature_mask |= CAM_QCOM_FEATURE_DSDN;
13854 }
13855 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
13856 pp_config.feature_mask &= ~CAM_QCOM_FEATURE_ROTATION;
13857 }
13858
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013859 if (config.hdr_param.hdr_enable) {
13860 pp_config.feature_mask |= CAM_QCOM_FEATURE_HDR;
13861 pp_config.hdr_param = config.hdr_param;
13862 }
13863
13864 if (mForceHdrSnapshot) {
13865 pp_config.feature_mask |= CAM_QCOM_FEATURE_HDR;
13866 pp_config.hdr_param.hdr_enable = 1;
13867 pp_config.hdr_param.hdr_need_1x = 0;
13868 pp_config.hdr_param.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13869 }
13870
Thierry Strudel3d639192016-09-09 11:52:26 -070013871 rc = pChannel->addReprocStreamsFromSource(pp_config,
13872 config,
13873 IS_TYPE_NONE,
13874 mMetadataChannel);
13875
13876 if (rc != NO_ERROR) {
13877 delete pChannel;
13878 return NULL;
13879 }
13880 return pChannel;
13881}
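/*
 * Usage sketch (caller side, illustrative only): addOfflineReprocChannel()
 * returns NULL on any allocation or initialization failure, so callers treat
 * the returned pointer as optional, e.g.:
 *
 *   QCamera3ReprocessChannel *reprocChannel =
 *           addOfflineReprocChannel(reprocConfig, inputChannel);
 *   if (reprocChannel == NULL) {
 *       LOGE("offline reprocess channel creation failed");
 *       return NO_MEMORY;
 *   }
 *
 * reprocConfig and inputChannel stand in for the caller's reprocess_config_t
 * and source QCamera3ProcessingChannel.
 */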
13882
13883/*===========================================================================
13884 * FUNCTION : getMobicatMask
13885 *
13886 * DESCRIPTION: returns mobicat mask
13887 *
13888 * PARAMETERS : none
13889 *
13890 * RETURN : mobicat mask
13891 *
13892 *==========================================================================*/
13893uint8_t QCamera3HardwareInterface::getMobicatMask()
13894{
13895 return m_MobicatMask;
13896}
13897
13898/*===========================================================================
13899 * FUNCTION : setMobicat
13900 *
13901 * DESCRIPTION: set Mobicat on/off.
13902 *
13903 * PARAMETERS :
13904 * @params : none
13905 *
13906 * RETURN : int32_t type of status
13907 * NO_ERROR -- success
13908 * non-zero failure code
13909 *==========================================================================*/
13910int32_t QCamera3HardwareInterface::setMobicat()
13911{
Thierry Strudel3d639192016-09-09 11:52:26 -070013912 int32_t ret = NO_ERROR;
Thierry Strudel3d639192016-09-09 11:52:26 -070013913
Shuzhen Wangb57ec912017-07-31 13:24:27 -070013914 if (m_MobicatMask) {
Thierry Strudel3d639192016-09-09 11:52:26 -070013915 tune_cmd_t tune_cmd;
13916 tune_cmd.type = SET_RELOAD_CHROMATIX;
13917 tune_cmd.module = MODULE_ALL;
13918 tune_cmd.value = TRUE;
13919 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
13920 CAM_INTF_PARM_SET_VFE_COMMAND,
13921 tune_cmd);
13922
13923 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
13924 CAM_INTF_PARM_SET_PP_COMMAND,
13925 tune_cmd);
13926 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013927
13928 return ret;
13929}
13930
13931/*===========================================================================
13932* FUNCTION : getLogLevel
13933*
13934* DESCRIPTION: Reads the log level property into a variable
13935*
13936* PARAMETERS :
13937* None
13938*
13939* RETURN :
13940* None
13941*==========================================================================*/
13942void QCamera3HardwareInterface::getLogLevel()
13943{
13944 char prop[PROPERTY_VALUE_MAX];
13945 uint32_t globalLogLevel = 0;
13946
13947 property_get("persist.camera.hal.debug", prop, "0");
13948 int val = atoi(prop);
13949 if (0 <= val) {
13950 gCamHal3LogLevel = (uint32_t)val;
13951 }
13952
Thierry Strudel9ec39c62016-12-28 11:30:05 -080013953 property_get("persist.camera.kpi.debug", prop, "0");
Thierry Strudel3d639192016-09-09 11:52:26 -070013954 gKpiDebugLevel = atoi(prop);
13955
13956 property_get("persist.camera.global.debug", prop, "0");
13957 val = atoi(prop);
13958 if (0 <= val) {
13959 globalLogLevel = (uint32_t)val;
13960 }
13961
13962 /* Highest log level among hal.logs and global.logs is selected */
13963 if (gCamHal3LogLevel < globalLogLevel)
13964 gCamHal3LogLevel = globalLogLevel;
13965
13966 return;
13967}
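/*
 * Usage sketch: the effective HAL log level is the higher of
 * persist.camera.hal.debug and persist.camera.global.debug, and it is
 * re-read every time dump() runs. For example, on a build where setprop is
 * permitted:
 *
 *   adb shell setprop persist.camera.hal.debug 3
 *   adb shell dumpsys media.camera
 *
 * raises the HAL log level without restarting the media server.
 */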
13968
13969/*===========================================================================
13970 * FUNCTION : validateStreamRotations
13971 *
13972 * DESCRIPTION: Check if the rotations requested are supported
13973 *
13974 * PARAMETERS :
13975 * @stream_list : streams to be configured
13976 *
13977 * RETURN : NO_ERROR on success
13978 * -EINVAL on failure
13979 *
13980 *==========================================================================*/
13981int QCamera3HardwareInterface::validateStreamRotations(
13982 camera3_stream_configuration_t *streamList)
13983{
13984 int rc = NO_ERROR;
13985
13986 /*
13987 * Loop through all streams requested in configuration
13988 * Check if unsupported rotations have been requested on any of them
13989 */
13990 for (size_t j = 0; j < streamList->num_streams; j++){
13991 camera3_stream_t *newStream = streamList->streams[j];
13992
Emilian Peev35ceeed2017-06-29 11:58:56 -070013993 switch(newStream->rotation) {
13994 case CAMERA3_STREAM_ROTATION_0:
13995 case CAMERA3_STREAM_ROTATION_90:
13996 case CAMERA3_STREAM_ROTATION_180:
13997 case CAMERA3_STREAM_ROTATION_270:
13998 //Expected values
13999 break;
14000 default:
14001 ALOGE("%s: Error: Unsupported rotation of %d requested for stream"
14002 "type:%d and stream format:%d", __func__,
14003 newStream->rotation, newStream->stream_type,
14004 newStream->format);
14005 return -EINVAL;
14006 }
14007
Thierry Strudel3d639192016-09-09 11:52:26 -070014008 bool isRotated = (newStream->rotation != CAMERA3_STREAM_ROTATION_0);
14009 bool isImplDef = (newStream->format ==
14010 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED);
14011 bool isZsl = (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
14012 isImplDef);
14013
14014 if (isRotated && (!isImplDef || isZsl)) {
14015 LOGE("Error: Unsupported rotation of %d requested for stream"
14016 "type:%d and stream format:%d",
14017 newStream->rotation, newStream->stream_type,
14018 newStream->format);
14019 rc = -EINVAL;
14020 break;
14021 }
14022 }
14023
14024 return rc;
14025}
14026
14027/*===========================================================================
14028* FUNCTION : getFlashInfo
14029*
14030* DESCRIPTION: Retrieve information about whether the device has a flash.
14031*
14032* PARAMETERS :
14033* @cameraId : Camera id to query
14034* @hasFlash : Boolean indicating whether there is a flash device
14035* associated with given camera
14036* @flashNode : If a flash device exists, this will be its device node.
14037*
14038* RETURN :
14039* None
14040*==========================================================================*/
14041void QCamera3HardwareInterface::getFlashInfo(const int cameraId,
14042 bool& hasFlash,
14043 char (&flashNode)[QCAMERA_MAX_FILEPATH_LENGTH])
14044{
14045 cam_capability_t* camCapability = gCamCapability[cameraId];
14046 if (NULL == camCapability) {
14047 hasFlash = false;
14048 flashNode[0] = '\0';
14049 } else {
14050 hasFlash = camCapability->flash_available;
14051 strlcpy(flashNode,
14052 (char*)camCapability->flash_dev_name,
14053 QCAMERA_MAX_FILEPATH_LENGTH);
14054 }
14055}
14056
14057/*===========================================================================
14058* FUNCTION : getEepromVersionInfo
14059*
14060* DESCRIPTION: Retrieve version info of the sensor EEPROM data
14061*
14062* PARAMETERS : None
14063*
14064* RETURN : string describing EEPROM version
14065* "\0" if no such info available
14066*==========================================================================*/
14067const char *QCamera3HardwareInterface::getEepromVersionInfo()
14068{
14069 return (const char *)&gCamCapability[mCameraId]->eeprom_version_info[0];
14070}
14071
14072/*===========================================================================
14073* FUNCTION : getLdafCalib
14074*
14075* DESCRIPTION: Retrieve Laser AF calibration data
14076*
14077* PARAMETERS : None
14078*
14079* RETURN : Two uint32_t describing laser AF calibration data
14080* NULL if none is available.
14081*==========================================================================*/
14082const uint32_t *QCamera3HardwareInterface::getLdafCalib()
14083{
14084 if (mLdafCalibExist) {
14085 return &mLdafCalib[0];
14086 } else {
14087 return NULL;
14088 }
14089}
14090
14091/*===========================================================================
Arnd Geis082a4d72017-08-24 10:33:07 -070014092* FUNCTION : getEaselFwVersion
14093*
14094* DESCRIPTION: Retrieve Easel firmware version
14095*
14096* PARAMETERS : None
14097*
14098* RETURN : string describing Firmware version
Arnd Geis8cbfc182017-09-07 14:46:41 -070014099* NULL if the firmware has not been updated (no version available)
Arnd Geis082a4d72017-08-24 10:33:07 -070014100*==========================================================================*/
14101const char *QCamera3HardwareInterface::getEaselFwVersion()
14102{
Arnd Geis8cbfc182017-09-07 14:46:41 -070014103 if (mEaselFwUpdated) {
14104 return (const char *)&mEaselFwVersion[0];
14105 } else {
14106 return NULL;
Arnd Geis082a4d72017-08-24 10:33:07 -070014107 }
Arnd Geis082a4d72017-08-24 10:33:07 -070014108}
14109
14110/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070014111 * FUNCTION : dynamicUpdateMetaStreamInfo
14112 *
14113 * DESCRIPTION: This function:
14114 * (1) stops all the channels
14115 * (2) returns error on pending requests and buffers
14116 * (3) sends metastream_info in setparams
14117 * (4) starts all channels
14118 * This is useful when sensor has to be restarted to apply any
14119 * settings such as frame rate from a different sensor mode
14120 *
14121 * PARAMETERS : None
14122 *
14123 * RETURN : NO_ERROR on success
14124 * Error codes on failure
14125 *
14126 *==========================================================================*/
14127int32_t QCamera3HardwareInterface::dynamicUpdateMetaStreamInfo()
14128{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014129 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_DYN_UPDATE_META_STRM_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -070014130 int rc = NO_ERROR;
14131
14132 LOGD("E");
14133
14134 rc = stopAllChannels();
14135 if (rc < 0) {
14136 LOGE("stopAllChannels failed");
14137 return rc;
14138 }
14139
14140 rc = notifyErrorForPendingRequests();
14141 if (rc < 0) {
14142 LOGE("notifyErrorForPendingRequests failed");
14143 return rc;
14144 }
14145
14146 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
14147 LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%x"
14148 "Format:%d",
14149 mStreamConfigInfo.type[i],
14150 mStreamConfigInfo.stream_sizes[i].width,
14151 mStreamConfigInfo.stream_sizes[i].height,
14152 mStreamConfigInfo.postprocess_mask[i],
14153 mStreamConfigInfo.format[i]);
14154 }
14155
14156 /* Send meta stream info once again so that ISP can start */
14157 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
14158 CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
14159 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
14160 mParameters);
14161 if (rc < 0) {
14162 LOGE("set Metastreaminfo failed. Sensor mode does not change");
14163 }
14164
14165 rc = startAllChannels();
14166 if (rc < 0) {
14167 LOGE("startAllChannels failed");
14168 return rc;
14169 }
14170
14171 LOGD("X");
14172 return rc;
14173}
14174
14175/*===========================================================================
14176 * FUNCTION : stopAllChannels
14177 *
14178 * DESCRIPTION: This function stops (equivalent to stream-off) all channels
14179 *
14180 * PARAMETERS : None
14181 *
14182 * RETURN : NO_ERROR on success
14183 * Error codes on failure
14184 *
14185 *==========================================================================*/
14186int32_t QCamera3HardwareInterface::stopAllChannels()
14187{
14188 int32_t rc = NO_ERROR;
14189
14190 LOGD("Stopping all channels");
14191 // Stop the Streams/Channels
14192 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
14193 it != mStreamInfo.end(); it++) {
14194 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
14195 if (channel) {
14196 channel->stop();
14197 }
14198 (*it)->status = INVALID;
14199 }
14200
14201 if (mSupportChannel) {
14202 mSupportChannel->stop();
14203 }
14204 if (mAnalysisChannel) {
14205 mAnalysisChannel->stop();
14206 }
14207 if (mRawDumpChannel) {
14208 mRawDumpChannel->stop();
14209 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014210 if (mHdrPlusRawSrcChannel) {
14211 mHdrPlusRawSrcChannel->stop();
14212 }
Thierry Strudel3d639192016-09-09 11:52:26 -070014213 if (mMetadataChannel) {
14214 /* If content of mStreamInfo is not 0, there is metadata stream */
14215 mMetadataChannel->stop();
14216 }
14217
14218 LOGD("All channels stopped");
14219 return rc;
14220}
14221
14222/*===========================================================================
14223 * FUNCTION : startAllChannels
14224 *
14225 * DESCRIPTION: This function starts (equivalent to stream-on) all channels
14226 *
14227 * PARAMETERS : None
14228 *
14229 * RETURN : NO_ERROR on success
14230 * Error codes on failure
14231 *
14232 *==========================================================================*/
14233int32_t QCamera3HardwareInterface::startAllChannels()
14234{
14235 int32_t rc = NO_ERROR;
14236
14237 LOGD("Start all channels ");
14238 // Start the Streams/Channels
14239 if (mMetadataChannel) {
14240 /* If content of mStreamInfo is not 0, there is metadata stream */
14241 rc = mMetadataChannel->start();
14242 if (rc < 0) {
14243 LOGE("META channel start failed");
14244 return rc;
14245 }
14246 }
14247 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
14248 it != mStreamInfo.end(); it++) {
14249 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
14250 if (channel) {
14251 rc = channel->start();
14252 if (rc < 0) {
14253 LOGE("channel start failed");
14254 return rc;
14255 }
14256 }
14257 }
14258 if (mAnalysisChannel) {
14259 mAnalysisChannel->start();
14260 }
14261 if (mSupportChannel) {
14262 rc = mSupportChannel->start();
14263 if (rc < 0) {
14264 LOGE("Support channel start failed");
14265 return rc;
14266 }
14267 }
14268 if (mRawDumpChannel) {
14269 rc = mRawDumpChannel->start();
14270 if (rc < 0) {
14271 LOGE("RAW dump channel start failed");
14272 return rc;
14273 }
14274 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014275 if (mHdrPlusRawSrcChannel) {
14276 rc = mHdrPlusRawSrcChannel->start();
14277 if (rc < 0) {
14278 LOGE("HDR+ RAW channel start failed");
14279 return rc;
14280 }
14281 }
Thierry Strudel3d639192016-09-09 11:52:26 -070014282
14283 LOGD("All channels started");
14284 return rc;
14285}
14286
14287/*===========================================================================
14288 * FUNCTION : notifyErrorForPendingRequests
14289 *
14290 * DESCRIPTION: This function sends errors for all pending requests/buffers
14291 *
14292 * PARAMETERS : None
14293 *
14294 * RETURN : Error codes
14295 * NO_ERROR on success
14296 *
14297 *==========================================================================*/
14298int32_t QCamera3HardwareInterface::notifyErrorForPendingRequests()
14299{
Emilian Peev7650c122017-01-19 08:24:33 -080014300 notifyErrorFoPendingDepthData(mDepthChannel);
14301
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014302 auto pendingRequest = mPendingRequestsList.begin();
14303 auto pendingBuffer = mPendingBuffersMap.mPendingBuffersInRequest.begin();
Thierry Strudel3d639192016-09-09 11:52:26 -070014304
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014305 // Iterate through pending requests (for which result metadata isn't sent yet) and pending
14306 // buffers (for which buffers aren't sent yet).
14307 while (pendingRequest != mPendingRequestsList.end() ||
14308 pendingBuffer != mPendingBuffersMap.mPendingBuffersInRequest.end()) {
14309 if (pendingRequest == mPendingRequestsList.end() ||
14310 pendingBuffer->frame_number < pendingRequest->frame_number) {
14311            // If the result metadata for this frame was already sent, notify about a buffer error
14312            // and return the buffers with an error status.
14313 for (auto &info : pendingBuffer->mPendingBufferList) {
14314 // Send a buffer error for this frame number.
Thierry Strudel3d639192016-09-09 11:52:26 -070014315 camera3_notify_msg_t notify_msg;
14316 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
14317 notify_msg.type = CAMERA3_MSG_ERROR;
14318 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014319 notify_msg.message.error.error_stream = info.stream;
14320 notify_msg.message.error.frame_number = pendingBuffer->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014321 orchestrateNotify(&notify_msg);
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014322
14323 camera3_stream_buffer_t buffer = {};
14324 buffer.acquire_fence = -1;
14325 buffer.release_fence = -1;
14326 buffer.buffer = info.buffer;
14327 buffer.status = CAMERA3_BUFFER_STATUS_ERROR;
14328 buffer.stream = info.stream;
14329 mOutputBufferDispatcher.markBufferReady(pendingBuffer->frame_number, buffer);
Thierry Strudel3d639192016-09-09 11:52:26 -070014330 }
14331
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014332 pendingBuffer = mPendingBuffersMap.mPendingBuffersInRequest.erase(pendingBuffer);
14333 } else if (pendingBuffer == mPendingBuffersMap.mPendingBuffersInRequest.end() ||
14334 pendingBuffer->frame_number > pendingRequest->frame_number) {
14335 // If the buffers for this frame were sent already, notify about a result error.
Thierry Strudel3d639192016-09-09 11:52:26 -070014336 camera3_notify_msg_t notify_msg;
14337 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
14338 notify_msg.type = CAMERA3_MSG_ERROR;
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014339 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_RESULT;
14340 notify_msg.message.error.error_stream = nullptr;
14341 notify_msg.message.error.frame_number = pendingRequest->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014342 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -070014343
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014344 if (pendingRequest->input_buffer != nullptr) {
14345 camera3_capture_result result = {};
14346 result.frame_number = pendingRequest->frame_number;
14347 result.result = nullptr;
14348 result.input_buffer = pendingRequest->input_buffer;
14349 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -070014350 }
14351
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014352 mShutterDispatcher.clear(pendingRequest->frame_number);
14353 pendingRequest = mPendingRequestsList.erase(pendingRequest);
14354 } else {
14355            // If neither the buffers nor the result metadata were sent yet, notify about a request
14356            // error and return the buffers with an error status.
14357 for (auto &info : pendingBuffer->mPendingBufferList) {
14358 camera3_notify_msg_t notify_msg;
14359 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
14360 notify_msg.type = CAMERA3_MSG_ERROR;
14361 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_REQUEST;
14362 notify_msg.message.error.error_stream = info.stream;
14363 notify_msg.message.error.frame_number = pendingBuffer->frame_number;
14364 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -070014365
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014366 camera3_stream_buffer_t buffer = {};
14367 buffer.acquire_fence = -1;
14368 buffer.release_fence = -1;
14369 buffer.buffer = info.buffer;
14370 buffer.status = CAMERA3_BUFFER_STATUS_ERROR;
14371 buffer.stream = info.stream;
14372 mOutputBufferDispatcher.markBufferReady(pendingBuffer->frame_number, buffer);
14373 }
14374
14375 if (pendingRequest->input_buffer != nullptr) {
14376 camera3_capture_result result = {};
14377 result.frame_number = pendingRequest->frame_number;
14378 result.result = nullptr;
14379 result.input_buffer = pendingRequest->input_buffer;
14380 orchestrateResult(&result);
14381 }
14382
14383 mShutterDispatcher.clear(pendingRequest->frame_number);
14384 pendingBuffer = mPendingBuffersMap.mPendingBuffersInRequest.erase(pendingBuffer);
14385 pendingRequest = mPendingRequestsList.erase(pendingRequest);
Thierry Strudel3d639192016-09-09 11:52:26 -070014386 }
14387 }
14388
14389 /* Reset pending frame Drop list and requests list */
14390 mPendingFrameDropList.clear();
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014391 mShutterDispatcher.clear();
14392 mOutputBufferDispatcher.clear(/*clearConfiguredStreams*/false);
Thierry Strudel3d639192016-09-09 11:52:26 -070014393 mPendingBuffersMap.mPendingBuffersInRequest.clear();
Emilian Peev30522a12017-08-03 14:36:33 +010014394 mExpectedFrameDuration = 0;
14395 mExpectedInflightDuration = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -070014396 LOGH("Cleared all the pending buffers ");
14397
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014398 return NO_ERROR;
Thierry Strudel3d639192016-09-09 11:52:26 -070014399}
14400
14401bool QCamera3HardwareInterface::isOnEncoder(
14402 const cam_dimension_t max_viewfinder_size,
14403 uint32_t width, uint32_t height)
14404{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014405 return ((width > (uint32_t)max_viewfinder_size.width) ||
14406 (height > (uint32_t)max_viewfinder_size.height) ||
14407 (width > (uint32_t)VIDEO_4K_WIDTH) ||
14408 (height > (uint32_t)VIDEO_4K_HEIGHT));
Thierry Strudel3d639192016-09-09 11:52:26 -070014409}
14410
14411/*===========================================================================
14412 * FUNCTION : setBundleInfo
14413 *
14414 * DESCRIPTION: Set bundle info for all streams that are bundled.
14415 *
14416 * PARAMETERS : None
14417 *
14418 * RETURN : NO_ERROR on success
14419 * Error codes on failure
14420 *==========================================================================*/
14421int32_t QCamera3HardwareInterface::setBundleInfo()
14422{
14423 int32_t rc = NO_ERROR;
14424
14425 if (mChannelHandle) {
14426 cam_bundle_config_t bundleInfo;
14427 memset(&bundleInfo, 0, sizeof(bundleInfo));
14428 rc = mCameraHandle->ops->get_bundle_info(
14429 mCameraHandle->camera_handle, mChannelHandle, &bundleInfo);
14430 if (rc != NO_ERROR) {
14431 LOGE("get_bundle_info failed");
14432 return rc;
14433 }
14434 if (mAnalysisChannel) {
14435 mAnalysisChannel->setBundleInfo(bundleInfo);
14436 }
14437 if (mSupportChannel) {
14438 mSupportChannel->setBundleInfo(bundleInfo);
14439 }
14440 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
14441 it != mStreamInfo.end(); it++) {
14442 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
14443 channel->setBundleInfo(bundleInfo);
14444 }
14445 if (mRawDumpChannel) {
14446 mRawDumpChannel->setBundleInfo(bundleInfo);
14447 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014448 if (mHdrPlusRawSrcChannel) {
14449 mHdrPlusRawSrcChannel->setBundleInfo(bundleInfo);
14450 }
Thierry Strudel3d639192016-09-09 11:52:26 -070014451 }
14452
14453 return rc;
14454}
14455
14456/*===========================================================================
Thierry Strudel295a0ca2016-11-03 18:38:47 -070014457 * FUNCTION : setInstantAEC
14458 *
14459 * DESCRIPTION: Set Instant AEC related params.
14460 *
14461 * PARAMETERS :
14462 * @meta: CameraMetadata reference
14463 *
14464 * RETURN : NO_ERROR on success
14465 * Error codes on failure
14466 *==========================================================================*/
14467int32_t QCamera3HardwareInterface::setInstantAEC(const CameraMetadata &meta)
14468{
14469 int32_t rc = NO_ERROR;
14470 uint8_t val = 0;
14471 char prop[PROPERTY_VALUE_MAX];
14472
14473 // First try to configure instant AEC from framework metadata
14474 if (meta.exists(QCAMERA3_INSTANT_AEC_MODE)) {
14475 val = (uint8_t)meta.find(QCAMERA3_INSTANT_AEC_MODE).data.i32[0];
14476 }
14477
14478 // If framework did not set this value, try to read from set prop.
14479 if (val == 0) {
14480 memset(prop, 0, sizeof(prop));
14481 property_get("persist.camera.instant.aec", prop, "0");
14482 val = (uint8_t)atoi(prop);
14483 }
14484
14485 if ((val >= (uint8_t)CAM_AEC_NORMAL_CONVERGENCE) &&
14486 ( val < (uint8_t)CAM_AEC_CONVERGENCE_MAX)) {
14487 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_INSTANT_AEC, val);
14488 mInstantAEC = val;
14489 mInstantAECSettledFrameNumber = 0;
14490 mInstantAecFrameIdxCount = 0;
14491 LOGH("instantAEC value set %d",val);
14492 if (mInstantAEC) {
14493 memset(prop, 0, sizeof(prop));
14494 property_get("persist.camera.ae.instant.bound", prop, "10");
14495 int32_t aec_frame_skip_cnt = atoi(prop);
14496 if (aec_frame_skip_cnt >= 0) {
14497 mAecSkipDisplayFrameBound = (uint8_t)aec_frame_skip_cnt;
14498 } else {
14499 LOGE("Invalid prop for aec frame bound %d", aec_frame_skip_cnt);
14500 rc = BAD_VALUE;
14501 }
14502 }
14503 } else {
14504 LOGE("Bad instant aec value set %d", val);
14505 rc = BAD_VALUE;
14506 }
14507 return rc;
14508}
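/* Illustrative sketch (not part of the original HAL): the two ways an instant AEC mode
 * reaches setInstantAEC(). The value 1 below is a hypothetical non-zero convergence mode;
 * valid values lie in [CAM_AEC_NORMAL_CONVERGENCE, CAM_AEC_CONVERGENCE_MAX).
 *
 *   // 1) Framework path: the request carries the vendor tag.
 *   CameraMetadata settings;
 *   int32_t instantAecMode = 1;
 *   settings.update(QCAMERA3_INSTANT_AEC_MODE, &instantAecMode, 1);
 *   setInstantAEC(settings);
 *
 *   // 2) Fallback path: if the tag is absent (or 0), the mode is read from
 *   //    persist.camera.instant.aec and the frame-skip bound from
 *   //    persist.camera.ae.instant.bound.
 */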
14509
14510/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070014511 * FUNCTION : get_num_overall_buffers
14512 *
14513 * DESCRIPTION: Return the total number of pending buffers across all requests.
14514 *
14515 * PARAMETERS : None
14516 *
14517 * RETURN : Number of overall pending buffers
14518 *
14519 *==========================================================================*/
14520uint32_t PendingBuffersMap::get_num_overall_buffers()
14521{
14522 uint32_t sum_buffers = 0;
14523 for (auto &req : mPendingBuffersInRequest) {
14524 sum_buffers += req.mPendingBufferList.size();
14525 }
14526 return sum_buffers;
14527}
14528
14529/*===========================================================================
14530 * FUNCTION : removeBuf
14531 *
14532 * DESCRIPTION: Remove a matching buffer from tracker.
14533 *
14534 * PARAMETERS : @buffer: image buffer for the callback
14535 *
14536 * RETURN : None
14537 *
14538 *==========================================================================*/
14539void PendingBuffersMap::removeBuf(buffer_handle_t *buffer)
14540{
14541 bool buffer_found = false;
14542 for (auto req = mPendingBuffersInRequest.begin();
14543 req != mPendingBuffersInRequest.end(); req++) {
14544 for (auto k = req->mPendingBufferList.begin();
14545 k != req->mPendingBufferList.end(); k++ ) {
14546 if (k->buffer == buffer) {
14547 LOGD("Frame %d: Found Frame buffer %p, take it out from mPendingBufferList",
14548 req->frame_number, buffer);
14549 k = req->mPendingBufferList.erase(k);
14550 if (req->mPendingBufferList.empty()) {
14551 // Remove this request from Map
14552 req = mPendingBuffersInRequest.erase(req);
14553 }
14554 buffer_found = true;
14555 break;
14556 }
14557 }
14558 if (buffer_found) {
14559 break;
14560 }
14561 }
14562 LOGD("mPendingBuffersMap.num_overall_buffers = %d",
14563 get_num_overall_buffers());
14564}
14565
14566/*===========================================================================
Thierry Strudelc2ee3302016-11-17 12:33:12 -080014567 * FUNCTION : getBufErrStatus
14568 *
14569 * DESCRIPTION: get buffer error status
14570 *
14571 * PARAMETERS : @buffer: buffer handle
14572 *
14573 * RETURN : Error status
14574 *
14575 *==========================================================================*/
14576int32_t PendingBuffersMap::getBufErrStatus(buffer_handle_t *buffer)
14577{
14578 for (auto& req : mPendingBuffersInRequest) {
14579 for (auto& k : req.mPendingBufferList) {
14580 if (k.buffer == buffer)
14581 return k.bufStatus;
14582 }
14583 }
14584 return CAMERA3_BUFFER_STATUS_OK;
14585}
14586
14587/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070014588 * FUNCTION : setPAAFSupport
14589 *
14590 * DESCRIPTION: Set the preview-assisted auto focus support bit in
14591 * feature mask according to stream type and filter
14592 * arrangement
14593 *
14594 * PARAMETERS : @feature_mask: current feature mask, which may be modified
14595 * @stream_type: stream type
14596 * @filter_arrangement: filter arrangement
14597 *
14598 * RETURN : None
14599 *==========================================================================*/
14600void QCamera3HardwareInterface::setPAAFSupport(
14601 cam_feature_mask_t& feature_mask,
14602 cam_stream_type_t stream_type,
14603 cam_color_filter_arrangement_t filter_arrangement)
14604{
Thierry Strudel3d639192016-09-09 11:52:26 -070014605 switch (filter_arrangement) {
14606 case CAM_FILTER_ARRANGEMENT_RGGB:
14607 case CAM_FILTER_ARRANGEMENT_GRBG:
14608 case CAM_FILTER_ARRANGEMENT_GBRG:
14609 case CAM_FILTER_ARRANGEMENT_BGGR:
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014610 if ((stream_type == CAM_STREAM_TYPE_PREVIEW) ||
14611 (stream_type == CAM_STREAM_TYPE_ANALYSIS) ||
Thierry Strudel3d639192016-09-09 11:52:26 -070014612 (stream_type == CAM_STREAM_TYPE_VIDEO)) {
Thierry Strudel2896d122017-02-23 19:18:03 -080014613 if (!(feature_mask & CAM_QTI_FEATURE_PPEISCORE))
14614 feature_mask |= CAM_QCOM_FEATURE_PAAF;
Thierry Strudel3d639192016-09-09 11:52:26 -070014615 }
14616 break;
14617 case CAM_FILTER_ARRANGEMENT_Y:
14618 if (stream_type == CAM_STREAM_TYPE_ANALYSIS) {
14619 feature_mask |= CAM_QCOM_FEATURE_PAAF;
14620 }
14621 break;
14622 default:
14623 break;
14624 }
Shuzhen Wang3b457d92016-08-03 08:46:59 -070014625 LOGD("feature_mask=0x%llx; stream_type=%d, filter_arrangement=%d",
14626 feature_mask, stream_type, filter_arrangement);
14627
14628
Thierry Strudel3d639192016-09-09 11:52:26 -070014629}
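/* Illustrative sketch (not part of the original HAL): how setPAAFSupport() updates a
 * feature mask. On a Bayer sensor, preview/analysis/video streams get the PAAF bit
 * (unless PPEISCORE is already set); on a mono (Y) sensor only the analysis stream does.
 *
 *   cam_feature_mask_t mask = 0;
 *   setPAAFSupport(mask, CAM_STREAM_TYPE_PREVIEW, CAM_FILTER_ARRANGEMENT_RGGB);
 *   // mask now contains CAM_QCOM_FEATURE_PAAF.
 *
 *   cam_feature_mask_t monoMask = 0;
 *   setPAAFSupport(monoMask, CAM_STREAM_TYPE_PREVIEW, CAM_FILTER_ARRANGEMENT_Y);
 *   // monoMask is unchanged: with a Y filter only CAM_STREAM_TYPE_ANALYSIS gets PAAF.
 */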
14630
14631/*===========================================================================
14632* FUNCTION : getSensorMountAngle
14633*
14634* DESCRIPTION: Retrieve sensor mount angle
14635*
14636* PARAMETERS : None
14637*
14638* RETURN : sensor mount angle in uint32_t
14639*==========================================================================*/
14640uint32_t QCamera3HardwareInterface::getSensorMountAngle()
14641{
14642 return gCamCapability[mCameraId]->sensor_mount_angle;
14643}
14644
14645/*===========================================================================
14646* FUNCTION : getRelatedCalibrationData
14647*
14648* DESCRIPTION: Retrieve related system calibration data
14649*
14650* PARAMETERS : None
14651*
14652* RETURN : Pointer of related system calibration data
14653*==========================================================================*/
14654const cam_related_system_calibration_data_t *QCamera3HardwareInterface::getRelatedCalibrationData()
14655{
14656 return (const cam_related_system_calibration_data_t *)
14657 &(gCamCapability[mCameraId]->related_cam_calibration);
14658}
Shuzhen Wangf6890e02016-08-12 14:28:54 -070014659
14660/*===========================================================================
14661 * FUNCTION : is60HzZone
14662 *
14663 * DESCRIPTION: Whether the device is in a region with 60 Hz mains electricity frequency
14664 *
14665 * PARAMETERS : None
14666 *
14667 * RETURN : True if in 60Hz zone, False otherwise
14668 *==========================================================================*/
14669bool QCamera3HardwareInterface::is60HzZone()
14670{
14671 time_t t = time(NULL);
14672 struct tm lt;
14673
14674 struct tm* r = localtime_r(&t, &lt);
14675
14676 if (r == NULL || lt.tm_gmtoff <= -2*60*60 || lt.tm_gmtoff >= 8*60*60)
14677 return true;
14678 else
14679 return false;
14680}
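/* Illustrative sketch (not part of the original HAL): what is60HzZone() returns for a few
 * UTC offsets (tm_gmtoff is in seconds). Offsets of -2 h or less, or +8 h or more, are
 * treated as 60 Hz regions; offsets in between are treated as 50 Hz regions.
 *
 *   // tm_gmtoff = -8 * 60 * 60  (UTC-8)  -> true  (60 Hz)
 *   // tm_gmtoff = +1 * 60 * 60  (UTC+1)  -> false (50 Hz)
 *   // tm_gmtoff = +9 * 60 * 60  (UTC+9)  -> true  (60 Hz)
 */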
Shuzhen Wanga5da1022016-07-13 20:18:42 -070014681
14682/*===========================================================================
14683 * FUNCTION : adjustBlackLevelForCFA
14684 *
14685 * DESCRIPTION: Reorder the black level pattern from RGGB order to the order of
14686 *              the sensor's Bayer CFA (Color Filter Array).
14687 *
14688 * PARAMETERS : @input: black level pattern in the order of RGGB
14689 * @output: black level pattern in the order of CFA
14690 * @color_arrangement: CFA color arrangement
14691 *
14692 * RETURN : None
14693 *==========================================================================*/
14694template<typename T>
14695void QCamera3HardwareInterface::adjustBlackLevelForCFA(
14696 T input[BLACK_LEVEL_PATTERN_CNT],
14697 T output[BLACK_LEVEL_PATTERN_CNT],
14698 cam_color_filter_arrangement_t color_arrangement)
14699{
14700 switch (color_arrangement) {
14701 case CAM_FILTER_ARRANGEMENT_GRBG:
14702 output[0] = input[1];
14703 output[1] = input[0];
14704 output[2] = input[3];
14705 output[3] = input[2];
14706 break;
14707 case CAM_FILTER_ARRANGEMENT_GBRG:
14708 output[0] = input[2];
14709 output[1] = input[3];
14710 output[2] = input[0];
14711 output[3] = input[1];
14712 break;
14713 case CAM_FILTER_ARRANGEMENT_BGGR:
14714 output[0] = input[3];
14715 output[1] = input[2];
14716 output[2] = input[1];
14717 output[3] = input[0];
14718 break;
14719 case CAM_FILTER_ARRANGEMENT_RGGB:
14720 output[0] = input[0];
14721 output[1] = input[1];
14722 output[2] = input[2];
14723 output[3] = input[3];
14724 break;
14725 default:
14726 LOGE("Invalid color arrangement to derive dynamic blacklevel");
14727 break;
14728 }
14729}
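/* Illustrative sketch (not part of the original HAL): reordering an RGGB black-level
 * pattern for a GRBG sensor. The input values are hypothetical.
 *
 *   // input  (RGGB order):             { R, Gr, Gb, B } = { 64, 65, 66, 67 }
 *   float in[BLACK_LEVEL_PATTERN_CNT]  = { 64, 65, 66, 67 };
 *   float out[BLACK_LEVEL_PATTERN_CNT] = {};
 *   adjustBlackLevelForCFA(in, out, CAM_FILTER_ARRANGEMENT_GRBG);
 *   // output (GRBG CFA order):         { Gr, R, B, Gb } = { 65, 64, 67, 66 }
 */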
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014730
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014731void QCamera3HardwareInterface::updateHdrPlusResultMetadata(
14732 CameraMetadata &resultMetadata,
14733 std::shared_ptr<metadata_buffer_t> settings)
14734{
14735 if (settings == nullptr) {
14736 ALOGE("%s: settings is nullptr.", __FUNCTION__);
14737 return;
14738 }
14739
14740 IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, settings) {
14741 resultMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
14742 }
14743
14744 IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, settings) {
14745 String8 str((const char *)gps_methods);
14746 resultMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
14747 }
14748
14749 IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, settings) {
14750 resultMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
14751 }
14752
14753 IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, settings) {
14754 resultMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
14755 }
14756
14757 IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, settings) {
14758 uint8_t fwk_jpeg_quality = static_cast<uint8_t>(*jpeg_quality);
14759 resultMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
14760 }
14761
14762 IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, settings) {
14763 uint8_t fwk_thumb_quality = static_cast<uint8_t>(*thumb_quality);
14764 resultMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
14765 }
14766
14767 IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, settings) {
14768 int32_t fwk_thumb_size[2];
14769 fwk_thumb_size[0] = thumb_size->width;
14770 fwk_thumb_size[1] = thumb_size->height;
14771 resultMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
14772 }
14773
14774 IF_META_AVAILABLE(uint32_t, intent, CAM_INTF_META_CAPTURE_INTENT, settings) {
14775 uint8_t fwk_intent = intent[0];
14776 resultMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &fwk_intent, 1);
14777 }
14778}
14779
Chien-Yu Chenad9b6632017-08-22 19:09:23 -070014780bool QCamera3HardwareInterface::isRequestHdrPlusCompatible(
14781 const camera3_capture_request_t &request, const CameraMetadata &metadata) {
Chien-Yu Chenec328c82017-08-30 16:41:08 -070014782 if (metadata.exists(NEXUS_EXPERIMENTAL_2017_DISABLE_HDRPLUS) &&
14783 metadata.find(NEXUS_EXPERIMENTAL_2017_DISABLE_HDRPLUS).data.i32[0] == 1) {
14784 ALOGV("%s: NEXUS_EXPERIMENTAL_2017_DISABLE_HDRPLUS is 1", __FUNCTION__);
14785 return false;
14786 }
14787
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014788 if (!metadata.exists(ANDROID_NOISE_REDUCTION_MODE) ||
14789 metadata.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0] !=
14790 ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY) {
Chien-Yu Chenad9b6632017-08-22 19:09:23 -070014791 ALOGV("%s: ANDROID_NOISE_REDUCTION_MODE is not HQ: %d", __FUNCTION__,
Chien-Yu Chenee335912017-02-09 17:53:20 -080014792 metadata.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0]);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014793 return false;
14794 }
14795
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014796 if (!metadata.exists(ANDROID_EDGE_MODE) ||
Chien-Yu Chenad9b6632017-08-22 19:09:23 -070014797 metadata.find(ANDROID_EDGE_MODE).data.u8[0] != ANDROID_EDGE_MODE_HIGH_QUALITY) {
14798 ALOGV("%s: ANDROID_EDGE_MODE is not HQ.", __FUNCTION__);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014799 return false;
14800 }
14801
Chien-Yu Chenad9b6632017-08-22 19:09:23 -070014802 if (!metadata.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE) ||
14803 metadata.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0] !=
14804 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY) {
14805 ALOGV("%s: ANDROID_COLOR_CORRECTION_ABERRATION_MODE is not HQ.", __FUNCTION__);
14806 return false;
14807 }
14808
14809 if (!metadata.exists(ANDROID_CONTROL_AE_MODE) ||
14810 (metadata.find(ANDROID_CONTROL_AE_MODE).data.u8[0] != ANDROID_CONTROL_AE_MODE_ON &&
14811 metadata.find(ANDROID_CONTROL_AE_MODE).data.u8[0] !=
14812 ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH)) {
14813 ALOGV("%s: ANDROID_CONTROL_AE_MODE is not ON or ON_AUTO_FLASH.", __FUNCTION__);
14814 return false;
14815 }
14816
14817 if (!metadata.exists(ANDROID_CONTROL_AWB_MODE) ||
14818 metadata.find(ANDROID_CONTROL_AWB_MODE).data.u8[0] != ANDROID_CONTROL_AWB_MODE_AUTO) {
14819 ALOGV("%s: ANDROID_CONTROL_AWB_MODE is not AUTO.", __FUNCTION__);
14820 return false;
14821 }
14822
14823 if (!metadata.exists(ANDROID_CONTROL_EFFECT_MODE) ||
14824 metadata.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0] !=
14825 ANDROID_CONTROL_EFFECT_MODE_OFF) {
14826 ALOGV("%s: ANDROID_CONTROL_EFFECT_MODE_OFF is not OFF.", __FUNCTION__);
14827 return false;
14828 }
14829
14830 if (!metadata.exists(ANDROID_CONTROL_MODE) ||
14831 (metadata.find(ANDROID_CONTROL_MODE).data.u8[0] != ANDROID_CONTROL_MODE_AUTO &&
14832 metadata.find(ANDROID_CONTROL_MODE).data.u8[0] !=
14833 ANDROID_CONTROL_MODE_USE_SCENE_MODE)) {
14834 ALOGV("%s: ANDROID_CONTROL_MODE is not AUTO or USE_SCENE_MODE.", __FUNCTION__);
14835 return false;
14836 }
14837
14838 // TODO (b/32585046): support non-ZSL.
14839 if (!metadata.exists(ANDROID_CONTROL_ENABLE_ZSL) ||
14840 metadata.find(ANDROID_CONTROL_ENABLE_ZSL).data.u8[0] != ANDROID_CONTROL_ENABLE_ZSL_TRUE) {
14841 ALOGV("%s: ANDROID_CONTROL_ENABLE_ZSL is not true.", __FUNCTION__);
14842 return false;
14843 }
14844
14845 // TODO (b/32586081): support flash.
14846 if (!metadata.exists(ANDROID_FLASH_MODE) ||
14847 metadata.find(ANDROID_FLASH_MODE).data.u8[0] != ANDROID_FLASH_MODE_OFF) {
14848 ALOGV("%s: ANDROID_FLASH_MODE is not OFF.", __FUNCTION__);
14849 return false;
14850 }
14851
14852 // TODO (b/36492953): support digital zoom.
14853 if (!metadata.exists(ANDROID_SCALER_CROP_REGION) ||
14854 metadata.find(ANDROID_SCALER_CROP_REGION).data.i32[0] != 0 ||
14855 metadata.find(ANDROID_SCALER_CROP_REGION).data.i32[1] != 0 ||
14856 metadata.find(ANDROID_SCALER_CROP_REGION).data.i32[2] !=
14857 gCamCapability[mCameraId]->active_array_size.width ||
14858 metadata.find(ANDROID_SCALER_CROP_REGION).data.i32[3] !=
14859 gCamCapability[mCameraId]->active_array_size.height) {
14860 ALOGV("%s: ANDROID_SCALER_CROP_REGION is not the same as active array region.",
14861 __FUNCTION__);
14862 return false;
14863 }
14864
14865 if (!metadata.exists(ANDROID_TONEMAP_MODE) ||
14866 metadata.find(ANDROID_TONEMAP_MODE).data.u8[0] != ANDROID_TONEMAP_MODE_HIGH_QUALITY) {
14867 ALOGV("%s: ANDROID_TONEMAP_MODE is not HQ.", __FUNCTION__);
14868 return false;
14869 }
14870
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070014871
Chien-Yu Chenad9b6632017-08-22 19:09:23 -070014872 // TODO (b/36693254, b/36690506): support other outputs.
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070014873 if (!gEnableMultipleHdrplusOutputs && request.num_output_buffers != 1) {
14874 ALOGV("%s: Only support 1 output: %d", __FUNCTION__, request.num_output_buffers);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014875 return false;
14876 }
14877
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070014878 switch (request.output_buffers[0].stream->format) {
14879 case HAL_PIXEL_FORMAT_BLOB:
14880 break;
14881 case HAL_PIXEL_FORMAT_YCbCr_420_888:
14882 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
14883 // TODO (b/36693254): Only support full size.
14884 if (!gEnableMultipleHdrplusOutputs) {
14885 if (static_cast<int>(request.output_buffers[0].stream->width) !=
14886 gCamCapability[mCameraId]->picture_sizes_tbl[0].width ||
14887 static_cast<int>(request.output_buffers[0].stream->height) !=
14888 gCamCapability[mCameraId]->picture_sizes_tbl[0].height) {
14889 ALOGV("%s: Only full size is supported.", __FUNCTION__);
14890 return false;
14891 }
14892 }
14893 break;
14894 default:
14895 ALOGV("%s: Not an HDR+ request: Only Jpeg and YUV output is supported.", __FUNCTION__);
14896 for (uint32_t i = 0; i < request.num_output_buffers; i++) {
14897 ALOGV("%s: output_buffers[%u]: %dx%d format %d", __FUNCTION__, i,
14898 request.output_buffers[0].stream->width,
14899 request.output_buffers[0].stream->height,
14900 request.output_buffers[0].stream->format);
14901 }
14902 return false;
14903 }
14904
Chien-Yu Chenad9b6632017-08-22 19:09:23 -070014905 return true;
14906}
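/* Illustrative sketch (not part of the original HAL): the request settings that
 * isRequestHdrPlusCompatible() expects, collected in one place. Beyond these, a request
 * also needs ZSL enabled, flash off, a crop region equal to the active array, and
 * (unless multiple HDR+ outputs are enabled) a single JPEG or full-size YUV output.
 *
 *   CameraMetadata settings;
 *   uint8_t nr   = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY;
 *   uint8_t edge = ANDROID_EDGE_MODE_HIGH_QUALITY;
 *   uint8_t cac  = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY;
 *   uint8_t ae   = ANDROID_CONTROL_AE_MODE_ON;
 *   uint8_t awb  = ANDROID_CONTROL_AWB_MODE_AUTO;
 *   uint8_t fx   = ANDROID_CONTROL_EFFECT_MODE_OFF;
 *   uint8_t mode = ANDROID_CONTROL_MODE_AUTO;
 *   uint8_t tm   = ANDROID_TONEMAP_MODE_HIGH_QUALITY;
 *   settings.update(ANDROID_NOISE_REDUCTION_MODE, &nr, 1);
 *   settings.update(ANDROID_EDGE_MODE, &edge, 1);
 *   settings.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &cac, 1);
 *   settings.update(ANDROID_CONTROL_AE_MODE, &ae, 1);
 *   settings.update(ANDROID_CONTROL_AWB_MODE, &awb, 1);
 *   settings.update(ANDROID_CONTROL_EFFECT_MODE, &fx, 1);
 *   settings.update(ANDROID_CONTROL_MODE, &mode, 1);
 *   settings.update(ANDROID_TONEMAP_MODE, &tm, 1);
 */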
14907
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070014908void QCamera3HardwareInterface::abortPendingHdrplusRequest(HdrPlusPendingRequest *hdrPlusRequest) {
14909 if (hdrPlusRequest == nullptr) return;
14910
14911 for (auto & outputBufferIter : hdrPlusRequest->outputBuffers) {
14912 // Find the stream for this buffer.
14913 for (auto streamInfo : mStreamInfo) {
14914 if (streamInfo->id == outputBufferIter.first) {
14915 if (streamInfo->channel == mPictureChannel) {
14916 // For picture channel, this buffer is internally allocated so return this
14917 // buffer to picture channel.
14918 mPictureChannel->returnYuvBuffer(outputBufferIter.second.get());
14919 } else {
14920 // Unregister this buffer for other channels.
14921 streamInfo->channel->unregisterBuffer(outputBufferIter.second.get());
14922 }
14923 break;
14924 }
14925 }
14926 }
14927
14928 hdrPlusRequest->outputBuffers.clear();
14929 hdrPlusRequest->frameworkOutputBuffers.clear();
14930}
14931
Chien-Yu Chenad9b6632017-08-22 19:09:23 -070014932bool QCamera3HardwareInterface::trySubmittingHdrPlusRequestLocked(
14933 HdrPlusPendingRequest *hdrPlusRequest, const camera3_capture_request_t &request,
14934 const CameraMetadata &metadata)
14935{
14936 if (hdrPlusRequest == nullptr) return false;
14937 if (!isRequestHdrPlusCompatible(request, metadata)) return false;
14938
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070014939 status_t res = OK;
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014940 pbcamera::CaptureRequest pbRequest;
14941 pbRequest.id = request.frame_number;
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070014942 // Iterate through all requested output buffers and add them to an HDR+ request.
14943 for (uint32_t i = 0; i < request.num_output_buffers; i++) {
14944 // Find the index of the stream in mStreamInfo.
14945 uint32_t pbStreamId = 0;
14946 bool found = false;
14947 for (auto streamInfo : mStreamInfo) {
14948 if (streamInfo->stream == request.output_buffers[i].stream) {
14949 pbStreamId = streamInfo->id;
14950 found = true;
14951 break;
14952 }
14953 }
14954
14955 if (!found) {
14956 ALOGE("%s: requested stream was not configured.", __FUNCTION__);
14957 abortPendingHdrplusRequest(hdrPlusRequest);
14958 return false;
14959 }
14960 auto outBuffer = std::make_shared<mm_camera_buf_def_t>();
14961 switch (request.output_buffers[i].stream->format) {
14962 case HAL_PIXEL_FORMAT_BLOB:
14963 {
14964 // For jpeg output, get a YUV buffer from pic channel.
14965 QCamera3PicChannel *picChannel =
14966 (QCamera3PicChannel*)request.output_buffers[i].stream->priv;
14967 res = picChannel->getYuvBufferForRequest(outBuffer.get(), request.frame_number);
14968 if (res != OK) {
14969 ALOGE("%s: Getting an available YUV buffer from pic channel failed: %s (%d)",
14970 __FUNCTION__, strerror(-res), res);
14971 abortPendingHdrplusRequest(hdrPlusRequest);
14972 return false;
14973 }
14974 break;
14975 }
14976 case HAL_PIXEL_FORMAT_YCbCr_420_888:
14977 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
14978 {
14979 // For YUV output, register the buffer and get the buffer def from the channel.
14980 QCamera3ProcessingChannel *channel =
14981 (QCamera3ProcessingChannel*)request.output_buffers[i].stream->priv;
14982 res = channel->registerBufferAndGetBufDef(request.output_buffers[i].buffer,
14983 outBuffer.get());
14984 if (res != OK) {
14985 ALOGE("%s: Getting the buffer def failed: %s (%d)", __FUNCTION__,
14986 strerror(-res), res);
14987 abortPendingHdrplusRequest(hdrPlusRequest);
14988 return false;
14989 }
14990 break;
14991 }
14992 default:
14993 abortPendingHdrplusRequest(hdrPlusRequest);
14994 return false;
14995 }
14996
14997 pbcamera::StreamBuffer buffer;
14998 buffer.streamId = pbStreamId;
14999 buffer.dmaBufFd = outBuffer->fd;
15000 buffer.data = outBuffer->fd == -1 ? outBuffer->buffer : nullptr;
15001 buffer.dataSize = outBuffer->frame_len;
15002
15003 pbRequest.outputBuffers.push_back(buffer);
15004
15005 hdrPlusRequest->outputBuffers.emplace(pbStreamId, outBuffer);
15006 hdrPlusRequest->frameworkOutputBuffers.emplace(pbStreamId, request.output_buffers[i]);
15007 }
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080015008
15009 // Submit an HDR+ capture request to HDR+ service.
Chien-Yu Chen17cec362017-07-05 17:10:31 -070015010 res = gHdrPlusClient->submitCaptureRequest(&pbRequest, metadata);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080015011 if (res != OK) {
15012 ALOGE("%s: %d: Submitting a capture request failed: %s (%d)", __FUNCTION__, __LINE__,
15013 strerror(-res), res);
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070015014 abortPendingHdrplusRequest(hdrPlusRequest);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080015015 return false;
15016 }
15017
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080015018 return true;
15019}
15020
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070015021status_t QCamera3HardwareInterface::openHdrPlusClientAsyncLocked()
15022{
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015023 if (gHdrPlusClientOpening || gHdrPlusClient != nullptr) {
15024 return OK;
15025 }
15026
Chien-Yu Chend77a5462017-06-02 18:00:38 -070015027 status_t res = gEaselManagerClient->openHdrPlusClientAsync(this);
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015028 if (res != OK) {
15029 ALOGE("%s: Opening HDR+ client asynchronously failed: %s (%d)", __FUNCTION__,
15030 strerror(-res), res);
15031 return res;
15032 }
15033 gHdrPlusClientOpening = true;
15034
15035 return OK;
15036}
15037
Chien-Yu Chenee335912017-02-09 17:53:20 -080015038status_t QCamera3HardwareInterface::enableHdrPlusModeLocked()
15039{
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070015040 status_t res;
Chien-Yu Chenee335912017-02-09 17:53:20 -080015041
Chien-Yu Chena6c99062017-05-23 13:45:06 -070015042 if (mHdrPlusModeEnabled) {
15043 return OK;
15044 }
15045
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015046 // Check if gHdrPlusClient is opened or being opened.
15047 if (gHdrPlusClient == nullptr) {
15048 if (gHdrPlusClientOpening) {
15049 // HDR+ client is being opened. HDR+ mode will be enabled when it's opened.
15050 return OK;
15051 }
15052
15053 res = openHdrPlusClientAsyncLocked();
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070015054 if (res != OK) {
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015055 ALOGE("%s: Failed to open HDR+ client asynchronously: %s (%d).", __FUNCTION__,
15056 strerror(-res), res);
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070015057 return res;
15058 }
15059
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015060 // When opening HDR+ client completes, HDR+ mode will be enabled.
15061 return OK;
15062
Chien-Yu Chenee335912017-02-09 17:53:20 -080015063 }
15064
15065 // Configure stream for HDR+.
15066 res = configureHdrPlusStreamsLocked();
15067 if (res != OK) {
15068 LOGE("%s: Failed to configure HDR+ streams: %s (%d)", __FUNCTION__, strerror(-res), res);
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070015069 return res;
15070 }
15071
15072 // Enable HDR+ mode so Easel will start capturing ZSL raw buffers.
15073 res = gHdrPlusClient->setZslHdrPlusMode(true);
15074 if (res != OK) {
15075 LOGE("%s: Failed to enable HDR+ mode: %s (%d)", __FUNCTION__, strerror(-res), res);
Chien-Yu Chenee335912017-02-09 17:53:20 -080015076 return res;
15077 }
15078
15079 mHdrPlusModeEnabled = true;
15080 ALOGD("%s: HDR+ mode enabled", __FUNCTION__);
15081
15082 return OK;
15083}
15084
Chien-Yu Chen77ccd022017-06-23 12:00:36 -070015085void QCamera3HardwareInterface::finishHdrPlusClientOpeningLocked(std::unique_lock<std::mutex> &lock)
15086{
15087 if (gHdrPlusClientOpening) {
15088 gHdrPlusClientOpenCond.wait(lock, [&] { return !gHdrPlusClientOpening; });
15089 }
15090}
15091
Chien-Yu Chenee335912017-02-09 17:53:20 -080015092void QCamera3HardwareInterface::disableHdrPlusModeLocked()
15093{
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070015094 // Disable HDR+ mode.
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080015095 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070015096 status_t res = gHdrPlusClient->setZslHdrPlusMode(false);
15097 if (res != OK) {
15098 ALOGE("%s: Failed to disable HDR+ mode: %s (%d)", __FUNCTION__, strerror(-res), res);
15099 }
Chien-Yu Chen3b17c672017-04-24 12:49:52 -070015100
15101 // Close HDR+ client so Easel can enter low power mode.
Chien-Yu Chend77a5462017-06-02 18:00:38 -070015102 gEaselManagerClient->closeHdrPlusClient(std::move(gHdrPlusClient));
Chien-Yu Chen3b17c672017-04-24 12:49:52 -070015103 gHdrPlusClient = nullptr;
Chien-Yu Chenee335912017-02-09 17:53:20 -080015104 }
15105
15106 mHdrPlusModeEnabled = false;
15107 ALOGD("%s: HDR+ mode disabled", __FUNCTION__);
15108}
15109
Chien-Yu Chendeaebad2017-06-30 11:46:34 -070015110bool QCamera3HardwareInterface::isSessionHdrPlusModeCompatible()
15111{
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070015112 // Check that at least one YUV or one JPEG output is configured.
15113 // TODO: Support RAW (b/36690506)
15114 for (auto streamInfo : mStreamInfo) {
15115 if (streamInfo != nullptr && streamInfo->stream != nullptr) {
15116 if (streamInfo->stream->stream_type == CAMERA3_STREAM_OUTPUT &&
15117 (streamInfo->stream->format == HAL_PIXEL_FORMAT_BLOB ||
15118 streamInfo->stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888 ||
15119 streamInfo->stream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED)) {
15120 return true;
15121 }
15122 }
Chien-Yu Chendeaebad2017-06-30 11:46:34 -070015123 }
15124
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070015125 return false;
Chien-Yu Chendeaebad2017-06-30 11:46:34 -070015126}
15127
Chien-Yu Chenee335912017-02-09 17:53:20 -080015128status_t QCamera3HardwareInterface::configureHdrPlusStreamsLocked()
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080015129{
15130 pbcamera::InputConfiguration inputConfig;
15131 std::vector<pbcamera::StreamConfiguration> outputStreamConfigs;
15132 status_t res = OK;
15133
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070015134 // Sensor MIPI will send data to Easel.
15135 inputConfig.isSensorInput = true;
15136 inputConfig.sensorMode.cameraId = mCameraId;
15137 inputConfig.sensorMode.pixelArrayWidth = mSensorModeInfo.pixel_array_size.width;
15138 inputConfig.sensorMode.pixelArrayHeight = mSensorModeInfo.pixel_array_size.height;
15139 inputConfig.sensorMode.activeArrayWidth = mSensorModeInfo.active_array_size.width;
15140 inputConfig.sensorMode.activeArrayHeight = mSensorModeInfo.active_array_size.height;
15141 inputConfig.sensorMode.outputPixelClkHz = mSensorModeInfo.op_pixel_clk;
15142 inputConfig.sensorMode.timestampOffsetNs = mSensorModeInfo.timestamp_offset;
15143 if (mSensorModeInfo.num_raw_bits != 10) {
15144 ALOGE("%s: Only RAW10 is supported but this sensor mode has %d raw bits.", __FUNCTION__,
15145 mSensorModeInfo.num_raw_bits);
15146 return BAD_VALUE;
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080015147 }
15148
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070015149 inputConfig.sensorMode.format = HAL_PIXEL_FORMAT_RAW10;
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080015150
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070015151 // Iterate through configured output streams in HAL and configure those streams in HDR+
15152 // service.
15153 for (auto streamInfo : mStreamInfo) {
15154 pbcamera::StreamConfiguration outputConfig;
15155 if (streamInfo->stream->stream_type == CAMERA3_STREAM_OUTPUT) {
15156 switch (streamInfo->stream->format) {
15157 case HAL_PIXEL_FORMAT_BLOB:
15158 case HAL_PIXEL_FORMAT_YCbCr_420_888:
15159 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
15160 res = fillPbStreamConfig(&outputConfig, streamInfo->id,
15161 streamInfo->channel, /*stream index*/0);
15162 if (res != OK) {
15163 LOGE("%s: Failed to get fill stream config for YUV stream: %s (%d)",
15164 __FUNCTION__, strerror(-res), res);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080015165
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070015166 return res;
15167 }
15168
15169 outputStreamConfigs.push_back(outputConfig);
15170 break;
15171 default:
15172 // TODO: handle RAW16 outputs if mRawChannel was created. (b/36690506)
15173 break;
15174 }
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080015175 }
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080015176 }
15177
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080015178 res = gHdrPlusClient->configureStreams(inputConfig, outputStreamConfigs);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080015179 if (res != OK) {
15180 LOGE("%d: Failed to configure streams with HDR+ client: %s (%d)", __FUNCTION__,
15181 strerror(-res), res);
15182 return res;
15183 }
15184
15185 return OK;
15186}
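/* Illustrative sketch (not part of the original HAL): the shape of the configuration
 * handed to the HDR+ client by configureHdrPlusStreamsLocked(). All numbers are
 * hypothetical; the real values come from mSensorModeInfo and the configured streams.
 *
 *   pbcamera::InputConfiguration in = {};
 *   in.isSensorInput                = true;   // RAW10 frames flow from the sensor to Easel
 *   in.sensorMode.cameraId          = 0;
 *   in.sensorMode.pixelArrayWidth   = 4048;
 *   in.sensorMode.pixelArrayHeight  = 3044;
 *   in.sensorMode.activeArrayWidth  = 4032;
 *   in.sensorMode.activeArrayHeight = 3024;
 *   in.sensorMode.format            = HAL_PIXEL_FORMAT_RAW10;
 *
 *   // One pbcamera::StreamConfiguration per configured JPEG/YUV output, filled by
 *   // fillPbStreamConfig(), is pushed into outputStreamConfigs before calling
 *   // gHdrPlusClient->configureStreams(in, outputStreamConfigs).
 */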
15187
Chien-Yu Chen90f1fc12017-07-14 14:31:53 -070015188void QCamera3HardwareInterface::onEaselFatalError(std::string errMsg)
15189{
15190 ALOGE("%s: Got an Easel fatal error: %s", __FUNCTION__, errMsg.c_str());
15191 // Set HAL state to error.
15192 pthread_mutex_lock(&mMutex);
15193 mState = ERROR;
15194 pthread_mutex_unlock(&mMutex);
15195
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -070015196 handleCameraDeviceError(/*stopChannelImmediately*/true);
Chien-Yu Chen90f1fc12017-07-14 14:31:53 -070015197}
15198
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070015199void QCamera3HardwareInterface::onOpened(std::unique_ptr<HdrPlusClient> client)
15200{
Arnd Geis8cbfc182017-09-07 14:46:41 -070015201 int rc = NO_ERROR;
15202
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015203 if (client == nullptr) {
15204 ALOGE("%s: Opened client is null.", __FUNCTION__);
15205 return;
15206 }
15207
Chien-Yu Chene96475e2017-04-11 11:53:26 -070015208 logEaselEvent("EASEL_STARTUP_LATENCY", "HDR+ client opened.");
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015209 ALOGI("%s: HDR+ client opened.", __FUNCTION__);
15210
Chien-Yu Chen77ccd022017-06-23 12:00:36 -070015211 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070015212 if (!gHdrPlusClientOpening) {
15213 ALOGW("%s: HDR+ is disabled while HDR+ client is being opened.", __FUNCTION__);
15214 return;
15215 }
15216
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015217 gHdrPlusClient = std::move(client);
15218 gHdrPlusClientOpening = false;
Chien-Yu Chen77ccd022017-06-23 12:00:36 -070015219 gHdrPlusClientOpenCond.notify_one();
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015220
15221 // Set static metadata.
15222 status_t res = gHdrPlusClient->setStaticMetadata(*gStaticMetadata[mCameraId]);
15223 if (res != OK) {
15224 LOGE("%s: Failed to set static metadata in HDR+ client: %s (%d). Closing HDR+ client.",
15225 __FUNCTION__, strerror(-res), res);
Chien-Yu Chend77a5462017-06-02 18:00:38 -070015226 gEaselManagerClient->closeHdrPlusClient(std::move(gHdrPlusClient));
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015227 gHdrPlusClient = nullptr;
15228 return;
15229 }
15230
15231 // Enable HDR+ mode.
15232 res = enableHdrPlusModeLocked();
15233 if (res != OK) {
15234 LOGE("%s: Failed to configure HDR+ streams.", __FUNCTION__);
15235 }
Arnd Geis8cbfc182017-09-07 14:46:41 -070015236
15237 // Get Easel firmware version
15238 if (EaselManagerClientOpened) {
15239 rc = gEaselManagerClient->getFwVersion(mEaselFwVersion);
15240 if (rc != OK) {
15241 ALOGD("%s: Failed to query Easel firmware version", __FUNCTION__);
15242 } else {
15243 mEaselFwUpdated = true;
15244 }
15245 }
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015246}
15247
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070015248void QCamera3HardwareInterface::onOpenFailed(status_t err)
15249{
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015250 ALOGE("%s: Opening HDR+ client failed: %s (%d)", __FUNCTION__, strerror(-err), err);
Chien-Yu Chen77ccd022017-06-23 12:00:36 -070015251 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015252 gHdrPlusClientOpening = false;
Chien-Yu Chen77ccd022017-06-23 12:00:36 -070015253 gHdrPlusClientOpenCond.notify_one();
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015254}
15255
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070015256void QCamera3HardwareInterface::onFatalError()
15257{
15258 ALOGE("%s: HDR+ client has a fatal error.", __FUNCTION__);
15259
15260 // Set HAL state to error.
15261 pthread_mutex_lock(&mMutex);
15262 mState = ERROR;
15263 pthread_mutex_unlock(&mMutex);
15264
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -070015265 handleCameraDeviceError(/*stopChannelImmediately*/true);
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070015266}
15267
Chien-Yu Chen29fd1d72017-04-27 18:42:09 -070015268void QCamera3HardwareInterface::onShutter(uint32_t requestId, int64_t apSensorTimestampNs)
15269{
15270 ALOGV("%s: %d: Received a shutter for HDR+ request %d timestamp %" PRId64, __FUNCTION__,
15271 __LINE__, requestId, apSensorTimestampNs);
15272
15273 mShutterDispatcher.markShutterReady(requestId, apSensorTimestampNs);
15274}
15275
Chien-Yu Chendaf68892017-08-25 12:56:40 -070015276void QCamera3HardwareInterface::onNextCaptureReady(uint32_t requestId)
15277{
15278 pthread_mutex_lock(&mMutex);
15279
15280 // Find the pending request for this result metadata.
15281 auto requestIter = mPendingRequestsList.begin();
15282 while (requestIter != mPendingRequestsList.end() && requestIter->frame_number != requestId) {
15283 requestIter++;
15284 }
15285
15286 if (requestIter == mPendingRequestsList.end()) {
15287 ALOGE("%s: Cannot find a pending request for frame number %u.", __FUNCTION__, requestId);
15288 pthread_mutex_unlock(&mMutex);
15289 return;
15290 }
15291
15292 requestIter->partial_result_cnt++;
15293
15294 CameraMetadata metadata;
15295 uint8_t ready = true;
15296 metadata.update(NEXUS_EXPERIMENTAL_2017_NEXT_STILL_INTENT_REQUEST_READY, &ready, 1);
15297
15298 // Send it to framework.
15299 camera3_capture_result_t result = {};
15300
15301 result.result = metadata.getAndLock();
15302 // Populate metadata result
15303 result.frame_number = requestId;
15304 result.num_output_buffers = 0;
15305 result.output_buffers = NULL;
15306 result.partial_result = requestIter->partial_result_cnt;
15307
15308 orchestrateResult(&result);
15309 metadata.unlock(result.result);
15310
15311 pthread_mutex_unlock(&mMutex);
15312}
15313
Chien-Yu Chen0a921f92017-08-27 17:25:33 -070015314void QCamera3HardwareInterface::onPostview(uint32_t requestId,
15315 std::unique_ptr<std::vector<uint8_t>> postview, uint32_t width, uint32_t height,
15316 uint32_t stride, int32_t format)
15317{
15318 if (property_get_bool("persist.camera.hdrplus.dump_postview", false)) {
15319 ALOGI("%s: %d: Received a postview %dx%d for HDR+ request %d", __FUNCTION__,
15320 __LINE__, width, height, requestId);
15321 char buf[FILENAME_MAX] = {};
15322 snprintf(buf, sizeof(buf), QCAMERA_DUMP_FRM_LOCATION"postview_%d_%dx%d.ppm",
15323 requestId, width, height);
15324
15325 pbcamera::StreamConfiguration config = {};
15326 config.image.width = width;
15327 config.image.height = height;
15328 config.image.format = format;
15329
15330 pbcamera::PlaneConfiguration plane = {};
15331 plane.stride = stride;
15332 plane.scanline = height;
15333
15334 config.image.planes.push_back(plane);
15335
15336 pbcamera::StreamBuffer buffer = {};
15337 buffer.streamId = 0;
15338 buffer.dmaBufFd = -1;
15339 buffer.data = postview->data();
15340 buffer.dataSize = postview->size();
15341
15342 hdrplus_client_utils::writePpm(buf, config, buffer);
15343 }
15344
15345 pthread_mutex_lock(&mMutex);
15346
15347 // Find the pending request for this result metadata.
15348 auto requestIter = mPendingRequestsList.begin();
15349 while (requestIter != mPendingRequestsList.end() && requestIter->frame_number != requestId) {
15350 requestIter++;
15351 }
15352
15353 if (requestIter == mPendingRequestsList.end()) {
15354 ALOGE("%s: Cannot find a pending request for frame number %u.", __FUNCTION__, requestId);
15355 pthread_mutex_unlock(&mMutex);
15356 return;
15357 }
15358
15359 requestIter->partial_result_cnt++;
15360
15361 CameraMetadata metadata;
15362 int32_t config[3] = {static_cast<int32_t>(width), static_cast<int32_t>(height),
15363 static_cast<int32_t>(stride)};
15364 metadata.update(NEXUS_EXPERIMENTAL_2017_POSTVIEW_CONFIG, config, 3);
15365 metadata.update(NEXUS_EXPERIMENTAL_2017_POSTVIEW_DATA, postview->data(), postview->size());
15366
15367 // Send it to framework.
15368 camera3_capture_result_t result = {};
15369
15370 result.result = metadata.getAndLock();
15371 // Populate metadata result
15372 result.frame_number = requestId;
15373 result.num_output_buffers = 0;
15374 result.output_buffers = NULL;
15375 result.partial_result = requestIter->partial_result_cnt;
15376
15377 orchestrateResult(&result);
15378 metadata.unlock(result.result);
15379
15380 pthread_mutex_unlock(&mMutex);
15381}
15382
Chien-Yu Chene687bd02016-12-07 18:30:26 -080015383void QCamera3HardwareInterface::onCaptureResult(pbcamera::CaptureResult *result,
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070015384 const camera_metadata_t &resultMetadata)
15385{
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070015386 if (result == nullptr) {
15387 ALOGE("%s: result is nullptr.", __FUNCTION__);
15388 return;
15389 }
15390
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070015391 // Find the pending HDR+ request.
15392 HdrPlusPendingRequest pendingRequest;
15393 {
15394 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
15395        auto req = mHdrPlusPendingRequests.find(result->requestId);
        if (req == mHdrPlusPendingRequests.end()) {
            ALOGE("%s: Couldn't find pending request %d", __FUNCTION__, result->requestId);
            return;
        }
15396        pendingRequest = req->second;
15397 }
15398
15399 // Update the result metadata with the settings of the HDR+ still capture request because
15400 // the result metadata belongs to a ZSL buffer.
15401 CameraMetadata metadata;
15402 metadata = &resultMetadata;
15403 updateHdrPlusResultMetadata(metadata, pendingRequest.settings);
15404 camera_metadata_t* updatedResultMetadata = metadata.release();
15405
15406 uint32_t halSnapshotStreamId = 0;
15407 if (mPictureChannel != nullptr) {
15408 halSnapshotStreamId = mPictureChannel->getStreamID(mPictureChannel->getStreamTypeMask());
15409 }
15410
15411 auto halMetadata = std::make_shared<metadata_buffer_t>();
15412 clear_metadata_buffer(halMetadata.get());
15413
15414 // Convert updated result metadata to HAL metadata.
15415 status_t res = translateFwkMetadataToHalMetadata(updatedResultMetadata, halMetadata.get(),
15416 halSnapshotStreamId, /*minFrameDuration*/0);
15417 if (res != 0) {
15418 ALOGE("%s: Translating metadata failed: %s (%d)", __FUNCTION__, strerror(-res), res);
15419 }
15420
15421 for (auto &outputBuffer : result->outputBuffers) {
15422 uint32_t streamId = outputBuffer.streamId;
15423
15424 // Find the framework output buffer in the pending request.
15425 auto frameworkOutputBufferIter = pendingRequest.frameworkOutputBuffers.find(streamId);
15426 if (frameworkOutputBufferIter == pendingRequest.frameworkOutputBuffers.end()) {
15427 ALOGE("%s: Couldn't find framework output buffers for stream id %u", __FUNCTION__,
15428 streamId);
15429 continue;
Chien-Yu Chene687bd02016-12-07 18:30:26 -080015430 }
15431
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070015432 camera3_stream_buffer_t *frameworkOutputBuffer = &frameworkOutputBufferIter->second;
15433
15434 // Find the channel for the output buffer.
15435 QCamera3ProcessingChannel *channel =
15436 (QCamera3ProcessingChannel*)frameworkOutputBuffer->stream->priv;
15437
15438 // Find the output buffer def.
15439 auto outputBufferIter = pendingRequest.outputBuffers.find(streamId);
15440 if (outputBufferIter == pendingRequest.outputBuffers.end()) {
15441 ALOGE("%s: Cannot find output buffer", __FUNCTION__);
15442 continue;
Chien-Yu Chene687bd02016-12-07 18:30:26 -080015443 }
15444
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070015445 std::shared_ptr<mm_camera_buf_def_t> outputBufferDef = outputBufferIter->second;
Chien-Yu Chendaf68892017-08-25 12:56:40 -070015446
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070015447 // Check whether to dump the buffer.
15448 if (frameworkOutputBuffer->stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888 ||
15449 frameworkOutputBuffer->stream->format == HAL_PIXEL_FORMAT_BLOB) {
15450 // If the stream format is YUV or jpeg, check if dumping HDR+ YUV output is enabled.
15451 char prop[PROPERTY_VALUE_MAX];
15452 property_get("persist.camera.hdrplus.dump_yuv", prop, "0");
15453 bool dumpYuvOutput = atoi(prop);
Chien-Yu Chene687bd02016-12-07 18:30:26 -080015454
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070015455 if (dumpYuvOutput) {
15456 // Dump yuv buffer to a ppm file.
15457 pbcamera::StreamConfiguration outputConfig;
15458 status_t rc = fillPbStreamConfig(&outputConfig, streamId,
15459 channel, /*stream index*/0);
15460 if (rc == OK) {
15461 char buf[FILENAME_MAX] = {};
15462 snprintf(buf, sizeof(buf), QCAMERA_DUMP_FRM_LOCATION"s_%d_%d_%dx%d.ppm",
15463 result->requestId, streamId,
15464 outputConfig.image.width, outputConfig.image.height);
Chien-Yu Chen92724a82017-01-06 11:50:30 -080015465
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070015466 hdrplus_client_utils::writePpm(buf, outputConfig, outputBuffer);
15467 } else {
15468 LOGW("%s: Couldn't dump YUV buffer because getting stream config failed: "
15469 "%s (%d).", __FUNCTION__, strerror(-rc), rc);
15470 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -080015471 }
15472 }
15473
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070015474 if (channel == mPictureChannel) {
Chien-Yu Chen92724a82017-01-06 11:50:30 -080015475 // Return the buffer to pic channel for encoding.
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070015476 mPictureChannel->returnYuvBufferAndEncode(outputBufferDef.get(),
15477 frameworkOutputBuffer->buffer, result->requestId,
Chien-Yu Chen92724a82017-01-06 11:50:30 -080015478 halMetadata);
15479 } else {
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070015480 // Return the buffer to camera framework.
Chien-Yu Chen92724a82017-01-06 11:50:30 -080015481 pthread_mutex_lock(&mMutex);
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070015482 handleBufferWithLock(frameworkOutputBuffer, result->requestId);
Chien-Yu Chen92724a82017-01-06 11:50:30 -080015483 pthread_mutex_unlock(&mMutex);
Chien-Yu Chene687bd02016-12-07 18:30:26 -080015484
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070015485 channel->unregisterBuffer(outputBufferDef.get());
Chien-Yu Chene687bd02016-12-07 18:30:26 -080015486 }
15487 }
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070015488
15489 // Send HDR+ metadata to framework.
15490 {
15491 pthread_mutex_lock(&mMutex);
15492
15493 // updatedResultMetadata will be freed in handlePendingResultMetadataWithLock.
15494 handlePendingResultMetadataWithLock(result->requestId, updatedResultMetadata);
15495 pthread_mutex_unlock(&mMutex);
15496 }
15497
15498 // Remove the HDR+ pending request.
15499 {
15500 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
15501 auto req = mHdrPlusPendingRequests.find(result->requestId);
15502 mHdrPlusPendingRequests.erase(req);
15503 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070015504}
15505
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070015506void QCamera3HardwareInterface::onFailedCaptureResult(pbcamera::CaptureResult *failedResult)
15507{
15508 if (failedResult == nullptr) {
15509 ALOGE("%s: Got an empty failed result.", __FUNCTION__);
15510 return;
15511 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -080015512
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070015513 ALOGE("%s: Got a failed HDR+ result for request %d", __FUNCTION__, failedResult->requestId);
Chien-Yu Chene687bd02016-12-07 18:30:26 -080015514
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070015515 // Find the pending HDR+ request.
15516 HdrPlusPendingRequest pendingRequest;
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070015517 {
15518 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070015519 auto req = mHdrPlusPendingRequests.find(failedResult->requestId);
15520 if (req == mHdrPlusPendingRequests.end()) {
15521 ALOGE("%s: Couldn't find pending request %d", __FUNCTION__, failedResult->requestId);
15522 return;
15523 }
15524 pendingRequest = req->second;
15525 }
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070015526
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070015527 for (auto &outputBuffer : failedResult->outputBuffers) {
15528 uint32_t streamId = outputBuffer.streamId;
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070015529
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070015530 // Find the channel
15531 // Find the framework output buffer in the pending request.
        auto frameworkOutputBufferIter = pendingRequest.frameworkOutputBuffers.find(streamId);
        if (frameworkOutputBufferIter == pendingRequest.frameworkOutputBuffers.end()) {
            ALOGE("%s: Couldn't find framework output buffers for stream id %u", __FUNCTION__,
                    streamId);
            continue;
        }

        camera3_stream_buffer_t *frameworkOutputBuffer = &frameworkOutputBufferIter->second;

        // Find the channel for the output buffer.
        QCamera3ProcessingChannel *channel =
                (QCamera3ProcessingChannel*)frameworkOutputBuffer->stream->priv;

        // Find the output buffer def.
        auto outputBufferIter = pendingRequest.outputBuffers.find(streamId);
        if (outputBufferIter == pendingRequest.outputBuffers.end()) {
            ALOGE("%s: Cannot find output buffer", __FUNCTION__);
            continue;
        }

        std::shared_ptr<mm_camera_buf_def_t> outputBufferDef = outputBufferIter->second;

        if (channel == mPictureChannel) {
            // Return the buffer to pic channel.
            mPictureChannel->returnYuvBuffer(outputBufferDef.get());
        } else {
            channel->unregisterBuffer(outputBufferDef.get());
        }
    }

    // Remove the HDR+ pending request.
    {
        Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
        auto req = mHdrPlusPendingRequests.find(failedResult->requestId);
        mHdrPlusPendingRequests.erase(req);
    }

    pthread_mutex_lock(&mMutex);

    // Find the pending buffers.
    auto pendingBuffers = mPendingBuffersMap.mPendingBuffersInRequest.begin();
    while (pendingBuffers != mPendingBuffersMap.mPendingBuffersInRequest.end()) {
        if (pendingBuffers->frame_number == failedResult->requestId) {
            break;
        }
        pendingBuffers++;
    }

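    // Per the camera3 HAL contract, each dropped output buffer is reported with a
    // CAMERA3_MSG_ERROR_BUFFER notification and then returned in a capture result
    // with CAMERA3_BUFFER_STATUS_ERROR so the framework stops waiting for it.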
    // Send out buffer errors for the pending buffers.
    if (pendingBuffers != mPendingBuffersMap.mPendingBuffersInRequest.end()) {
        std::vector<camera3_stream_buffer_t> streamBuffers;
        for (auto &buffer : pendingBuffers->mPendingBufferList) {
            // Prepare a stream buffer.
            camera3_stream_buffer_t streamBuffer = {};
            streamBuffer.stream = buffer.stream;
            streamBuffer.buffer = buffer.buffer;
            streamBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
            streamBuffer.acquire_fence = -1;
            streamBuffer.release_fence = -1;

            streamBuffers.push_back(streamBuffer);

            // Send out error buffer event.
            camera3_notify_msg_t notify_msg = {};
            notify_msg.type = CAMERA3_MSG_ERROR;
            notify_msg.message.error.frame_number = pendingBuffers->frame_number;
            notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
            notify_msg.message.error.error_stream = buffer.stream;

            orchestrateNotify(&notify_msg);
        }

        camera3_capture_result_t result = {};
        result.frame_number = pendingBuffers->frame_number;
        result.num_output_buffers = streamBuffers.size();
        result.output_buffers = &streamBuffers[0];

        // Send out result with buffer errors.
        orchestrateResult(&result);

        // Remove pending buffers.
        mPendingBuffersMap.mPendingBuffersInRequest.erase(pendingBuffers);
    }

    // Remove pending request.
    auto halRequest = mPendingRequestsList.begin();
    while (halRequest != mPendingRequestsList.end()) {
        if (halRequest->frame_number == failedResult->requestId) {
            mPendingRequestsList.erase(halRequest);
            break;
        }
        halRequest++;
    }

    pthread_mutex_unlock(&mMutex);
}

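// ShutterDispatcher delivers CAMERA3_MSG_SHUTTER notifications to the framework in
// frame-number order. Shutters are registered with expectShutter() and flushed by
// markShutterReady(); because entries live in a std::map keyed by frame number,
// dispatch walks ascending frame numbers and stops at the first entry that is not
// ready yet. Regular and reprocess shutters are ordered independently.
//
// Illustrative call pattern (a sketch only; the real call sites are elsewhere in this
// HAL, and the variable names below are hypothetical):
//
//   ShutterDispatcher shutterDispatcher(this);
//   shutterDispatcher.expectShutter(frameNumber, /*isReprocess*/ false); // at request time
//   ...
//   shutterDispatcher.markShutterReady(frameNumber, sensorTimestampNs);  // timestamp known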
ShutterDispatcher::ShutterDispatcher(QCamera3HardwareInterface *parent) :
        mParent(parent) {}

void ShutterDispatcher::expectShutter(uint32_t frameNumber, bool isReprocess)
{
    std::lock_guard<std::mutex> lock(mLock);

    if (isReprocess) {
        mReprocessShutters.emplace(frameNumber, Shutter());
    } else {
        mShutters.emplace(frameNumber, Shutter());
    }
}

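// Marks the shutter for frameNumber as ready and then sends, in ascending frame-number
// order, every consecutive ready shutter from the front of the corresponding map.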
void ShutterDispatcher::markShutterReady(uint32_t frameNumber, uint64_t timestamp)
{
    std::lock_guard<std::mutex> lock(mLock);

    std::map<uint32_t, Shutter> *shutters = nullptr;

    // Find the shutter entry.
    auto shutter = mShutters.find(frameNumber);
    if (shutter == mShutters.end()) {
        shutter = mReprocessShutters.find(frameNumber);
        if (shutter == mReprocessShutters.end()) {
            // Shutter was already sent.
            return;
        }
        shutters = &mReprocessShutters;
    } else {
        shutters = &mShutters;
    }

    // Make this frame's shutter ready.
    shutter->second.ready = true;
    shutter->second.timestamp = timestamp;

    // Iterate through the shutters and send them out until reaching one that's not ready yet.
    shutter = shutters->begin();
    while (shutter != shutters->end()) {
        if (!shutter->second.ready) {
            // If this shutter is not ready, the following shutters can't be sent.
            break;
        }

        camera3_notify_msg_t msg = {};
        msg.type = CAMERA3_MSG_SHUTTER;
        msg.message.shutter.frame_number = shutter->first;
        msg.message.shutter.timestamp = shutter->second.timestamp;
        mParent->orchestrateNotify(&msg);

        shutter = shutters->erase(shutter);
    }
}

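// Drops any pending shutter entry (regular or reprocess) for frameNumber so it no
// longer blocks dispatch of later frames' shutters.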
void ShutterDispatcher::clear(uint32_t frameNumber)
{
    std::lock_guard<std::mutex> lock(mLock);
    mShutters.erase(frameNumber);
    mReprocessShutters.erase(frameNumber);
}

void ShutterDispatcher::clear()
{
    std::lock_guard<std::mutex> lock(mLock);

    // Log errors for stale shutters.
    for (auto &shutter : mShutters) {
        ALOGE("%s: stale shutter: frame number %u, ready %d, timestamp %" PRId64,
                __FUNCTION__, shutter.first, shutter.second.ready,
                shutter.second.timestamp);
    }

    // Log errors for stale reprocess shutters.
    for (auto &shutter : mReprocessShutters) {
        ALOGE("%s: stale reprocess shutter: frame number %u, ready %d, timestamp %" PRId64,
                __FUNCTION__, shutter.first, shutter.second.ready,
                shutter.second.timestamp);
    }

    mShutters.clear();
    mReprocessShutters.clear();
}

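// OutputBufferDispatcher returns filled output buffers to the framework in frame-number
// order, independently for each configured stream. configureStreams() creates an empty
// frame-number -> buffer map per stream, expectBuffer() registers a not-yet-ready entry
// for a request, and markBufferReady() releases every consecutive ready buffer from the
// front of that stream's map.
//
// Illustrative call pattern (a sketch only; actual call sites are elsewhere in this HAL,
// and the variable names below are hypothetical):
//
//   OutputBufferDispatcher bufferDispatcher(this);
//   bufferDispatcher.configureStreams(streamList);
//   bufferDispatcher.expectBuffer(frameNumber, streamBuffer.stream);    // at request time
//   ...
//   bufferDispatcher.markBufferReady(frameNumber, filledStreamBuffer);  // when filled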
OutputBufferDispatcher::OutputBufferDispatcher(QCamera3HardwareInterface *parent) :
        mParent(parent) {}

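// Resets all per-stream buffer tracking and creates an empty frame-number -> buffer map
// for every stream in streamList.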
status_t OutputBufferDispatcher::configureStreams(camera3_stream_configuration_t *streamList)
{
    std::lock_guard<std::mutex> lock(mLock);
    mStreamBuffers.clear();
    if (!streamList) {
        ALOGE("%s: streamList is nullptr.", __FUNCTION__);
        return -EINVAL;
    }

    // Create a "frame-number -> buffer" map for each stream.
    for (uint32_t i = 0; i < streamList->num_streams; i++) {
        mStreamBuffers.emplace(streamList->streams[i], std::map<uint32_t, Buffer>());
    }

    return OK;
}

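// Registers a not-yet-ready buffer entry for frameNumber on stream. Until this entry is
// marked ready, markBufferReady() will not release buffers of later frames on the same
// stream.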
status_t OutputBufferDispatcher::expectBuffer(uint32_t frameNumber, camera3_stream_t *stream)
{
    std::lock_guard<std::mutex> lock(mLock);

    // Find the "frame-number -> buffer" map for the stream.
    auto buffers = mStreamBuffers.find(stream);
    if (buffers == mStreamBuffers.end()) {
        ALOGE("%s: Stream %p was not configured.", __FUNCTION__, stream);
        return -EINVAL;
    }

    // Create an unready buffer for this frame number.
    buffers->second.emplace(frameNumber, Buffer());
    return OK;
}

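// Marks the buffer for frameNumber on buffer.stream as ready and then sends, in ascending
// frame-number order, every consecutive ready buffer from the front of that stream's map.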
void OutputBufferDispatcher::markBufferReady(uint32_t frameNumber,
        const camera3_stream_buffer_t &buffer)
{
    std::lock_guard<std::mutex> lock(mLock);

    // Find the "frame-number -> buffer" map for the stream.
    auto buffers = mStreamBuffers.find(buffer.stream);
    if (buffers == mStreamBuffers.end()) {
        ALOGE("%s: Cannot find pending buffers for stream %p.", __FUNCTION__, buffer.stream);
        return;
    }

    // Find the unready buffer for this frame number and mark it ready.
    auto pendingBuffer = buffers->second.find(frameNumber);
    if (pendingBuffer == buffers->second.end()) {
        ALOGE("%s: Cannot find the pending buffer for frame number %u.", __FUNCTION__, frameNumber);
        return;
    }

    pendingBuffer->second.ready = true;
    pendingBuffer->second.buffer = buffer;

    // Iterate through the buffers and send them out until reaching one that's not ready yet.
    pendingBuffer = buffers->second.begin();
    while (pendingBuffer != buffers->second.end()) {
        if (!pendingBuffer->second.ready) {
            // If this buffer is not ready, the following buffers can't be sent.
            break;
        }

        camera3_capture_result_t result = {};
        result.frame_number = pendingBuffer->first;
        result.num_output_buffers = 1;
        result.output_buffers = &pendingBuffer->second.buffer;

        // Send out the result carrying this ready buffer.
        mParent->orchestrateResult(&result);

        pendingBuffer = buffers->second.erase(pendingBuffer);
    }
}

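// Logs and drops any buffers that were expected but never marked ready. When
// clearConfiguredStreams is true, the per-stream maps are removed as well, so streams
// must be configured again before new buffers can be expected.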
void OutputBufferDispatcher::clear(bool clearConfiguredStreams)
{
    std::lock_guard<std::mutex> lock(mLock);

    // Log errors for stale buffers.
    for (auto &buffers : mStreamBuffers) {
        for (auto &buffer : buffers.second) {
            ALOGE("%s: stale buffer: stream %p, frame number %u, ready %d",
                    __FUNCTION__, buffers.first, buffer.first, buffer.second.ready);
        }
        buffers.second.clear();
    }

    if (clearConfiguredStreams) {
        mStreamBuffers.clear();
    }
}

}; //end namespace qcamera