/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*     * Redistributions of source code must retain the above copyright
*       notice, this list of conditions and the following disclaimer.
*     * Redistributions in binary form must reproduce the above
*       copyright notice, this list of conditions and the following
*       disclaimer in the documentation and/or other materials provided
*       with the distribution.
*     * Neither the name of The Linux Foundation nor the names of its
*       contributors may be used to endorse or promote products derived
*       from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
*/

#define LOG_TAG "QCamera3HWI"
//#define LOG_NDEBUG 0

#define __STDC_LIMIT_MACROS

// To remove
#include <cutils/properties.h>

// System dependencies
#include <dlfcn.h>
#include <fcntl.h>
#include <stdio.h>
#include <stdlib.h>
#include "utils/Timers.h"
#include "sys/ioctl.h"
#include <time.h>
#include <sync/sync.h>
#include "gralloc_priv.h"
#include <map>

// Display dependencies
#include "qdMetaData.h"

// Camera dependencies
#include "android/QCamera3External.h"
#include "util/QCameraFlash.h"
#include "QCamera3HWI.h"
#include "QCamera3VendorTags.h"
#include "QCameraTrace.h"

#include "HdrPlusClientUtils.h"

extern "C" {
#include "mm_camera_dbg.h"
}
#include "cam_cond.h"

using ::android::hardware::camera::common::V1_0::helper::CameraMetadata;
using namespace android;

namespace qcamera {

#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )

#define EMPTY_PIPELINE_DELAY 2
// mm_camera has 2 partial results: 3A, and final result.
// HDR+ requests have 3 partial results: postview, next request ready, and final result.
#define PARTIAL_RESULT_COUNT 3
#define FRAME_SKIP_DELAY 0

#define MAX_VALUE_8BIT ((1<<8)-1)
#define MAX_VALUE_10BIT ((1<<10)-1)
#define MAX_VALUE_12BIT ((1<<12)-1)

#define VIDEO_4K_WIDTH  3840
#define VIDEO_4K_HEIGHT 2160

#define MAX_EIS_WIDTH 3840
#define MAX_EIS_HEIGHT 2160

#define MAX_RAW_STREAMS 1
#define MAX_STALLING_STREAMS 1
#define MAX_PROCESSED_STREAMS 3
/* Batch mode is enabled only if FPS set is equal to or greater than this */
#define MIN_FPS_FOR_BATCH_MODE (120)
#define PREVIEW_FPS_FOR_HFR    (30)
#define DEFAULT_VIDEO_FPS      (30.0)
#define TEMPLATE_MAX_PREVIEW_FPS (30.0)
#define MAX_HFR_BATCH_SIZE     (8)
#define REGIONS_TUPLE_COUNT    5
// Threshold for detection of missing request buffers (in seconds)
#define MISSING_REQUEST_BUF_TIMEOUT 5
#define MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT 30
#define FLUSH_TIMEOUT 3
#define METADATA_MAP_SIZE(MAP) (sizeof(MAP)/sizeof(MAP[0]))
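// Illustrative sketch: METADATA_MAP_SIZE bounds linear searches over the QCameraMap
// translation tables defined below. A typical framework-to-HAL lookup would look like the
// following (the field names fwk_name/hal_name are assumed from the QCameraMap template):
//     for (size_t i = 0; i < METADATA_MAP_SIZE(EFFECT_MODES_MAP); i++) {
//         if (EFFECT_MODES_MAP[i].fwk_name == fwk_value) {
//             hal_value = EFFECT_MODES_MAP[i].hal_name;
//             break;
//         }
//     }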

#define CAM_QCOM_FEATURE_PP_SUPERSET_HAL3   ( CAM_QCOM_FEATURE_DENOISE2D |\
                                              CAM_QCOM_FEATURE_CROP |\
                                              CAM_QCOM_FEATURE_ROTATION |\
                                              CAM_QCOM_FEATURE_SHARPNESS |\
                                              CAM_QCOM_FEATURE_SCALE |\
                                              CAM_QCOM_FEATURE_CAC |\
                                              CAM_QCOM_FEATURE_CDS )
/* Per configuration size for static metadata length */
#define PER_CONFIGURATION_SIZE_3 (3)

#define TIMEOUT_NEVER -1

/* Face rect indices */
#define FACE_LEFT   0
#define FACE_TOP    1
#define FACE_RIGHT  2
#define FACE_BOTTOM 3
#define FACE_WEIGHT 4

/* Face landmarks indices */
#define LEFT_EYE_X  0
#define LEFT_EYE_Y  1
#define RIGHT_EYE_X 2
#define RIGHT_EYE_Y 3
#define MOUTH_X     4
#define MOUTH_Y     5
#define TOTAL_LANDMARK_INDICES 6

// Max preferred zoom
#define MAX_PREFERRED_ZOOM_RATIO 7.0

// Whether to check for the GPU stride padding, or use the default
//#define CHECK_GPU_PIXEL_ALIGNMENT

cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS];
extern pthread_mutex_t gCamLock;
volatile uint32_t gCamHal3LogLevel = 1;
extern uint8_t gNumCameraSessions;

// Note that this doesn't support concurrent front and back camera b/35960155.
// The following Easel related variables must be protected by gHdrPlusClientLock.
std::unique_ptr<EaselManagerClient> gEaselManagerClient;
bool EaselManagerClientOpened = false; // If gEaselManagerClient is opened.
std::unique_ptr<HdrPlusClient> gHdrPlusClient = nullptr;
bool gHdrPlusClientOpening = false; // If HDR+ client is being opened.
std::condition_variable gHdrPlusClientOpenCond; // Used to synchronize HDR+ client opening.
bool gEaselProfilingEnabled = false; // If Easel profiling is enabled.
bool gExposeEnableZslKey = false; // If HAL makes android.control.enableZsl available.
bool gEnableMultipleHdrplusOutputs = false; // Whether to enable multiple output from Easel HDR+.

// If Easel is in bypass only mode. If true, Easel HDR+ won't be enabled.
bool gEaselBypassOnly;

std::mutex gHdrPlusClientLock; // Protect above Easel related variables.


const QCamera3HardwareInterface::QCameraPropMap QCamera3HardwareInterface::CDS_MAP [] = {
    {"On",  CAM_CDS_MODE_ON},
    {"Off", CAM_CDS_MODE_OFF},
    {"Auto",CAM_CDS_MODE_AUTO}
};
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_video_hdr_mode_t,
        cam_video_hdr_mode_t> QCamera3HardwareInterface::VIDEO_HDR_MODES_MAP[] = {
    { QCAMERA3_VIDEO_HDR_MODE_OFF, CAM_VIDEO_HDR_MODE_OFF },
    { QCAMERA3_VIDEO_HDR_MODE_ON, CAM_VIDEO_HDR_MODE_ON }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_binning_correction_mode_t,
        cam_binning_correction_mode_t> QCamera3HardwareInterface::BINNING_CORRECTION_MODES_MAP[] = {
    { QCAMERA3_BINNING_CORRECTION_MODE_OFF, CAM_BINNING_CORRECTION_MODE_OFF },
    { QCAMERA3_BINNING_CORRECTION_MODE_ON, CAM_BINNING_CORRECTION_MODE_ON }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_ir_mode_t,
        cam_ir_mode_type_t> QCamera3HardwareInterface::IR_MODES_MAP [] = {
    {QCAMERA3_IR_MODE_OFF, CAM_IR_MODE_OFF},
    {QCAMERA3_IR_MODE_ON, CAM_IR_MODE_ON},
    {QCAMERA3_IR_MODE_AUTO, CAM_IR_MODE_AUTO}
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_effect_mode_t,
        cam_effect_mode_type> QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
    { ANDROID_CONTROL_EFFECT_MODE_OFF, CAM_EFFECT_MODE_OFF },
    { ANDROID_CONTROL_EFFECT_MODE_MONO, CAM_EFFECT_MODE_MONO },
    { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE, CAM_EFFECT_MODE_NEGATIVE },
    { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE, CAM_EFFECT_MODE_SOLARIZE },
    { ANDROID_CONTROL_EFFECT_MODE_SEPIA, CAM_EFFECT_MODE_SEPIA },
    { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE, CAM_EFFECT_MODE_POSTERIZE },
    { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_AQUA, CAM_EFFECT_MODE_AQUA }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_awb_mode_t,
        cam_wb_mode_type> QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
    { ANDROID_CONTROL_AWB_MODE_OFF, CAM_WB_MODE_OFF },
    { ANDROID_CONTROL_AWB_MODE_AUTO, CAM_WB_MODE_AUTO },
    { ANDROID_CONTROL_AWB_MODE_INCANDESCENT, CAM_WB_MODE_INCANDESCENT },
    { ANDROID_CONTROL_AWB_MODE_FLUORESCENT, CAM_WB_MODE_FLUORESCENT },
    { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT,CAM_WB_MODE_WARM_FLUORESCENT},
    { ANDROID_CONTROL_AWB_MODE_DAYLIGHT, CAM_WB_MODE_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_TWILIGHT, CAM_WB_MODE_TWILIGHT },
    { ANDROID_CONTROL_AWB_MODE_SHADE, CAM_WB_MODE_SHADE }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_scene_mode_t,
        cam_scene_mode_type> QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
    { ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY, CAM_SCENE_MODE_FACE_PRIORITY },
    { ANDROID_CONTROL_SCENE_MODE_ACTION, CAM_SCENE_MODE_ACTION },
    { ANDROID_CONTROL_SCENE_MODE_PORTRAIT, CAM_SCENE_MODE_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE, CAM_SCENE_MODE_LANDSCAPE },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT, CAM_SCENE_MODE_NIGHT },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_THEATRE, CAM_SCENE_MODE_THEATRE },
    { ANDROID_CONTROL_SCENE_MODE_BEACH, CAM_SCENE_MODE_BEACH },
    { ANDROID_CONTROL_SCENE_MODE_SNOW, CAM_SCENE_MODE_SNOW },
    { ANDROID_CONTROL_SCENE_MODE_SUNSET, CAM_SCENE_MODE_SUNSET },
    { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO, CAM_SCENE_MODE_ANTISHAKE },
    { ANDROID_CONTROL_SCENE_MODE_FIREWORKS , CAM_SCENE_MODE_FIREWORKS },
    { ANDROID_CONTROL_SCENE_MODE_SPORTS , CAM_SCENE_MODE_SPORTS },
    { ANDROID_CONTROL_SCENE_MODE_PARTY, CAM_SCENE_MODE_PARTY },
    { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT, CAM_SCENE_MODE_CANDLELIGHT },
    { ANDROID_CONTROL_SCENE_MODE_BARCODE, CAM_SCENE_MODE_BARCODE},
    { ANDROID_CONTROL_SCENE_MODE_HDR, CAM_SCENE_MODE_HDR}
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_af_mode_t,
        cam_focus_mode_type> QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
    { ANDROID_CONTROL_AF_MODE_OFF, CAM_FOCUS_MODE_OFF },
    { ANDROID_CONTROL_AF_MODE_OFF, CAM_FOCUS_MODE_FIXED },
    { ANDROID_CONTROL_AF_MODE_AUTO, CAM_FOCUS_MODE_AUTO },
    { ANDROID_CONTROL_AF_MODE_MACRO, CAM_FOCUS_MODE_MACRO },
    { ANDROID_CONTROL_AF_MODE_EDOF, CAM_FOCUS_MODE_EDOF },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO, CAM_FOCUS_MODE_CONTINOUS_VIDEO }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_color_correction_aberration_mode_t,
        cam_aberration_mode_t> QCamera3HardwareInterface::COLOR_ABERRATION_MAP[] = {
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
            CAM_COLOR_CORRECTION_ABERRATION_OFF },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
            CAM_COLOR_CORRECTION_ABERRATION_FAST },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY,
            CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY },
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_antibanding_mode_t,
        cam_antibanding_mode_type> QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF, CAM_ANTIBANDING_MODE_OFF },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = {
    { ANDROID_CONTROL_AE_MODE_OFF, CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON, CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH, CAM_FLASH_MODE_AUTO},
    { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH, CAM_FLASH_MODE_ON },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO},
    { (camera_metadata_enum_android_control_ae_mode_t)
            NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH, CAM_FLASH_MODE_OFF }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_flash_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
    { ANDROID_FLASH_MODE_OFF, CAM_FLASH_MODE_OFF },
    { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_SINGLE },
    { ANDROID_FLASH_MODE_TORCH, CAM_FLASH_MODE_TORCH }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_statistics_face_detect_mode_t,
        cam_face_detect_mode_t> QCamera3HardwareInterface::FACEDETECT_MODES_MAP[] = {
    { ANDROID_STATISTICS_FACE_DETECT_MODE_OFF, CAM_FACE_DETECT_MODE_OFF },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE, CAM_FACE_DETECT_MODE_SIMPLE },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_FULL, CAM_FACE_DETECT_MODE_FULL }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_info_focus_distance_calibration_t,
        cam_focus_calibration_t> QCamera3HardwareInterface::FOCUS_CALIBRATION_MAP[] = {
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED,
            CAM_FOCUS_UNCALIBRATED },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_APPROXIMATE,
            CAM_FOCUS_APPROXIMATE },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED,
            CAM_FOCUS_CALIBRATED }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_state_t,
        cam_af_lens_state_t> QCamera3HardwareInterface::LENS_STATE_MAP[] = {
    { ANDROID_LENS_STATE_STATIONARY, CAM_AF_LENS_STATE_STATIONARY},
    { ANDROID_LENS_STATE_MOVING, CAM_AF_LENS_STATE_MOVING}
};

const int32_t available_thumbnail_sizes[] = {0, 0,
                                             176, 144,
                                             240, 144,
                                             256, 144,
                                             240, 160,
                                             256, 154,
                                             240, 240,
                                             320, 240};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_test_pattern_mode_t,
        cam_test_pattern_mode_t> QCamera3HardwareInterface::TEST_PATTERN_MAP[] = {
    { ANDROID_SENSOR_TEST_PATTERN_MODE_OFF, CAM_TEST_PATTERN_OFF },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR, CAM_TEST_PATTERN_SOLID_COLOR },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS, CAM_TEST_PATTERN_COLOR_BARS },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS_FADE_TO_GRAY, CAM_TEST_PATTERN_COLOR_BARS_FADE_TO_GRAY },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_PN9, CAM_TEST_PATTERN_PN9 },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_CUSTOM1, CAM_TEST_PATTERN_CUSTOM1},
};

/* Since there is no mapping for all the options, some Android enums are not listed.
 * Also, the order in this list is important: when mapping from HAL to Android, the lookup
 * traverses from lower to higher index, so for HAL values that map to several different
 * Android values, the first match found is selected. (An illustrative lookup sketch follows
 * the table below.)
 */
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_reference_illuminant1_t,
        cam_illuminat_t> QCamera3HardwareInterface::REFERENCE_ILLUMINANT_MAP[] = {
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FLUORESCENT, CAM_AWB_WARM_FLO},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_COOL_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_A, CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D55, CAM_AWB_NOON },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D65, CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D75, CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D50, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_ISO_STUDIO_TUNGSTEN, CAM_AWB_CUSTOM_A},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_TUNGSTEN, CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FINE_WEATHER, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_CLOUDY_WEATHER, CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_SHADE, CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAY_WHITE_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO},
};

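// Illustrative sketch of the first-match behavior described above (loop shape and field
// names are assumptions for illustration only):
//     cam_illuminat_t hal = CAM_AWB_D50;
//     for (size_t i = 0; i < METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP); i++) {
//         if (REFERENCE_ILLUMINANT_MAP[i].hal_name == hal) {
//             // Picks ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D50, the first entry mapped to
//             // CAM_AWB_D50, even though DAYLIGHT and FINE_WEATHER also map to it.
//             break;
//         }
//     }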

const QCamera3HardwareInterface::QCameraMap<
        int32_t, cam_hfr_mode_t> QCamera3HardwareInterface::HFR_MODE_MAP[] = {
    { 60, CAM_HFR_MODE_60FPS},
    { 90, CAM_HFR_MODE_90FPS},
    { 120, CAM_HFR_MODE_120FPS},
    { 150, CAM_HFR_MODE_150FPS},
    { 180, CAM_HFR_MODE_180FPS},
    { 210, CAM_HFR_MODE_210FPS},
    { 240, CAM_HFR_MODE_240FPS},
    { 480, CAM_HFR_MODE_480FPS},
};

const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_instant_aec_mode_t,
        cam_aec_convergence_type> QCamera3HardwareInterface::INSTANT_AEC_MODES_MAP[] = {
    { QCAMERA3_INSTANT_AEC_NORMAL_CONVERGENCE, CAM_AEC_NORMAL_CONVERGENCE},
    { QCAMERA3_INSTANT_AEC_AGGRESSIVE_CONVERGENCE, CAM_AEC_AGGRESSIVE_CONVERGENCE},
    { QCAMERA3_INSTANT_AEC_FAST_CONVERGENCE, CAM_AEC_FAST_CONVERGENCE},
};

const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_exposure_meter_mode_t,
        cam_auto_exposure_mode_type> QCamera3HardwareInterface::AEC_MODES_MAP[] = {
    { QCAMERA3_EXP_METER_MODE_FRAME_AVERAGE, CAM_AEC_MODE_FRAME_AVERAGE },
    { QCAMERA3_EXP_METER_MODE_CENTER_WEIGHTED, CAM_AEC_MODE_CENTER_WEIGHTED },
    { QCAMERA3_EXP_METER_MODE_SPOT_METERING, CAM_AEC_MODE_SPOT_METERING },
    { QCAMERA3_EXP_METER_MODE_SMART_METERING, CAM_AEC_MODE_SMART_METERING },
    { QCAMERA3_EXP_METER_MODE_USER_METERING, CAM_AEC_MODE_USER_METERING },
    { QCAMERA3_EXP_METER_MODE_SPOT_METERING_ADV, CAM_AEC_MODE_SPOT_METERING_ADV },
    { QCAMERA3_EXP_METER_MODE_CENTER_WEIGHTED_ADV, CAM_AEC_MODE_CENTER_WEIGHTED_ADV },
};

const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_iso_mode_t,
        cam_iso_mode_type> QCamera3HardwareInterface::ISO_MODES_MAP[] = {
    { QCAMERA3_ISO_MODE_AUTO, CAM_ISO_MODE_AUTO },
    { QCAMERA3_ISO_MODE_DEBLUR, CAM_ISO_MODE_DEBLUR },
    { QCAMERA3_ISO_MODE_100, CAM_ISO_MODE_100 },
    { QCAMERA3_ISO_MODE_200, CAM_ISO_MODE_200 },
    { QCAMERA3_ISO_MODE_400, CAM_ISO_MODE_400 },
    { QCAMERA3_ISO_MODE_800, CAM_ISO_MODE_800 },
    { QCAMERA3_ISO_MODE_1600, CAM_ISO_MODE_1600 },
    { QCAMERA3_ISO_MODE_3200, CAM_ISO_MODE_3200 },
};

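// Static entry points wired into the camera3_device_ops vtable below; the framework calls
// them through mCameraDevice.ops. The register_stream_buffers and get_metadata_vendor_tag_ops
// entries are left NULL, as those hooks are not used by this HAL device version.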
camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
    .initialize = QCamera3HardwareInterface::initialize,
    .configure_streams = QCamera3HardwareInterface::configure_streams,
    .register_stream_buffers = NULL,
    .construct_default_request_settings = QCamera3HardwareInterface::construct_default_request_settings,
    .process_capture_request = QCamera3HardwareInterface::process_capture_request,
    .get_metadata_vendor_tag_ops = NULL,
    .dump = QCamera3HardwareInterface::dump,
    .flush = QCamera3HardwareInterface::flush,
    .reserved = {0},
};

// initialise to some default value
uint32_t QCamera3HardwareInterface::sessionId[] = {0xDEADBEEF, 0xDEADBEEF, 0xDEADBEEF};

static inline void logEaselEvent(const char *tag, const char *event) {
    if (CC_UNLIKELY(gEaselProfilingEnabled)) {
        struct timespec ts = {};
        static int64_t kMsPerSec = 1000;
        static int64_t kNsPerMs = 1000000;
        status_t res = clock_gettime(CLOCK_BOOTTIME, &ts);
        if (res != OK) {
            ALOGE("[%s] Failed to get boot time for <%s>.", tag, event);
        } else {
            int64_t now = static_cast<int64_t>(ts.tv_sec) * kMsPerSec + ts.tv_nsec / kNsPerMs;
            ALOGI("[%s] %s at %" PRId64 " ms", tag, event, now);
        }
    }
}
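// Example usage (as seen in openCamera() below):
//     logEaselEvent("EASEL_STARTUP_LATENCY", "Camera Open");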

/*===========================================================================
 * FUNCTION   : QCamera3HardwareInterface
 *
 * DESCRIPTION: constructor of QCamera3HardwareInterface
 *
 * PARAMETERS :
 *   @cameraId  : camera ID
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::QCamera3HardwareInterface(uint32_t cameraId,
        const camera_module_callbacks_t *callbacks)
    : mCameraId(cameraId),
      mCameraHandle(NULL),
      mCameraInitialized(false),
      mCallbackOps(NULL),
      mMetadataChannel(NULL),
      mPictureChannel(NULL),
      mRawChannel(NULL),
      mSupportChannel(NULL),
      mAnalysisChannel(NULL),
      mRawDumpChannel(NULL),
      mHdrPlusRawSrcChannel(NULL),
      mDummyBatchChannel(NULL),
      mDepthChannel(NULL),
      mDepthCloudMode(CAM_PD_DATA_SKIP),
      mPerfLockMgr(),
      mChannelHandle(0),
      mFirstConfiguration(true),
      mFlush(false),
      mFlushPerf(false),
      mParamHeap(NULL),
      mParameters(NULL),
      mPrevParameters(NULL),
      m_bIsVideo(false),
      m_bIs4KVideo(false),
      m_bEisSupportedSize(false),
      m_bEisEnable(false),
      m_bEis3PropertyEnabled(false),
      m_bAVTimerEnabled(false),
      m_MobicatMask(0),
      mShutterDispatcher(this),
      mOutputBufferDispatcher(this),
      mMinProcessedFrameDuration(0),
      mMinJpegFrameDuration(0),
      mMinRawFrameDuration(0),
      mExpectedFrameDuration(0),
      mExpectedInflightDuration(0),
      mMetaFrameCount(0U),
      mUpdateDebugLevel(false),
      mCallbacks(callbacks),
      mCaptureIntent(0),
      mCacMode(0),
      /* DevCamDebug metadata internal m control*/
      mDevCamDebugMetaEnable(0),
      /* DevCamDebug metadata end */
      mBatchSize(0),
      mToBeQueuedVidBufs(0),
      mHFRVideoFps(DEFAULT_VIDEO_FPS),
      mOpMode(CAMERA3_STREAM_CONFIGURATION_NORMAL_MODE),
      mStreamConfig(false),
      mCommon(),
      mFirstFrameNumberInBatch(0),
      mNeedSensorRestart(false),
      mPreviewStarted(false),
      mMinInFlightRequests(MIN_INFLIGHT_REQUESTS),
      mMaxInFlightRequests(MAX_INFLIGHT_REQUESTS),
      mPDSupported(false),
      mPDIndex(0),
      mInstantAEC(false),
      mResetInstantAEC(false),
      mInstantAECSettledFrameNumber(0),
      mAecSkipDisplayFrameBound(0),
      mInstantAecFrameIdxCount(0),
      mLastRequestedLensShadingMapMode(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF),
      mCurrFeatureState(0),
      mLdafCalibExist(false),
      mLastCustIntentFrmNum(-1),
      mFirstMetadataCallback(true),
      mState(CLOSED),
      mIsDeviceLinked(false),
      mIsMainCamera(true),
      mLinkedCameraId(0),
      m_pDualCamCmdHeap(NULL),
      m_pDualCamCmdPtr(NULL),
      mHdrPlusModeEnabled(false),
      mZslEnabled(false),
      mIsApInputUsedForHdrPlus(false),
      mFirstPreviewIntentSeen(false),
      m_bSensorHDREnabled(false),
      mAfTrigger()
{
    getLogLevel();
    mCommon.init(gCamCapability[cameraId]);
    mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
#ifndef USE_HAL_3_3
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_4;
#else
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_3;
#endif
    mCameraDevice.common.close = close_camera_device;
    mCameraDevice.ops = &mCameraOps;
    mCameraDevice.priv = this;
    gCamCapability[cameraId]->version = CAM_HAL_V3;
    // TODO: hardcoded for now until mctl adds support for min_num_pp_bufs
    // TBD: check whether this hardcoding is needed; verify by printing whether mctl fills this to 3
    gCamCapability[cameraId]->min_num_pp_bufs = 3;

    PTHREAD_COND_INIT(&mBuffersCond);

    PTHREAD_COND_INIT(&mRequestCond);
    mPendingLiveRequest = 0;
    mCurrentRequestId = -1;
    pthread_mutex_init(&mMutex, NULL);

    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        mDefaultMetadata[i] = NULL;

    // Getting system props of different kinds
    char prop[PROPERTY_VALUE_MAX];
    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.raw.dump", prop, "0");
    mEnableRawDump = atoi(prop);
    property_get("persist.camera.hal3.force.hdr", prop, "0");
    mForceHdrSnapshot = atoi(prop);

    if (mEnableRawDump)
        LOGD("Raw dump from Camera HAL enabled");

    memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
    memset(mLdafCalib, 0, sizeof(mLdafCalib));

    memset(mEaselFwVersion, 0, sizeof(mEaselFwVersion));

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.preview", prop, "0");
    m_bTnrPreview = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.swtnr.preview", prop, "1");
    m_bSwTnrPreview = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.video", prop, "1");
    m_bTnrVideo = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.avtimer.debug", prop, "0");
    m_debug_avtimer = (uint8_t)atoi(prop);
    LOGI("AV timer enabled: %d", m_debug_avtimer);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.cacmode.disable", prop, "0");
    m_cacModeDisabled = (uint8_t)atoi(prop);

    m_bForceInfinityAf = property_get_bool("persist.camera.af.infinity", 0);
    m_MobicatMask = (uint8_t)property_get_int32("persist.camera.mobicat", 0);

    //Load and read GPU library.
    lib_surface_utils = NULL;
    LINK_get_surface_pixel_alignment = NULL;
    mSurfaceStridePadding = CAM_PAD_TO_64;
#ifdef CHECK_GPU_PIXEL_ALIGNMENT
    lib_surface_utils = dlopen("libadreno_utils.so", RTLD_NOW);
    if (lib_surface_utils) {
        *(void **)&LINK_get_surface_pixel_alignment =
                dlsym(lib_surface_utils, "get_gpu_pixel_alignment");
        if (LINK_get_surface_pixel_alignment) {
            mSurfaceStridePadding = LINK_get_surface_pixel_alignment();
        }
        dlclose(lib_surface_utils);
    }
#endif
    mPDIndex = getPDStatIndex(gCamCapability[cameraId]);
    mPDSupported = (0 <= mPDIndex) ? true : false;

    m60HzZone = is60HzZone();
}

/*===========================================================================
 * FUNCTION   : ~QCamera3HardwareInterface
 *
 * DESCRIPTION: destructor of QCamera3HardwareInterface
 *
 * PARAMETERS : none
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::~QCamera3HardwareInterface()
{
    LOGD("E");

    int32_t rc = 0;

    // Disable power hint and enable the perf lock for close camera
    mPerfLockMgr.releasePerfLock(PERF_LOCK_POWERHINT_ENCODE);
    mPerfLockMgr.acquirePerfLock(PERF_LOCK_CLOSE_CAMERA);

    // unlink of dualcam during close camera
    if (mIsDeviceLinked) {
        cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
                &m_pDualCamCmdPtr->bundle_info;
        m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
        m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
        pthread_mutex_lock(&gCamLock);

        if (mIsMainCamera == 1) {
            m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
            m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
            m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
            // related session id should be session id of linked session
            m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
        } else {
            m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
            m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
            m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
            m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
        }
        m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
        pthread_mutex_unlock(&gCamLock);

        rc = mCameraHandle->ops->set_dual_cam_cmd(
                mCameraHandle->camera_handle);
        if (rc < 0) {
            LOGE("Dualcam: Unlink failed, but still proceed to close");
        }
    }

    /* We need to stop all streams before deleting any stream */
    if (mRawDumpChannel) {
        mRawDumpChannel->stop();
    }

    if (mHdrPlusRawSrcChannel) {
        mHdrPlusRawSrcChannel->stop();
    }

    // NOTE: 'camera3_stream_t *' objects are already freed at
    //       this stage by the framework
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel) {
            channel->stop();
        }
    }
    if (mSupportChannel)
        mSupportChannel->stop();

    if (mAnalysisChannel) {
        mAnalysisChannel->stop();
    }
    if (mMetadataChannel) {
        mMetadataChannel->stop();
    }
    if (mChannelHandle) {
        mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
                mChannelHandle, /*stop_immediately*/false);
        LOGD("stopping channel %d", mChannelHandle);
    }

    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel)
            delete channel;
        free (*it);
    }
    if (mSupportChannel) {
        delete mSupportChannel;
        mSupportChannel = NULL;
    }

    if (mAnalysisChannel) {
        delete mAnalysisChannel;
        mAnalysisChannel = NULL;
    }
    if (mRawDumpChannel) {
        delete mRawDumpChannel;
        mRawDumpChannel = NULL;
    }
    if (mHdrPlusRawSrcChannel) {
        delete mHdrPlusRawSrcChannel;
        mHdrPlusRawSrcChannel = NULL;
    }
    if (mDummyBatchChannel) {
        delete mDummyBatchChannel;
        mDummyBatchChannel = NULL;
    }

    mPictureChannel = NULL;
    mDepthChannel = NULL;

    if (mMetadataChannel) {
        delete mMetadataChannel;
        mMetadataChannel = NULL;
    }

    /* Clean up all channels */
    if (mCameraInitialized) {
        if(!mFirstConfiguration){
            //send the last unconfigure
            cam_stream_size_info_t stream_config_info;
            memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
            stream_config_info.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
            stream_config_info.buffer_info.max_buffers =
                    m_bIs4KVideo ? 0 :
                    m_bEis3PropertyEnabled && m_bIsVideo ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
            clear_metadata_buffer(mParameters);
            ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_INFO,
                    stream_config_info);
            int rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
            if (rc < 0) {
                LOGE("set_parms failed for unconfigure");
            }
        }
        deinitParameters();
    }

    if (mChannelHandle) {
        mCameraHandle->ops->delete_channel(mCameraHandle->camera_handle,
                mChannelHandle);
        LOGH("deleting channel %d", mChannelHandle);
        mChannelHandle = 0;
    }

    if (mState != CLOSED)
        closeCamera();

    for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
        req.mPendingBufferList.clear();
    }
    mPendingBuffersMap.mPendingBuffersInRequest.clear();
    for (pendingRequestIterator i = mPendingRequestsList.begin();
            i != mPendingRequestsList.end();) {
        i = erasePendingRequest(i);
    }
    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        if (mDefaultMetadata[i])
            free_camera_metadata(mDefaultMetadata[i]);

    mPerfLockMgr.releasePerfLock(PERF_LOCK_CLOSE_CAMERA);

    pthread_cond_destroy(&mRequestCond);

    pthread_cond_destroy(&mBuffersCond);

    pthread_mutex_destroy(&mMutex);
    LOGD("X");
}

/*===========================================================================
 * FUNCTION   : erasePendingRequest
 *
 * DESCRIPTION: function to erase a desired pending request after freeing any
 *              allocated memory
 *
 * PARAMETERS :
 *   @i       : iterator pointing to pending request to be erased
 *
 * RETURN     : iterator pointing to the next request
 *==========================================================================*/
QCamera3HardwareInterface::pendingRequestIterator
        QCamera3HardwareInterface::erasePendingRequest (pendingRequestIterator i)
{
    if (i->input_buffer != NULL) {
        free(i->input_buffer);
        i->input_buffer = NULL;
    }
    if (i->settings != NULL)
        free_camera_metadata((camera_metadata_t*)i->settings);

    mExpectedInflightDuration -= i->expectedFrameDuration;
    if (mExpectedInflightDuration < 0) {
        LOGE("Negative expected in-flight duration!");
        mExpectedInflightDuration = 0;
    }

    return mPendingRequestsList.erase(i);
}

/*===========================================================================
 * FUNCTION   : camEvtHandle
 *
 * DESCRIPTION: Function registered to mm-camera-interface to handle events
 *
 * PARAMETERS :
 *   @camera_handle : interface layer camera handle
 *   @evt           : ptr to event
 *   @user_data     : user data ptr
 *
 * RETURN     : none
 *==========================================================================*/
void QCamera3HardwareInterface::camEvtHandle(uint32_t /*camera_handle*/,
        mm_camera_event_t *evt,
        void *user_data)
{
    QCamera3HardwareInterface *obj = (QCamera3HardwareInterface *)user_data;
    if (obj && evt) {
        switch(evt->server_event_type) {
            case CAM_EVENT_TYPE_DAEMON_DIED:
                pthread_mutex_lock(&obj->mMutex);
                obj->mState = ERROR;
                pthread_mutex_unlock(&obj->mMutex);
                LOGE("Fatal, camera daemon died");
                break;

            case CAM_EVENT_TYPE_DAEMON_PULL_REQ:
                LOGD("HAL got request pull from Daemon");
                pthread_mutex_lock(&obj->mMutex);
                obj->mWokenUpByDaemon = true;
                obj->unblockRequestIfNecessary();
                pthread_mutex_unlock(&obj->mMutex);
                break;

            default:
                LOGW("Warning: Unhandled event %d",
                        evt->server_event_type);
                break;
        }
    } else {
        LOGE("NULL user_data/evt");
    }
}

/*===========================================================================
 * FUNCTION   : openCamera
 *
 * DESCRIPTION: open camera
 *
 * PARAMETERS :
 *   @hw_device  : double ptr for camera device struct
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device)
{
    int rc = 0;
    if (mState != CLOSED) {
        *hw_device = NULL;
        return PERMISSION_DENIED;
    }

    logEaselEvent("EASEL_STARTUP_LATENCY", "Camera Open");
    mPerfLockMgr.acquirePerfLock(PERF_LOCK_OPEN_CAMERA);
    LOGI("[KPI Perf]: E PROFILE_OPEN_CAMERA camera id %d",
            mCameraId);

    if (mCameraHandle) {
        LOGE("Failure: Camera already opened");
        return ALREADY_EXISTS;
    }

    {
        std::unique_lock<std::mutex> l(gHdrPlusClientLock);
        if (gEaselManagerClient != nullptr && gEaselManagerClient->isEaselPresentOnDevice()) {
            logEaselEvent("EASEL_STARTUP_LATENCY", "Resume");
            rc = gEaselManagerClient->resume(this);
            if (rc != 0) {
                ALOGE("%s: Resuming Easel failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
                return rc;
            }
        }
    }

    rc = openCamera();
    if (rc == 0) {
        *hw_device = &mCameraDevice.common;
    } else {
        *hw_device = NULL;

        // Suspend Easel because opening camera failed.
        {
            std::unique_lock<std::mutex> l(gHdrPlusClientLock);
            if (gEaselManagerClient != nullptr && gEaselManagerClient->isEaselPresentOnDevice()) {
                status_t suspendErr = gEaselManagerClient->suspend();
                if (suspendErr != 0) {
                    ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__,
                            strerror(-suspendErr), suspendErr);
                }
            }
        }
    }

    LOGI("[KPI Perf]: X PROFILE_OPEN_CAMERA camera id %d, rc: %d",
            mCameraId, rc);

    if (rc == NO_ERROR) {
        mState = OPENED;
    }

    return rc;
}

/*===========================================================================
 * FUNCTION   : openCamera
 *
 * DESCRIPTION: open camera
 *
 * PARAMETERS : none
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::openCamera()
{
    int rc = 0;
    char value[PROPERTY_VALUE_MAX];

    KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_OPENCAMERA);

    rc = QCameraFlash::getInstance().reserveFlashForCamera(mCameraId);
    if (rc < 0) {
        LOGE("Failed to reserve flash for camera id: %d",
                mCameraId);
        return UNKNOWN_ERROR;
    }

    rc = camera_open((uint8_t)mCameraId, &mCameraHandle);
    if (rc) {
        LOGE("camera_open failed. rc = %d, mCameraHandle = %p", rc, mCameraHandle);
        return rc;
    }

    if (!mCameraHandle) {
        LOGE("camera_open failed. mCameraHandle = %p", mCameraHandle);
        return -ENODEV;
    }

    rc = mCameraHandle->ops->register_event_notify(mCameraHandle->camera_handle,
            camEvtHandle, (void *)this);

    if (rc < 0) {
        LOGE("Error, failed to register event callback");
        /* Not closing camera here since it is already handled in destructor */
        return FAILED_TRANSACTION;
    }

    mExifParams.debug_params =
            (mm_jpeg_debug_exif_params_t *) malloc (sizeof(mm_jpeg_debug_exif_params_t));
    if (mExifParams.debug_params) {
        memset(mExifParams.debug_params, 0, sizeof(mm_jpeg_debug_exif_params_t));
    } else {
        LOGE("Out of Memory. Allocation failed for 3A debug exif params");
        return NO_MEMORY;
    }
    mFirstConfiguration = true;

    //Notify display HAL that a camera session is active.
    //But avoid calling the same during bootup because camera service might open/close
    //cameras at boot time during its initialization and display service will also internally
    //wait for camera service to initialize first while calling this display API, resulting in a
    //deadlock situation. Since boot time camera open/close calls are made only to fetch
    //capabilities, no need of this display bw optimization.
    //Use "service.bootanim.exit" property to know boot status.
    property_get("service.bootanim.exit", value, "0");
    if (atoi(value) == 1) {
        pthread_mutex_lock(&gCamLock);
        if (gNumCameraSessions++ == 0) {
            setCameraLaunchStatus(true);
        }
        pthread_mutex_unlock(&gCamLock);
    }

    //fill the session id needed while linking dual cam
    pthread_mutex_lock(&gCamLock);
    rc = mCameraHandle->ops->get_session_id(mCameraHandle->camera_handle,
            &sessionId[mCameraId]);
    pthread_mutex_unlock(&gCamLock);

    if (rc < 0) {
        LOGE("Error, failed to get session id");
        return UNKNOWN_ERROR;
    } else {
        //Allocate related cam sync buffer
        //this is needed for the payload that goes along with bundling cmd for related
        //camera use cases
        m_pDualCamCmdHeap = new QCamera3HeapMemory(1);
        rc = m_pDualCamCmdHeap->allocate(sizeof(cam_dual_camera_cmd_info_t));
        if(rc != OK) {
            rc = NO_MEMORY;
            LOGE("Dualcam: Failed to allocate Related cam sync Heap memory");
            return NO_MEMORY;
        }

        //Map memory for related cam sync buffer
        rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
                CAM_MAPPING_BUF_TYPE_DUAL_CAM_CMD_BUF,
                m_pDualCamCmdHeap->getFd(0),
                sizeof(cam_dual_camera_cmd_info_t),
                m_pDualCamCmdHeap->getPtr(0));
        if(rc < 0) {
            LOGE("Dualcam: failed to map Related cam sync buffer");
            rc = FAILED_TRANSACTION;
            return NO_MEMORY;
        }
        m_pDualCamCmdPtr =
                (cam_dual_camera_cmd_info_t*) DATA_PTR(m_pDualCamCmdHeap,0);
    }

    LOGH("mCameraId=%d",mCameraId);

    return NO_ERROR;
}

/*===========================================================================
 * FUNCTION   : closeCamera
 *
 * DESCRIPTION: close camera
 *
 * PARAMETERS : none
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::closeCamera()
{
    KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CLOSECAMERA);
    int rc = NO_ERROR;
    char value[PROPERTY_VALUE_MAX];

    LOGI("[KPI Perf]: E PROFILE_CLOSE_CAMERA camera id %d",
            mCameraId);

    // unmap memory for related cam sync buffer
    mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
            CAM_MAPPING_BUF_TYPE_DUAL_CAM_CMD_BUF);
    if (NULL != m_pDualCamCmdHeap) {
        m_pDualCamCmdHeap->deallocate();
        delete m_pDualCamCmdHeap;
        m_pDualCamCmdHeap = NULL;
        m_pDualCamCmdPtr = NULL;
    }

    rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
    mCameraHandle = NULL;

    //reset session id to some invalid id
    pthread_mutex_lock(&gCamLock);
    sessionId[mCameraId] = 0xDEADBEEF;
    pthread_mutex_unlock(&gCamLock);

    //Notify display HAL that there is no active camera session
    //but avoid calling the same during bootup. Refer to openCamera
    //for more details.
    property_get("service.bootanim.exit", value, "0");
    if (atoi(value) == 1) {
        pthread_mutex_lock(&gCamLock);
        if (--gNumCameraSessions == 0) {
            setCameraLaunchStatus(false);
        }
        pthread_mutex_unlock(&gCamLock);
    }

    if (mExifParams.debug_params) {
        free(mExifParams.debug_params);
        mExifParams.debug_params = NULL;
    }
    if (QCameraFlash::getInstance().releaseFlashFromCamera(mCameraId) != 0) {
        LOGW("Failed to release flash for camera id: %d",
                mCameraId);
    }
    mState = CLOSED;
    LOGI("[KPI Perf]: X PROFILE_CLOSE_CAMERA camera id %d, rc: %d",
            mCameraId, rc);

    {
        std::unique_lock<std::mutex> l(gHdrPlusClientLock);
        finishHdrPlusClientOpeningLocked(l);
        if (gHdrPlusClient != nullptr) {
            // Disable HDR+ mode.
            disableHdrPlusModeLocked();
            // Disconnect Easel if it's connected.
            gEaselManagerClient->closeHdrPlusClient(std::move(gHdrPlusClient));
            gHdrPlusClient = nullptr;
        }

        if (EaselManagerClientOpened) {
            rc = gEaselManagerClient->stopMipi(mCameraId);
            if (rc != 0) {
                ALOGE("%s: Stopping MIPI failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
            }

            rc = gEaselManagerClient->suspend();
            if (rc != 0) {
                ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
            }
        }
    }

    return rc;
}

/*===========================================================================
 * FUNCTION   : initialize
 *
 * DESCRIPTION: Initialize frameworks callback functions
 *
 * PARAMETERS :
 *   @callback_ops : callback function to frameworks
 *
 * RETURN     :
 *
 *==========================================================================*/
int QCamera3HardwareInterface::initialize(
        const struct camera3_callback_ops *callback_ops)
{
    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_INIT);
    int rc;

    LOGI("E :mCameraId = %d mState = %d", mCameraId, mState);
    pthread_mutex_lock(&mMutex);

    // Validate current state
    switch (mState) {
        case OPENED:
            /* valid state */
            break;
        default:
            LOGE("Invalid state %d", mState);
            rc = -ENODEV;
            goto err1;
    }

    rc = initParameters();
    if (rc < 0) {
        LOGE("initParameters failed %d", rc);
        goto err1;
    }
    mCallbackOps = callback_ops;

    mChannelHandle = mCameraHandle->ops->add_channel(
            mCameraHandle->camera_handle, NULL, NULL, this);
    if (mChannelHandle == 0) {
        LOGE("add_channel failed");
        rc = -ENOMEM;
        pthread_mutex_unlock(&mMutex);
        return rc;
    }

    pthread_mutex_unlock(&mMutex);
    mCameraInitialized = true;
    mState = INITIALIZED;
    LOGI("X");
    return 0;

err1:
    pthread_mutex_unlock(&mMutex);
    return rc;
}

/*===========================================================================
 * FUNCTION   : validateStreamDimensions
 *
 * DESCRIPTION: Check if the requested configurations are among those advertised
 *
 * PARAMETERS :
 *   @stream_list : streams to be configured
 *
 * RETURN     :
 *
 *==========================================================================*/
int QCamera3HardwareInterface::validateStreamDimensions(
        camera3_stream_configuration_t *streamList)
{
    int rc = NO_ERROR;
    size_t count = 0;
    uint32_t depthWidth = 0;
    uint32_t depthHeight = 0;
    if (mPDSupported) {
        depthWidth = gCamCapability[mCameraId]->raw_meta_dim[mPDIndex].width;
        depthHeight = gCamCapability[mCameraId]->raw_meta_dim[mPDIndex].height;
    }

    camera3_stream_t *inputStream = NULL;
    /*
     * Loop through all streams to find the input stream, if it exists.
     */
    for (size_t i = 0; i < streamList->num_streams; i++) {
        if (streamList->streams[i]->stream_type == CAMERA3_STREAM_INPUT) {
            if (inputStream != NULL) {
                LOGE("Error, Multiple input streams requested");
                return -EINVAL;
            }
            inputStream = streamList->streams[i];
        }
    }
    /*
     * Loop through all streams requested in configuration
     * Check if unsupported sizes have been requested on any of them
     */
    for (size_t j = 0; j < streamList->num_streams; j++) {
        bool sizeFound = false;
        camera3_stream_t *newStream = streamList->streams[j];

        uint32_t rotatedHeight = newStream->height;
        uint32_t rotatedWidth = newStream->width;
        if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
                (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
            rotatedHeight = newStream->width;
            rotatedWidth = newStream->height;
        }

        /*
         * Sizes are different for each type of stream format; check against
         * the appropriate table.
         */
        switch (newStream->format) {
        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
        case HAL_PIXEL_FORMAT_RAW10:
            if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
                    (ANDROID_SCALER_AVAILABLE_FORMATS_RAW16 == newStream->format) &&
                    mPDSupported) {
                if ((depthWidth == newStream->width) &&
                        (depthHeight == newStream->height)) {
                    sizeFound = true;
                }
                break;
            }
            count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
            for (size_t i = 0; i < count; i++) {
                if ((gCamCapability[mCameraId]->raw_dim[i].width == (int32_t)rotatedWidth) &&
                        (gCamCapability[mCameraId]->raw_dim[i].height == (int32_t)rotatedHeight)) {
                    sizeFound = true;
                    break;
                }
            }
            break;
        case HAL_PIXEL_FORMAT_BLOB:
            if ((newStream->data_space == HAL_DATASPACE_DEPTH) &&
                    mPDSupported) {
                //As per spec, depth cloud should be sample count / 16
                uint32_t depthSamplesCount = (depthWidth * depthHeight * 2) / 16;
                if ((depthSamplesCount == newStream->width) &&
                        (1 == newStream->height)) {
                    sizeFound = true;
                }
                break;
            }
            count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
            /* Verify set size against generated sizes table */
            for (size_t i = 0; i < count; i++) {
                if (((int32_t)rotatedWidth ==
                        gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
                        ((int32_t)rotatedHeight ==
                        gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
                    sizeFound = true;
                    break;
                }
            }
            break;
        case HAL_PIXEL_FORMAT_YCbCr_420_888:
        case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
        default:
            if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
                    || newStream->stream_type == CAMERA3_STREAM_INPUT
                    || IS_USAGE_ZSL(newStream->usage)) {
                if (((int32_t)rotatedWidth ==
                        gCamCapability[mCameraId]->active_array_size.width) &&
                        ((int32_t)rotatedHeight ==
                        gCamCapability[mCameraId]->active_array_size.height)) {
                    sizeFound = true;
                    break;
                }
1308                 /* We could potentially break here to enforce that a ZSL stream
1309                  * set from the framework is always full active array size,
1310                  * but it is not clear from the spec whether the framework will
1311                  * always follow that; we also have logic to override to full
1312                  * array size, so keep the check lenient for now
1313 */
1314 }
1315 count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt,
1316 MAX_SIZES_CNT);
1317 for (size_t i = 0; i < count; i++) {
1318 if (((int32_t)rotatedWidth ==
1319 gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
1320 ((int32_t)rotatedHeight ==
1321 gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
1322 sizeFound = true;
1323 break;
1324 }
1325 }
1326 break;
1327 } /* End of switch(newStream->format) */
1328
1329 /* We error out even if a single stream has unsupported size set */
1330 if (!sizeFound) {
1331 LOGE("Error: Unsupported size: %d x %d type: %d array size: %d x %d",
1332 rotatedWidth, rotatedHeight, newStream->format,
1333 gCamCapability[mCameraId]->active_array_size.width,
1334 gCamCapability[mCameraId]->active_array_size.height);
1335 rc = -EINVAL;
1336 break;
1337 }
1338 } /* End of for each stream */
1339 return rc;
1340}
1341
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001342/*===========================================================================
1343 * FUNCTION : validateUsageFlags
1344 *
1345  * DESCRIPTION: Check if the configuration usage flags map to the same internal format.
1346 *
1347 * PARAMETERS :
1348 * @stream_list : streams to be configured
1349 *
1350 * RETURN :
1351 * NO_ERROR if the usage flags are supported
1352 * error code if usage flags are not supported
1353 *
1354 *==========================================================================*/
1355int QCamera3HardwareInterface::validateUsageFlags(
1356 const camera3_stream_configuration_t* streamList)
1357{
1358 for (size_t j = 0; j < streamList->num_streams; j++) {
1359 const camera3_stream_t *newStream = streamList->streams[j];
1360
1361 if (newStream->format != HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
1362 (newStream->stream_type != CAMERA3_STREAM_OUTPUT &&
1363 newStream->stream_type != CAMERA3_STREAM_BIDIRECTIONAL)) {
1364 continue;
1365 }
1366
Jason Leec4cf5032017-05-24 18:31:41 -07001367 // Here we only care whether it's EIS3 or not
1368 char is_type_value[PROPERTY_VALUE_MAX];
1369 property_get("persist.camera.is_type", is_type_value, "4");
1370 cam_is_type_t isType = atoi(is_type_value) == IS_TYPE_EIS_3_0 ? IS_TYPE_EIS_3_0 : IS_TYPE_NONE;
1371 if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
1372 mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
1373 isType = IS_TYPE_NONE;
1374
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001375 bool isVideo = IS_USAGE_VIDEO(newStream->usage);
1376 bool isPreview = IS_USAGE_PREVIEW(newStream->usage);
1377 bool isZSL = IS_USAGE_ZSL(newStream->usage);
1378 bool forcePreviewUBWC = true;
1379 if (isVideo && !QCameraCommon::isVideoUBWCEnabled()) {
1380 forcePreviewUBWC = false;
1381 }
1382 cam_format_t videoFormat = QCamera3Channel::getStreamDefaultFormat(
Jason Leec4cf5032017-05-24 18:31:41 -07001383 CAM_STREAM_TYPE_VIDEO, newStream->width, newStream->height, forcePreviewUBWC, isType);
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001384 cam_format_t previewFormat = QCamera3Channel::getStreamDefaultFormat(
Jason Leec4cf5032017-05-24 18:31:41 -07001385 CAM_STREAM_TYPE_PREVIEW, newStream->width, newStream->height, forcePreviewUBWC, isType);
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001386 cam_format_t zslFormat = QCamera3Channel::getStreamDefaultFormat(
Jason Leec4cf5032017-05-24 18:31:41 -07001387 CAM_STREAM_TYPE_SNAPSHOT, newStream->width, newStream->height, forcePreviewUBWC, isType);
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001388
1389 // Color space for this camera device is guaranteed to be ITU_R_601_FR.
1390 // So color spaces will always match.
1391
1392 // Check whether underlying formats of shared streams match.
1393 if (isVideo && isPreview && videoFormat != previewFormat) {
1394 LOGE("Combined video and preview usage flag is not supported");
1395 return -EINVAL;
1396 }
1397 if (isPreview && isZSL && previewFormat != zslFormat) {
1398 LOGE("Combined preview and zsl usage flag is not supported");
1399 return -EINVAL;
1400 }
1401 if (isVideo && isZSL && videoFormat != zslFormat) {
1402 LOGE("Combined video and zsl usage flag is not supported");
1403 return -EINVAL;
1404 }
1405 }
1406 return NO_ERROR;
1407}
1408
1409/*===========================================================================
1410 * FUNCTION : validateUsageFlagsForEis
1411 *
1412  * DESCRIPTION: Check if the configuration usage flags conflict with EIS
1413 *
1414 * PARAMETERS :
1415 * @stream_list : streams to be configured
1416 *
1417 * RETURN :
1418 * NO_ERROR if the usage flags are supported
1419 * error code if usage flags are not supported
1420 *
1421 *==========================================================================*/
1422int QCamera3HardwareInterface::validateUsageFlagsForEis(
1423 const camera3_stream_configuration_t* streamList)
1424{
1425 for (size_t j = 0; j < streamList->num_streams; j++) {
1426 const camera3_stream_t *newStream = streamList->streams[j];
1427
1428 bool isVideo = IS_USAGE_VIDEO(newStream->usage);
1429 bool isPreview = IS_USAGE_PREVIEW(newStream->usage);
1430
1431         // Because EIS is "hard-coded" for certain use cases, and the current
1432 // implementation doesn't support shared preview and video on the same
1433 // stream, return failure if EIS is forced on.
1434 if (isPreview && isVideo && m_bEisEnable && m_bEisSupportedSize) {
1435 LOGE("Combined video and preview usage flag is not supported due to EIS");
1436 return -EINVAL;
1437 }
1438 }
1439 return NO_ERROR;
1440}
1441
Thierry Strudel3d639192016-09-09 11:52:26 -07001442/*==============================================================================
1443 * FUNCTION : isSupportChannelNeeded
1444 *
1445  * DESCRIPTION: Simple heuristic to determine if a support channel is needed
1446 *
1447 * PARAMETERS :
1448 * @stream_list : streams to be configured
1449 * @stream_config_info : the config info for streams to be configured
1450 *
1451  * RETURN : Boolean true/false decision
1452 *
1453 *==========================================================================*/
1454bool QCamera3HardwareInterface::isSupportChannelNeeded(
1455 camera3_stream_configuration_t *streamList,
1456 cam_stream_size_info_t stream_config_info)
1457{
1458 uint32_t i;
1459 bool pprocRequested = false;
1460     /* Check for conditions where PProc pipeline does not have any streams */
1461 for (i = 0; i < stream_config_info.num_streams; i++) {
1462 if (stream_config_info.type[i] != CAM_STREAM_TYPE_ANALYSIS &&
1463 stream_config_info.postprocess_mask[i] != CAM_QCOM_FEATURE_NONE) {
1464 pprocRequested = true;
1465 break;
1466 }
1467 }
1468
1469 if (pprocRequested == false )
1470 return true;
1471
1472 /* Dummy stream needed if only raw or jpeg streams present */
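    // In other words: a configuration made up entirely of RAW and/or BLOB streams
    // returns true (a dummy/support stream is needed), while any other stream
    // format in the list makes this function return false.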
1473 for (i = 0; i < streamList->num_streams; i++) {
1474 switch(streamList->streams[i]->format) {
1475 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1476 case HAL_PIXEL_FORMAT_RAW10:
1477 case HAL_PIXEL_FORMAT_RAW16:
1478 case HAL_PIXEL_FORMAT_BLOB:
1479 break;
1480 default:
1481 return false;
1482 }
1483 }
1484 return true;
1485}
1486
1487/*==============================================================================
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001488 * FUNCTION : getSensorModeInfo
Thierry Strudel3d639192016-09-09 11:52:26 -07001489 *
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001490 * DESCRIPTION: Get sensor mode information based on current stream configuration
Thierry Strudel3d639192016-09-09 11:52:26 -07001491 *
1492 * PARAMETERS :
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001493 * @sensorModeInfo : sensor mode information (output)
Thierry Strudel3d639192016-09-09 11:52:26 -07001494 *
1495 * RETURN : int32_t type of status
1496 * NO_ERROR -- success
1497  * non-zero failure code
1498 *
1499 *==========================================================================*/
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001500int32_t QCamera3HardwareInterface::getSensorModeInfo(cam_sensor_mode_info_t &sensorModeInfo)
Thierry Strudel3d639192016-09-09 11:52:26 -07001501{
1502 int32_t rc = NO_ERROR;
1503
1504 cam_dimension_t max_dim = {0, 0};
1505 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
1506 if (mStreamConfigInfo.stream_sizes[i].width > max_dim.width)
1507 max_dim.width = mStreamConfigInfo.stream_sizes[i].width;
1508 if (mStreamConfigInfo.stream_sizes[i].height > max_dim.height)
1509 max_dim.height = mStreamConfigInfo.stream_sizes[i].height;
1510 }
1511
1512 clear_metadata_buffer(mParameters);
1513
1514 rc = ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_MAX_DIMENSION,
1515 max_dim);
1516 if (rc != NO_ERROR) {
1517 LOGE("Failed to update table for CAM_INTF_PARM_MAX_DIMENSION");
1518 return rc;
1519 }
1520
1521 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
1522 if (rc != NO_ERROR) {
1523 LOGE("Failed to set CAM_INTF_PARM_MAX_DIMENSION");
1524 return rc;
1525 }
1526
1527 clear_metadata_buffer(mParameters);
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001528 ADD_GET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_SENSOR_MODE_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -07001529
1530 rc = mCameraHandle->ops->get_parms(mCameraHandle->camera_handle,
1531 mParameters);
1532 if (rc != NO_ERROR) {
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001533 LOGE("Failed to get CAM_INTF_PARM_SENSOR_MODE_INFO");
Thierry Strudel3d639192016-09-09 11:52:26 -07001534 return rc;
1535 }
1536
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001537 READ_PARAM_ENTRY(mParameters, CAM_INTF_PARM_SENSOR_MODE_INFO, sensorModeInfo);
Chien-Yu Chenee335912017-02-09 17:53:20 -08001538 LOGH("%s: active array size %dx%d, pixel array size %dx%d, output pixel clock %u, "
1539 "raw bits: %d", __FUNCTION__, sensorModeInfo.active_array_size.width,
1540 sensorModeInfo.active_array_size.height, sensorModeInfo.pixel_array_size.width,
1541 sensorModeInfo.pixel_array_size.height, sensorModeInfo.op_pixel_clk,
1542 sensorModeInfo.num_raw_bits);
Thierry Strudel3d639192016-09-09 11:52:26 -07001543
1544 return rc;
1545}
1546
1547/*==============================================================================
Chien-Yu Chen605c3872017-06-14 11:09:23 -07001548 * FUNCTION : getCurrentSensorModeInfo
1549 *
1550 * DESCRIPTION: Get sensor mode information that is currently selected.
1551 *
1552 * PARAMETERS :
1553 * @sensorModeInfo : sensor mode information (output)
1554 *
1555 * RETURN : int32_t type of status
1556 * NO_ERROR -- success
1557  * non-zero failure code
1558 *
1559 *==========================================================================*/
1560int32_t QCamera3HardwareInterface::getCurrentSensorModeInfo(cam_sensor_mode_info_t &sensorModeInfo)
1561{
1562 int32_t rc = NO_ERROR;
1563
1564 clear_metadata_buffer(mParameters);
1565 ADD_GET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_CURRENT_SENSOR_MODE_INFO);
1566
1567 rc = mCameraHandle->ops->get_parms(mCameraHandle->camera_handle,
1568 mParameters);
1569 if (rc != NO_ERROR) {
1570         LOGE("Failed to get CAM_INTF_PARM_CURRENT_SENSOR_MODE_INFO");
1571 return rc;
1572 }
1573
1574 READ_PARAM_ENTRY(mParameters, CAM_INTF_PARM_CURRENT_SENSOR_MODE_INFO, sensorModeInfo);
1575 LOGH("%s: active array size %dx%d, pixel array size %dx%d, output pixel clock %u, "
1576 "raw bits: %d", __FUNCTION__, sensorModeInfo.active_array_size.width,
1577 sensorModeInfo.active_array_size.height, sensorModeInfo.pixel_array_size.width,
1578 sensorModeInfo.pixel_array_size.height, sensorModeInfo.op_pixel_clk,
1579 sensorModeInfo.num_raw_bits);
1580
1581 return rc;
1582}
1583
1584/*==============================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07001585 * FUNCTION : addToPPFeatureMask
1586 *
1587 * DESCRIPTION: add additional features to pp feature mask based on
1588  * stream type and use case
1589 *
1590 * PARAMETERS :
1591 * @stream_format : stream type for feature mask
1592 * @stream_idx : stream idx within postprocess_mask list to change
1593 *
1594  * RETURN : None
1595 *
1596 *==========================================================================*/
1597void QCamera3HardwareInterface::addToPPFeatureMask(int stream_format,
1598 uint32_t stream_idx)
1599{
1600 char feature_mask_value[PROPERTY_VALUE_MAX];
1601 cam_feature_mask_t feature_mask;
1602 int args_converted;
1603 int property_len;
1604
1605 /* Get feature mask from property */
Thierry Strudel269c81a2016-10-12 12:13:59 -07001606#ifdef _LE_CAMERA_
1607 char swtnr_feature_mask_value[PROPERTY_VALUE_MAX];
1608 snprintf(swtnr_feature_mask_value, PROPERTY_VALUE_MAX, "%lld", CAM_QTI_FEATURE_SW_TNR);
1609 property_len = property_get("persist.camera.hal3.feature",
1610 feature_mask_value, swtnr_feature_mask_value);
1611#else
Thierry Strudel3d639192016-09-09 11:52:26 -07001612 property_len = property_get("persist.camera.hal3.feature",
1613 feature_mask_value, "0");
Thierry Strudel269c81a2016-10-12 12:13:59 -07001614#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07001615 if ((property_len > 2) && (feature_mask_value[0] == '0') &&
1616 (feature_mask_value[1] == 'x')) {
1617 args_converted = sscanf(feature_mask_value, "0x%llx", &feature_mask);
1618 } else {
1619 args_converted = sscanf(feature_mask_value, "%lld", &feature_mask);
1620 }
1621 if (1 != args_converted) {
1622 feature_mask = 0;
1623 LOGE("Wrong feature mask %s", feature_mask_value);
1624 return;
1625 }
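    // For illustration: a property value of "0x10" takes the hex path above while
    // "16" takes the decimal path, both yielding feature_mask == 16. Which bits map
    // to CAM_QTI_FEATURE_SW_TNR, CAM_QCOM_FEATURE_LLVD, etc. is defined by
    // cam_feature_mask_t and is not assumed here.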
1626
1627 switch (stream_format) {
1628 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED: {
1629 /* Add LLVD to pp feature mask only if video hint is enabled */
1630 if ((m_bIsVideo) && (feature_mask & CAM_QTI_FEATURE_SW_TNR)) {
1631 mStreamConfigInfo.postprocess_mask[stream_idx]
1632 |= CAM_QTI_FEATURE_SW_TNR;
1633 LOGH("Added SW TNR to pp feature mask");
1634 } else if ((m_bIsVideo) && (feature_mask & CAM_QCOM_FEATURE_LLVD)) {
1635 mStreamConfigInfo.postprocess_mask[stream_idx]
1636 |= CAM_QCOM_FEATURE_LLVD;
1637 LOGH("Added LLVD SeeMore to pp feature mask");
1638 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001639 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
1640 CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) {
1641 mStreamConfigInfo.postprocess_mask[stream_idx] |= CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
1642 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08001643 if ((m_bIsVideo) && (gCamCapability[mCameraId]->qcom_supported_feature_mask &
1644 CAM_QTI_FEATURE_BINNING_CORRECTION)) {
1645 mStreamConfigInfo.postprocess_mask[stream_idx] |=
1646 CAM_QTI_FEATURE_BINNING_CORRECTION;
1647 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001648 break;
1649 }
1650 default:
1651 break;
1652 }
1653 LOGD("PP feature mask %llx",
1654 mStreamConfigInfo.postprocess_mask[stream_idx]);
1655}
1656
1657/*==============================================================================
1658 * FUNCTION : updateFpsInPreviewBuffer
1659 *
1660 * DESCRIPTION: update FPS information in preview buffer.
1661 *
1662 * PARAMETERS :
1663 * @metadata : pointer to metadata buffer
1664 * @frame_number: frame_number to look for in pending buffer list
1665 *
1666 * RETURN : None
1667 *
1668 *==========================================================================*/
1669void QCamera3HardwareInterface::updateFpsInPreviewBuffer(metadata_buffer_t *metadata,
1670 uint32_t frame_number)
1671{
1672 // Mark all pending buffers for this particular request
1673 // with corresponding framerate information
1674 for (List<PendingBuffersInRequest>::iterator req =
1675 mPendingBuffersMap.mPendingBuffersInRequest.begin();
1676 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
1677 for(List<PendingBufferInfo>::iterator j =
1678 req->mPendingBufferList.begin();
1679 j != req->mPendingBufferList.end(); j++) {
1680 QCamera3Channel *channel = (QCamera3Channel *)j->stream->priv;
1681 if ((req->frame_number == frame_number) &&
1682 (channel->getStreamTypeMask() &
1683 (1U << CAM_STREAM_TYPE_PREVIEW))) {
1684 IF_META_AVAILABLE(cam_fps_range_t, float_range,
1685 CAM_INTF_PARM_FPS_RANGE, metadata) {
1686 typeof (MetaData_t::refreshrate) cameraFps = float_range->max_fps;
1687 struct private_handle_t *priv_handle =
1688 (struct private_handle_t *)(*(j->buffer));
1689 setMetaData(priv_handle, UPDATE_REFRESH_RATE, &cameraFps);
1690 }
1691 }
1692 }
1693 }
1694}
1695
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07001696/*==============================================================================
1697 * FUNCTION : updateTimeStampInPendingBuffers
1698 *
1699 * DESCRIPTION: update timestamp in display metadata for all pending buffers
1700 * of a frame number
1701 *
1702 * PARAMETERS :
1703 * @frame_number: frame_number. Timestamp will be set on pending buffers of this frame number
1704 * @timestamp : timestamp to be set
1705 *
1706 * RETURN : None
1707 *
1708 *==========================================================================*/
1709void QCamera3HardwareInterface::updateTimeStampInPendingBuffers(
1710 uint32_t frameNumber, nsecs_t timestamp)
1711{
1712 for (auto req = mPendingBuffersMap.mPendingBuffersInRequest.begin();
1713 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
Binhao Lin09245482017-08-31 18:25:29 -07001714 // WAR: save the av_timestamp to the next frame
1715 if(req->frame_number == frameNumber + 1) {
1716 req->av_timestamp = timestamp;
1717 }
1718
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07001719 if (req->frame_number != frameNumber)
1720 continue;
1721
1722 for (auto k = req->mPendingBufferList.begin();
1723 k != req->mPendingBufferList.end(); k++ ) {
Binhao Lin09245482017-08-31 18:25:29 -07001724 // WAR: update timestamp when it's not VT usecase
1725 QCamera3Channel *channel = (QCamera3Channel *)k->stream->priv;
1726 if (!((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask() &&
1727 m_bAVTimerEnabled)) {
1728 struct private_handle_t *priv_handle =
1729 (struct private_handle_t *) (*(k->buffer));
1730 setMetaData(priv_handle, SET_VT_TIMESTAMP, &timestamp);
1731 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07001732 }
1733 }
1734 return;
1735}
1736
Thierry Strudel3d639192016-09-09 11:52:26 -07001737/*===========================================================================
1738 * FUNCTION : configureStreams
1739 *
1740 * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input
1741 * and output streams.
1742 *
1743 * PARAMETERS :
1744 * @stream_list : streams to be configured
1745 *
1746 * RETURN :
1747 *
1748 *==========================================================================*/
1749int QCamera3HardwareInterface::configureStreams(
1750 camera3_stream_configuration_t *streamList)
1751{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001752 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CFG_STRMS);
Thierry Strudel3d639192016-09-09 11:52:26 -07001753 int rc = 0;
1754
1755 // Acquire perfLock before configure streams
Thierry Strudelc2ee3302016-11-17 12:33:12 -08001756 mPerfLockMgr.acquirePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07001757 rc = configureStreamsPerfLocked(streamList);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08001758 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07001759
1760 return rc;
1761}
1762
1763/*===========================================================================
1764 * FUNCTION : configureStreamsPerfLocked
1765 *
1766 * DESCRIPTION: configureStreams while perfLock is held.
1767 *
1768 * PARAMETERS :
1769 * @stream_list : streams to be configured
1770 *
1771 * RETURN : int32_t type of status
1772 * NO_ERROR -- success
1773 * none-zero failure code
1774 *==========================================================================*/
1775int QCamera3HardwareInterface::configureStreamsPerfLocked(
1776 camera3_stream_configuration_t *streamList)
1777{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001778 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CFG_STRMS_PERF_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07001779 int rc = 0;
1780
1781 // Sanity check stream_list
1782 if (streamList == NULL) {
1783 LOGE("NULL stream configuration");
1784 return BAD_VALUE;
1785 }
1786 if (streamList->streams == NULL) {
1787 LOGE("NULL stream list");
1788 return BAD_VALUE;
1789 }
1790
1791 if (streamList->num_streams < 1) {
1792 LOGE("Bad number of streams requested: %d",
1793 streamList->num_streams);
1794 return BAD_VALUE;
1795 }
1796
1797 if (streamList->num_streams >= MAX_NUM_STREAMS) {
1798 LOGE("Maximum number of streams %d exceeded: %d",
1799 MAX_NUM_STREAMS, streamList->num_streams);
1800 return BAD_VALUE;
1801 }
1802
Jason Leec4cf5032017-05-24 18:31:41 -07001803 mOpMode = streamList->operation_mode;
1804 LOGD("mOpMode: %d", mOpMode);
1805
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001806 rc = validateUsageFlags(streamList);
1807 if (rc != NO_ERROR) {
1808 return rc;
1809 }
1810
Thierry Strudel3d639192016-09-09 11:52:26 -07001811     /* first invalidate all the streams in mStreamInfo
1812 * if they appear again, they will be validated */
1813 for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
1814 it != mStreamInfo.end(); it++) {
1815 QCamera3ProcessingChannel *channel = (QCamera3ProcessingChannel*)(*it)->stream->priv;
1816 if (channel) {
1817 channel->stop();
1818 }
1819 (*it)->status = INVALID;
1820 }
1821
1822 if (mRawDumpChannel) {
1823 mRawDumpChannel->stop();
1824 delete mRawDumpChannel;
1825 mRawDumpChannel = NULL;
1826 }
1827
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001828 if (mHdrPlusRawSrcChannel) {
1829 mHdrPlusRawSrcChannel->stop();
1830 delete mHdrPlusRawSrcChannel;
1831 mHdrPlusRawSrcChannel = NULL;
1832 }
1833
Thierry Strudel3d639192016-09-09 11:52:26 -07001834 if (mSupportChannel)
1835 mSupportChannel->stop();
1836
1837 if (mAnalysisChannel) {
1838 mAnalysisChannel->stop();
1839 }
1840 if (mMetadataChannel) {
1841 /* If content of mStreamInfo is not 0, there is metadata stream */
1842         /* If mStreamInfo is not empty, there is a metadata stream */
1843 }
1844 if (mChannelHandle) {
1845 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -07001846 mChannelHandle, /*stop_immediately*/false);
Thierry Strudel3d639192016-09-09 11:52:26 -07001847 LOGD("stopping channel %d", mChannelHandle);
1848 }
1849
1850 pthread_mutex_lock(&mMutex);
1851
Chien-Yu Chendeaebad2017-06-30 11:46:34 -07001852 mPictureChannel = NULL;
1853
Thierry Strudel3d639192016-09-09 11:52:26 -07001854 // Check state
1855 switch (mState) {
1856 case INITIALIZED:
1857 case CONFIGURED:
1858 case STARTED:
1859 /* valid state */
1860 break;
1861 default:
1862 LOGE("Invalid state %d", mState);
1863 pthread_mutex_unlock(&mMutex);
1864 return -ENODEV;
1865 }
1866
1867 /* Check whether we have video stream */
1868 m_bIs4KVideo = false;
1869 m_bIsVideo = false;
1870 m_bEisSupportedSize = false;
1871 m_bTnrEnabled = false;
Mansoor Aftab93a66e52017-01-26 14:58:25 -08001872 m_bVideoHdrEnabled = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07001873 bool isZsl = false;
Emilian Peev7650c122017-01-19 08:24:33 -08001874 bool depthPresent = false;
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001875 bool isPreview = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07001876 uint32_t videoWidth = 0U;
1877 uint32_t videoHeight = 0U;
1878 size_t rawStreamCnt = 0;
1879 size_t stallStreamCnt = 0;
1880 size_t processedStreamCnt = 0;
1881 // Number of streams on ISP encoder path
1882 size_t numStreamsOnEncoder = 0;
1883 size_t numYuv888OnEncoder = 0;
1884 bool bYuv888OverrideJpeg = false;
1885 cam_dimension_t largeYuv888Size = {0, 0};
1886 cam_dimension_t maxViewfinderSize = {0, 0};
1887 bool bJpegExceeds4K = false;
1888 bool bJpegOnEncoder = false;
1889 bool bUseCommonFeatureMask = false;
1890 cam_feature_mask_t commonFeatureMask = 0;
1891 bool bSmallJpegSize = false;
1892 uint32_t width_ratio;
1893 uint32_t height_ratio;
1894 maxViewfinderSize = gCamCapability[mCameraId]->max_viewfinder_size;
1895 camera3_stream_t *inputStream = NULL;
1896 bool isJpeg = false;
1897 cam_dimension_t jpegSize = {0, 0};
Thierry Strudel9ec39c62016-12-28 11:30:05 -08001898 cam_dimension_t previewSize = {0, 0};
Emilian Peev0f3c3162017-03-15 12:57:46 +00001899 size_t pdStatCount = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07001900
1901 cam_padding_info_t padding_info = gCamCapability[mCameraId]->padding_info;
1902
1903 /*EIS configuration*/
Thierry Strudel3d639192016-09-09 11:52:26 -07001904 uint8_t eis_prop_set;
1905 uint32_t maxEisWidth = 0;
1906 uint32_t maxEisHeight = 0;
1907
Thierry Strudel295a0ca2016-11-03 18:38:47 -07001908 // Initialize all instant AEC related variables
1909 mInstantAEC = false;
1910 mResetInstantAEC = false;
1911 mInstantAECSettledFrameNumber = 0;
1912 mAecSkipDisplayFrameBound = 0;
1913 mInstantAecFrameIdxCount = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -08001914 mCurrFeatureState = 0;
1915 mStreamConfig = true;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07001916
Binhao Lin09245482017-08-31 18:25:29 -07001917 m_bAVTimerEnabled = false;
1918
Thierry Strudel3d639192016-09-09 11:52:26 -07001919 memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
1920
1921 size_t count = IS_TYPE_MAX;
1922 count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
1923 for (size_t i = 0; i < count; i++) {
1924 if ((gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001925 (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0)) {
1926 m_bEisSupported = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07001927 break;
1928 }
1929 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001930
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001931 if (m_bEisSupported) {
Thierry Strudel3d639192016-09-09 11:52:26 -07001932 maxEisWidth = MAX_EIS_WIDTH;
1933 maxEisHeight = MAX_EIS_HEIGHT;
1934 }
1935
1936 /* EIS setprop control */
1937 char eis_prop[PROPERTY_VALUE_MAX];
1938 memset(eis_prop, 0, sizeof(eis_prop));
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001939 property_get("persist.camera.eis.enable", eis_prop, "1");
Thierry Strudel3d639192016-09-09 11:52:26 -07001940 eis_prop_set = (uint8_t)atoi(eis_prop);
1941
Mansoor Aftabb9370df2017-03-15 17:09:34 -07001942 m_bEisEnable = eis_prop_set && m_bEisSupported &&
Thierry Strudel3d639192016-09-09 11:52:26 -07001943 (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE);
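    // For illustration: with persist.camera.eis.enable at its default of "1", a
    // sensor that advertises EIS 2.0/3.0, and a non-HFR session, m_bEisEnable starts
    // out true here; it is cleared again further below for front cameras or for
    // configurations that contain no video stream.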
1944
Mansoor Aftabb9370df2017-03-15 17:09:34 -07001945 LOGD("m_bEisEnable: %d, eis_prop_set: %d, m_bEisSupported: %d",
1946 m_bEisEnable, eis_prop_set, m_bEisSupported);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001947
Thierry Strudel3d639192016-09-09 11:52:26 -07001948 /* stream configurations */
1949 for (size_t i = 0; i < streamList->num_streams; i++) {
1950 camera3_stream_t *newStream = streamList->streams[i];
1951 LOGI("stream[%d] type = %d, format = %d, width = %d, "
1952 "height = %d, rotation = %d, usage = 0x%x",
1953 i, newStream->stream_type, newStream->format,
1954 newStream->width, newStream->height, newStream->rotation,
1955 newStream->usage);
1956 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1957 newStream->stream_type == CAMERA3_STREAM_INPUT){
1958 isZsl = true;
1959 }
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001960 if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
1961 IS_USAGE_PREVIEW(newStream->usage)) {
1962 isPreview = true;
1963 }
1964
Thierry Strudel3d639192016-09-09 11:52:26 -07001965 if (newStream->stream_type == CAMERA3_STREAM_INPUT){
1966 inputStream = newStream;
1967 }
1968
Emilian Peev7650c122017-01-19 08:24:33 -08001969 if ((newStream->format == HAL_PIXEL_FORMAT_BLOB) &&
1970 (newStream->data_space != HAL_DATASPACE_DEPTH)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07001971 isJpeg = true;
1972 jpegSize.width = newStream->width;
1973 jpegSize.height = newStream->height;
1974 if (newStream->width > VIDEO_4K_WIDTH ||
1975 newStream->height > VIDEO_4K_HEIGHT)
1976 bJpegExceeds4K = true;
1977 }
1978
1979 if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
1980 (newStream->usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER)) {
1981 m_bIsVideo = true;
Thierry Strudel2896d122017-02-23 19:18:03 -08001982 // In HAL3 we can have multiple different video streams.
1983 // The variables video width and height are used below as
1984 // dimensions of the biggest of them
1985 if (videoWidth < newStream->width ||
1986 videoHeight < newStream->height) {
1987 videoWidth = newStream->width;
1988 videoHeight = newStream->height;
1989 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001990 if ((VIDEO_4K_WIDTH <= newStream->width) &&
1991 (VIDEO_4K_HEIGHT <= newStream->height)) {
1992 m_bIs4KVideo = true;
1993 }
1994 m_bEisSupportedSize = (newStream->width <= maxEisWidth) &&
1995 (newStream->height <= maxEisHeight);
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001996
Thierry Strudel3d639192016-09-09 11:52:26 -07001997 }
1998 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1999 newStream->stream_type == CAMERA3_STREAM_OUTPUT) {
2000 switch (newStream->format) {
2001 case HAL_PIXEL_FORMAT_BLOB:
Emilian Peev7650c122017-01-19 08:24:33 -08002002 if (newStream->data_space == HAL_DATASPACE_DEPTH) {
2003 depthPresent = true;
2004 break;
2005 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002006 stallStreamCnt++;
2007 if (isOnEncoder(maxViewfinderSize, newStream->width,
2008 newStream->height)) {
2009 numStreamsOnEncoder++;
2010 bJpegOnEncoder = true;
2011 }
2012 width_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.width,
2013 newStream->width);
2014 height_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.height,
2015                         newStream->height);
2016 FATAL_IF(gCamCapability[mCameraId]->max_downscale_factor == 0,
2017 "FATAL: max_downscale_factor cannot be zero and so assert");
2018 if ( (width_ratio > gCamCapability[mCameraId]->max_downscale_factor) ||
2019 (height_ratio > gCamCapability[mCameraId]->max_downscale_factor)) {
2020 LOGH("Setting small jpeg size flag to true");
2021 bSmallJpegSize = true;
2022 }
2023 break;
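                // Worked example (illustrative numbers only): with a 4032x3024 active
                // array and max_downscale_factor == 8, a 320x240 BLOB stream yields
                // width_ratio = ceil(4032 / 320) = 13 > 8, so bSmallJpegSize is set
                // and, for non-ZSL configurations, the JPEG stream is later given the
                // CAM_QCOM_FEATURE_PP_SUPERSET_HAL3 post-processing mask.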
2024 case HAL_PIXEL_FORMAT_RAW10:
2025 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2026 case HAL_PIXEL_FORMAT_RAW16:
2027 rawStreamCnt++;
Emilian Peev0f3c3162017-03-15 12:57:46 +00002028 if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
2029 (HAL_PIXEL_FORMAT_RAW16 == newStream->format)) {
2030 pdStatCount++;
2031 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002032 break;
2033 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
2034 processedStreamCnt++;
2035 if (isOnEncoder(maxViewfinderSize, newStream->width,
2036 newStream->height)) {
2037 if (newStream->stream_type != CAMERA3_STREAM_BIDIRECTIONAL &&
2038 !IS_USAGE_ZSL(newStream->usage)) {
2039 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2040 }
2041 numStreamsOnEncoder++;
2042 }
2043 break;
2044 case HAL_PIXEL_FORMAT_YCbCr_420_888:
2045 processedStreamCnt++;
2046 if (isOnEncoder(maxViewfinderSize, newStream->width,
2047 newStream->height)) {
2048 // If Yuv888 size is not greater than 4K, set feature mask
2049 // to SUPERSET so that it support concurrent request on
2050 // YUV and JPEG.
2051 if (newStream->width <= VIDEO_4K_WIDTH &&
2052 newStream->height <= VIDEO_4K_HEIGHT) {
2053 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2054 }
2055 numStreamsOnEncoder++;
2056 numYuv888OnEncoder++;
2057 largeYuv888Size.width = newStream->width;
2058 largeYuv888Size.height = newStream->height;
2059 }
2060 break;
2061 default:
2062 processedStreamCnt++;
2063 if (isOnEncoder(maxViewfinderSize, newStream->width,
2064 newStream->height)) {
2065 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2066 numStreamsOnEncoder++;
2067 }
2068 break;
2069 }
2070
2071 }
2072 }
2073
2074 if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
2075 gCamCapability[mCameraId]->position == CAM_POSITION_FRONT_AUX ||
2076 !m_bIsVideo) {
2077 m_bEisEnable = false;
2078 }
2079
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002080 if (validateUsageFlagsForEis(streamList) != NO_ERROR) {
2081 pthread_mutex_unlock(&mMutex);
2082 return -EINVAL;
2083 }
2084
Thierry Strudel54dc9782017-02-15 12:12:10 -08002085 uint8_t forceEnableTnr = 0;
2086 char tnr_prop[PROPERTY_VALUE_MAX];
2087 memset(tnr_prop, 0, sizeof(tnr_prop));
2088 property_get("debug.camera.tnr.forceenable", tnr_prop, "0");
2089 forceEnableTnr = (uint8_t)atoi(tnr_prop);
2090
Thierry Strudel3d639192016-09-09 11:52:26 -07002091 /* Logic to enable/disable TNR based on specific config size/etc.*/
2092 if ((m_bTnrPreview || m_bTnrVideo) && m_bIsVideo &&
Thierry Strudel3d639192016-09-09 11:52:26 -07002093 (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE))
2094 m_bTnrEnabled = true;
Thierry Strudel54dc9782017-02-15 12:12:10 -08002095 else if (forceEnableTnr)
2096 m_bTnrEnabled = true;
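    // Net effect: TNR is enabled when the TNR setprops request it for a video
    // session outside HFR mode, or whenever debug.camera.tnr.forceenable is set
    // to a non-zero value.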
Thierry Strudel3d639192016-09-09 11:52:26 -07002097
Mansoor Aftab93a66e52017-01-26 14:58:25 -08002098 char videoHdrProp[PROPERTY_VALUE_MAX];
2099 memset(videoHdrProp, 0, sizeof(videoHdrProp));
2100 property_get("persist.camera.hdr.video", videoHdrProp, "0");
2101 uint8_t hdr_mode_prop = (uint8_t)atoi(videoHdrProp);
2102
2103 if (hdr_mode_prop == 1 && m_bIsVideo &&
2104 mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2105 m_bVideoHdrEnabled = true;
2106 else
2107 m_bVideoHdrEnabled = false;
2108
2109
Thierry Strudel3d639192016-09-09 11:52:26 -07002110 /* Check if num_streams is sane */
2111 if (stallStreamCnt > MAX_STALLING_STREAMS ||
2112 rawStreamCnt > MAX_RAW_STREAMS ||
2113 processedStreamCnt > MAX_PROCESSED_STREAMS) {
2114         LOGE("Invalid stream config: stall: %d, raw: %d, processed: %d",
2115 stallStreamCnt, rawStreamCnt, processedStreamCnt);
2116 pthread_mutex_unlock(&mMutex);
2117 return -EINVAL;
2118 }
2119 /* Check whether we have zsl stream or 4k video case */
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002120 if (isZsl && m_bIs4KVideo) {
2121 LOGE("Currently invalid configuration ZSL & 4K Video!");
Thierry Strudel3d639192016-09-09 11:52:26 -07002122 pthread_mutex_unlock(&mMutex);
2123 return -EINVAL;
2124 }
2125 /* Check if stream sizes are sane */
2126 if (numStreamsOnEncoder > 2) {
2127 LOGE("Number of streams on ISP encoder path exceeds limits of 2");
2128 pthread_mutex_unlock(&mMutex);
2129 return -EINVAL;
2130 } else if (1 < numStreamsOnEncoder){
2131 bUseCommonFeatureMask = true;
2132 LOGH("Multiple streams above max viewfinder size, common mask needed");
2133 }
2134
2135 /* Check if BLOB size is greater than 4k in 4k recording case */
2136 if (m_bIs4KVideo && bJpegExceeds4K) {
2137 LOGE("HAL doesn't support Blob size greater than 4k in 4k recording");
2138 pthread_mutex_unlock(&mMutex);
2139 return -EINVAL;
2140 }
2141
Emilian Peev7650c122017-01-19 08:24:33 -08002142 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2143 depthPresent) {
2144 LOGE("HAL doesn't support depth streams in HFR mode!");
2145 pthread_mutex_unlock(&mMutex);
2146 return -EINVAL;
2147 }
2148
Thierry Strudel3d639192016-09-09 11:52:26 -07002149 // When JPEG and preview streams share VFE output, CPP will not apply CAC2
2150 // on JPEG stream. So disable such configurations to ensure CAC2 is applied.
2151 // Don't fail for reprocess configurations. Also don't fail if bJpegExceeds4K
2152 // is not true. Otherwise testMandatoryOutputCombinations will fail with following
2153 // configurations:
2154 // {[PRIV, PREVIEW] [PRIV, RECORD] [JPEG, RECORD]}
2155 // {[PRIV, PREVIEW] [YUV, RECORD] [JPEG, RECORD]}
2156 // (These two configurations will not have CAC2 enabled even in HQ modes.)
2157 if (!isZsl && bJpegOnEncoder && bJpegExceeds4K && bUseCommonFeatureMask) {
2158 ALOGE("%s: Blob size greater than 4k and multiple streams are on encoder output",
2159 __func__);
2160 pthread_mutex_unlock(&mMutex);
2161 return -EINVAL;
2162 }
2163
2164 // If jpeg stream is available, and a YUV 888 stream is on Encoder path, and
2165 // the YUV stream's size is greater or equal to the JPEG size, set common
2166 // postprocess mask to NONE, so that we can take advantage of postproc bypass.
2167 if (numYuv888OnEncoder && isOnEncoder(maxViewfinderSize,
2168 jpegSize.width, jpegSize.height) &&
2169 largeYuv888Size.width > jpegSize.width &&
2170 largeYuv888Size.height > jpegSize.height) {
2171 bYuv888OverrideJpeg = true;
2172 } else if (!isJpeg && numStreamsOnEncoder > 1) {
2173 commonFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2174 }
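    // For illustration (hypothetical sizes): a 4000x3000 YCbCr_420_888 stream and a
    // 3264x2448 JPEG, both above the max viewfinder size, set bYuv888OverrideJpeg,
    // and the JPEG (snapshot) stream's internal dimensions are later overridden to
    // the larger YUV888 size.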
2175
2176 LOGH("max viewfinder width %d height %d isZsl %d bUseCommonFeature %x commonFeatureMask %llx",
2177 maxViewfinderSize.width, maxViewfinderSize.height, isZsl, bUseCommonFeatureMask,
2178 commonFeatureMask);
2179 LOGH("numStreamsOnEncoder %d, processedStreamCnt %d, stallcnt %d bSmallJpegSize %d",
2180 numStreamsOnEncoder, processedStreamCnt, stallStreamCnt, bSmallJpegSize);
2181
2182 rc = validateStreamDimensions(streamList);
2183 if (rc == NO_ERROR) {
2184 rc = validateStreamRotations(streamList);
2185 }
2186 if (rc != NO_ERROR) {
2187 LOGE("Invalid stream configuration requested!");
2188 pthread_mutex_unlock(&mMutex);
2189 return rc;
2190 }
2191
Emilian Peev0f3c3162017-03-15 12:57:46 +00002192 if (1 < pdStatCount) {
2193 LOGE("HAL doesn't support multiple PD streams");
2194 pthread_mutex_unlock(&mMutex);
2195 return -EINVAL;
2196 }
2197
2198 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2199 (1 == pdStatCount)) {
2200 LOGE("HAL doesn't support PD streams in HFR mode!");
2201 pthread_mutex_unlock(&mMutex);
2202 return -EINVAL;
2203 }
2204
Thierry Strudel3d639192016-09-09 11:52:26 -07002205 camera3_stream_t *zslStream = NULL; //Only use this for size and not actual handle!
2206 for (size_t i = 0; i < streamList->num_streams; i++) {
2207 camera3_stream_t *newStream = streamList->streams[i];
2208 LOGH("newStream type = %d, stream format = %d "
2209 "stream size : %d x %d, stream rotation = %d",
2210 newStream->stream_type, newStream->format,
2211 newStream->width, newStream->height, newStream->rotation);
2212 //if the stream is in the mStreamList validate it
2213         // if the stream is already in mStreamInfo, validate it
2214 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
2215 it != mStreamInfo.end(); it++) {
2216 if ((*it)->stream == newStream) {
2217 QCamera3ProcessingChannel *channel =
2218 (QCamera3ProcessingChannel*)(*it)->stream->priv;
2219 stream_exists = true;
2220 if (channel)
2221 delete channel;
2222 (*it)->status = VALID;
2223 (*it)->stream->priv = NULL;
2224 (*it)->channel = NULL;
2225 }
2226 }
2227 if (!stream_exists && newStream->stream_type != CAMERA3_STREAM_INPUT) {
2228 //new stream
2229 stream_info_t* stream_info;
2230 stream_info = (stream_info_t* )malloc(sizeof(stream_info_t));
2231 if (!stream_info) {
2232 LOGE("Could not allocate stream info");
2233 rc = -ENOMEM;
2234 pthread_mutex_unlock(&mMutex);
2235 return rc;
2236 }
2237 stream_info->stream = newStream;
2238 stream_info->status = VALID;
2239 stream_info->channel = NULL;
Chien-Yu Chen14d3e392017-07-10 18:27:05 -07002240 stream_info->id = i;
Thierry Strudel3d639192016-09-09 11:52:26 -07002241 mStreamInfo.push_back(stream_info);
2242 }
2243 /* Covers Opaque ZSL and API1 F/W ZSL */
2244 if (IS_USAGE_ZSL(newStream->usage)
2245 || newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ) {
2246 if (zslStream != NULL) {
2247 LOGE("Multiple input/reprocess streams requested!");
2248 pthread_mutex_unlock(&mMutex);
2249 return BAD_VALUE;
2250 }
2251 zslStream = newStream;
2252 }
2253 /* Covers YUV reprocess */
2254 if (inputStream != NULL) {
2255 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT
2256 && newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
2257 && inputStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
2258 && inputStream->width == newStream->width
2259 && inputStream->height == newStream->height) {
2260 if (zslStream != NULL) {
2261 /* This scenario indicates multiple YUV streams with same size
2262 * as input stream have been requested, since zsl stream handle
2263                  * is solely used for the purpose of overriding the size of streams
2264 * which share h/w streams we will just make a guess here as to
2265 * which of the stream is a ZSL stream, this will be refactored
2266 * once we make generic logic for streams sharing encoder output
2267 */
2268 LOGH("Warning, Multiple ip/reprocess streams requested!");
2269 }
2270 zslStream = newStream;
2271 }
2272 }
2273 }
2274
2275 /* If a zsl stream is set, we know that we have configured at least one input or
2276 bidirectional stream */
2277 if (NULL != zslStream) {
2278 mInputStreamInfo.dim.width = (int32_t)zslStream->width;
2279 mInputStreamInfo.dim.height = (int32_t)zslStream->height;
2280 mInputStreamInfo.format = zslStream->format;
2281 mInputStreamInfo.usage = zslStream->usage;
2282 LOGD("Input stream configured! %d x %d, format %d, usage %d",
2283 mInputStreamInfo.dim.width,
2284 mInputStreamInfo.dim.height,
2285 mInputStreamInfo.format, mInputStreamInfo.usage);
2286 }
2287
2288 cleanAndSortStreamInfo();
2289 if (mMetadataChannel) {
2290 delete mMetadataChannel;
2291 mMetadataChannel = NULL;
2292 }
2293 if (mSupportChannel) {
2294 delete mSupportChannel;
2295 mSupportChannel = NULL;
2296 }
2297
2298 if (mAnalysisChannel) {
2299 delete mAnalysisChannel;
2300 mAnalysisChannel = NULL;
2301 }
2302
2303 if (mDummyBatchChannel) {
2304 delete mDummyBatchChannel;
2305 mDummyBatchChannel = NULL;
2306 }
2307
Emilian Peev7650c122017-01-19 08:24:33 -08002308 if (mDepthChannel) {
2309 mDepthChannel = NULL;
2310 }
Emilian Peev656e4fa2017-06-02 16:47:04 +01002311 mDepthCloudMode = CAM_PD_DATA_SKIP;
Emilian Peev7650c122017-01-19 08:24:33 -08002312
Chien-Yu Chen3f303522017-05-19 15:21:45 -07002313 mShutterDispatcher.clear();
2314 mOutputBufferDispatcher.clear();
2315
Thierry Strudel2896d122017-02-23 19:18:03 -08002316 char is_type_value[PROPERTY_VALUE_MAX];
2317 property_get("persist.camera.is_type", is_type_value, "4");
2318 m_bEis3PropertyEnabled = (atoi(is_type_value) == IS_TYPE_EIS_3_0);
2319
Binhao Line406f062017-05-03 14:39:44 -07002320 char property_value[PROPERTY_VALUE_MAX];
2321 property_get("persist.camera.gzoom.at", property_value, "0");
2322 int goog_zoom_at = atoi(property_value);
Jason Leec4cf5032017-05-24 18:31:41 -07002323 bool is_goog_zoom_video_enabled = ((goog_zoom_at & 1) > 0) &&
2324 gCamCapability[mCameraId]->position == CAM_POSITION_BACK;
2325 bool is_goog_zoom_preview_enabled = ((goog_zoom_at & 2) > 0) &&
2326 gCamCapability[mCameraId]->position == CAM_POSITION_BACK;
Binhao Line406f062017-05-03 14:39:44 -07002327
2328 property_get("persist.camera.gzoom.4k", property_value, "0");
2329 bool is_goog_zoom_4k_enabled = (atoi(property_value) > 0);
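    // For illustration: persist.camera.gzoom.at acts as a bitmask where bit 0 enables
    // Google zoom on the video stream and bit 1 on preview streams (back camera
    // only), so e.g. a value of "3" enables both; persist.camera.gzoom.4k additionally
    // gates the video case for 4K sessions.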
2330
Thierry Strudel3d639192016-09-09 11:52:26 -07002331 //Create metadata channel and initialize it
2332 cam_feature_mask_t metadataFeatureMask = CAM_QCOM_FEATURE_NONE;
2333 setPAAFSupport(metadataFeatureMask, CAM_STREAM_TYPE_METADATA,
2334 gCamCapability[mCameraId]->color_arrangement);
2335 mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
2336 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002337 setBufferErrorStatus, &padding_info, metadataFeatureMask, this);
Thierry Strudel3d639192016-09-09 11:52:26 -07002338 if (mMetadataChannel == NULL) {
2339 LOGE("failed to allocate metadata channel");
2340 rc = -ENOMEM;
2341 pthread_mutex_unlock(&mMutex);
2342 return rc;
2343 }
Emilian Peev662c05e2017-05-16 10:00:04 +01002344 mMetadataChannel->enableDepthData(depthPresent);
Thierry Strudel3d639192016-09-09 11:52:26 -07002345 rc = mMetadataChannel->initialize(IS_TYPE_NONE);
2346 if (rc < 0) {
2347 LOGE("metadata channel initialization failed");
2348 delete mMetadataChannel;
2349 mMetadataChannel = NULL;
2350 pthread_mutex_unlock(&mMutex);
2351 return rc;
2352 }
2353
Thierry Strudel2896d122017-02-23 19:18:03 -08002354 cam_feature_mask_t zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002355 bool isRawStreamRequested = false;
Thierry Strudel2896d122017-02-23 19:18:03 -08002356 bool onlyRaw = true;
Binhao Lincdb362a2017-04-20 13:31:54 -07002357 // Keep track of preview/video streams indices.
2358 // There could be more than one preview streams, but only one video stream.
2359 int32_t video_stream_idx = -1;
2360 int32_t preview_stream_idx[streamList->num_streams];
2361 size_t preview_stream_cnt = 0;
Jason Leea52b77e2017-06-27 16:16:17 -07002362 bool previewTnr[streamList->num_streams];
2363 memset(previewTnr, 0, sizeof(bool) * streamList->num_streams);
2364 bool isFront = gCamCapability[mCameraId]->position == CAM_POSITION_FRONT;
2365 // Loop through once to determine preview TNR conditions before creating channels.
2366 for (size_t i = 0; i < streamList->num_streams; i++) {
2367 camera3_stream_t *newStream = streamList->streams[i];
2368 uint32_t stream_usage = newStream->usage;
2369 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT &&
2370 newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {
2371 if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER)
2372 video_stream_idx = (int32_t)i;
2373 else
2374 preview_stream_idx[preview_stream_cnt++] = (int32_t)i;
2375 }
2376 }
2377 // By default, preview stream TNR is disabled.
2378 // Enable TNR to the preview stream if all conditions below are satisfied:
2379 // 1. preview resolution == video resolution.
2380 // 2. video stream TNR is enabled.
2381 // 3. EIS2.0 OR is front camera (which wouldn't use EIS3 even if it's set)
2382 for (size_t i = 0; i < preview_stream_cnt && video_stream_idx != -1; i++) {
2383 camera3_stream_t *video_stream = streamList->streams[video_stream_idx];
2384 camera3_stream_t *preview_stream = streamList->streams[preview_stream_idx[i]];
2385 if (m_bTnrEnabled && m_bTnrVideo &&
2386 (isFront || (atoi(is_type_value) == IS_TYPE_EIS_2_0)) &&
2387 video_stream->width == preview_stream->width &&
2388 video_stream->height == preview_stream->height) {
2389 previewTnr[preview_stream_idx[i]] = true;
2390 }
2391 }
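    // For illustration: with video TNR enabled, a 1920x1080 preview stream paired
    // with a 1920x1080 video stream has previewTnr set when EIS 2.0 is selected (or
    // on the front camera, which will not use EIS 3.0); a preview whose resolution
    // differs from the video stream keeps previewTnr false.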
2392
Thierry Strudel3d639192016-09-09 11:52:26 -07002393 memset(&mStreamConfigInfo, 0, sizeof(cam_stream_size_info_t));
2394 /* Allocate channel objects for the requested streams */
2395 for (size_t i = 0; i < streamList->num_streams; i++) {
Binhao Line406f062017-05-03 14:39:44 -07002396
Thierry Strudel3d639192016-09-09 11:52:26 -07002397 camera3_stream_t *newStream = streamList->streams[i];
2398 uint32_t stream_usage = newStream->usage;
2399 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)newStream->width;
2400 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)newStream->height;
2401 struct camera_info *p_info = NULL;
2402 pthread_mutex_lock(&gCamLock);
2403 p_info = get_cam_info(mCameraId, &mStreamConfigInfo.sync_type);
2404 pthread_mutex_unlock(&gCamLock);
2405 if ((newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
2406 || IS_USAGE_ZSL(newStream->usage)) &&
2407 newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED){
Thierry Strudel2896d122017-02-23 19:18:03 -08002408 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002409 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
Thierry Strudel2896d122017-02-23 19:18:03 -08002410 if (isOnEncoder(maxViewfinderSize, newStream->width, newStream->height)) {
2411 if (bUseCommonFeatureMask)
2412 zsl_ppmask = commonFeatureMask;
2413 else
2414 zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002415 } else {
Thierry Strudel2896d122017-02-23 19:18:03 -08002416 if (numStreamsOnEncoder > 0)
2417 zsl_ppmask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2418 else
2419 zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002420 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002421 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = zsl_ppmask;
Thierry Strudel3d639192016-09-09 11:52:26 -07002422 } else if(newStream->stream_type == CAMERA3_STREAM_INPUT) {
Thierry Strudel2896d122017-02-23 19:18:03 -08002423 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002424 LOGH("Input stream configured, reprocess config");
2425 } else {
2426 //for non zsl streams find out the format
2427 switch (newStream->format) {
2428 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED :
2429 {
Thierry Strudel2896d122017-02-23 19:18:03 -08002430 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002431 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2432 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2433 /* add additional features to pp feature mask */
2434 addToPPFeatureMask(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
2435 mStreamConfigInfo.num_streams);
2436
2437 if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) {
2438 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2439 CAM_STREAM_TYPE_VIDEO;
2440 if (m_bTnrEnabled && m_bTnrVideo) {
2441 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2442 CAM_QCOM_FEATURE_CPP_TNR;
2443 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
2444 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2445 ~CAM_QCOM_FEATURE_CDS;
2446 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002447 if (m_bEis3PropertyEnabled /* hint for EIS 3 needed here */) {
2448 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2449 CAM_QTI_FEATURE_PPEISCORE;
2450 }
Binhao Line406f062017-05-03 14:39:44 -07002451 if (is_goog_zoom_video_enabled && (is_goog_zoom_4k_enabled || !m_bIs4KVideo)) {
2452 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2453 CAM_QCOM_FEATURE_GOOG_ZOOM;
2454 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002455 } else {
2456 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2457 CAM_STREAM_TYPE_PREVIEW;
Jason Leea52b77e2017-06-27 16:16:17 -07002458 if (m_bTnrEnabled && (previewTnr[i] || m_bTnrPreview)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002459 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2460 CAM_QCOM_FEATURE_CPP_TNR;
2461 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
2462 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2463 ~CAM_QCOM_FEATURE_CDS;
2464 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002465 if(!m_bSwTnrPreview) {
2466 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2467 ~CAM_QTI_FEATURE_SW_TNR;
2468 }
Binhao Line406f062017-05-03 14:39:44 -07002469 if (is_goog_zoom_preview_enabled) {
2470 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2471 CAM_QCOM_FEATURE_GOOG_ZOOM;
2472 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002473 padding_info.width_padding = mSurfaceStridePadding;
2474 padding_info.height_padding = CAM_PAD_TO_2;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002475 previewSize.width = (int32_t)newStream->width;
2476 previewSize.height = (int32_t)newStream->height;
Thierry Strudel3d639192016-09-09 11:52:26 -07002477 }
2478 if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
2479 (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
2480 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2481 newStream->height;
2482 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2483 newStream->width;
2484 }
2485 }
2486 break;
2487 case HAL_PIXEL_FORMAT_YCbCr_420_888:
Thierry Strudel2896d122017-02-23 19:18:03 -08002488 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002489 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_CALLBACK;
2490 if (isOnEncoder(maxViewfinderSize, newStream->width, newStream->height)) {
2491 if (bUseCommonFeatureMask)
2492 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2493 commonFeatureMask;
2494 else
2495 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2496 CAM_QCOM_FEATURE_NONE;
2497 } else {
2498 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2499 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2500 }
2501 break;
2502 case HAL_PIXEL_FORMAT_BLOB:
Thierry Strudel2896d122017-02-23 19:18:03 -08002503 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002504 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
2505 // No need to check bSmallJpegSize if ZSL is present since JPEG uses ZSL stream
2506 if ((m_bIs4KVideo && !isZsl) || (bSmallJpegSize && !isZsl)) {
2507 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2508 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel2896d122017-02-23 19:18:03 -08002509 /* Remove rotation if it is not supported
2510 for 4K LiveVideo snapshot case (online processing) */
2511 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask &
2512 CAM_QCOM_FEATURE_ROTATION)) {
2513 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams]
2514 &= ~CAM_QCOM_FEATURE_ROTATION;
2515 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002516 } else {
2517 if (bUseCommonFeatureMask &&
2518 isOnEncoder(maxViewfinderSize, newStream->width,
2519 newStream->height)) {
2520 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = commonFeatureMask;
2521 } else {
2522 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2523 }
2524 }
2525 if (isZsl) {
2526 if (zslStream) {
2527 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2528 (int32_t)zslStream->width;
2529 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2530 (int32_t)zslStream->height;
Thierry Strudel2896d122017-02-23 19:18:03 -08002531 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2532 zsl_ppmask;
Thierry Strudel3d639192016-09-09 11:52:26 -07002533 } else {
2534 LOGE("Error, No ZSL stream identified");
2535 pthread_mutex_unlock(&mMutex);
2536 return -EINVAL;
2537 }
2538 } else if (m_bIs4KVideo) {
2539 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)videoWidth;
2540 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)videoHeight;
2541 } else if (bYuv888OverrideJpeg) {
2542 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2543 (int32_t)largeYuv888Size.width;
2544 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2545 (int32_t)largeYuv888Size.height;
2546 }
2547 break;
2548 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2549 case HAL_PIXEL_FORMAT_RAW16:
2550 case HAL_PIXEL_FORMAT_RAW10:
2551 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
2552 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2553 isRawStreamRequested = true;
Emilian Peev0f3c3162017-03-15 12:57:46 +00002554 if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
2555 (HAL_PIXEL_FORMAT_RAW16 == newStream->format)) {
2556 mStreamConfigInfo.sub_format_type[mStreamConfigInfo.num_streams] =
2557 gCamCapability[mCameraId]->sub_fmt[mPDIndex];
2558 mStreamConfigInfo.format[mStreamConfigInfo.num_streams] =
2559 gCamCapability[mCameraId]->supported_meta_raw_fmts[mPDIndex];
2560 mStreamConfigInfo.dt[mStreamConfigInfo.num_streams] =
2561 gCamCapability[mCameraId]->dt[mPDIndex];
2562 mStreamConfigInfo.vc[mStreamConfigInfo.num_streams] =
2563 gCamCapability[mCameraId]->vc[mPDIndex];
2564 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002565 break;
2566 default:
Thierry Strudel2896d122017-02-23 19:18:03 -08002567 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002568 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_DEFAULT;
2569 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2570 break;
2571 }
2572 }
2573
2574 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2575 (cam_stream_type_t) mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2576 gCamCapability[mCameraId]->color_arrangement);
2577
2578 if (newStream->priv == NULL) {
2579 //New stream, construct channel
2580 switch (newStream->stream_type) {
2581 case CAMERA3_STREAM_INPUT:
2582 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ;
2583 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE; //WR for in-place algos
2584 break;
2585 case CAMERA3_STREAM_BIDIRECTIONAL:
2586 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ |
2587 GRALLOC_USAGE_HW_CAMERA_WRITE;
2588 break;
2589 case CAMERA3_STREAM_OUTPUT:
2590 /* For video encoder streams, set the read/write-rarely
2591 * flags so that the buffers may be allocated un-cached */
2592 if (newStream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER)
2593 newStream->usage |=
2594 (GRALLOC_USAGE_SW_READ_RARELY |
2595 GRALLOC_USAGE_SW_WRITE_RARELY |
2596 GRALLOC_USAGE_HW_CAMERA_WRITE);
2597 else if (IS_USAGE_ZSL(newStream->usage))
2598 {
2599 LOGD("ZSL usage flag skipping");
2600 }
2601 else if (newStream == zslStream
2602 || newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
2603 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_ZSL;
2604 } else
2605 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;
2606 break;
2607 default:
2608 LOGE("Invalid stream_type %d", newStream->stream_type);
2609 break;
2610 }
2611
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002612 bool forcePreviewUBWC = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07002613 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT ||
2614 newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
2615 QCamera3ProcessingChannel *channel = NULL;
2616 switch (newStream->format) {
2617 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
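// Video-encoder surfaces in constrained high-speed (HFR) mode get a dedicated channel
// with no heap buffers; other implementation-defined streams fall through to the
// regular preview/video channel created in the else branch below.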
2618 if ((newStream->usage &
2619 private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) &&
2620 (streamList->operation_mode ==
2621 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2622 ) {
2623 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2624 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002625 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002626 this,
2627 newStream,
2628 (cam_stream_type_t)
2629 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2630 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2631 mMetadataChannel,
2632 0); //heap buffers are not required for HFR video channel
2633 if (channel == NULL) {
2634 LOGE("allocation of channel failed");
2635 pthread_mutex_unlock(&mMutex);
2636 return -ENOMEM;
2637 }
2638 //channel->getNumBuffers() will return 0 here so use
2639 //MAX_INFLIGHT_HFR_REQUESTS
2640 newStream->max_buffers = MAX_INFLIGHT_HFR_REQUESTS;
2641 newStream->priv = channel;
2642 LOGI("num video buffers in HFR mode: %d",
2643 MAX_INFLIGHT_HFR_REQUESTS);
2644 } else {
2645 /* Copy stream contents in HFR preview only case to create
2646 * dummy batch channel so that sensor streaming is in
2647 * HFR mode */
2648 if (!m_bIsVideo && (streamList->operation_mode ==
2649 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)) {
2650 mDummyBatchStream = *newStream;
2651 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002652 int bufferCount = MAX_INFLIGHT_REQUESTS;
2653 if (mStreamConfigInfo.type[mStreamConfigInfo.num_streams] ==
2654 CAM_STREAM_TYPE_VIDEO) {
Zhijun He6cdf6372017-07-15 14:59:58 -07002655 if (m_bEis3PropertyEnabled /* hint for EIS 3 needed here */) {
2656 // WAR: 4K video can only run <=30fps, reduce the buffer count.
2657 bufferCount = m_bIs4KVideo ?
2658 MAX_30FPS_VIDEO_BUFFERS : MAX_VIDEO_BUFFERS;
2659 }
2660
Thierry Strudel2896d122017-02-23 19:18:03 -08002661 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002662 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2663 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002664 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002665 this,
2666 newStream,
2667 (cam_stream_type_t)
2668 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2669 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2670 mMetadataChannel,
Thierry Strudel2896d122017-02-23 19:18:03 -08002671 bufferCount);
Thierry Strudel3d639192016-09-09 11:52:26 -07002672 if (channel == NULL) {
2673 LOGE("allocation of channel failed");
2674 pthread_mutex_unlock(&mMutex);
2675 return -ENOMEM;
2676 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002677 /* disable UBWC for preview, though supported,
2678 * to take advantage of CPP duplication */
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002679 if (m_bIsVideo && (!QCameraCommon::isVideoUBWCEnabled()) &&
Thierry Strudel2896d122017-02-23 19:18:03 -08002680 (previewSize.width == (int32_t)videoWidth)&&
2681 (previewSize.height == (int32_t)videoHeight)){
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002682 forcePreviewUBWC = false;
Thierry Strudel2896d122017-02-23 19:18:03 -08002683 }
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002684 channel->setUBWCEnabled(forcePreviewUBWC);
Binhao Line406f062017-05-03 14:39:44 -07002685 /* When goog_zoom is linked to the preview or video stream,
2686 * disable ubwc to the linked stream */
2687 if ((mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &
2688 CAM_QCOM_FEATURE_GOOG_ZOOM) != 0) {
2689 channel->setUBWCEnabled(false);
2690 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002691 newStream->max_buffers = channel->getNumBuffers();
2692 newStream->priv = channel;
2693 }
2694 break;
2695 case HAL_PIXEL_FORMAT_YCbCr_420_888: {
2696 channel = new QCamera3YUVChannel(mCameraHandle->camera_handle,
2697 mChannelHandle,
2698 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002699 setBufferErrorStatus, &padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002700 this,
2701 newStream,
2702 (cam_stream_type_t)
2703 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2704 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2705 mMetadataChannel);
2706 if (channel == NULL) {
2707 LOGE("allocation of YUV channel failed");
2708 pthread_mutex_unlock(&mMutex);
2709 return -ENOMEM;
2710 }
2711 newStream->max_buffers = channel->getNumBuffers();
2712 newStream->priv = channel;
2713 break;
2714 }
2715 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2716 case HAL_PIXEL_FORMAT_RAW16:
Emilian Peev0f3c3162017-03-15 12:57:46 +00002717 case HAL_PIXEL_FORMAT_RAW10: {
2718 bool isRAW16 = ((newStream->format == HAL_PIXEL_FORMAT_RAW16) &&
2719 (HAL_DATASPACE_DEPTH != newStream->data_space))
2720 ? true : false;
Thierry Strudel3d639192016-09-09 11:52:26 -07002721 mRawChannel = new QCamera3RawChannel(
2722 mCameraHandle->camera_handle, mChannelHandle,
2723 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002724 setBufferErrorStatus, &padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002725 this, newStream,
2726 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
Emilian Peev0f3c3162017-03-15 12:57:46 +00002727 mMetadataChannel, isRAW16);
Thierry Strudel3d639192016-09-09 11:52:26 -07002728 if (mRawChannel == NULL) {
2729 LOGE("allocation of raw channel failed");
2730 pthread_mutex_unlock(&mMutex);
2731 return -ENOMEM;
2732 }
2733 newStream->max_buffers = mRawChannel->getNumBuffers();
2734 newStream->priv = (QCamera3ProcessingChannel*)mRawChannel;
2735 break;
Emilian Peev0f3c3162017-03-15 12:57:46 +00002736 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002737 case HAL_PIXEL_FORMAT_BLOB:
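// A BLOB stream with the DEPTH dataspace is routed to a dedicated depth channel;
// every other BLOB stream is a JPEG snapshot stream handled by QCamera3PicChannel.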
Emilian Peev7650c122017-01-19 08:24:33 -08002738 if (newStream->data_space == HAL_DATASPACE_DEPTH) {
2739 mDepthChannel = new QCamera3DepthChannel(
2740 mCameraHandle->camera_handle, mChannelHandle,
2741 mCameraHandle->ops, NULL, NULL, &padding_info,
2742 0, this, MAX_INFLIGHT_REQUESTS, newStream,
2743 mMetadataChannel);
2744 if (NULL == mDepthChannel) {
2745 LOGE("Allocation of depth channel failed");
2746 pthread_mutex_unlock(&mMutex);
2747 return NO_MEMORY;
2748 }
2749 newStream->priv = mDepthChannel;
2750 newStream->max_buffers = MAX_INFLIGHT_REQUESTS;
2751 } else {
2752 // Max live snapshot inflight buffer is 1. This is to mitigate
2753 // frame drop issues for video snapshot. The more buffers being
2754 // allocated, the more frame drops there are.
2755 mPictureChannel = new QCamera3PicChannel(
2756 mCameraHandle->camera_handle, mChannelHandle,
2757 mCameraHandle->ops, captureResultCb,
2758 setBufferErrorStatus, &padding_info, this, newStream,
2759 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2760 m_bIs4KVideo, isZsl, mMetadataChannel,
2761 (m_bIsVideo ? 1 : MAX_INFLIGHT_BLOB));
2762 if (mPictureChannel == NULL) {
2763 LOGE("allocation of channel failed");
2764 pthread_mutex_unlock(&mMutex);
2765 return -ENOMEM;
2766 }
2767 newStream->priv = (QCamera3ProcessingChannel*)mPictureChannel;
2768 newStream->max_buffers = mPictureChannel->getNumBuffers();
2769 mPictureChannel->overrideYuvSize(
2770 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width,
2771 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height);
Thierry Strudel3d639192016-09-09 11:52:26 -07002772 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002773 break;
2774
2775 default:
2776 LOGE("not a supported format 0x%x", newStream->format);
Thierry Strudel73e91562017-05-15 09:16:18 -07002777 pthread_mutex_unlock(&mMutex);
2778 return -EINVAL;
Thierry Strudel3d639192016-09-09 11:52:26 -07002779 }
2780 } else if (newStream->stream_type == CAMERA3_STREAM_INPUT) {
2781 newStream->max_buffers = MAX_INFLIGHT_REPROCESS_REQUESTS;
2782 } else {
2783 LOGE("Error, Unknown stream type");
2784 pthread_mutex_unlock(&mMutex);
2785 return -EINVAL;
2786 }
2787
2788 QCamera3Channel *channel = (QCamera3Channel*) newStream->priv;
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002789 if (channel != NULL && QCamera3Channel::isUBWCEnabled()) {
Jason Leec4cf5032017-05-24 18:31:41 -07002790 // Here we only care whether it's EIS3 or not
2791 cam_is_type_t isType = m_bEis3PropertyEnabled ? IS_TYPE_EIS_3_0 : IS_TYPE_NONE;
2792 if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
2793 mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2794 isType = IS_TYPE_NONE;
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002795 cam_format_t fmt = QCamera3Channel::getStreamDefaultFormat(
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07002796 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
Jason Leec4cf5032017-05-24 18:31:41 -07002797 newStream->width, newStream->height, forcePreviewUBWC, isType);
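// If the default format chosen for this stream resolves to NV12 UBWC, tag the
// gralloc usage so the buffers are allocated in the UBWC layout.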
Thierry Strudel3d639192016-09-09 11:52:26 -07002798 if(fmt == CAM_FORMAT_YUV_420_NV12_UBWC) {
2799 newStream->usage |= GRALLOC_USAGE_PRIVATE_ALLOC_UBWC;
2800 }
2801 }
2802
2803 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
2804 it != mStreamInfo.end(); it++) {
2805 if ((*it)->stream == newStream) {
2806 (*it)->channel = (QCamera3ProcessingChannel*) newStream->priv;
2807 break;
2808 }
2809 }
2810 } else {
2811 // Channel already exists for this stream
2812 // Do nothing for now
2813 }
2814 padding_info = gCamCapability[mCameraId]->padding_info;
2815
Emilian Peev7650c122017-01-19 08:24:33 -08002816 /* Do not add entries for input&depth stream in metastream info
Thierry Strudel3d639192016-09-09 11:52:26 -07002817 * since there is no real stream associated with it
2818 */
Emilian Peev7650c122017-01-19 08:24:33 -08002819 if ((newStream->stream_type != CAMERA3_STREAM_INPUT) &&
Emilian Peev0f3c3162017-03-15 12:57:46 +00002820 !((newStream->data_space == HAL_DATASPACE_DEPTH) &&
2821 (newStream->format == HAL_PIXEL_FORMAT_BLOB))) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002822 mStreamConfigInfo.num_streams++;
Emilian Peev7650c122017-01-19 08:24:33 -08002823 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002824 }
2825
Chien-Yu Chen3f303522017-05-19 15:21:45 -07002826 // Let buffer dispatcher know the configured streams.
2827 mOutputBufferDispatcher.configureStreams(streamList);
2828
Thierry Strudel2896d122017-02-23 19:18:03 -08002829 if (mOpMode != QCAMERA3_VENDOR_STREAM_CONFIGURATION_RAW_ONLY_MODE) {
2830 onlyRaw = false;
2831 }
2832
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002833 // Create analysis stream all the time, even when h/w support is not available
Thierry Strudel2896d122017-02-23 19:18:03 -08002834 if (!onlyRaw) {
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002835 cam_feature_mask_t analysisFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002836 cam_analysis_info_t analysisInfo;
2837 int32_t ret = NO_ERROR;
2838 ret = mCommon.getAnalysisInfo(
2839 FALSE,
2840 analysisFeatureMask,
2841 &analysisInfo);
2842 if (ret == NO_ERROR) {
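// A Y-only analysis format is evaluated against the Y filter arrangement when
// deciding PAAF support; otherwise use the sensor's native color arrangement.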
Shuzhen Wang3b457d92016-08-03 08:46:59 -07002843 cam_color_filter_arrangement_t analysis_color_arrangement =
2844 (analysisInfo.analysis_format == CAM_FORMAT_Y_ONLY ?
2845 CAM_FILTER_ARRANGEMENT_Y :
2846 gCamCapability[mCameraId]->color_arrangement);
2847 setPAAFSupport(analysisFeatureMask, CAM_STREAM_TYPE_ANALYSIS,
2848 analysis_color_arrangement);
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002849 cam_dimension_t analysisDim;
2850 analysisDim = mCommon.getMatchingDimension(previewSize,
2851 analysisInfo.analysis_recommended_res);
2852
2853 mAnalysisChannel = new QCamera3SupportChannel(
2854 mCameraHandle->camera_handle,
2855 mChannelHandle,
2856 mCameraHandle->ops,
2857 &analysisInfo.analysis_padding_info,
2858 analysisFeatureMask,
2859 CAM_STREAM_TYPE_ANALYSIS,
2860 &analysisDim,
2861 (analysisInfo.analysis_format
2862 == CAM_FORMAT_Y_ONLY ? CAM_FORMAT_Y_ONLY
2863 : CAM_FORMAT_YUV_420_NV21),
2864 analysisInfo.hw_analysis_supported,
2865 gCamCapability[mCameraId]->color_arrangement,
2866 this,
2867 0); // force buffer count to 0
2868 } else {
2869 LOGW("getAnalysisInfo failed, ret = %d", ret);
2870 }
2871 if (!mAnalysisChannel) {
2872 LOGW("Analysis channel cannot be created");
2873 }
2874 }
2875
Thierry Strudel3d639192016-09-09 11:52:26 -07002876 //RAW DUMP channel
2877 if (mEnableRawDump && isRawStreamRequested == false){
2878 cam_dimension_t rawDumpSize;
2879 rawDumpSize = getMaxRawSize(mCameraId);
2880 cam_feature_mask_t rawDumpFeatureMask = CAM_QCOM_FEATURE_NONE;
2881 setPAAFSupport(rawDumpFeatureMask,
2882 CAM_STREAM_TYPE_RAW,
2883 gCamCapability[mCameraId]->color_arrangement);
2884 mRawDumpChannel = new QCamera3RawDumpChannel(mCameraHandle->camera_handle,
2885 mChannelHandle,
2886 mCameraHandle->ops,
2887 rawDumpSize,
2888 &padding_info,
2889 this, rawDumpFeatureMask);
2890 if (!mRawDumpChannel) {
2891 LOGE("Raw Dump channel cannot be created");
2892 pthread_mutex_unlock(&mMutex);
2893 return -ENOMEM;
2894 }
2895 }
2896
Thierry Strudel3d639192016-09-09 11:52:26 -07002897 if (mAnalysisChannel) {
2898 cam_analysis_info_t analysisInfo;
2899 memset(&analysisInfo, 0, sizeof(cam_analysis_info_t));
2900 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2901 CAM_STREAM_TYPE_ANALYSIS;
2902 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2903 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002904 rc = mCommon.getAnalysisInfo(FALSE,
Thierry Strudel3d639192016-09-09 11:52:26 -07002905 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2906 &analysisInfo);
2907 if (rc != NO_ERROR) {
2908 LOGE("getAnalysisInfo failed, ret = %d", rc);
2909 pthread_mutex_unlock(&mMutex);
2910 return rc;
2911 }
Shuzhen Wang3b457d92016-08-03 08:46:59 -07002912 cam_color_filter_arrangement_t analysis_color_arrangement =
2913 (analysisInfo.analysis_format == CAM_FORMAT_Y_ONLY ?
2914 CAM_FILTER_ARRANGEMENT_Y :
2915 gCamCapability[mCameraId]->color_arrangement);
2916 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2917 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2918 analysis_color_arrangement);
2919
Thierry Strudel3d639192016-09-09 11:52:26 -07002920 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002921 mCommon.getMatchingDimension(previewSize,
2922 analysisInfo.analysis_recommended_res);
Thierry Strudel3d639192016-09-09 11:52:26 -07002923 mStreamConfigInfo.num_streams++;
2924 }
2925
Thierry Strudel2896d122017-02-23 19:18:03 -08002926 if (!onlyRaw && isSupportChannelNeeded(streamList, mStreamConfigInfo)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002927 cam_analysis_info_t supportInfo;
2928 memset(&supportInfo, 0, sizeof(cam_analysis_info_t));
2929 cam_feature_mask_t callbackFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2930 setPAAFSupport(callbackFeatureMask,
2931 CAM_STREAM_TYPE_CALLBACK,
2932 gCamCapability[mCameraId]->color_arrangement);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002933 int32_t ret = NO_ERROR;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002934 ret = mCommon.getAnalysisInfo(FALSE, callbackFeatureMask, &supportInfo);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002935 if (ret != NO_ERROR) {
2936 /* Ignore the error for Mono camera
2937 * because the PAAF bit mask is only set
2938 * for CAM_STREAM_TYPE_ANALYSIS stream type
2939 */
2940 if (gCamCapability[mCameraId]->color_arrangement != CAM_FILTER_ARRANGEMENT_Y) {
2941 LOGW("getAnalysisInfo failed, ret = %d", ret);
2942 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002943 }
2944 mSupportChannel = new QCamera3SupportChannel(
2945 mCameraHandle->camera_handle,
2946 mChannelHandle,
2947 mCameraHandle->ops,
2948 &gCamCapability[mCameraId]->padding_info,
2949 callbackFeatureMask,
2950 CAM_STREAM_TYPE_CALLBACK,
2951 &QCamera3SupportChannel::kDim,
2952 CAM_FORMAT_YUV_420_NV21,
2953 supportInfo.hw_analysis_supported,
2954 gCamCapability[mCameraId]->color_arrangement,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002955 this, 0);
Thierry Strudel3d639192016-09-09 11:52:26 -07002956 if (!mSupportChannel) {
2957 LOGE("dummy channel cannot be created");
2958 pthread_mutex_unlock(&mMutex);
2959 return -ENOMEM;
2960 }
2961 }
2962
2963 if (mSupportChannel) {
2964 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2965 QCamera3SupportChannel::kDim;
2966 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2967 CAM_STREAM_TYPE_CALLBACK;
2968 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2969 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2970 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2971 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2972 gCamCapability[mCameraId]->color_arrangement);
2973 mStreamConfigInfo.num_streams++;
2974 }
2975
2976 if (mRawDumpChannel) {
2977 cam_dimension_t rawSize;
2978 rawSize = getMaxRawSize(mCameraId);
2979 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2980 rawSize;
2981 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2982 CAM_STREAM_TYPE_RAW;
2983 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2984 CAM_QCOM_FEATURE_NONE;
2985 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2986 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2987 gCamCapability[mCameraId]->color_arrangement);
2988 mStreamConfigInfo.num_streams++;
2989 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07002990
2991 if (mHdrPlusRawSrcChannel) {
2992 cam_dimension_t rawSize;
2993 rawSize = getMaxRawSize(mCameraId);
2994 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] = rawSize;
2995 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
2996 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2997 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2998 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2999 gCamCapability[mCameraId]->color_arrangement);
3000 mStreamConfigInfo.num_streams++;
3001 }
3002
Thierry Strudel3d639192016-09-09 11:52:26 -07003003 /* In HFR mode, if video stream is not added, create a dummy channel so that
3004 * ISP can create a batch mode even for preview only case. This channel is
3005 * never 'start'ed (no stream-on), it is only 'initialized' */
3006 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
3007 !m_bIsVideo) {
3008 cam_feature_mask_t dummyFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
3009 setPAAFSupport(dummyFeatureMask,
3010 CAM_STREAM_TYPE_VIDEO,
3011 gCamCapability[mCameraId]->color_arrangement);
3012 mDummyBatchChannel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
3013 mChannelHandle,
3014 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003015 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07003016 this,
3017 &mDummyBatchStream,
3018 CAM_STREAM_TYPE_VIDEO,
3019 dummyFeatureMask,
3020 mMetadataChannel);
3021 if (NULL == mDummyBatchChannel) {
3022 LOGE("creation of mDummyBatchChannel failed."
3023 " Preview will use non-HFR sensor mode");
3024 }
3025 }
3026 if (mDummyBatchChannel) {
3027 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
3028 mDummyBatchStream.width;
3029 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
3030 mDummyBatchStream.height;
3031 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
3032 CAM_STREAM_TYPE_VIDEO;
3033 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
3034 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
3035 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
3036 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
3037 gCamCapability[mCameraId]->color_arrangement);
3038 mStreamConfigInfo.num_streams++;
3039 }
3040
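// Buffer count hints for the backend: min is MIN_INFLIGHT_REQUESTS; max is 0 for
// 4K video, MAX_VIDEO_BUFFERS for EIS3 video, otherwise MAX_INFLIGHT_REQUESTS.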
3041 mStreamConfigInfo.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
3042 mStreamConfigInfo.buffer_info.max_buffers =
Thierry Strudel2896d122017-02-23 19:18:03 -08003043 m_bIs4KVideo ? 0 :
Jason Leea46ad5e2017-07-07 15:20:56 -07003044 m_bEis3PropertyEnabled && m_bIsVideo ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07003045
3046 /* Initialize mPendingRequestInfo and mPendingBuffersMap */
3047 for (pendingRequestIterator i = mPendingRequestsList.begin();
3048 i != mPendingRequestsList.end();) {
3049 i = erasePendingRequest(i);
3050 }
3051 mPendingFrameDropList.clear();
3052 // Initialize/Reset the pending buffers list
3053 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
3054 req.mPendingBufferList.clear();
3055 }
3056 mPendingBuffersMap.mPendingBuffersInRequest.clear();
Emilian Peev30522a12017-08-03 14:36:33 +01003057 mExpectedInflightDuration = 0;
3058 mExpectedFrameDuration = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07003059
Thierry Strudel3d639192016-09-09 11:52:26 -07003060 mCurJpegMeta.clear();
3061 //Get min frame duration for this streams configuration
3062 deriveMinFrameDuration();
3063
Chien-Yu Chenee335912017-02-09 17:53:20 -08003064 mFirstPreviewIntentSeen = false;
3065
3066 // Disable HDR+ if it's enabled.
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07003067 {
Chien-Yu Chen77ccd022017-06-23 12:00:36 -07003068 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
3069 finishHdrPlusClientOpeningLocked(l);
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07003070 disableHdrPlusModeLocked();
3071 }
Chien-Yu Chenee335912017-02-09 17:53:20 -08003072
Thierry Strudel3d639192016-09-09 11:52:26 -07003073 // Update state
3074 mState = CONFIGURED;
3075
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003076 mFirstMetadataCallback = true;
3077
Thierry Strudel3d639192016-09-09 11:52:26 -07003078 pthread_mutex_unlock(&mMutex);
3079
3080 return rc;
3081}
3082
3083/*===========================================================================
3084 * FUNCTION : validateCaptureRequest
3085 *
3086 * DESCRIPTION: validate a capture request from camera service
3087 *
3088 * PARAMETERS :
3089 * @request : request from framework to process
3090 *
3091 * RETURN :
3092 *
3093 *==========================================================================*/
3094int QCamera3HardwareInterface::validateCaptureRequest(
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003095 camera3_capture_request_t *request,
3096 List<InternalRequest> &internallyRequestedStreams)
Thierry Strudel3d639192016-09-09 11:52:26 -07003097{
3098 ssize_t idx = 0;
3099 const camera3_stream_buffer_t *b;
3100 CameraMetadata meta;
3101
3102 /* Sanity check the request */
3103 if (request == NULL) {
3104 LOGE("NULL capture request");
3105 return BAD_VALUE;
3106 }
3107
3108 if ((request->settings == NULL) && (mState == CONFIGURED)) {
3109 /*settings cannot be null for the first request*/
3110 return BAD_VALUE;
3111 }
3112
3113 uint32_t frameNumber = request->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003114 if ((request->num_output_buffers < 1 || request->output_buffers == NULL)
3115 && (internallyRequestedStreams.size() == 0)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003116 LOGE("Request %d: No output buffers provided!",
3117 frameNumber);
3118 return BAD_VALUE;
3119 }
3120 if (request->num_output_buffers >= MAX_NUM_STREAMS) {
3121 LOGE("Number of buffers %d equals or is greater than maximum number of streams %d!",
3122 request->num_output_buffers, MAX_NUM_STREAMS);
3123 return BAD_VALUE;
3124 }
3125 if (request->input_buffer != NULL) {
3126 b = request->input_buffer;
3127 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
3128 LOGE("Request %d: Buffer %ld: Status not OK!",
3129 frameNumber, (long)idx);
3130 return BAD_VALUE;
3131 }
3132 if (b->release_fence != -1) {
3133 LOGE("Request %d: Buffer %ld: Has a release fence!",
3134 frameNumber, (long)idx);
3135 return BAD_VALUE;
3136 }
3137 if (b->buffer == NULL) {
3138 LOGE("Request %d: Buffer %ld: NULL buffer handle!",
3139 frameNumber, (long)idx);
3140 return BAD_VALUE;
3141 }
3142 }
3143
3144 // Validate all buffers
3145 b = request->output_buffers;
Thierry Strudel54dc9782017-02-15 12:12:10 -08003146 if (b == NULL) {
3147 return BAD_VALUE;
3148 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003149 while (idx < (ssize_t)request->num_output_buffers) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003150 QCamera3ProcessingChannel *channel =
3151 static_cast<QCamera3ProcessingChannel*>(b->stream->priv);
3152 if (channel == NULL) {
3153 LOGE("Request %d: Buffer %ld: Unconfigured stream!",
3154 frameNumber, (long)idx);
3155 return BAD_VALUE;
3156 }
3157 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
3158 LOGE("Request %d: Buffer %ld: Status not OK!",
3159 frameNumber, (long)idx);
3160 return BAD_VALUE;
3161 }
3162 if (b->release_fence != -1) {
3163 LOGE("Request %d: Buffer %ld: Has a release fence!",
3164 frameNumber, (long)idx);
3165 return BAD_VALUE;
3166 }
3167 if (b->buffer == NULL) {
3168 LOGE("Request %d: Buffer %ld: NULL buffer handle!",
3169 frameNumber, (long)idx);
3170 return BAD_VALUE;
3171 }
3172 if (*(b->buffer) == NULL) {
3173 LOGE("Request %d: Buffer %ld: NULL private handle!",
3174 frameNumber, (long)idx);
3175 return BAD_VALUE;
3176 }
3177 idx++;
3178 b = request->output_buffers + idx;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003179 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003180 return NO_ERROR;
3181}
3182
3183/*===========================================================================
3184 * FUNCTION : deriveMinFrameDuration
3185 *
3186 * DESCRIPTION: derive minimum processed, jpeg, and raw frame durations based
3187 * on currently configured streams.
3188 *
3189 * PARAMETERS : NONE
3190 *
3191 * RETURN : NONE
3192 *
3193 *==========================================================================*/
3194void QCamera3HardwareInterface::deriveMinFrameDuration()
3195{
3196 int32_t maxJpegDim, maxProcessedDim, maxRawDim;
Jason Lee2d0ab112017-06-21 18:03:05 -07003197 bool hasRaw = false;
3198
3199 mMinRawFrameDuration = 0;
3200 mMinJpegFrameDuration = 0;
3201 mMinProcessedFrameDuration = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07003202
3203 maxJpegDim = 0;
3204 maxProcessedDim = 0;
3205 maxRawDim = 0;
3206
3207 // Figure out maximum jpeg, processed, and raw dimensions
3208 for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
3209 it != mStreamInfo.end(); it++) {
3210
3211 // Input stream doesn't have valid stream_type
3212 if ((*it)->stream->stream_type == CAMERA3_STREAM_INPUT)
3213 continue;
3214
3215 int32_t dimension = (int32_t)((*it)->stream->width * (*it)->stream->height);
3216 if ((*it)->stream->format == HAL_PIXEL_FORMAT_BLOB) {
3217 if (dimension > maxJpegDim)
3218 maxJpegDim = dimension;
3219 } else if ((*it)->stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
3220 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW10 ||
3221 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW16) {
Jason Lee2d0ab112017-06-21 18:03:05 -07003222 hasRaw = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07003223 if (dimension > maxRawDim)
3224 maxRawDim = dimension;
3225 } else {
3226 if (dimension > maxProcessedDim)
3227 maxProcessedDim = dimension;
3228 }
3229 }
3230
3231 size_t count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt,
3232 MAX_SIZES_CNT);
3233
3234 //Assume all jpeg dimensions are in processed dimensions.
3235 if (maxJpegDim > maxProcessedDim)
3236 maxProcessedDim = maxJpegDim;
3237 //Find the smallest raw dimension that is greater than or equal to the jpeg dimension
Jason Lee2d0ab112017-06-21 18:03:05 -07003238 if (hasRaw && maxProcessedDim > maxRawDim) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003239 maxRawDim = INT32_MAX;
3240
3241 for (size_t i = 0; i < count; i++) {
3242 int32_t dimension = gCamCapability[mCameraId]->raw_dim[i].width *
3243 gCamCapability[mCameraId]->raw_dim[i].height;
3244 if (dimension >= maxProcessedDim && dimension < maxRawDim)
3245 maxRawDim = dimension;
3246 }
3247 }
3248
3249 //Find minimum durations for processed, jpeg, and raw
3250 for (size_t i = 0; i < count; i++) {
3251 if (maxRawDim == gCamCapability[mCameraId]->raw_dim[i].width *
3252 gCamCapability[mCameraId]->raw_dim[i].height) {
3253 mMinRawFrameDuration = gCamCapability[mCameraId]->raw_min_duration[i];
3254 break;
3255 }
3256 }
3257 count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
3258 for (size_t i = 0; i < count; i++) {
3259 if (maxProcessedDim ==
3260 gCamCapability[mCameraId]->picture_sizes_tbl[i].width *
3261 gCamCapability[mCameraId]->picture_sizes_tbl[i].height) {
3262 mMinProcessedFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
3263 mMinJpegFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
3264 break;
3265 }
3266 }
3267}
3268
3269/*===========================================================================
3270 * FUNCTION : getMinFrameDuration
3271 *
3272 * DESCRIPTION: get minimum frame duration based on the currently derived minimum
3273 * frame durations and the current request configuration.
3274 *
3275 * PARAMETERS : @request: request sent by the frameworks
3276 *
3277 * RETURN : min frame duration for a particular request
3278 *
3279 *==========================================================================*/
3280int64_t QCamera3HardwareInterface::getMinFrameDuration(const camera3_capture_request_t *request)
3281{
3282 bool hasJpegStream = false;
3283 bool hasRawStream = false;
3284 for (uint32_t i = 0; i < request->num_output_buffers; i ++) {
3285 const camera3_stream_t *stream = request->output_buffers[i].stream;
3286 if (stream->format == HAL_PIXEL_FORMAT_BLOB)
3287 hasJpegStream = true;
3288 else if (stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
3289 stream->format == HAL_PIXEL_FORMAT_RAW10 ||
3290 stream->format == HAL_PIXEL_FORMAT_RAW16)
3291 hasRawStream = true;
3292 }
3293
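// Without a JPEG (BLOB) buffer in the request, the floor is the larger of the raw and
// processed minimums; a JPEG buffer additionally brings in the JPEG minimum.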
3294 if (!hasJpegStream)
3295 return MAX(mMinRawFrameDuration, mMinProcessedFrameDuration);
3296 else
3297 return MAX(MAX(mMinRawFrameDuration, mMinProcessedFrameDuration), mMinJpegFrameDuration);
3298}
3299
3300/*===========================================================================
3301 * FUNCTION : handleBuffersDuringFlushLock
3302 *
3303 * DESCRIPTION: Account for buffers returned from back-end during flush
3304 * This function is executed while mMutex is held by the caller.
3305 *
3306 * PARAMETERS :
3307 * @buffer: image buffer for the callback
3308 *
3309 * RETURN :
3310 *==========================================================================*/
3311void QCamera3HardwareInterface::handleBuffersDuringFlushLock(camera3_stream_buffer_t *buffer)
3312{
3313 bool buffer_found = false;
3314 for (List<PendingBuffersInRequest>::iterator req =
3315 mPendingBuffersMap.mPendingBuffersInRequest.begin();
3316 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
3317 for (List<PendingBufferInfo>::iterator i =
3318 req->mPendingBufferList.begin();
3319 i != req->mPendingBufferList.end(); i++) {
3320 if (i->buffer == buffer->buffer) {
3321 mPendingBuffersMap.numPendingBufsAtFlush--;
3322 LOGD("Found buffer %p for Frame %d, numPendingBufsAtFlush = %d",
3323 buffer->buffer, req->frame_number,
3324 mPendingBuffersMap.numPendingBufsAtFlush);
3325 buffer_found = true;
3326 break;
3327 }
3328 }
3329 if (buffer_found) {
3330 break;
3331 }
3332 }
3333 if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
3334 //signal the flush()
3335 LOGD("All buffers returned to HAL. Continue flush");
3336 pthread_cond_signal(&mBuffersCond);
3337 }
3338}
3339
Thierry Strudel3d639192016-09-09 11:52:26 -07003340/*===========================================================================
3341 * FUNCTION : handleBatchMetadata
3342 *
3343 * DESCRIPTION: Handles metadata buffer callback in batch mode
3344 *
3345 * PARAMETERS : @metadata_buf: metadata buffer
3346 * @free_and_bufdone_meta_buf: Buf done on the meta buf and free
3347 * the meta buf in this method
3348 *
3349 * RETURN :
3350 *
3351 *==========================================================================*/
3352void QCamera3HardwareInterface::handleBatchMetadata(
3353 mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf)
3354{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003355 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_BATCH_METADATA);
Thierry Strudel3d639192016-09-09 11:52:26 -07003356
3357 if (NULL == metadata_buf) {
3358 LOGE("metadata_buf is NULL");
3359 return;
3360 }
3361 /* In batch mode, the metadata will contain the frame number and timestamp of
3362 * the last frame in the batch. Eg: a batch containing buffers from request
3363 * 5,6,7 and 8 will have frame number and timestamp corresponding to 8.
3364 * multiple process_capture_requests => 1 set_param => 1 handleBatchMetadata =>
3365 * multiple process_capture_results */
3366 metadata_buffer_t *metadata =
3367 (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3368 int32_t frame_number_valid = 0, urgent_frame_number_valid = 0;
3369 uint32_t last_frame_number = 0, last_urgent_frame_number = 0;
3370 uint32_t first_frame_number = 0, first_urgent_frame_number = 0;
3371 uint32_t frame_number = 0, urgent_frame_number = 0;
3372 int64_t last_frame_capture_time = 0, first_frame_capture_time, capture_time;
3373 bool invalid_metadata = false;
3374 size_t urgentFrameNumDiff = 0, frameNumDiff = 0;
3375 size_t loopCount = 1;
Thierry Strudel54dc9782017-02-15 12:12:10 -08003376 bool is_metabuf_queued = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07003377
3378 int32_t *p_frame_number_valid =
3379 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3380 uint32_t *p_frame_number =
3381 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3382 int64_t *p_capture_time =
3383 POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
3384 int32_t *p_urgent_frame_number_valid =
3385 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
3386 uint32_t *p_urgent_frame_number =
3387 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
3388
3389 if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) ||
3390 (NULL == p_capture_time) || (NULL == p_urgent_frame_number_valid) ||
3391 (NULL == p_urgent_frame_number)) {
3392 LOGE("Invalid metadata");
3393 invalid_metadata = true;
3394 } else {
3395 frame_number_valid = *p_frame_number_valid;
3396 last_frame_number = *p_frame_number;
3397 last_frame_capture_time = *p_capture_time;
3398 urgent_frame_number_valid = *p_urgent_frame_number_valid;
3399 last_urgent_frame_number = *p_urgent_frame_number;
3400 }
3401
3402 /* In batchmode, when no video buffers are requested, set_parms are sent
3403 * for every capture_request. The difference between consecutive urgent
3404 * frame numbers and frame numbers should be used to interpolate the
3405 * corresponding frame numbers and time stamps */
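/* Example: a batch of 4 whose metadata reports last_frame_number = 8 maps back to
 * first_frame_number = 5 via mPendingBatchMap, giving frameNumDiff = 4; the loop
 * below then replays the metadata as results for frames 5, 6, 7 and 8 with
 * interpolated frame numbers and timestamps. */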
3406 pthread_mutex_lock(&mMutex);
3407 if (urgent_frame_number_valid) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003408 ssize_t idx = mPendingBatchMap.indexOfKey(last_urgent_frame_number);
3409 if(idx < 0) {
3410 LOGE("Invalid urgent frame number received: %d. Irrecoverable error",
3411 last_urgent_frame_number);
3412 mState = ERROR;
3413 pthread_mutex_unlock(&mMutex);
3414 return;
3415 }
3416 first_urgent_frame_number = mPendingBatchMap.valueAt(idx);
Thierry Strudel3d639192016-09-09 11:52:26 -07003417 urgentFrameNumDiff = last_urgent_frame_number + 1 -
3418 first_urgent_frame_number;
3419
3420 LOGD("urgent_frm: valid: %d frm_num: %d - %d",
3421 urgent_frame_number_valid,
3422 first_urgent_frame_number, last_urgent_frame_number);
3423 }
3424
3425 if (frame_number_valid) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003426 ssize_t idx = mPendingBatchMap.indexOfKey(last_frame_number);
3427 if(idx < 0) {
3428 LOGE("Invalid frame number received: %d. Irrecoverable error",
3429 last_frame_number);
3430 mState = ERROR;
3431 pthread_mutex_unlock(&mMutex);
3432 return;
3433 }
3434 first_frame_number = mPendingBatchMap.valueAt(idx);
Thierry Strudel3d639192016-09-09 11:52:26 -07003435 frameNumDiff = last_frame_number + 1 -
3436 first_frame_number;
3437 mPendingBatchMap.removeItem(last_frame_number);
3438
3439 LOGD("frm: valid: %d frm_num: %d - %d",
3440 frame_number_valid,
3441 first_frame_number, last_frame_number);
3442
3443 }
3444 pthread_mutex_unlock(&mMutex);
3445
3446 if (urgent_frame_number_valid || frame_number_valid) {
3447 loopCount = MAX(urgentFrameNumDiff, frameNumDiff);
3448 if (urgentFrameNumDiff > MAX_HFR_BATCH_SIZE)
3449 LOGE("urgentFrameNumDiff: %d urgentFrameNum: %d",
3450 urgentFrameNumDiff, last_urgent_frame_number);
3451 if (frameNumDiff > MAX_HFR_BATCH_SIZE)
3452 LOGE("frameNumDiff: %d frameNum: %d",
3453 frameNumDiff, last_frame_number);
3454 }
3455
3456 for (size_t i = 0; i < loopCount; i++) {
3457 /* handleMetadataWithLock is called even for invalid_metadata for
3458 * pipeline depth calculation */
3459 if (!invalid_metadata) {
3460 /* Infer frame number. Batch metadata contains frame number of the
3461 * last frame */
3462 if (urgent_frame_number_valid) {
3463 if (i < urgentFrameNumDiff) {
3464 urgent_frame_number =
3465 first_urgent_frame_number + i;
3466 LOGD("inferred urgent frame_number: %d",
3467 urgent_frame_number);
3468 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3469 CAM_INTF_META_URGENT_FRAME_NUMBER, urgent_frame_number);
3470 } else {
3471 /* This is to handle when urgentFrameNumDiff < frameNumDiff */
3472 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3473 CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, 0);
3474 }
3475 }
3476
3477 /* Infer frame number. Batch metadata contains frame number of the
3478 * last frame */
3479 if (frame_number_valid) {
3480 if (i < frameNumDiff) {
3481 frame_number = first_frame_number + i;
3482 LOGD("inferred frame_number: %d", frame_number);
3483 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3484 CAM_INTF_META_FRAME_NUMBER, frame_number);
3485 } else {
3486 /* This is to handle when urgentFrameNumDiff > frameNumDiff */
3487 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3488 CAM_INTF_META_FRAME_NUMBER_VALID, 0);
3489 }
3490 }
3491
3492 if (last_frame_capture_time) {
3493 //Infer timestamp
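// e.g. a 4-frame batch at mHFRVideoFps = 120 yields timestamps T - 3/120s,
// T - 2/120s, T - 1/120s and T, where T is the capture time of the last frame.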
3494 first_frame_capture_time = last_frame_capture_time -
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003495 (((loopCount - 1) * NSEC_PER_SEC) / (double) mHFRVideoFps);
Thierry Strudel3d639192016-09-09 11:52:26 -07003496 capture_time =
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003497 first_frame_capture_time + (i * NSEC_PER_SEC / (double) mHFRVideoFps);
Thierry Strudel3d639192016-09-09 11:52:26 -07003498 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3499 CAM_INTF_META_SENSOR_TIMESTAMP, capture_time);
3500 LOGD("batch capture_time: %lld, capture_time: %lld",
3501 last_frame_capture_time, capture_time);
3502 }
3503 }
3504 pthread_mutex_lock(&mMutex);
3505 handleMetadataWithLock(metadata_buf,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003506 false /* free_and_bufdone_meta_buf */,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003507 (i == urgentFrameNumDiff-1), /* last urgent metadata in the batch */
3508 (i == frameNumDiff-1), /* last metadata in the batch metadata */
Thierry Strudel54dc9782017-02-15 12:12:10 -08003509 &is_metabuf_queued /* if metabuf isqueued or not */);
Thierry Strudel3d639192016-09-09 11:52:26 -07003510 pthread_mutex_unlock(&mMutex);
3511 }
3512
3513 /* BufDone metadata buffer */
Thierry Strudel54dc9782017-02-15 12:12:10 -08003514 if (free_and_bufdone_meta_buf && !is_metabuf_queued) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003515 mMetadataChannel->bufDone(metadata_buf);
3516 free(metadata_buf);
Thierry Strudel54dc9782017-02-15 12:12:10 -08003517 metadata_buf = NULL;
Thierry Strudel3d639192016-09-09 11:52:26 -07003518 }
3519}
3520
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003521void QCamera3HardwareInterface::notifyError(uint32_t frameNumber,
3522 camera3_error_msg_code_t errorCode)
3523{
3524 camera3_notify_msg_t notify_msg;
3525 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
3526 notify_msg.type = CAMERA3_MSG_ERROR;
3527 notify_msg.message.error.error_code = errorCode;
3528 notify_msg.message.error.error_stream = NULL;
3529 notify_msg.message.error.frame_number = frameNumber;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003530 orchestrateNotify(&notify_msg);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003531
3532 return;
3533}
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003534
3535/*===========================================================================
3536 * FUNCTION : sendPartialMetadataWithLock
3537 *
3538 * DESCRIPTION: Send partial capture result callback with mMutex lock held.
3539 *
3540 * PARAMETERS : @metadata: metadata buffer
3541 * @requestIter: The iterator for the pending capture request for
3542 * which the partial result is being sent
3543 * @lastUrgentMetadataInBatch: Boolean to indicate whether this is the
3544 * last urgent metadata in a batch. Always true for non-batch mode
Shuzhen Wang485e2442017-08-02 12:21:08 -07003545 * @isJumpstartMetadata: Whether this is a partial metadata for
3546 * jumpstart, i.e. even though it doesn't map to a valid partial
3547 * frame number, its metadata entries should be kept.
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003548 *
3549 * RETURN :
3550 *
3551 *==========================================================================*/
3552
3553void QCamera3HardwareInterface::sendPartialMetadataWithLock(
3554 metadata_buffer_t *metadata,
3555 const pendingRequestIterator requestIter,
Shuzhen Wang485e2442017-08-02 12:21:08 -07003556 bool lastUrgentMetadataInBatch,
3557 bool isJumpstartMetadata)
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003558{
3559 camera3_capture_result_t result;
3560 memset(&result, 0, sizeof(camera3_capture_result_t));
3561
3562 requestIter->partial_result_cnt++;
3563
3564 // Extract 3A metadata
3565 result.result = translateCbUrgentMetadataToResultMetadata(
Shuzhen Wang485e2442017-08-02 12:21:08 -07003566 metadata, lastUrgentMetadataInBatch, requestIter->frame_number,
3567 isJumpstartMetadata);
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003568 // Populate metadata result
3569 result.frame_number = requestIter->frame_number;
3570 result.num_output_buffers = 0;
3571 result.output_buffers = NULL;
3572 result.partial_result = requestIter->partial_result_cnt;
3573
3574 {
Chien-Yu Chen77ccd022017-06-23 12:00:36 -07003575 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003576 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
3577 // Notify HDR+ client about the partial metadata.
3578 gHdrPlusClient->notifyFrameMetadata(result.frame_number, *result.result,
3579 result.partial_result == PARTIAL_RESULT_COUNT);
3580 }
3581 }
3582
3583 orchestrateResult(&result);
3584 LOGD("urgent frame_number = %u", result.frame_number);
3585 free_camera_metadata((camera_metadata_t *)result.result);
3586}
3587
Thierry Strudel3d639192016-09-09 11:52:26 -07003588/*===========================================================================
3589 * FUNCTION : handleMetadataWithLock
3590 *
3591 * DESCRIPTION: Handles metadata buffer callback with mMutex lock held.
3592 *
3593 * PARAMETERS : @metadata_buf: metadata buffer
3594 * @free_and_bufdone_meta_buf: Buf done on the meta buf and free
3595 * the meta buf in this method
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003596 * @lastUrgentMetadataInBatch: Boolean to indicate whether this is the
3597 * last urgent metadata in a batch. Always true for non-batch mode
3598 * @lastMetadataInBatch: Boolean to indicate whether this is the
3599 * last metadata in a batch. Always true for non-batch mode
Thierry Strudel54dc9782017-02-15 12:12:10 -08003600 * @p_is_metabuf_queued: Pointer to Boolean to check if metadata
3601 * buffer is enqueued or not.
Thierry Strudel3d639192016-09-09 11:52:26 -07003602 *
3603 * RETURN :
3604 *
3605 *==========================================================================*/
3606void QCamera3HardwareInterface::handleMetadataWithLock(
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003607 mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003608 bool lastUrgentMetadataInBatch, bool lastMetadataInBatch,
3609 bool *p_is_metabuf_queued)
Thierry Strudel3d639192016-09-09 11:52:26 -07003610{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003611 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_METADATA_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07003612 if ((mFlushPerf) || (ERROR == mState) || (DEINIT == mState)) {
3613 //during flush do not send metadata from this thread
3614 LOGD("not sending metadata during flush or when mState is error");
3615 if (free_and_bufdone_meta_buf) {
3616 mMetadataChannel->bufDone(metadata_buf);
3617 free(metadata_buf);
3618 }
3619 return;
3620 }
3621
3622 //not in flush
3623 metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3624 int32_t frame_number_valid, urgent_frame_number_valid;
3625 uint32_t frame_number, urgent_frame_number;
Jason Lee603176d2017-05-31 11:43:27 -07003626 int64_t capture_time, capture_time_av;
Thierry Strudel3d639192016-09-09 11:52:26 -07003627 nsecs_t currentSysTime;
3628
3629 int32_t *p_frame_number_valid =
3630 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3631 uint32_t *p_frame_number = POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3632 int64_t *p_capture_time = POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
Jason Lee603176d2017-05-31 11:43:27 -07003633 int64_t *p_capture_time_av = POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP_AV, metadata);
Thierry Strudel3d639192016-09-09 11:52:26 -07003634 int32_t *p_urgent_frame_number_valid =
3635 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
3636 uint32_t *p_urgent_frame_number =
3637 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
3638 IF_META_AVAILABLE(cam_stream_ID_t, p_cam_frame_drop, CAM_INTF_META_FRAME_DROPPED,
3639 metadata) {
3640 LOGD("Dropped frame info for frame_number_valid %d, frame_number %d",
3641 *p_frame_number_valid, *p_frame_number);
3642 }
3643
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003644 camera_metadata_t *resultMetadata = nullptr;
3645
Thierry Strudel3d639192016-09-09 11:52:26 -07003646 if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) || (NULL == p_capture_time) ||
3647 (NULL == p_urgent_frame_number_valid) || (NULL == p_urgent_frame_number)) {
3648 LOGE("Invalid metadata");
3649 if (free_and_bufdone_meta_buf) {
3650 mMetadataChannel->bufDone(metadata_buf);
3651 free(metadata_buf);
3652 }
3653 goto done_metadata;
3654 }
3655 frame_number_valid = *p_frame_number_valid;
3656 frame_number = *p_frame_number;
3657 capture_time = *p_capture_time;
Jason Lee603176d2017-05-31 11:43:27 -07003658 capture_time_av = *p_capture_time_av;
Thierry Strudel3d639192016-09-09 11:52:26 -07003659 urgent_frame_number_valid = *p_urgent_frame_number_valid;
3660 urgent_frame_number = *p_urgent_frame_number;
3661 currentSysTime = systemTime(CLOCK_MONOTONIC);
3662
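// Sensor timestamps on targets without calibrated timestamps need a clock-domain
// adjustment: measure the current BOOTTIME - MONOTONIC offset by bracketing a
// BOOTTIME read between two MONOTONIC reads (best of three tries, smallest bracket
// wins) and subtract that offset from capture_time.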
Jason Lee603176d2017-05-31 11:43:27 -07003663 if (!gCamCapability[mCameraId]->timestamp_calibrated) {
3664 const int tries = 3;
3665 nsecs_t bestGap, measured;
3666 for (int i = 0; i < tries; ++i) {
3667 const nsecs_t tmono = systemTime(SYSTEM_TIME_MONOTONIC);
3668 const nsecs_t tbase = systemTime(SYSTEM_TIME_BOOTTIME);
3669 const nsecs_t tmono2 = systemTime(SYSTEM_TIME_MONOTONIC);
3670 const nsecs_t gap = tmono2 - tmono;
3671 if (i == 0 || gap < bestGap) {
3672 bestGap = gap;
3673 measured = tbase - ((tmono + tmono2) >> 1);
3674 }
3675 }
3676 capture_time -= measured;
3677 }
3678
Thierry Strudel3d639192016-09-09 11:52:26 -07003679 // Detect if buffers from any requests are overdue
3680 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003681 int64_t timeout;
3682 {
3683 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
3684 // If there is a pending HDR+ request, the following requests may be blocked until the
3685 // HDR+ request is done. So allow a longer timeout.
3686 timeout = (mHdrPlusPendingRequests.size() > 0) ?
3687 MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT : MISSING_REQUEST_BUF_TIMEOUT;
Emilian Peev30522a12017-08-03 14:36:33 +01003688 if (timeout < mExpectedInflightDuration) {
3689 timeout = mExpectedInflightDuration;
3690 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003691 }
3692
3693 if ( (currentSysTime - req.timestamp) > s2ns(timeout) ) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003694 for (auto &missed : req.mPendingBufferList) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003695 assert(missed.stream->priv);
3696 if (missed.stream->priv) {
3697 QCamera3Channel *ch = (QCamera3Channel *)(missed.stream->priv);
3698 assert(ch->mStreams[0]);
3699 if (ch->mStreams[0]) {
3700 LOGE("Cancel missing frame = %d, buffer = %p,"
3701 "stream type = %d, stream format = %d",
3702 " stream type = %d, stream format = %d",
3703 ch->mStreams[0]->getMyType(), missed.stream->format);
3704 ch->timeoutFrame(req.frame_number);
3705 }
3706 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003707 }
3708 }
3709 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003710 //For the very first metadata callback, regardless of whether it contains a valid
3711 //frame number, send the partial metadata for the jumpstarting requests.
3712 //Note that this has to be done even if the metadata doesn't contain a valid
3713 //urgent frame number, because when only 1 request is ever submitted
3714 //to the HAL, there will be no subsequent valid urgent frame number.
3715 if (mFirstMetadataCallback) {
3716 for (pendingRequestIterator i =
3717 mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
3718 if (i->bUseFirstPartial) {
Shuzhen Wang485e2442017-08-02 12:21:08 -07003719 sendPartialMetadataWithLock(metadata, i, lastUrgentMetadataInBatch,
3720 true /*isJumpstartMetadata*/);
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003721 }
3722 }
3723 mFirstMetadataCallback = false;
3724 }
3725
Thierry Strudel3d639192016-09-09 11:52:26 -07003726 //Partial result on process_capture_result for timestamp
3727 if (urgent_frame_number_valid) {
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003728 LOGD("valid urgent frame_number = %u", urgent_frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003729
3730 //Received an urgent Frame Number, handle it
3731 //using partial results
3732 for (pendingRequestIterator i =
3733 mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
3734 LOGD("Iterator Frame = %d urgent frame = %d",
3735 i->frame_number, urgent_frame_number);
3736
Chien-Yu Chen29fd1d72017-04-27 18:42:09 -07003737 if ((!i->input_buffer) && (!i->hdrplus) && (i->frame_number < urgent_frame_number) &&
Thierry Strudel3d639192016-09-09 11:52:26 -07003738 (i->partial_result_cnt == 0)) {
3739 LOGE("Error: HAL missed urgent metadata for frame number %d",
3740 i->frame_number);
Shuzhen Wang1ee712a2017-03-22 17:51:26 -07003741 i->partial_result_cnt++;
Thierry Strudel3d639192016-09-09 11:52:26 -07003742 }
3743
3744 if (i->frame_number == urgent_frame_number &&
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003745 i->partial_result_cnt == 0) {
Shuzhen Wang485e2442017-08-02 12:21:08 -07003746 sendPartialMetadataWithLock(metadata, i, lastUrgentMetadataInBatch,
3747 false /*isJumpstartMetadata*/);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003748 if (mResetInstantAEC && mInstantAECSettledFrameNumber == 0) {
3749 // Instant AEC settled for this frame.
3750 LOGH("instant AEC settled for frame number %d", urgent_frame_number);
3751 mInstantAECSettledFrameNumber = urgent_frame_number;
3752 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003753 break;
3754 }
3755 }
3756 }
3757
3758 if (!frame_number_valid) {
3759 LOGD("Not a valid normal frame number, used as SOF only");
3760 if (free_and_bufdone_meta_buf) {
3761 mMetadataChannel->bufDone(metadata_buf);
3762 free(metadata_buf);
3763 }
3764 goto done_metadata;
3765 }
3766 LOGH("valid frame_number = %u, capture_time = %lld",
3767 frame_number, capture_time);
3768
Emilian Peev4e0fe952017-06-30 12:40:09 -07003769 handleDepthDataLocked(metadata->depth_data, frame_number,
3770 metadata->is_depth_data_valid);
Emilian Peev7650c122017-01-19 08:24:33 -08003771
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003772 // Check whether any stream buffer corresponding to this is dropped or not
3773 // If dropped, then send the ERROR_BUFFER for the corresponding stream
3774 // OR, if instant AEC is enabled, drop frames until AEC has settled.
3775 for (auto & pendingRequest : mPendingRequestsList) {
3776 if (p_cam_frame_drop || (mInstantAEC || pendingRequest.frame_number <
3777 mInstantAECSettledFrameNumber)) {
3778 camera3_notify_msg_t notify_msg = {};
3779 for (auto & buffer : pendingRequest.buffers) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003780 bool dropFrame = false;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003781 QCamera3ProcessingChannel *channel =
3782 (QCamera3ProcessingChannel *)buffer.stream->priv;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003783 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003784 if (p_cam_frame_drop) {
3785 for (uint32_t k = 0; k < p_cam_frame_drop->num_streams; k++) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003786 if (streamID == p_cam_frame_drop->stream_request[k].streamID) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003787 // Got the stream ID for drop frame.
3788 dropFrame = true;
3789 break;
3790 }
3791 }
3792 } else {
3793 // This is instant AEC case.
3794 // For instant AEC, drop the stream until AEC is settled.
3795 dropFrame = true;
3796 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003797
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003798 if (dropFrame) {
3799 // Send Error notify to frameworks with CAMERA3_MSG_ERROR_BUFFER
3800 if (p_cam_frame_drop) {
3801 // Treat msg as error for system buffer drops
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003802 LOGE("Start of reporting error frame#=%u, streamID=%u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003803 pendingRequest.frame_number, streamID);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003804 } else {
3805 // For instant AEC, inform frame drop and frame number
3806 LOGH("Start of reporting error frame#=%u for instant AEC, streamID=%u, "
3807 "AEC settled frame number = %u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003808 pendingRequest.frame_number, streamID,
3809 mInstantAECSettledFrameNumber);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003810 }
3811 notify_msg.type = CAMERA3_MSG_ERROR;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003812 notify_msg.message.error.frame_number = pendingRequest.frame_number;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003813 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER ;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003814 notify_msg.message.error.error_stream = buffer.stream;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003815 orchestrateNotify(&notify_msg);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003816 if (p_cam_frame_drop) {
3817 // Treat msg as error for system buffer drops
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003818 LOGE("End of reporting error frame#=%u, streamID=%u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003819 pendingRequest.frame_number, streamID);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003820 } else {
3821 // For instant AEC, inform frame drop and frame number
3822 LOGH("End of reporting error frame#=%u for instant AEC, streamID=%u, "
3823 "AEC settled frame number = %u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003824 pendingRequest.frame_number, streamID,
3825 mInstantAECSettledFrameNumber);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003826 }
3827 PendingFrameDropInfo PendingFrameDrop;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003828 PendingFrameDrop.frame_number = pendingRequest.frame_number;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003829 PendingFrameDrop.stream_ID = streamID;
3830 // Add the Frame drop info to mPendingFrameDropList
3831 mPendingFrameDropList.push_back(PendingFrameDrop);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003832 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003833 }
3834 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003835 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003836
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003837 for (auto & pendingRequest : mPendingRequestsList) {
3838 // Find the pending request with the frame number.
3839 if (pendingRequest.frame_number == frame_number) {
3840 // Update the sensor timestamp.
3841 pendingRequest.timestamp = capture_time;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003842
Thierry Strudel3d639192016-09-09 11:52:26 -07003843
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07003844            /* Set the timestamp in display metadata so that clients aware of
3845               private_handle, such as VT, can use these unmodified timestamps.
3846               The camera framework is unaware of this timestamp and cannot change it. */
Jason Lee603176d2017-05-31 11:43:27 -07003847 updateTimeStampInPendingBuffers(pendingRequest.frame_number, capture_time_av);
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07003848
Thierry Strudel3d639192016-09-09 11:52:26 -07003849 // Find channel requiring metadata, meaning internal offline postprocess
3850 // is needed.
3851            //TODO: for now, we don't support two streams requiring metadata at the same time
3852            // (because we are not making copies, and the metadata buffer is not reference counted).
3853 bool internalPproc = false;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003854 for (pendingBufferIterator iter = pendingRequest.buffers.begin();
3855 iter != pendingRequest.buffers.end(); iter++) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003856 if (iter->need_metadata) {
3857 internalPproc = true;
3858 QCamera3ProcessingChannel *channel =
3859 (QCamera3ProcessingChannel *)iter->stream->priv;
3860 channel->queueReprocMetadata(metadata_buf);
Thierry Strudel54dc9782017-02-15 12:12:10 -08003861 if(p_is_metabuf_queued != NULL) {
3862 *p_is_metabuf_queued = true;
3863 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003864 break;
3865 }
3866 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003867 for (auto itr = pendingRequest.internalRequestList.begin();
3868 itr != pendingRequest.internalRequestList.end(); itr++) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003869 if (itr->need_metadata) {
3870 internalPproc = true;
3871 QCamera3ProcessingChannel *channel =
3872 (QCamera3ProcessingChannel *)itr->stream->priv;
3873 channel->queueReprocMetadata(metadata_buf);
3874 break;
3875 }
3876 }
3877
Thierry Strudel54dc9782017-02-15 12:12:10 -08003878 saveExifParams(metadata);
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07003879
3880 bool *enableZsl = nullptr;
3881 if (gExposeEnableZslKey) {
3882 enableZsl = &pendingRequest.enableZsl;
3883 }
3884
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003885 resultMetadata = translateFromHalMetadata(metadata,
Shuzhen Wang181c57b2017-07-21 11:39:44 -07003886 pendingRequest, internalPproc,
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07003887 lastMetadataInBatch, enableZsl);
Thierry Strudel3d639192016-09-09 11:52:26 -07003888
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003889 updateFpsInPreviewBuffer(metadata, pendingRequest.frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003890
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003891 if (pendingRequest.blob_request) {
3892 //Dump tuning metadata if enabled and available
3893 char prop[PROPERTY_VALUE_MAX];
3894 memset(prop, 0, sizeof(prop));
3895 property_get("persist.camera.dumpmetadata", prop, "0");
3896 int32_t enabled = atoi(prop);
3897 if (enabled && metadata->is_tuning_params_valid) {
3898 dumpMetadataToFile(metadata->tuning_params,
3899 mMetaFrameCount,
3900 enabled,
3901 "Snapshot",
3902 frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003903 }
3904 }
3905
3906 if (!internalPproc) {
3907 LOGD("couldn't find need_metadata for this metadata");
3908 // Return metadata buffer
3909 if (free_and_bufdone_meta_buf) {
3910 mMetadataChannel->bufDone(metadata_buf);
3911 free(metadata_buf);
3912 }
3913 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003914
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003915 break;
Thierry Strudel3d639192016-09-09 11:52:26 -07003916 }
3917 }
3918
Chien-Yu Chen3f303522017-05-19 15:21:45 -07003919 mShutterDispatcher.markShutterReady(frame_number, capture_time);
3920
3921 // Try to send out capture result metadata.
3922 handlePendingResultMetadataWithLock(frame_number, resultMetadata);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003923 return;
3924
Thierry Strudel3d639192016-09-09 11:52:26 -07003925done_metadata:
3926 for (pendingRequestIterator i = mPendingRequestsList.begin();
3927 i != mPendingRequestsList.end() ;i++) {
3928 i->pipeline_depth++;
3929 }
3930 LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
3931 unblockRequestIfNecessary();
3932}
3933
3934/*===========================================================================
Emilian Peev7650c122017-01-19 08:24:33 -08003935 * FUNCTION   : handleDepthDataLocked
3936 *
3937 * DESCRIPTION: Handles incoming depth data
3938 *
3939 * PARAMETERS : @depthData : Depth data
3940 * @frameNumber: Frame number of the incoming depth data
Emilian Peev4e0fe952017-06-30 12:40:09 -07003941 * @valid : Valid flag for the incoming data
Emilian Peev7650c122017-01-19 08:24:33 -08003942 *
3943 * RETURN :
3944 *
3945 *==========================================================================*/
3946void QCamera3HardwareInterface::handleDepthDataLocked(
Emilian Peev4e0fe952017-06-30 12:40:09 -07003947 const cam_depth_data_t &depthData, uint32_t frameNumber, uint8_t valid) {
Emilian Peev7650c122017-01-19 08:24:33 -08003948 uint32_t currentFrameNumber;
3949 buffer_handle_t *depthBuffer;
3950
3951 if (nullptr == mDepthChannel) {
Emilian Peev7650c122017-01-19 08:24:33 -08003952 return;
3953 }
3954
3955 camera3_stream_buffer_t resultBuffer =
3956 {.acquire_fence = -1,
3957 .release_fence = -1,
3958 .status = CAMERA3_BUFFER_STATUS_OK,
3959 .buffer = nullptr,
3960 .stream = mDepthChannel->getStream()};
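    // Walk the depth channel's buffers from oldest to newest. Buffers older than the
    // incoming frame number have no depth data to pair with and are returned as errors
    // with a CAMERA3_MSG_ERROR_BUFFER notify; the buffer matching this frame number is
    // populated with the depth data when it is valid; newer buffers are left pending.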
Emilian Peev7650c122017-01-19 08:24:33 -08003961 do {
3962 depthBuffer = mDepthChannel->getOldestFrame(currentFrameNumber);
3963 if (nullptr == depthBuffer) {
3964 break;
3965 }
3966
Emilian Peev7650c122017-01-19 08:24:33 -08003967 resultBuffer.buffer = depthBuffer;
3968 if (currentFrameNumber == frameNumber) {
Emilian Peev4e0fe952017-06-30 12:40:09 -07003969 if (valid) {
3970 int32_t rc = mDepthChannel->populateDepthData(depthData,
3971 frameNumber);
3972 if (NO_ERROR != rc) {
3973 resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
3974 } else {
3975 resultBuffer.status = CAMERA3_BUFFER_STATUS_OK;
3976 }
Emilian Peev7650c122017-01-19 08:24:33 -08003977 } else {
Emilian Peev4e0fe952017-06-30 12:40:09 -07003978 resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
Emilian Peev7650c122017-01-19 08:24:33 -08003979 }
3980 } else if (currentFrameNumber > frameNumber) {
3981 break;
3982 } else {
3983 camera3_notify_msg_t notify_msg = {.type = CAMERA3_MSG_ERROR,
3984 {{currentFrameNumber, mDepthChannel->getStream(),
3985 CAMERA3_MSG_ERROR_BUFFER}}};
3986 orchestrateNotify(&notify_msg);
3987
3988 LOGE("Depth buffer for frame number: %d is missing "
3989 "returning back!", currentFrameNumber);
3990 resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
3991 }
3992 mDepthChannel->unmapBuffer(currentFrameNumber);
Chien-Yu Chen3f303522017-05-19 15:21:45 -07003993 mOutputBufferDispatcher.markBufferReady(currentFrameNumber, resultBuffer);
Emilian Peev7650c122017-01-19 08:24:33 -08003994 } while (currentFrameNumber < frameNumber);
3995}
3996
3997/*===========================================================================
3998 * FUNCTION : notifyErrorFoPendingDepthData
3999 *
4000 * DESCRIPTION: Returns error for any pending depth buffers
4001 *
4002 * PARAMETERS : depthCh - depth channel that needs to get flushed
4003 *
4004 * RETURN :
4005 *
4006 *==========================================================================*/
4007void QCamera3HardwareInterface::notifyErrorFoPendingDepthData(
4008 QCamera3DepthChannel *depthCh) {
4009 uint32_t currentFrameNumber;
4010 buffer_handle_t *depthBuffer;
4011
4012 if (nullptr == depthCh) {
4013 return;
4014 }
4015
4016 camera3_notify_msg_t notify_msg =
4017 {.type = CAMERA3_MSG_ERROR,
4018 {{0, depthCh->getStream(), CAMERA3_MSG_ERROR_BUFFER}}};
4019 camera3_stream_buffer_t resultBuffer =
4020 {.acquire_fence = -1,
4021 .release_fence = -1,
4022 .buffer = nullptr,
4023 .stream = depthCh->getStream(),
4024 .status = CAMERA3_BUFFER_STATUS_ERROR};
Emilian Peev7650c122017-01-19 08:24:33 -08004025
4026 while (nullptr !=
4027 (depthBuffer = depthCh->getOldestFrame(currentFrameNumber))) {
4028 depthCh->unmapBuffer(currentFrameNumber);
4029
4030 notify_msg.message.error.frame_number = currentFrameNumber;
4031 orchestrateNotify(&notify_msg);
4032
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004033 mOutputBufferDispatcher.markBufferReady(currentFrameNumber, resultBuffer);
Emilian Peev7650c122017-01-19 08:24:33 -08004034 };
4035}
4036
4037/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07004038 * FUNCTION : hdrPlusPerfLock
4039 *
4040 * DESCRIPTION: perf lock for HDR+ using custom intent
4041 *
4042 * PARAMETERS : @metadata_buf: Metadata super_buf pointer
4043 *
4044 * RETURN : None
4045 *
4046 *==========================================================================*/
4047void QCamera3HardwareInterface::hdrPlusPerfLock(
4048 mm_camera_super_buf_t *metadata_buf)
4049{
4050 if (NULL == metadata_buf) {
4051 LOGE("metadata_buf is NULL");
4052 return;
4053 }
4054 metadata_buffer_t *metadata =
4055 (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
4056 int32_t *p_frame_number_valid =
4057 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
4058 uint32_t *p_frame_number =
4059 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
4060
4061 if (p_frame_number_valid == NULL || p_frame_number == NULL) {
4062 LOGE("%s: Invalid metadata", __func__);
4063 return;
4064 }
4065
Wei Wang01385482017-08-03 10:49:34 -07004066 //acquire perf lock for 2 secs after the last HDR frame is captured
4067 constexpr uint32_t HDR_PLUS_PERF_TIME_OUT = 2000;
Thierry Strudel3d639192016-09-09 11:52:26 -07004068 if ((p_frame_number_valid != NULL) && *p_frame_number_valid) {
4069 if ((p_frame_number != NULL) &&
4070 (mLastCustIntentFrmNum == (int32_t)*p_frame_number)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004071 mPerfLockMgr.acquirePerfLock(PERF_LOCK_TAKE_SNAPSHOT, HDR_PLUS_PERF_TIME_OUT);
Thierry Strudel3d639192016-09-09 11:52:26 -07004072 }
4073 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004074}
4075
4076/*===========================================================================
4077 * FUNCTION : handleInputBufferWithLock
4078 *
4079 * DESCRIPTION: Handles input buffer and shutter callback with mMutex lock held.
4080 *
4081 * PARAMETERS : @frame_number: frame number of the input buffer
4082 *
4083 * RETURN :
4084 *
4085 *==========================================================================*/
4086void QCamera3HardwareInterface::handleInputBufferWithLock(uint32_t frame_number)
4087{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004088 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_IN_BUF_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07004089 pendingRequestIterator i = mPendingRequestsList.begin();
4090 while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
4091 i++;
4092 }
4093 if (i != mPendingRequestsList.end() && i->input_buffer) {
4094 //found the right request
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004095 CameraMetadata settings;
4096 nsecs_t capture_time = systemTime(CLOCK_MONOTONIC);
4097 if(i->settings) {
4098 settings = i->settings;
4099 if (settings.exists(ANDROID_SENSOR_TIMESTAMP)) {
4100 capture_time = settings.find(ANDROID_SENSOR_TIMESTAMP).data.i64[0];
Thierry Strudel3d639192016-09-09 11:52:26 -07004101 } else {
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004102 LOGE("No timestamp in input settings! Using current one.");
Thierry Strudel3d639192016-09-09 11:52:26 -07004103 }
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004104 } else {
4105 LOGE("Input settings missing!");
Thierry Strudel3d639192016-09-09 11:52:26 -07004106 }
4107
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004108 mShutterDispatcher.markShutterReady(frame_number, capture_time);
4109 LOGD("Input request metadata notify frame_number = %u, capture_time = %llu",
4110 i->frame_number, capture_time);
Thierry Strudel3d639192016-09-09 11:52:26 -07004111
4112 camera3_capture_result result;
4113 memset(&result, 0, sizeof(camera3_capture_result));
4114 result.frame_number = frame_number;
4115 result.result = i->settings;
4116 result.input_buffer = i->input_buffer;
4117 result.partial_result = PARTIAL_RESULT_COUNT;
4118
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004119 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -07004120 LOGD("Input request metadata and input buffer frame_number = %u",
4121 i->frame_number);
4122 i = erasePendingRequest(i);
Chien-Yu Chen588cc852017-06-23 18:39:51 -07004123
4124        // Dispatch result metadata that may have just been unblocked by this reprocess result.
4125 dispatchResultMetadataWithLock(frame_number, /*isLiveRequest*/false);
Thierry Strudel3d639192016-09-09 11:52:26 -07004126 } else {
4127 LOGE("Could not find input request for frame number %d", frame_number);
4128 }
4129}
4130
4131/*===========================================================================
4132 * FUNCTION : handleBufferWithLock
4133 *
4134 * DESCRIPTION: Handles image buffer callback with mMutex lock held.
4135 *
4136 * PARAMETERS : @buffer: image buffer for the callback
4137 * @frame_number: frame number of the image buffer
4138 *
4139 * RETURN :
4140 *
4141 *==========================================================================*/
4142void QCamera3HardwareInterface::handleBufferWithLock(
4143 camera3_stream_buffer_t *buffer, uint32_t frame_number)
4144{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004145 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_BUF_LKD);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004146
4147 if (buffer->stream->format == HAL_PIXEL_FORMAT_BLOB) {
4148 mPerfLockMgr.releasePerfLock(PERF_LOCK_TAKE_SNAPSHOT);
4149 }
4150
Thierry Strudel3d639192016-09-09 11:52:26 -07004151 /* Nothing to be done during error state */
4152 if ((ERROR == mState) || (DEINIT == mState)) {
4153 return;
4154 }
4155 if (mFlushPerf) {
4156 handleBuffersDuringFlushLock(buffer);
4157 return;
4158 }
4159 //not in flush
4160 // If the frame number doesn't exist in the pending request list,
4161 // directly send the buffer to the frameworks, and update pending buffers map
4162 // Otherwise, book-keep the buffer.
4163 pendingRequestIterator i = mPendingRequestsList.begin();
4164 while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
4165 i++;
4166 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004167
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004168 if (i != mPendingRequestsList.end()) {
Thierry Strudel3d639192016-09-09 11:52:26 -07004169 if (i->input_buffer) {
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004170 // For a reprocessing request, try to send out result metadata.
4171 handlePendingResultMetadataWithLock(frame_number, nullptr);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004172 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004173 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004174
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004175 // Check if this frame was dropped.
4176 for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
4177 m != mPendingFrameDropList.end(); m++) {
4178 QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
4179 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
4180 if((m->stream_ID == streamID) && (m->frame_number==frame_number) ) {
4181 buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
4182 LOGD("Stream STATUS_ERROR frame_number=%d, streamID=%d",
4183 frame_number, streamID);
4184 m = mPendingFrameDropList.erase(m);
4185 break;
4186 }
4187 }
4188
Binhao Lin09245482017-08-31 18:25:29 -07004189 // WAR for encoder avtimer timestamp issue
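    // For video streams with the AV timer enabled, the avtimer timestamp recorded for
    // this request is written into the buffer's private handle (SET_VT_TIMESTAMP); if
    // no avtimer timestamp was recorded, the buffer is returned with an error status.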
4190 QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
4191 if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask() &&
4192 m_bAVTimerEnabled) {
4193 for (auto req = mPendingBuffersMap.mPendingBuffersInRequest.begin();
4194 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
4195 if (req->frame_number != frame_number)
4196 continue;
4197 if(req->av_timestamp == 0) {
4198 buffer->status |= CAMERA3_BUFFER_STATUS_ERROR;
4199 }
4200 else {
4201 struct private_handle_t *priv_handle =
4202 (struct private_handle_t *) (*(buffer->buffer));
4203 setMetaData(priv_handle, SET_VT_TIMESTAMP, &(req->av_timestamp));
4204 }
4205 }
4206 }
4207
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004208 buffer->status |= mPendingBuffersMap.getBufErrStatus(buffer->buffer);
4209 LOGH("result frame_number = %d, buffer = %p",
4210 frame_number, buffer->buffer);
4211
4212 mPendingBuffersMap.removeBuf(buffer->buffer);
4213 mOutputBufferDispatcher.markBufferReady(frame_number, *buffer);
4214
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004215 if (mPreviewStarted == false) {
4216 QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
4217 if ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask()) {
Chien-Yu Chen509314b2017-04-07 15:27:55 -07004218 logEaselEvent("EASEL_STARTUP_LATENCY", "Preview Started");
4219
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004220 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
4221 mPerfLockMgr.releasePerfLock(PERF_LOCK_OPEN_CAMERA);
4222 mPreviewStarted = true;
4223
4224 // Set power hint for preview
4225 mPerfLockMgr.acquirePerfLock(PERF_LOCK_POWERHINT_ENCODE, 0);
4226 }
4227 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004228}
4229
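/*===========================================================================
 * FUNCTION   : handlePendingResultMetadataWithLock
 *
 * DESCRIPTION: Attaches result metadata to its pending request with mMutex lock
 *              held, updates the partial result count based on the request type
 *              (HDR+, reprocess or live), strips the lens shading map when it
 *              was not requested, and dispatches any results that are now ready.
 *
 * PARAMETERS : @frameNumber   : frame number of the result metadata
 *              @resultMetadata: result metadata for this frame
 *
 * RETURN     :
 *
 *==========================================================================*/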
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004230void QCamera3HardwareInterface::handlePendingResultMetadataWithLock(uint32_t frameNumber,
Chien-Yu Chenbc730232017-07-12 14:49:55 -07004231 camera_metadata_t *resultMetadata)
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004232{
4233 // Find the pending request for this result metadata.
4234 auto requestIter = mPendingRequestsList.begin();
4235 while (requestIter != mPendingRequestsList.end() && requestIter->frame_number != frameNumber) {
4236 requestIter++;
4237 }
4238
4239 if (requestIter == mPendingRequestsList.end()) {
4240 ALOGE("%s: Cannot find a pending request for frame number %u.", __FUNCTION__, frameNumber);
4241 return;
4242 }
4243
4244 // Update the result metadata
4245 requestIter->resultMetadata = resultMetadata;
4246
4247 // Check what type of request this is.
4248 bool liveRequest = false;
4249 if (requestIter->hdrplus) {
Chien-Yu Chen9264fe92017-04-29 03:28:46 +00004250 // HDR+ request doesn't have partial results.
4251 requestIter->partial_result_cnt = PARTIAL_RESULT_COUNT;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004252 } else if (requestIter->input_buffer != nullptr) {
4253 // Reprocessing request result is the same as settings.
4254 requestIter->resultMetadata = requestIter->settings;
4255 // Reprocessing request doesn't have partial results.
4256 requestIter->partial_result_cnt = PARTIAL_RESULT_COUNT;
4257 } else {
4258 liveRequest = true;
Chien-Yu Chen0a921f92017-08-27 17:25:33 -07004259 requestIter->partial_result_cnt = PARTIAL_RESULT_COUNT;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004260 mPendingLiveRequest--;
4261
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07004262 {
Chien-Yu Chen77ccd022017-06-23 12:00:36 -07004263 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07004264 // For a live request, send the metadata to HDR+ client.
4265 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
4266 gHdrPlusClient->notifyFrameMetadata(frameNumber, *resultMetadata,
4267 requestIter->partial_result_cnt == PARTIAL_RESULT_COUNT);
4268 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004269 }
4270 }
4271
Chien-Yu Chenbc730232017-07-12 14:49:55 -07004272    // Remove lens shading map if it's not requested.
4273 if (requestIter->requestedLensShadingMapMode == ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF) {
4274 CameraMetadata metadata;
4275 metadata.acquire(resultMetadata);
4276 metadata.erase(ANDROID_STATISTICS_LENS_SHADING_MAP);
4277 metadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE,
4278 &requestIter->requestedLensShadingMapMode, 1);
4279
4280 requestIter->resultMetadata = metadata.release();
4281 }
4282
Chien-Yu Chen588cc852017-06-23 18:39:51 -07004283 dispatchResultMetadataWithLock(frameNumber, liveRequest);
4284}
4285
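/*===========================================================================
 * FUNCTION   : dispatchResultMetadataWithLock
 *
 * DESCRIPTION: Sends out pending results in frame-number order with mMutex lock
 *              held. A result is sent only once all earlier pending requests have
 *              their metadata; for a live request, earlier live requests that
 *              still have no metadata are completed with CAMERA3_MSG_ERROR_RESULT,
 *              and the pipeline depth of the remaining requests is incremented.
 *
 * PARAMETERS : @frameNumber  : frame number of the result that became ready
 *              @isLiveRequest: true if the result belongs to a live request
 *
 * RETURN     :
 *
 *==========================================================================*/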
4286void QCamera3HardwareInterface::dispatchResultMetadataWithLock(uint32_t frameNumber,
4287 bool isLiveRequest) {
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004288 // The pending requests are ordered by increasing frame numbers. The result metadata are ready
4289 // to be sent if all previous pending requests are ready to be sent.
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004290 bool readyToSend = true;
4291
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004292 // Iterate through the pending requests to send out result metadata that are ready. Also if
4293 // this result metadata belongs to a live request, notify errors for previous live requests
4294 // that don't have result metadata yet.
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004295 auto iter = mPendingRequestsList.begin();
4296 while (iter != mPendingRequestsList.end()) {
4297 // Check if current pending request is ready. If it's not ready, the following pending
4298 // requests are also not ready.
4299 if (readyToSend && iter->resultMetadata == nullptr) {
4300 readyToSend = false;
4301 }
4302
4303 bool thisLiveRequest = iter->hdrplus == false && iter->input_buffer == nullptr;
4304
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004305 camera3_capture_result_t result = {};
4306 result.frame_number = iter->frame_number;
4307 result.result = iter->resultMetadata;
4308 result.partial_result = iter->partial_result_cnt;
4309
4310 // If this pending buffer has result metadata, we may be able to send out shutter callback
4311 // and result metadata.
4312 if (iter->resultMetadata != nullptr) {
4313 if (!readyToSend) {
4314 // If any of the previous pending request is not ready, this pending request is
4315 // also not ready to send in order to keep shutter callbacks and result metadata
4316 // in order.
4317 iter++;
4318 continue;
4319 }
Chien-Yu Chen588cc852017-06-23 18:39:51 -07004320 } else if (iter->frame_number < frameNumber && isLiveRequest && thisLiveRequest) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004321 // If the result metadata belongs to a live request, notify errors for previous pending
4322 // live requests.
4323 mPendingLiveRequest--;
4324
4325 CameraMetadata dummyMetadata;
4326 dummyMetadata.update(ANDROID_REQUEST_ID, &(iter->request_id), 1);
4327 result.result = dummyMetadata.release();
4328
4329 notifyError(iter->frame_number, CAMERA3_MSG_ERROR_RESULT);
Shuzhen Wang1ee712a2017-03-22 17:51:26 -07004330
4331            // partial_result should be PARTIAL_RESULT_COUNT in case of
4332            // ERROR_RESULT.
4333 iter->partial_result_cnt = PARTIAL_RESULT_COUNT;
4334 result.partial_result = PARTIAL_RESULT_COUNT;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004335 } else {
4336 iter++;
4337 continue;
4338 }
4339
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004340 result.output_buffers = nullptr;
4341 result.num_output_buffers = 0;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004342 orchestrateResult(&result);
4343
4344 // For reprocessing, result metadata is the same as settings so do not free it here to
4345 // avoid double free.
4346 if (result.result != iter->settings) {
4347 free_camera_metadata((camera_metadata_t *)result.result);
4348 }
4349 iter->resultMetadata = nullptr;
4350 iter = erasePendingRequest(iter);
4351 }
4352
Chien-Yu Chen588cc852017-06-23 18:39:51 -07004353 if (isLiveRequest) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004354 for (auto &iter : mPendingRequestsList) {
4355 // Increment pipeline depth for the following pending requests.
4356 if (iter.frame_number > frameNumber) {
4357 iter.pipeline_depth++;
4358 }
4359 }
4360 }
4361
4362 unblockRequestIfNecessary();
4363}
4364
Thierry Strudel3d639192016-09-09 11:52:26 -07004365/*===========================================================================
4366 * FUNCTION : unblockRequestIfNecessary
4367 *
4368 * DESCRIPTION: Unblock capture_request if max_buffer hasn't been reached. Note
4369 * that mMutex is held when this function is called.
4370 *
4371 * PARAMETERS :
4372 *
4373 * RETURN :
4374 *
4375 *==========================================================================*/
4376void QCamera3HardwareInterface::unblockRequestIfNecessary()
4377{
4378 // Unblock process_capture_request
4379 pthread_cond_signal(&mRequestCond);
4380}
4381
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004382/*===========================================================================
4383 * FUNCTION : isHdrSnapshotRequest
4384 *
4385 * DESCRIPTION: Function to determine if the request is for an HDR snapshot
4386 *
4387 * PARAMETERS : camera3 request structure
4388 *
4389 * RETURN : boolean decision variable
4390 *
4391 *==========================================================================*/
4392bool QCamera3HardwareInterface::isHdrSnapshotRequest(camera3_capture_request *request)
4393{
4394 if (request == NULL) {
4395 LOGE("Invalid request handle");
4396 assert(0);
4397 return false;
4398 }
4399
4400 if (!mForceHdrSnapshot) {
4401 CameraMetadata frame_settings;
4402 frame_settings = request->settings;
4403
4404 if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
4405 uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
4406 if (metaMode != ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
4407 return false;
4408 }
4409 } else {
4410 return false;
4411 }
4412
4413 if (frame_settings.exists(ANDROID_CONTROL_SCENE_MODE)) {
4414 uint8_t fwk_sceneMode = frame_settings.find(ANDROID_CONTROL_SCENE_MODE).data.u8[0];
4415 if (fwk_sceneMode != ANDROID_CONTROL_SCENE_MODE_HDR) {
4416 return false;
4417 }
4418 } else {
4419 return false;
4420 }
4421 }
4422
4423 for (uint32_t i = 0; i < request->num_output_buffers; i++) {
4424 if (request->output_buffers[i].stream->format
4425 == HAL_PIXEL_FORMAT_BLOB) {
4426 return true;
4427 }
4428 }
4429
4430 return false;
4431}
4432/*===========================================================================
4433 * FUNCTION : orchestrateRequest
4434 *
4435 * DESCRIPTION: Orchestrates a capture request from camera service
4436 *
4437 * PARAMETERS :
4438 * @request : request from framework to process
4439 *
4440 * RETURN : Error status codes
4441 *
4442 *==========================================================================*/
4443int32_t QCamera3HardwareInterface::orchestrateRequest(
4444 camera3_capture_request_t *request)
4445{
4446
4447 uint32_t originalFrameNumber = request->frame_number;
4448 uint32_t originalOutputCount = request->num_output_buffers;
4449 const camera_metadata_t *original_settings = request->settings;
4450 List<InternalRequest> internallyRequestedStreams;
4451 List<InternalRequest> emptyInternalList;
4452
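    // An HDR snapshot request is expanded below into a bracketed sequence of internal
    // requests: metering-only settling frames followed by capture frames at the
    // GB_HDR_HALF_STEP_EV, 0 and GB_HDR_2X_STEP_EV exposure compensation steps, all
    // with AE locked. FrameNumberRegistry maps the internal frame numbers back to the
    // original framework frame number (or marks them as internal-only) on the way out.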
4453 if (isHdrSnapshotRequest(request) && request->input_buffer == NULL) {
4454 LOGD("Framework requested:%d buffers in HDR snapshot", request->num_output_buffers);
4455 uint32_t internalFrameNumber;
4456 CameraMetadata modified_meta;
4457
4458
4459 /* Add Blob channel to list of internally requested streams */
4460 for (uint32_t i = 0; i < request->num_output_buffers; i++) {
4461 if (request->output_buffers[i].stream->format
4462 == HAL_PIXEL_FORMAT_BLOB) {
4463 InternalRequest streamRequested;
4464 streamRequested.meteringOnly = 1;
4465 streamRequested.need_metadata = 0;
4466 streamRequested.stream = request->output_buffers[i].stream;
4467 internallyRequestedStreams.push_back(streamRequested);
4468 }
4469 }
4470 request->num_output_buffers = 0;
4471 auto itr = internallyRequestedStreams.begin();
4472
4473 /* Modify setting to set compensation */
4474 modified_meta = request->settings;
4475 int32_t expCompensation = GB_HDR_HALF_STEP_EV;
4476 uint8_t aeLock = 1;
4477 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4478 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4479 camera_metadata_t *modified_settings = modified_meta.release();
4480 request->settings = modified_settings;
4481
4482 /* Capture Settling & -2x frame */
4483 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4484 request->frame_number = internalFrameNumber;
4485 processCaptureRequest(request, internallyRequestedStreams);
4486
4487 request->num_output_buffers = originalOutputCount;
4488 _orchestrationDb.allocStoreInternalFrameNumber(originalFrameNumber, internalFrameNumber);
4489 request->frame_number = internalFrameNumber;
4490 processCaptureRequest(request, emptyInternalList);
4491 request->num_output_buffers = 0;
4492
4493 modified_meta = modified_settings;
4494 expCompensation = 0;
4495 aeLock = 1;
4496 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4497 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4498 modified_settings = modified_meta.release();
4499 request->settings = modified_settings;
4500
4501 /* Capture Settling & 0X frame */
4502
4503 itr = internallyRequestedStreams.begin();
4504 if (itr == internallyRequestedStreams.end()) {
4505 LOGE("Error Internally Requested Stream list is empty");
4506 assert(0);
4507 } else {
4508 itr->need_metadata = 0;
4509 itr->meteringOnly = 1;
4510 }
4511
4512 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4513 request->frame_number = internalFrameNumber;
4514 processCaptureRequest(request, internallyRequestedStreams);
4515
4516 itr = internallyRequestedStreams.begin();
4517 if (itr == internallyRequestedStreams.end()) {
4518 ALOGE("Error Internally Requested Stream list is empty");
4519 assert(0);
4520 } else {
4521 itr->need_metadata = 1;
4522 itr->meteringOnly = 0;
4523 }
4524
4525 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4526 request->frame_number = internalFrameNumber;
4527 processCaptureRequest(request, internallyRequestedStreams);
4528
4529 /* Capture 2X frame*/
4530 modified_meta = modified_settings;
4531 expCompensation = GB_HDR_2X_STEP_EV;
4532 aeLock = 1;
4533 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4534 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4535 modified_settings = modified_meta.release();
4536 request->settings = modified_settings;
4537
4538 itr = internallyRequestedStreams.begin();
4539 if (itr == internallyRequestedStreams.end()) {
4540 ALOGE("Error Internally Requested Stream list is empty");
4541 assert(0);
4542 } else {
4543 itr->need_metadata = 0;
4544 itr->meteringOnly = 1;
4545 }
4546 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4547 request->frame_number = internalFrameNumber;
4548 processCaptureRequest(request, internallyRequestedStreams);
4549
4550 itr = internallyRequestedStreams.begin();
4551 if (itr == internallyRequestedStreams.end()) {
4552 ALOGE("Error Internally Requested Stream list is empty");
4553 assert(0);
4554 } else {
4555 itr->need_metadata = 1;
4556 itr->meteringOnly = 0;
4557 }
4558
4559 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4560 request->frame_number = internalFrameNumber;
4561 processCaptureRequest(request, internallyRequestedStreams);
4562
4563
4564 /* Capture 2X on original streaming config*/
4565 internallyRequestedStreams.clear();
4566
4567 /* Restore original settings pointer */
4568 request->settings = original_settings;
4569 } else {
4570 uint32_t internalFrameNumber;
4571 _orchestrationDb.allocStoreInternalFrameNumber(request->frame_number, internalFrameNumber);
4572 request->frame_number = internalFrameNumber;
4573 return processCaptureRequest(request, internallyRequestedStreams);
4574 }
4575
4576 return NO_ERROR;
4577}
4578
4579/*===========================================================================
4580 * FUNCTION : orchestrateResult
4581 *
4582 * DESCRIPTION: Orchestrates a capture result to camera service
4583 *
4584 * PARAMETERS :
4585 *   @result : capture result to be sent to the framework
4586 *
4587 * RETURN :
4588 *
4589 *==========================================================================*/
4590void QCamera3HardwareInterface::orchestrateResult(
4591 camera3_capture_result_t *result)
4592{
4593 uint32_t frameworkFrameNumber;
4594 int32_t rc = _orchestrationDb.getFrameworkFrameNumber(result->frame_number,
4595 frameworkFrameNumber);
4596 if (rc != NO_ERROR) {
4597 LOGE("Cannot find translated frameworkFrameNumber");
4598 assert(0);
4599 } else {
4600 if (frameworkFrameNumber == EMPTY_FRAMEWORK_FRAME_NUMBER) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08004601 LOGD("Internal Request drop the result");
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004602 } else {
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004603 if (result->result != NULL) {
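            // If the result carries ANDROID_SYNC_FRAME_NUMBER, rewrite it in place with
            // the framework-visible frame number so that internal frame numbers never
            // leak to the client.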
Binhao Lin299ffc92017-04-27 11:22:47 -07004604 camera_metadata_t *metadata = const_cast<camera_metadata_t*>(result->result);
4605 camera_metadata_entry_t entry;
4606 int ret = find_camera_metadata_entry(metadata, ANDROID_SYNC_FRAME_NUMBER, &entry);
4607 if (ret == OK) {
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004608 int64_t sync_frame_number = frameworkFrameNumber;
Binhao Lin299ffc92017-04-27 11:22:47 -07004609 ret = update_camera_metadata_entry(metadata, entry.index, &sync_frame_number, 1, &entry);
4610 if (ret != OK)
4611 LOGE("Update ANDROID_SYNC_FRAME_NUMBER Error!");
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004612 }
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004613 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004614 result->frame_number = frameworkFrameNumber;
4615 mCallbackOps->process_capture_result(mCallbackOps, result);
4616 }
4617 }
4618}
4619
4620/*===========================================================================
4621 * FUNCTION : orchestrateNotify
4622 *
4623 * DESCRIPTION: Orchestrates a notify to camera service
4624 *
4625 * PARAMETERS :
4626 *   @notify_msg : notify message to be sent to the framework
4627 *
4628 * RETURN :
4629 *
4630 *==========================================================================*/
4631void QCamera3HardwareInterface::orchestrateNotify(camera3_notify_msg_t *notify_msg)
4632{
4633 uint32_t frameworkFrameNumber;
4634 uint32_t internalFrameNumber = notify_msg->message.shutter.frame_number;
Thierry Strudel2896d122017-02-23 19:18:03 -08004635 int32_t rc = NO_ERROR;
4636
4637 rc = _orchestrationDb.getFrameworkFrameNumber(internalFrameNumber,
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004638 frameworkFrameNumber);
Thierry Strudel2896d122017-02-23 19:18:03 -08004639
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004640 if (rc != NO_ERROR) {
Thierry Strudel2896d122017-02-23 19:18:03 -08004641 if (notify_msg->message.error.error_code == CAMERA3_MSG_ERROR_DEVICE) {
4642 LOGD("Sending CAMERA3_MSG_ERROR_DEVICE to framework");
4643 frameworkFrameNumber = 0;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004644 } else {
Thierry Strudel2896d122017-02-23 19:18:03 -08004645 LOGE("Cannot find translated frameworkFrameNumber");
4646 assert(0);
4647 return;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004648 }
4649 }
Thierry Strudel2896d122017-02-23 19:18:03 -08004650
4651 if (frameworkFrameNumber == EMPTY_FRAMEWORK_FRAME_NUMBER) {
4652 LOGD("Internal Request drop the notifyCb");
4653 } else {
4654 notify_msg->message.shutter.frame_number = frameworkFrameNumber;
4655 mCallbackOps->notify(mCallbackOps, notify_msg);
4656 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004657}
4658
4659/*===========================================================================
4660 * FUNCTION : FrameNumberRegistry
4661 *
4662 * DESCRIPTION: Constructor
4663 *
4664 * PARAMETERS :
4665 *
4666 * RETURN :
4667 *
4668 *==========================================================================*/
4669FrameNumberRegistry::FrameNumberRegistry()
4670{
4671 _nextFreeInternalNumber = INTERNAL_FRAME_STARTING_NUMBER;
4672}
4673
4674/*===========================================================================
4675 * FUNCTION : ~FrameNumberRegistry
4676 *
4677 * DESCRIPTION: Destructor
4678 *
4679 * PARAMETERS :
4680 *
4681 * RETURN :
4682 *
4683 *==========================================================================*/
4684FrameNumberRegistry::~FrameNumberRegistry()
4685{
4686}
4687
4688/*===========================================================================
4689 * FUNCTION : PurgeOldEntriesLocked
4690 *
4691 * DESCRIPTION: Maintenance function to trigger the LRU cleanup mechanism
4692 *
4693 * PARAMETERS :
4694 *
4695 * RETURN : NONE
4696 *
4697 *==========================================================================*/
4698void FrameNumberRegistry::purgeOldEntriesLocked()
4699{
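    // Internal frame numbers increase monotonically, so the entries at the front of
    // the register are the oldest. Anything more than FRAME_REGISTER_LRU_SIZE behind
    // the next free internal number is purged.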
4700 while (_register.begin() != _register.end()) {
4701 auto itr = _register.begin();
4702 if (itr->first < (_nextFreeInternalNumber - FRAME_REGISTER_LRU_SIZE)) {
4703 _register.erase(itr);
4704 } else {
4705 return;
4706 }
4707 }
4708}
4709
4710/*===========================================================================
4711 * FUNCTION : allocStoreInternalFrameNumber
4712 *
4713 * DESCRIPTION: Method to note down a framework request and associate a new
4714 *              internal frame number with it
4715 *
4716 * PARAMETERS :
4717 * @fFrameNumber: Identifier given by framework
4718 * @internalFN : Output parameter which will have the newly generated internal
4719 * entry
4720 *
4721 * RETURN : Error code
4722 *
4723 *==========================================================================*/
4724int32_t FrameNumberRegistry::allocStoreInternalFrameNumber(uint32_t frameworkFrameNumber,
4725 uint32_t &internalFrameNumber)
4726{
4727 Mutex::Autolock lock(mRegistryLock);
4728 internalFrameNumber = _nextFreeInternalNumber++;
4729 LOGD("Storing ff#:%d, with internal:%d", frameworkFrameNumber, internalFrameNumber);
4730 _register.insert(std::pair<uint32_t,uint32_t>(internalFrameNumber, frameworkFrameNumber));
4731 purgeOldEntriesLocked();
4732 return NO_ERROR;
4733}
4734
4735/*===========================================================================
4736 * FUNCTION : generateStoreInternalFrameNumber
4737 *
4738 * DESCRIPTION: Method to generate a new internal frame number that is not
4739 *              associated with any framework request
4740 *
4741 * PARAMETERS :
4742 *   @internalFrame#: Output parameter which will have the newly generated internal frame number
4743 *
4744 *
4745 * RETURN : Error code
4746 *
4747 *==========================================================================*/
4748int32_t FrameNumberRegistry::generateStoreInternalFrameNumber(uint32_t &internalFrameNumber)
4749{
4750 Mutex::Autolock lock(mRegistryLock);
4751 internalFrameNumber = _nextFreeInternalNumber++;
4752 LOGD("Generated internal framenumber:%d", internalFrameNumber);
4753 _register.insert(std::pair<uint32_t,uint32_t>(internalFrameNumber, EMPTY_FRAMEWORK_FRAME_NUMBER));
4754 purgeOldEntriesLocked();
4755 return NO_ERROR;
4756}
4757
4758/*===========================================================================
4759 * FUNCTION : getFrameworkFrameNumber
4760 *
4761 * DESCRIPTION: Method to query the framework frame number given an internal frame number
4762 *
4763 * PARAMETERS :
4764 * @internalFrame#: Internal reference
4765 * @frameworkframenumber: Output parameter holding framework frame entry
4766 *
4767 * RETURN : Error code
4768 *
4769 *==========================================================================*/
4770int32_t FrameNumberRegistry::getFrameworkFrameNumber(uint32_t internalFrameNumber,
4771 uint32_t &frameworkFrameNumber)
4772{
4773 Mutex::Autolock lock(mRegistryLock);
4774 auto itr = _register.find(internalFrameNumber);
4775 if (itr == _register.end()) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08004776 LOGE("Cannot find internal#: %d", internalFrameNumber);
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004777 return -ENOENT;
4778 }
4779
4780 frameworkFrameNumber = itr->second;
4781 purgeOldEntriesLocked();
4782 return NO_ERROR;
4783}
Thierry Strudel3d639192016-09-09 11:52:26 -07004784
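/*===========================================================================
 * FUNCTION   : fillPbStreamConfig
 *
 * DESCRIPTION: Fills an HDR+ (pbcamera) stream configuration from a channel
 *              stream: dimensions, pixel format, per-plane stride/scanline, and
 *              the padding required to reach the backend's full frame length.
 *
 * PARAMETERS : @config     : stream configuration to fill
 *              @pbStreamId : HDR+ stream ID
 *              @channel    : channel owning the stream
 *              @streamIndex: index of the stream within the channel
 *
 * RETURN     : OK on success
 *              BAD_VALUE or NAME_NOT_FOUND on failure
 *
 *==========================================================================*/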
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004785status_t QCamera3HardwareInterface::fillPbStreamConfig(
Chien-Yu Chen14d3e392017-07-10 18:27:05 -07004786 pbcamera::StreamConfiguration *config, uint32_t pbStreamId, QCamera3Channel *channel,
4787 uint32_t streamIndex) {
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004788 if (config == nullptr) {
4789 LOGE("%s: config is null", __FUNCTION__);
4790 return BAD_VALUE;
4791 }
4792
4793 if (channel == nullptr) {
4794 LOGE("%s: channel is null", __FUNCTION__);
4795 return BAD_VALUE;
4796 }
4797
4798 QCamera3Stream *stream = channel->getStreamByIndex(streamIndex);
4799 if (stream == nullptr) {
4800 LOGE("%s: Failed to get stream %d in channel.", __FUNCTION__, streamIndex);
4801 return NAME_NOT_FOUND;
4802 }
4803
4804 const cam_stream_info_t* streamInfo = stream->getStreamInfo();
4805 if (streamInfo == nullptr) {
4806 LOGE("%s: Failed to get stream info for stream %d in channel.", __FUNCTION__, streamIndex);
4807 return NAME_NOT_FOUND;
4808 }
4809
4810 config->id = pbStreamId;
4811 config->image.width = streamInfo->dim.width;
4812 config->image.height = streamInfo->dim.height;
4813 config->image.padding = 0;
Chien-Yu Chen14d3e392017-07-10 18:27:05 -07004814
4815 int bytesPerPixel = 0;
4816
4817 switch (streamInfo->fmt) {
4818 case CAM_FORMAT_YUV_420_NV21:
4819 config->image.format = HAL_PIXEL_FORMAT_YCrCb_420_SP;
4820 bytesPerPixel = 1;
4821 break;
4822 case CAM_FORMAT_YUV_420_NV12:
4823 case CAM_FORMAT_YUV_420_NV12_VENUS:
4824 config->image.format = HAL_PIXEL_FORMAT_YCbCr_420_SP;
4825 bytesPerPixel = 1;
4826 break;
4827 default:
4828 ALOGE("%s: Stream format %d not supported.", __FUNCTION__, streamInfo->fmt);
4829 return BAD_VALUE;
4830 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004831
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004832 uint32_t totalPlaneSize = 0;
4833
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004834 // Fill plane information.
4835 for (uint32_t i = 0; i < streamInfo->buf_planes.plane_info.num_planes; i++) {
4836 pbcamera::PlaneConfiguration plane;
Chien-Yu Chen14d3e392017-07-10 18:27:05 -07004837 plane.stride = streamInfo->buf_planes.plane_info.mp[i].stride * bytesPerPixel;
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004838 plane.scanline = streamInfo->buf_planes.plane_info.mp[i].scanline;
4839 config->image.planes.push_back(plane);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004840
4841 totalPlaneSize += (plane.stride * plane.scanline);
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004842 }
4843
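    // Whatever the backend's frame length covers beyond the sum of the per-plane
    // (stride * scanline) sizes is reported as padding.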
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004844 config->image.padding = streamInfo->buf_planes.plane_info.frame_len - totalPlaneSize;
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004845 return OK;
4846}
4847
Thierry Strudel3d639192016-09-09 11:52:26 -07004848/*===========================================================================
4849 * FUNCTION : processCaptureRequest
4850 *
4851 * DESCRIPTION: process a capture request from camera service
4852 *
4853 * PARAMETERS :
4854 * @request : request from framework to process
4855 *
4856 * RETURN :
4857 *
4858 *==========================================================================*/
4859int QCamera3HardwareInterface::processCaptureRequest(
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004860 camera3_capture_request_t *request,
4861 List<InternalRequest> &internallyRequestedStreams)
Thierry Strudel3d639192016-09-09 11:52:26 -07004862{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004863 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_PROC_CAP_REQ);
Thierry Strudel3d639192016-09-09 11:52:26 -07004864 int rc = NO_ERROR;
4865 int32_t request_id;
4866 CameraMetadata meta;
Thierry Strudel3d639192016-09-09 11:52:26 -07004867 bool isVidBufRequested = false;
4868 camera3_stream_buffer_t *pInputBuffer = NULL;
Thierry Strudel54dc9782017-02-15 12:12:10 -08004869 char prop[PROPERTY_VALUE_MAX];
Thierry Strudel3d639192016-09-09 11:52:26 -07004870
4871 pthread_mutex_lock(&mMutex);
4872
4873 // Validate current state
4874 switch (mState) {
4875 case CONFIGURED:
4876 case STARTED:
4877 /* valid state */
4878 break;
4879
4880 case ERROR:
4881 pthread_mutex_unlock(&mMutex);
4882 handleCameraDeviceError();
4883 return -ENODEV;
4884
4885 default:
4886 LOGE("Invalid state %d", mState);
4887 pthread_mutex_unlock(&mMutex);
4888 return -ENODEV;
4889 }
4890
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004891 rc = validateCaptureRequest(request, internallyRequestedStreams);
Thierry Strudel3d639192016-09-09 11:52:26 -07004892 if (rc != NO_ERROR) {
4893 LOGE("incoming request is not valid");
4894 pthread_mutex_unlock(&mMutex);
4895 return rc;
4896 }
4897
4898 meta = request->settings;
4899
4900 // For first capture request, send capture intent, and
4901 // stream on all streams
4902 if (mState == CONFIGURED) {
Chien-Yu Chene96475e2017-04-11 11:53:26 -07004903 logEaselEvent("EASEL_STARTUP_LATENCY", "First request");
Thierry Strudel3d639192016-09-09 11:52:26 -07004904 // send an unconfigure to the backend so that the isp
4905 // resources are deallocated
4906 if (!mFirstConfiguration) {
4907 cam_stream_size_info_t stream_config_info;
4908 int32_t hal_version = CAM_HAL_V3;
4909 memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
4910 stream_config_info.buffer_info.min_buffers =
4911 MIN_INFLIGHT_REQUESTS;
4912 stream_config_info.buffer_info.max_buffers =
Thierry Strudel2896d122017-02-23 19:18:03 -08004913 m_bIs4KVideo ? 0 :
Jason Leea46ad5e2017-07-07 15:20:56 -07004914 m_bEis3PropertyEnabled && m_bIsVideo ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07004915 clear_metadata_buffer(mParameters);
4916 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4917 CAM_INTF_PARM_HAL_VERSION, hal_version);
4918 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4919 CAM_INTF_META_STREAM_INFO, stream_config_info);
4920 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
4921 mParameters);
4922 if (rc < 0) {
4923 LOGE("set_parms for unconfigure failed");
4924 pthread_mutex_unlock(&mMutex);
4925 return rc;
4926 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07004927
Thierry Strudel3d639192016-09-09 11:52:26 -07004928 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004929 mPerfLockMgr.acquirePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07004930 /* get eis information for stream configuration */
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004931 cam_is_type_t isTypeVideo, isTypePreview, is_type=IS_TYPE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07004932 char is_type_value[PROPERTY_VALUE_MAX];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004933 property_get("persist.camera.is_type", is_type_value, "4");
4934 isTypeVideo = static_cast<cam_is_type_t>(atoi(is_type_value));
4935 // Make default value for preview IS_TYPE as IS_TYPE_EIS_2_0
4936 property_get("persist.camera.is_type_preview", is_type_value, "4");
4937 isTypePreview = static_cast<cam_is_type_t>(atoi(is_type_value));
4938 LOGD("isTypeVideo: %d isTypePreview: %d", isTypeVideo, isTypePreview);
Thierry Strudel3d639192016-09-09 11:52:26 -07004939
4940 if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
4941 int32_t hal_version = CAM_HAL_V3;
4942 uint8_t captureIntent =
4943 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
4944 mCaptureIntent = captureIntent;
4945 clear_metadata_buffer(mParameters);
4946 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version);
4947 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_CAPTURE_INTENT, captureIntent);
4948 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07004949 if (mFirstConfiguration) {
4950 // configure instant AEC
4951 // Instant AEC is a session based parameter and it is needed only
4952 // once per complete session after open camera.
4953 // i.e. This is set only once for the first capture request, after open camera.
4954 setInstantAEC(meta);
4955 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004956 uint8_t fwkVideoStabMode=0;
4957 if (meta.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
4958 fwkVideoStabMode = meta.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
4959 }
4960
Xue Tuecac74e2017-04-17 13:58:15 -07004961        // If the EIS setprop is enabled, turn EIS on only for video/preview streams
4962 bool setEis = m_bEisEnable && m_bEisSupportedSize &&
Jason Lee603176d2017-05-31 11:43:27 -07004963 (isTypeVideo >= IS_TYPE_EIS_2_0) && !meta.exists(QCAMERA3_USE_AV_TIMER);
Thierry Strudel3d639192016-09-09 11:52:26 -07004964 int32_t vsMode;
4965 vsMode = (setEis)? DIS_ENABLE: DIS_DISABLE;
4966 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_DIS_ENABLE, vsMode)) {
4967 rc = BAD_VALUE;
4968 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004969 LOGD("setEis %d", setEis);
4970 bool eis3Supported = false;
4971 size_t count = IS_TYPE_MAX;
4972 count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
4973 for (size_t i = 0; i < count; i++) {
4974 if (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0) {
4975 eis3Supported = true;
4976 break;
4977 }
4978 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004979
4980 //IS type will be 0 unless EIS is supported. If EIS is supported
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004981 //it could either be 4 or 5 depending on the stream and video size
Thierry Strudel3d639192016-09-09 11:52:26 -07004982 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
4983 if (setEis) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004984 if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_PREVIEW) {
4985 is_type = isTypePreview;
4986 } else if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_VIDEO ) {
4987 if ( (isTypeVideo == IS_TYPE_EIS_3_0) && (eis3Supported == FALSE) ) {
4988 LOGW(" EIS_3.0 is not supported and so setting EIS_2.0");
Thierry Strudel3d639192016-09-09 11:52:26 -07004989 is_type = IS_TYPE_EIS_2_0;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004990 } else {
4991 is_type = isTypeVideo;
Thierry Strudel3d639192016-09-09 11:52:26 -07004992 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004993 } else {
4994 is_type = IS_TYPE_NONE;
4995 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004996 mStreamConfigInfo.is_type[i] = is_type;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004997 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07004998 mStreamConfigInfo.is_type[i] = IS_TYPE_NONE;
4999 }
5000 }
5001
5002 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5003 CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
5004
Thierry Strudel54dc9782017-02-15 12:12:10 -08005005 //Disable tintless only if the property is set to 0
5006 memset(prop, 0, sizeof(prop));
5007 property_get("persist.camera.tintless.enable", prop, "1");
5008 int32_t tintless_value = atoi(prop);
5009
Thierry Strudel3d639192016-09-09 11:52:26 -07005010 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5011 CAM_INTF_PARM_TINTLESS, tintless_value);
Thierry Strudel54dc9782017-02-15 12:12:10 -08005012
Thierry Strudel3d639192016-09-09 11:52:26 -07005013 //Disable CDS for HFR mode or if DIS/EIS is on.
5014        //CDS is a session parameter in the backend/ISP, so it needs to be set/reset
5015        //after every configure_stream
5016 if ((CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) ||
5017 (m_bIsVideo)) {
5018 int32_t cds = CAM_CDS_MODE_OFF;
5019 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5020 CAM_INTF_PARM_CDS_MODE, cds))
5021 LOGE("Failed to disable CDS for HFR mode");
5022
5023 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005024
5025 if (m_debug_avtimer || meta.exists(QCAMERA3_USE_AV_TIMER)) {
5026 uint8_t* use_av_timer = NULL;
5027
5028 if (m_debug_avtimer){
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005029 LOGI(" Enabling AV timer through setprop");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005030 use_av_timer = &m_debug_avtimer;
Binhao Lin09245482017-08-31 18:25:29 -07005031 m_bAVTimerEnabled = true;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005032 }
5033 else{
5034 use_av_timer =
5035 meta.find(QCAMERA3_USE_AV_TIMER).data.u8;
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005036 if (use_av_timer) {
Binhao Lin09245482017-08-31 18:25:29 -07005037 m_bAVTimerEnabled = true;
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005038 LOGI("Enabling AV timer through Metadata: use_av_timer: %d", *use_av_timer);
5039 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005040 }
5041
5042 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_USE_AV_TIMER, *use_av_timer)) {
5043 rc = BAD_VALUE;
5044 }
5045 }
5046
Thierry Strudel3d639192016-09-09 11:52:26 -07005047 setMobicat();
5048
Emilian Peev49c4c6b2017-04-24 10:21:34 +01005049 uint8_t nrMode = 0;
5050 if (meta.exists(ANDROID_NOISE_REDUCTION_MODE)) {
5051 nrMode = meta.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
5052 }
5053
Thierry Strudel3d639192016-09-09 11:52:26 -07005054 /* Set fps and hfr mode while sending meta stream info so that sensor
5055 * can configure appropriate streaming mode */
5056 mHFRVideoFps = DEFAULT_VIDEO_FPS;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005057 mMinInFlightRequests = MIN_INFLIGHT_REQUESTS;
5058 mMaxInFlightRequests = MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07005059 if (meta.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
5060 rc = setHalFpsRange(meta, mParameters);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005061 if (rc == NO_ERROR) {
5062 int32_t max_fps =
5063 (int32_t) meta.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
Zhijun He21b864a2016-06-24 13:41:19 -07005064 if (max_fps == 60 || mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005065 mMinInFlightRequests = MIN_INFLIGHT_60FPS_REQUESTS;
5066 }
5067 /* For HFR, more buffers are dequeued upfront to improve the performance */
5068 if (mBatchSize) {
5069 mMinInFlightRequests = MIN_INFLIGHT_HFR_REQUESTS;
5070 mMaxInFlightRequests = MAX_INFLIGHT_HFR_REQUESTS;
5071 }
5072 }
5073 else {
Thierry Strudel3d639192016-09-09 11:52:26 -07005074 LOGE("setHalFpsRange failed");
5075 }
5076 }
5077 if (meta.exists(ANDROID_CONTROL_MODE)) {
5078 uint8_t metaMode = meta.find(ANDROID_CONTROL_MODE).data.u8[0];
5079 rc = extractSceneMode(meta, metaMode, mParameters);
5080 if (rc != NO_ERROR) {
5081 LOGE("extractSceneMode failed");
5082 }
5083 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005084 memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07005085
Thierry Strudel04e026f2016-10-10 11:27:36 -07005086 if (meta.exists(QCAMERA3_VIDEO_HDR_MODE)) {
5087 cam_video_hdr_mode_t vhdr = (cam_video_hdr_mode_t)
5088 meta.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
5089 rc = setVideoHdrMode(mParameters, vhdr);
5090 if (rc != NO_ERROR) {
5091 LOGE("setVideoHDR is failed");
5092 }
5093 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005094
Ranjith Kagathi Anandae5df3752017-04-28 11:22:51 -07005095 if (meta.exists(TANGO_MODE_DATA_SENSOR_FULLFOV)) {
Ranjith Kagathi Ananda0533b682017-03-24 17:52:46 -07005096 uint8_t sensorModeFullFov =
Ranjith Kagathi Anandae5df3752017-04-28 11:22:51 -07005097 meta.find(TANGO_MODE_DATA_SENSOR_FULLFOV).data.u8[0];
Ranjith Kagathi Ananda0533b682017-03-24 17:52:46 -07005098 LOGD("SENSOR_MODE_FULLFOV %d" , sensorModeFullFov);
5099 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_SENSOR_MODE_FULLFOV,
5100 sensorModeFullFov)) {
5101 rc = BAD_VALUE;
5102 }
5103 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005104 //TODO: validate the arguments, HSV scenemode should have only the
5105 //advertised fps ranges
5106
5107        /* Set the capture intent, HAL version, tintless, stream info,
5108         * and DIS enable parameters to the backend */
5109 LOGD("set_parms META_STREAM_INFO " );
5110 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
Thierry Strudel2896d122017-02-23 19:18:03 -08005111 LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%" PRIx64
5112 ", Format:%d is_type: %d",
Thierry Strudel3d639192016-09-09 11:52:26 -07005113 mStreamConfigInfo.type[i],
5114 mStreamConfigInfo.stream_sizes[i].width,
5115 mStreamConfigInfo.stream_sizes[i].height,
5116 mStreamConfigInfo.postprocess_mask[i],
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005117 mStreamConfigInfo.format[i],
5118 mStreamConfigInfo.is_type[i]);
Thierry Strudel3d639192016-09-09 11:52:26 -07005119 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005120
Thierry Strudel3d639192016-09-09 11:52:26 -07005121 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
5122 mParameters);
5123 if (rc < 0) {
5124 LOGE("set_parms failed for hal version, stream info");
5125 }
5126
Chien-Yu Chen605c3872017-06-14 11:09:23 -07005127 cam_sensor_mode_info_t sensorModeInfo = {};
5128 rc = getSensorModeInfo(sensorModeInfo);
Thierry Strudel3d639192016-09-09 11:52:26 -07005129 if (rc != NO_ERROR) {
5130         LOGE("Failed to get sensor mode info");
5131 pthread_mutex_unlock(&mMutex);
5132 goto error_exit;
5133 }
5134
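    // Configure the crop region mapper with the full active array size and
    // the active array size of the selected sensor mode.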
5135 mCropRegionMapper.update(gCamCapability[mCameraId]->active_array_size.width,
5136 gCamCapability[mCameraId]->active_array_size.height,
Chien-Yu Chen605c3872017-06-14 11:09:23 -07005137 sensorModeInfo.active_array_size.width,
5138 sensorModeInfo.active_array_size.height);
Thierry Strudel3d639192016-09-09 11:52:26 -07005139
5140 /* Set batchmode before initializing channel. Since registerBuffer
5141      * internally initializes some of the channels, it is better to set batchmode
5142      * even before the first registerBuffer call */
5143 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5144 it != mStreamInfo.end(); it++) {
5145 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
5146 if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
5147 && mBatchSize) {
5148 rc = channel->setBatchSize(mBatchSize);
5149 //Disable per frame map unmap for HFR/batchmode case
5150 rc |= channel->setPerFrameMapUnmap(false);
5151 if (NO_ERROR != rc) {
5152 LOGE("Channel init failed %d", rc);
5153 pthread_mutex_unlock(&mMutex);
5154 goto error_exit;
5155 }
5156 }
5157 }
5158
5159 //First initialize all streams
5160 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5161 it != mStreamInfo.end(); it++) {
5162 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
Emilian Peev49c4c6b2017-04-24 10:21:34 +01005163
5164 /* Initial value of NR mode is needed before stream on */
5165 channel->setNRMode(nrMode);
Thierry Strudel3d639192016-09-09 11:52:26 -07005166 if ((((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) ||
5167 ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask())) &&
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005168 setEis) {
5169 for (size_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
5170 if ( (1U << mStreamConfigInfo.type[i]) == channel->getStreamTypeMask() ) {
5171 is_type = mStreamConfigInfo.is_type[i];
5172 break;
5173 }
5174 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005175 rc = channel->initialize(is_type);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005176 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07005177 rc = channel->initialize(IS_TYPE_NONE);
5178 }
5179 if (NO_ERROR != rc) {
5180 LOGE("Channel initialization failed %d", rc);
5181 pthread_mutex_unlock(&mMutex);
5182 goto error_exit;
5183 }
5184 }
5185
5186 if (mRawDumpChannel) {
5187 rc = mRawDumpChannel->initialize(IS_TYPE_NONE);
5188 if (rc != NO_ERROR) {
5189 LOGE("Error: Raw Dump Channel init failed");
5190 pthread_mutex_unlock(&mMutex);
5191 goto error_exit;
5192 }
5193 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07005194 if (mHdrPlusRawSrcChannel) {
5195 rc = mHdrPlusRawSrcChannel->initialize(IS_TYPE_NONE);
5196 if (rc != NO_ERROR) {
5197 LOGE("Error: HDR+ RAW Source Channel init failed");
5198 pthread_mutex_unlock(&mMutex);
5199 goto error_exit;
5200 }
5201 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005202 if (mSupportChannel) {
5203 rc = mSupportChannel->initialize(IS_TYPE_NONE);
5204 if (rc < 0) {
5205 LOGE("Support channel initialization failed");
5206 pthread_mutex_unlock(&mMutex);
5207 goto error_exit;
5208 }
5209 }
5210 if (mAnalysisChannel) {
5211 rc = mAnalysisChannel->initialize(IS_TYPE_NONE);
5212 if (rc < 0) {
5213 LOGE("Analysis channel initialization failed");
5214 pthread_mutex_unlock(&mMutex);
5215 goto error_exit;
5216 }
5217 }
5218 if (mDummyBatchChannel) {
5219 rc = mDummyBatchChannel->setBatchSize(mBatchSize);
5220 if (rc < 0) {
5221 LOGE("mDummyBatchChannel setBatchSize failed");
5222 pthread_mutex_unlock(&mMutex);
5223 goto error_exit;
5224 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005225 rc = mDummyBatchChannel->initialize(IS_TYPE_NONE);
Thierry Strudel3d639192016-09-09 11:52:26 -07005226 if (rc < 0) {
5227 LOGE("mDummyBatchChannel initialization failed");
5228 pthread_mutex_unlock(&mMutex);
5229 goto error_exit;
5230 }
5231 }
5232
5233 // Set bundle info
5234 rc = setBundleInfo();
5235 if (rc < 0) {
5236 LOGE("setBundleInfo failed %d", rc);
5237 pthread_mutex_unlock(&mMutex);
5238 goto error_exit;
5239 }
5240
5241 //update settings from app here
5242 if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
5243 mIsDeviceLinked = meta.find(QCAMERA3_DUALCAM_LINK_ENABLE).data.u8[0];
5244 LOGH("Dualcam: setting On=%d id =%d", mIsDeviceLinked, mCameraId);
5245 }
5246 if (meta.exists(QCAMERA3_DUALCAM_LINK_IS_MAIN)) {
5247 mIsMainCamera = meta.find(QCAMERA3_DUALCAM_LINK_IS_MAIN).data.u8[0];
5248 LOGH("Dualcam: Is this main camera = %d id =%d", mIsMainCamera, mCameraId);
5249 }
5250 if (meta.exists(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID)) {
5251 mLinkedCameraId = meta.find(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID).data.u8[0];
5252 LOGH("Dualcam: Linked camera Id %d id =%d", mLinkedCameraId, mCameraId);
5253
5254 if ( (mLinkedCameraId >= MM_CAMERA_MAX_NUM_SENSORS) &&
5255 (mLinkedCameraId != mCameraId) ) {
5256 LOGE("Dualcam: mLinkedCameraId %d is invalid, current cam id = %d",
5257 mLinkedCameraId, mCameraId);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005258 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005259 goto error_exit;
5260 }
5261 }
5262
5263 // add bundle related cameras
5264 LOGH("%s: Dualcam: id =%d, mIsDeviceLinked=%d", __func__,mCameraId, mIsDeviceLinked);
5265 if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005266 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
5267 &m_pDualCamCmdPtr->bundle_info;
5268 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
Thierry Strudel3d639192016-09-09 11:52:26 -07005269 if (mIsDeviceLinked)
5270 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_ON;
5271 else
5272 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
5273
5274 pthread_mutex_lock(&gCamLock);
5275
5276 if (sessionId[mLinkedCameraId] == 0xDEADBEEF) {
5277 LOGE("Dualcam: Invalid Session Id ");
5278 pthread_mutex_unlock(&gCamLock);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005279 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005280 goto error_exit;
5281 }
5282
5283 if (mIsMainCamera == 1) {
5284 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
5285 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
Thierry Strudel269c81a2016-10-12 12:13:59 -07005286 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005287 m_pRelCamSyncBuf->cam_role = CAM_ROLE_BAYER;
Thierry Strudel3d639192016-09-09 11:52:26 -07005288 // related session id should be session id of linked session
5289 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5290 } else {
5291 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
5292 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
Thierry Strudel269c81a2016-10-12 12:13:59 -07005293 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005294 m_pRelCamSyncBuf->cam_role = CAM_ROLE_MONO;
Thierry Strudel3d639192016-09-09 11:52:26 -07005295 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5296 }
Thierry Strudel2896d122017-02-23 19:18:03 -08005297 m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
Thierry Strudel3d639192016-09-09 11:52:26 -07005298 pthread_mutex_unlock(&gCamLock);
5299
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005300 rc = mCameraHandle->ops->set_dual_cam_cmd(
5301 mCameraHandle->camera_handle);
Thierry Strudel3d639192016-09-09 11:52:26 -07005302 if (rc < 0) {
5303 LOGE("Dualcam: link failed");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005304 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005305 goto error_exit;
5306 }
5307 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005308 goto no_error;
5309error_exit:
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005310 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07005311 return rc;
5312no_error:
Thierry Strudel3d639192016-09-09 11:52:26 -07005313 mWokenUpByDaemon = false;
5314 mPendingLiveRequest = 0;
5315 mFirstConfiguration = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07005316 }
5317
5318 uint32_t frameNumber = request->frame_number;
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005319 cam_stream_ID_t streamsArray;
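    // streamsArray collects the stream IDs (and buffer indices) requested in
    // this capture request; it is later passed to the backend via
    // CAM_INTF_META_STREAM_ID.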
Thierry Strudel3d639192016-09-09 11:52:26 -07005320
5321 if (mFlushPerf) {
5322 //we cannot accept any requests during flush
5323 LOGE("process_capture_request cannot proceed during flush");
5324 pthread_mutex_unlock(&mMutex);
5325 return NO_ERROR; //should return an error
5326 }
5327
5328 if (meta.exists(ANDROID_REQUEST_ID)) {
5329 request_id = meta.find(ANDROID_REQUEST_ID).data.i32[0];
5330 mCurrentRequestId = request_id;
5331 LOGD("Received request with id: %d", request_id);
5332 } else if (mState == CONFIGURED || mCurrentRequestId == -1){
5333 LOGE("Unable to find request id field, \
5334 & no previous id available");
5335 pthread_mutex_unlock(&mMutex);
5336 return NAME_NOT_FOUND;
5337 } else {
5338 LOGD("Re-using old request id");
5339 request_id = mCurrentRequestId;
5340 }
5341
5342 LOGH("num_output_buffers = %d input_buffer = %p frame_number = %d",
5343 request->num_output_buffers,
5344 request->input_buffer,
5345 frameNumber);
5346 // Acquire all request buffers first
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005347 streamsArray.num_streams = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07005348 int blob_request = 0;
Emilian Peev7650c122017-01-19 08:24:33 -08005349 bool depthRequestPresent = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07005350 uint32_t snapshotStreamId = 0;
5351 for (size_t i = 0; i < request->num_output_buffers; i++) {
5352 const camera3_stream_buffer_t& output = request->output_buffers[i];
5353 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
5354
Emilian Peev7650c122017-01-19 08:24:33 -08005355 if ((output.stream->format == HAL_PIXEL_FORMAT_BLOB) &&
5356 (output.stream->data_space != HAL_DATASPACE_DEPTH)) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005357 //FIXME??:Call function to store local copy of jpeg data for encode params.
Thierry Strudel3d639192016-09-09 11:52:26 -07005358 blob_request = 1;
5359 snapshotStreamId = channel->getStreamID(channel->getStreamTypeMask());
5360 }
5361
5362 if (output.acquire_fence != -1) {
5363 rc = sync_wait(output.acquire_fence, TIMEOUT_NEVER);
5364 close(output.acquire_fence);
5365 if (rc != OK) {
5366 LOGE("sync wait failed %d", rc);
5367 pthread_mutex_unlock(&mMutex);
5368 return rc;
5369 }
5370 }
5371
Emilian Peev0f3c3162017-03-15 12:57:46 +00005372 if ((output.stream->format == HAL_PIXEL_FORMAT_BLOB) &&
5373 (output.stream->data_space == HAL_DATASPACE_DEPTH)) {
Emilian Peev7650c122017-01-19 08:24:33 -08005374 depthRequestPresent = true;
5375 continue;
5376 }
5377
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005378 streamsArray.stream_request[streamsArray.num_streams++].streamID =
Thierry Strudel3d639192016-09-09 11:52:26 -07005379 channel->getStreamID(channel->getStreamTypeMask());
Thierry Strudel3d639192016-09-09 11:52:26 -07005380
5381 if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
5382 isVidBufRequested = true;
5383 }
5384 }
5385
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005386     //FIXME: Add checks to ensure no dups in validateCaptureRequest
5387 for (auto itr = internallyRequestedStreams.begin(); itr != internallyRequestedStreams.end();
5388 itr++) {
5389 QCamera3Channel *channel = (QCamera3Channel *)(*itr).stream->priv;
5390 streamsArray.stream_request[streamsArray.num_streams++].streamID =
5391 channel->getStreamID(channel->getStreamTypeMask());
5392
5393 if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
5394 isVidBufRequested = true;
5395 }
5396 }
5397
Thierry Strudel3d639192016-09-09 11:52:26 -07005398 if (blob_request) {
Shuzhen Wang850a7c22017-05-02 14:48:23 -07005399 ATRACE_ASYNC_BEGIN("SNAPSHOT", frameNumber);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005400 mPerfLockMgr.acquirePerfLock(PERF_LOCK_TAKE_SNAPSHOT);
Thierry Strudel3d639192016-09-09 11:52:26 -07005401 }
5402 if (blob_request && mRawDumpChannel) {
5403 LOGD("Trigger Raw based on blob request if Raw dump is enabled");
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005404 streamsArray.stream_request[streamsArray.num_streams].streamID =
Thierry Strudel3d639192016-09-09 11:52:26 -07005405 mRawDumpChannel->getStreamID(mRawDumpChannel->getStreamTypeMask());
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005406 streamsArray.stream_request[streamsArray.num_streams++].buf_index = CAM_FREERUN_IDX;
Thierry Strudel3d639192016-09-09 11:52:26 -07005407 }
5408
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005409 {
5410 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5411 // Request a RAW buffer if
5412 // 1. mHdrPlusRawSrcChannel is valid.
5413 // 2. frameNumber is multiples of kHdrPlusRawPeriod (in order to limit RAW capture rate.)
5414 // 3. There is no pending HDR+ request.
5415 if (mHdrPlusRawSrcChannel && frameNumber % kHdrPlusRawPeriod == 0 &&
5416 mHdrPlusPendingRequests.size() == 0) {
5417 streamsArray.stream_request[streamsArray.num_streams].streamID =
5418 mHdrPlusRawSrcChannel->getStreamID(mHdrPlusRawSrcChannel->getStreamTypeMask());
5419 streamsArray.stream_request[streamsArray.num_streams++].buf_index = CAM_FREERUN_IDX;
5420 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07005421 }
5422
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005423 //extract capture intent
5424 if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
5425 mCaptureIntent =
5426 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
5427 }
5428
5429 if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
5430 mCacMode =
5431 meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
5432 }
5433
Chien-Yu Chenbc730232017-07-12 14:49:55 -07005434 uint8_t requestedLensShadingMapMode;
5435 // Get the shading map mode.
5436 if (meta.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
5437 mLastRequestedLensShadingMapMode = requestedLensShadingMapMode =
5438 meta.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
5439 } else {
5440 requestedLensShadingMapMode = mLastRequestedLensShadingMapMode;
5441 }
5442
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005443 bool hdrPlusRequest = false;
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005444 HdrPlusPendingRequest pendingHdrPlusRequest = {};
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005445
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07005446 {
Chien-Yu Chen77ccd022017-06-23 12:00:36 -07005447 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07005448 // If this request has a still capture intent, try to submit an HDR+ request.
5449 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled &&
5450 mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE) {
5451 hdrPlusRequest = trySubmittingHdrPlusRequestLocked(&pendingHdrPlusRequest, *request, meta);
5452 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005453 }
5454
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005455 if (hdrPlusRequest) {
5456 // For a HDR+ request, just set the frame parameters.
5457 rc = setFrameParameters(request, streamsArray, blob_request, snapshotStreamId);
5458 if (rc < 0) {
5459 LOGE("fail to set frame parameters");
5460 pthread_mutex_unlock(&mMutex);
5461 return rc;
5462 }
5463 } else if(request->input_buffer == NULL) {
Thierry Strudel3d639192016-09-09 11:52:26 -07005464 /* Parse the settings:
5465 * - For every request in NORMAL MODE
5466 * - For every request in HFR mode during preview only case
5467 * - For first request of every batch in HFR mode during video
5468 * recording. In batchmode the same settings except frame number is
5469 * repeated in each request of the batch.
5470 */
5471 if (!mBatchSize ||
5472 (mBatchSize && !isVidBufRequested) ||
5473 (mBatchSize && isVidBufRequested && !mToBeQueuedVidBufs)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005474 rc = setFrameParameters(request, streamsArray, blob_request, snapshotStreamId);
Thierry Strudel3d639192016-09-09 11:52:26 -07005475 if (rc < 0) {
5476 LOGE("fail to set frame parameters");
5477 pthread_mutex_unlock(&mMutex);
5478 return rc;
5479 }
Chien-Yu Chenbc730232017-07-12 14:49:55 -07005480
5481 {
5482 // If HDR+ mode is enabled, override lens shading mode to ON so lens shading map
5483 // will be reported in result metadata.
5484 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
5485 if (mHdrPlusModeEnabled) {
5486 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_LENS_SHADING_MAP_MODE,
5487 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON);
5488 }
5489 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005490 }
5491         /* For batchMode HFR, setFrameParameters is not called for every
5492          * request; only the frame number of the latest request is parsed.
5493          * Keep track of the first and last frame numbers in a batch so that
5494          * metadata for the frame numbers of the batch can be duplicated in
5495          * handleBatchMetadata */
5496 if (mBatchSize) {
5497 if (!mToBeQueuedVidBufs) {
5498 //start of the batch
5499 mFirstFrameNumberInBatch = request->frame_number;
5500 }
5501 if(ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5502 CAM_INTF_META_FRAME_NUMBER, request->frame_number)) {
5503 LOGE("Failed to set the frame number in the parameters");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005504 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005505 return BAD_VALUE;
5506 }
5507 }
5508 if (mNeedSensorRestart) {
5509 /* Unlock the mutex as restartSensor waits on the channels to be
5510 * stopped, which in turn calls stream callback functions -
5511 * handleBufferWithLock and handleMetadataWithLock */
5512 pthread_mutex_unlock(&mMutex);
5513 rc = dynamicUpdateMetaStreamInfo();
5514 if (rc != NO_ERROR) {
5515 LOGE("Restarting the sensor failed");
5516 return BAD_VALUE;
5517 }
5518 mNeedSensorRestart = false;
5519 pthread_mutex_lock(&mMutex);
5520 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005521 if(mResetInstantAEC) {
5522 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5523 CAM_INTF_PARM_INSTANT_AEC, (uint8_t)CAM_AEC_NORMAL_CONVERGENCE);
5524 mResetInstantAEC = false;
5525 }
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005526 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07005527 if (request->input_buffer->acquire_fence != -1) {
5528 rc = sync_wait(request->input_buffer->acquire_fence, TIMEOUT_NEVER);
5529 close(request->input_buffer->acquire_fence);
5530 if (rc != OK) {
5531 LOGE("input buffer sync wait failed %d", rc);
5532 pthread_mutex_unlock(&mMutex);
5533 return rc;
5534 }
5535 }
5536 }
5537
5538 if (mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM) {
5539 mLastCustIntentFrmNum = frameNumber;
5540 }
5541 /* Update pending request list and pending buffers map */
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005542 PendingRequestInfo pendingRequest = {};
Thierry Strudel3d639192016-09-09 11:52:26 -07005543 pendingRequestIterator latestRequest;
5544 pendingRequest.frame_number = frameNumber;
Emilian Peev7650c122017-01-19 08:24:33 -08005545 pendingRequest.num_buffers = depthRequestPresent ?
5546 (request->num_output_buffers - 1 ) : request->num_output_buffers;
Thierry Strudel3d639192016-09-09 11:52:26 -07005547 pendingRequest.request_id = request_id;
5548 pendingRequest.blob_request = blob_request;
5549 pendingRequest.timestamp = 0;
Chien-Yu Chenbc730232017-07-12 14:49:55 -07005550 pendingRequest.requestedLensShadingMapMode = requestedLensShadingMapMode;
Thierry Strudel3d639192016-09-09 11:52:26 -07005551 if (request->input_buffer) {
5552 pendingRequest.input_buffer =
5553 (camera3_stream_buffer_t*)malloc(sizeof(camera3_stream_buffer_t));
5554 *(pendingRequest.input_buffer) = *(request->input_buffer);
5555 pInputBuffer = pendingRequest.input_buffer;
5556 } else {
5557 pendingRequest.input_buffer = NULL;
5558 pInputBuffer = NULL;
5559 }
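    // The first (early) partial result may be used only before streaming has
    // started and only for non-reprocess requests.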
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005560 pendingRequest.bUseFirstPartial = (mState == CONFIGURED && !request->input_buffer);
Thierry Strudel3d639192016-09-09 11:52:26 -07005561
5562 pendingRequest.pipeline_depth = 0;
5563 pendingRequest.partial_result_cnt = 0;
5564 extractJpegMetadata(mCurJpegMeta, request);
5565 pendingRequest.jpegMetadata = mCurJpegMeta;
5566 pendingRequest.settings = saveRequestSettings(mCurJpegMeta, request);
Thierry Strudel3d639192016-09-09 11:52:26 -07005567 pendingRequest.capture_intent = mCaptureIntent;
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07005568 if (meta.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
Shuzhen Wang77b049a2017-08-30 12:24:36 -07005569 pendingRequest.hybrid_ae_enable =
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07005570 meta.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8[0];
5571 }
Chien-Yu Chen98b126c2017-03-14 14:55:32 -07005572
Samuel Ha68ba5172016-12-15 18:41:12 -08005573 /* DevCamDebug metadata processCaptureRequest */
5574 if (meta.exists(DEVCAMDEBUG_META_ENABLE)) {
5575 mDevCamDebugMetaEnable =
5576 meta.find(DEVCAMDEBUG_META_ENABLE).data.u8[0];
5577 }
5578 pendingRequest.DevCamDebug_meta_enable = mDevCamDebugMetaEnable;
5579 /* DevCamDebug metadata end */
Thierry Strudel3d639192016-09-09 11:52:26 -07005580
5581 //extract CAC info
5582 if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
5583 mCacMode =
5584 meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
5585 }
5586 pendingRequest.fwkCacMode = mCacMode;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005587 pendingRequest.hdrplus = hdrPlusRequest;
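    // Record the expected duration of this frame and add it to the running
    // total of expected in-flight duration.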
Emilian Peev30522a12017-08-03 14:36:33 +01005588 pendingRequest.expectedFrameDuration = mExpectedFrameDuration;
5589 mExpectedInflightDuration += mExpectedFrameDuration;
Thierry Strudel3d639192016-09-09 11:52:26 -07005590
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07005591 // extract enableZsl info
5592 if (gExposeEnableZslKey) {
5593 if (meta.exists(ANDROID_CONTROL_ENABLE_ZSL)) {
5594 pendingRequest.enableZsl = meta.find(ANDROID_CONTROL_ENABLE_ZSL).data.u8[0];
5595 mZslEnabled = pendingRequest.enableZsl;
5596 } else {
5597 pendingRequest.enableZsl = mZslEnabled;
5598 }
5599 }
5600
Thierry Strudel3d639192016-09-09 11:52:26 -07005601 PendingBuffersInRequest bufsForCurRequest;
5602 bufsForCurRequest.frame_number = frameNumber;
5603 // Mark current timestamp for the new request
5604 bufsForCurRequest.timestamp = systemTime(CLOCK_MONOTONIC);
Binhao Lin09245482017-08-31 18:25:29 -07005605 bufsForCurRequest.av_timestamp = 0;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005606 bufsForCurRequest.hdrplus = hdrPlusRequest;
Thierry Strudel3d639192016-09-09 11:52:26 -07005607
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005608 if (hdrPlusRequest) {
5609 // Save settings for this request.
5610 pendingHdrPlusRequest.settings = std::make_shared<metadata_buffer_t>();
5611 memcpy(pendingHdrPlusRequest.settings.get(), mParameters, sizeof(metadata_buffer_t));
5612
5613 // Add to pending HDR+ request queue.
5614 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5615 mHdrPlusPendingRequests.emplace(frameNumber, pendingHdrPlusRequest);
5616
5617 ALOGD("%s: frame number %u is an HDR+ request.", __FUNCTION__, frameNumber);
5618 }
5619
Thierry Strudel3d639192016-09-09 11:52:26 -07005620 for (size_t i = 0; i < request->num_output_buffers; i++) {
Emilian Peev0f3c3162017-03-15 12:57:46 +00005621 if ((request->output_buffers[i].stream->data_space ==
5622 HAL_DATASPACE_DEPTH) &&
5623 (HAL_PIXEL_FORMAT_BLOB ==
5624 request->output_buffers[i].stream->format)) {
Emilian Peev7650c122017-01-19 08:24:33 -08005625 continue;
5626 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005627 RequestedBufferInfo requestedBuf;
5628 memset(&requestedBuf, 0, sizeof(requestedBuf));
5629 requestedBuf.stream = request->output_buffers[i].stream;
5630 requestedBuf.buffer = NULL;
5631 pendingRequest.buffers.push_back(requestedBuf);
5632
5633             // Add the buffer handle to the pending buffers list
5634 PendingBufferInfo bufferInfo;
5635 bufferInfo.buffer = request->output_buffers[i].buffer;
5636 bufferInfo.stream = request->output_buffers[i].stream;
5637 bufsForCurRequest.mPendingBufferList.push_back(bufferInfo);
5638 QCamera3Channel *channel = (QCamera3Channel *)bufferInfo.stream->priv;
5639 LOGD("frame = %d, buffer = %p, streamTypeMask = %d, stream format = %d",
5640 frameNumber, bufferInfo.buffer,
5641 channel->getStreamTypeMask(), bufferInfo.stream->format);
5642 }
5643 // Add this request packet into mPendingBuffersMap
5644 mPendingBuffersMap.mPendingBuffersInRequest.push_back(bufsForCurRequest);
5645 LOGD("mPendingBuffersMap.num_overall_buffers = %d",
5646 mPendingBuffersMap.get_num_overall_buffers());
5647
5648 latestRequest = mPendingRequestsList.insert(
5649 mPendingRequestsList.end(), pendingRequest);
Chien-Yu Chen3f303522017-05-19 15:21:45 -07005650
5651 // Let shutter dispatcher and buffer dispatcher know shutter and output buffers are expected
5652 // for the frame number.
Chien-Yu Chena7f98612017-06-20 16:54:10 -07005653 mShutterDispatcher.expectShutter(frameNumber, request->input_buffer != nullptr);
Chien-Yu Chen3f303522017-05-19 15:21:45 -07005654 for (size_t i = 0; i < request->num_output_buffers; i++) {
5655 mOutputBufferDispatcher.expectBuffer(frameNumber, request->output_buffers[i].stream);
5656 }
5657
Thierry Strudel3d639192016-09-09 11:52:26 -07005658 if(mFlush) {
5659 LOGI("mFlush is true");
5660 pthread_mutex_unlock(&mMutex);
5661 return NO_ERROR;
5662 }
5663
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005664 // If this is not an HDR+ request, send the request to metadata and each output buffer's
5665 // channel.
5666 if (!hdrPlusRequest) {
5667 int indexUsed;
5668 // Notify metadata channel we receive a request
5669 mMetadataChannel->request(NULL, frameNumber, indexUsed);
Thierry Strudel3d639192016-09-09 11:52:26 -07005670
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005671 if(request->input_buffer != NULL){
5672 LOGD("Input request, frame_number %d", frameNumber);
5673 rc = setReprocParameters(request, &mReprocMeta, snapshotStreamId);
5674 if (NO_ERROR != rc) {
5675 LOGE("fail to set reproc parameters");
5676 pthread_mutex_unlock(&mMutex);
5677 return rc;
5678 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005679 }
5680
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005681 // Call request on other streams
5682 uint32_t streams_need_metadata = 0;
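        // streams_need_metadata counts output streams that need the HAL
        // metadata buffer for reprocessing; requests needing it for more than
        // one stream are rejected further below.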
5683 pendingBufferIterator pendingBufferIter = latestRequest->buffers.begin();
5684 for (size_t i = 0; i < request->num_output_buffers; i++) {
5685 const camera3_stream_buffer_t& output = request->output_buffers[i];
5686 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
5687
5688 if (channel == NULL) {
5689 LOGW("invalid channel pointer for stream");
5690 continue;
5691 }
5692
5693 if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
5694 LOGD("snapshot request with output buffer %p, input buffer %p, frame_number %d",
5695 output.buffer, request->input_buffer, frameNumber);
5696 if(request->input_buffer != NULL){
Thierry Strudel3d639192016-09-09 11:52:26 -07005697 rc = channel->request(output.buffer, frameNumber,
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005698 pInputBuffer, &mReprocMeta, indexUsed, false, false);
5699 if (rc < 0) {
5700 LOGE("Fail to request on picture channel");
5701 pthread_mutex_unlock(&mMutex);
5702 return rc;
5703 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005704 } else {
Emilian Peev7650c122017-01-19 08:24:33 -08005705 if (HAL_DATASPACE_DEPTH == output.stream->data_space) {
5706 assert(NULL != mDepthChannel);
5707 assert(mDepthChannel == output.stream->priv);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005708
Emilian Peev7650c122017-01-19 08:24:33 -08005709 rc = mDepthChannel->mapBuffer(output.buffer, request->frame_number);
5710 if (rc < 0) {
5711 LOGE("Fail to map on depth buffer");
5712 pthread_mutex_unlock(&mMutex);
5713 return rc;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005714 }
Emilian Peev4e0fe952017-06-30 12:40:09 -07005715 continue;
Emilian Peev7650c122017-01-19 08:24:33 -08005716 } else {
5717 LOGD("snapshot request with buffer %p, frame_number %d",
5718 output.buffer, frameNumber);
5719 if (!request->settings) {
5720 rc = channel->request(output.buffer, frameNumber,
5721 NULL, mPrevParameters, indexUsed);
5722 } else {
5723 rc = channel->request(output.buffer, frameNumber,
5724 NULL, mParameters, indexUsed);
5725 }
5726 if (rc < 0) {
5727 LOGE("Fail to request on picture channel");
5728 pthread_mutex_unlock(&mMutex);
5729 return rc;
5730 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005731
Emilian Peev7650c122017-01-19 08:24:33 -08005732 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5733 uint32_t j = 0;
5734 for (j = 0; j < streamsArray.num_streams; j++) {
5735 if (streamsArray.stream_request[j].streamID == streamId) {
5736 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5737 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5738 else
5739 streamsArray.stream_request[j].buf_index = indexUsed;
5740 break;
5741 }
5742 }
5743 if (j == streamsArray.num_streams) {
5744 LOGE("Did not find matching stream to update index");
5745 assert(0);
5746 }
5747
5748 pendingBufferIter->need_metadata = true;
5749 streams_need_metadata++;
5750 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005751 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005752 } else if (output.stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
5753 bool needMetadata = false;
5754 QCamera3YUVChannel *yuvChannel = (QCamera3YUVChannel *)channel;
5755 rc = yuvChannel->request(output.buffer, frameNumber,
5756 pInputBuffer, (pInputBuffer ? &mReprocMeta : mParameters),
5757 needMetadata, indexUsed, false, false);
Thierry Strudel3d639192016-09-09 11:52:26 -07005758 if (rc < 0) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005759 LOGE("Fail to request on YUV channel");
Thierry Strudel3d639192016-09-09 11:52:26 -07005760 pthread_mutex_unlock(&mMutex);
5761 return rc;
5762 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005763
5764 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5765 uint32_t j = 0;
5766 for (j = 0; j < streamsArray.num_streams; j++) {
5767 if (streamsArray.stream_request[j].streamID == streamId) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005768 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5769 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5770 else
5771 streamsArray.stream_request[j].buf_index = indexUsed;
5772 break;
5773 }
5774 }
5775 if (j == streamsArray.num_streams) {
5776 LOGE("Did not find matching stream to update index");
5777 assert(0);
5778 }
5779
5780 pendingBufferIter->need_metadata = needMetadata;
5781 if (needMetadata)
5782 streams_need_metadata += 1;
5783 LOGD("calling YUV channel request, need_metadata is %d",
5784 needMetadata);
5785 } else {
5786 LOGD("request with buffer %p, frame_number %d",
5787 output.buffer, frameNumber);
5788
5789 rc = channel->request(output.buffer, frameNumber, indexUsed);
5790
5791 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5792 uint32_t j = 0;
5793 for (j = 0; j < streamsArray.num_streams; j++) {
5794 if (streamsArray.stream_request[j].streamID == streamId) {
5795 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5796 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5797 else
5798 streamsArray.stream_request[j].buf_index = indexUsed;
5799 break;
5800 }
5801 }
5802 if (j == streamsArray.num_streams) {
5803 LOGE("Did not find matching stream to update index");
5804 assert(0);
5805 }
5806
5807 if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
5808 && mBatchSize) {
5809 mToBeQueuedVidBufs++;
5810 if (mToBeQueuedVidBufs == mBatchSize) {
5811 channel->queueBatchBuf();
5812 }
5813 }
5814 if (rc < 0) {
5815 LOGE("request failed");
5816 pthread_mutex_unlock(&mMutex);
5817 return rc;
5818 }
5819 }
5820 pendingBufferIter++;
5821 }
5822
5823 for (auto itr = internallyRequestedStreams.begin(); itr != internallyRequestedStreams.end();
5824 itr++) {
5825 QCamera3Channel *channel = (QCamera3Channel *)(*itr).stream->priv;
5826
5827 if (channel == NULL) {
5828 LOGE("invalid channel pointer for stream");
5829 assert(0);
Shuzhen Wang3a1b92d2017-08-09 13:39:47 -07005830 pthread_mutex_unlock(&mMutex);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005831 return BAD_VALUE;
5832 }
5833
5834 InternalRequest requestedStream;
5835 requestedStream = (*itr);
5836
5837
5838 if ((*itr).stream->format == HAL_PIXEL_FORMAT_BLOB) {
5839 LOGD("snapshot request internally input buffer %p, frame_number %d",
5840 request->input_buffer, frameNumber);
5841 if(request->input_buffer != NULL){
5842 rc = channel->request(NULL, frameNumber,
5843 pInputBuffer, &mReprocMeta, indexUsed, true,
5844 requestedStream.meteringOnly);
5845 if (rc < 0) {
5846 LOGE("Fail to request on picture channel");
5847 pthread_mutex_unlock(&mMutex);
5848 return rc;
5849 }
5850 } else {
5851 LOGD("snapshot request with frame_number %d", frameNumber);
5852 if (!request->settings) {
5853 rc = channel->request(NULL, frameNumber,
5854 NULL, mPrevParameters, indexUsed, true,
5855 requestedStream.meteringOnly);
5856 } else {
5857 rc = channel->request(NULL, frameNumber,
5858 NULL, mParameters, indexUsed, true, requestedStream.meteringOnly);
5859 }
5860 if (rc < 0) {
5861 LOGE("Fail to request on picture channel");
5862 pthread_mutex_unlock(&mMutex);
5863 return rc;
5864 }
5865
5866 if ((*itr).meteringOnly != 1) {
5867 requestedStream.need_metadata = 1;
5868 streams_need_metadata++;
5869 }
5870 }
5871
5872 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5873 uint32_t j = 0;
5874 for (j = 0; j < streamsArray.num_streams; j++) {
5875 if (streamsArray.stream_request[j].streamID == streamId) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005876 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5877 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5878 else
5879 streamsArray.stream_request[j].buf_index = indexUsed;
5880 break;
5881 }
5882 }
5883 if (j == streamsArray.num_streams) {
5884 LOGE("Did not find matching stream to update index");
5885 assert(0);
5886 }
5887
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005888 } else {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005889 LOGE("Internal requests not supported on this stream type");
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005890 assert(0);
Shuzhen Wang3a1b92d2017-08-09 13:39:47 -07005891 pthread_mutex_unlock(&mMutex);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005892 return INVALID_OPERATION;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005893 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005894 latestRequest->internalRequestList.push_back(requestedStream);
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005895 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005896
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005897 //If 2 streams have need_metadata set to true, fail the request, unless
5898 //we copy/reference count the metadata buffer
5899 if (streams_need_metadata > 1) {
5900             LOGE("not supporting request in which two streams require"
5901 " 2 HAL metadata for reprocessing");
5902 pthread_mutex_unlock(&mMutex);
5903 return -EINVAL;
5904 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005905
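    // Select the PD (phase detection) data mode: SKIP by default when a depth
    // channel exists, DISABLED otherwise; a per-request vendor tag overrides
    // this, and the last requested mode is reused when the tag is absent.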
Emilian Peev656e4fa2017-06-02 16:47:04 +01005906 cam_sensor_pd_data_t pdafEnable = (nullptr != mDepthChannel) ?
5907 CAM_PD_DATA_SKIP : CAM_PD_DATA_DISABLED;
5908 if (depthRequestPresent && mDepthChannel) {
5909 if (request->settings) {
5910 camera_metadata_ro_entry entry;
5911 if (find_camera_metadata_ro_entry(request->settings,
5912 NEXUS_EXPERIMENTAL_2017_PD_DATA_ENABLE, &entry) == 0) {
5913 if (entry.data.u8[0]) {
5914 pdafEnable = CAM_PD_DATA_ENABLED;
5915 } else {
5916 pdafEnable = CAM_PD_DATA_SKIP;
5917 }
5918 mDepthCloudMode = pdafEnable;
5919 } else {
5920 pdafEnable = mDepthCloudMode;
5921 }
5922 } else {
5923 pdafEnable = mDepthCloudMode;
5924 }
5925 }
5926
Emilian Peev7650c122017-01-19 08:24:33 -08005927 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5928 CAM_INTF_META_PDAF_DATA_ENABLE, pdafEnable)) {
5929 LOGE("%s: Failed to enable PDAF data in parameters!", __func__);
5930 pthread_mutex_unlock(&mMutex);
5931 return BAD_VALUE;
5932 }
Emilian Peev656e4fa2017-06-02 16:47:04 +01005933
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005934 if (request->input_buffer == NULL) {
5935 /* Set the parameters to backend:
5936 * - For every request in NORMAL MODE
5937 * - For every request in HFR mode during preview only case
5938 * - Once every batch in HFR mode during video recording
5939 */
5940 if (!mBatchSize ||
5941 (mBatchSize && !isVidBufRequested) ||
5942 (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize))) {
5943 LOGD("set_parms batchSz: %d IsVidBufReq: %d vidBufTobeQd: %d ",
5944 mBatchSize, isVidBufRequested,
5945 mToBeQueuedVidBufs);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005946
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005947 if(mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize)) {
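                // Merge this request's streams into the accumulated batch
                // stream list, skipping duplicates, so that the whole batch is
                // submitted to the backend in a single set_parms call.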
5948 for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
5949 uint32_t m = 0;
5950 for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
5951 if (streamsArray.stream_request[k].streamID ==
5952 mBatchedStreamsArray.stream_request[m].streamID)
5953 break;
5954 }
5955 if (m == mBatchedStreamsArray.num_streams) {
5956 mBatchedStreamsArray.stream_request\
5957 [mBatchedStreamsArray.num_streams].streamID =
5958 streamsArray.stream_request[k].streamID;
5959 mBatchedStreamsArray.stream_request\
5960 [mBatchedStreamsArray.num_streams].buf_index =
5961 streamsArray.stream_request[k].buf_index;
5962 mBatchedStreamsArray.num_streams =
5963 mBatchedStreamsArray.num_streams + 1;
5964 }
5965 }
5966 streamsArray = mBatchedStreamsArray;
5967 }
5968 /* Update stream id of all the requested buffers */
5969 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID,
5970 streamsArray)) {
5971 LOGE("Failed to set stream type mask in the parameters");
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005972 pthread_mutex_unlock(&mMutex);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005973 return BAD_VALUE;
5974 }
5975
5976 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
5977 mParameters);
5978 if (rc < 0) {
5979 LOGE("set_parms failed");
5980 }
5981             /* reset to zero because the batch is queued */
5982 mToBeQueuedVidBufs = 0;
5983 mPendingBatchMap.add(frameNumber, mFirstFrameNumberInBatch);
5984 memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
5985 } else if (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs != mBatchSize)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005986 for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
5987 uint32_t m = 0;
5988 for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
5989 if (streamsArray.stream_request[k].streamID ==
5990 mBatchedStreamsArray.stream_request[m].streamID)
5991 break;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005992 }
5993 if (m == mBatchedStreamsArray.num_streams) {
5994 mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].
5995 streamID = streamsArray.stream_request[k].streamID;
5996 mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].
5997 buf_index = streamsArray.stream_request[k].buf_index;
5998 mBatchedStreamsArray.num_streams = mBatchedStreamsArray.num_streams + 1;
5999 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08006000 }
6001 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08006002 mPendingLiveRequest++;
Shuzhen Wang3c077d72017-04-20 22:48:59 -07006003
6004 // Start all streams after the first setting is sent, so that the
6005 // setting can be applied sooner: (0 + apply_delay)th frame.
6006 if (mState == CONFIGURED && mChannelHandle) {
6007 //Then start them.
6008 LOGH("Start META Channel");
6009 rc = mMetadataChannel->start();
6010 if (rc < 0) {
6011 LOGE("META channel start failed");
6012 pthread_mutex_unlock(&mMutex);
6013 return rc;
6014 }
6015
6016 if (mAnalysisChannel) {
6017 rc = mAnalysisChannel->start();
6018 if (rc < 0) {
6019 LOGE("Analysis channel start failed");
6020 mMetadataChannel->stop();
6021 pthread_mutex_unlock(&mMutex);
6022 return rc;
6023 }
6024 }
6025
6026 if (mSupportChannel) {
6027 rc = mSupportChannel->start();
6028 if (rc < 0) {
6029 LOGE("Support channel start failed");
6030 mMetadataChannel->stop();
6031 /* Although support and analysis are mutually exclusive today
6032                        adding it in any case for future proofing */
6033 if (mAnalysisChannel) {
6034 mAnalysisChannel->stop();
6035 }
6036 pthread_mutex_unlock(&mMutex);
6037 return rc;
6038 }
6039 }
6040 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6041 it != mStreamInfo.end(); it++) {
6042 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
6043 LOGH("Start Processing Channel mask=%d",
6044 channel->getStreamTypeMask());
6045 rc = channel->start();
6046 if (rc < 0) {
6047 LOGE("channel start failed");
6048 pthread_mutex_unlock(&mMutex);
6049 return rc;
6050 }
6051 }
6052
6053 if (mRawDumpChannel) {
6054 LOGD("Starting raw dump stream");
6055 rc = mRawDumpChannel->start();
6056 if (rc != NO_ERROR) {
6057 LOGE("Error Starting Raw Dump Channel");
6058 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6059 it != mStreamInfo.end(); it++) {
6060 QCamera3Channel *channel =
6061 (QCamera3Channel *)(*it)->stream->priv;
6062 LOGH("Stopping Processing Channel mask=%d",
6063 channel->getStreamTypeMask());
6064 channel->stop();
6065 }
6066 if (mSupportChannel)
6067 mSupportChannel->stop();
6068 if (mAnalysisChannel) {
6069 mAnalysisChannel->stop();
6070 }
6071 mMetadataChannel->stop();
6072 pthread_mutex_unlock(&mMutex);
6073 return rc;
6074 }
6075 }
6076
Chien-Yu Chence5b8662017-05-09 17:17:17 -07006077 // Configure modules for stream on.
Shuzhen Wang3c077d72017-04-20 22:48:59 -07006078 rc = mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
Chien-Yu Chence5b8662017-05-09 17:17:17 -07006079 mChannelHandle, /*start_sensor_streaming*/false);
Shuzhen Wang3c077d72017-04-20 22:48:59 -07006080 if (rc != NO_ERROR) {
6081 LOGE("start_channel failed %d", rc);
6082 pthread_mutex_unlock(&mMutex);
6083 return rc;
6084 }
Chien-Yu Chence5b8662017-05-09 17:17:17 -07006085
6086 {
6087 // Configure Easel for stream on.
Chien-Yu Chen77ccd022017-06-23 12:00:36 -07006088 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
Chien-Yu Chen605c3872017-06-14 11:09:23 -07006089
6090 // Now that sensor mode should have been selected, get the selected sensor mode
6091 // info.
6092 memset(&mSensorModeInfo, 0, sizeof(mSensorModeInfo));
6093 getCurrentSensorModeInfo(mSensorModeInfo);
6094
Chien-Yu Chence5b8662017-05-09 17:17:17 -07006095 if (EaselManagerClientOpened) {
6096 logEaselEvent("EASEL_STARTUP_LATENCY", "Starting MIPI");
Chien-Yu Chend77a5462017-06-02 18:00:38 -07006097 rc = gEaselManagerClient->startMipi(mCameraId, mSensorModeInfo.op_pixel_clk,
6098 /*enableCapture*/true);
Chien-Yu Chence5b8662017-05-09 17:17:17 -07006099 if (rc != OK) {
6100 ALOGE("%s: Failed to start MIPI rate for camera %u to %u", __FUNCTION__,
6101 mCameraId, mSensorModeInfo.op_pixel_clk);
6102 pthread_mutex_unlock(&mMutex);
6103 return rc;
6104 }
Chien-Yu Chene96475e2017-04-11 11:53:26 -07006105 logEaselEvent("EASEL_STARTUP_LATENCY", "Starting MIPI done");
Chien-Yu Chence5b8662017-05-09 17:17:17 -07006106 }
6107 }
6108
6109 // Start sensor streaming.
6110 rc = mCameraHandle->ops->start_sensor_streaming(mCameraHandle->camera_handle,
6111 mChannelHandle);
6112 if (rc != NO_ERROR) {
6113 LOGE("start_sensor_stream_on failed %d", rc);
6114 pthread_mutex_unlock(&mMutex);
6115 return rc;
6116 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07006117 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006118 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006119 }
6120
Chien-Yu Chenfe023bc2017-05-12 17:19:26 -07006121 // Enable HDR+ mode for the first PREVIEW_INTENT request.
Chenjie Luo4a761802017-06-13 17:35:54 +00006122 {
Chien-Yu Chen77ccd022017-06-23 12:00:36 -07006123 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
Chien-Yu Chend77a5462017-06-02 18:00:38 -07006124 if (gEaselManagerClient != nullptr && gEaselManagerClient->isEaselPresentOnDevice() &&
Chien-Yu Chenfe023bc2017-05-12 17:19:26 -07006125 !gEaselBypassOnly && !mFirstPreviewIntentSeen &&
6126 meta.exists(ANDROID_CONTROL_CAPTURE_INTENT) &&
6127 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0] ==
6128 ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW) {
Chien-Yu Chendeaebad2017-06-30 11:46:34 -07006129
6130 if (isSessionHdrPlusModeCompatible()) {
6131 rc = enableHdrPlusModeLocked();
6132 if (rc != OK) {
6133 LOGE("%s: Failed to open HDR+ asynchronously", __FUNCTION__);
6134 pthread_mutex_unlock(&mMutex);
6135 return rc;
6136 }
Chien-Yu Chenfe023bc2017-05-12 17:19:26 -07006137 }
6138
6139 mFirstPreviewIntentSeen = true;
6140 }
6141 }
6142
Thierry Strudel3d639192016-09-09 11:52:26 -07006143 LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
6144
6145 mState = STARTED;
6146     // Add a timed condition wait
6147 struct timespec ts;
6148 uint8_t isValidTimeout = 1;
Shuzhen Wangfb961e52016-11-28 11:48:02 -08006149 rc = clock_gettime(CLOCK_MONOTONIC, &ts);
Thierry Strudel3d639192016-09-09 11:52:26 -07006150 if (rc < 0) {
6151 isValidTimeout = 0;
6152         LOGE("Error reading the monotonic clock!!");
6153 }
6154 else {
6155         // Set a 5 second timeout for the request to be honored
Chien-Yu Chene687bd02016-12-07 18:30:26 -08006156 int64_t timeout = 5;
6157 {
6158 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
6159 // If there is a pending HDR+ request, the following requests may be blocked until the
6160 // HDR+ request is done. So allow a longer timeout.
6161 if (mHdrPlusPendingRequests.size() > 0) {
6162 timeout = MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT;
6163 }
6164 }
6165 ts.tv_sec += timeout;
Thierry Strudel3d639192016-09-09 11:52:26 -07006166 }
6167 //Block on conditional variable
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006168 while ((mPendingLiveRequest >= mMinInFlightRequests) && !pInputBuffer &&
Thierry Strudel3d639192016-09-09 11:52:26 -07006169 (mState != ERROR) && (mState != DEINIT)) {
6170 if (!isValidTimeout) {
6171 LOGD("Blocking on conditional wait");
6172 pthread_cond_wait(&mRequestCond, &mMutex);
6173 }
6174 else {
6175 LOGD("Blocking on timed conditional wait");
6176 rc = pthread_cond_timedwait(&mRequestCond, &mMutex, &ts);
6177 if (rc == ETIMEDOUT) {
6178 rc = -ENODEV;
6179 LOGE("Unblocked on timeout!!!!");
6180 break;
6181 }
6182 }
6183 LOGD("Unblocked");
6184 if (mWokenUpByDaemon) {
6185 mWokenUpByDaemon = false;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006186 if (mPendingLiveRequest < mMaxInFlightRequests)
Thierry Strudel3d639192016-09-09 11:52:26 -07006187 break;
6188 }
6189 }
6190 pthread_mutex_unlock(&mMutex);
6191
6192 return rc;
6193}
6194
6195/*===========================================================================
6196 * FUNCTION : dump
6197 *
6198 * DESCRIPTION:
6199  * DESCRIPTION: Dump pending request, pending buffer and pending frame drop
6200  *              information to the given file descriptor
6201  * PARAMETERS :
6202  *   @fd : file descriptor to write the dump to
6203  *
6204  * RETURN     : None
6205void QCamera3HardwareInterface::dump(int fd)
6206{
6207 pthread_mutex_lock(&mMutex);
6208 dprintf(fd, "\n Camera HAL3 information Begin \n");
6209
6210 dprintf(fd, "\nNumber of pending requests: %zu \n",
6211 mPendingRequestsList.size());
6212 dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
6213 dprintf(fd, " Frame | Number of Buffers | Req Id: | Blob Req | Input buffer present\n");
6214 dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
6215 for(pendingRequestIterator i = mPendingRequestsList.begin();
6216 i != mPendingRequestsList.end(); i++) {
6217 dprintf(fd, " %5d | %17d | %11d | %8d | %p \n",
6218 i->frame_number, i->num_buffers, i->request_id, i->blob_request,
6219 i->input_buffer);
6220 }
6221 dprintf(fd, "\nPending buffer map: Number of buffers: %u\n",
6222 mPendingBuffersMap.get_num_overall_buffers());
6223 dprintf(fd, "-------+------------------\n");
6224 dprintf(fd, " Frame | Stream type mask \n");
6225 dprintf(fd, "-------+------------------\n");
6226 for(auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
6227 for(auto &j : req.mPendingBufferList) {
6228 QCamera3Channel *channel = (QCamera3Channel *)(j.stream->priv);
6229 dprintf(fd, " %5d | %11d \n",
6230 req.frame_number, channel->getStreamTypeMask());
6231 }
6232 }
6233 dprintf(fd, "-------+------------------\n");
6234
6235 dprintf(fd, "\nPending frame drop list: %zu\n",
6236 mPendingFrameDropList.size());
6237 dprintf(fd, "-------+-----------\n");
6238 dprintf(fd, " Frame | Stream ID \n");
6239 dprintf(fd, "-------+-----------\n");
6240 for(List<PendingFrameDropInfo>::iterator i = mPendingFrameDropList.begin();
6241 i != mPendingFrameDropList.end(); i++) {
6242 dprintf(fd, " %5d | %9d \n",
6243 i->frame_number, i->stream_ID);
6244 }
6245 dprintf(fd, "-------+-----------\n");
6246
6247 dprintf(fd, "\n Camera HAL3 information End \n");
6248
6249 /* use dumpsys media.camera as trigger to send update debug level event */
6250 mUpdateDebugLevel = true;
6251 pthread_mutex_unlock(&mMutex);
6252 return;
6253}
6254
6255/*===========================================================================
6256 * FUNCTION : flush
6257 *
6258 * DESCRIPTION: Calls stopAllChannels, notifyErrorForPendingRequests and
6259 * conditionally restarts channels
6260 *
6261 * PARAMETERS :
6262 * @ restartChannels: re-start all channels
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -07006263 * @ stopChannelImmediately: stop the channel immediately. This should be used
6264  *                             when the device has encountered an error and MIPI
6265  *                             may have been stopped.
Thierry Strudel3d639192016-09-09 11:52:26 -07006266 *
6267 * RETURN :
6268 * 0 on success
6269 * Error code on failure
6270 *==========================================================================*/
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -07006271int QCamera3HardwareInterface::flush(bool restartChannels, bool stopChannelImmediately)
Thierry Strudel3d639192016-09-09 11:52:26 -07006272{
Thierry Strudel9ec39c62016-12-28 11:30:05 -08006273 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_STOP_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07006274 int32_t rc = NO_ERROR;
6275
6276 LOGD("Unblocking Process Capture Request");
6277 pthread_mutex_lock(&mMutex);
6278 mFlush = true;
6279 pthread_mutex_unlock(&mMutex);
6280
6281 rc = stopAllChannels();
6282 // unlink of dualcam
6283 if (mIsDeviceLinked) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006284 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
6285 &m_pDualCamCmdPtr->bundle_info;
6286 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
Thierry Strudel3d639192016-09-09 11:52:26 -07006287 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
6288 pthread_mutex_lock(&gCamLock);
6289
6290 if (mIsMainCamera == 1) {
6291 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
6292 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
Thierry Strudel269c81a2016-10-12 12:13:59 -07006293 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel3d639192016-09-09 11:52:26 -07006294 // related session id should be session id of linked session
6295 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
6296 } else {
6297 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
6298 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
Thierry Strudel269c81a2016-10-12 12:13:59 -07006299 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel3d639192016-09-09 11:52:26 -07006300 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
6301 }
Thierry Strudel2896d122017-02-23 19:18:03 -08006302 m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
Thierry Strudel3d639192016-09-09 11:52:26 -07006303 pthread_mutex_unlock(&gCamLock);
6304
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006305 rc = mCameraHandle->ops->set_dual_cam_cmd(
6306 mCameraHandle->camera_handle);
Thierry Strudel3d639192016-09-09 11:52:26 -07006307 if (rc < 0) {
6308 LOGE("Dualcam: Unlink failed, but still proceed to close");
6309 }
6310 }
6311
6312 if (rc < 0) {
6313 LOGE("stopAllChannels failed");
6314 return rc;
6315 }
6316 if (mChannelHandle) {
6317 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -07006318 mChannelHandle, stopChannelImmediately);
Thierry Strudel3d639192016-09-09 11:52:26 -07006319 }
6320
6321 // Reset bundle info
6322 rc = setBundleInfo();
6323 if (rc < 0) {
6324 LOGE("setBundleInfo failed %d", rc);
6325 return rc;
6326 }
6327
6328 // Mutex Lock
6329 pthread_mutex_lock(&mMutex);
6330
6331 // Unblock process_capture_request
6332 mPendingLiveRequest = 0;
6333 pthread_cond_signal(&mRequestCond);
6334
6335 rc = notifyErrorForPendingRequests();
6336 if (rc < 0) {
6337 LOGE("notifyErrorForPendingRequests failed");
6338 pthread_mutex_unlock(&mMutex);
6339 return rc;
6340 }
6341
6342 mFlush = false;
6343
6344 // Start the Streams/Channels
6345 if (restartChannels) {
6346 rc = startAllChannels();
6347 if (rc < 0) {
6348 LOGE("startAllChannels failed");
6349 pthread_mutex_unlock(&mMutex);
6350 return rc;
6351 }
Thierry Strudel2896d122017-02-23 19:18:03 -08006352 if (mChannelHandle) {
6353 mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
Chien-Yu Chence5b8662017-05-09 17:17:17 -07006354 mChannelHandle, /*start_sensor_streaming*/true);
Thierry Strudel2896d122017-02-23 19:18:03 -08006355 if (rc < 0) {
6356 LOGE("start_channel failed");
6357 pthread_mutex_unlock(&mMutex);
6358 return rc;
6359 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006360 }
6361 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006362 pthread_mutex_unlock(&mMutex);
6363
6364 return 0;
6365}
6366
6367/*===========================================================================
6368 * FUNCTION : flushPerf
6369 *
6370 * DESCRIPTION: This is the performance optimization version of flush that does
6371  *              not use stream off; rather, it flushes the system
6372 *
6373 * PARAMETERS :
6374 *
6375 *
6376 * RETURN : 0 : success
6377 * -EINVAL: input is malformed (device is not valid)
6378 * -ENODEV: if the device has encountered a serious error
6379 *==========================================================================*/
6380int QCamera3HardwareInterface::flushPerf()
6381{
Thierry Strudel9ec39c62016-12-28 11:30:05 -08006382 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_STOP_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07006383 int32_t rc = 0;
6384 struct timespec timeout;
6385 bool timed_wait = false;
6386
6387 pthread_mutex_lock(&mMutex);
6388 mFlushPerf = true;
6389 mPendingBuffersMap.numPendingBufsAtFlush =
6390 mPendingBuffersMap.get_num_overall_buffers();
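    // Record how many buffers are pending at flush time; the wait loop below
    // blocks until all of them have been returned.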
6391 LOGD("Calling flush. Wait for %d buffers to return",
6392 mPendingBuffersMap.numPendingBufsAtFlush);
6393
6394 /* send the flush event to the backend */
6395 rc = mCameraHandle->ops->flush(mCameraHandle->camera_handle);
6396 if (rc < 0) {
6397 LOGE("Error in flush: IOCTL failure");
6398 mFlushPerf = false;
6399 pthread_mutex_unlock(&mMutex);
6400 return -ENODEV;
6401 }
6402
6403 if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
6404 LOGD("No pending buffers in HAL, return flush");
6405 mFlushPerf = false;
6406 pthread_mutex_unlock(&mMutex);
6407 return rc;
6408 }
6409
6410 /* wait on a signal that buffers were received */
Shuzhen Wangfb961e52016-11-28 11:48:02 -08006411 rc = clock_gettime(CLOCK_MONOTONIC, &timeout);
Thierry Strudel3d639192016-09-09 11:52:26 -07006412 if (rc < 0) {
6413         LOGE("Error reading the monotonic clock, cannot use timed wait");
6414 } else {
6415 timeout.tv_sec += FLUSH_TIMEOUT;
6416 timed_wait = true;
6417 }
6418
6419 //Block on conditional variable
6420 while (mPendingBuffersMap.numPendingBufsAtFlush != 0) {
6421 LOGD("Waiting on mBuffersCond");
6422 if (!timed_wait) {
6423 rc = pthread_cond_wait(&mBuffersCond, &mMutex);
6424 if (rc != 0) {
6425 LOGE("pthread_cond_wait failed due to rc = %s",
6426 strerror(rc));
6427 break;
6428 }
6429 } else {
6430 rc = pthread_cond_timedwait(&mBuffersCond, &mMutex, &timeout);
6431 if (rc != 0) {
6432 LOGE("pthread_cond_timedwait failed due to rc = %s",
6433 strerror(rc));
6434 break;
6435 }
6436 }
6437 }
6438 if (rc != 0) {
6439 mFlushPerf = false;
6440 pthread_mutex_unlock(&mMutex);
6441 return -ENODEV;
6442 }
6443
6444 LOGD("Received buffers, now safe to return them");
6445
6446 //make sure the channels handle flush
6447 //currently only required for the picture channel to release snapshot resources
6448 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6449 it != mStreamInfo.end(); it++) {
6450 QCamera3Channel *channel = (*it)->channel;
6451 if (channel) {
6452 rc = channel->flush();
6453 if (rc) {
6454 LOGE("Flushing the channels failed with error %d", rc);
6455                // Even though the channel flush failed, we need to continue and
6456                // return the buffers we have to the framework; however, the return
6457                // value will be an error.
6458 rc = -ENODEV;
6459 }
6460 }
6461 }
6462
6463 /* notify the frameworks and send errored results */
6464 rc = notifyErrorForPendingRequests();
6465 if (rc < 0) {
6466 LOGE("notifyErrorForPendingRequests failed");
6467 pthread_mutex_unlock(&mMutex);
6468 return rc;
6469 }
6470
6471 //unblock process_capture_request
6472 mPendingLiveRequest = 0;
6473 unblockRequestIfNecessary();
6474
6475 mFlushPerf = false;
6476 pthread_mutex_unlock(&mMutex);
6477 LOGD ("Flush Operation complete. rc = %d", rc);
6478 return rc;
6479}
6480
6481/*===========================================================================
6482 * FUNCTION : handleCameraDeviceError
6483 *
6484 * DESCRIPTION: This function performs an internal flush, notifies the error to
6485 *              the framework, and updates the state variable.
6486 *
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -07006487 * PARAMETERS :
6488 * @stopChannelImmediately : stop channels immediately without waiting for
6489 * frame boundary.
Thierry Strudel3d639192016-09-09 11:52:26 -07006490 *
6491 * RETURN : NO_ERROR on Success
6492 * Error code on failure
6493 *==========================================================================*/
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -07006494int32_t QCamera3HardwareInterface::handleCameraDeviceError(bool stopChannelImmediately)
Thierry Strudel3d639192016-09-09 11:52:26 -07006495{
6496 int32_t rc = NO_ERROR;
6497
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006498 {
6499 Mutex::Autolock lock(mFlushLock);
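        // Hold mFlushLock across the internal flush and the transition to DEINIT
        // so that a concurrent flush() call cannot interleave with error handling.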
6500 pthread_mutex_lock(&mMutex);
6501 if (mState != ERROR) {
6502 //if mState != ERROR, nothing to be done
6503 pthread_mutex_unlock(&mMutex);
6504 return NO_ERROR;
6505 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006506 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07006507
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -07006508 rc = flush(false /* restart channels */, stopChannelImmediately);
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006509 if (NO_ERROR != rc) {
6510 LOGE("internal flush to handle mState = ERROR failed");
6511 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006512
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006513 pthread_mutex_lock(&mMutex);
6514 mState = DEINIT;
6515 pthread_mutex_unlock(&mMutex);
6516 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006517
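    // Report a fatal device error to the framework; after CAMERA3_MSG_ERROR_DEVICE
    // the framework is expected to close this camera device.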
6518 camera3_notify_msg_t notify_msg;
6519 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
6520 notify_msg.type = CAMERA3_MSG_ERROR;
6521 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_DEVICE;
6522 notify_msg.message.error.error_stream = NULL;
6523 notify_msg.message.error.frame_number = 0;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006524 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -07006525
6526 return rc;
6527}
6528
6529/*===========================================================================
6530 * FUNCTION : captureResultCb
6531 *
6532 * DESCRIPTION: Callback handler for all capture results
6533 *              (streams as well as metadata)
6534 *
6535 * PARAMETERS :
6536 * @metadata : metadata information
6537 *   @buffer    : actual gralloc buffer to be returned to the framework.
6538 *                NULL if metadata.
 *   @frame_number : frame number of the corresponding request
 *   @isInputBuffer : whether this callback is for the request's input buffer
6539 *
6540 * RETURN : NONE
6541 *==========================================================================*/
6542void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf,
6543 camera3_stream_buffer_t *buffer, uint32_t frame_number, bool isInputBuffer)
6544{
6545 if (metadata_buf) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006546 pthread_mutex_lock(&mMutex);
6547 uint8_t batchSize = mBatchSize;
6548 pthread_mutex_unlock(&mMutex);
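        // In HFR batch mode a single metadata buffer carries results for the whole
        // batch; handleBatchMetadata() splits it into per-frame results.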
6549 if (batchSize) {
Thierry Strudel3d639192016-09-09 11:52:26 -07006550 handleBatchMetadata(metadata_buf,
6551 true /* free_and_bufdone_meta_buf */);
6552 } else { /* mBatchSize = 0 */
6553 hdrPlusPerfLock(metadata_buf);
6554 pthread_mutex_lock(&mMutex);
6555 handleMetadataWithLock(metadata_buf,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006556 true /* free_and_bufdone_meta_buf */,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006557 true /* last urgent frame of batch metadata */,
6558 true /* last frame of batch metadata */,
Thierry Strudel54dc9782017-02-15 12:12:10 -08006559 NULL);
Thierry Strudel3d639192016-09-09 11:52:26 -07006560 pthread_mutex_unlock(&mMutex);
6561 }
6562 } else if (isInputBuffer) {
6563 pthread_mutex_lock(&mMutex);
6564 handleInputBufferWithLock(frame_number);
6565 pthread_mutex_unlock(&mMutex);
6566 } else {
6567 pthread_mutex_lock(&mMutex);
6568 handleBufferWithLock(buffer, frame_number);
6569 pthread_mutex_unlock(&mMutex);
6570 }
6571 return;
6572}
6573
6574/*===========================================================================
6575 * FUNCTION : getReprocessibleOutputStreamId
6576 *
6577 * DESCRIPTION: Get source output stream id for the input reprocess stream
6578 *              based on size and format: the output or bidirectional stream
6579 *              matching the configured input stream, if one exists.
6580 *
6581 * PARAMETERS :
6582 * @id : return the stream id if found
6583 *
6584 * RETURN : int32_t type of status
6585 * NO_ERROR -- success
6586 *              non-zero failure code
6587 *==========================================================================*/
6588int32_t QCamera3HardwareInterface::getReprocessibleOutputStreamId(uint32_t &id)
6589{
6590 /* check if any output or bidirectional stream with the same size and format
6591 and return that stream */
6592 if ((mInputStreamInfo.dim.width > 0) &&
6593 (mInputStreamInfo.dim.height > 0)) {
6594 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6595 it != mStreamInfo.end(); it++) {
6596
6597 camera3_stream_t *stream = (*it)->stream;
6598 if ((stream->width == (uint32_t)mInputStreamInfo.dim.width) &&
6599 (stream->height == (uint32_t)mInputStreamInfo.dim.height) &&
6600 (stream->format == mInputStreamInfo.format)) {
6601 // Usage flag for an input stream and the source output stream
6602 // may be different.
6603 LOGD("Found reprocessible output stream! %p", *it);
6604 LOGD("input stream usage 0x%x, current stream usage 0x%x",
6605 stream->usage, mInputStreamInfo.usage);
6606
6607 QCamera3Channel *channel = (QCamera3Channel *)stream->priv;
6608 if (channel != NULL && channel->mStreams[0]) {
6609 id = channel->mStreams[0]->getMyServerID();
6610 return NO_ERROR;
6611 }
6612 }
6613 }
6614 } else {
6615 LOGD("No input stream, so no reprocessible output stream");
6616 }
6617 return NAME_NOT_FOUND;
6618}
6619
6620/*===========================================================================
6621 * FUNCTION : lookupFwkName
6622 *
6623 * DESCRIPTION: In case the enum is not the same in the framework and backend,
6624 *              make sure the parameter is correctly propagated
6625 *
6626 * PARAMETERS :
6627 * @arr : map between the two enums
6628 * @len : len of the map
6629 * @hal_name : name of the hal_parm to map
6630 *
6631 * RETURN : int type of status
6632 * fwk_name -- success
6633 *              non-zero failure code
6634 *==========================================================================*/
6635template <typename halType, class mapType> int lookupFwkName(const mapType *arr,
6636 size_t len, halType hal_name)
6637{
6638
6639 for (size_t i = 0; i < len; i++) {
6640 if (arr[i].hal_name == hal_name) {
6641 return arr[i].fwk_name;
6642 }
6643 }
6644
6645    /* Not being able to find a matching framework type is not necessarily
6646     * an error. This happens when mm-camera supports more attributes
6647     * than the framework does */
6648 LOGH("Cannot find matching framework type");
6649 return NAME_NOT_FOUND;
6650}
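
// Typical usage, mirroring the flash-mode translation later in this file:
//     int val = lookupFwkName(FLASH_MODES_MAP,
//             METADATA_MAP_SIZE(FLASH_MODES_MAP), *flashMode);
//     if (NAME_NOT_FOUND != val) { /* use (uint8_t)val */ }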
6651
6652/*===========================================================================
6653 * FUNCTION : lookupHalName
6654 *
6655 * DESCRIPTION: In case the enum is not the same in the framework and backend,
6656 *              make sure the parameter is correctly propagated
6657 *
6658 * PARAMETERS :
6659 * @arr : map between the two enums
6660 * @len : len of the map
6661 *   @fwk_name  : name of the fwk_parm to map
6662 *
6663 * RETURN : int32_t type of status
6664 * hal_name -- success
6665 * none-zero failure code
6666 *              non-zero failure code
6667template <typename fwkType, class mapType> int lookupHalName(const mapType *arr,
6668 size_t len, fwkType fwk_name)
6669{
6670 for (size_t i = 0; i < len; i++) {
6671 if (arr[i].fwk_name == fwk_name) {
6672 return arr[i].hal_name;
6673 }
6674 }
6675
6676 LOGE("Cannot find matching hal type fwk_name=%d", fwk_name);
6677 return NAME_NOT_FOUND;
6678}
6679
6680/*===========================================================================
6681 * FUNCTION : lookupProp
6682 *
6683 * DESCRIPTION: lookup a value by its name
6684 *
6685 * PARAMETERS :
6686 * @arr : map between the two enums
6687 * @len : size of the map
6688 * @name : name to be looked up
6689 *
6690 * RETURN : Value if found
6691 * CAM_CDS_MODE_MAX if not found
6692 *==========================================================================*/
6693template <class mapType> cam_cds_mode_type_t lookupProp(const mapType *arr,
6694 size_t len, const char *name)
6695{
6696 if (name) {
6697 for (size_t i = 0; i < len; i++) {
6698 if (!strcmp(arr[i].desc, name)) {
6699 return arr[i].val;
6700 }
6701 }
6702 }
6703 return CAM_CDS_MODE_MAX;
6704}
6705
6706/*===========================================================================
6707 * FUNCTION   : translateFromHalMetadata
6708 * DESCRIPTION: Translate metadata from the HAL/backend format into the framework camera_metadata_t format
6709 *
6710 * PARAMETERS :
6711 * @metadata : metadata information from callback
Shuzhen Wang181c57b2017-07-21 11:39:44 -07006712 * @pendingRequest: pending request for this metadata
Thierry Strudel3d639192016-09-09 11:52:26 -07006713 *   @pprocDone: whether internal offline postprocessing is done
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006714 * @lastMetadataInBatch: Boolean to indicate whether this is the last metadata
6715 *                         in a batch. Always true for non-batch mode.
 *   @enableZsl : pointer to the requested ZSL enable flag, if any
Thierry Strudel3d639192016-09-09 11:52:26 -07006716 *
6717 * RETURN : camera_metadata_t*
6718 * metadata in a format specified by fwk
6719 *==========================================================================*/
6720camera_metadata_t*
6721QCamera3HardwareInterface::translateFromHalMetadata(
6722 metadata_buffer_t *metadata,
Shuzhen Wang181c57b2017-07-21 11:39:44 -07006723 const PendingRequestInfo& pendingRequest,
Thierry Strudel3d639192016-09-09 11:52:26 -07006724 bool pprocDone,
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07006725 bool lastMetadataInBatch,
6726 const bool *enableZsl)
Thierry Strudel3d639192016-09-09 11:52:26 -07006727{
6728 CameraMetadata camMetadata;
6729 camera_metadata_t *resultMetadata;
6730
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006731 if (!lastMetadataInBatch) {
Shuzhen Wang8fe62792017-03-20 16:10:25 -07006732 /* In batch mode, only populate SENSOR_TIMESTAMP if this is not the last in batch.
6733 * Timestamp is needed because it's used for shutter notify calculation.
6734 * */
Shuzhen Wang181c57b2017-07-21 11:39:44 -07006735 camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &pendingRequest.timestamp, 1);
Shuzhen Wang8fe62792017-03-20 16:10:25 -07006736 resultMetadata = camMetadata.release();
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006737 return resultMetadata;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006738 }
6739
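    // From here on, every HAL/backend tag present in the metadata buffer is
    // translated into the corresponding framework tag via IF_META_AVAILABLE.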
Shuzhen Wang181c57b2017-07-21 11:39:44 -07006740 if (pendingRequest.jpegMetadata.entryCount())
6741 camMetadata.append(pendingRequest.jpegMetadata);
Thierry Strudel3d639192016-09-09 11:52:26 -07006742
Shuzhen Wang181c57b2017-07-21 11:39:44 -07006743 camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &pendingRequest.timestamp, 1);
6744 camMetadata.update(ANDROID_REQUEST_ID, &pendingRequest.request_id, 1);
6745 camMetadata.update(ANDROID_REQUEST_PIPELINE_DEPTH, &pendingRequest.pipeline_depth, 1);
6746 camMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &pendingRequest.capture_intent, 1);
6747 camMetadata.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &pendingRequest.hybrid_ae_enable, 1);
Samuel Ha68ba5172016-12-15 18:41:12 -08006748 if (mBatchSize == 0) {
6749 // DevCamDebug metadata translateFromHalMetadata. Only update this one for non-HFR mode
Shuzhen Wang181c57b2017-07-21 11:39:44 -07006750 camMetadata.update(DEVCAMDEBUG_META_ENABLE, &pendingRequest.DevCamDebug_meta_enable, 1);
Samuel Ha68ba5172016-12-15 18:41:12 -08006751 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006752
Samuel Ha68ba5172016-12-15 18:41:12 -08006753 // atrace_begin(ATRACE_TAG_ALWAYS, "DevCamDebugInfo");
6754    // Only update DevCamDebug metadata conditionally: non-HFR mode and when it is enabled.
Shuzhen Wang181c57b2017-07-21 11:39:44 -07006755 if (mBatchSize == 0 && pendingRequest.DevCamDebug_meta_enable != 0) {
Samuel Ha68ba5172016-12-15 18:41:12 -08006756 // DevCamDebug metadata translateFromHalMetadata AF
6757 IF_META_AVAILABLE(int32_t, DevCamDebug_af_lens_position,
6758 CAM_INTF_META_DEV_CAM_AF_LENS_POSITION, metadata) {
6759 int32_t fwk_DevCamDebug_af_lens_position = *DevCamDebug_af_lens_position;
6760 camMetadata.update(DEVCAMDEBUG_AF_LENS_POSITION, &fwk_DevCamDebug_af_lens_position, 1);
6761 }
6762 IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_confidence,
6763 CAM_INTF_META_DEV_CAM_AF_TOF_CONFIDENCE, metadata) {
6764 int32_t fwk_DevCamDebug_af_tof_confidence = *DevCamDebug_af_tof_confidence;
6765 camMetadata.update(DEVCAMDEBUG_AF_TOF_CONFIDENCE, &fwk_DevCamDebug_af_tof_confidence, 1);
6766 }
6767 IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_distance,
6768 CAM_INTF_META_DEV_CAM_AF_TOF_DISTANCE, metadata) {
6769 int32_t fwk_DevCamDebug_af_tof_distance = *DevCamDebug_af_tof_distance;
6770 camMetadata.update(DEVCAMDEBUG_AF_TOF_DISTANCE, &fwk_DevCamDebug_af_tof_distance, 1);
6771 }
6772 IF_META_AVAILABLE(int32_t, DevCamDebug_af_luma,
6773 CAM_INTF_META_DEV_CAM_AF_LUMA, metadata) {
6774 int32_t fwk_DevCamDebug_af_luma = *DevCamDebug_af_luma;
6775 camMetadata.update(DEVCAMDEBUG_AF_LUMA, &fwk_DevCamDebug_af_luma, 1);
6776 }
6777 IF_META_AVAILABLE(int32_t, DevCamDebug_af_haf_state,
6778 CAM_INTF_META_DEV_CAM_AF_HAF_STATE, metadata) {
6779 int32_t fwk_DevCamDebug_af_haf_state = *DevCamDebug_af_haf_state;
6780 camMetadata.update(DEVCAMDEBUG_AF_HAF_STATE, &fwk_DevCamDebug_af_haf_state, 1);
6781 }
6782 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_target_pos,
6783 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_TARGET_POS, metadata) {
6784 int32_t fwk_DevCamDebug_af_monitor_pdaf_target_pos =
6785 *DevCamDebug_af_monitor_pdaf_target_pos;
6786 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
6787 &fwk_DevCamDebug_af_monitor_pdaf_target_pos, 1);
6788 }
6789 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_confidence,
6790 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_CONFIDENCE, metadata) {
6791 int32_t fwk_DevCamDebug_af_monitor_pdaf_confidence =
6792 *DevCamDebug_af_monitor_pdaf_confidence;
6793 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
6794 &fwk_DevCamDebug_af_monitor_pdaf_confidence, 1);
6795 }
6796 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_refocus,
6797 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_REFOCUS, metadata) {
6798 int32_t fwk_DevCamDebug_af_monitor_pdaf_refocus = *DevCamDebug_af_monitor_pdaf_refocus;
6799 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
6800 &fwk_DevCamDebug_af_monitor_pdaf_refocus, 1);
6801 }
6802 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_target_pos,
6803 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_TARGET_POS, metadata) {
6804 int32_t fwk_DevCamDebug_af_monitor_tof_target_pos =
6805 *DevCamDebug_af_monitor_tof_target_pos;
6806 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
6807 &fwk_DevCamDebug_af_monitor_tof_target_pos, 1);
6808 }
6809 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_confidence,
6810 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_CONFIDENCE, metadata) {
6811 int32_t fwk_DevCamDebug_af_monitor_tof_confidence =
6812 *DevCamDebug_af_monitor_tof_confidence;
6813 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
6814 &fwk_DevCamDebug_af_monitor_tof_confidence, 1);
6815 }
6816 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_refocus,
6817 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_REFOCUS, metadata) {
6818 int32_t fwk_DevCamDebug_af_monitor_tof_refocus = *DevCamDebug_af_monitor_tof_refocus;
6819 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
6820 &fwk_DevCamDebug_af_monitor_tof_refocus, 1);
6821 }
6822 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_type_select,
6823 CAM_INTF_META_DEV_CAM_AF_MONITOR_TYPE_SELECT, metadata) {
6824 int32_t fwk_DevCamDebug_af_monitor_type_select = *DevCamDebug_af_monitor_type_select;
6825 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
6826 &fwk_DevCamDebug_af_monitor_type_select, 1);
6827 }
6828 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_refocus,
6829 CAM_INTF_META_DEV_CAM_AF_MONITOR_REFOCUS, metadata) {
6830 int32_t fwk_DevCamDebug_af_monitor_refocus = *DevCamDebug_af_monitor_refocus;
6831 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_REFOCUS,
6832 &fwk_DevCamDebug_af_monitor_refocus, 1);
6833 }
6834 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_target_pos,
6835 CAM_INTF_META_DEV_CAM_AF_MONITOR_TARGET_POS, metadata) {
6836 int32_t fwk_DevCamDebug_af_monitor_target_pos = *DevCamDebug_af_monitor_target_pos;
6837 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
6838 &fwk_DevCamDebug_af_monitor_target_pos, 1);
6839 }
6840 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_target_pos,
6841 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_TARGET_POS, metadata) {
6842 int32_t fwk_DevCamDebug_af_search_pdaf_target_pos =
6843 *DevCamDebug_af_search_pdaf_target_pos;
6844 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
6845 &fwk_DevCamDebug_af_search_pdaf_target_pos, 1);
6846 }
6847 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_next_pos,
6848 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEXT_POS, metadata) {
6849 int32_t fwk_DevCamDebug_af_search_pdaf_next_pos = *DevCamDebug_af_search_pdaf_next_pos;
6850 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
6851 &fwk_DevCamDebug_af_search_pdaf_next_pos, 1);
6852 }
6853 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_near_pos,
6854 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEAR_POS, metadata) {
6855 int32_t fwk_DevCamDebug_af_search_pdaf_near_pos = *DevCamDebug_af_search_pdaf_near_pos;
6856 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
6857 &fwk_DevCamDebug_af_search_pdaf_near_pos, 1);
6858 }
6859 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_far_pos,
6860 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_FAR_POS, metadata) {
6861 int32_t fwk_DevCamDebug_af_search_pdaf_far_pos = *DevCamDebug_af_search_pdaf_far_pos;
6862 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
6863 &fwk_DevCamDebug_af_search_pdaf_far_pos, 1);
6864 }
6865 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_confidence,
6866 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_CONFIDENCE, metadata) {
6867 int32_t fwk_DevCamDebug_af_search_pdaf_confidence = *DevCamDebug_af_search_pdaf_confidence;
6868 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
6869 &fwk_DevCamDebug_af_search_pdaf_confidence, 1);
6870 }
6871 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_target_pos,
6872 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_TARGET_POS, metadata) {
6873 int32_t fwk_DevCamDebug_af_search_tof_target_pos =
6874 *DevCamDebug_af_search_tof_target_pos;
6875 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
6876 &fwk_DevCamDebug_af_search_tof_target_pos, 1);
6877 }
6878 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_next_pos,
6879 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEXT_POS, metadata) {
6880 int32_t fwk_DevCamDebug_af_search_tof_next_pos = *DevCamDebug_af_search_tof_next_pos;
6881 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
6882 &fwk_DevCamDebug_af_search_tof_next_pos, 1);
6883 }
6884 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_near_pos,
6885 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEAR_POS, metadata) {
6886 int32_t fwk_DevCamDebug_af_search_tof_near_pos = *DevCamDebug_af_search_tof_near_pos;
6887 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
6888 &fwk_DevCamDebug_af_search_tof_near_pos, 1);
6889 }
6890 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_far_pos,
6891 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_FAR_POS, metadata) {
6892 int32_t fwk_DevCamDebug_af_search_tof_far_pos = *DevCamDebug_af_search_tof_far_pos;
6893 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
6894 &fwk_DevCamDebug_af_search_tof_far_pos, 1);
6895 }
6896 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_confidence,
6897 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_CONFIDENCE, metadata) {
6898 int32_t fwk_DevCamDebug_af_search_tof_confidence = *DevCamDebug_af_search_tof_confidence;
6899 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
6900 &fwk_DevCamDebug_af_search_tof_confidence, 1);
6901 }
6902 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_type_select,
6903 CAM_INTF_META_DEV_CAM_AF_SEARCH_TYPE_SELECT, metadata) {
6904 int32_t fwk_DevCamDebug_af_search_type_select = *DevCamDebug_af_search_type_select;
6905 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
6906 &fwk_DevCamDebug_af_search_type_select, 1);
6907 }
6908 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_next_pos,
6909 CAM_INTF_META_DEV_CAM_AF_SEARCH_NEXT_POS, metadata) {
6910 int32_t fwk_DevCamDebug_af_search_next_pos = *DevCamDebug_af_search_next_pos;
6911 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
6912 &fwk_DevCamDebug_af_search_next_pos, 1);
6913 }
6914 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_target_pos,
6915 CAM_INTF_META_DEV_CAM_AF_SEARCH_TARGET_POS, metadata) {
6916 int32_t fwk_DevCamDebug_af_search_target_pos = *DevCamDebug_af_search_target_pos;
6917 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
6918 &fwk_DevCamDebug_af_search_target_pos, 1);
6919 }
6920 // DevCamDebug metadata translateFromHalMetadata AEC
6921 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_target_luma,
6922 CAM_INTF_META_DEV_CAM_AEC_TARGET_LUMA, metadata) {
6923 int32_t fwk_DevCamDebug_aec_target_luma = *DevCamDebug_aec_target_luma;
6924 camMetadata.update(DEVCAMDEBUG_AEC_TARGET_LUMA, &fwk_DevCamDebug_aec_target_luma, 1);
6925 }
6926 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_comp_luma,
6927 CAM_INTF_META_DEV_CAM_AEC_COMP_LUMA, metadata) {
6928 int32_t fwk_DevCamDebug_aec_comp_luma = *DevCamDebug_aec_comp_luma;
6929 camMetadata.update(DEVCAMDEBUG_AEC_COMP_LUMA, &fwk_DevCamDebug_aec_comp_luma, 1);
6930 }
6931 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_avg_luma,
6932 CAM_INTF_META_DEV_CAM_AEC_AVG_LUMA, metadata) {
6933 int32_t fwk_DevCamDebug_aec_avg_luma = *DevCamDebug_aec_avg_luma;
6934 camMetadata.update(DEVCAMDEBUG_AEC_AVG_LUMA, &fwk_DevCamDebug_aec_avg_luma, 1);
6935 }
6936 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_cur_luma,
6937 CAM_INTF_META_DEV_CAM_AEC_CUR_LUMA, metadata) {
6938 int32_t fwk_DevCamDebug_aec_cur_luma = *DevCamDebug_aec_cur_luma;
6939 camMetadata.update(DEVCAMDEBUG_AEC_CUR_LUMA, &fwk_DevCamDebug_aec_cur_luma, 1);
6940 }
6941 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_linecount,
6942 CAM_INTF_META_DEV_CAM_AEC_LINECOUNT, metadata) {
6943 int32_t fwk_DevCamDebug_aec_linecount = *DevCamDebug_aec_linecount;
6944 camMetadata.update(DEVCAMDEBUG_AEC_LINECOUNT, &fwk_DevCamDebug_aec_linecount, 1);
6945 }
6946 IF_META_AVAILABLE(float, DevCamDebug_aec_real_gain,
6947 CAM_INTF_META_DEV_CAM_AEC_REAL_GAIN, metadata) {
6948 float fwk_DevCamDebug_aec_real_gain = *DevCamDebug_aec_real_gain;
6949 camMetadata.update(DEVCAMDEBUG_AEC_REAL_GAIN, &fwk_DevCamDebug_aec_real_gain, 1);
6950 }
6951 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_exp_index,
6952 CAM_INTF_META_DEV_CAM_AEC_EXP_INDEX, metadata) {
6953 int32_t fwk_DevCamDebug_aec_exp_index = *DevCamDebug_aec_exp_index;
6954 camMetadata.update(DEVCAMDEBUG_AEC_EXP_INDEX, &fwk_DevCamDebug_aec_exp_index, 1);
6955 }
6956 IF_META_AVAILABLE(float, DevCamDebug_aec_lux_idx,
6957 CAM_INTF_META_DEV_CAM_AEC_LUX_IDX, metadata) {
6958 float fwk_DevCamDebug_aec_lux_idx = *DevCamDebug_aec_lux_idx;
6959 camMetadata.update(DEVCAMDEBUG_AEC_LUX_IDX, &fwk_DevCamDebug_aec_lux_idx, 1);
6960 }
Samuel Ha34229982017-02-17 13:51:11 -08006961 // DevCamDebug metadata translateFromHalMetadata zzHDR
6962 IF_META_AVAILABLE(float, DevCamDebug_aec_l_real_gain,
6963 CAM_INTF_META_DEV_CAM_AEC_L_REAL_GAIN, metadata) {
6964 float fwk_DevCamDebug_aec_l_real_gain = *DevCamDebug_aec_l_real_gain;
6965 camMetadata.update(DEVCAMDEBUG_AEC_L_REAL_GAIN, &fwk_DevCamDebug_aec_l_real_gain, 1);
6966 }
6967 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_l_linecount,
6968 CAM_INTF_META_DEV_CAM_AEC_L_LINECOUNT, metadata) {
Samuel Hae80fbd52017-03-29 16:14:46 -07006969 int32_t fwk_DevCamDebug_aec_l_linecount = *DevCamDebug_aec_l_linecount;
Samuel Ha34229982017-02-17 13:51:11 -08006970 camMetadata.update(DEVCAMDEBUG_AEC_L_LINECOUNT, &fwk_DevCamDebug_aec_l_linecount, 1);
6971 }
6972 IF_META_AVAILABLE(float, DevCamDebug_aec_s_real_gain,
6973 CAM_INTF_META_DEV_CAM_AEC_S_REAL_GAIN, metadata) {
6974 float fwk_DevCamDebug_aec_s_real_gain = *DevCamDebug_aec_s_real_gain;
6975 camMetadata.update(DEVCAMDEBUG_AEC_S_REAL_GAIN, &fwk_DevCamDebug_aec_s_real_gain, 1);
6976 }
6977 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_s_linecount,
6978 CAM_INTF_META_DEV_CAM_AEC_S_LINECOUNT, metadata) {
Samuel Hae80fbd52017-03-29 16:14:46 -07006979 int32_t fwk_DevCamDebug_aec_s_linecount = *DevCamDebug_aec_s_linecount;
Samuel Ha34229982017-02-17 13:51:11 -08006980 camMetadata.update(DEVCAMDEBUG_AEC_S_LINECOUNT, &fwk_DevCamDebug_aec_s_linecount, 1);
6981 }
6982 IF_META_AVAILABLE(float, DevCamDebug_aec_hdr_sensitivity_ratio,
6983 CAM_INTF_META_DEV_CAM_AEC_HDR_SENSITIVITY_RATIO, metadata) {
6984 float fwk_DevCamDebug_aec_hdr_sensitivity_ratio =
6985 *DevCamDebug_aec_hdr_sensitivity_ratio;
6986 camMetadata.update(DEVCAMDEBUG_AEC_HDR_SENSITIVITY_RATIO,
6987 &fwk_DevCamDebug_aec_hdr_sensitivity_ratio, 1);
6988 }
6989 IF_META_AVAILABLE(float, DevCamDebug_aec_hdr_exp_time_ratio,
6990 CAM_INTF_META_DEV_CAM_AEC_HDR_EXP_TIME_RATIO, metadata) {
6991 float fwk_DevCamDebug_aec_hdr_exp_time_ratio = *DevCamDebug_aec_hdr_exp_time_ratio;
6992 camMetadata.update(DEVCAMDEBUG_AEC_HDR_EXP_TIME_RATIO,
6993 &fwk_DevCamDebug_aec_hdr_exp_time_ratio, 1);
6994 }
6995 // DevCamDebug metadata translateFromHalMetadata ADRC
6996 IF_META_AVAILABLE(float, DevCamDebug_aec_total_drc_gain,
6997 CAM_INTF_META_DEV_CAM_AEC_TOTAL_DRC_GAIN, metadata) {
6998 float fwk_DevCamDebug_aec_total_drc_gain = *DevCamDebug_aec_total_drc_gain;
6999 camMetadata.update(DEVCAMDEBUG_AEC_TOTAL_DRC_GAIN,
7000 &fwk_DevCamDebug_aec_total_drc_gain, 1);
7001 }
7002 IF_META_AVAILABLE(float, DevCamDebug_aec_color_drc_gain,
7003 CAM_INTF_META_DEV_CAM_AEC_COLOR_DRC_GAIN, metadata) {
7004 float fwk_DevCamDebug_aec_color_drc_gain = *DevCamDebug_aec_color_drc_gain;
7005 camMetadata.update(DEVCAMDEBUG_AEC_COLOR_DRC_GAIN,
7006 &fwk_DevCamDebug_aec_color_drc_gain, 1);
7007 }
7008 IF_META_AVAILABLE(float, DevCamDebug_aec_gtm_ratio,
7009 CAM_INTF_META_DEV_CAM_AEC_GTM_RATIO, metadata) {
7010 float fwk_DevCamDebug_aec_gtm_ratio = *DevCamDebug_aec_gtm_ratio;
7011 camMetadata.update(DEVCAMDEBUG_AEC_GTM_RATIO, &fwk_DevCamDebug_aec_gtm_ratio, 1);
7012 }
7013 IF_META_AVAILABLE(float, DevCamDebug_aec_ltm_ratio,
7014 CAM_INTF_META_DEV_CAM_AEC_LTM_RATIO, metadata) {
7015 float fwk_DevCamDebug_aec_ltm_ratio = *DevCamDebug_aec_ltm_ratio;
7016 camMetadata.update(DEVCAMDEBUG_AEC_LTM_RATIO, &fwk_DevCamDebug_aec_ltm_ratio, 1);
7017 }
7018 IF_META_AVAILABLE(float, DevCamDebug_aec_la_ratio,
7019 CAM_INTF_META_DEV_CAM_AEC_LA_RATIO, metadata) {
7020 float fwk_DevCamDebug_aec_la_ratio = *DevCamDebug_aec_la_ratio;
7021 camMetadata.update(DEVCAMDEBUG_AEC_LA_RATIO, &fwk_DevCamDebug_aec_la_ratio, 1);
7022 }
7023 IF_META_AVAILABLE(float, DevCamDebug_aec_gamma_ratio,
7024 CAM_INTF_META_DEV_CAM_AEC_GAMMA_RATIO, metadata) {
7025 float fwk_DevCamDebug_aec_gamma_ratio = *DevCamDebug_aec_gamma_ratio;
7026 camMetadata.update(DEVCAMDEBUG_AEC_GAMMA_RATIO, &fwk_DevCamDebug_aec_gamma_ratio, 1);
7027 }
Samuel Habdf4fac2017-07-28 17:21:18 -07007028 // DevCamDebug metadata translateFromHalMetadata AEC MOTION
7029 IF_META_AVAILABLE(float, DevCamDebug_aec_camera_motion_dx,
7030 CAM_INTF_META_DEV_CAM_AEC_CAMERA_MOTION_DX, metadata) {
7031 float fwk_DevCamDebug_aec_camera_motion_dx = *DevCamDebug_aec_camera_motion_dx;
7032 camMetadata.update(DEVCAMDEBUG_AEC_CAMERA_MOTION_DX,
7033 &fwk_DevCamDebug_aec_camera_motion_dx, 1);
7034 }
7035 IF_META_AVAILABLE(float, DevCamDebug_aec_camera_motion_dy,
7036 CAM_INTF_META_DEV_CAM_AEC_CAMERA_MOTION_DY, metadata) {
7037 float fwk_DevCamDebug_aec_camera_motion_dy = *DevCamDebug_aec_camera_motion_dy;
7038 camMetadata.update(DEVCAMDEBUG_AEC_CAMERA_MOTION_DY,
7039 &fwk_DevCamDebug_aec_camera_motion_dy, 1);
7040 }
7041 IF_META_AVAILABLE(float, DevCamDebug_aec_subject_motion,
7042 CAM_INTF_META_DEV_CAM_AEC_SUBJECT_MOTION, metadata) {
7043 float fwk_DevCamDebug_aec_subject_motion = *DevCamDebug_aec_subject_motion;
7044 camMetadata.update(DEVCAMDEBUG_AEC_SUBJECT_MOTION,
7045 &fwk_DevCamDebug_aec_subject_motion, 1);
7046 }
Samuel Ha68ba5172016-12-15 18:41:12 -08007047 // DevCamDebug metadata translateFromHalMetadata AWB
7048 IF_META_AVAILABLE(float, DevCamDebug_awb_r_gain,
7049 CAM_INTF_META_DEV_CAM_AWB_R_GAIN, metadata) {
7050 float fwk_DevCamDebug_awb_r_gain = *DevCamDebug_awb_r_gain;
7051 camMetadata.update(DEVCAMDEBUG_AWB_R_GAIN, &fwk_DevCamDebug_awb_r_gain, 1);
7052 }
7053 IF_META_AVAILABLE(float, DevCamDebug_awb_g_gain,
7054 CAM_INTF_META_DEV_CAM_AWB_G_GAIN, metadata) {
7055 float fwk_DevCamDebug_awb_g_gain = *DevCamDebug_awb_g_gain;
7056 camMetadata.update(DEVCAMDEBUG_AWB_G_GAIN, &fwk_DevCamDebug_awb_g_gain, 1);
7057 }
7058 IF_META_AVAILABLE(float, DevCamDebug_awb_b_gain,
7059 CAM_INTF_META_DEV_CAM_AWB_B_GAIN, metadata) {
7060 float fwk_DevCamDebug_awb_b_gain = *DevCamDebug_awb_b_gain;
7061 camMetadata.update(DEVCAMDEBUG_AWB_B_GAIN, &fwk_DevCamDebug_awb_b_gain, 1);
7062 }
7063 IF_META_AVAILABLE(int32_t, DevCamDebug_awb_cct,
7064 CAM_INTF_META_DEV_CAM_AWB_CCT, metadata) {
7065 int32_t fwk_DevCamDebug_awb_cct = *DevCamDebug_awb_cct;
7066 camMetadata.update(DEVCAMDEBUG_AWB_CCT, &fwk_DevCamDebug_awb_cct, 1);
7067 }
7068 IF_META_AVAILABLE(int32_t, DevCamDebug_awb_decision,
7069 CAM_INTF_META_DEV_CAM_AWB_DECISION, metadata) {
7070 int32_t fwk_DevCamDebug_awb_decision = *DevCamDebug_awb_decision;
7071 camMetadata.update(DEVCAMDEBUG_AWB_DECISION, &fwk_DevCamDebug_awb_decision, 1);
7072 }
7073 }
7074 // atrace_end(ATRACE_TAG_ALWAYS);
7075
Thierry Strudel3d639192016-09-09 11:52:26 -07007076 IF_META_AVAILABLE(uint32_t, frame_number, CAM_INTF_META_FRAME_NUMBER, metadata) {
7077 int64_t fwk_frame_number = *frame_number;
7078 camMetadata.update(ANDROID_SYNC_FRAME_NUMBER, &fwk_frame_number, 1);
7079 }
7080
7081 IF_META_AVAILABLE(cam_fps_range_t, float_range, CAM_INTF_PARM_FPS_RANGE, metadata) {
7082 int32_t fps_range[2];
7083 fps_range[0] = (int32_t)float_range->min_fps;
7084 fps_range[1] = (int32_t)float_range->max_fps;
7085 camMetadata.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
7086 fps_range, 2);
7087 LOGD("urgent Metadata : ANDROID_CONTROL_AE_TARGET_FPS_RANGE [%d, %d]",
7088 fps_range[0], fps_range[1]);
7089 }
7090
7091 IF_META_AVAILABLE(int32_t, expCompensation, CAM_INTF_PARM_EXPOSURE_COMPENSATION, metadata) {
7092 camMetadata.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, expCompensation, 1);
7093 }
7094
7095 IF_META_AVAILABLE(uint32_t, sceneMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
7096 int val = (uint8_t)lookupFwkName(SCENE_MODES_MAP,
7097 METADATA_MAP_SIZE(SCENE_MODES_MAP),
7098 *sceneMode);
7099 if (NAME_NOT_FOUND != val) {
7100 uint8_t fwkSceneMode = (uint8_t)val;
7101 camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkSceneMode, 1);
7102 LOGD("urgent Metadata : ANDROID_CONTROL_SCENE_MODE: %d",
7103 fwkSceneMode);
7104 }
7105 }
7106
7107 IF_META_AVAILABLE(uint32_t, ae_lock, CAM_INTF_PARM_AEC_LOCK, metadata) {
7108 uint8_t fwk_ae_lock = (uint8_t) *ae_lock;
7109 camMetadata.update(ANDROID_CONTROL_AE_LOCK, &fwk_ae_lock, 1);
7110 }
7111
7112 IF_META_AVAILABLE(uint32_t, awb_lock, CAM_INTF_PARM_AWB_LOCK, metadata) {
7113 uint8_t fwk_awb_lock = (uint8_t) *awb_lock;
7114 camMetadata.update(ANDROID_CONTROL_AWB_LOCK, &fwk_awb_lock, 1);
7115 }
7116
7117 IF_META_AVAILABLE(uint32_t, color_correct_mode, CAM_INTF_META_COLOR_CORRECT_MODE, metadata) {
7118 uint8_t fwk_color_correct_mode = (uint8_t) *color_correct_mode;
7119 camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, &fwk_color_correct_mode, 1);
7120 }
7121
7122 IF_META_AVAILABLE(cam_edge_application_t, edgeApplication,
7123 CAM_INTF_META_EDGE_MODE, metadata) {
7124 camMetadata.update(ANDROID_EDGE_MODE, &(edgeApplication->edge_mode), 1);
7125 }
7126
7127 IF_META_AVAILABLE(uint32_t, flashPower, CAM_INTF_META_FLASH_POWER, metadata) {
7128 uint8_t fwk_flashPower = (uint8_t) *flashPower;
7129 camMetadata.update(ANDROID_FLASH_FIRING_POWER, &fwk_flashPower, 1);
7130 }
7131
7132 IF_META_AVAILABLE(int64_t, flashFiringTime, CAM_INTF_META_FLASH_FIRING_TIME, metadata) {
7133 camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1);
7134 }
7135
7136 IF_META_AVAILABLE(int32_t, flashState, CAM_INTF_META_FLASH_STATE, metadata) {
7137 if (0 <= *flashState) {
7138 uint8_t fwk_flashState = (uint8_t) *flashState;
7139 if (!gCamCapability[mCameraId]->flash_available) {
7140 fwk_flashState = ANDROID_FLASH_STATE_UNAVAILABLE;
7141 }
7142 camMetadata.update(ANDROID_FLASH_STATE, &fwk_flashState, 1);
7143 }
7144 }
7145
7146 IF_META_AVAILABLE(uint32_t, flashMode, CAM_INTF_META_FLASH_MODE, metadata) {
7147 int val = lookupFwkName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP), *flashMode);
7148 if (NAME_NOT_FOUND != val) {
7149 uint8_t fwk_flashMode = (uint8_t)val;
7150 camMetadata.update(ANDROID_FLASH_MODE, &fwk_flashMode, 1);
7151 }
7152 }
7153
7154 IF_META_AVAILABLE(uint32_t, hotPixelMode, CAM_INTF_META_HOTPIXEL_MODE, metadata) {
7155 uint8_t fwk_hotPixelMode = (uint8_t) *hotPixelMode;
7156 camMetadata.update(ANDROID_HOT_PIXEL_MODE, &fwk_hotPixelMode, 1);
7157 }
7158
7159 IF_META_AVAILABLE(float, lensAperture, CAM_INTF_META_LENS_APERTURE, metadata) {
7160 camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1);
7161 }
7162
7163 IF_META_AVAILABLE(float, filterDensity, CAM_INTF_META_LENS_FILTERDENSITY, metadata) {
7164 camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1);
7165 }
7166
7167 IF_META_AVAILABLE(float, focalLength, CAM_INTF_META_LENS_FOCAL_LENGTH, metadata) {
7168 camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1);
7169 }
7170
7171 IF_META_AVAILABLE(uint32_t, opticalStab, CAM_INTF_META_LENS_OPT_STAB_MODE, metadata) {
7172 uint8_t fwk_opticalStab = (uint8_t) *opticalStab;
7173 camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &fwk_opticalStab, 1);
7174 }
7175
7176 IF_META_AVAILABLE(uint32_t, videoStab, CAM_INTF_META_VIDEO_STAB_MODE, metadata) {
7177 uint8_t fwk_videoStab = (uint8_t) *videoStab;
7178 LOGD("fwk_videoStab = %d", fwk_videoStab);
7179 camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwk_videoStab, 1);
7180 } else {
7181        // Regardless of whether video stabilization is supported, CTS expects the EIS result to be non-NULL,
7182        // so hardcode the video stabilization result to OFF mode.
7183 uint8_t fwkVideoStabMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
7184 camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwkVideoStabMode, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007185 LOGD("EIS result default to OFF mode");
Thierry Strudel3d639192016-09-09 11:52:26 -07007186 }
7187
7188 IF_META_AVAILABLE(uint32_t, noiseRedMode, CAM_INTF_META_NOISE_REDUCTION_MODE, metadata) {
7189 uint8_t fwk_noiseRedMode = (uint8_t) *noiseRedMode;
7190 camMetadata.update(ANDROID_NOISE_REDUCTION_MODE, &fwk_noiseRedMode, 1);
7191 }
7192
7193 IF_META_AVAILABLE(float, effectiveExposureFactor, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR, metadata) {
7194 camMetadata.update(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR, effectiveExposureFactor, 1);
7195 }
7196
Thierry Strudel3d639192016-09-09 11:52:26 -07007197 IF_META_AVAILABLE(cam_black_level_metadata_t, blackLevelAppliedPattern,
7198 CAM_INTF_META_BLACK_LEVEL_APPLIED_PATTERN, metadata) {
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007199 float fwk_blackLevelInd[BLACK_LEVEL_PATTERN_CNT];
Thierry Strudel3d639192016-09-09 11:52:26 -07007200
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007201 adjustBlackLevelForCFA(blackLevelAppliedPattern->cam_black_level, fwk_blackLevelInd,
7202 gCamCapability[mCameraId]->color_arrangement);
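        // adjustBlackLevelForCFA() reorders the per-channel black levels into
        // RGGB order based on the sensor's color filter arrangement.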
Thierry Strudel3d639192016-09-09 11:52:26 -07007203
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007204 LOGD("applied dynamicblackLevel in RGGB order = %f %f %f %f",
Thierry Strudel3d639192016-09-09 11:52:26 -07007205 blackLevelAppliedPattern->cam_black_level[0],
7206 blackLevelAppliedPattern->cam_black_level[1],
7207 blackLevelAppliedPattern->cam_black_level[2],
7208 blackLevelAppliedPattern->cam_black_level[3]);
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007209 camMetadata.update(QCAMERA3_SENSOR_DYNAMIC_BLACK_LEVEL_PATTERN, fwk_blackLevelInd,
7210 BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007211
7212#ifndef USE_HAL_3_3
7213 // Update the ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL
Jason Lee4f3d96e2017-02-28 19:24:14 +05307214        // Need to convert the internal 14-bit depth to the sensor's 10-bit raw
Zhijun Heb753c672016-06-15 14:50:48 -07007215        // depth space (divide by 2^4 = 16).
Jason Lee4f3d96e2017-02-28 19:24:14 +05307216 fwk_blackLevelInd[0] /= 16.0;
7217 fwk_blackLevelInd[1] /= 16.0;
7218 fwk_blackLevelInd[2] /= 16.0;
7219 fwk_blackLevelInd[3] /= 16.0;
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007220 camMetadata.update(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL, fwk_blackLevelInd,
7221 BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007222#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07007223 }
7224
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007225#ifndef USE_HAL_3_3
7226 // Fixed whitelevel is used by ISP/Sensor
7227 camMetadata.update(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL,
7228 &gCamCapability[mCameraId]->white_level, 1);
7229#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07007230
7231 IF_META_AVAILABLE(cam_crop_region_t, hScalerCropRegion,
7232 CAM_INTF_META_SCALER_CROP_REGION, metadata) {
7233 int32_t scalerCropRegion[4];
7234 scalerCropRegion[0] = hScalerCropRegion->left;
7235 scalerCropRegion[1] = hScalerCropRegion->top;
7236 scalerCropRegion[2] = hScalerCropRegion->width;
7237 scalerCropRegion[3] = hScalerCropRegion->height;
7238
7239 // Adjust crop region from sensor output coordinate system to active
7240 // array coordinate system.
7241 mCropRegionMapper.toActiveArray(scalerCropRegion[0], scalerCropRegion[1],
7242 scalerCropRegion[2], scalerCropRegion[3]);
7243
7244 camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 4);
7245 }
7246
7247 IF_META_AVAILABLE(int64_t, sensorExpTime, CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata) {
7248 LOGD("sensorExpTime = %lld", *sensorExpTime);
7249 camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1);
7250 }
7251
Shuzhen Wang6a1dd612017-08-05 15:03:53 -07007252 IF_META_AVAILABLE(float, expTimeBoost, CAM_INTF_META_EXP_TIME_BOOST, metadata) {
7253 LOGD("expTimeBoost = %f", *expTimeBoost);
7254 camMetadata.update(NEXUS_EXPERIMENTAL_2017_EXP_TIME_BOOST, expTimeBoost, 1);
7255 }
7256
Thierry Strudel3d639192016-09-09 11:52:26 -07007257    IF_META_AVAILABLE(int64_t, sensorFrameDuration,
7258            CAM_INTF_META_SENSOR_FRAME_DURATION, metadata) {
7259        LOGD("sensorFrameDuration = %lld", *sensorFrameDuration);
7260        camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFrameDuration, 1);
7261 }
7262
7263 IF_META_AVAILABLE(int64_t, sensorRollingShutterSkew,
7264 CAM_INTF_META_SENSOR_ROLLING_SHUTTER_SKEW, metadata) {
7265 LOGD("sensorRollingShutterSkew = %lld", *sensorRollingShutterSkew);
7266 camMetadata.update(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW,
7267 sensorRollingShutterSkew, 1);
7268 }
7269
7270 IF_META_AVAILABLE(int32_t, sensorSensitivity, CAM_INTF_META_SENSOR_SENSITIVITY, metadata) {
7271 LOGD("sensorSensitivity = %d", *sensorSensitivity);
7272 camMetadata.update(ANDROID_SENSOR_SENSITIVITY, sensorSensitivity, 1);
7273
7274 //calculate the noise profile based on sensitivity
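        // ANDROID_SENSOR_NOISE_PROFILE models the noise variance at signal level x
        // as S * x + O; every color channel is reported with the same (S, O) pair.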
7275 double noise_profile_S = computeNoiseModelEntryS(*sensorSensitivity);
7276 double noise_profile_O = computeNoiseModelEntryO(*sensorSensitivity);
7277 double noise_profile[2 * gCamCapability[mCameraId]->num_color_channels];
7278 for (int i = 0; i < 2 * gCamCapability[mCameraId]->num_color_channels; i += 2) {
7279 noise_profile[i] = noise_profile_S;
7280 noise_profile[i+1] = noise_profile_O;
7281 }
7282 LOGD("noise model entry (S, O) is (%f, %f)",
7283 noise_profile_S, noise_profile_O);
7284 camMetadata.update(ANDROID_SENSOR_NOISE_PROFILE, noise_profile,
7285 (size_t) (2 * gCamCapability[mCameraId]->num_color_channels));
7286 }
7287
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007288#ifndef USE_HAL_3_3
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07007289 int32_t fwk_ispSensitivity = 100;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007290 IF_META_AVAILABLE(int32_t, ispSensitivity, CAM_INTF_META_ISP_SENSITIVITY, metadata) {
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07007291 fwk_ispSensitivity = (int32_t) *ispSensitivity;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007292 }
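    // Scale the ISP sensitivity by any post-stats sensitivity so the reported
    // POST_RAW_SENSITIVITY_BOOST reflects the total digital gain applied after RAW capture.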
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07007293 IF_META_AVAILABLE(float, postStatsSensitivity, CAM_INTF_META_ISP_POST_STATS_SENSITIVITY, metadata) {
7294 fwk_ispSensitivity = (int32_t) (*postStatsSensitivity * fwk_ispSensitivity);
7295 }
7296 camMetadata.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &fwk_ispSensitivity, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007297#endif
7298
Thierry Strudel3d639192016-09-09 11:52:26 -07007299 IF_META_AVAILABLE(uint32_t, shadingMode, CAM_INTF_META_SHADING_MODE, metadata) {
7300 uint8_t fwk_shadingMode = (uint8_t) *shadingMode;
7301 camMetadata.update(ANDROID_SHADING_MODE, &fwk_shadingMode, 1);
7302 }
7303
7304 IF_META_AVAILABLE(uint32_t, faceDetectMode, CAM_INTF_META_STATS_FACEDETECT_MODE, metadata) {
7305 int val = lookupFwkName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
7306 *faceDetectMode);
7307 if (NAME_NOT_FOUND != val) {
7308 uint8_t fwk_faceDetectMode = (uint8_t)val;
7309 camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &fwk_faceDetectMode, 1);
7310
7311 if (fwk_faceDetectMode != ANDROID_STATISTICS_FACE_DETECT_MODE_OFF) {
7312 IF_META_AVAILABLE(cam_face_detection_data_t, faceDetectionInfo,
7313 CAM_INTF_META_FACE_DETECTION, metadata) {
7314 uint8_t numFaces = MIN(
7315 faceDetectionInfo->num_faces_detected, MAX_ROI);
7316 int32_t faceIds[MAX_ROI];
7317 uint8_t faceScores[MAX_ROI];
7318 int32_t faceRectangles[MAX_ROI * 4];
7319 int32_t faceLandmarks[MAX_ROI * 6];
7320 size_t j = 0, k = 0;
7321
7322 for (size_t i = 0; i < numFaces; i++) {
7323 faceScores[i] = (uint8_t)faceDetectionInfo->faces[i].score;
7324 // Adjust crop region from sensor output coordinate system to active
7325 // array coordinate system.
Shuzhen Wang91c14e82017-08-21 17:56:57 -07007326 cam_rect_t rect = faceDetectionInfo->faces[i].face_boundary;
Thierry Strudel3d639192016-09-09 11:52:26 -07007327 mCropRegionMapper.toActiveArray(rect.left, rect.top,
7328 rect.width, rect.height);
7329
Shuzhen Wang91c14e82017-08-21 17:56:57 -07007330 convertToRegions(rect, faceRectangles+j, -1);
Thierry Strudel3d639192016-09-09 11:52:26 -07007331
Jason Lee8ce36fa2017-04-19 19:40:37 -07007332 LOGL("FD_DEBUG : Frame[%d] Face[%d] : top-left (%d, %d), "
7333 "bottom-right (%d, %d)",
7334 faceDetectionInfo->frame_id, i,
7335 faceRectangles[j + FACE_LEFT], faceRectangles[j + FACE_TOP],
7336 faceRectangles[j + FACE_RIGHT], faceRectangles[j + FACE_BOTTOM]);
7337
Thierry Strudel3d639192016-09-09 11:52:26 -07007338 j+= 4;
7339 }
7340 if (numFaces <= 0) {
7341 memset(faceIds, 0, sizeof(int32_t) * MAX_ROI);
7342 memset(faceScores, 0, sizeof(uint8_t) * MAX_ROI);
7343 memset(faceRectangles, 0, sizeof(int32_t) * MAX_ROI * 4);
7344 memset(faceLandmarks, 0, sizeof(int32_t) * MAX_ROI * 6);
7345 }
7346
7347 camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores,
7348 numFaces);
7349 camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES,
7350 faceRectangles, numFaces * 4U);
7351 if (fwk_faceDetectMode ==
7352 ANDROID_STATISTICS_FACE_DETECT_MODE_FULL) {
7353 IF_META_AVAILABLE(cam_face_landmarks_data_t, landmarks,
7354 CAM_INTF_META_FACE_LANDMARK, metadata) {
7355
7356 for (size_t i = 0; i < numFaces; i++) {
Shuzhen Wang91c14e82017-08-21 17:56:57 -07007357 cam_face_landmarks_info_t face_landmarks = landmarks->face_landmarks[i];
Thierry Strudel3d639192016-09-09 11:52:26 -07007358 // Map the co-ordinate sensor output coordinate system to active
7359 // array coordinate system.
7360 mCropRegionMapper.toActiveArray(
Shuzhen Wang91c14e82017-08-21 17:56:57 -07007361 face_landmarks.left_eye_center.x,
7362 face_landmarks.left_eye_center.y);
Thierry Strudel3d639192016-09-09 11:52:26 -07007363 mCropRegionMapper.toActiveArray(
Shuzhen Wang91c14e82017-08-21 17:56:57 -07007364 face_landmarks.right_eye_center.x,
7365 face_landmarks.right_eye_center.y);
Thierry Strudel3d639192016-09-09 11:52:26 -07007366 mCropRegionMapper.toActiveArray(
Shuzhen Wang91c14e82017-08-21 17:56:57 -07007367 face_landmarks.mouth_center.x,
7368 face_landmarks.mouth_center.y);
Thierry Strudel3d639192016-09-09 11:52:26 -07007369
Shuzhen Wang91c14e82017-08-21 17:56:57 -07007370 convertLandmarks(face_landmarks, faceLandmarks+k);
Jason Lee8ce36fa2017-04-19 19:40:37 -07007371
7372 LOGL("FD_DEBUG LANDMARK : Frame[%d] Face[%d] : "
7373 "left-eye (%d, %d), right-eye (%d, %d), mouth (%d, %d)",
7374 faceDetectionInfo->frame_id, i,
7375 faceLandmarks[k + LEFT_EYE_X],
7376 faceLandmarks[k + LEFT_EYE_Y],
7377 faceLandmarks[k + RIGHT_EYE_X],
7378 faceLandmarks[k + RIGHT_EYE_Y],
7379 faceLandmarks[k + MOUTH_X],
7380 faceLandmarks[k + MOUTH_Y]);
7381
Thierry Strudel04e026f2016-10-10 11:27:36 -07007382 k+= TOTAL_LANDMARK_INDICES;
7383 }
7384 } else {
7385 for (size_t i = 0; i < numFaces; i++) {
7386 setInvalidLandmarks(faceLandmarks+k);
7387 k+= TOTAL_LANDMARK_INDICES;
Thierry Strudel3d639192016-09-09 11:52:26 -07007388 }
7389 }
7390
Jason Lee49619db2017-04-13 12:07:22 -07007391 for (size_t i = 0; i < numFaces; i++) {
7392 faceIds[i] = faceDetectionInfo->faces[i].face_id;
7393
7394 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : faceIds=%d",
7395 faceDetectionInfo->frame_id, i, faceIds[i]);
7396 }
7397
Thierry Strudel3d639192016-09-09 11:52:26 -07007398 camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces);
7399 camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS,
7400 faceLandmarks, numFaces * 6U);
Jason Lee49619db2017-04-13 12:07:22 -07007401 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08007402 IF_META_AVAILABLE(cam_face_blink_data_t, blinks,
7403 CAM_INTF_META_FACE_BLINK, metadata) {
7404 uint8_t detected[MAX_ROI];
7405 uint8_t degree[MAX_ROI * 2];
7406 for (size_t i = 0; i < numFaces; i++) {
7407 detected[i] = blinks->blink[i].blink_detected;
7408 degree[2 * i] = blinks->blink[i].left_blink;
7409 degree[2 * i + 1] = blinks->blink[i].right_blink;
Jason Lee8ce36fa2017-04-19 19:40:37 -07007410
Jason Lee49619db2017-04-13 12:07:22 -07007411 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : "
7412 "blink_detected=%d, leye_blink=%d, reye_blink=%d",
7413 faceDetectionInfo->frame_id, i, detected[i], degree[2 * i],
7414 degree[2 * i + 1]);
Thierry Strudel54dc9782017-02-15 12:12:10 -08007415 }
7416 camMetadata.update(QCAMERA3_STATS_BLINK_DETECTED,
7417 detected, numFaces);
7418 camMetadata.update(QCAMERA3_STATS_BLINK_DEGREE,
7419 degree, numFaces * 2);
7420 }
7421 IF_META_AVAILABLE(cam_face_smile_data_t, smiles,
7422 CAM_INTF_META_FACE_SMILE, metadata) {
7423 uint8_t degree[MAX_ROI];
7424 uint8_t confidence[MAX_ROI];
7425 for (size_t i = 0; i < numFaces; i++) {
7426 degree[i] = smiles->smile[i].smile_degree;
7427 confidence[i] = smiles->smile[i].smile_confidence;
Jason Lee8ce36fa2017-04-19 19:40:37 -07007428
Jason Lee49619db2017-04-13 12:07:22 -07007429 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : "
7430 "smile_degree=%d, smile_score=%d",
7431 faceDetectionInfo->frame_id, i, degree[i], confidence[i]);
Thierry Strudel54dc9782017-02-15 12:12:10 -08007432 }
7433 camMetadata.update(QCAMERA3_STATS_SMILE_DEGREE,
7434 degree, numFaces);
7435 camMetadata.update(QCAMERA3_STATS_SMILE_CONFIDENCE,
7436 confidence, numFaces);
7437 }
7438 IF_META_AVAILABLE(cam_face_gaze_data_t, gazes,
7439 CAM_INTF_META_FACE_GAZE, metadata) {
7440 int8_t angle[MAX_ROI];
7441 int32_t direction[MAX_ROI * 3];
7442 int8_t degree[MAX_ROI * 2];
7443 for (size_t i = 0; i < numFaces; i++) {
7444 angle[i] = gazes->gaze[i].gaze_angle;
7445 direction[3 * i] = gazes->gaze[i].updown_dir;
7446 direction[3 * i + 1] = gazes->gaze[i].leftright_dir;
7447 direction[3 * i + 2] = gazes->gaze[i].roll_dir;
7448 degree[2 * i] = gazes->gaze[i].left_right_gaze;
7449 degree[2 * i + 1] = gazes->gaze[i].top_bottom_gaze;
Jason Lee8ce36fa2017-04-19 19:40:37 -07007450
7451 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : gaze_angle=%d, "
7452 "updown_dir=%d, leftright_dir=%d,, roll_dir=%d, "
7453 "left_right_gaze=%d, top_bottom_gaze=%d",
7454 faceDetectionInfo->frame_id, i, angle[i],
7455 direction[3 * i], direction[3 * i + 1],
7456 direction[3 * i + 2],
7457 degree[2 * i], degree[2 * i + 1]);
Thierry Strudel54dc9782017-02-15 12:12:10 -08007458 }
7459 camMetadata.update(QCAMERA3_STATS_GAZE_ANGLE,
7460 (uint8_t *)angle, numFaces);
7461 camMetadata.update(QCAMERA3_STATS_GAZE_DIRECTION,
7462 direction, numFaces * 3);
7463 camMetadata.update(QCAMERA3_STATS_GAZE_DEGREE,
7464 (uint8_t *)degree, numFaces * 2);
7465 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007466 }
7467 }
7468 }
7469 }
7470
7471 IF_META_AVAILABLE(uint32_t, histogramMode, CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata) {
7472 uint8_t fwk_histogramMode = (uint8_t) *histogramMode;
Shuzhen Wang14415f52016-11-16 18:26:18 -08007473 int32_t histogramBins = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007474 camMetadata.update(QCAMERA3_HISTOGRAM_MODE, &fwk_histogramMode, 1);
Shuzhen Wang14415f52016-11-16 18:26:18 -08007475 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE, &fwk_histogramMode, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007476
Shuzhen Wang14415f52016-11-16 18:26:18 -08007477 IF_META_AVAILABLE(int32_t, histBins, CAM_INTF_META_STATS_HISTOGRAM_BINS, metadata) {
7478 histogramBins = *histBins;
7479 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS, &histogramBins, 1);
7480 }
7481
7482 if (fwk_histogramMode == QCAMERA3_HISTOGRAM_MODE_ON && histogramBins > 0) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007483 IF_META_AVAILABLE(cam_hist_stats_t, stats_data, CAM_INTF_META_HISTOGRAM, metadata) {
7484 // process histogram statistics info
Shuzhen Wang14415f52016-11-16 18:26:18 -08007485 int32_t* histogramData = NULL;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007486
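                // Pick the histogram buffer for the reported Bayer channel; the
                // Y, ALL, and R cases all fall back to the R-channel stats.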
7487 switch (stats_data->type) {
7488 case CAM_HISTOGRAM_TYPE_BAYER:
7489 switch (stats_data->bayer_stats.data_type) {
7490 case CAM_STATS_CHANNEL_GR:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007491 histogramData = (int32_t *)stats_data->bayer_stats.gr_stats.hist_buf;
7492 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007493 case CAM_STATS_CHANNEL_GB:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007494 histogramData = (int32_t *)stats_data->bayer_stats.gb_stats.hist_buf;
7495 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007496 case CAM_STATS_CHANNEL_B:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007497 histogramData = (int32_t *)stats_data->bayer_stats.b_stats.hist_buf;
7498 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007499 case CAM_STATS_CHANNEL_Y:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007500 case CAM_STATS_CHANNEL_ALL:
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007501 case CAM_STATS_CHANNEL_R:
7502 default:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007503 histogramData = (int32_t *)stats_data->bayer_stats.r_stats.hist_buf;
7504 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007505 }
7506 break;
7507 case CAM_HISTOGRAM_TYPE_YUV:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007508 histogramData = (int32_t *)stats_data->yuv_stats.hist_buf;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007509 break;
7510 }
7511
Shuzhen Wang14415f52016-11-16 18:26:18 -08007512 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM, histogramData, histogramBins);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007513 }
7514 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007515 }
7516
7517 IF_META_AVAILABLE(uint32_t, sharpnessMapMode,
7518 CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata) {
7519 uint8_t fwk_sharpnessMapMode = (uint8_t) *sharpnessMapMode;
7520 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &fwk_sharpnessMapMode, 1);
7521 }
7522
7523 IF_META_AVAILABLE(cam_sharpness_map_t, sharpnessMap,
7524 CAM_INTF_META_STATS_SHARPNESS_MAP, metadata) {
7525 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP, (int32_t *)sharpnessMap->sharpness,
7526 CAM_MAX_MAP_WIDTH * CAM_MAX_MAP_HEIGHT * 3);
7527 }
7528
7529 IF_META_AVAILABLE(cam_lens_shading_map_t, lensShadingMap,
7530 CAM_INTF_META_LENS_SHADING_MAP, metadata) {
7531 size_t map_height = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.height,
7532 CAM_MAX_SHADING_MAP_HEIGHT);
7533 size_t map_width = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.width,
7534 CAM_MAX_SHADING_MAP_WIDTH);
7535 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP,
7536 lensShadingMap->lens_shading, 4U * map_width * map_height);
7537 }
7538
7539 IF_META_AVAILABLE(uint32_t, toneMapMode, CAM_INTF_META_TONEMAP_MODE, metadata) {
7540 uint8_t fwk_toneMapMode = (uint8_t) *toneMapMode;
7541 camMetadata.update(ANDROID_TONEMAP_MODE, &fwk_toneMapMode, 1);
7542 }
7543
7544 IF_META_AVAILABLE(cam_rgb_tonemap_curves, tonemap, CAM_INTF_META_TONEMAP_CURVES, metadata) {
7545 //Populate CAM_INTF_META_TONEMAP_CURVES
7546 /* ch0 = G, ch 1 = B, ch 2 = R*/
7547 if (tonemap->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
7548 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
7549 tonemap->tonemap_points_cnt,
7550 CAM_MAX_TONEMAP_CURVE_SIZE);
7551 tonemap->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
7552 }
7553
7554 camMetadata.update(ANDROID_TONEMAP_CURVE_GREEN,
7555 &tonemap->curves[0].tonemap_points[0][0],
7556 tonemap->tonemap_points_cnt * 2);
7557
7558 camMetadata.update(ANDROID_TONEMAP_CURVE_BLUE,
7559 &tonemap->curves[1].tonemap_points[0][0],
7560 tonemap->tonemap_points_cnt * 2);
7561
7562 camMetadata.update(ANDROID_TONEMAP_CURVE_RED,
7563 &tonemap->curves[2].tonemap_points[0][0],
7564 tonemap->tonemap_points_cnt * 2);
7565 }
7566
7567 IF_META_AVAILABLE(cam_color_correct_gains_t, colorCorrectionGains,
7568 CAM_INTF_META_COLOR_CORRECT_GAINS, metadata) {
7569 camMetadata.update(ANDROID_COLOR_CORRECTION_GAINS, colorCorrectionGains->gains,
7570 CC_GAIN_MAX);
7571 }
7572
7573 IF_META_AVAILABLE(cam_color_correct_matrix_t, colorCorrectionMatrix,
7574 CAM_INTF_META_COLOR_CORRECT_TRANSFORM, metadata) {
7575 camMetadata.update(ANDROID_COLOR_CORRECTION_TRANSFORM,
7576 (camera_metadata_rational_t *)(void *)colorCorrectionMatrix->transform_matrix,
7577 CC_MATRIX_COLS * CC_MATRIX_ROWS);
7578 }
7579
7580 IF_META_AVAILABLE(cam_profile_tone_curve, toneCurve,
7581 CAM_INTF_META_PROFILE_TONE_CURVE, metadata) {
7582 if (toneCurve->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
7583 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
7584 toneCurve->tonemap_points_cnt,
7585 CAM_MAX_TONEMAP_CURVE_SIZE);
7586 toneCurve->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
7587 }
7588 camMetadata.update(ANDROID_SENSOR_PROFILE_TONE_CURVE,
7589 (float*)toneCurve->curve.tonemap_points,
7590 toneCurve->tonemap_points_cnt * 2);
7591 }
7592
7593 IF_META_AVAILABLE(cam_color_correct_gains_t, predColorCorrectionGains,
7594 CAM_INTF_META_PRED_COLOR_CORRECT_GAINS, metadata) {
7595 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_GAINS,
7596 predColorCorrectionGains->gains, 4);
7597 }
7598
7599 IF_META_AVAILABLE(cam_color_correct_matrix_t, predColorCorrectionMatrix,
7600 CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM, metadata) {
7601 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
7602 (camera_metadata_rational_t *)(void *)predColorCorrectionMatrix->transform_matrix,
7603 CC_MATRIX_ROWS * CC_MATRIX_COLS);
7604 }
7605
7606 IF_META_AVAILABLE(float, otpWbGrGb, CAM_INTF_META_OTP_WB_GRGB, metadata) {
7607 camMetadata.update(ANDROID_SENSOR_GREEN_SPLIT, otpWbGrGb, 1);
7608 }
7609
7610 IF_META_AVAILABLE(uint32_t, blackLevelLock, CAM_INTF_META_BLACK_LEVEL_LOCK, metadata) {
7611 uint8_t fwk_blackLevelLock = (uint8_t) *blackLevelLock;
7612 camMetadata.update(ANDROID_BLACK_LEVEL_LOCK, &fwk_blackLevelLock, 1);
7613 }
7614
7615 IF_META_AVAILABLE(uint32_t, sceneFlicker, CAM_INTF_META_SCENE_FLICKER, metadata) {
7616 uint8_t fwk_sceneFlicker = (uint8_t) *sceneFlicker;
7617 camMetadata.update(ANDROID_STATISTICS_SCENE_FLICKER, &fwk_sceneFlicker, 1);
7618 }
7619
7620 IF_META_AVAILABLE(uint32_t, effectMode, CAM_INTF_PARM_EFFECT, metadata) {
7621 int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
7622 *effectMode);
7623 if (NAME_NOT_FOUND != val) {
7624 uint8_t fwk_effectMode = (uint8_t)val;
7625 camMetadata.update(ANDROID_CONTROL_EFFECT_MODE, &fwk_effectMode, 1);
7626 }
7627 }
7628
7629 IF_META_AVAILABLE(cam_test_pattern_data_t, testPatternData,
7630 CAM_INTF_META_TEST_PATTERN_DATA, metadata) {
7631 int32_t fwk_testPatternMode = lookupFwkName(TEST_PATTERN_MAP,
7632 METADATA_MAP_SIZE(TEST_PATTERN_MAP), testPatternData->mode);
7633 if (NAME_NOT_FOUND != fwk_testPatternMode) {
7634 camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &fwk_testPatternMode, 1);
7635 }
7636 int32_t fwk_testPatternData[4];
7637 fwk_testPatternData[0] = testPatternData->r;
7638 fwk_testPatternData[3] = testPatternData->b;
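        // Indices 1 and 2 hold the two green samples; which of Gr/Gb goes where
        // depends on the sensor's color filter arrangement, handled below.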
7639 switch (gCamCapability[mCameraId]->color_arrangement) {
7640 case CAM_FILTER_ARRANGEMENT_RGGB:
7641 case CAM_FILTER_ARRANGEMENT_GRBG:
7642 fwk_testPatternData[1] = testPatternData->gr;
7643 fwk_testPatternData[2] = testPatternData->gb;
7644 break;
7645 case CAM_FILTER_ARRANGEMENT_GBRG:
7646 case CAM_FILTER_ARRANGEMENT_BGGR:
7647 fwk_testPatternData[2] = testPatternData->gr;
7648 fwk_testPatternData[1] = testPatternData->gb;
7649 break;
7650 default:
7651 LOGE("color arrangement %d is not supported",
7652 gCamCapability[mCameraId]->color_arrangement);
7653 break;
7654 }
7655 camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_DATA, fwk_testPatternData, 4);
7656 }
7657
7658 IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, metadata) {
7659 camMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
7660 }
7661
7662 IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, metadata) {
7663 String8 str((const char *)gps_methods);
7664 camMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
7665 }
7666
7667 IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, metadata) {
7668 camMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
7669 }
7670
7671 IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, metadata) {
7672 camMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
7673 }
7674
7675 IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, metadata) {
7676 uint8_t fwk_jpeg_quality = (uint8_t) *jpeg_quality;
7677 camMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
7678 }
7679
7680 IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, metadata) {
7681 uint8_t fwk_thumb_quality = (uint8_t) *thumb_quality;
7682 camMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
7683 }
7684
7685 IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, metadata) {
7686 int32_t fwk_thumb_size[2];
7687 fwk_thumb_size[0] = thumb_size->width;
7688 fwk_thumb_size[1] = thumb_size->height;
7689 camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
7690 }
7691
Shuzhen Wang2fea89e2017-05-08 17:02:15 -07007692 // Skip reprocess metadata if there is no input stream.
7693 if (mInputStreamInfo.dim.width > 0 && mInputStreamInfo.dim.height > 0) {
7694 IF_META_AVAILABLE(int32_t, privateData, CAM_INTF_META_PRIVATE_DATA, metadata) {
7695 camMetadata.update(QCAMERA3_PRIVATEDATA_REPROCESS,
7696 privateData,
7697 MAX_METADATA_PRIVATE_PAYLOAD_SIZE_IN_BYTES / sizeof(int32_t));
7698 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007699 }
7700
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007701 IF_META_AVAILABLE(int32_t, meteringMode, CAM_INTF_PARM_AEC_ALGO_TYPE, metadata) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08007702 camMetadata.update(QCAMERA3_EXPOSURE_METER,
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007703 meteringMode, 1);
7704 }
7705
Thierry Strudel54dc9782017-02-15 12:12:10 -08007706 IF_META_AVAILABLE(cam_asd_hdr_scene_data_t, hdr_scene_data,
7707 CAM_INTF_META_ASD_HDR_SCENE_DATA, metadata) {
7708 LOGD("hdr_scene_data: %d %f\n",
7709 hdr_scene_data->is_hdr_scene, hdr_scene_data->hdr_confidence);
7710 uint8_t isHdr = hdr_scene_data->is_hdr_scene;
7711 float isHdrConfidence = hdr_scene_data->hdr_confidence;
7712 camMetadata.update(QCAMERA3_STATS_IS_HDR_SCENE,
7713 &isHdr, 1);
7714 camMetadata.update(QCAMERA3_STATS_IS_HDR_SCENE_CONFIDENCE,
7715 &isHdrConfidence, 1);
7716 }
7717
7718
7719
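    // The tuning blob packed below consists of six uint32 header words (the data
    // version followed by the sensor/VFE/CPP/CAC/mod3 payload sizes, with the
    // mod3 size forced to 0) and then the sensor, VFE, CPP and CAC payloads,
    // each clamped to its TUNING_*_DATA_MAX limit. It is published through the
    // QCAMERA3_TUNING_META_DATA_BLOB vendor tag with a length in uint32_t units.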
Thierry Strudel3d639192016-09-09 11:52:26 -07007720 if (metadata->is_tuning_params_valid) {
7721 uint8_t tuning_meta_data_blob[sizeof(tuning_params_t)];
7722 uint8_t *data = (uint8_t *)&tuning_meta_data_blob[0];
7723 metadata->tuning_params.tuning_data_version = TUNING_DATA_VERSION;
7724
7725
7726 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_data_version),
7727 sizeof(uint32_t));
7728 data += sizeof(uint32_t);
7729
7730 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_sensor_data_size),
7731 sizeof(uint32_t));
7732 LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
7733 data += sizeof(uint32_t);
7734
7735 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_vfe_data_size),
7736 sizeof(uint32_t));
7737 LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
7738 data += sizeof(uint32_t);
7739
7740 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cpp_data_size),
7741 sizeof(uint32_t));
7742 LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
7743 data += sizeof(uint32_t);
7744
7745 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cac_data_size),
7746 sizeof(uint32_t));
7747 LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
7748 data += sizeof(uint32_t);
7749
7750 metadata->tuning_params.tuning_mod3_data_size = 0;
7751 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_mod3_data_size),
7752 sizeof(uint32_t));
7753 LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
7754 data += sizeof(uint32_t);
7755
7756 size_t count = MIN(metadata->tuning_params.tuning_sensor_data_size,
7757 TUNING_SENSOR_DATA_MAX);
7758 memcpy(data, ((uint8_t *)&metadata->tuning_params.data),
7759 count);
7760 data += count;
7761
7762 count = MIN(metadata->tuning_params.tuning_vfe_data_size,
7763 TUNING_VFE_DATA_MAX);
7764 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_VFE_DATA_OFFSET]),
7765 count);
7766 data += count;
7767
7768 count = MIN(metadata->tuning_params.tuning_cpp_data_size,
7769 TUNING_CPP_DATA_MAX);
7770 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CPP_DATA_OFFSET]),
7771 count);
7772 data += count;
7773
7774 count = MIN(metadata->tuning_params.tuning_cac_data_size,
7775 TUNING_CAC_DATA_MAX);
7776 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CAC_DATA_OFFSET]),
7777 count);
7778 data += count;
7779
7780 camMetadata.update(QCAMERA3_TUNING_META_DATA_BLOB,
7781 (int32_t *)(void *)tuning_meta_data_blob,
7782 (size_t)(data-tuning_meta_data_blob) / sizeof(uint32_t));
7783 }
7784
7785 IF_META_AVAILABLE(cam_neutral_col_point_t, neuColPoint,
7786 CAM_INTF_META_NEUTRAL_COL_POINT, metadata) {
7787 camMetadata.update(ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
7788 (camera_metadata_rational_t *)(void *)neuColPoint->neutral_col_point,
7789 NEUTRAL_COL_POINTS);
7790 }
7791
7792 IF_META_AVAILABLE(uint32_t, shadingMapMode, CAM_INTF_META_LENS_SHADING_MAP_MODE, metadata) {
7793 uint8_t fwk_shadingMapMode = (uint8_t) *shadingMapMode;
7794 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &fwk_shadingMapMode, 1);
7795 }
7796
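    // AE regions are reported to the framework as (xmin, ymin, xmax, ymax, weight)
    // in active-array coordinates; convertToRegions() performs the corner-point
    // packing once the crop mapper has translated the ROI out of the sensor
    // output coordinate system.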
7797 IF_META_AVAILABLE(cam_area_t, hAeRegions, CAM_INTF_META_AEC_ROI, metadata) {
7798 int32_t aeRegions[REGIONS_TUPLE_COUNT];
7799 // Adjust crop region from sensor output coordinate system to active
7800 // array coordinate system.
Shuzhen Wang91c14e82017-08-21 17:56:57 -07007801 cam_rect_t hAeRect = hAeRegions->rect;
7802 mCropRegionMapper.toActiveArray(hAeRect.left, hAeRect.top,
7803 hAeRect.width, hAeRect.height);
Thierry Strudel3d639192016-09-09 11:52:26 -07007804
Shuzhen Wang91c14e82017-08-21 17:56:57 -07007805 convertToRegions(hAeRect, aeRegions, hAeRegions->weight);
Thierry Strudel3d639192016-09-09 11:52:26 -07007806 camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions,
7807 REGIONS_TUPLE_COUNT);
7808 LOGD("Metadata : ANDROID_CONTROL_AE_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
7809 aeRegions[0], aeRegions[1], aeRegions[2], aeRegions[3],
Shuzhen Wang91c14e82017-08-21 17:56:57 -07007810 hAeRect.left, hAeRect.top, hAeRect.width,
7811 hAeRect.height);
Thierry Strudel3d639192016-09-09 11:52:26 -07007812 }
7813
Shuzhen Wang181c57b2017-07-21 11:39:44 -07007814 if (!pendingRequest.focusStateSent) {
7815 if (pendingRequest.focusStateValid) {
7816 camMetadata.update(ANDROID_CONTROL_AF_STATE, &pendingRequest.focusState, 1);
7817 LOGD("Metadata : ANDROID_CONTROL_AF_STATE %u", pendingRequest.focusState);
Shuzhen Wang0cb8cdf2016-07-14 11:56:49 -07007818 } else {
Shuzhen Wang181c57b2017-07-21 11:39:44 -07007819 IF_META_AVAILABLE(uint32_t, afState, CAM_INTF_META_AF_STATE, metadata) {
7820 uint8_t fwk_afState = (uint8_t) *afState;
7821 camMetadata.update(ANDROID_CONTROL_AF_STATE, &fwk_afState, 1);
7822 LOGD("Metadata : ANDROID_CONTROL_AF_STATE %u", *afState);
7823 }
Shuzhen Wang0cb8cdf2016-07-14 11:56:49 -07007824 }
7825 }
7826
Thierry Strudel3d639192016-09-09 11:52:26 -07007827 IF_META_AVAILABLE(float, focusDistance, CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata) {
7828 camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1);
7829 }
7830
7831 IF_META_AVAILABLE(float, focusRange, CAM_INTF_META_LENS_FOCUS_RANGE, metadata) {
7832 camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 2);
7833 }
7834
7835 IF_META_AVAILABLE(cam_af_lens_state_t, lensState, CAM_INTF_META_LENS_STATE, metadata) {
7836 uint8_t fwk_lensState = *lensState;
7837 camMetadata.update(ANDROID_LENS_STATE , &fwk_lensState, 1);
7838 }
7839
Thierry Strudel3d639192016-09-09 11:52:26 -07007840 IF_META_AVAILABLE(uint32_t, hal_ab_mode, CAM_INTF_PARM_ANTIBANDING, metadata) {
Shuzhen Wangf6890e02016-08-12 14:28:54 -07007841 uint32_t ab_mode = *hal_ab_mode;
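        // The framework antibanding enum has no 50Hz/60Hz auto variants, so both
        // auto sub-modes are reported back as plain AUTO before the lookup.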
7842 if (ab_mode == CAM_ANTIBANDING_MODE_AUTO_60HZ ||
7843 ab_mode == CAM_ANTIBANDING_MODE_AUTO_50HZ) {
7844 ab_mode = CAM_ANTIBANDING_MODE_AUTO;
7845 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007846 int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
Shuzhen Wangf6890e02016-08-12 14:28:54 -07007847 ab_mode);
Thierry Strudel3d639192016-09-09 11:52:26 -07007848 if (NAME_NOT_FOUND != val) {
7849 uint8_t fwk_ab_mode = (uint8_t)val;
7850 camMetadata.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &fwk_ab_mode, 1);
7851 }
7852 }
7853
7854 IF_META_AVAILABLE(uint32_t, bestshotMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
7855 int val = lookupFwkName(SCENE_MODES_MAP,
7856 METADATA_MAP_SIZE(SCENE_MODES_MAP), *bestshotMode);
7857 if (NAME_NOT_FOUND != val) {
7858 uint8_t fwkBestshotMode = (uint8_t)val;
7859 camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkBestshotMode, 1);
7860 LOGD("Metadata : ANDROID_CONTROL_SCENE_MODE");
7861 } else {
7862 LOGH("Metadata not found : ANDROID_CONTROL_SCENE_MODE");
7863 }
7864 }
7865
7866 IF_META_AVAILABLE(uint32_t, mode, CAM_INTF_META_MODE, metadata) {
7867 uint8_t fwk_mode = (uint8_t) *mode;
7868 camMetadata.update(ANDROID_CONTROL_MODE, &fwk_mode, 1);
7869 }
7870
7871    /* Constant metadata values to be updated */
7872 uint8_t hotPixelModeFast = ANDROID_HOT_PIXEL_MODE_FAST;
7873 camMetadata.update(ANDROID_HOT_PIXEL_MODE, &hotPixelModeFast, 1);
7874
7875 uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
7876 camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
7877
7878 int32_t hotPixelMap[2];
7879 camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP, &hotPixelMap[0], 0);
7880
7881 // CDS
7882 IF_META_AVAILABLE(int32_t, cds, CAM_INTF_PARM_CDS_MODE, metadata) {
7883 camMetadata.update(QCAMERA3_CDS_MODE, cds, 1);
7884 }
7885
Thierry Strudel04e026f2016-10-10 11:27:36 -07007886 IF_META_AVAILABLE(cam_sensor_hdr_type_t, vhdr, CAM_INTF_PARM_SENSOR_HDR, metadata) {
7887 int32_t fwk_hdr;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007888 int8_t curr_hdr_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) != 0);
Thierry Strudel04e026f2016-10-10 11:27:36 -07007889 if(*vhdr == CAM_SENSOR_HDR_OFF) {
7890 fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_OFF;
7891 } else {
7892 fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_ON;
7893 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08007894
7895 if(fwk_hdr != curr_hdr_state) {
7896 LOGH("PROFILE_META_HDR_TOGGLED value=%d", fwk_hdr);
7897 if(fwk_hdr)
7898 mCurrFeatureState |= CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
7899 else
7900 mCurrFeatureState &= ~CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
7901 }
Thierry Strudel04e026f2016-10-10 11:27:36 -07007902 camMetadata.update(QCAMERA3_VIDEO_HDR_MODE, &fwk_hdr, 1);
7903 }
7904
Thierry Strudel54dc9782017-02-15 12:12:10 -08007905 //binning correction
7906 IF_META_AVAILABLE(cam_binning_correction_mode_t, bin_correction,
7907 CAM_INTF_META_BINNING_CORRECTION_MODE, metadata) {
7908 int32_t fwk_bin_mode = (int32_t) *bin_correction;
7909 camMetadata.update(QCAMERA3_BINNING_CORRECTION_MODE, &fwk_bin_mode, 1);
7910 }
7911
Thierry Strudel04e026f2016-10-10 11:27:36 -07007912 IF_META_AVAILABLE(cam_ir_mode_type_t, ir, CAM_INTF_META_IR_MODE, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007913 int32_t fwk_ir = (int32_t) *ir;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007914 int8_t curr_ir_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_IR ) != 0);
7915 int8_t is_ir_on = 0;
7916
7917 (fwk_ir > 0) ? (is_ir_on = 1) : (is_ir_on = 0) ;
7918 if(is_ir_on != curr_ir_state) {
7919 LOGH("PROFILE_META_IR_TOGGLED value=%d", fwk_ir);
7920 if(is_ir_on)
7921 mCurrFeatureState |= CAM_QCOM_FEATURE_IR;
7922 else
7923 mCurrFeatureState &= ~CAM_QCOM_FEATURE_IR;
7924 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007925 camMetadata.update(QCAMERA3_IR_MODE, &fwk_ir, 1);
Thierry Strudel04e026f2016-10-10 11:27:36 -07007926 }
7927
Thierry Strudel269c81a2016-10-12 12:13:59 -07007928 // AEC SPEED
7929 IF_META_AVAILABLE(float, aec, CAM_INTF_META_AEC_CONVERGENCE_SPEED, metadata) {
7930 camMetadata.update(QCAMERA3_AEC_CONVERGENCE_SPEED, aec, 1);
7931 }
7932
7933 // AWB SPEED
7934 IF_META_AVAILABLE(float, awb, CAM_INTF_META_AWB_CONVERGENCE_SPEED, metadata) {
7935 camMetadata.update(QCAMERA3_AWB_CONVERGENCE_SPEED, awb, 1);
7936 }
7937
Thierry Strudel3d639192016-09-09 11:52:26 -07007938 // TNR
7939 IF_META_AVAILABLE(cam_denoise_param_t, tnr, CAM_INTF_PARM_TEMPORAL_DENOISE, metadata) {
7940 uint8_t tnr_enable = tnr->denoise_enable;
7941 int32_t tnr_process_type = (int32_t)tnr->process_plates;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007942 int8_t curr_tnr_state = ((mCurrFeatureState & CAM_QTI_FEATURE_SW_TNR) != 0) ;
7943 int8_t is_tnr_on = 0;
7944
7945 (tnr_enable > 0) ? (is_tnr_on = 1) : (is_tnr_on = 0);
7946 if(is_tnr_on != curr_tnr_state) {
7947 LOGH("PROFILE_META_TNR_TOGGLED value=%d", tnr_enable);
7948 if(is_tnr_on)
7949 mCurrFeatureState |= CAM_QTI_FEATURE_SW_TNR;
7950 else
7951 mCurrFeatureState &= ~CAM_QTI_FEATURE_SW_TNR;
7952 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007953
7954 camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
7955 camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
7956 }
7957
7958 // Reprocess crop data
7959 IF_META_AVAILABLE(cam_crop_data_t, crop_data, CAM_INTF_META_CROP_DATA, metadata) {
7960 uint8_t cnt = crop_data->num_of_streams;
7961 if ( (0 >= cnt) || (cnt > MAX_NUM_STREAMS)) {
7962 // mm-qcamera-daemon only posts crop_data for streams
7963            // not linked to pproc. So the absence of valid crop metadata is
7964            // not necessarily an error case.
7965 LOGD("No valid crop metadata entries");
7966 } else {
7967 uint32_t reproc_stream_id;
7968 if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
7969 LOGD("No reprocessible stream found, ignore crop data");
7970 } else {
7971 int rc = NO_ERROR;
7972 Vector<int32_t> roi_map;
7973 int32_t *crop = new int32_t[cnt*4];
7974 if (NULL == crop) {
7975 rc = NO_MEMORY;
7976 }
7977 if (NO_ERROR == rc) {
7978 int32_t streams_found = 0;
7979 for (size_t i = 0; i < cnt; i++) {
7980 if (crop_data->crop_info[i].stream_id == reproc_stream_id) {
7981 if (pprocDone) {
7982 // HAL already does internal reprocessing,
7983 // either via reprocessing before JPEG encoding,
7984 // or offline postprocessing for pproc bypass case.
7985 crop[0] = 0;
7986 crop[1] = 0;
7987 crop[2] = mInputStreamInfo.dim.width;
7988 crop[3] = mInputStreamInfo.dim.height;
7989 } else {
7990 crop[0] = crop_data->crop_info[i].crop.left;
7991 crop[1] = crop_data->crop_info[i].crop.top;
7992 crop[2] = crop_data->crop_info[i].crop.width;
7993 crop[3] = crop_data->crop_info[i].crop.height;
7994 }
7995 roi_map.add(crop_data->crop_info[i].roi_map.left);
7996 roi_map.add(crop_data->crop_info[i].roi_map.top);
7997 roi_map.add(crop_data->crop_info[i].roi_map.width);
7998 roi_map.add(crop_data->crop_info[i].roi_map.height);
7999 streams_found++;
8000 LOGD("Adding reprocess crop data for stream %dx%d, %dx%d",
8001 crop[0], crop[1], crop[2], crop[3]);
8002 LOGD("Adding reprocess crop roi map for stream %dx%d, %dx%d",
8003 crop_data->crop_info[i].roi_map.left,
8004 crop_data->crop_info[i].roi_map.top,
8005 crop_data->crop_info[i].roi_map.width,
8006 crop_data->crop_info[i].roi_map.height);
8007 break;
8008
8009 }
8010 }
8011 camMetadata.update(QCAMERA3_CROP_COUNT_REPROCESS,
8012 &streams_found, 1);
8013 camMetadata.update(QCAMERA3_CROP_REPROCESS,
8014 crop, (size_t)(streams_found * 4));
8015 if (roi_map.array()) {
8016 camMetadata.update(QCAMERA3_CROP_ROI_MAP_REPROCESS,
8017 roi_map.array(), roi_map.size());
8018 }
8019 }
8020 if (crop) {
8021 delete [] crop;
8022 }
8023 }
8024 }
8025 }
8026
8027 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
8028        // Regardless of whether CAC is supported, CTS expects the CAC result to be non-NULL,
8029        // so hardcode the CAC result to OFF mode.
8030 uint8_t fwkCacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
8031 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &fwkCacMode, 1);
8032 } else {
8033 IF_META_AVAILABLE(cam_aberration_mode_t, cacMode, CAM_INTF_PARM_CAC, metadata) {
8034 int val = lookupFwkName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
8035 *cacMode);
8036 if (NAME_NOT_FOUND != val) {
8037 uint8_t resultCacMode = (uint8_t)val;
8038 // check whether CAC result from CB is equal to Framework set CAC mode
8039 // If not equal then set the CAC mode came in corresponding request
Shuzhen Wang181c57b2017-07-21 11:39:44 -07008040 if (pendingRequest.fwkCacMode != resultCacMode) {
8041 resultCacMode = pendingRequest.fwkCacMode;
Thierry Strudel3d639192016-09-09 11:52:26 -07008042 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08008043 //Check if CAC is disabled by property
8044 if (m_cacModeDisabled) {
8045 resultCacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
8046 }
8047
Shuzhen Wang181c57b2017-07-21 11:39:44 -07008048 LOGD("fwk_cacMode=%d resultCacMode=%d", pendingRequest.fwkCacMode, resultCacMode);
Thierry Strudel3d639192016-09-09 11:52:26 -07008049 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &resultCacMode, 1);
8050 } else {
8051 LOGE("Invalid CAC camera parameter: %d", *cacMode);
8052 }
8053 }
8054 }
8055
8056 // Post blob of cam_cds_data through vendor tag.
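    // The per-stream CDS info is collapsed to a single entry, carrying the enable
    // flag of the reprocessible stream, before being posted via QCAMERA3_CDS_INFO.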
8057 IF_META_AVAILABLE(cam_cds_data_t, cdsInfo, CAM_INTF_META_CDS_DATA, metadata) {
8058 uint8_t cnt = cdsInfo->num_of_streams;
8059 cam_cds_data_t cdsDataOverride;
8060 memset(&cdsDataOverride, 0, sizeof(cdsDataOverride));
8061 cdsDataOverride.session_cds_enable = cdsInfo->session_cds_enable;
8062 cdsDataOverride.num_of_streams = 1;
8063 if ((0 < cnt) && (cnt <= MAX_NUM_STREAMS)) {
8064 uint32_t reproc_stream_id;
8065 if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
8066 LOGD("No reprocessible stream found, ignore cds data");
8067 } else {
8068 for (size_t i = 0; i < cnt; i++) {
8069 if (cdsInfo->cds_info[i].stream_id ==
8070 reproc_stream_id) {
8071 cdsDataOverride.cds_info[0].cds_enable =
8072 cdsInfo->cds_info[i].cds_enable;
8073 break;
8074 }
8075 }
8076 }
8077 } else {
8078 LOGD("Invalid stream count %d in CDS_DATA", cnt);
8079 }
8080 camMetadata.update(QCAMERA3_CDS_INFO,
8081 (uint8_t *)&cdsDataOverride,
8082 sizeof(cam_cds_data_t));
8083 }
8084
8085 // Ldaf calibration data
8086 if (!mLdafCalibExist) {
8087 IF_META_AVAILABLE(uint32_t, ldafCalib,
8088 CAM_INTF_META_LDAF_EXIF, metadata) {
8089 mLdafCalibExist = true;
8090 mLdafCalib[0] = ldafCalib[0];
8091 mLdafCalib[1] = ldafCalib[1];
8092 LOGD("ldafCalib[0] is %d, ldafCalib[1] is %d",
8093 ldafCalib[0], ldafCalib[1]);
8094 }
8095 }
8096
Thierry Strudel54dc9782017-02-15 12:12:10 -08008097 // EXIF debug data through vendor tag
8098 /*
8099 * Mobicat Mask can assume 3 values:
8100 * 1 refers to Mobicat data,
8101 * 2 refers to Stats Debug and Exif Debug Data
8102 * 3 refers to Mobicat and Stats Debug Data
8103 * We want to make sure that we are sending Exif debug data
8104 * only when Mobicat Mask is 2.
8105 */
8106 if ((mExifParams.debug_params != NULL) && (getMobicatMask() == 2)) {
8107 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB,
8108 (uint8_t *)(void *)mExifParams.debug_params,
8109 sizeof(mm_jpeg_debug_exif_params_t));
8110 }
8111
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008112 // Reprocess and DDM debug data through vendor tag
8113 cam_reprocess_info_t repro_info;
8114 memset(&repro_info, 0, sizeof(cam_reprocess_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008115 IF_META_AVAILABLE(cam_stream_crop_info_t, sensorCropInfo,
8116 CAM_INTF_META_SNAP_CROP_INFO_SENSOR, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008117 memcpy(&(repro_info.sensor_crop_info), sensorCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008118 }
8119 IF_META_AVAILABLE(cam_stream_crop_info_t, camifCropInfo,
8120 CAM_INTF_META_SNAP_CROP_INFO_CAMIF, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008121 memcpy(&(repro_info.camif_crop_info), camifCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008122 }
8123 IF_META_AVAILABLE(cam_stream_crop_info_t, ispCropInfo,
8124 CAM_INTF_META_SNAP_CROP_INFO_ISP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008125 memcpy(&(repro_info.isp_crop_info), ispCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008126 }
8127 IF_META_AVAILABLE(cam_stream_crop_info_t, cppCropInfo,
8128 CAM_INTF_META_SNAP_CROP_INFO_CPP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008129 memcpy(&(repro_info.cpp_crop_info), cppCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008130 }
8131 IF_META_AVAILABLE(cam_focal_length_ratio_t, ratio,
8132 CAM_INTF_META_AF_FOCAL_LENGTH_RATIO, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008133 memcpy(&(repro_info.af_focal_length_ratio), ratio, sizeof(cam_focal_length_ratio_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008134 }
8135 IF_META_AVAILABLE(int32_t, flip, CAM_INTF_PARM_FLIP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008136 memcpy(&(repro_info.pipeline_flip), flip, sizeof(int32_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008137 }
8138 IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
8139 CAM_INTF_PARM_ROTATION, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008140 memcpy(&(repro_info.rotation_info), rotationInfo, sizeof(cam_rotation_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008141 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008142 IF_META_AVAILABLE(cam_area_t, afRoi, CAM_INTF_META_AF_ROI, metadata) {
8143 memcpy(&(repro_info.af_roi), afRoi, sizeof(cam_area_t));
8144 }
8145 IF_META_AVAILABLE(cam_dyn_img_data_t, dynMask, CAM_INTF_META_IMG_DYN_FEAT, metadata) {
8146 memcpy(&(repro_info.dyn_mask), dynMask, sizeof(cam_dyn_img_data_t));
8147 }
8148 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB,
8149 (uint8_t *)&repro_info, sizeof(cam_reprocess_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008150
Thierry Strudel295a0ca2016-11-03 18:38:47 -07008151 // INSTANT AEC MODE
8152 IF_META_AVAILABLE(uint8_t, instant_aec_mode,
8153 CAM_INTF_PARM_INSTANT_AEC, metadata) {
8154 camMetadata.update(QCAMERA3_INSTANT_AEC_MODE, instant_aec_mode, 1);
8155 }
8156
Shuzhen Wange763e802016-03-31 10:24:29 -07008157 // AF scene change
8158 IF_META_AVAILABLE(uint8_t, afSceneChange, CAM_INTF_META_AF_SCENE_CHANGE, metadata) {
8159 camMetadata.update(NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE, afSceneChange, 1);
8160 }
8161
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07008162 // Enable ZSL
8163 if (enableZsl != nullptr) {
8164 uint8_t value = *enableZsl ?
8165 ANDROID_CONTROL_ENABLE_ZSL_TRUE : ANDROID_CONTROL_ENABLE_ZSL_FALSE;
8166 camMetadata.update(ANDROID_CONTROL_ENABLE_ZSL, &value, 1);
8167 }
8168
Xu Han821ea9c2017-05-23 09:00:40 -07008169 // OIS Data
8170 IF_META_AVAILABLE(cam_frame_ois_info_t, frame_ois_data, CAM_INTF_META_FRAME_OIS_DATA, metadata) {
8171 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_FRAME_TIMESTAMP_VSYNC,
8172 &(frame_ois_data->frame_sof_timestamp_vsync), 1);
8173 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_FRAME_TIMESTAMP_BOOTTIME,
8174 &(frame_ois_data->frame_sof_timestamp_boottime), 1);
8175 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_TIMESTAMPS_BOOTTIME,
8176 frame_ois_data->ois_sample_timestamp_boottime, frame_ois_data->num_ois_sample);
8177 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_SHIFT_X,
8178 frame_ois_data->ois_sample_shift_x, frame_ois_data->num_ois_sample);
8179 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_SHIFT_Y,
8180 frame_ois_data->ois_sample_shift_y, frame_ois_data->num_ois_sample);
Xue Tu2c3e9142017-08-18 16:23:52 -07008181 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_SHIFT_PIXEL_X,
8182 frame_ois_data->ois_sample_shift_pixel_x, frame_ois_data->num_ois_sample);
8183 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_SHIFT_PIXEL_Y,
8184 frame_ois_data->ois_sample_shift_pixel_y, frame_ois_data->num_ois_sample);
Xu Han821ea9c2017-05-23 09:00:40 -07008185 }
8186
Thierry Strudel3d639192016-09-09 11:52:26 -07008187 resultMetadata = camMetadata.release();
8188 return resultMetadata;
8189}
8190
8191/*===========================================================================
8192 * FUNCTION : saveExifParams
8193 *
8194 * DESCRIPTION: save EXIF debug parameters from the metadata callback into mExifParams
8195 *
8196 * PARAMETERS :
8197 * @metadata : metadata information from callback
8198 *
8199 * RETURN : none
8200 *
8201 *==========================================================================*/
8202void QCamera3HardwareInterface::saveExifParams(metadata_buffer_t *metadata)
8203{
8204 IF_META_AVAILABLE(cam_ae_exif_debug_t, ae_exif_debug_params,
8205 CAM_INTF_META_EXIF_DEBUG_AE, metadata) {
8206 if (mExifParams.debug_params) {
8207 mExifParams.debug_params->ae_debug_params = *ae_exif_debug_params;
8208 mExifParams.debug_params->ae_debug_params_valid = TRUE;
8209 }
8210 }
8211 IF_META_AVAILABLE(cam_awb_exif_debug_t,awb_exif_debug_params,
8212 CAM_INTF_META_EXIF_DEBUG_AWB, metadata) {
8213 if (mExifParams.debug_params) {
8214 mExifParams.debug_params->awb_debug_params = *awb_exif_debug_params;
8215 mExifParams.debug_params->awb_debug_params_valid = TRUE;
8216 }
8217 }
8218 IF_META_AVAILABLE(cam_af_exif_debug_t,af_exif_debug_params,
8219 CAM_INTF_META_EXIF_DEBUG_AF, metadata) {
8220 if (mExifParams.debug_params) {
8221 mExifParams.debug_params->af_debug_params = *af_exif_debug_params;
8222 mExifParams.debug_params->af_debug_params_valid = TRUE;
8223 }
8224 }
8225 IF_META_AVAILABLE(cam_asd_exif_debug_t, asd_exif_debug_params,
8226 CAM_INTF_META_EXIF_DEBUG_ASD, metadata) {
8227 if (mExifParams.debug_params) {
8228 mExifParams.debug_params->asd_debug_params = *asd_exif_debug_params;
8229 mExifParams.debug_params->asd_debug_params_valid = TRUE;
8230 }
8231 }
8232 IF_META_AVAILABLE(cam_stats_buffer_exif_debug_t,stats_exif_debug_params,
8233 CAM_INTF_META_EXIF_DEBUG_STATS, metadata) {
8234 if (mExifParams.debug_params) {
8235 mExifParams.debug_params->stats_debug_params = *stats_exif_debug_params;
8236 mExifParams.debug_params->stats_debug_params_valid = TRUE;
8237 }
8238 }
8239 IF_META_AVAILABLE(cam_bestats_buffer_exif_debug_t,bestats_exif_debug_params,
8240 CAM_INTF_META_EXIF_DEBUG_BESTATS, metadata) {
8241 if (mExifParams.debug_params) {
8242 mExifParams.debug_params->bestats_debug_params = *bestats_exif_debug_params;
8243 mExifParams.debug_params->bestats_debug_params_valid = TRUE;
8244 }
8245 }
8246 IF_META_AVAILABLE(cam_bhist_buffer_exif_debug_t, bhist_exif_debug_params,
8247 CAM_INTF_META_EXIF_DEBUG_BHIST, metadata) {
8248 if (mExifParams.debug_params) {
8249 mExifParams.debug_params->bhist_debug_params = *bhist_exif_debug_params;
8250 mExifParams.debug_params->bhist_debug_params_valid = TRUE;
8251 }
8252 }
8253 IF_META_AVAILABLE(cam_q3a_tuning_info_t, q3a_tuning_exif_debug_params,
8254 CAM_INTF_META_EXIF_DEBUG_3A_TUNING, metadata) {
8255 if (mExifParams.debug_params) {
8256 mExifParams.debug_params->q3a_tuning_debug_params = *q3a_tuning_exif_debug_params;
8257 mExifParams.debug_params->q3a_tuning_debug_params_valid = TRUE;
8258 }
8259 }
8260}
8261
8262/*===========================================================================
8263 * FUNCTION : get3AExifParams
8264 *
8265 * DESCRIPTION: return the cached 3A EXIF parameters (mExifParams)
8266 *
8267 * PARAMETERS : none
8268 *
8269 *
8270 * RETURN : mm_jpeg_exif_params_t
8271 *
8272 *==========================================================================*/
8273mm_jpeg_exif_params_t QCamera3HardwareInterface::get3AExifParams()
8274{
8275 return mExifParams;
8276}
8277
8278/*===========================================================================
8279 * FUNCTION : translateCbUrgentMetadataToResultMetadata
8280 *
8281 * DESCRIPTION: translate urgent (partial 3A) metadata from the HAL callback into
 *              framework result metadata
8282 *
8283 * PARAMETERS :
8284 * @metadata : metadata information from callback
Shuzhen Wang94ddf072017-03-12 19:47:23 -07008285 * @lastUrgentMetadataInBatch: Boolean to indicate whether this is the last
8286 * urgent metadata in a batch. Always true for
8287 * non-batch mode.
Shuzhen Wang181c57b2017-07-21 11:39:44 -07008288 * @frame_number : frame number for this urgent metadata
Shuzhen Wang485e2442017-08-02 12:21:08 -07008289 * @isJumpstartMetadata: Whether this is a partial metadata for jumpstart,
8290 * i.e. even though it doesn't map to a valid partial
8291 * frame number, its metadata entries should be kept.
Thierry Strudel3d639192016-09-09 11:52:26 -07008292 * RETURN : camera_metadata_t*
8293 * metadata in a format specified by fwk
8294 *==========================================================================*/
8295camera_metadata_t*
8296QCamera3HardwareInterface::translateCbUrgentMetadataToResultMetadata
Shuzhen Wang181c57b2017-07-21 11:39:44 -07008297 (metadata_buffer_t *metadata, bool lastUrgentMetadataInBatch,
Shuzhen Wang485e2442017-08-02 12:21:08 -07008298 uint32_t frame_number, bool isJumpstartMetadata)
Thierry Strudel3d639192016-09-09 11:52:26 -07008299{
8300 CameraMetadata camMetadata;
8301 camera_metadata_t *resultMetadata;
8302
Shuzhen Wang485e2442017-08-02 12:21:08 -07008303 if (!lastUrgentMetadataInBatch && !isJumpstartMetadata) {
Shuzhen Wang94ddf072017-03-12 19:47:23 -07008304 /* In batch mode, use empty metadata if this is not the last in batch
8305 */
8306 resultMetadata = allocate_camera_metadata(0, 0);
8307 return resultMetadata;
8308 }
Thierry Strudel3d639192016-09-09 11:52:26 -07008309
8310 IF_META_AVAILABLE(uint32_t, whiteBalanceState, CAM_INTF_META_AWB_STATE, metadata) {
8311 uint8_t fwk_whiteBalanceState = (uint8_t) *whiteBalanceState;
8312 camMetadata.update(ANDROID_CONTROL_AWB_STATE, &fwk_whiteBalanceState, 1);
8313 LOGD("urgent Metadata : ANDROID_CONTROL_AWB_STATE %u", *whiteBalanceState);
8314 }
8315
8316 IF_META_AVAILABLE(cam_trigger_t, aecTrigger, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER, metadata) {
8317 camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
8318 &aecTrigger->trigger, 1);
8319 camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID,
8320 &aecTrigger->trigger_id, 1);
8321 LOGD("urgent Metadata : CAM_INTF_META_AEC_PRECAPTURE_TRIGGER: %d",
8322 aecTrigger->trigger);
8323 LOGD("urgent Metadata : ANDROID_CONTROL_AE_PRECAPTURE_ID: %d",
8324 aecTrigger->trigger_id);
8325 }
8326
8327 IF_META_AVAILABLE(uint32_t, ae_state, CAM_INTF_META_AEC_STATE, metadata) {
8328 uint8_t fwk_ae_state = (uint8_t) *ae_state;
8329 camMetadata.update(ANDROID_CONTROL_AE_STATE, &fwk_ae_state, 1);
8330 LOGD("urgent Metadata : ANDROID_CONTROL_AE_STATE %u", *ae_state);
8331 }
8332
Shuzhen Wang181c57b2017-07-21 11:39:44 -07008333 IF_META_AVAILABLE(uint32_t, focusMode, CAM_INTF_PARM_FOCUS_MODE, metadata) {
8334 int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP), *focusMode);
8335 if (NAME_NOT_FOUND != val) {
8336 uint8_t fwkAfMode = (uint8_t)val;
8337 camMetadata.update(ANDROID_CONTROL_AF_MODE, &fwkAfMode, 1);
8338 LOGD("urgent Metadata : ANDROID_CONTROL_AF_MODE %d", val);
8339 } else {
8340 LOGH("urgent Metadata not found : ANDROID_CONTROL_AF_MODE %d",
8341 val);
8342 }
Thierry Strudel3d639192016-09-09 11:52:26 -07008343 }
8344
Shuzhen Wang181c57b2017-07-21 11:39:44 -07008345 IF_META_AVAILABLE(cam_trigger_t, af_trigger, CAM_INTF_META_AF_TRIGGER, metadata) {
8346 LOGD("urgent Metadata : CAM_INTF_META_AF_TRIGGER = %d",
8347 af_trigger->trigger);
8348 LOGD("urgent Metadata : ANDROID_CONTROL_AF_TRIGGER_ID = %d",
8349 af_trigger->trigger_id);
8350
8351 IF_META_AVAILABLE(uint32_t, afState, CAM_INTF_META_AF_STATE, metadata) {
8352 mAfTrigger = *af_trigger;
8353 uint32_t fwk_AfState = (uint32_t) *afState;
8354
8355 // If this is the result for a new trigger, check if there is new early
8356 // af state. If there is, use the last af state for all results
8357 // preceding current partial frame number.
8358 for (auto & pendingRequest : mPendingRequestsList) {
8359 if (pendingRequest.frame_number < frame_number) {
8360 pendingRequest.focusStateValid = true;
8361 pendingRequest.focusState = fwk_AfState;
8362 } else if (pendingRequest.frame_number == frame_number) {
8363 IF_META_AVAILABLE(uint32_t, earlyAfState, CAM_INTF_META_EARLY_AF_STATE, metadata) {
8364 // Check if early AF state for trigger exists. If yes, send AF state as
8365 // partial result for better latency.
8366 uint8_t fwkEarlyAfState = (uint8_t) *earlyAfState;
8367 pendingRequest.focusStateSent = true;
8368 camMetadata.update(ANDROID_CONTROL_AF_STATE, &fwkEarlyAfState, 1);
8369 LOGD("urgent Metadata(%d) : ANDROID_CONTROL_AF_STATE %u",
8370 frame_number, fwkEarlyAfState);
8371 }
8372 }
8373 }
8374 }
8375 }
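    // mAfTrigger caches the most recent AF trigger, so the trigger and trigger id
    // below are reported even when this metadata buffer carried no new
    // CAM_INTF_META_AF_TRIGGER entry.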
8376 camMetadata.update(ANDROID_CONTROL_AF_TRIGGER,
8377 &mAfTrigger.trigger, 1);
8378 camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, &mAfTrigger.trigger_id, 1);
8379
Shuzhen Wang6ce35e62017-03-27 18:00:41 -07008380 IF_META_AVAILABLE(cam_area_t, hAfRegions, CAM_INTF_META_AF_ROI, metadata) {
8381 /*af regions*/
Shuzhen Wang91c14e82017-08-21 17:56:57 -07008382 cam_rect_t hAfRect = hAfRegions->rect;
Shuzhen Wang6ce35e62017-03-27 18:00:41 -07008383 int32_t afRegions[REGIONS_TUPLE_COUNT];
8384 // Adjust crop region from sensor output coordinate system to active
8385 // array coordinate system.
Shuzhen Wang91c14e82017-08-21 17:56:57 -07008386 mCropRegionMapper.toActiveArray(hAfRect.left, hAfRect.top,
8387 hAfRect.width, hAfRect.height);
Shuzhen Wang6ce35e62017-03-27 18:00:41 -07008388
Shuzhen Wang91c14e82017-08-21 17:56:57 -07008389 convertToRegions(hAfRect, afRegions, hAfRegions->weight);
Shuzhen Wang6ce35e62017-03-27 18:00:41 -07008390 camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions,
8391 REGIONS_TUPLE_COUNT);
8392 LOGD("Metadata : ANDROID_CONTROL_AF_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
8393 afRegions[0], afRegions[1], afRegions[2], afRegions[3],
Shuzhen Wang91c14e82017-08-21 17:56:57 -07008394 hAfRect.left, hAfRect.top, hAfRect.width,
8395 hAfRect.height);
Shuzhen Wang6ce35e62017-03-27 18:00:41 -07008396 }
8397
Shuzhen Wangcc386c52017-03-29 09:28:08 -07008398 // AF region confidence
8399 IF_META_AVAILABLE(int32_t, afRegionConfidence, CAM_INTF_META_AF_REGIONS_CONFIDENCE, metadata) {
8400 camMetadata.update(NEXUS_EXPERIMENTAL_2017_AF_REGIONS_CONFIDENCE, afRegionConfidence, 1);
8401 }
8402
Thierry Strudel3d639192016-09-09 11:52:26 -07008403 IF_META_AVAILABLE(int32_t, whiteBalance, CAM_INTF_PARM_WHITE_BALANCE, metadata) {
8404 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
8405 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP), *whiteBalance);
8406 if (NAME_NOT_FOUND != val) {
8407 uint8_t fwkWhiteBalanceMode = (uint8_t)val;
8408 camMetadata.update(ANDROID_CONTROL_AWB_MODE, &fwkWhiteBalanceMode, 1);
8409 LOGD("urgent Metadata : ANDROID_CONTROL_AWB_MODE %d", val);
8410 } else {
8411 LOGH("urgent Metadata not found : ANDROID_CONTROL_AWB_MODE");
8412 }
8413 }
8414
8415 uint8_t fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
8416 uint32_t aeMode = CAM_AE_MODE_MAX;
8417 int32_t flashMode = CAM_FLASH_MODE_MAX;
8418 int32_t redeye = -1;
8419 IF_META_AVAILABLE(uint32_t, pAeMode, CAM_INTF_META_AEC_MODE, metadata) {
8420 aeMode = *pAeMode;
8421 }
8422 IF_META_AVAILABLE(int32_t, pFlashMode, CAM_INTF_PARM_LED_MODE, metadata) {
8423 flashMode = *pFlashMode;
8424 }
8425 IF_META_AVAILABLE(int32_t, pRedeye, CAM_INTF_PARM_REDEYE_REDUCTION, metadata) {
8426 redeye = *pRedeye;
8427 }
8428
8429 if (1 == redeye) {
8430 fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
8431 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8432 } else if ((CAM_FLASH_MODE_AUTO == flashMode) || (CAM_FLASH_MODE_ON == flashMode)) {
8433 int val = lookupFwkName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
8434 flashMode);
8435 if (NAME_NOT_FOUND != val) {
8436 fwk_aeMode = (uint8_t)val;
8437 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8438 } else {
8439 LOGE("Unsupported flash mode %d", flashMode);
8440 }
8441 } else if (aeMode == CAM_AE_MODE_ON) {
8442 fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON;
8443 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8444 } else if (aeMode == CAM_AE_MODE_OFF) {
8445 fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
8446 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -08008447 } else if (aeMode == CAM_AE_MODE_ON_EXTERNAL_FLASH) {
8448 fwk_aeMode = NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH;
8449 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -07008450 } else {
8451 LOGE("Not enough info to deduce ANDROID_CONTROL_AE_MODE redeye:%d, "
8452 "flashMode:%d, aeMode:%u!!!",
8453 redeye, flashMode, aeMode);
8454 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07008455 if (mInstantAEC) {
8456        // Increment frame index count until a bound is reached for instant AEC.
8457 mInstantAecFrameIdxCount++;
8458 IF_META_AVAILABLE(cam_3a_params_t, ae_params,
8459 CAM_INTF_META_AEC_INFO, metadata) {
8460 LOGH("ae_params->settled = %d",ae_params->settled);
8461            // If AEC has settled, or the number of frames has reached the bound value,
8462            // reset instant AEC.
8463 if (ae_params->settled ||
8464 (mInstantAecFrameIdxCount > mAecSkipDisplayFrameBound)) {
8465 LOGH("AEC settled or Frames reached instantAEC bound, resetting instantAEC");
8466 mInstantAEC = false;
8467 mResetInstantAEC = true;
8468 mInstantAecFrameIdxCount = 0;
8469 }
8470 }
8471 }
Thierry Strudel3d639192016-09-09 11:52:26 -07008472 resultMetadata = camMetadata.release();
8473 return resultMetadata;
8474}
8475
8476/*===========================================================================
8477 * FUNCTION : dumpMetadataToFile
8478 *
8479 * DESCRIPTION: Dumps tuning metadata to file system
8480 *
8481 * PARAMETERS :
8482 * @meta : tuning metadata
8483 * @dumpFrameCount : current dump frame count
8484 * @enabled : Enable mask
8485 *
8486 *==========================================================================*/
8487void QCamera3HardwareInterface::dumpMetadataToFile(tuning_params_t &meta,
8488 uint32_t &dumpFrameCount,
8489 bool enabled,
8490 const char *type,
8491 uint32_t frameNumber)
8492{
8493 //Some sanity checks
8494 if (meta.tuning_sensor_data_size > TUNING_SENSOR_DATA_MAX) {
8495 LOGE("Tuning sensor data size bigger than expected %d: %d",
8496 meta.tuning_sensor_data_size,
8497 TUNING_SENSOR_DATA_MAX);
8498 return;
8499 }
8500
8501 if (meta.tuning_vfe_data_size > TUNING_VFE_DATA_MAX) {
8502 LOGE("Tuning VFE data size bigger than expected %d: %d",
8503 meta.tuning_vfe_data_size,
8504 TUNING_VFE_DATA_MAX);
8505 return;
8506 }
8507
8508 if (meta.tuning_cpp_data_size > TUNING_CPP_DATA_MAX) {
8509 LOGE("Tuning CPP data size bigger than expected %d: %d",
8510 meta.tuning_cpp_data_size,
8511 TUNING_CPP_DATA_MAX);
8512 return;
8513 }
8514
8515 if (meta.tuning_cac_data_size > TUNING_CAC_DATA_MAX) {
8516 LOGE("Tuning CAC data size bigger than expected %d: %d",
8517 meta.tuning_cac_data_size,
8518 TUNING_CAC_DATA_MAX);
8519 return;
8520 }
8521 //
8522
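    // The dump file mirrors the in-memory tuning blob layout: the uint32 data
    // version, five uint32 size words (sensor, VFE, CPP, CAC, mod3 with mod3
    // forced to 0), followed by the sensor, VFE, CPP and CAC payloads in order.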
8523 if(enabled){
8524 char timeBuf[FILENAME_MAX];
8525 char buf[FILENAME_MAX];
8526 memset(buf, 0, sizeof(buf));
8527 memset(timeBuf, 0, sizeof(timeBuf));
8528 time_t current_time;
8529 struct tm * timeinfo;
8530 time (&current_time);
8531 timeinfo = localtime (&current_time);
8532 if (timeinfo != NULL) {
8533 strftime (timeBuf, sizeof(timeBuf),
8534 QCAMERA_DUMP_FRM_LOCATION"%Y%m%d%H%M%S", timeinfo);
8535 }
8536 String8 filePath(timeBuf);
8537 snprintf(buf,
8538 sizeof(buf),
8539 "%dm_%s_%d.bin",
8540 dumpFrameCount,
8541 type,
8542 frameNumber);
8543 filePath.append(buf);
8544 int file_fd = open(filePath.string(), O_RDWR | O_CREAT, 0777);
8545 if (file_fd >= 0) {
8546 ssize_t written_len = 0;
8547 meta.tuning_data_version = TUNING_DATA_VERSION;
8548 void *data = (void *)((uint8_t *)&meta.tuning_data_version);
8549 written_len += write(file_fd, data, sizeof(uint32_t));
8550 data = (void *)((uint8_t *)&meta.tuning_sensor_data_size);
8551 LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
8552 written_len += write(file_fd, data, sizeof(uint32_t));
8553 data = (void *)((uint8_t *)&meta.tuning_vfe_data_size);
8554 LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
8555 written_len += write(file_fd, data, sizeof(uint32_t));
8556 data = (void *)((uint8_t *)&meta.tuning_cpp_data_size);
8557 LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
8558 written_len += write(file_fd, data, sizeof(uint32_t));
8559 data = (void *)((uint8_t *)&meta.tuning_cac_data_size);
8560 LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
8561 written_len += write(file_fd, data, sizeof(uint32_t));
8562 meta.tuning_mod3_data_size = 0;
8563 data = (void *)((uint8_t *)&meta.tuning_mod3_data_size);
8564 LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
8565 written_len += write(file_fd, data, sizeof(uint32_t));
8566 size_t total_size = meta.tuning_sensor_data_size;
8567 data = (void *)((uint8_t *)&meta.data);
8568 written_len += write(file_fd, data, total_size);
8569 total_size = meta.tuning_vfe_data_size;
8570 data = (void *)((uint8_t *)&meta.data[TUNING_VFE_DATA_OFFSET]);
8571 written_len += write(file_fd, data, total_size);
8572 total_size = meta.tuning_cpp_data_size;
8573 data = (void *)((uint8_t *)&meta.data[TUNING_CPP_DATA_OFFSET]);
8574 written_len += write(file_fd, data, total_size);
8575 total_size = meta.tuning_cac_data_size;
8576 data = (void *)((uint8_t *)&meta.data[TUNING_CAC_DATA_OFFSET]);
8577 written_len += write(file_fd, data, total_size);
8578 close(file_fd);
8579 }else {
8580 LOGE("fail to open file for metadata dumping");
8581 }
8582 }
8583}
8584
8585/*===========================================================================
8586 * FUNCTION : cleanAndSortStreamInfo
8587 *
8588 * DESCRIPTION: helper method to clean up invalid streams in stream_info,
8589 *              and sort them such that the raw stream is at the end of the list.
8590 *              This is a workaround for a camera daemon constraint.
8591 *
8592 * PARAMETERS : None
8593 *
8594 *==========================================================================*/
8595void QCamera3HardwareInterface::cleanAndSortStreamInfo()
8596{
8597 List<stream_info_t *> newStreamInfo;
8598
8599 /*clean up invalid streams*/
8600 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
8601 it != mStreamInfo.end();) {
8602 if(((*it)->status) == INVALID){
8603 QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
8604 delete channel;
8605 free(*it);
8606 it = mStreamInfo.erase(it);
8607 } else {
8608 it++;
8609 }
8610 }
8611
8612 // Move preview/video/callback/snapshot streams into newList
8613 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
8614 it != mStreamInfo.end();) {
8615 if ((*it)->stream->format != HAL_PIXEL_FORMAT_RAW_OPAQUE &&
8616 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW10 &&
8617 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW16) {
8618 newStreamInfo.push_back(*it);
8619 it = mStreamInfo.erase(it);
8620 } else
8621 it++;
8622 }
8623 // Move raw streams into newList
8624 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
8625 it != mStreamInfo.end();) {
8626 newStreamInfo.push_back(*it);
8627 it = mStreamInfo.erase(it);
8628 }
8629
8630 mStreamInfo = newStreamInfo;
8631}
8632
8633/*===========================================================================
8634 * FUNCTION : extractJpegMetadata
8635 *
8636 * DESCRIPTION: helper method to extract Jpeg metadata from capture request.
8637 * JPEG metadata is cached in HAL, and return as part of capture
8638 * result when metadata is returned from camera daemon.
8639 *
8640 * PARAMETERS : @jpegMetadata: jpeg metadata to be extracted
8641 * @request: capture request
8642 *
8643 *==========================================================================*/
8644void QCamera3HardwareInterface::extractJpegMetadata(
8645 CameraMetadata& jpegMetadata,
8646 const camera3_capture_request_t *request)
8647{
8648 CameraMetadata frame_settings;
8649 frame_settings = request->settings;
8650
8651 if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES))
8652 jpegMetadata.update(ANDROID_JPEG_GPS_COORDINATES,
8653 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d,
8654 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).count);
8655
8656 if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD))
8657 jpegMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD,
8658 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8,
8659 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).count);
8660
8661 if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP))
8662 jpegMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP,
8663 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64,
8664 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).count);
8665
8666 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION))
8667 jpegMetadata.update(ANDROID_JPEG_ORIENTATION,
8668 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32,
8669 frame_settings.find(ANDROID_JPEG_ORIENTATION).count);
8670
8671 if (frame_settings.exists(ANDROID_JPEG_QUALITY))
8672 jpegMetadata.update(ANDROID_JPEG_QUALITY,
8673 frame_settings.find(ANDROID_JPEG_QUALITY).data.u8,
8674 frame_settings.find(ANDROID_JPEG_QUALITY).count);
8675
8676 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY))
8677 jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY,
8678 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8,
8679 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).count);
8680
8681 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
8682 int32_t thumbnail_size[2];
8683 thumbnail_size[0] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
8684 thumbnail_size[1] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
8685 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
8686 int32_t orientation =
8687 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008688 if ((!needJpegExifRotation()) && ((orientation == 90) || (orientation == 270))) {
Thierry Strudel3d639192016-09-09 11:52:26 -07008689 //swap thumbnail dimensions for rotations 90 and 270 in jpeg metadata.
8690 int32_t temp;
8691 temp = thumbnail_size[0];
8692 thumbnail_size[0] = thumbnail_size[1];
8693 thumbnail_size[1] = temp;
8694 }
8695 }
8696 jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE,
8697 thumbnail_size,
8698 frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
8699 }
8700
8701}
8702
8703/*===========================================================================
8704 * FUNCTION : convertToRegions
8705 *
8706 * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array
8707 *
8708 * PARAMETERS :
8709 * @rect : cam_rect_t struct to convert
8710 * @region : int32_t destination array
8711 * @weight : if we are converting from cam_area_t, weight is valid
8712 * else weight = -1
8713 *
8714 *==========================================================================*/
8715void QCamera3HardwareInterface::convertToRegions(cam_rect_t rect,
8716 int32_t *region, int weight)
8717{
Jason Lee8ce36fa2017-04-19 19:40:37 -07008718 region[FACE_LEFT] = rect.left;
8719 region[FACE_TOP] = rect.top;
8720 region[FACE_RIGHT] = rect.left + rect.width;
8721 region[FACE_BOTTOM] = rect.top + rect.height;
Thierry Strudel3d639192016-09-09 11:52:26 -07008722 if (weight > -1) {
Jason Lee8ce36fa2017-04-19 19:40:37 -07008723 region[FACE_WEIGHT] = weight;
Thierry Strudel3d639192016-09-09 11:52:26 -07008724 }
8725}
8726
8727/*===========================================================================
8728 * FUNCTION : convertFromRegions
8729 *
8730 * DESCRIPTION: helper method to convert a framework region array into cam_area_t
8731 *
8732 * PARAMETERS :
8733 *   @roi            : cam_area_t to be filled with the converted region
8734 *   @frame_settings : capture request settings to read the region entry from
8735 *   @tag            : metadata tag of the region entry, whose data is
8736 *                     (xmin, ymin, xmax, ymax, weight)
8737 *
8738 *==========================================================================*/
8739void QCamera3HardwareInterface::convertFromRegions(cam_area_t &roi,
Chien-Yu Chen92724a82017-01-06 11:50:30 -08008740 const CameraMetadata &frame_settings, uint32_t tag)
Thierry Strudel3d639192016-09-09 11:52:26 -07008741{
Thierry Strudel3d639192016-09-09 11:52:26 -07008742 int32_t x_min = frame_settings.find(tag).data.i32[0];
8743 int32_t y_min = frame_settings.find(tag).data.i32[1];
8744 int32_t x_max = frame_settings.find(tag).data.i32[2];
8745 int32_t y_max = frame_settings.find(tag).data.i32[3];
8746 roi.weight = frame_settings.find(tag).data.i32[4];
8747 roi.rect.left = x_min;
8748 roi.rect.top = y_min;
8749 roi.rect.width = x_max - x_min;
8750 roi.rect.height = y_max - y_min;
8751}
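// Illustrative example (hypothetical values): a framework region of
// {100, 200, 300, 400, 1}, i.e. (xmin, ymin, xmax, ymax, weight), is stored as
// roi.rect = {left=100, top=200, width=200, height=200} with roi.weight = 1;
// convertToRegions() performs the inverse packing.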
8752
8753/*===========================================================================
8754 * FUNCTION : resetIfNeededROI
8755 *
8756 * DESCRIPTION: helper method to reset the roi if it is greater than scaler
8757 * crop region
8758 *
8759 * PARAMETERS :
8760 * @roi : cam_area_t struct to resize
8761 * @scalerCropRegion : cam_crop_region_t region to compare against
8762 *
8763 *
8764 *==========================================================================*/
8765bool QCamera3HardwareInterface::resetIfNeededROI(cam_area_t* roi,
8766 const cam_crop_region_t* scalerCropRegion)
8767{
8768 int32_t roi_x_max = roi->rect.width + roi->rect.left;
8769 int32_t roi_y_max = roi->rect.height + roi->rect.top;
8770 int32_t crop_x_max = scalerCropRegion->width + scalerCropRegion->left;
8771 int32_t crop_y_max = scalerCropRegion->height + scalerCropRegion->top;
8772
8773    /* According to the spec, weight = 0 indicates that the ROI should be disabled.
8774     * Without this check, the validation below (whether the ROI lies inside the
8775     * scaler crop region) would fail, the ROI would not be reset, and the
8776     * algorithm would keep using a stale ROI window
8777 */
8778 if (roi->weight == 0) {
8779 return true;
8780 }
8781
8782 if ((roi_x_max < scalerCropRegion->left) ||
8783        // right edge of roi window is left of scaler crop's left edge
8784 (roi_y_max < scalerCropRegion->top) ||
8785        // bottom edge of roi window is above scaler crop's top edge
8786 (roi->rect.left > crop_x_max) ||
8787        // left edge of roi window is beyond (to the right of) scaler crop's right edge
8788 (roi->rect.top > crop_y_max)){
8789        // top edge of roi window is below scaler crop's bottom edge
8790 return false;
8791 }
8792 if (roi->rect.left < scalerCropRegion->left) {
8793 roi->rect.left = scalerCropRegion->left;
8794 }
8795 if (roi->rect.top < scalerCropRegion->top) {
8796 roi->rect.top = scalerCropRegion->top;
8797 }
8798 if (roi_x_max > crop_x_max) {
8799 roi_x_max = crop_x_max;
8800 }
8801 if (roi_y_max > crop_y_max) {
8802 roi_y_max = crop_y_max;
8803 }
8804 roi->rect.width = roi_x_max - roi->rect.left;
8805 roi->rect.height = roi_y_max - roi->rect.top;
8806 return true;
8807}
8808
8809/*===========================================================================
8810 * FUNCTION : convertLandmarks
8811 *
8812 * DESCRIPTION: helper method to extract the landmarks from face detection info
8813 *
8814 * PARAMETERS :
8815 * @landmark_data : input landmark data to be converted
8816 * @landmarks : int32_t destination array
8817 *
8818 *
8819 *==========================================================================*/
8820void QCamera3HardwareInterface::convertLandmarks(
8821 cam_face_landmarks_info_t landmark_data,
8822 int32_t *landmarks)
8823{
Thierry Strudel04e026f2016-10-10 11:27:36 -07008824 if (landmark_data.is_left_eye_valid) {
8825 landmarks[LEFT_EYE_X] = (int32_t)landmark_data.left_eye_center.x;
8826 landmarks[LEFT_EYE_Y] = (int32_t)landmark_data.left_eye_center.y;
8827 } else {
8828 landmarks[LEFT_EYE_X] = FACE_INVALID_POINT;
8829 landmarks[LEFT_EYE_Y] = FACE_INVALID_POINT;
8830 }
8831
8832 if (landmark_data.is_right_eye_valid) {
8833 landmarks[RIGHT_EYE_X] = (int32_t)landmark_data.right_eye_center.x;
8834 landmarks[RIGHT_EYE_Y] = (int32_t)landmark_data.right_eye_center.y;
8835 } else {
8836 landmarks[RIGHT_EYE_X] = FACE_INVALID_POINT;
8837 landmarks[RIGHT_EYE_Y] = FACE_INVALID_POINT;
8838 }
8839
8840 if (landmark_data.is_mouth_valid) {
8841 landmarks[MOUTH_X] = (int32_t)landmark_data.mouth_center.x;
8842 landmarks[MOUTH_Y] = (int32_t)landmark_data.mouth_center.y;
8843 } else {
8844 landmarks[MOUTH_X] = FACE_INVALID_POINT;
8845 landmarks[MOUTH_Y] = FACE_INVALID_POINT;
8846 }
8847}
8848
8849/*===========================================================================
8850 * FUNCTION : setInvalidLandmarks
8851 *
8852 * DESCRIPTION: helper method to set invalid landmarks
8853 *
8854 * PARAMETERS :
8855 * @landmarks : int32_t destination array
8856 *
8857 *
8858 *==========================================================================*/
8859void QCamera3HardwareInterface::setInvalidLandmarks(
8860 int32_t *landmarks)
8861{
8862 landmarks[LEFT_EYE_X] = FACE_INVALID_POINT;
8863 landmarks[LEFT_EYE_Y] = FACE_INVALID_POINT;
8864 landmarks[RIGHT_EYE_X] = FACE_INVALID_POINT;
8865 landmarks[RIGHT_EYE_Y] = FACE_INVALID_POINT;
8866 landmarks[MOUTH_X] = FACE_INVALID_POINT;
8867 landmarks[MOUTH_Y] = FACE_INVALID_POINT;
Thierry Strudel3d639192016-09-09 11:52:26 -07008868}
8869
8870#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008871
8872/*===========================================================================
8873 * FUNCTION : getCapabilities
8874 *
8875 * DESCRIPTION: query camera capability from back-end
8876 *
8877 * PARAMETERS :
8878 * @ops : mm-interface ops structure
8879 * @cam_handle : camera handle for which we need capability
8880 *
8881 * RETURN : ptr type of capability structure
8882 * capability for success
8883 * NULL for failure
8884 *==========================================================================*/
8885cam_capability_t *QCamera3HardwareInterface::getCapabilities(mm_camera_ops_t *ops,
8886 uint32_t cam_handle)
8887{
8888 int rc = NO_ERROR;
8889 QCamera3HeapMemory *capabilityHeap = NULL;
8890 cam_capability_t *cap_ptr = NULL;
8891
8892 if (ops == NULL) {
8893 LOGE("Invalid arguments");
8894 return NULL;
8895 }
8896
8897 capabilityHeap = new QCamera3HeapMemory(1);
8898 if (capabilityHeap == NULL) {
8899 LOGE("creation of capabilityHeap failed");
8900 return NULL;
8901 }
8902
8903 /* Allocate memory for capability buffer */
8904 rc = capabilityHeap->allocate(sizeof(cam_capability_t));
8905 if(rc != OK) {
8906 LOGE("No memory for cappability");
8907        LOGE("No memory for capability");
8908 }
8909
8910 /* Map memory for capability buffer */
8911 memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t));
8912
8913 rc = ops->map_buf(cam_handle,
8914 CAM_MAPPING_BUF_TYPE_CAPABILITY, capabilityHeap->getFd(0),
8915 sizeof(cam_capability_t), capabilityHeap->getPtr(0));
8916 if(rc < 0) {
8917 LOGE("failed to map capability buffer");
8918 rc = FAILED_TRANSACTION;
8919 goto map_failed;
8920 }
8921
8922 /* Query Capability */
8923 rc = ops->query_capability(cam_handle);
8924 if(rc < 0) {
8925 LOGE("failed to query capability");
8926 rc = FAILED_TRANSACTION;
8927 goto query_failed;
8928 }
8929
8930 cap_ptr = (cam_capability_t *)malloc(sizeof(cam_capability_t));
8931 if (cap_ptr == NULL) {
8932 LOGE("out of memory");
8933 rc = NO_MEMORY;
8934 goto query_failed;
8935 }
8936
8937 memset(cap_ptr, 0, sizeof(cam_capability_t));
8938 memcpy(cap_ptr, DATA_PTR(capabilityHeap, 0), sizeof(cam_capability_t));
8939
8940 int index;
8941 for (index = 0; index < CAM_ANALYSIS_INFO_MAX; index++) {
8942 cam_analysis_info_t *p_analysis_info = &cap_ptr->analysis_info[index];
8943 p_analysis_info->analysis_padding_info.offset_info.offset_x = 0;
8944 p_analysis_info->analysis_padding_info.offset_info.offset_y = 0;
8945 }
8946
8947query_failed:
8948 ops->unmap_buf(cam_handle, CAM_MAPPING_BUF_TYPE_CAPABILITY);
8949map_failed:
8950 capabilityHeap->deallocate();
8951allocate_failed:
8952 delete capabilityHeap;
8953
8954 if (rc != NO_ERROR) {
8955 return NULL;
8956 } else {
8957 return cap_ptr;
8958 }
8959}
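/*
 * Illustrative sketch (not part of the HAL build): querying the capability of
 * one camera through an already-opened mm-camera handle and releasing the
 * heap-allocated copy when done. "vtbl" is a hypothetical mm_camera_vtbl_t *
 * obtained from camera_open(); error handling is omitted.
 *
 *   uint32_t mainHandle = get_main_camera_handle(vtbl->camera_handle);
 *   cam_capability_t *caps = getCapabilities(vtbl->ops, mainHandle);
 *   if (caps != NULL) {
 *       // caps is malloc'ed by getCapabilities; the caller owns and frees it.
 *       free(caps);
 *   }
 */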
8960
Thierry Strudel3d639192016-09-09 11:52:26 -07008961/*===========================================================================
8962 * FUNCTION : initCapabilities
8963 *
8964 * DESCRIPTION: initialize camera capabilities in static data struct
8965 *
8966 * PARAMETERS :
8967 * @cameraId : camera Id
8968 *
8969 * RETURN : int32_t type of status
8970 * NO_ERROR -- success
8971 *              non-zero failure code
8972 *==========================================================================*/
8973int QCamera3HardwareInterface::initCapabilities(uint32_t cameraId)
8974{
8975 int rc = 0;
8976 mm_camera_vtbl_t *cameraHandle = NULL;
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008977 uint32_t handle = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07008978
8979 rc = camera_open((uint8_t)cameraId, &cameraHandle);
8980 if (rc) {
8981 LOGE("camera_open failed. rc = %d", rc);
8982 goto open_failed;
8983 }
8984 if (!cameraHandle) {
8985 LOGE("camera_open failed. cameraHandle = %p", cameraHandle);
8986 goto open_failed;
8987 }
8988
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008989 handle = get_main_camera_handle(cameraHandle->camera_handle);
8990 gCamCapability[cameraId] = getCapabilities(cameraHandle->ops, handle);
8991 if (gCamCapability[cameraId] == NULL) {
8992 rc = FAILED_TRANSACTION;
8993 goto failed_op;
Thierry Strudel3d639192016-09-09 11:52:26 -07008994 }
8995
Thierry Strudel295a0ca2016-11-03 18:38:47 -07008996 gCamCapability[cameraId]->camera_index = cameraId;
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008997 if (is_dual_camera_by_idx(cameraId)) {
8998 handle = get_aux_camera_handle(cameraHandle->camera_handle);
8999 gCamCapability[cameraId]->aux_cam_cap =
9000 getCapabilities(cameraHandle->ops, handle);
9001 if (gCamCapability[cameraId]->aux_cam_cap == NULL) {
9002 rc = FAILED_TRANSACTION;
9003 free(gCamCapability[cameraId]);
9004 goto failed_op;
9005 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08009006
9007 // Copy the main camera capability to main_cam_cap struct
9008 gCamCapability[cameraId]->main_cam_cap =
9009 (cam_capability_t *)malloc(sizeof(cam_capability_t));
9010 if (gCamCapability[cameraId]->main_cam_cap == NULL) {
9011 LOGE("out of memory");
9012 rc = NO_MEMORY;
9013 goto failed_op;
9014 }
9015 memcpy(gCamCapability[cameraId]->main_cam_cap, gCamCapability[cameraId],
9016 sizeof(cam_capability_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07009017 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07009018failed_op:
Thierry Strudel3d639192016-09-09 11:52:26 -07009019 cameraHandle->ops->close_camera(cameraHandle->camera_handle);
9020 cameraHandle = NULL;
9021open_failed:
9022 return rc;
9023}
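/*
 * Illustrative note (not part of the HAL build): for a dual-camera index,
 * gCamCapability[cameraId] ends up holding three related blocks after
 * initCapabilities(): the main capability itself, aux_cam_cap (queried through
 * the aux handle), and main_cam_cap (a malloc'ed copy of the main capability),
 * so later code can address either sensor explicitly.
 */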
9024
9025/*==========================================================================
9026 * FUNCTION   : get3AVersion
9027 *
9028 * DESCRIPTION: get the Q3A S/W version
9029 *
9030 * PARAMETERS :
9031 * @sw_version: Reference of Q3A structure which will hold version info upon
9032 * return
9033 *
9034 * RETURN : None
9035 *
9036 *==========================================================================*/
9037void QCamera3HardwareInterface::get3AVersion(cam_q3a_version_t &sw_version)
9038{
9039 if(gCamCapability[mCameraId])
9040 sw_version = gCamCapability[mCameraId]->q3a_version;
9041 else
9042 LOGE("Capability structure NULL!");
9043}
9044
9045
9046/*===========================================================================
9047 * FUNCTION : initParameters
9048 *
9049 * DESCRIPTION: initialize camera parameters
9050 *
9051 * PARAMETERS :
9052 *
9053 * RETURN : int32_t type of status
9054 * NO_ERROR -- success
9055 *              non-zero failure code
9056 *==========================================================================*/
9057int QCamera3HardwareInterface::initParameters()
9058{
9059 int rc = 0;
9060
9061 //Allocate Set Param Buffer
9062 mParamHeap = new QCamera3HeapMemory(1);
9063 rc = mParamHeap->allocate(sizeof(metadata_buffer_t));
9064 if(rc != OK) {
9065 rc = NO_MEMORY;
9066 LOGE("Failed to allocate SETPARM Heap memory");
9067 delete mParamHeap;
9068 mParamHeap = NULL;
9069 return rc;
9070 }
9071
9072 //Map memory for parameters buffer
9073 rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
9074 CAM_MAPPING_BUF_TYPE_PARM_BUF,
9075 mParamHeap->getFd(0),
9076 sizeof(metadata_buffer_t),
9077 (metadata_buffer_t *) DATA_PTR(mParamHeap,0));
9078 if(rc < 0) {
9079 LOGE("failed to map SETPARM buffer");
9080 rc = FAILED_TRANSACTION;
9081 mParamHeap->deallocate();
9082 delete mParamHeap;
9083 mParamHeap = NULL;
9084 return rc;
9085 }
9086
9087 mParameters = (metadata_buffer_t *) DATA_PTR(mParamHeap,0);
9088
9089 mPrevParameters = (metadata_buffer_t *)malloc(sizeof(metadata_buffer_t));
9090 return rc;
9091}
9092
9093/*===========================================================================
9094 * FUNCTION : deinitParameters
9095 *
9096 * DESCRIPTION: de-initialize camera parameters
9097 *
9098 * PARAMETERS :
9099 *
9100 * RETURN : NONE
9101 *==========================================================================*/
9102void QCamera3HardwareInterface::deinitParameters()
9103{
9104 mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
9105 CAM_MAPPING_BUF_TYPE_PARM_BUF);
9106
9107 mParamHeap->deallocate();
9108 delete mParamHeap;
9109 mParamHeap = NULL;
9110
9111 mParameters = NULL;
9112
9113 free(mPrevParameters);
9114 mPrevParameters = NULL;
9115}
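/*
 * Illustrative note (not part of the HAL build): initParameters() and
 * deinitParameters() are a strict pair around one shared metadata buffer:
 *
 *   allocate(sizeof(metadata_buffer_t))
 *       -> map_buf(CAM_MAPPING_BUF_TYPE_PARM_BUF, fd, size, ptr)  // share with backend
 *       -> ... use mParameters / mPrevParameters ...
 *       -> unmap_buf(CAM_MAPPING_BUF_TYPE_PARM_BUF)
 *       -> deallocate(), delete heap, free(mPrevParameters)
 *
 * Unmapping before deallocating matters: otherwise the backend could keep a
 * mapping to memory that has already been freed.
 */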
9116
9117/*===========================================================================
9118 * FUNCTION : calcMaxJpegSize
9119 *
9120 * DESCRIPTION: Calculates maximum jpeg size supported by the cameraId
9121 *
9122 * PARAMETERS :
9123 *
9124 * RETURN : max_jpeg_size
9125 *==========================================================================*/
9126size_t QCamera3HardwareInterface::calcMaxJpegSize(uint32_t camera_id)
9127{
9128 size_t max_jpeg_size = 0;
9129 size_t temp_width, temp_height;
9130 size_t count = MIN(gCamCapability[camera_id]->picture_sizes_tbl_cnt,
9131 MAX_SIZES_CNT);
9132 for (size_t i = 0; i < count; i++) {
9133 temp_width = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].width;
9134 temp_height = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].height;
9135 if (temp_width * temp_height > max_jpeg_size ) {
9136 max_jpeg_size = temp_width * temp_height;
9137 }
9138 }
9139 max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
9140 return max_jpeg_size;
9141}
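/*
 * Worked example (illustrative only, with a hypothetical sensor): if the
 * largest picture size is 4160x3120, calcMaxJpegSize() returns
 *
 *   4160 * 3120 * 3 / 2 + sizeof(camera3_jpeg_blob_t)
 *     = 19,468,800 + sizeof(camera3_jpeg_blob_t) bytes,
 *
 * i.e. 1.5 bytes of headroom per pixel plus the trailing JPEG blob header;
 * this value is later advertised as ANDROID_JPEG_MAX_SIZE.
 */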
9142
9143/*===========================================================================
9144 * FUNCTION : getMaxRawSize
9145 *
9146 * DESCRIPTION: Fetches maximum raw size supported by the cameraId
9147 *
9148 * PARAMETERS :
9149 *
9150 * RETURN : Largest supported Raw Dimension
9151 *==========================================================================*/
9152cam_dimension_t QCamera3HardwareInterface::getMaxRawSize(uint32_t camera_id)
9153{
9154 int max_width = 0;
9155 cam_dimension_t maxRawSize;
9156
9157 memset(&maxRawSize, 0, sizeof(cam_dimension_t));
9158 for (size_t i = 0; i < gCamCapability[camera_id]->supported_raw_dim_cnt; i++) {
9159 if (max_width < gCamCapability[camera_id]->raw_dim[i].width) {
9160 max_width = gCamCapability[camera_id]->raw_dim[i].width;
9161 maxRawSize = gCamCapability[camera_id]->raw_dim[i];
9162 }
9163 }
9164 return maxRawSize;
9165}
9166
9167
9168/*===========================================================================
9169 * FUNCTION : calcMaxJpegDim
9170 *
9171 * DESCRIPTION: Calculates maximum jpeg dimension supported by the cameraId
9172 *
9173 * PARAMETERS :
9174 *
9175 * RETURN : max_jpeg_dim
9176 *==========================================================================*/
9177cam_dimension_t QCamera3HardwareInterface::calcMaxJpegDim()
9178{
9179 cam_dimension_t max_jpeg_dim;
9180 cam_dimension_t curr_jpeg_dim;
9181 max_jpeg_dim.width = 0;
9182 max_jpeg_dim.height = 0;
9183 curr_jpeg_dim.width = 0;
9184 curr_jpeg_dim.height = 0;
9185 for (size_t i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
9186 curr_jpeg_dim.width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width;
9187 curr_jpeg_dim.height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height;
9188 if (curr_jpeg_dim.width * curr_jpeg_dim.height >
9189 max_jpeg_dim.width * max_jpeg_dim.height ) {
9190 max_jpeg_dim.width = curr_jpeg_dim.width;
9191 max_jpeg_dim.height = curr_jpeg_dim.height;
9192 }
9193 }
9194 return max_jpeg_dim;
9195}
9196
9197/*===========================================================================
9198 * FUNCTION : addStreamConfig
9199 *
9200 * DESCRIPTION: adds the stream configuration to the array
9201 *
9202 * PARAMETERS :
9203 * @available_stream_configs : pointer to stream configuration array
9204 * @scalar_format : scalar format
9205 * @dim : configuration dimension
9206 * @config_type : input or output configuration type
9207 *
9208 * RETURN : NONE
9209 *==========================================================================*/
9210void QCamera3HardwareInterface::addStreamConfig(Vector<int32_t> &available_stream_configs,
9211 int32_t scalar_format, const cam_dimension_t &dim, int32_t config_type)
9212{
9213 available_stream_configs.add(scalar_format);
9214 available_stream_configs.add(dim.width);
9215 available_stream_configs.add(dim.height);
9216 available_stream_configs.add(config_type);
9217}
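/*
 * Illustrative sketch (not part of the HAL build): each call appends one
 * (format, width, height, direction) quad, which is the flattened layout
 * expected by ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS.
 *
 *   Vector<int32_t> configs;
 *   cam_dimension_t dim = {1920, 1080};   // hypothetical size
 *   addStreamConfig(configs, HAL_PIXEL_FORMAT_YCbCr_420_888, dim,
 *           ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
 *   // configs now holds: { HAL_PIXEL_FORMAT_YCbCr_420_888, 1920, 1080,
 *   //     ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT }
 */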
9218
9219/*===========================================================================
9220 * FUNCTION   : supportBurstCapture
9221 *
9222 * DESCRIPTION: Whether a particular camera supports BURST_CAPTURE
9223 *
9224 * PARAMETERS :
9225 * @cameraId : camera Id
9226 *
9227 * RETURN : true if camera supports BURST_CAPTURE
9228 * false otherwise
9229 *==========================================================================*/
9230bool QCamera3HardwareInterface::supportBurstCapture(uint32_t cameraId)
9231{
9232 const int64_t highResDurationBound = 50000000; // 50 ms, 20 fps
9233 const int64_t fullResDurationBound = 100000000; // 100 ms, 10 fps
9234 const int32_t highResWidth = 3264;
9235 const int32_t highResHeight = 2448;
9236
9237 if (gCamCapability[cameraId]->picture_min_duration[0] > fullResDurationBound) {
9238 // Maximum resolution images cannot be captured at >= 10fps
9239 // -> not supporting BURST_CAPTURE
9240 return false;
9241 }
9242
9243 if (gCamCapability[cameraId]->picture_min_duration[0] <= highResDurationBound) {
9244 // Maximum resolution images can be captured at >= 20fps
9245 // --> supporting BURST_CAPTURE
9246 return true;
9247 }
9248
9249 // Find the smallest highRes resolution, or largest resolution if there is none
9250 size_t totalCnt = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt,
9251 MAX_SIZES_CNT);
9252 size_t highRes = 0;
9253 while ((highRes + 1 < totalCnt) &&
9254 (gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].width *
9255 gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].height >=
9256 highResWidth * highResHeight)) {
9257 highRes++;
9258 }
9259 if (gCamCapability[cameraId]->picture_min_duration[highRes] <= highResDurationBound) {
9260 return true;
9261 } else {
9262 return false;
9263 }
9264}
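/*
 * Worked example (illustrative only): if picture_min_duration[0] is 80ms
 * (12.5 fps at full resolution), the first two checks fall through and the
 * loop above looks for the smallest size that is still >= 3264x2448;
 * BURST_CAPTURE is advertised only if that size can be captured at <= 50ms
 * per frame (>= 20 fps).
 */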
9265
9266/*===========================================================================
Emilian Peev0f3c3162017-03-15 12:57:46 +00009267 * FUNCTION : getPDStatIndex
9268 *
9269 * DESCRIPTION: Return the meta raw phase detection statistics index if present
9270 *
9271 * PARAMETERS :
9272 * @caps : camera capabilities
9273 *
9274 * RETURN : int32_t type
9275 * non-negative - on success
9276 * -1 - on failure
9277 *==========================================================================*/
9278int32_t QCamera3HardwareInterface::getPDStatIndex(cam_capability_t *caps) {
9279 if (nullptr == caps) {
9280 return -1;
9281 }
9282
9283 uint32_t metaRawCount = caps->meta_raw_channel_count;
9284 int32_t ret = -1;
9285 for (size_t i = 0; i < metaRawCount; i++) {
9286 if (CAM_FORMAT_SUBTYPE_PDAF_STATS == caps->sub_fmt[i]) {
9287 ret = i;
9288 break;
9289 }
9290 }
9291
9292 return ret;
9293}
9294
9295/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07009296 * FUNCTION : initStaticMetadata
9297 *
9298 * DESCRIPTION: initialize the static metadata
9299 *
9300 * PARAMETERS :
9301 * @cameraId : camera Id
9302 *
9303 * RETURN : int32_t type of status
9304 * 0 -- success
9305 * non-zero failure code
9306 *==========================================================================*/
9307int QCamera3HardwareInterface::initStaticMetadata(uint32_t cameraId)
9308{
9309 int rc = 0;
9310 CameraMetadata staticInfo;
9311 size_t count = 0;
9312 bool limitedDevice = false;
9313 char prop[PROPERTY_VALUE_MAX];
9314 bool supportBurst = false;
9315
9316 supportBurst = supportBurstCapture(cameraId);
9317
9318    /* If the sensor is a YUV sensor (no raw support), or per-frame control is not
9319     * guaranteed, or the min fps at max resolution is less than 20 fps, the device
9320     * is advertised as a LIMITED device */
9321 limitedDevice = gCamCapability[cameraId]->no_per_frame_control_support ||
9322 (CAM_SENSOR_YUV == gCamCapability[cameraId]->sensor_type.sens_type) ||
9323 (CAM_SENSOR_MONO == gCamCapability[cameraId]->sensor_type.sens_type) ||
9324 !supportBurst;
9325
9326 uint8_t supportedHwLvl = limitedDevice ?
9327 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED :
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009328#ifndef USE_HAL_3_3
9329 // LEVEL_3 - This device will support level 3.
9330 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_3;
9331#else
Thierry Strudel3d639192016-09-09 11:52:26 -07009332 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009333#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07009334
9335 staticInfo.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
9336 &supportedHwLvl, 1);
9337
9338 bool facingBack = false;
9339 if ((gCamCapability[cameraId]->position == CAM_POSITION_BACK) ||
9340 (gCamCapability[cameraId]->position == CAM_POSITION_BACK_AUX)) {
9341 facingBack = true;
9342 }
9343 /*HAL 3 only*/
9344 staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
9345 &gCamCapability[cameraId]->min_focus_distance, 1);
9346
9347 staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
9348 &gCamCapability[cameraId]->hyper_focal_distance, 1);
9349
9350 /*should be using focal lengths but sensor doesn't provide that info now*/
9351 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
9352 &gCamCapability[cameraId]->focal_length,
9353 1);
9354
9355 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
9356 gCamCapability[cameraId]->apertures,
9357 MIN(CAM_APERTURES_MAX, gCamCapability[cameraId]->apertures_count));
9358
9359 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
9360 gCamCapability[cameraId]->filter_densities,
9361 MIN(CAM_FILTER_DENSITIES_MAX, gCamCapability[cameraId]->filter_densities_count));
9362
9363
Zhijun Hea6ea1d32017-03-10 13:30:00 -08009364 uint8_t available_opt_stab_modes[CAM_OPT_STAB_MAX];
9365 size_t mode_count =
9366 MIN((size_t)CAM_OPT_STAB_MAX, gCamCapability[cameraId]->optical_stab_modes_count);
9367 for (size_t i = 0; i < mode_count; i++) {
9368 available_opt_stab_modes[i] = gCamCapability[cameraId]->optical_stab_modes[i];
9369 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009370 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
Zhijun Hea6ea1d32017-03-10 13:30:00 -08009371 available_opt_stab_modes, mode_count);
Thierry Strudel3d639192016-09-09 11:52:26 -07009372
9373 int32_t lens_shading_map_size[] = {
9374 MIN(CAM_MAX_SHADING_MAP_WIDTH, gCamCapability[cameraId]->lens_shading_map_size.width),
9375 MIN(CAM_MAX_SHADING_MAP_HEIGHT, gCamCapability[cameraId]->lens_shading_map_size.height)};
9376 staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE,
9377 lens_shading_map_size,
9378 sizeof(lens_shading_map_size)/sizeof(int32_t));
9379
9380 staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
9381 gCamCapability[cameraId]->sensor_physical_size, SENSOR_PHYSICAL_SIZE_CNT);
9382
9383 staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
9384 gCamCapability[cameraId]->exposure_time_range, EXPOSURE_TIME_RANGE_CNT);
9385
9386 staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
9387 &gCamCapability[cameraId]->max_frame_duration, 1);
9388
9389 camera_metadata_rational baseGainFactor = {
9390 gCamCapability[cameraId]->base_gain_factor.numerator,
9391 gCamCapability[cameraId]->base_gain_factor.denominator};
9392 staticInfo.update(ANDROID_SENSOR_BASE_GAIN_FACTOR,
9393 &baseGainFactor, 1);
9394
9395 staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
9396 (uint8_t *)&gCamCapability[cameraId]->color_arrangement, 1);
9397
9398 int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width,
9399 gCamCapability[cameraId]->pixel_array_size.height};
9400 staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
9401 pixel_array_size, sizeof(pixel_array_size)/sizeof(pixel_array_size[0]));
9402
9403 int32_t active_array_size[] = {gCamCapability[cameraId]->active_array_size.left,
9404 gCamCapability[cameraId]->active_array_size.top,
9405 gCamCapability[cameraId]->active_array_size.width,
9406 gCamCapability[cameraId]->active_array_size.height};
9407 staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
9408 active_array_size, sizeof(active_array_size)/sizeof(active_array_size[0]));
9409
9410 staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
9411 &gCamCapability[cameraId]->white_level, 1);
9412
Shuzhen Wanga5da1022016-07-13 20:18:42 -07009413 int32_t adjusted_bl_per_cfa[BLACK_LEVEL_PATTERN_CNT];
9414 adjustBlackLevelForCFA(gCamCapability[cameraId]->black_level_pattern, adjusted_bl_per_cfa,
9415 gCamCapability[cameraId]->color_arrangement);
Thierry Strudel3d639192016-09-09 11:52:26 -07009416 staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
Shuzhen Wanga5da1022016-07-13 20:18:42 -07009417 adjusted_bl_per_cfa, BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel3d639192016-09-09 11:52:26 -07009418
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009419#ifndef USE_HAL_3_3
9420 bool hasBlackRegions = false;
9421 if (gCamCapability[cameraId]->optical_black_region_count > MAX_OPTICAL_BLACK_REGIONS) {
9422 LOGW("black_region_count: %d is bounded to %d",
9423 gCamCapability[cameraId]->optical_black_region_count, MAX_OPTICAL_BLACK_REGIONS);
9424 gCamCapability[cameraId]->optical_black_region_count = MAX_OPTICAL_BLACK_REGIONS;
9425 }
9426 if (gCamCapability[cameraId]->optical_black_region_count != 0) {
9427 int32_t opticalBlackRegions[MAX_OPTICAL_BLACK_REGIONS * 4];
9428 for (size_t i = 0; i < gCamCapability[cameraId]->optical_black_region_count * 4; i++) {
9429 opticalBlackRegions[i] = gCamCapability[cameraId]->optical_black_regions[i];
9430 }
9431 staticInfo.update(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS,
9432 opticalBlackRegions, gCamCapability[cameraId]->optical_black_region_count * 4);
9433 hasBlackRegions = true;
9434 }
9435#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07009436 staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION,
9437 &gCamCapability[cameraId]->flash_charge_duration, 1);
9438
9439 staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS,
9440 &gCamCapability[cameraId]->max_tone_map_curve_points, 1);
9441
Shuzhen Wang98d5efb2016-09-07 18:08:22 -07009442 uint8_t timestampSource = (gCamCapability[cameraId]->timestamp_calibrated ?
9443 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME :
9444 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_UNKNOWN);
Thierry Strudel3d639192016-09-09 11:52:26 -07009445 staticInfo.update(ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
9446 &timestampSource, 1);
9447
Thierry Strudel54dc9782017-02-15 12:12:10 -08009448 //update histogram vendor data
9449 staticInfo.update(QCAMERA3_HISTOGRAM_BUCKETS,
Thierry Strudel3d639192016-09-09 11:52:26 -07009450 &gCamCapability[cameraId]->histogram_size, 1);
9451
Thierry Strudel54dc9782017-02-15 12:12:10 -08009452 staticInfo.update(QCAMERA3_HISTOGRAM_MAX_COUNT,
Thierry Strudel3d639192016-09-09 11:52:26 -07009453 &gCamCapability[cameraId]->max_histogram_count, 1);
9454
Shuzhen Wang14415f52016-11-16 18:26:18 -08009455 //Set supported bins to be {max_bins, max_bins/2, max_bins/4, ...}
9456    //so that the app can request fewer bins than the maximum supported.
9457 std::vector<int32_t> histBins;
9458 int32_t maxHistBins = gCamCapability[cameraId]->max_histogram_count;
9459 histBins.push_back(maxHistBins);
9460 while ((maxHistBins >> 1) >= MIN_CAM_HISTOGRAM_STATS_SIZE &&
9461 (maxHistBins & 0x1) == 0) {
9462 histBins.push_back(maxHistBins >> 1);
9463 maxHistBins >>= 1;
9464 }
9465 staticInfo.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_SUPPORTED_BINS,
9466 histBins.data(), histBins.size());
9467
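    /*
     * Worked example (illustrative only, hypothetical values): with
     * max_histogram_count = 256 and MIN_CAM_HISTOGRAM_STATS_SIZE = 32, the
     * loop above advertises the bin counts {256, 128, 64, 32}, any of which
     * the app may request via NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS.
     */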
Thierry Strudel3d639192016-09-09 11:52:26 -07009468 int32_t sharpness_map_size[] = {
9469 gCamCapability[cameraId]->sharpness_map_size.width,
9470 gCamCapability[cameraId]->sharpness_map_size.height};
9471
9472 staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
9473 sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t));
9474
9475 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
9476 &gCamCapability[cameraId]->max_sharpness_map_value, 1);
9477
Emilian Peev0f3c3162017-03-15 12:57:46 +00009478 int32_t indexPD = getPDStatIndex(gCamCapability[cameraId]);
9479 if (0 <= indexPD) {
9480 // Advertise PD stats data as part of the Depth capabilities
9481 int32_t depthWidth =
9482 gCamCapability[cameraId]->raw_meta_dim[indexPD].width;
9483 int32_t depthHeight =
9484 gCamCapability[cameraId]->raw_meta_dim[indexPD].height;
Emilian Peev656e4fa2017-06-02 16:47:04 +01009485 int32_t depthStride =
9486 gCamCapability[cameraId]->raw_meta_dim[indexPD].width * 2;
Emilian Peev0f3c3162017-03-15 12:57:46 +00009487 int32_t depthSamplesCount = (depthWidth * depthHeight * 2) / 16;
9488 assert(0 < depthSamplesCount);
9489 staticInfo.update(ANDROID_DEPTH_MAX_DEPTH_SAMPLES,
9490 &depthSamplesCount, 1);
9491
9492 int32_t depthConfigs[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9493 depthHeight,
9494 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
9495 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1,
9496 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT};
9497 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
9498 depthConfigs, sizeof(depthConfigs)/sizeof(depthConfigs[0]));
9499
9500 int64_t depthMinDuration[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9501 depthHeight, 33333333,
9502 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1, 33333333};
9503 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS,
9504 depthMinDuration,
9505 sizeof(depthMinDuration) / sizeof(depthMinDuration[0]));
9506
9507 int64_t depthStallDuration[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9508 depthHeight, 0,
9509 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1, 0};
9510 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS,
9511 depthStallDuration,
9512 sizeof(depthStallDuration) / sizeof(depthStallDuration[0]));
9513
9514 uint8_t depthExclusive = ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE_FALSE;
9515 staticInfo.update(ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE, &depthExclusive, 1);
Emilian Peev656e4fa2017-06-02 16:47:04 +01009516
9517 int32_t pd_dimensions [] = {depthWidth, depthHeight, depthStride};
9518 staticInfo.update(NEXUS_EXPERIMENTAL_2017_PD_DATA_DIMENSIONS,
9519 pd_dimensions, sizeof(pd_dimensions) / sizeof(pd_dimensions[0]));
Emilian Peev0f3c3162017-03-15 12:57:46 +00009520 }
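    /*
     * Worked example (illustrative only): for a hypothetical 32x24 PDAF stats
     * grid, depthStride = 32 * 2 = 64 bytes and
     * depthSamplesCount = (32 * 24 * 2) / 16 = 96, which is the sample count
     * advertised in the ANDROID_DEPTH_* tags above.
     */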
9521
Thierry Strudel3d639192016-09-09 11:52:26 -07009522 int32_t scalar_formats[] = {
9523 ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE,
9524 ANDROID_SCALER_AVAILABLE_FORMATS_RAW16,
9525 ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888,
9526 ANDROID_SCALER_AVAILABLE_FORMATS_BLOB,
9527 HAL_PIXEL_FORMAT_RAW10,
9528 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED};
Emilian Peev0f3c3162017-03-15 12:57:46 +00009529 size_t scalar_formats_count = sizeof(scalar_formats) / sizeof(scalar_formats[0]);
9530 staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS, scalar_formats,
9531 scalar_formats_count);
Thierry Strudel3d639192016-09-09 11:52:26 -07009532
9533 int32_t available_processed_sizes[MAX_SIZES_CNT * 2];
9534 count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
9535 makeTable(gCamCapability[cameraId]->picture_sizes_tbl,
9536 count, MAX_SIZES_CNT, available_processed_sizes);
9537 staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
9538 available_processed_sizes, count * 2);
9539
9540 int32_t available_raw_sizes[MAX_SIZES_CNT * 2];
9541 count = MIN(gCamCapability[cameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
9542 makeTable(gCamCapability[cameraId]->raw_dim,
9543 count, MAX_SIZES_CNT, available_raw_sizes);
9544 staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_SIZES,
9545 available_raw_sizes, count * 2);
9546
9547 int32_t available_fps_ranges[MAX_SIZES_CNT * 2];
9548 count = MIN(gCamCapability[cameraId]->fps_ranges_tbl_cnt, MAX_SIZES_CNT);
9549 makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl,
9550 count, MAX_SIZES_CNT, available_fps_ranges);
9551 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
9552 available_fps_ranges, count * 2);
9553
9554 camera_metadata_rational exposureCompensationStep = {
9555 gCamCapability[cameraId]->exp_compensation_step.numerator,
9556 gCamCapability[cameraId]->exp_compensation_step.denominator};
9557 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
9558 &exposureCompensationStep, 1);
9559
9560 Vector<uint8_t> availableVstabModes;
9561 availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF);
9562 char eis_prop[PROPERTY_VALUE_MAX];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009563 bool eisSupported = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07009564 memset(eis_prop, 0, sizeof(eis_prop));
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009565 property_get("persist.camera.eis.enable", eis_prop, "1");
Thierry Strudel3d639192016-09-09 11:52:26 -07009566 uint8_t eis_prop_set = (uint8_t)atoi(eis_prop);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009567 count = IS_TYPE_MAX;
9568 count = MIN(gCamCapability[cameraId]->supported_is_types_cnt, count);
9569 for (size_t i = 0; i < count; i++) {
9570 if ((gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
9571 (gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0)) {
9572 eisSupported = true;
9573 break;
9574 }
9575 }
9576 if (facingBack && eis_prop_set && eisSupported) {
Thierry Strudel3d639192016-09-09 11:52:26 -07009577 availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON);
9578 }
9579 staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
9580 availableVstabModes.array(), availableVstabModes.size());
9581
9582 /*HAL 1 and HAL 3 common*/
9583 uint32_t zoomSteps = gCamCapability[cameraId]->zoom_ratio_tbl_cnt;
9584 uint32_t maxZoomStep = gCamCapability[cameraId]->zoom_ratio_tbl[zoomSteps - 1];
9585 uint32_t minZoomStep = 100; //as per HAL1/API1 spec
Zhijun He2a5df222017-04-04 18:20:38 -07009586 // Cap the max zoom to the max preferred value
9587 float maxZoom = MIN(maxZoomStep/minZoomStep, MAX_PREFERRED_ZOOM_RATIO);
Thierry Strudel3d639192016-09-09 11:52:26 -07009588 staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
9589 &maxZoom, 1);
9590
9591 uint8_t croppingType = ANDROID_SCALER_CROPPING_TYPE_CENTER_ONLY;
9592 staticInfo.update(ANDROID_SCALER_CROPPING_TYPE, &croppingType, 1);
9593
9594 int32_t max3aRegions[3] = {/*AE*/1,/*AWB*/ 0,/*AF*/ 1};
9595 if (gCamCapability[cameraId]->supported_focus_modes_cnt == 1)
9596 max3aRegions[2] = 0; /* AF not supported */
9597 staticInfo.update(ANDROID_CONTROL_MAX_REGIONS,
9598 max3aRegions, 3);
9599
9600 /* 0: OFF, 1: OFF+SIMPLE, 2: OFF+FULL, 3: OFF+SIMPLE+FULL */
9601 memset(prop, 0, sizeof(prop));
9602 property_get("persist.camera.facedetect", prop, "1");
9603 uint8_t supportedFaceDetectMode = (uint8_t)atoi(prop);
9604 LOGD("Support face detection mode: %d",
9605 supportedFaceDetectMode);
9606
9607 int32_t maxFaces = gCamCapability[cameraId]->max_num_roi;
Thierry Strudel04e026f2016-10-10 11:27:36 -07009608    /* supported mode should be OFF if the max number of faces is 0 */
9609 if (maxFaces <= 0) {
9610 supportedFaceDetectMode = 0;
9611 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009612 Vector<uint8_t> availableFaceDetectModes;
9613 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_OFF);
9614 if (supportedFaceDetectMode == 1) {
9615 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
9616 } else if (supportedFaceDetectMode == 2) {
9617 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
9618 } else if (supportedFaceDetectMode == 3) {
9619 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
9620 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
9621 } else {
9622 maxFaces = 0;
9623 }
9624 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
9625 availableFaceDetectModes.array(),
9626 availableFaceDetectModes.size());
9627 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
9628 (int32_t *)&maxFaces, 1);
Thierry Strudel54dc9782017-02-15 12:12:10 -08009629 uint8_t face_bsgc = gCamCapability[cameraId]->face_bsgc;
9630 staticInfo.update(QCAMERA3_STATS_BSGC_AVAILABLE,
9631 &face_bsgc, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -07009632
9633 int32_t exposureCompensationRange[] = {
9634 gCamCapability[cameraId]->exposure_compensation_min,
9635 gCamCapability[cameraId]->exposure_compensation_max};
9636 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
9637 exposureCompensationRange,
9638 sizeof(exposureCompensationRange)/sizeof(int32_t));
9639
9640 uint8_t lensFacing = (facingBack) ?
9641 ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
9642 staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1);
9643
9644 staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
9645 available_thumbnail_sizes,
9646 sizeof(available_thumbnail_sizes)/sizeof(int32_t));
9647
9648 /*all sizes will be clubbed into this tag*/
9649 count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
9650 /*android.scaler.availableStreamConfigurations*/
9651 Vector<int32_t> available_stream_configs;
9652 cam_dimension_t active_array_dim;
9653 active_array_dim.width = gCamCapability[cameraId]->active_array_size.width;
9654 active_array_dim.height = gCamCapability[cameraId]->active_array_size.height;
Thierry Strudel2896d122017-02-23 19:18:03 -08009655
9656    /*advertise the list of supported input dimensions based on the property below.
9657    By default all sizes up to 5MP will be advertised.
9658 Note that the setprop resolution format should be WxH.
9659 e.g: adb shell setprop persist.camera.input.minsize 1280x720
9660 To list all supported sizes, setprop needs to be set with "0x0" */
9661 cam_dimension_t minInputSize = {2592,1944}; //5MP
9662 memset(prop, 0, sizeof(prop));
9663 property_get("persist.camera.input.minsize", prop, "2592x1944");
9664 if (strlen(prop) > 0) {
9665 char *saveptr = NULL;
9666 char *token = strtok_r(prop, "x", &saveptr);
9667 if (token != NULL) {
9668 minInputSize.width = atoi(token);
9669 }
9670 token = strtok_r(NULL, "x", &saveptr);
9671 if (token != NULL) {
9672 minInputSize.height = atoi(token);
9673 }
9674 }
9675
Thierry Strudel3d639192016-09-09 11:52:26 -07009676 /* Add input/output stream configurations for each scalar formats*/
9677 for (size_t j = 0; j < scalar_formats_count; j++) {
9678 switch (scalar_formats[j]) {
9679 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
9680 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
9681 case HAL_PIXEL_FORMAT_RAW10:
9682 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9683 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
9684 addStreamConfig(available_stream_configs, scalar_formats[j],
9685 gCamCapability[cameraId]->raw_dim[i],
9686 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
9687 }
9688 break;
9689 case HAL_PIXEL_FORMAT_BLOB:
9690 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9691 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9692 addStreamConfig(available_stream_configs, scalar_formats[j],
9693 gCamCapability[cameraId]->picture_sizes_tbl[i],
9694 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
9695 }
9696 break;
9697 case HAL_PIXEL_FORMAT_YCbCr_420_888:
9698 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
9699 default:
9700 cam_dimension_t largest_picture_size;
9701 memset(&largest_picture_size, 0, sizeof(cam_dimension_t));
9702 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9703 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9704 addStreamConfig(available_stream_configs, scalar_formats[j],
9705 gCamCapability[cameraId]->picture_sizes_tbl[i],
9706 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
Thierry Strudel2896d122017-02-23 19:18:03 -08009707                /* For these two formats we also support input streams for reprocessing; advertise those */
Zhijun Hee0cc0ae2017-05-19 22:19:27 -07009708 if ((scalar_formats[j] == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
9709 scalar_formats[j] == HAL_PIXEL_FORMAT_YCbCr_420_888) && i == 0) {
Thierry Strudel2896d122017-02-23 19:18:03 -08009710 if ((gCamCapability[cameraId]->picture_sizes_tbl[i].width
9711 >= minInputSize.width) || (gCamCapability[cameraId]->
9712 picture_sizes_tbl[i].height >= minInputSize.height)) {
9713 addStreamConfig(available_stream_configs, scalar_formats[j],
9714 gCamCapability[cameraId]->picture_sizes_tbl[i],
9715 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_INPUT);
9716 }
9717 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009718 }
Thierry Strudel2896d122017-02-23 19:18:03 -08009719
Thierry Strudel3d639192016-09-09 11:52:26 -07009720 break;
9721 }
9722 }
9723
9724 staticInfo.update(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
9725 available_stream_configs.array(), available_stream_configs.size());
9726 static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
9727 staticInfo.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
9728
9729 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
9730 staticInfo.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
9731
9732 /* android.scaler.availableMinFrameDurations */
9733 Vector<int64_t> available_min_durations;
9734 for (size_t j = 0; j < scalar_formats_count; j++) {
9735 switch (scalar_formats[j]) {
9736 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
9737 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
9738 case HAL_PIXEL_FORMAT_RAW10:
9739 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9740 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
9741 available_min_durations.add(scalar_formats[j]);
9742 available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
9743 available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
9744 available_min_durations.add(gCamCapability[cameraId]->raw_min_duration[i]);
9745 }
9746 break;
9747 default:
9748 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9749 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9750 available_min_durations.add(scalar_formats[j]);
9751 available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
9752 available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
9753 available_min_durations.add(gCamCapability[cameraId]->picture_min_duration[i]);
9754 }
9755 break;
9756 }
9757 }
9758 staticInfo.update(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
9759 available_min_durations.array(), available_min_durations.size());
9760
9761 Vector<int32_t> available_hfr_configs;
9762 for (size_t i = 0; i < gCamCapability[cameraId]->hfr_tbl_cnt; i++) {
9763 int32_t fps = 0;
9764 switch (gCamCapability[cameraId]->hfr_tbl[i].mode) {
9765 case CAM_HFR_MODE_60FPS:
9766 fps = 60;
9767 break;
9768 case CAM_HFR_MODE_90FPS:
9769 fps = 90;
9770 break;
9771 case CAM_HFR_MODE_120FPS:
9772 fps = 120;
9773 break;
9774 case CAM_HFR_MODE_150FPS:
9775 fps = 150;
9776 break;
9777 case CAM_HFR_MODE_180FPS:
9778 fps = 180;
9779 break;
9780 case CAM_HFR_MODE_210FPS:
9781 fps = 210;
9782 break;
9783 case CAM_HFR_MODE_240FPS:
9784 fps = 240;
9785 break;
9786 case CAM_HFR_MODE_480FPS:
9787 fps = 480;
9788 break;
9789 case CAM_HFR_MODE_OFF:
9790 case CAM_HFR_MODE_MAX:
9791 default:
9792 break;
9793 }
9794
9795 /* Advertise only MIN_FPS_FOR_BATCH_MODE or above as HIGH_SPEED_CONFIGS */
9796 if (fps >= MIN_FPS_FOR_BATCH_MODE) {
9797 /* For each HFR frame rate, need to advertise one variable fps range
9798 * and one fixed fps range per dimension. Eg: for 120 FPS, advertise [30, 120]
9799 * and [120, 120]. While camcorder preview alone is running [30, 120] is
9800 * set by the app. When video recording is started, [120, 120] is
9801 * set. This way sensor configuration does not change when recording
9802 * is started */
9803
9804 /* (width, height, fps_min, fps_max, batch_size_max) */
9805 for (size_t j = 0; j < gCamCapability[cameraId]->hfr_tbl[i].dim_cnt &&
9806 j < MAX_SIZES_CNT; j++) {
9807 available_hfr_configs.add(
9808 gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
9809 available_hfr_configs.add(
9810 gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
9811 available_hfr_configs.add(PREVIEW_FPS_FOR_HFR);
9812 available_hfr_configs.add(fps);
9813 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
9814
9815 /* (width, height, fps_min, fps_max, batch_size_max) */
9816 available_hfr_configs.add(
9817 gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
9818 available_hfr_configs.add(
9819 gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
9820 available_hfr_configs.add(fps);
9821 available_hfr_configs.add(fps);
9822 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
9823 }
9824 }
9825 }
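    /*
     * Worked example (illustrative only, assuming PREVIEW_FPS_FOR_HFR is 30):
     * a 1920x1080 entry at CAM_HFR_MODE_120FPS contributes two quintuples to
     * the HIGH_SPEED_VIDEO_CONFIGURATIONS list:
     *
     *   { 1920, 1080,  30, 120, 4 }   // variable range while only preview runs
     *   { 1920, 1080, 120, 120, 4 }   // fixed range once recording starts
     */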
9826 //Advertise HFR capability only if the property is set
9827 memset(prop, 0, sizeof(prop));
9828 property_get("persist.camera.hal3hfr.enable", prop, "1");
9829 uint8_t hfrEnable = (uint8_t)atoi(prop);
9830
9831 if(hfrEnable && available_hfr_configs.array()) {
9832 staticInfo.update(
9833 ANDROID_CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS,
9834 available_hfr_configs.array(), available_hfr_configs.size());
9835 }
9836
9837 int32_t max_jpeg_size = (int32_t)calcMaxJpegSize(cameraId);
9838 staticInfo.update(ANDROID_JPEG_MAX_SIZE,
9839 &max_jpeg_size, 1);
9840
9841 uint8_t avail_effects[CAM_EFFECT_MODE_MAX];
9842 size_t size = 0;
9843 count = CAM_EFFECT_MODE_MAX;
9844 count = MIN(gCamCapability[cameraId]->supported_effects_cnt, count);
9845 for (size_t i = 0; i < count; i++) {
9846 int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
9847 gCamCapability[cameraId]->supported_effects[i]);
9848 if (NAME_NOT_FOUND != val) {
9849 avail_effects[size] = (uint8_t)val;
9850 size++;
9851 }
9852 }
9853 staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
9854 avail_effects,
9855 size);
9856
9857 uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX];
9858 uint8_t supported_indexes[CAM_SCENE_MODE_MAX];
9859 size_t supported_scene_modes_cnt = 0;
9860 count = CAM_SCENE_MODE_MAX;
9861 count = MIN(gCamCapability[cameraId]->supported_scene_modes_cnt, count);
9862 for (size_t i = 0; i < count; i++) {
9863 if (gCamCapability[cameraId]->supported_scene_modes[i] !=
9864 CAM_SCENE_MODE_OFF) {
9865 int val = lookupFwkName(SCENE_MODES_MAP,
9866 METADATA_MAP_SIZE(SCENE_MODES_MAP),
9867 gCamCapability[cameraId]->supported_scene_modes[i]);
Mansoor Aftab58465fa2017-01-26 15:02:44 -08009868
Thierry Strudel3d639192016-09-09 11:52:26 -07009869 if (NAME_NOT_FOUND != val) {
9870 avail_scene_modes[supported_scene_modes_cnt] = (uint8_t)val;
9871 supported_indexes[supported_scene_modes_cnt] = (uint8_t)i;
9872 supported_scene_modes_cnt++;
9873 }
9874 }
9875 }
9876 staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
9877 avail_scene_modes,
9878 supported_scene_modes_cnt);
9879
9880 uint8_t scene_mode_overrides[CAM_SCENE_MODE_MAX * 3];
9881 makeOverridesList(gCamCapability[cameraId]->scene_mode_overrides,
9882 supported_scene_modes_cnt,
9883 CAM_SCENE_MODE_MAX,
9884 scene_mode_overrides,
9885 supported_indexes,
9886 cameraId);
9887
9888 if (supported_scene_modes_cnt == 0) {
9889 supported_scene_modes_cnt = 1;
9890 avail_scene_modes[0] = ANDROID_CONTROL_SCENE_MODE_DISABLED;
9891 }
9892
9893 staticInfo.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
9894 scene_mode_overrides, supported_scene_modes_cnt * 3);
9895
9896 uint8_t available_control_modes[] = {ANDROID_CONTROL_MODE_OFF,
9897 ANDROID_CONTROL_MODE_AUTO,
9898 ANDROID_CONTROL_MODE_USE_SCENE_MODE};
9899 staticInfo.update(ANDROID_CONTROL_AVAILABLE_MODES,
9900 available_control_modes,
9901 3);
9902
9903 uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX];
9904 size = 0;
9905 count = CAM_ANTIBANDING_MODE_MAX;
9906 count = MIN(gCamCapability[cameraId]->supported_antibandings_cnt, count);
9907 for (size_t i = 0; i < count; i++) {
9908 int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
9909 gCamCapability[cameraId]->supported_antibandings[i]);
9910 if (NAME_NOT_FOUND != val) {
9911 avail_antibanding_modes[size] = (uint8_t)val;
9912 size++;
9913 }
9914
9915 }
9916 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
9917 avail_antibanding_modes,
9918 size);
9919
9920 uint8_t avail_abberation_modes[] = {
9921 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
9922 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
9923 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY};
9924 count = CAM_COLOR_CORRECTION_ABERRATION_MAX;
9925 count = MIN(gCamCapability[cameraId]->aberration_modes_count, count);
9926 if (0 == count) {
9927        // If no aberration correction modes are available for a device, advertise only the OFF mode
9928 size = 1;
9929 } else {
9930        // If count is not zero then at least one of FAST or HIGH_QUALITY is supported,
9931        // so advertise all 3 modes if at least one mode is supported, as per the
9932 // new M requirement
9933 size = 3;
9934 }
9935 staticInfo.update(ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
9936 avail_abberation_modes,
9937 size);
9938
9939 uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX];
9940 size = 0;
9941 count = CAM_FOCUS_MODE_MAX;
9942 count = MIN(gCamCapability[cameraId]->supported_focus_modes_cnt, count);
9943 for (size_t i = 0; i < count; i++) {
9944 int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
9945 gCamCapability[cameraId]->supported_focus_modes[i]);
9946 if (NAME_NOT_FOUND != val) {
9947 avail_af_modes[size] = (uint8_t)val;
9948 size++;
9949 }
9950 }
9951 staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
9952 avail_af_modes,
9953 size);
9954
9955 uint8_t avail_awb_modes[CAM_WB_MODE_MAX];
9956 size = 0;
9957 count = CAM_WB_MODE_MAX;
9958 count = MIN(gCamCapability[cameraId]->supported_white_balances_cnt, count);
9959 for (size_t i = 0; i < count; i++) {
9960 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
9961 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
9962 gCamCapability[cameraId]->supported_white_balances[i]);
9963 if (NAME_NOT_FOUND != val) {
9964 avail_awb_modes[size] = (uint8_t)val;
9965 size++;
9966 }
9967 }
9968 staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
9969 avail_awb_modes,
9970 size);
9971
9972 uint8_t available_flash_levels[CAM_FLASH_FIRING_LEVEL_MAX];
9973 count = CAM_FLASH_FIRING_LEVEL_MAX;
9974 count = MIN(gCamCapability[cameraId]->supported_flash_firing_level_cnt,
9975 count);
9976 for (size_t i = 0; i < count; i++) {
9977 available_flash_levels[i] =
9978 gCamCapability[cameraId]->supported_firing_levels[i];
9979 }
9980 staticInfo.update(ANDROID_FLASH_FIRING_POWER,
9981 available_flash_levels, count);
9982
9983 uint8_t flashAvailable;
9984 if (gCamCapability[cameraId]->flash_available)
9985 flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_TRUE;
9986 else
9987 flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_FALSE;
9988 staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE,
9989 &flashAvailable, 1);
9990
9991 Vector<uint8_t> avail_ae_modes;
9992 count = CAM_AE_MODE_MAX;
9993 count = MIN(gCamCapability[cameraId]->supported_ae_modes_cnt, count);
9994 for (size_t i = 0; i < count; i++) {
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -08009995 uint8_t aeMode = gCamCapability[cameraId]->supported_ae_modes[i];
9996 if (aeMode == CAM_AE_MODE_ON_EXTERNAL_FLASH) {
9997 aeMode = NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH;
9998 }
9999 avail_ae_modes.add(aeMode);
Thierry Strudel3d639192016-09-09 11:52:26 -070010000 }
10001 if (flashAvailable) {
10002 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH);
10003 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH);
10004 }
10005 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
10006 avail_ae_modes.array(),
10007 avail_ae_modes.size());
10008
10009 int32_t sensitivity_range[2];
10010 sensitivity_range[0] = gCamCapability[cameraId]->sensitivity_range.min_sensitivity;
10011 sensitivity_range[1] = gCamCapability[cameraId]->sensitivity_range.max_sensitivity;
10012 staticInfo.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
10013 sensitivity_range,
10014 sizeof(sensitivity_range) / sizeof(int32_t));
10015
10016 staticInfo.update(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
10017 &gCamCapability[cameraId]->max_analog_sensitivity,
10018 1);
10019
10020 int32_t sensor_orientation = (int32_t)gCamCapability[cameraId]->sensor_mount_angle;
10021 staticInfo.update(ANDROID_SENSOR_ORIENTATION,
10022 &sensor_orientation,
10023 1);
10024
10025 int32_t max_output_streams[] = {
10026 MAX_STALLING_STREAMS,
10027 MAX_PROCESSED_STREAMS,
10028 MAX_RAW_STREAMS};
10029 staticInfo.update(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
10030 max_output_streams,
10031 sizeof(max_output_streams)/sizeof(max_output_streams[0]));
10032
10033 uint8_t avail_leds = 0;
10034 staticInfo.update(ANDROID_LED_AVAILABLE_LEDS,
10035 &avail_leds, 0);
10036
10037 uint8_t focus_dist_calibrated;
10038 int val = lookupFwkName(FOCUS_CALIBRATION_MAP, METADATA_MAP_SIZE(FOCUS_CALIBRATION_MAP),
10039 gCamCapability[cameraId]->focus_dist_calibrated);
10040 if (NAME_NOT_FOUND != val) {
10041 focus_dist_calibrated = (uint8_t)val;
10042 staticInfo.update(ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
10043 &focus_dist_calibrated, 1);
10044 }
10045
10046 int32_t avail_testpattern_modes[MAX_TEST_PATTERN_CNT];
10047 size = 0;
10048 count = MIN(gCamCapability[cameraId]->supported_test_pattern_modes_cnt,
10049 MAX_TEST_PATTERN_CNT);
10050 for (size_t i = 0; i < count; i++) {
10051 int testpatternMode = lookupFwkName(TEST_PATTERN_MAP, METADATA_MAP_SIZE(TEST_PATTERN_MAP),
10052 gCamCapability[cameraId]->supported_test_pattern_modes[i]);
10053 if (NAME_NOT_FOUND != testpatternMode) {
10054 avail_testpattern_modes[size] = testpatternMode;
10055 size++;
10056 }
10057 }
10058 staticInfo.update(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
10059 avail_testpattern_modes,
10060 size);
10061
10062 uint8_t max_pipeline_depth = (uint8_t)(MAX_INFLIGHT_REQUESTS + EMPTY_PIPELINE_DELAY + FRAME_SKIP_DELAY);
10063 staticInfo.update(ANDROID_REQUEST_PIPELINE_MAX_DEPTH,
10064 &max_pipeline_depth,
10065 1);
10066
10067 int32_t partial_result_count = PARTIAL_RESULT_COUNT;
10068 staticInfo.update(ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
10069 &partial_result_count,
10070 1);
10071
10072 int32_t max_stall_duration = MAX_REPROCESS_STALL;
10073 staticInfo.update(ANDROID_REPROCESS_MAX_CAPTURE_STALL, &max_stall_duration, 1);
10074
10075 Vector<uint8_t> available_capabilities;
10076 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE);
10077 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR);
10078 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_POST_PROCESSING);
10079 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS);
10080 if (supportBurst) {
10081 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE);
10082 }
10083 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING);
10084 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING);
10085 if (hfrEnable && available_hfr_configs.array()) {
10086 available_capabilities.add(
10087 ANDROID_REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO);
10088 }
10089
10090 if (CAM_SENSOR_YUV != gCamCapability[cameraId]->sensor_type.sens_type) {
10091 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_RAW);
10092 }
10093 staticInfo.update(ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
10094 available_capabilities.array(),
10095 available_capabilities.size());
10096
10097    //aeLockAvailable is set to true if the capabilities include MANUAL_SENSOR or BURST_CAPTURE.
10098    //The assumption is that all Bayer cameras support MANUAL_SENSOR.
10099 uint8_t aeLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
10100 ANDROID_CONTROL_AE_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AE_LOCK_AVAILABLE_FALSE;
10101
10102 staticInfo.update(ANDROID_CONTROL_AE_LOCK_AVAILABLE,
10103 &aeLockAvailable, 1);
10104
10105    //awbLockAvailable is set to true if the capabilities include MANUAL_POST_PROCESSING or
10106    //BURST_CAPTURE. The assumption is that all Bayer cameras support MANUAL_POST_PROCESSING.
10107 uint8_t awbLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
10108 ANDROID_CONTROL_AWB_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AWB_LOCK_AVAILABLE_FALSE;
10109
10110 staticInfo.update(ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
10111 &awbLockAvailable, 1);
10112
10113 int32_t max_input_streams = 1;
10114 staticInfo.update(ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
10115 &max_input_streams,
10116 1);
10117
10118 /* format of the map is : input format, num_output_formats, outputFormat1,..,outputFormatN */
10119 int32_t io_format_map[] = {HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 2,
10120 HAL_PIXEL_FORMAT_BLOB, HAL_PIXEL_FORMAT_YCbCr_420_888,
10121 HAL_PIXEL_FORMAT_YCbCr_420_888, 2, HAL_PIXEL_FORMAT_BLOB,
10122 HAL_PIXEL_FORMAT_YCbCr_420_888};
10123 staticInfo.update(ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
10124 io_format_map, sizeof(io_format_map)/sizeof(io_format_map[0]));
10125
10126 int32_t max_latency = ANDROID_SYNC_MAX_LATENCY_PER_FRAME_CONTROL;
10127 staticInfo.update(ANDROID_SYNC_MAX_LATENCY,
10128 &max_latency,
10129 1);
10130
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010131#ifndef USE_HAL_3_3
10132 int32_t isp_sensitivity_range[2];
10133 isp_sensitivity_range[0] =
10134 gCamCapability[cameraId]->isp_sensitivity_range.min_sensitivity;
10135 isp_sensitivity_range[1] =
10136 gCamCapability[cameraId]->isp_sensitivity_range.max_sensitivity;
10137 staticInfo.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
10138 isp_sensitivity_range,
10139 sizeof(isp_sensitivity_range) / sizeof(isp_sensitivity_range[0]));
10140#endif
10141
Thierry Strudel3d639192016-09-09 11:52:26 -070010142 uint8_t available_hot_pixel_modes[] = {ANDROID_HOT_PIXEL_MODE_FAST,
10143 ANDROID_HOT_PIXEL_MODE_HIGH_QUALITY};
10144 staticInfo.update(ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
10145 available_hot_pixel_modes,
10146 sizeof(available_hot_pixel_modes)/sizeof(available_hot_pixel_modes[0]));
10147
10148 uint8_t available_shading_modes[] = {ANDROID_SHADING_MODE_OFF,
10149 ANDROID_SHADING_MODE_FAST,
10150 ANDROID_SHADING_MODE_HIGH_QUALITY};
10151 staticInfo.update(ANDROID_SHADING_AVAILABLE_MODES,
10152 available_shading_modes,
10153 3);
10154
10155 uint8_t available_lens_shading_map_modes[] = {ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF,
10156 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON};
10157 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
10158 available_lens_shading_map_modes,
10159 2);
10160
10161 uint8_t available_edge_modes[] = {ANDROID_EDGE_MODE_OFF,
10162 ANDROID_EDGE_MODE_FAST,
10163 ANDROID_EDGE_MODE_HIGH_QUALITY,
10164 ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG};
10165 staticInfo.update(ANDROID_EDGE_AVAILABLE_EDGE_MODES,
10166 available_edge_modes,
10167 sizeof(available_edge_modes)/sizeof(available_edge_modes[0]));
10168
10169 uint8_t available_noise_red_modes[] = {ANDROID_NOISE_REDUCTION_MODE_OFF,
10170 ANDROID_NOISE_REDUCTION_MODE_FAST,
10171 ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY,
10172 ANDROID_NOISE_REDUCTION_MODE_MINIMAL,
10173 ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG};
10174 staticInfo.update(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
10175 available_noise_red_modes,
10176 sizeof(available_noise_red_modes)/sizeof(available_noise_red_modes[0]));
10177
10178 uint8_t available_tonemap_modes[] = {ANDROID_TONEMAP_MODE_CONTRAST_CURVE,
10179 ANDROID_TONEMAP_MODE_FAST,
10180 ANDROID_TONEMAP_MODE_HIGH_QUALITY};
10181 staticInfo.update(ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
10182 available_tonemap_modes,
10183 sizeof(available_tonemap_modes)/sizeof(available_tonemap_modes[0]));
10184
10185 uint8_t available_hot_pixel_map_modes[] = {ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF};
10186 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
10187 available_hot_pixel_map_modes,
10188 sizeof(available_hot_pixel_map_modes)/sizeof(available_hot_pixel_map_modes[0]));
10189
10190 val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
10191 gCamCapability[cameraId]->reference_illuminant1);
10192 if (NAME_NOT_FOUND != val) {
10193 uint8_t fwkReferenceIlluminant = (uint8_t)val;
10194 staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT1, &fwkReferenceIlluminant, 1);
10195 }
10196
10197 val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
10198 gCamCapability[cameraId]->reference_illuminant2);
10199 if (NAME_NOT_FOUND != val) {
10200 uint8_t fwkReferenceIlluminant = (uint8_t)val;
10201 staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT2, &fwkReferenceIlluminant, 1);
10202 }
10203
10204 staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX1, (camera_metadata_rational_t *)
10205 (void *)gCamCapability[cameraId]->forward_matrix1,
10206 FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
10207
10208 staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX2, (camera_metadata_rational_t *)
10209 (void *)gCamCapability[cameraId]->forward_matrix2,
10210 FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
10211
10212 staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM1, (camera_metadata_rational_t *)
10213 (void *)gCamCapability[cameraId]->color_transform1,
10214 COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
10215
10216 staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM2, (camera_metadata_rational_t *)
10217 (void *)gCamCapability[cameraId]->color_transform2,
10218 COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
10219
10220 staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM1, (camera_metadata_rational_t *)
10221 (void *)gCamCapability[cameraId]->calibration_transform1,
10222 CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
10223
10224 staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM2, (camera_metadata_rational_t *)
10225 (void *)gCamCapability[cameraId]->calibration_transform2,
10226 CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
10227
10228 int32_t request_keys_basic[] = {ANDROID_COLOR_CORRECTION_MODE,
10229 ANDROID_COLOR_CORRECTION_TRANSFORM, ANDROID_COLOR_CORRECTION_GAINS,
10230 ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
10231 ANDROID_CONTROL_AE_ANTIBANDING_MODE, ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
10232 ANDROID_CONTROL_AE_LOCK, ANDROID_CONTROL_AE_MODE,
10233 ANDROID_CONTROL_AE_REGIONS, ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
10234 ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, ANDROID_CONTROL_AF_MODE,
10235 ANDROID_CONTROL_AF_TRIGGER, ANDROID_CONTROL_AWB_LOCK,
10236 ANDROID_CONTROL_AWB_MODE, ANDROID_CONTROL_CAPTURE_INTENT,
10237 ANDROID_CONTROL_EFFECT_MODE, ANDROID_CONTROL_MODE,
10238 ANDROID_CONTROL_SCENE_MODE, ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
10239 ANDROID_DEMOSAIC_MODE, ANDROID_EDGE_MODE,
10240 ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
10241 ANDROID_JPEG_GPS_COORDINATES,
10242 ANDROID_JPEG_GPS_PROCESSING_METHOD, ANDROID_JPEG_GPS_TIMESTAMP,
10243 ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY, ANDROID_JPEG_THUMBNAIL_QUALITY,
10244 ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE, ANDROID_LENS_FILTER_DENSITY,
10245 ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
10246 ANDROID_LENS_OPTICAL_STABILIZATION_MODE, ANDROID_NOISE_REDUCTION_MODE,
10247 ANDROID_REQUEST_ID, ANDROID_REQUEST_TYPE,
10248 ANDROID_SCALER_CROP_REGION, ANDROID_SENSOR_EXPOSURE_TIME,
10249 ANDROID_SENSOR_FRAME_DURATION, ANDROID_HOT_PIXEL_MODE,
10250 ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE,
10251 ANDROID_SENSOR_SENSITIVITY, ANDROID_SHADING_MODE,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010252#ifndef USE_HAL_3_3
10253 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
10254#endif
Thierry Strudel3d639192016-09-09 11:52:26 -070010255 ANDROID_STATISTICS_FACE_DETECT_MODE,
Thierry Strudel54dc9782017-02-15 12:12:10 -080010256 ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
Thierry Strudel3d639192016-09-09 11:52:26 -070010257 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, ANDROID_TONEMAP_CURVE_BLUE,
10258 ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
Shuzhen Wang2abea3d2016-03-31 11:09:27 -070010259 ANDROID_BLACK_LEVEL_LOCK, NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE,
Samuel Ha68ba5172016-12-15 18:41:12 -080010260 /* DevCamDebug metadata request_keys_basic */
10261 DEVCAMDEBUG_META_ENABLE,
10262 /* DevCamDebug metadata end */
Shuzhen Wang14415f52016-11-16 18:26:18 -080010263 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE,
Ranjith Kagathi Ananda0533b682017-03-24 17:52:46 -070010264 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS,
Ranjith Kagathi Anandae5df3752017-04-28 11:22:51 -070010265 TANGO_MODE_DATA_SENSOR_FULLFOV,
Shuzhen Wangf1b4ddc2017-04-10 18:22:11 -070010266 NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER,
Emilian Peev656e4fa2017-06-02 16:47:04 +010010267 NEXUS_EXPERIMENTAL_2017_PD_DATA_ENABLE,
Samuel Ha68ba5172016-12-15 18:41:12 -080010268 };
Thierry Strudel3d639192016-09-09 11:52:26 -070010269
10270 size_t request_keys_cnt =
10271 sizeof(request_keys_basic)/sizeof(request_keys_basic[0]);
10272 Vector<int32_t> available_request_keys;
10273 available_request_keys.appendArray(request_keys_basic, request_keys_cnt);
10274 if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
10275 available_request_keys.add(ANDROID_CONTROL_AF_REGIONS);
10276 }
10277
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010278 if (gExposeEnableZslKey) {
Chenjie Luo4a761802017-06-13 17:35:54 +000010279 available_request_keys.add(ANDROID_CONTROL_ENABLE_ZSL);
Chien-Yu Chen0a921f92017-08-27 17:25:33 -070010280 available_request_keys.add(NEXUS_EXPERIMENTAL_2017_POSTVIEW);
Chien-Yu Chenb0981e32017-08-28 19:27:35 -070010281 available_request_keys.add(NEXUS_EXPERIMENTAL_2017_CONTINUOUS_ZSL_CAPTURE);
Chien-Yu Chenec328c82017-08-30 16:41:08 -070010282 available_request_keys.add(NEXUS_EXPERIMENTAL_2017_DISABLE_HDRPLUS);
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010283 }
10284
Thierry Strudel3d639192016-09-09 11:52:26 -070010285 staticInfo.update(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS,
10286 available_request_keys.array(), available_request_keys.size());
10287
10288 int32_t result_keys_basic[] = {ANDROID_COLOR_CORRECTION_TRANSFORM,
10289 ANDROID_COLOR_CORRECTION_GAINS, ANDROID_CONTROL_AE_MODE, ANDROID_CONTROL_AE_REGIONS,
10290 ANDROID_CONTROL_AE_STATE, ANDROID_CONTROL_AF_MODE,
10291 ANDROID_CONTROL_AF_STATE, ANDROID_CONTROL_AWB_MODE,
10292 ANDROID_CONTROL_AWB_STATE, ANDROID_CONTROL_MODE, ANDROID_EDGE_MODE,
10293 ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
10294 ANDROID_FLASH_STATE, ANDROID_JPEG_GPS_COORDINATES, ANDROID_JPEG_GPS_PROCESSING_METHOD,
10295 ANDROID_JPEG_GPS_TIMESTAMP, ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY,
10296 ANDROID_JPEG_THUMBNAIL_QUALITY, ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE,
10297 ANDROID_LENS_FILTER_DENSITY, ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
10298 ANDROID_LENS_FOCUS_RANGE, ANDROID_LENS_STATE, ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
10299 ANDROID_NOISE_REDUCTION_MODE, ANDROID_REQUEST_ID,
10300 ANDROID_SCALER_CROP_REGION, ANDROID_SHADING_MODE, ANDROID_SENSOR_EXPOSURE_TIME,
10301 ANDROID_SENSOR_FRAME_DURATION, ANDROID_SENSOR_SENSITIVITY,
10302 ANDROID_SENSOR_TIMESTAMP, ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
10303 ANDROID_SENSOR_PROFILE_TONE_CURVE, ANDROID_BLACK_LEVEL_LOCK, ANDROID_TONEMAP_CURVE_BLUE,
10304 ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
Thierry Strudel54dc9782017-02-15 12:12:10 -080010305 ANDROID_STATISTICS_FACE_DETECT_MODE,
Thierry Strudel3d639192016-09-09 11:52:26 -070010306 ANDROID_STATISTICS_SHARPNESS_MAP, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
10307 ANDROID_STATISTICS_PREDICTED_COLOR_GAINS, ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
10308 ANDROID_STATISTICS_SCENE_FLICKER, ANDROID_STATISTICS_FACE_RECTANGLES,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010309 ANDROID_STATISTICS_FACE_SCORES,
10310#ifndef USE_HAL_3_3
10311 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
10312#endif
Shuzhen Wang2abea3d2016-03-31 11:09:27 -070010313 NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE,
Shuzhen Wange763e802016-03-31 10:24:29 -070010314 NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE,
Samuel Ha68ba5172016-12-15 18:41:12 -080010315 // DevCamDebug metadata result_keys_basic
10316 DEVCAMDEBUG_META_ENABLE,
10317 // DevCamDebug metadata result_keys AF
10318 DEVCAMDEBUG_AF_LENS_POSITION,
10319 DEVCAMDEBUG_AF_TOF_CONFIDENCE,
10320 DEVCAMDEBUG_AF_TOF_DISTANCE,
10321 DEVCAMDEBUG_AF_LUMA,
10322 DEVCAMDEBUG_AF_HAF_STATE,
10323 DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
10324 DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
10325 DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
10326 DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
10327 DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
10328 DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
10329 DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
10330 DEVCAMDEBUG_AF_MONITOR_REFOCUS,
10331 DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
10332 DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
10333 DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
10334 DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
10335 DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
10336 DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
10337 DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
10338 DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
10339 DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
10340 DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
10341 DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
10342 DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
10343 DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
10344 DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
10345 // DevCamDebug metadata result_keys AEC
10346 DEVCAMDEBUG_AEC_TARGET_LUMA,
10347 DEVCAMDEBUG_AEC_COMP_LUMA,
10348 DEVCAMDEBUG_AEC_AVG_LUMA,
10349 DEVCAMDEBUG_AEC_CUR_LUMA,
10350 DEVCAMDEBUG_AEC_LINECOUNT,
10351 DEVCAMDEBUG_AEC_REAL_GAIN,
10352 DEVCAMDEBUG_AEC_EXP_INDEX,
10353 DEVCAMDEBUG_AEC_LUX_IDX,
Samuel Ha34229982017-02-17 13:51:11 -080010354 // DevCamDebug metadata result_keys zzHDR
10355 DEVCAMDEBUG_AEC_L_REAL_GAIN,
10356 DEVCAMDEBUG_AEC_L_LINECOUNT,
10357 DEVCAMDEBUG_AEC_S_REAL_GAIN,
10358 DEVCAMDEBUG_AEC_S_LINECOUNT,
10359 DEVCAMDEBUG_AEC_HDR_SENSITIVITY_RATIO,
10360 DEVCAMDEBUG_AEC_HDR_EXP_TIME_RATIO,
10361 // DevCamDebug metadata result_keys ADRC
10362 DEVCAMDEBUG_AEC_TOTAL_DRC_GAIN,
10363 DEVCAMDEBUG_AEC_COLOR_DRC_GAIN,
10364 DEVCAMDEBUG_AEC_GTM_RATIO,
10365 DEVCAMDEBUG_AEC_LTM_RATIO,
10366 DEVCAMDEBUG_AEC_LA_RATIO,
10367 DEVCAMDEBUG_AEC_GAMMA_RATIO,
Samuel Habdf4fac2017-07-28 17:21:18 -070010368 // DevCamDebug metadata result_keys AEC MOTION
10369 DEVCAMDEBUG_AEC_CAMERA_MOTION_DX,
10370 DEVCAMDEBUG_AEC_CAMERA_MOTION_DY,
10371 DEVCAMDEBUG_AEC_SUBJECT_MOTION,
Samuel Ha68ba5172016-12-15 18:41:12 -080010372 // DevCamDebug metadata result_keys AWB
10373 DEVCAMDEBUG_AWB_R_GAIN,
10374 DEVCAMDEBUG_AWB_G_GAIN,
10375 DEVCAMDEBUG_AWB_B_GAIN,
10376 DEVCAMDEBUG_AWB_CCT,
10377 DEVCAMDEBUG_AWB_DECISION,
10378 /* DevCamDebug metadata end */
Shuzhen Wang14415f52016-11-16 18:26:18 -080010379 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE,
10380 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS,
10381 NEXUS_EXPERIMENTAL_2017_HISTOGRAM,
Shuzhen Wangf1b4ddc2017-04-10 18:22:11 -070010382 NEXUS_EXPERIMENTAL_2017_AF_REGIONS_CONFIDENCE,
Shuzhen Wangc89c77e2017-08-07 15:50:12 -070010383 NEXUS_EXPERIMENTAL_2017_EXP_TIME_BOOST,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010384 };
10385
Thierry Strudel3d639192016-09-09 11:52:26 -070010386 size_t result_keys_cnt =
10387 sizeof(result_keys_basic)/sizeof(result_keys_basic[0]);
10388
10389 Vector<int32_t> available_result_keys;
10390 available_result_keys.appendArray(result_keys_basic, result_keys_cnt);
10391 if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
10392 available_result_keys.add(ANDROID_CONTROL_AF_REGIONS);
10393 }
10394 if (CAM_SENSOR_RAW == gCamCapability[cameraId]->sensor_type.sens_type) {
10395 available_result_keys.add(ANDROID_SENSOR_NOISE_PROFILE);
10396 available_result_keys.add(ANDROID_SENSOR_GREEN_SPLIT);
10397 }
10398 if (supportedFaceDetectMode == 1) {
10399 available_result_keys.add(ANDROID_STATISTICS_FACE_RECTANGLES);
10400 available_result_keys.add(ANDROID_STATISTICS_FACE_SCORES);
10401 } else if ((supportedFaceDetectMode == 2) ||
10402 (supportedFaceDetectMode == 3)) {
10403 available_result_keys.add(ANDROID_STATISTICS_FACE_IDS);
10404 available_result_keys.add(ANDROID_STATISTICS_FACE_LANDMARKS);
10405 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010406#ifndef USE_HAL_3_3
10407 if (hasBlackRegions) {
10408 available_result_keys.add(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL);
10409 available_result_keys.add(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL);
10410 }
10411#endif
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010412
10413 if (gExposeEnableZslKey) {
10414 available_result_keys.add(ANDROID_CONTROL_ENABLE_ZSL);
Chien-Yu Chendaf68892017-08-25 12:56:40 -070010415 available_result_keys.add(NEXUS_EXPERIMENTAL_2017_NEXT_STILL_INTENT_REQUEST_READY);
Chien-Yu Chen0a921f92017-08-27 17:25:33 -070010416 available_result_keys.add(NEXUS_EXPERIMENTAL_2017_POSTVIEW_CONFIG);
10417 available_result_keys.add(NEXUS_EXPERIMENTAL_2017_POSTVIEW_DATA);
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010418 }
10419
Thierry Strudel3d639192016-09-09 11:52:26 -070010420 staticInfo.update(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
10421 available_result_keys.array(), available_result_keys.size());
10422
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010423 int32_t characteristics_keys_basic[] = {ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
Thierry Strudel3d639192016-09-09 11:52:26 -070010424 ANDROID_CONTROL_AE_AVAILABLE_MODES, ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
10425 ANDROID_CONTROL_AE_COMPENSATION_RANGE, ANDROID_CONTROL_AE_COMPENSATION_STEP,
10426 ANDROID_CONTROL_AF_AVAILABLE_MODES, ANDROID_CONTROL_AVAILABLE_EFFECTS,
10427 ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
10428 ANDROID_SCALER_CROPPING_TYPE,
10429 ANDROID_SYNC_MAX_LATENCY,
10430 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
10431 ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
10432 ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
10433 ANDROID_CONTROL_AWB_AVAILABLE_MODES, ANDROID_CONTROL_MAX_REGIONS,
10434 ANDROID_CONTROL_SCENE_MODE_OVERRIDES,ANDROID_FLASH_INFO_AVAILABLE,
10435 ANDROID_FLASH_INFO_CHARGE_DURATION, ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
10436 ANDROID_JPEG_MAX_SIZE, ANDROID_LENS_INFO_AVAILABLE_APERTURES,
10437 ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
10438 ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
10439 ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
10440 ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE, ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
10441 ANDROID_LENS_INFO_SHADING_MAP_SIZE, ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
10442 ANDROID_LENS_FACING,
10443 ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS, ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
10444 ANDROID_REQUEST_PIPELINE_MAX_DEPTH, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
10445 ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
10446 ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
10447 ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
10448 ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
10449 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
10450 /*ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,*/
10451 ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS, ANDROID_SENSOR_FORWARD_MATRIX1,
10452 ANDROID_SENSOR_REFERENCE_ILLUMINANT1, ANDROID_SENSOR_REFERENCE_ILLUMINANT2,
10453 ANDROID_SENSOR_FORWARD_MATRIX2, ANDROID_SENSOR_COLOR_TRANSFORM1,
10454 ANDROID_SENSOR_COLOR_TRANSFORM2, ANDROID_SENSOR_CALIBRATION_TRANSFORM1,
10455 ANDROID_SENSOR_CALIBRATION_TRANSFORM2, ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
10456 ANDROID_SENSOR_INFO_SENSITIVITY_RANGE, ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
10457 ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE, ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
10458 ANDROID_SENSOR_INFO_PHYSICAL_SIZE, ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
10459 ANDROID_SENSOR_INFO_WHITE_LEVEL, ANDROID_SENSOR_BASE_GAIN_FACTOR,
10460 ANDROID_SENSOR_BLACK_LEVEL_PATTERN, ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
10461 ANDROID_SENSOR_ORIENTATION, ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
10462 ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
Thierry Strudel54dc9782017-02-15 12:12:10 -080010463 ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
Thierry Strudel3d639192016-09-09 11:52:26 -070010464 ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
10465 ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE, ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
10466 ANDROID_EDGE_AVAILABLE_EDGE_MODES,
10467 ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
10468 ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
10469 ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
10470 ANDROID_TONEMAP_MAX_CURVE_POINTS,
10471 ANDROID_CONTROL_AVAILABLE_MODES,
10472 ANDROID_CONTROL_AE_LOCK_AVAILABLE,
10473 ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
10474 ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
10475 ANDROID_SHADING_AVAILABLE_MODES,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010476 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
10477#ifndef USE_HAL_3_3
10478 ANDROID_SENSOR_OPAQUE_RAW_SIZE,
10479 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
10480#endif
10481 };
10482
10483 Vector<int32_t> available_characteristics_keys;
10484 available_characteristics_keys.appendArray(characteristics_keys_basic,
10485 sizeof(characteristics_keys_basic)/sizeof(int32_t));
10486#ifndef USE_HAL_3_3
10487 if (hasBlackRegions) {
10488 available_characteristics_keys.add(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS);
10489 }
10490#endif
Emilian Peev0f3c3162017-03-15 12:57:46 +000010491
10492 if (0 <= indexPD) {
10493 int32_t depthKeys[] = {
10494 ANDROID_DEPTH_MAX_DEPTH_SAMPLES,
10495 ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
10496 ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS,
10497 ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS,
10498 ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE
10499 };
10500 available_characteristics_keys.appendArray(depthKeys,
10501 sizeof(depthKeys) / sizeof(depthKeys[0]));
10502 }
10503
Thierry Strudel3d639192016-09-09 11:52:26 -070010504 staticInfo.update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010505 available_characteristics_keys.array(),
10506 available_characteristics_keys.size());
Thierry Strudel3d639192016-09-09 11:52:26 -070010507
10508 /*available stall durations depend on the hw + sw and will be different for different devices */
10509 /*have to add for raw after implementation*/
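    // Each entry appended below is a (format, width, height, stall duration in ns)
    // quadruple, which is the layout ANDROID_SCALER_AVAILABLE_STALL_DURATIONS expects.
    // Illustrative values only: a 4032x3024 BLOB size with a ~33 ms JPEG stall would
    // appear as { HAL_PIXEL_FORMAT_BLOB, 4032, 3024, 33333333 }.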
10510 int32_t stall_formats[] = {HAL_PIXEL_FORMAT_BLOB, ANDROID_SCALER_AVAILABLE_FORMATS_RAW16};
10511 size_t stall_formats_count = sizeof(stall_formats)/sizeof(int32_t);
10512
10513 Vector<int64_t> available_stall_durations;
10514 for (uint32_t j = 0; j < stall_formats_count; j++) {
10515 if (stall_formats[j] == HAL_PIXEL_FORMAT_BLOB) {
10516 for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
10517 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
10518 available_stall_durations.add(stall_formats[j]);
10519 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
10520 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
10521 available_stall_durations.add(gCamCapability[cameraId]->jpeg_stall_durations[i]);
10522 }
10523 } else {
10524 for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
10525 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10526 available_stall_durations.add(stall_formats[j]);
10527 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
10528 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
10529 available_stall_durations.add(gCamCapability[cameraId]->raw16_stall_durations[i]);
10530 }
10531 }
10532 }
10533 staticInfo.update(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,
10534 available_stall_durations.array(),
10535 available_stall_durations.size());
10536
10537 //QCAMERA3_OPAQUE_RAW
10538 uint8_t raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
10539 cam_format_t fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
10540 switch (gCamCapability[cameraId]->opaque_raw_fmt) {
10541 case LEGACY_RAW:
10542 if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
10543 fmt = CAM_FORMAT_BAYER_QCOM_RAW_8BPP_GBRG;
10544 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
10545 fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
10546 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
10547 fmt = CAM_FORMAT_BAYER_QCOM_RAW_12BPP_GBRG;
10548 raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
10549 break;
10550 case MIPI_RAW:
10551 if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
10552 fmt = CAM_FORMAT_BAYER_MIPI_RAW_8BPP_GBRG;
10553 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
10554 fmt = CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG;
10555 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
10556 fmt = CAM_FORMAT_BAYER_MIPI_RAW_12BPP_GBRG;
10557 raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_MIPI;
10558 break;
10559 default:
10560 LOGE("unknown opaque_raw_format %d",
10561 gCamCapability[cameraId]->opaque_raw_fmt);
10562 break;
10563 }
10564 staticInfo.update(QCAMERA3_OPAQUE_RAW_FORMAT, &raw_format, 1);
10565
10566 Vector<int32_t> strides;
10567 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
10568 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10569 cam_stream_buf_plane_info_t buf_planes;
10570 strides.add(gCamCapability[cameraId]->raw_dim[i].width);
10571 strides.add(gCamCapability[cameraId]->raw_dim[i].height);
10572 mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
10573 &gCamCapability[cameraId]->padding_info, &buf_planes);
10574 strides.add(buf_planes.plane_info.mp[0].stride);
10575 }
10576 staticInfo.update(QCAMERA3_OPAQUE_RAW_STRIDES, strides.array(),
10577 strides.size());
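    // Each QCAMERA3_OPAQUE_RAW_STRIDES entry above is a (width, height, stride)
    // triplet; the stride comes from mm_stream_calc_offset_raw() for the opaque raw
    // format selected above, so its exact value depends on format and padding.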
10578
Mansoor Aftab58465fa2017-01-26 15:02:44 -080010579 //TBD: remove the following line once backend advertises zzHDR in feature mask
10580 gCamCapability[cameraId]->qcom_supported_feature_mask |= CAM_QCOM_FEATURE_ZIGZAG_HDR;
Thierry Strudel04e026f2016-10-10 11:27:36 -070010581 //Video HDR default
10582 if ((gCamCapability[cameraId]->qcom_supported_feature_mask) &
10583 (CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR |
Mansoor Aftab58465fa2017-01-26 15:02:44 -080010584 CAM_QCOM_FEATURE_ZIGZAG_HDR | CAM_QCOM_FEATURE_SENSOR_HDR)) {
Thierry Strudel04e026f2016-10-10 11:27:36 -070010585 int32_t vhdr_mode[] = {
10586 QCAMERA3_VIDEO_HDR_MODE_OFF,
10587 QCAMERA3_VIDEO_HDR_MODE_ON};
10588
10589 size_t vhdr_mode_count = sizeof(vhdr_mode) / sizeof(int32_t);
10590 staticInfo.update(QCAMERA3_AVAILABLE_VIDEO_HDR_MODES,
10591 vhdr_mode, vhdr_mode_count);
10592 }
10593
Thierry Strudel3d639192016-09-09 11:52:26 -070010594 staticInfo.update(QCAMERA3_DUALCAM_CALIB_META_DATA_BLOB,
10595 (const uint8_t*)&gCamCapability[cameraId]->related_cam_calibration,
10596 sizeof(gCamCapability[cameraId]->related_cam_calibration));
10597
10598 uint8_t isMonoOnly =
10599 (gCamCapability[cameraId]->color_arrangement == CAM_FILTER_ARRANGEMENT_Y);
10600 staticInfo.update(QCAMERA3_SENSOR_IS_MONO_ONLY,
10601 &isMonoOnly, 1);
10602
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010603#ifndef USE_HAL_3_3
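    // ANDROID_SENSOR_OPAQUE_RAW_SIZE is built below as (width, height, frame length
    // in bytes) triplets, one per supported opaque raw dimension; the
    // PER_CONFIGURATION_SIZE_3 check afterwards guards that layout.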
10604 Vector<int32_t> opaque_size;
10605 for (size_t j = 0; j < scalar_formats_count; j++) {
10606 if (scalar_formats[j] == ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE) {
10607 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
10608 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10609 cam_stream_buf_plane_info_t buf_planes;
10610
10611 rc = mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
10612 &gCamCapability[cameraId]->padding_info, &buf_planes);
10613
10614 if (rc == 0) {
10615 opaque_size.add(gCamCapability[cameraId]->raw_dim[i].width);
10616 opaque_size.add(gCamCapability[cameraId]->raw_dim[i].height);
10617 opaque_size.add(buf_planes.plane_info.frame_len);
10618 } else {
10619 LOGE("raw frame calculation failed!");
10620 }
10621 }
10622 }
10623 }
10624
10625 if ((opaque_size.size() > 0) &&
10626 (opaque_size.size() % PER_CONFIGURATION_SIZE_3 == 0))
10627 staticInfo.update(ANDROID_SENSOR_OPAQUE_RAW_SIZE, opaque_size.array(), opaque_size.size());
10628 else
10629 LOGW("Warning: ANDROID_SENSOR_OPAQUE_RAW_SIZE is using rough estimation(2 bytes/pixel)");
10630#endif
10631
Thierry Strudel04e026f2016-10-10 11:27:36 -070010632 if (gCamCapability[cameraId]->supported_ir_mode_cnt > 0) {
10633 int32_t avail_ir_modes[CAM_IR_MODE_MAX];
10634 size = 0;
10635 count = CAM_IR_MODE_MAX;
10636 count = MIN(gCamCapability[cameraId]->supported_ir_mode_cnt, count);
10637 for (size_t i = 0; i < count; i++) {
10638 int val = lookupFwkName(IR_MODES_MAP, METADATA_MAP_SIZE(IR_MODES_MAP),
10639 gCamCapability[cameraId]->supported_ir_modes[i]);
10640 if (NAME_NOT_FOUND != val) {
10641 avail_ir_modes[size] = (int32_t)val;
10642 size++;
10643 }
10644 }
10645 staticInfo.update(QCAMERA3_IR_AVAILABLE_MODES,
10646 avail_ir_modes, size);
10647 }
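    // The vendor-tag blocks below follow the same pattern as the IR modes above:
    // each HAL-side enum is translated to its framework value via lookupFwkName(),
    // and entries without a mapping are dropped so only advertised modes are published.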
10648
Thierry Strudel295a0ca2016-11-03 18:38:47 -070010649 if (gCamCapability[cameraId]->supported_instant_aec_modes_cnt > 0) {
10650 int32_t available_instant_aec_modes[CAM_AEC_CONVERGENCE_MAX];
10651 size = 0;
10652 count = CAM_AEC_CONVERGENCE_MAX;
10653 count = MIN(gCamCapability[cameraId]->supported_instant_aec_modes_cnt, count);
10654 for (size_t i = 0; i < count; i++) {
10655 int val = lookupFwkName(INSTANT_AEC_MODES_MAP, METADATA_MAP_SIZE(INSTANT_AEC_MODES_MAP),
10656 gCamCapability[cameraId]->supported_instant_aec_modes[i]);
10657 if (NAME_NOT_FOUND != val) {
10658 available_instant_aec_modes[size] = (int32_t)val;
10659 size++;
10660 }
10661 }
10662 staticInfo.update(QCAMERA3_INSTANT_AEC_AVAILABLE_MODES,
10663 available_instant_aec_modes, size);
10664 }
10665
Thierry Strudel54dc9782017-02-15 12:12:10 -080010666 int32_t sharpness_range[] = {
10667 gCamCapability[cameraId]->sharpness_ctrl.min_value,
10668 gCamCapability[cameraId]->sharpness_ctrl.max_value};
10669 staticInfo.update(QCAMERA3_SHARPNESS_RANGE, sharpness_range, 2);
10670
10671 if (gCamCapability[cameraId]->supported_binning_correction_mode_cnt > 0) {
10672 int32_t avail_binning_modes[CAM_BINNING_CORRECTION_MODE_MAX];
10673 size = 0;
10674 count = CAM_BINNING_CORRECTION_MODE_MAX;
10675 count = MIN(gCamCapability[cameraId]->supported_binning_correction_mode_cnt, count);
10676 for (size_t i = 0; i < count; i++) {
10677 int val = lookupFwkName(BINNING_CORRECTION_MODES_MAP,
10678 METADATA_MAP_SIZE(BINNING_CORRECTION_MODES_MAP),
10679 gCamCapability[cameraId]->supported_binning_modes[i]);
10680 if (NAME_NOT_FOUND != val) {
10681 avail_binning_modes[size] = (int32_t)val;
10682 size++;
10683 }
10684 }
10685 staticInfo.update(QCAMERA3_AVAILABLE_BINNING_CORRECTION_MODES,
10686 avail_binning_modes, size);
10687 }
10688
10689 if (gCamCapability[cameraId]->supported_aec_modes_cnt > 0) {
10690 int32_t available_aec_modes[CAM_AEC_MODE_MAX];
10691 size = 0;
10692 count = MIN(gCamCapability[cameraId]->supported_aec_modes_cnt, CAM_AEC_MODE_MAX);
10693 for (size_t i = 0; i < count; i++) {
10694 int32_t val = lookupFwkName(AEC_MODES_MAP, METADATA_MAP_SIZE(AEC_MODES_MAP),
10695 gCamCapability[cameraId]->supported_aec_modes[i]);
10696 if (NAME_NOT_FOUND != val)
10697 available_aec_modes[size++] = val;
10698 }
10699 staticInfo.update(QCAMERA3_EXPOSURE_METER_AVAILABLE_MODES,
10700 available_aec_modes, size);
10701 }
10702
10703 if (gCamCapability[cameraId]->supported_iso_modes_cnt > 0) {
10704 int32_t available_iso_modes[CAM_ISO_MODE_MAX];
10705 size = 0;
10706 count = MIN(gCamCapability[cameraId]->supported_iso_modes_cnt, CAM_ISO_MODE_MAX);
10707 for (size_t i = 0; i < count; i++) {
10708 int32_t val = lookupFwkName(ISO_MODES_MAP, METADATA_MAP_SIZE(ISO_MODES_MAP),
10709 gCamCapability[cameraId]->supported_iso_modes[i]);
10710 if (NAME_NOT_FOUND != val)
10711 available_iso_modes[size++] = val;
10712 }
10713 staticInfo.update(QCAMERA3_ISO_AVAILABLE_MODES,
10714 available_iso_modes, size);
10715 }
10716
10717 int64_t available_exp_time_range[EXPOSURE_TIME_RANGE_CNT];
Jason Lee805955a2017-05-04 10:29:14 -070010718 for (size_t i = 0; i < EXPOSURE_TIME_RANGE_CNT; i++)
Thierry Strudel54dc9782017-02-15 12:12:10 -080010719 available_exp_time_range[i] = gCamCapability[cameraId]->exposure_time_range[i];
10720 staticInfo.update(QCAMERA3_EXP_TIME_RANGE,
10721 available_exp_time_range, EXPOSURE_TIME_RANGE_CNT);
10722
10723 int32_t available_saturation_range[4];
10724 available_saturation_range[0] = gCamCapability[cameraId]->saturation_ctrl.min_value;
10725 available_saturation_range[1] = gCamCapability[cameraId]->saturation_ctrl.max_value;
10726 available_saturation_range[2] = gCamCapability[cameraId]->saturation_ctrl.def_value;
10727 available_saturation_range[3] = gCamCapability[cameraId]->saturation_ctrl.step;
10728 staticInfo.update(QCAMERA3_SATURATION_RANGE,
10729 available_saturation_range, 4);
10730
10731 uint8_t is_hdr_values[2];
10732 is_hdr_values[0] = 0;
10733 is_hdr_values[1] = 1;
10734 staticInfo.update(QCAMERA3_STATS_IS_HDR_SCENE_VALUES,
10735 is_hdr_values, 2);
10736
10737 float is_hdr_confidence_range[2];
10738 is_hdr_confidence_range[0] = 0.0;
10739 is_hdr_confidence_range[1] = 1.0;
10740 staticInfo.update(QCAMERA3_STATS_IS_HDR_SCENE_CONFIDENCE_RANGE,
10741 is_hdr_confidence_range, 2);
10742
Emilian Peev0a972ef2017-03-16 10:25:53 +000010743 size_t eepromLength = strnlen(
10744 reinterpret_cast<const char *>(
10745 gCamCapability[cameraId]->eeprom_version_info),
10746 sizeof(gCamCapability[cameraId]->eeprom_version_info));
10747 if (0 < eepromLength) {
Zhijun Hea557c4c2017-03-16 18:37:53 -070010748 char easelInfo[] = ",E:N";
10749 char *eepromInfo = reinterpret_cast<char *>(gCamCapability[cameraId]->eeprom_version_info);
10750 if (eepromLength + sizeof(easelInfo) < MAX_EEPROM_VERSION_INFO_LEN) {
10751 eepromLength += sizeof(easelInfo);
Chien-Yu Chend77a5462017-06-02 18:00:38 -070010752 strlcat(eepromInfo, ((gEaselManagerClient != nullptr &&
Arnd Geis082a4d72017-08-24 10:33:07 -070010753 gEaselManagerClient->isEaselPresentOnDevice()) ? ",E-ver" : ",E:N"),
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010754 MAX_EEPROM_VERSION_INFO_LEN);
Zhijun Hea557c4c2017-03-16 18:37:53 -070010755 }
Emilian Peev0a972ef2017-03-16 10:25:53 +000010756 staticInfo.update(NEXUS_EXPERIMENTAL_2017_EEPROM_VERSION_INFO,
10757 gCamCapability[cameraId]->eeprom_version_info, eepromLength);
10758 }
10759
Thierry Strudel3d639192016-09-09 11:52:26 -070010760 gStaticMetadata[cameraId] = staticInfo.release();
10761 return rc;
10762}
10763
10764/*===========================================================================
10765 * FUNCTION : makeTable
10766 *
10767 * DESCRIPTION: make a table of sizes
10768 *
10769 * PARAMETERS :
10770 *   @dimTable, @size : source dimension table and its valid entry count
10771 *   @max_size, @sizeTable : output capacity and flattened (width, height) array
10772 *==========================================================================*/
10773void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, size_t size,
10774 size_t max_size, int32_t *sizeTable)
10775{
10776 size_t j = 0;
10777 if (size > max_size) {
10778 size = max_size;
10779 }
10780 for (size_t i = 0; i < size; i++) {
10781 sizeTable[j] = dimTable[i].width;
10782 sizeTable[j+1] = dimTable[i].height;
10783 j+=2;
10784 }
10785}
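// Illustrative example (hypothetical sizes): a dimTable of {4000x3000, 1920x1080}
// is flattened into sizeTable = {4000, 3000, 1920, 1080}, i.e. width/height pairs
// in the order the capability table lists them.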
10786
10787/*===========================================================================
10788 * FUNCTION : makeFPSTable
10789 *
10790 * DESCRIPTION: make a table of fps ranges
10791 *
10792 * PARAMETERS :
10793 *   @fpsTable, @size, @max_size, @fpsRangesTable : source fps range table, its entry count, output capacity, and flattened (min, max) output array
10794 *==========================================================================*/
10795void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, size_t size,
10796 size_t max_size, int32_t *fpsRangesTable)
10797{
10798 size_t j = 0;
10799 if (size > max_size) {
10800 size = max_size;
10801 }
10802 for (size_t i = 0; i < size; i++) {
10803 fpsRangesTable[j] = (int32_t)fpsTable[i].min_fps;
10804 fpsRangesTable[j+1] = (int32_t)fpsTable[i].max_fps;
10805 j+=2;
10806 }
10807}
10808
10809/*===========================================================================
10810 * FUNCTION : makeOverridesList
10811 *
10812 * DESCRIPTION: make a list of scene mode overrides
10813 *
10814 * PARAMETERS :
10815 *   @overridesTable, @size, @max_size : daemon override table, entry count and capacity
10816 *   @overridesList, @supported_indexes, @camera_id : output (AE, AWB, AF) list, framework scene-mode indexes, and camera id
10817 *==========================================================================*/
10818void QCamera3HardwareInterface::makeOverridesList(
10819 cam_scene_mode_overrides_t* overridesTable, size_t size, size_t max_size,
10820 uint8_t *overridesList, uint8_t *supported_indexes, uint32_t camera_id)
10821{
10822 /* The daemon gives a list of overrides for all scene modes.
10823 However, we should send the framework only the overrides for the
10824 scene modes it supports. */
10825 size_t j = 0;
10826 if (size > max_size) {
10827 size = max_size;
10828 }
10829 size_t focus_count = CAM_FOCUS_MODE_MAX;
10830 focus_count = MIN(gCamCapability[camera_id]->supported_focus_modes_cnt,
10831 focus_count);
10832 for (size_t i = 0; i < size; i++) {
10833 bool supt = false;
10834 size_t index = supported_indexes[i];
10835 overridesList[j] = gCamCapability[camera_id]->flash_available ?
10836 ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH : ANDROID_CONTROL_AE_MODE_ON;
10837 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
10838 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
10839 overridesTable[index].awb_mode);
10840 if (NAME_NOT_FOUND != val) {
10841 overridesList[j+1] = (uint8_t)val;
10842 }
10843 uint8_t focus_override = overridesTable[index].af_mode;
10844 for (size_t k = 0; k < focus_count; k++) {
10845 if (gCamCapability[camera_id]->supported_focus_modes[k] == focus_override) {
10846 supt = true;
10847 break;
10848 }
10849 }
10850 if (supt) {
10851 val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
10852 focus_override);
10853 if (NAME_NOT_FOUND != val) {
10854 overridesList[j+2] = (uint8_t)val;
10855 }
10856 } else {
10857 overridesList[j+2] = ANDROID_CONTROL_AF_MODE_OFF;
10858 }
10859 j+=3;
10860 }
10861}
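// Each scene mode contributes an (AE, AWB, AF) triplet to the overrides list, so a
// flash-capable camera whose daemon reports auto white balance and
// continuous-picture focus for a scene mode would (illustratively) emit
// { AE_MODE_ON_AUTO_FLASH, AWB_MODE_AUTO, AF_MODE_CONTINUOUS_PICTURE }.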
10862
10863/*===========================================================================
10864 * FUNCTION : filterJpegSizes
10865 *
10866 * DESCRIPTION: Returns the supported jpeg sizes, keeping only processed sizes
10867 * no smaller than the active array dimensions divided by the downscale factor
10868 *
10869 * PARAMETERS :
10870 *   @jpegSizes : output array; @processedSizes/@processedSizesCnt : input (w, h) sizes; @maxCount : cap; @active_array_size/@downscale_factor : minimum-size bound
10871 * RETURN : length of jpegSizes array
10872 *==========================================================================*/
10873
10874size_t QCamera3HardwareInterface::filterJpegSizes(int32_t *jpegSizes, int32_t *processedSizes,
10875 size_t processedSizesCnt, size_t maxCount, cam_rect_t active_array_size,
10876 uint8_t downscale_factor)
10877{
10878 if (0 == downscale_factor) {
10879 downscale_factor = 1;
10880 }
10881
10882 int32_t min_width = active_array_size.width / downscale_factor;
10883 int32_t min_height = active_array_size.height / downscale_factor;
10884 size_t jpegSizesCnt = 0;
10885 if (processedSizesCnt > maxCount) {
10886 processedSizesCnt = maxCount;
10887 }
10888 for (size_t i = 0; i < processedSizesCnt; i+=2) {
10889 if (processedSizes[i] >= min_width && processedSizes[i+1] >= min_height) {
10890 jpegSizes[jpegSizesCnt] = processedSizes[i];
10891 jpegSizes[jpegSizesCnt+1] = processedSizes[i+1];
10892 jpegSizesCnt += 2;
10893 }
10894 }
10895 return jpegSizesCnt;
10896}
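// Illustrative example (hypothetical numbers): with a 4000x3000 active array and
// downscale_factor 4, only processed sizes of at least 1000x750 survive, so
// {4000,3000, 1920,1080, 640,480} filters down to {4000,3000, 1920,1080} and the
// function returns 4 (two width/height pairs).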
10897
10898/*===========================================================================
10899 * FUNCTION : computeNoiseModelEntryS
10900 *
10901 * DESCRIPTION: function to map a given sensitivity to the S noise
10902 * model parameters in the DNG noise model.
10903 *
10904 * PARAMETERS : sens : the sensor sensitivity
10905 *
10906 * RETURN : S (sensor amplification) noise
10907 *
10908 *==========================================================================*/
10909double QCamera3HardwareInterface::computeNoiseModelEntryS(int32_t sens) {
10910 double s = gCamCapability[mCameraId]->gradient_S * sens +
10911 gCamCapability[mCameraId]->offset_S;
10912 return ((s < 0.0) ? 0.0 : s);
10913}
10914
10915/*===========================================================================
10916 * FUNCTION : computeNoiseModelEntryO
10917 *
10918 * DESCRIPTION: function to map a given sensitivity to the O noise
10919 * model parameters in the DNG noise model.
10920 *
10921 * PARAMETERS : sens : the sensor sensitivity
10922 *
10923 * RETURN : O (sensor readout) noise
10924 *
10925 *==========================================================================*/
10926double QCamera3HardwareInterface::computeNoiseModelEntryO(int32_t sens) {
10927 int32_t max_analog_sens = gCamCapability[mCameraId]->max_analog_sensitivity;
10928 double digital_gain = (1.0 * sens / max_analog_sens) < 1.0 ?
10929 1.0 : (1.0 * sens / max_analog_sens);
10930 double o = gCamCapability[mCameraId]->gradient_O * sens * sens +
10931 gCamCapability[mCameraId]->offset_O * digital_gain * digital_gain;
10932 return ((o < 0.0) ? 0.0 : o);
10933}
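// Together, computeNoiseModelEntryS/O produce the (S, O) pair reported in
// ANDROID_SENSOR_NOISE_PROFILE, i.e. the DNG-style model where the noise variance
// of a normalized signal level x is approximately S * x + O.
// Illustrative example (hypothetical calibration data): with gradient_S = 3e-06 and
// offset_S = 1e-06, a sensitivity of 400 gives S = 3e-06 * 400 + 1e-06 = 1.201e-03.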
10934
10935/*===========================================================================
10936 * FUNCTION : getSensorSensitivity
10937 *
10938 * DESCRIPTION: convert iso_mode to an integer value
10939 *
10940 * PARAMETERS : iso_mode : the iso_mode supported by sensor
10941 *
10942 * RETURN : sensitivity supported by sensor
10943 *
10944 *==========================================================================*/
10945int32_t QCamera3HardwareInterface::getSensorSensitivity(int32_t iso_mode)
10946{
10947 int32_t sensitivity;
10948
10949 switch (iso_mode) {
10950 case CAM_ISO_MODE_100:
10951 sensitivity = 100;
10952 break;
10953 case CAM_ISO_MODE_200:
10954 sensitivity = 200;
10955 break;
10956 case CAM_ISO_MODE_400:
10957 sensitivity = 400;
10958 break;
10959 case CAM_ISO_MODE_800:
10960 sensitivity = 800;
10961 break;
10962 case CAM_ISO_MODE_1600:
10963 sensitivity = 1600;
10964 break;
10965 default:
10966 sensitivity = -1;
10967 break;
10968 }
10969 return sensitivity;
10970}
10971
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010972int QCamera3HardwareInterface::initHdrPlusClientLocked() {
Chien-Yu Chend77a5462017-06-02 18:00:38 -070010973 if (gEaselManagerClient == nullptr) {
10974 gEaselManagerClient = EaselManagerClient::create();
10975 if (gEaselManagerClient == nullptr) {
10976 ALOGE("%s: Failed to create Easel manager client.", __FUNCTION__);
10977 return -ENODEV;
10978 }
10979 }
10980
10981 if (!EaselManagerClientOpened && gEaselManagerClient->isEaselPresentOnDevice()) {
Chien-Yu Chen08309b32017-03-13 17:41:32 -070010982 // Check if HAL should not power on Easel even if it's present. This is to allow HDR+ tests
10983 // to connect to Easel.
10984 bool doNotpowerOnEasel =
10985 property_get_bool("camera.hdrplus.donotpoweroneasel", false);
10986
10987 if (doNotpowerOnEasel) {
Chien-Yu Chen08309b32017-03-13 17:41:32 -070010988 ALOGI("%s: Easel is present but not powered on.", __FUNCTION__);
10989 return OK;
10990 }
10991
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010992 // If Easel is present, power on Easel and suspend it immediately.
Chien-Yu Chend77a5462017-06-02 18:00:38 -070010993 status_t res = gEaselManagerClient->open();
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010994 if (res != OK) {
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070010995 ALOGE("%s: Opening Easel manager client failed: %s (%d)", __FUNCTION__, strerror(-res),
10996 res);
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010997 return res;
10998 }
10999
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070011000 EaselManagerClientOpened = true;
11001
Chien-Yu Chend77a5462017-06-02 18:00:38 -070011002 res = gEaselManagerClient->suspend();
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080011003 if (res != OK) {
11004 ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__, strerror(-res), res);
11005 }
11006
Chien-Yu Chen4d752e32017-06-07 12:13:24 -070011007 gEaselBypassOnly = !property_get_bool("persist.camera.hdrplus.enable", true);
Chien-Yu Chen509314b2017-04-07 15:27:55 -070011008 gEaselProfilingEnabled = property_get_bool("persist.camera.hdrplus.profiling", false);
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070011009 gEnableMultipleHdrplusOutputs =
11010 property_get_bool("persist.camera.hdrplus.multiple_outputs", false);
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070011011
11012 // Expose enableZsl key only when HDR+ mode is enabled.
11013 gExposeEnableZslKey = !gEaselBypassOnly;
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080011014 }
11015
11016 return OK;
11017}
11018
Thierry Strudel3d639192016-09-09 11:52:26 -070011019/*===========================================================================
11020 * FUNCTION : getCamInfo
11021 *
11022 * DESCRIPTION: query camera capabilities
11023 *
11024 * PARAMETERS :
11025 * @cameraId : camera Id
11026 * @info : camera info struct to be filled in with camera capabilities
11027 *
11028 * RETURN : int type of status
11029 * NO_ERROR -- success
11030 * non-zero failure code
11031 *==========================================================================*/
11032int QCamera3HardwareInterface::getCamInfo(uint32_t cameraId,
11033 struct camera_info *info)
11034{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080011035 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_GET_CAM_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -070011036 int rc = 0;
11037
11038 pthread_mutex_lock(&gCamLock);
Zhijun Hea557c4c2017-03-16 18:37:53 -070011039
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070011040 {
Chien-Yu Chen77ccd022017-06-23 12:00:36 -070011041 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070011042 rc = initHdrPlusClientLocked();
11043 if (rc != OK) {
11044 ALOGE("%s: initHdrPlusClientLocked failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
11045 pthread_mutex_unlock(&gCamLock);
11046 return rc;
11047 }
Zhijun Hea557c4c2017-03-16 18:37:53 -070011048 }
11049
Thierry Strudel3d639192016-09-09 11:52:26 -070011050 if (NULL == gCamCapability[cameraId]) {
11051 rc = initCapabilities(cameraId);
11052 if (rc < 0) {
11053 pthread_mutex_unlock(&gCamLock);
11054 return rc;
11055 }
11056 }
11057
11058 if (NULL == gStaticMetadata[cameraId]) {
11059 rc = initStaticMetadata(cameraId);
11060 if (rc < 0) {
11061 pthread_mutex_unlock(&gCamLock);
11062 return rc;
11063 }
11064 }
11065
11066 switch(gCamCapability[cameraId]->position) {
11067 case CAM_POSITION_BACK:
11068 case CAM_POSITION_BACK_AUX:
11069 info->facing = CAMERA_FACING_BACK;
11070 break;
11071
11072 case CAM_POSITION_FRONT:
11073 case CAM_POSITION_FRONT_AUX:
11074 info->facing = CAMERA_FACING_FRONT;
11075 break;
11076
11077 default:
11078 LOGE("Unknown position type %d for camera id:%d",
11079 gCamCapability[cameraId]->position, cameraId);
11080 rc = -1;
11081 break;
11082 }
11083
11084
11085 info->orientation = (int)gCamCapability[cameraId]->sensor_mount_angle;
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011086#ifndef USE_HAL_3_3
11087 info->device_version = CAMERA_DEVICE_API_VERSION_3_4;
11088#else
Thierry Strudel3d639192016-09-09 11:52:26 -070011089 info->device_version = CAMERA_DEVICE_API_VERSION_3_3;
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011090#endif
Thierry Strudel3d639192016-09-09 11:52:26 -070011091 info->static_camera_characteristics = gStaticMetadata[cameraId];
11092
11093 //For now assume both cameras can operate independently.
11094 info->conflicting_devices = NULL;
11095 info->conflicting_devices_length = 0;
11096
11097 //resource cost is 100 * MIN(1.0, m/M),
11098 //where m is throughput requirement with maximum stream configuration
11099 //and M is CPP maximum throughput.
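    //Illustrative example (hypothetical numbers, assuming MAX_PROCESSED_STREAMS
    //were 2): with a 4000x3000 active array, max_fps of 30 and a CPP bandwidth of
    //1.2e9 pixels/s, ratio = 2 * 4000 * 3000 * 30 / 1.2e9 = 0.6, so resource_cost = 60.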
11100 float max_fps = 0.0;
11101 for (uint32_t i = 0;
11102 i < gCamCapability[cameraId]->fps_ranges_tbl_cnt; i++) {
11103 if (max_fps < gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps)
11104 max_fps = gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps;
11105 }
11106 float ratio = 1.0 * MAX_PROCESSED_STREAMS *
11107 gCamCapability[cameraId]->active_array_size.width *
11108 gCamCapability[cameraId]->active_array_size.height * max_fps /
11109 gCamCapability[cameraId]->max_pixel_bandwidth;
11110 info->resource_cost = 100 * MIN(1.0, ratio);
11111 LOGI("camera %d resource cost is %d", cameraId,
11112 info->resource_cost);
11113
11114 pthread_mutex_unlock(&gCamLock);
11115 return rc;
11116}
11117
11118/*===========================================================================
11119 * FUNCTION : translateCapabilityToMetadata
11120 *
11121 * DESCRIPTION: translate the capability into camera_metadata_t
11122 *
11123 * PARAMETERS : type of the request
11124 *
11125 *
11126 * RETURN : success: camera_metadata_t*
11127 * failure: NULL
11128 *
11129 *==========================================================================*/
11130camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type)
11131{
11132 if (mDefaultMetadata[type] != NULL) {
11133 return mDefaultMetadata[type];
11134 }
11135 //first time we are handling this request
11136 //fill up the metadata structure using the wrapper class
11137 CameraMetadata settings;
11138 //translate from cam_capability_t to camera_metadata_tag_t
11139 static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
11140 settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
11141 int32_t defaultRequestID = 0;
11142 settings.update(ANDROID_REQUEST_ID, &defaultRequestID, 1);
11143
11144 /* OIS disable */
11145 char ois_prop[PROPERTY_VALUE_MAX];
11146 memset(ois_prop, 0, sizeof(ois_prop));
11147 property_get("persist.camera.ois.disable", ois_prop, "0");
11148 uint8_t ois_disable = (uint8_t)atoi(ois_prop);
11149
11150 /* Force video to use OIS */
11151 char videoOisProp[PROPERTY_VALUE_MAX];
11152 memset(videoOisProp, 0, sizeof(videoOisProp));
11153 property_get("persist.camera.ois.video", videoOisProp, "1");
11154 uint8_t forceVideoOis = (uint8_t)atoi(videoOisProp);
Shuzhen Wang19463d72016-03-08 11:09:52 -080011155
11156 // Hybrid AE enable/disable
11157 char hybrid_ae_prop[PROPERTY_VALUE_MAX];
11158 memset(hybrid_ae_prop, 0, sizeof(hybrid_ae_prop));
11159 property_get("persist.camera.hybrid_ae.enable", hybrid_ae_prop, "0");
Shuzhen Wang77b049a2017-08-30 12:24:36 -070011160 uint8_t hybrid_ae = (uint8_t)atoi(hybrid_ae_prop);
Shuzhen Wang19463d72016-03-08 11:09:52 -080011161
Thierry Strudel3d639192016-09-09 11:52:26 -070011162 uint8_t controlIntent = 0;
11163 uint8_t focusMode;
11164 uint8_t vsMode;
11165 uint8_t optStabMode;
11166 uint8_t cacMode;
11167 uint8_t edge_mode;
11168 uint8_t noise_red_mode;
11169 uint8_t tonemap_mode;
11170 bool highQualityModeEntryAvailable = FALSE;
11171 bool fastModeEntryAvailable = FALSE;
Shuzhen Wang14415f52016-11-16 18:26:18 -080011172 uint8_t histogramEnable = false;
Thierry Strudel3d639192016-09-09 11:52:26 -070011173 vsMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
11174 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Shuzhen Wang8f66c042016-08-17 14:50:26 -070011175 uint8_t shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
Shuzhen Wangcc386c52017-03-29 09:28:08 -070011176 uint8_t trackingAfTrigger = NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER_IDLE;
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070011177 uint8_t enableZsl = ANDROID_CONTROL_ENABLE_ZSL_FALSE;
Mansoor Aftabea39eba2017-01-26 14:58:25 -080011178
Thierry Strudel3d639192016-09-09 11:52:26 -070011179 switch (type) {
11180 case CAMERA3_TEMPLATE_PREVIEW:
11181 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
11182 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
11183 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11184 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11185 edge_mode = ANDROID_EDGE_MODE_FAST;
11186 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11187 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11188 break;
11189 case CAMERA3_TEMPLATE_STILL_CAPTURE:
11190 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
11191 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
11192 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11193 edge_mode = ANDROID_EDGE_MODE_HIGH_QUALITY;
11194 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY;
11195 tonemap_mode = ANDROID_TONEMAP_MODE_HIGH_QUALITY;
11196 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
11197 // Order of priority for default CAC is HIGH Quality -> FAST -> OFF
11198 for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
11199 if (gCamCapability[mCameraId]->aberration_modes[i] ==
11200 CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
11201 highQualityModeEntryAvailable = TRUE;
11202 } else if (gCamCapability[mCameraId]->aberration_modes[i] ==
11203 CAM_COLOR_CORRECTION_ABERRATION_FAST) {
11204 fastModeEntryAvailable = TRUE;
11205 }
11206 }
11207 if (highQualityModeEntryAvailable) {
11208 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY;
11209 } else if (fastModeEntryAvailable) {
11210 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11211 }
Shuzhen Wang8f66c042016-08-17 14:50:26 -070011212 if (CAM_SENSOR_RAW == gCamCapability[mCameraId]->sensor_type.sens_type) {
11213 shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON;
11214 }
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070011215 enableZsl = ANDROID_CONTROL_ENABLE_ZSL_TRUE;
Thierry Strudel3d639192016-09-09 11:52:26 -070011216 break;
11217 case CAMERA3_TEMPLATE_VIDEO_RECORD:
11218 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
11219 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
11220 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Thierry Strudel3d639192016-09-09 11:52:26 -070011221 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11222 edge_mode = ANDROID_EDGE_MODE_FAST;
11223 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11224 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11225 if (forceVideoOis)
11226 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11227 break;
11228 case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
11229 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
11230 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
11231 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Thierry Strudel3d639192016-09-09 11:52:26 -070011232 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11233 edge_mode = ANDROID_EDGE_MODE_FAST;
11234 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11235 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11236 if (forceVideoOis)
11237 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11238 break;
11239 case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
11240 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
11241 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
11242 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11243 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11244 edge_mode = ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG;
11245 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG;
11246 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11247 break;
11248 case CAMERA3_TEMPLATE_MANUAL:
11249 edge_mode = ANDROID_EDGE_MODE_FAST;
11250 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11251 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11252 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11253 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_MANUAL;
11254 focusMode = ANDROID_CONTROL_AF_MODE_OFF;
11255 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
11256 break;
11257 default:
11258 edge_mode = ANDROID_EDGE_MODE_FAST;
11259 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11260 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11261 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11262 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
11263 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
11264 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
11265 break;
11266 }
Thierry Strudel04e026f2016-10-10 11:27:36 -070011267 // Set CAC to OFF if the underlying device doesn't support it
11268 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
11269 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
11270 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011271 settings.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &cacMode, 1);
11272 settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
11273 settings.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vsMode, 1);
11274 if (gCamCapability[mCameraId]->supported_focus_modes_cnt == 1) {
11275 focusMode = ANDROID_CONTROL_AF_MODE_OFF;
11276 }
11277 settings.update(ANDROID_CONTROL_AF_MODE, &focusMode, 1);
Shuzhen Wang14415f52016-11-16 18:26:18 -080011278 settings.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE, &histogramEnable, 1);
Shuzhen Wangcc386c52017-03-29 09:28:08 -070011279 settings.update(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER, &trackingAfTrigger, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011280
11281 if (gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
11282 gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_ON)
11283 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11284 else if ((gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
11285 gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_OFF)
11286 || ois_disable)
11287 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
11288 settings.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &optStabMode, 1);
Shuzhen Wang8f66c042016-08-17 14:50:26 -070011289 settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &shadingmap_mode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011290
11291 settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
11292 &gCamCapability[mCameraId]->exposure_compensation_default, 1);
11293
11294 static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
11295 settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
11296
11297 static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
11298 settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
11299
11300 static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
11301 settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
11302
11303 static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
11304 settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
11305
11306 static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
11307 settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
11308
11309 static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;
11310 settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
11311
11312 static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
11313 settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
11314
11315 /*flash*/
11316 static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
11317 settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
11318
11319 static const uint8_t flashFiringLevel = CAM_FLASH_FIRING_LEVEL_4;
11320 settings.update(ANDROID_FLASH_FIRING_POWER,
11321 &flashFiringLevel, 1);
11322
11323 /* lens */
11324 float default_aperture = gCamCapability[mCameraId]->apertures[0];
11325 settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1);
11326
11327 if (gCamCapability[mCameraId]->filter_densities_count) {
11328 float default_filter_density = gCamCapability[mCameraId]->filter_densities[0];
11329 settings.update(ANDROID_LENS_FILTER_DENSITY, &default_filter_density,
11330 gCamCapability[mCameraId]->filter_densities_count);
11331 }
11332
11333 float default_focal_length = gCamCapability[mCameraId]->focal_length;
11334 settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1);
11335
Thierry Strudel3d639192016-09-09 11:52:26 -070011336 static const uint8_t demosaicMode = ANDROID_DEMOSAIC_MODE_FAST;
11337 settings.update(ANDROID_DEMOSAIC_MODE, &demosaicMode, 1);
11338
11339 static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
11340 settings.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
11341
11342 static const int32_t testpatternMode = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;
11343 settings.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &testpatternMode, 1);
11344
11345 /* face detection (default to OFF) */
11346 static const uint8_t faceDetectMode = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
11347 settings.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &faceDetectMode, 1);
11348
Thierry Strudel54dc9782017-02-15 12:12:10 -080011349 static const uint8_t histogramMode = QCAMERA3_HISTOGRAM_MODE_OFF;
11350 settings.update(QCAMERA3_HISTOGRAM_MODE, &histogramMode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011351
11352 static const uint8_t sharpnessMapMode = ANDROID_STATISTICS_SHARPNESS_MAP_MODE_OFF;
11353 settings.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &sharpnessMapMode, 1);
11354
11355 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
11356 settings.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
11357
Thierry Strudel3d639192016-09-09 11:52:26 -070011358
11359 static const uint8_t blackLevelLock = ANDROID_BLACK_LEVEL_LOCK_OFF;
11360 settings.update(ANDROID_BLACK_LEVEL_LOCK, &blackLevelLock, 1);
11361
11362 /* Exposure time (Update the Min Exposure Time) */
11363 int64_t default_exposure_time = gCamCapability[mCameraId]->exposure_time_range[0];
11364 settings.update(ANDROID_SENSOR_EXPOSURE_TIME, &default_exposure_time, 1);
11365
11366 /* frame duration */
11367 static const int64_t default_frame_duration = NSEC_PER_33MSEC;
11368 settings.update(ANDROID_SENSOR_FRAME_DURATION, &default_frame_duration, 1);
11369
11370 /* sensitivity */
11371 static const int32_t default_sensitivity = 100;
11372 settings.update(ANDROID_SENSOR_SENSITIVITY, &default_sensitivity, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011373#ifndef USE_HAL_3_3
11374 static const int32_t default_isp_sensitivity =
11375 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
11376 settings.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &default_isp_sensitivity, 1);
11377#endif
Thierry Strudel3d639192016-09-09 11:52:26 -070011378
11379 /*edge mode*/
11380 settings.update(ANDROID_EDGE_MODE, &edge_mode, 1);
11381
11382 /*noise reduction mode*/
11383 settings.update(ANDROID_NOISE_REDUCTION_MODE, &noise_red_mode, 1);
11384
11385 /*color correction mode*/
11386 static const uint8_t color_correct_mode = ANDROID_COLOR_CORRECTION_MODE_FAST;
11387 settings.update(ANDROID_COLOR_CORRECTION_MODE, &color_correct_mode, 1);
11388
11389 /*transform matrix mode*/
11390 settings.update(ANDROID_TONEMAP_MODE, &tonemap_mode, 1);
11391
11392 int32_t scaler_crop_region[4];
11393 scaler_crop_region[0] = 0;
11394 scaler_crop_region[1] = 0;
11395 scaler_crop_region[2] = gCamCapability[mCameraId]->active_array_size.width;
11396 scaler_crop_region[3] = gCamCapability[mCameraId]->active_array_size.height;
11397 settings.update(ANDROID_SCALER_CROP_REGION, scaler_crop_region, 4);
11398
11399 static const uint8_t antibanding_mode = ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
11400 settings.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &antibanding_mode, 1);
11401
11402 /*focus distance*/
11403 float focus_distance = 0.0;
11404 settings.update(ANDROID_LENS_FOCUS_DISTANCE, &focus_distance, 1);
11405
11406 /*target fps range: use maximum range for picture, and maximum fixed range for video*/
Thierry Strudele80ad7c2016-12-06 10:16:27 -080011407 /* Restrict template max_fps to 30 */
Thierry Strudel3d639192016-09-09 11:52:26 -070011408 float max_range = 0.0;
11409 float max_fixed_fps = 0.0;
11410 int32_t fps_range[2] = {0, 0};
11411 for (uint32_t i = 0; i < gCamCapability[mCameraId]->fps_ranges_tbl_cnt;
11412 i++) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -080011413 if (gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps >
11414 TEMPLATE_MAX_PREVIEW_FPS) {
11415 continue;
11416 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011417 float range = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps -
11418 gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
11419 if (type == CAMERA3_TEMPLATE_PREVIEW ||
11420 type == CAMERA3_TEMPLATE_STILL_CAPTURE ||
11421 type == CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG) {
11422 if (range > max_range) {
11423 fps_range[0] =
11424 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
11425 fps_range[1] =
11426 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
11427 max_range = range;
11428 }
11429 } else {
11430 if (range < 0.01 && max_fixed_fps <
11431 gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps) {
11432 fps_range[0] =
11433 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
11434 fps_range[1] =
11435 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
11436 max_fixed_fps = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
11437 }
11438 }
11439 }
11440 settings.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, fps_range, 2);
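    /* Illustrative sketch (hypothetical capability values, not part of the HAL logic
     * above): assume the fps table advertises {7.5,30}, {15,30}, {30,30} and {60,60},
     * and TEMPLATE_MAX_PREVIEW_FPS is 30. {60,60} is dropped by the max-fps filter.
     * PREVIEW/STILL_CAPTURE/ZSL templates then pick the widest span, [7.5,30]
     * (span 22.5), while the remaining templates (e.g. VIDEO_RECORD) pick the
     * highest fixed range, [30,30], since its span is below the 0.01 threshold. */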
11441
11442 /*precapture trigger*/
11443 uint8_t precapture_trigger = ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE;
11444 settings.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, &precapture_trigger, 1);
11445
11446 /*af trigger*/
11447 uint8_t af_trigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
11448 settings.update(ANDROID_CONTROL_AF_TRIGGER, &af_trigger, 1);
11449
11450 /* ae & af regions */
11451 int32_t active_region[] = {
11452 gCamCapability[mCameraId]->active_array_size.left,
11453 gCamCapability[mCameraId]->active_array_size.top,
11454 gCamCapability[mCameraId]->active_array_size.left +
11455 gCamCapability[mCameraId]->active_array_size.width,
11456 gCamCapability[mCameraId]->active_array_size.top +
11457 gCamCapability[mCameraId]->active_array_size.height,
11458 0};
11459 settings.update(ANDROID_CONTROL_AE_REGIONS, active_region,
11460 sizeof(active_region) / sizeof(active_region[0]));
11461 settings.update(ANDROID_CONTROL_AF_REGIONS, active_region,
11462 sizeof(active_region) / sizeof(active_region[0]));
11463
11464 /* black level lock */
11465 uint8_t blacklevel_lock = ANDROID_BLACK_LEVEL_LOCK_OFF;
11466 settings.update(ANDROID_BLACK_LEVEL_LOCK, &blacklevel_lock, 1);
11467
Thierry Strudel3d639192016-09-09 11:52:26 -070011468 //special defaults for manual template
11469 if (type == CAMERA3_TEMPLATE_MANUAL) {
11470 static const uint8_t manualControlMode = ANDROID_CONTROL_MODE_OFF;
11471 settings.update(ANDROID_CONTROL_MODE, &manualControlMode, 1);
11472
11473 static const uint8_t manualFocusMode = ANDROID_CONTROL_AF_MODE_OFF;
11474 settings.update(ANDROID_CONTROL_AF_MODE, &manualFocusMode, 1);
11475
11476 static const uint8_t manualAeMode = ANDROID_CONTROL_AE_MODE_OFF;
11477 settings.update(ANDROID_CONTROL_AE_MODE, &manualAeMode, 1);
11478
11479 static const uint8_t manualAwbMode = ANDROID_CONTROL_AWB_MODE_OFF;
11480 settings.update(ANDROID_CONTROL_AWB_MODE, &manualAwbMode, 1);
11481
11482 static const uint8_t manualTonemapMode = ANDROID_TONEMAP_MODE_FAST;
11483 settings.update(ANDROID_TONEMAP_MODE, &manualTonemapMode, 1);
11484
11485 static const uint8_t manualColorCorrectMode = ANDROID_COLOR_CORRECTION_MODE_TRANSFORM_MATRIX;
11486 settings.update(ANDROID_COLOR_CORRECTION_MODE, &manualColorCorrectMode, 1);
11487 }
11488
11489
11490 /* TNR
11491 * This is where we decide for which templates TNR will be enabled.
11492 * TNR is enabled if either the preview or the video stream requires it.
11493 * This is not to be confused with per-stream linking; that decision is
11494 * still made per session and is handled as part of stream configuration.
11495 */
11496 uint8_t tnr_enable = 0;
11497
11498 if (m_bTnrPreview || m_bTnrVideo) {
11499
11500 switch (type) {
11501 case CAMERA3_TEMPLATE_VIDEO_RECORD:
11502 tnr_enable = 1;
11503 break;
11504
11505 default:
11506 tnr_enable = 0;
11507 break;
11508 }
11509
11510 int32_t tnr_process_type = (int32_t)getTemporalDenoiseProcessPlate();
11511 settings.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
11512 settings.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
11513
11514 LOGD("TNR:%d with process plate %d for template:%d",
11515 tnr_enable, tnr_process_type, type);
11516 }
11517
11518 //Update Link tags to default
Shuzhen Wang920ea402017-05-03 08:49:39 -070011519 uint8_t sync_type = CAM_TYPE_STANDALONE;
Thierry Strudel3d639192016-09-09 11:52:26 -070011520 settings.update(QCAMERA3_DUALCAM_LINK_ENABLE, &sync_type, 1);
11521
Chien-Yu Chena3bbdc02017-05-05 11:31:47 -070011522 uint8_t is_main = 1;
Thierry Strudel3d639192016-09-09 11:52:26 -070011523 settings.update(QCAMERA3_DUALCAM_LINK_IS_MAIN, &is_main, 1);
11524
Shuzhen Wang920ea402017-05-03 08:49:39 -070011525 uint8_t related_camera_id = mCameraId;
11526 settings.update(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID, &related_camera_id, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011527
11528 /* CDS default */
11529 char prop[PROPERTY_VALUE_MAX];
11530 memset(prop, 0, sizeof(prop));
11531 property_get("persist.camera.CDS", prop, "Auto");
11532 cam_cds_mode_type_t cds_mode = CAM_CDS_MODE_AUTO;
11533 cds_mode = lookupProp(CDS_MAP, METADATA_MAP_SIZE(CDS_MAP), prop);
11534 if (CAM_CDS_MODE_MAX == cds_mode) {
11535 cds_mode = CAM_CDS_MODE_AUTO;
11536 }
11537
11538 /* Disabling CDS in templates which have TNR enabled*/
11539 if (tnr_enable)
11540 cds_mode = CAM_CDS_MODE_OFF;
11541
11542 int32_t mode = cds_mode;
11543 settings.update(QCAMERA3_CDS_MODE, &mode, 1);
Thierry Strudel04e026f2016-10-10 11:27:36 -070011544
Thierry Strudel269c81a2016-10-12 12:13:59 -070011545 /* Manual Convergence AEC Speed is disabled by default*/
11546 float default_aec_speed = 0;
11547 settings.update(QCAMERA3_AEC_CONVERGENCE_SPEED, &default_aec_speed, 1);
11548
11549 /* Manual Convergence AWB Speed is disabled by default*/
11550 float default_awb_speed = 0;
11551 settings.update(QCAMERA3_AWB_CONVERGENCE_SPEED, &default_awb_speed, 1);
11552
Thierry Strudel295a0ca2016-11-03 18:38:47 -070011553 // Set instant AEC to normal convergence by default
11554 int32_t instant_aec_mode = (int32_t)QCAMERA3_INSTANT_AEC_NORMAL_CONVERGENCE;
11555 settings.update(QCAMERA3_INSTANT_AEC_MODE, &instant_aec_mode, 1);
11556
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070011557 if (gExposeEnableZslKey) {
11558 settings.update(ANDROID_CONTROL_ENABLE_ZSL, &enableZsl, 1);
Chien-Yu Chen0a921f92017-08-27 17:25:33 -070011559 int32_t postview = 0;
11560 settings.update(NEXUS_EXPERIMENTAL_2017_POSTVIEW, &postview, 1);
Chien-Yu Chenb0981e32017-08-28 19:27:35 -070011561 int32_t continuousZslCapture = 0;
11562 settings.update(NEXUS_EXPERIMENTAL_2017_CONTINUOUS_ZSL_CAPTURE, &continuousZslCapture, 1);
Chien-Yu Chenec328c82017-08-30 16:41:08 -070011563 // Disable HDR+ for templates other than CAMERA3_TEMPLATE_STILL_CAPTURE.
11564 int32_t disableHdrplus = (type == CAMERA3_TEMPLATE_STILL_CAPTURE) ? 0 : 1;
11565 settings.update(NEXUS_EXPERIMENTAL_2017_DISABLE_HDRPLUS, &disableHdrplus, 1);
11566
Shuzhen Wang77b049a2017-08-30 12:24:36 -070011567 // Set hybrid_ae tag in PREVIEW and STILL_CAPTURE templates to 1 so that
11568 // hybrid ae is enabled for 3rd party app HDR+.
11569 if (type == CAMERA3_TEMPLATE_PREVIEW ||
11570 type == CAMERA3_TEMPLATE_STILL_CAPTURE) {
11571 hybrid_ae = 1;
11572 }
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070011573 }
Shuzhen Wang77b049a2017-08-30 12:24:36 -070011574 /* hybrid ae */
11575 settings.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &hybrid_ae, 1);
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070011576
Thierry Strudel3d639192016-09-09 11:52:26 -070011577 mDefaultMetadata[type] = settings.release();
11578
11579 return mDefaultMetadata[type];
11580}
11581
11582/*===========================================================================
Emilian Peev30522a12017-08-03 14:36:33 +010011583 * FUNCTION : getExpectedFrameDuration
11584 *
11585 * DESCRIPTION: Extract the maximum frame duration from either exposure or frame
11586 * duration
11587 *
11588 * PARAMETERS :
11589 * @request : request settings
11590 * @frameDuration : The maximum frame duration in nanoseconds
11591 *
11592 * RETURN : None
11593 *==========================================================================*/
11594void QCamera3HardwareInterface::getExpectedFrameDuration(
11595 const camera_metadata_t *request, nsecs_t *frameDuration /*out*/) {
11596 if (nullptr == frameDuration) {
11597 return;
11598 }
11599
11600 camera_metadata_ro_entry_t e = camera_metadata_ro_entry_t();
11601 find_camera_metadata_ro_entry(request,
11602 ANDROID_SENSOR_EXPOSURE_TIME,
11603 &e);
11604 if (e.count > 0) {
11605 *frameDuration = e.data.i64[0];
11606 }
11607 find_camera_metadata_ro_entry(request,
11608 ANDROID_SENSOR_FRAME_DURATION,
11609 &e);
11610 if (e.count > 0) {
11611 *frameDuration = std::max(e.data.i64[0], *frameDuration);
11612 }
11613}
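/* Worked example (hypothetical values, for illustration only): with
 * ANDROID_SENSOR_EXPOSURE_TIME = 50000000 ns and
 * ANDROID_SENSOR_FRAME_DURATION = 33333333 ns, the routine above first copies the
 * exposure time and then takes the max of the two, so *frameDuration ends up as
 * 50000000 ns. If neither tag is present, *frameDuration is left untouched, which
 * is why callers pre-initialize it (see calculateMaxExpectedDuration below). */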
11614
11615/*===========================================================================
11616 * FUNCTION : calculateMaxExpectedDuration
11617 *
11618 * DESCRIPTION: Calculate the expected frame duration in nanoseconds given the
11619 * current camera settings.
11620 *
11621 * PARAMETERS :
11622 * @request : request settings
11623 *
11624 * RETURN : Expected frame duration in nanoseconds.
11625 *==========================================================================*/
11626nsecs_t QCamera3HardwareInterface::calculateMaxExpectedDuration(
11627 const camera_metadata_t *request) {
11628 nsecs_t maxExpectedDuration = kDefaultExpectedDuration;
11629 camera_metadata_ro_entry_t e = camera_metadata_ro_entry_t();
11630 find_camera_metadata_ro_entry(request, ANDROID_CONTROL_MODE, &e);
11631 if (e.count == 0) {
11632 return maxExpectedDuration;
11633 }
11634
11635 if (e.data.u8[0] == ANDROID_CONTROL_MODE_OFF) {
11636 getExpectedFrameDuration(request, &maxExpectedDuration /*out*/);
11637 }
11638
11639 if (e.data.u8[0] != ANDROID_CONTROL_MODE_AUTO) {
11640 return maxExpectedDuration;
11641 }
11642
11643 find_camera_metadata_ro_entry(request, ANDROID_CONTROL_AE_MODE, &e);
11644 if (e.count == 0) {
11645 return maxExpectedDuration;
11646 }
11647
11648 switch (e.data.u8[0]) {
11649 case ANDROID_CONTROL_AE_MODE_OFF:
11650 getExpectedFrameDuration(request, &maxExpectedDuration /*out*/);
11651 break;
11652 default:
11653 find_camera_metadata_ro_entry(request,
11654 ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
11655 &e);
11656 if (e.count > 1) {
11657 maxExpectedDuration = 1e9 / e.data.i32[0];
11658 }
11659 break;
11660 }
11661
11662 return maxExpectedDuration;
11663}
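/* Worked example (hypothetical values, for illustration only):
 * - ANDROID_CONTROL_MODE absent                    -> kDefaultExpectedDuration.
 * - ANDROID_CONTROL_MODE_OFF, or AE mode off, with
 *   exposure 50 ms and frame duration 33.3 ms      -> 50 ms (via getExpectedFrameDuration).
 * - ANDROID_CONTROL_MODE_AUTO with AE on and
 *   aeTargetFpsRange = [15, 30]                    -> 1e9 / 15 ~= 66.7 ms, i.e. the
 *   minimum fps of the target range bounds the longest expected frame. */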
11664
11665/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070011666 * FUNCTION : setFrameParameters
11667 *
11668 * DESCRIPTION: set parameters per frame as requested in the metadata from
11669 * framework
11670 *
11671 * PARAMETERS :
11672 * @request : request that needs to be serviced
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011673 * @streamsArray : Stream ID of all the requested streams
Thierry Strudel3d639192016-09-09 11:52:26 -070011674 * @blob_request: Whether this request is a blob request or not
11675 *
11676 * RETURN : success: NO_ERROR
11677 * failure:
11678 *==========================================================================*/
11679int QCamera3HardwareInterface::setFrameParameters(
11680 camera3_capture_request_t *request,
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011681 cam_stream_ID_t streamsArray,
Thierry Strudel3d639192016-09-09 11:52:26 -070011682 int blob_request,
11683 uint32_t snapshotStreamId)
11684{
11685 /*translate from camera_metadata_t type to parm_type_t*/
11686 int rc = 0;
11687 int32_t hal_version = CAM_HAL_V3;
11688
11689 clear_metadata_buffer(mParameters);
11690 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version)) {
11691 LOGE("Failed to set hal version in the parameters");
11692 return BAD_VALUE;
11693 }
11694
11695 /*we need to update the frame number in the parameters*/
11696 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_FRAME_NUMBER,
11697 request->frame_number)) {
11698 LOGE("Failed to set the frame number in the parameters");
11699 return BAD_VALUE;
11700 }
11701
11702 /* Update stream id of all the requested buffers */
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011703 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID, streamsArray)) {
Thierry Strudel3d639192016-09-09 11:52:26 -070011704 LOGE("Failed to set stream type mask in the parameters");
11705 return BAD_VALUE;
11706 }
11707
11708 if (mUpdateDebugLevel) {
11709 uint32_t dummyDebugLevel = 0;
11710 /* The value of dummyDebugLevel is irrelevant. On
11711 * CAM_INTF_PARM_UPDATE_DEBUG_LEVEL, the debug property is read. */
11712 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_UPDATE_DEBUG_LEVEL,
11713 dummyDebugLevel)) {
11714 LOGE("Failed to set UPDATE_DEBUG_LEVEL");
11715 return BAD_VALUE;
11716 }
11717 mUpdateDebugLevel = false;
11718 }
11719
11720 if(request->settings != NULL){
Emilian Peev30522a12017-08-03 14:36:33 +010011721 mExpectedFrameDuration = calculateMaxExpectedDuration(request->settings);
Thierry Strudel3d639192016-09-09 11:52:26 -070011722 rc = translateToHalMetadata(request, mParameters, snapshotStreamId);
11723 if (blob_request)
11724 memcpy(mPrevParameters, mParameters, sizeof(metadata_buffer_t));
11725 }
11726
11727 return rc;
11728}
11729
11730/*===========================================================================
11731 * FUNCTION : setReprocParameters
11732 *
11733 * DESCRIPTION: Translate frameworks metadata to HAL metadata structure, and
11734 * return it.
11735 *
11736 * PARAMETERS :
11737 * @request : request that needs to be serviced
11738 *
11739 * RETURN : success: NO_ERROR
11740 * failure:
11741 *==========================================================================*/
11742int32_t QCamera3HardwareInterface::setReprocParameters(
11743 camera3_capture_request_t *request, metadata_buffer_t *reprocParam,
11744 uint32_t snapshotStreamId)
11745{
11746 /*translate from camera_metadata_t type to parm_type_t*/
11747 int rc = 0;
11748
11749 if (NULL == request->settings){
11750 LOGE("Reprocess settings cannot be NULL");
11751 return BAD_VALUE;
11752 }
11753
11754 if (NULL == reprocParam) {
11755 LOGE("Invalid reprocessing metadata buffer");
11756 return BAD_VALUE;
11757 }
11758 clear_metadata_buffer(reprocParam);
11759
11760 /*we need to update the frame number in the parameters*/
11761 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FRAME_NUMBER,
11762 request->frame_number)) {
11763 LOGE("Failed to set the frame number in the parameters");
11764 return BAD_VALUE;
11765 }
11766
11767 rc = translateToHalMetadata(request, reprocParam, snapshotStreamId);
11768 if (rc < 0) {
11769 LOGE("Failed to translate reproc request");
11770 return rc;
11771 }
11772
11773 CameraMetadata frame_settings;
11774 frame_settings = request->settings;
11775 if (frame_settings.exists(QCAMERA3_CROP_COUNT_REPROCESS) &&
11776 frame_settings.exists(QCAMERA3_CROP_REPROCESS)) {
11777 int32_t *crop_count =
11778 frame_settings.find(QCAMERA3_CROP_COUNT_REPROCESS).data.i32;
11779 int32_t *crop_data =
11780 frame_settings.find(QCAMERA3_CROP_REPROCESS).data.i32;
11781 int32_t *roi_map =
11782 frame_settings.find(QCAMERA3_CROP_ROI_MAP_REPROCESS).data.i32;
11783 if ((0 < *crop_count) && (*crop_count < MAX_NUM_STREAMS)) {
11784 cam_crop_data_t crop_meta;
11785 memset(&crop_meta, 0, sizeof(cam_crop_data_t));
11786 crop_meta.num_of_streams = 1;
11787 crop_meta.crop_info[0].crop.left = crop_data[0];
11788 crop_meta.crop_info[0].crop.top = crop_data[1];
11789 crop_meta.crop_info[0].crop.width = crop_data[2];
11790 crop_meta.crop_info[0].crop.height = crop_data[3];
11791
11792 crop_meta.crop_info[0].roi_map.left =
11793 roi_map[0];
11794 crop_meta.crop_info[0].roi_map.top =
11795 roi_map[1];
11796 crop_meta.crop_info[0].roi_map.width =
11797 roi_map[2];
11798 crop_meta.crop_info[0].roi_map.height =
11799 roi_map[3];
11800
11801 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_CROP_DATA, crop_meta)) {
11802 rc = BAD_VALUE;
11803 }
11804 LOGD("Found reprocess crop data for stream %p %dx%d, %dx%d",
11805 request->input_buffer->stream,
11806 crop_meta.crop_info[0].crop.left,
11807 crop_meta.crop_info[0].crop.top,
11808 crop_meta.crop_info[0].crop.width,
11809 crop_meta.crop_info[0].crop.height);
11810 LOGD("Found reprocess roi map data for stream %p %dx%d, %dx%d",
11811 request->input_buffer->stream,
11812 crop_meta.crop_info[0].roi_map.left,
11813 crop_meta.crop_info[0].roi_map.top,
11814 crop_meta.crop_info[0].roi_map.width,
11815 crop_meta.crop_info[0].roi_map.height);
11816 } else {
11817 LOGE("Invalid reprocess crop count %d!", *crop_count);
11818 }
11819 } else {
11820 LOGE("No crop data from matching output stream");
11821 }
11822
11823 /* These settings are not needed for regular requests so handle them specially for
11824 reprocess requests; information needed for EXIF tags */
11825 if (frame_settings.exists(ANDROID_FLASH_MODE)) {
11826 int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
11827 (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
11828 if (NAME_NOT_FOUND != val) {
11829 uint32_t flashMode = (uint32_t)val;
11830 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_MODE, flashMode)) {
11831 rc = BAD_VALUE;
11832 }
11833 } else {
11834 LOGE("Could not map fwk flash mode %d to correct hal flash mode",
11835 frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
11836 }
11837 } else {
11838 LOGH("No flash mode in reprocess settings");
11839 }
11840
11841 if (frame_settings.exists(ANDROID_FLASH_STATE)) {
11842 int32_t flashState = (int32_t)frame_settings.find(ANDROID_FLASH_STATE).data.u8[0];
11843 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_STATE, flashState)) {
11844 rc = BAD_VALUE;
11845 }
11846 } else {
11847 LOGH("No flash state in reprocess settings");
11848 }
11849
11850 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS)) {
11851 uint8_t *reprocessFlags =
11852 frame_settings.find(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS).data.u8;
11853 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_REPROCESS_FLAGS,
11854 *reprocessFlags)) {
11855 rc = BAD_VALUE;
11856 }
11857 }
11858
Thierry Strudel54dc9782017-02-15 12:12:10 -080011859 // Add exif debug data to internal metadata
11860 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB)) {
11861 mm_jpeg_debug_exif_params_t *debug_params =
11862 (mm_jpeg_debug_exif_params_t *)frame_settings.find
11863 (QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB).data.u8;
11864 // AE
11865 if (debug_params->ae_debug_params_valid == TRUE) {
11866 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AE,
11867 debug_params->ae_debug_params);
11868 }
11869 // AWB
11870 if (debug_params->awb_debug_params_valid == TRUE) {
11871 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AWB,
11872 debug_params->awb_debug_params);
11873 }
11874 // AF
11875 if (debug_params->af_debug_params_valid == TRUE) {
11876 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AF,
11877 debug_params->af_debug_params);
11878 }
11879 // ASD
11880 if (debug_params->asd_debug_params_valid == TRUE) {
11881 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_ASD,
11882 debug_params->asd_debug_params);
11883 }
11884 // Stats
11885 if (debug_params->stats_debug_params_valid == TRUE) {
11886 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_STATS,
11887 debug_params->stats_debug_params);
11888 }
11889 // BE Stats
11890 if (debug_params->bestats_debug_params_valid == TRUE) {
11891 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_BESTATS,
11892 debug_params->bestats_debug_params);
11893 }
11894 // BHIST
11895 if (debug_params->bhist_debug_params_valid == TRUE) {
11896 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_BHIST,
11897 debug_params->bhist_debug_params);
11898 }
11899 // 3A Tuning
11900 if (debug_params->q3a_tuning_debug_params_valid == TRUE) {
11901 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_3A_TUNING,
11902 debug_params->q3a_tuning_debug_params);
11903 }
11904 }
11905
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011906 // Add metadata which reprocess needs
11907 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB)) {
11908 cam_reprocess_info_t *repro_info =
11909 (cam_reprocess_info_t *)frame_settings.find
11910 (QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB).data.u8;
Thierry Strudel3d639192016-09-09 11:52:26 -070011911 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_SENSOR,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011912 repro_info->sensor_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011913 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CAMIF,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011914 repro_info->camif_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011915 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_ISP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011916 repro_info->isp_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011917 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CPP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011918 repro_info->cpp_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011919 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_AF_FOCAL_LENGTH_RATIO,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011920 repro_info->af_focal_length_ratio);
Thierry Strudel3d639192016-09-09 11:52:26 -070011921 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_FLIP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011922 repro_info->pipeline_flip);
11923 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_AF_ROI,
11924 repro_info->af_roi);
11925 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_IMG_DYN_FEAT,
11926 repro_info->dyn_mask);
Thierry Strudel3d639192016-09-09 11:52:26 -070011927 /* If ANDROID_JPEG_ORIENTATION is present in the frame settings,
11928 CAM_INTF_PARM_ROTATION metadata has already been added in
11929 translateToHalMetadata. HAL needs to keep this new rotation
11930 metadata. Otherwise, the old rotation info saved in the vendor tag
11931 would be used */
11932 IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
11933 CAM_INTF_PARM_ROTATION, reprocParam) {
11934 LOGD("CAM_INTF_PARM_ROTATION metadata is added in translateToHalMetadata");
11935 } else {
11936 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_ROTATION,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011937 repro_info->rotation_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011938 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011939 }
11940
11941 /* Add additional JPEG cropping information. The app adds QCAMERA3_JPEG_ENCODE_CROP_RECT
11942 to request cropping and uses the ROI for downscale/upscale during HW JPEG encoding.
11943 roi.width and roi.height become the final JPEG size.
11944 For now, HAL only checks this for reprocess requests (see the illustrative sketch after this block). */
11945 if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ENABLE) &&
11946 frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_RECT)) {
11947 uint8_t *enable =
11948 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ENABLE).data.u8;
11949 if (*enable == TRUE) {
11950 int32_t *crop_data =
11951 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_RECT).data.i32;
11952 cam_stream_crop_info_t crop_meta;
11953 memset(&crop_meta, 0, sizeof(cam_stream_crop_info_t));
11954 crop_meta.stream_id = 0;
11955 crop_meta.crop.left = crop_data[0];
11956 crop_meta.crop.top = crop_data[1];
11957 crop_meta.crop.width = crop_data[2];
11958 crop_meta.crop.height = crop_data[3];
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011959 // The JPEG crop roi should match cpp output size
11960 IF_META_AVAILABLE(cam_stream_crop_info_t, cpp_crop,
11961 CAM_INTF_META_SNAP_CROP_INFO_CPP, reprocParam) {
11962 crop_meta.roi_map.left = 0;
11963 crop_meta.roi_map.top = 0;
11964 crop_meta.roi_map.width = cpp_crop->crop.width;
11965 crop_meta.roi_map.height = cpp_crop->crop.height;
Thierry Strudel3d639192016-09-09 11:52:26 -070011966 }
11967 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_JPEG_ENCODE_CROP,
11968 crop_meta);
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011969 LOGH("Add JPEG encode crop left %d, top %d, width %d, height %d, mCameraId %d",
Thierry Strudel3d639192016-09-09 11:52:26 -070011970 crop_meta.crop.left, crop_meta.crop.top,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011971 crop_meta.crop.width, crop_meta.crop.height, mCameraId);
11972 LOGH("Add JPEG encode crop ROI left %d, top %d, width %d, height %d, mCameraId %d",
Thierry Strudel3d639192016-09-09 11:52:26 -070011973 crop_meta.roi_map.left, crop_meta.roi_map.top,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011974 crop_meta.roi_map.width, crop_meta.roi_map.height, mCameraId);
11975
11976 // Add JPEG scale information
11977 cam_dimension_t scale_dim;
11978 memset(&scale_dim, 0, sizeof(cam_dimension_t));
11979 if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ROI)) {
11980 int32_t *roi =
11981 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ROI).data.i32;
11982 scale_dim.width = roi[2];
11983 scale_dim.height = roi[3];
11984 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_JPEG_SCALE_DIMENSION,
11985 scale_dim);
11986 LOGH("Add JPEG encode scale width %d, height %d, mCameraId %d",
11987 scale_dim.width, scale_dim.height, mCameraId);
11988 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011989 }
11990 }
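    /* Illustrative sketch of how a client might populate these vendor tags for a
     * reprocess request (values are hypothetical; only the layout matters --
     * the crop rect is {left, top, width, height} and roi[2]/roi[3] become the
     * final JPEG size):
     *
     *   CameraMetadata reprocSettings;
     *   uint8_t cropEnable = 1;
     *   int32_t cropRect[4] = {0, 0, 2000, 1500};
     *   int32_t scaleRoi[4] = {0, 0, 1920, 1080};
     *   reprocSettings.update(QCAMERA3_JPEG_ENCODE_CROP_ENABLE, &cropEnable, 1);
     *   reprocSettings.update(QCAMERA3_JPEG_ENCODE_CROP_RECT, cropRect, 4);
     *   reprocSettings.update(QCAMERA3_JPEG_ENCODE_CROP_ROI, scaleRoi, 4);
     */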
11991
11992 return rc;
11993}
11994
11995/*===========================================================================
11996 * FUNCTION : saveRequestSettings
11997 *
11998 * DESCRIPTION: Add any settings that might have changed to the request settings
11999 * and save the settings to be applied on the frame
12000 *
12001 * PARAMETERS :
12002 * @jpegMetadata : the extracted and/or modified jpeg metadata
12003 * @request : request with initial settings
12004 *
12005 * RETURN :
12006 * camera_metadata_t* : pointer to the saved request settings
12007 *==========================================================================*/
12008camera_metadata_t* QCamera3HardwareInterface::saveRequestSettings(
12009 const CameraMetadata &jpegMetadata,
12010 camera3_capture_request_t *request)
12011{
12012 camera_metadata_t *resultMetadata;
12013 CameraMetadata camMetadata;
12014 camMetadata = request->settings;
12015
12016 if (jpegMetadata.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
12017 int32_t thumbnail_size[2];
12018 thumbnail_size[0] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
12019 thumbnail_size[1] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
12020 camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, thumbnail_size,
12021 jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
12022 }
12023
12024 if (request->input_buffer != NULL) {
12025 uint8_t reprocessFlags = 1;
12026 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS,
12027 (uint8_t*)&reprocessFlags,
12028 sizeof(reprocessFlags));
12029 }
12030
12031 resultMetadata = camMetadata.release();
12032 return resultMetadata;
12033}
12034
12035/*===========================================================================
12036 * FUNCTION : setHalFpsRange
12037 *
12038 * DESCRIPTION: set FPS range parameter
12039 *
12040 *
12041 * PARAMETERS :
12042 * @settings : Metadata from framework
12043 * @hal_metadata: Metadata buffer
12044 *
12045 *
12046 * RETURN : success: NO_ERROR
12047 * failure:
12048 *==========================================================================*/
12049int32_t QCamera3HardwareInterface::setHalFpsRange(const CameraMetadata &settings,
12050 metadata_buffer_t *hal_metadata)
12051{
12052 int32_t rc = NO_ERROR;
12053 cam_fps_range_t fps_range;
12054 fps_range.min_fps = (float)
12055 settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0];
12056 fps_range.max_fps = (float)
12057 settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
12058 fps_range.video_min_fps = fps_range.min_fps;
12059 fps_range.video_max_fps = fps_range.max_fps;
12060
12061 LOGD("aeTargetFpsRange fps: [%f %f]",
12062 fps_range.min_fps, fps_range.max_fps);
12063 /* In CONSTRAINED_HFR_MODE, sensor_fps is derived from aeTargetFpsRange as
12064 * follows:
12065 * ---------------------------------------------------------------|
12066 * Video stream is absent in configure_streams |
12067 * (Camcorder preview before the first video record |
12068 * ---------------------------------------------------------------|
12069 * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
12070 * | | | vid_min/max_fps|
12071 * ---------------------------------------------------------------|
12072 * NO | [ 30, 240] | 240 | [240, 240] |
12073 * |-------------|-------------|----------------|
12074 * | [240, 240] | 240 | [240, 240] |
12075 * ---------------------------------------------------------------|
12076 * Video stream is present in configure_streams |
12077 * ---------------------------------------------------------------|
12078 * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
12079 * | | | vid_min/max_fps|
12080 * ---------------------------------------------------------------|
12081 * NO | [ 30, 240] | 240 | [240, 240] |
12082 * (camcorder prev |-------------|-------------|----------------|
12083 * after video rec | [240, 240] | 240 | [240, 240] |
12084 * is stopped) | | | |
12085 * ---------------------------------------------------------------|
12086 * YES | [ 30, 240] | 240 | [240, 240] |
12087 * |-------------|-------------|----------------|
12088 * | [240, 240] | 240 | [240, 240] |
12089 * ---------------------------------------------------------------|
12090 * When Video stream is absent in configure_streams,
12091 * preview fps = sensor_fps / batchsize
12092 * Eg: for 240fps at batchSize 4, preview = 60fps
12093 * for 120fps at batchSize 4, preview = 30fps
12094 *
12095 * When video stream is present in configure_streams, preview fps is as per
12096 * the ratio of preview buffers to video buffers requested in process
12097 * capture request
12098 */
12099 mBatchSize = 0;
12100 if (CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) {
12101 fps_range.min_fps = fps_range.video_max_fps;
12102 fps_range.video_min_fps = fps_range.video_max_fps;
12103 int val = lookupHalName(HFR_MODE_MAP, METADATA_MAP_SIZE(HFR_MODE_MAP),
12104 fps_range.max_fps);
12105 if (NAME_NOT_FOUND != val) {
12106 cam_hfr_mode_t hfrMode = (cam_hfr_mode_t)val;
12107 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
12108 return BAD_VALUE;
12109 }
12110
12111 if (fps_range.max_fps >= MIN_FPS_FOR_BATCH_MODE) {
12112 /* If batchmode is currently in progress and the fps changes,
12113 * set the flag to restart the sensor */
12114 if((mHFRVideoFps >= MIN_FPS_FOR_BATCH_MODE) &&
12115 (mHFRVideoFps != fps_range.max_fps)) {
12116 mNeedSensorRestart = true;
12117 }
12118 mHFRVideoFps = fps_range.max_fps;
12119 mBatchSize = mHFRVideoFps / PREVIEW_FPS_FOR_HFR;
12120 if (mBatchSize > MAX_HFR_BATCH_SIZE) {
12121 mBatchSize = MAX_HFR_BATCH_SIZE;
12122 }
12123 }
12124 LOGD("hfrMode: %d batchSize: %d", hfrMode, mBatchSize);
12125
12126 }
12127 } else {
12128 /* HFR mode is session param in backend/ISP. This should be reset when
12129 * in non-HFR mode */
12130 cam_hfr_mode_t hfrMode = CAM_HFR_MODE_OFF;
12131 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
12132 return BAD_VALUE;
12133 }
12134 }
12135 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FPS_RANGE, fps_range)) {
12136 return BAD_VALUE;
12137 }
12138 LOGD("fps: [%f %f] vid_fps: [%f %f]", fps_range.min_fps,
12139 fps_range.max_fps, fps_range.video_min_fps, fps_range.video_max_fps);
12140 return rc;
12141}
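/* Illustrative sketch (hypothetical values; PREVIEW_FPS_FOR_HFR is assumed to be 30
 * and MAX_HFR_BATCH_SIZE at least 8): in CONSTRAINED_HIGH_SPEED mode with
 * aeTargetFpsRange = [240, 240], the sensor range becomes [240, 240], the HFR mode
 * is looked up from HFR_MODE_MAP for 240 fps, and mBatchSize = 240 / 30 = 8; for
 * [120, 120] the batch size would be 4. Outside constrained high speed mode,
 * CAM_HFR_MODE_OFF is set and the requested fps range is passed through as-is. */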
12142
12143/*===========================================================================
12144 * FUNCTION : translateToHalMetadata
12145 *
12146 * DESCRIPTION: read from the camera_metadata_t and change to parm_type_t
12147 *
12148 *
12149 * PARAMETERS :
12150 * @request : request sent from framework
12151 *
12152 *
12153 * RETURN : success: NO_ERROR
12154 * failure:
12155 *==========================================================================*/
12156int QCamera3HardwareInterface::translateToHalMetadata
12157 (const camera3_capture_request_t *request,
12158 metadata_buffer_t *hal_metadata,
Chien-Yu Chen92724a82017-01-06 11:50:30 -080012159 uint32_t snapshotStreamId) {
12160 if (request == nullptr || hal_metadata == nullptr) {
12161 return BAD_VALUE;
12162 }
12163
12164 int64_t minFrameDuration = getMinFrameDuration(request);
12165
12166 return translateFwkMetadataToHalMetadata(request->settings, hal_metadata, snapshotStreamId,
12167 minFrameDuration);
12168}
12169
12170int QCamera3HardwareInterface::translateFwkMetadataToHalMetadata(
12171 const camera_metadata_t *frameworkMetadata, metadata_buffer_t *hal_metadata,
12172 uint32_t snapshotStreamId, int64_t minFrameDuration) {
12173
Thierry Strudel3d639192016-09-09 11:52:26 -070012174 int rc = 0;
12175 CameraMetadata frame_settings;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080012176 frame_settings = frameworkMetadata;
Thierry Strudel3d639192016-09-09 11:52:26 -070012177
12178 /* Do not change the order of the following list unless you know what you are
12179 * doing.
12180 * The order is laid out in such a way that parameters in the front of the table
12181 * may be used to override the parameters later in the table. Examples are:
12182 * 1. META_MODE should precede AEC/AWB/AF MODE
12183 * 2. AEC MODE should precede EXPOSURE_TIME/SENSITIVITY/FRAME_DURATION
12184 * 3. AWB_MODE should precede COLOR_CORRECTION_MODE
12185 * 4. Any mode should precede its corresponding settings
12186 */
12187 if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
12188 uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
12189 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_MODE, metaMode)) {
12190 rc = BAD_VALUE;
12191 }
12192 rc = extractSceneMode(frame_settings, metaMode, hal_metadata);
12193 if (rc != NO_ERROR) {
12194 LOGE("extractSceneMode failed");
12195 }
12196 }
12197
12198 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
12199 uint8_t fwk_aeMode =
12200 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
12201 uint8_t aeMode;
12202 int32_t redeye;
12203
12204 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_OFF ) {
12205 aeMode = CAM_AE_MODE_OFF;
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -080012206 } else if (fwk_aeMode == NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH) {
12207 aeMode = CAM_AE_MODE_ON_EXTERNAL_FLASH;
Thierry Strudel3d639192016-09-09 11:52:26 -070012208 } else {
12209 aeMode = CAM_AE_MODE_ON;
12210 }
12211 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
12212 redeye = 1;
12213 } else {
12214 redeye = 0;
12215 }
12216
12217 int val = lookupHalName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
12218 fwk_aeMode);
12219 if (NAME_NOT_FOUND != val) {
12220 int32_t flashMode = (int32_t)val;
12221 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode);
12222 }
12223
12224 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_MODE, aeMode);
12225 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_REDEYE_REDUCTION, redeye)) {
12226 rc = BAD_VALUE;
12227 }
12228 }
12229
12230 if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) {
12231 uint8_t fwk_whiteLevel = frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0];
12232 int val = lookupHalName(WHITE_BALANCE_MODES_MAP, METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
12233 fwk_whiteLevel);
12234 if (NAME_NOT_FOUND != val) {
12235 uint8_t whiteLevel = (uint8_t)val;
12236 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_WHITE_BALANCE, whiteLevel)) {
12237 rc = BAD_VALUE;
12238 }
12239 }
12240 }
12241
12242 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
12243 uint8_t fwk_cacMode =
12244 frame_settings.find(
12245 ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
12246 int val = lookupHalName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
12247 fwk_cacMode);
12248 if (NAME_NOT_FOUND != val) {
12249 cam_aberration_mode_t cacMode = (cam_aberration_mode_t) val;
12250 bool entryAvailable = FALSE;
12251 // Check whether the CAC mode set by the framework is supported by the device
12252 for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
12253 if (gCamCapability[mCameraId]->aberration_modes[i] == cacMode) {
12254 entryAvailable = TRUE;
12255 break;
12256 }
12257 }
12258 LOGD("FrameworksCacMode=%d entryAvailable=%d", cacMode, entryAvailable);
12259 // If the entry is not found, fall back to a device-supported mode instead of the framework's mode, i.e.,
12260 // Only HW ISP CAC + no SW CAC : advertise all 3 modes, with High behaving the same as Fast in the ISP
12261 // No HW ISP CAC + only SW CAC : advertise all 3 modes, with Fast behaving the same as Off
12262 if (entryAvailable == FALSE) {
12263 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
12264 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
12265 } else {
12266 if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
12267 // High is not supported, so set FAST since the spec says the underlying
12268 // device implementation can be the same for both modes.
12269 cacMode = CAM_COLOR_CORRECTION_ABERRATION_FAST;
12270 } else if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_FAST) {
12271 // Fast is not supported and so we cannot set HIGH or FAST but choose OFF
12272 // in order to avoid the fps drop due to high quality
12273 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
12274 } else {
12275 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
12276 }
12277 }
12278 }
12279 LOGD("Final cacMode is %d", cacMode);
12280 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_CAC, cacMode)) {
12281 rc = BAD_VALUE;
12282 }
12283 } else {
12284 LOGE("Invalid framework CAC mode: %d", fwk_cacMode);
12285 }
12286 }
12287
Jason Lee84ae9972017-02-24 13:24:24 -080012288 uint8_t fwk_focusMode = 0;
Shuzhen Wangb57ec912017-07-31 13:24:27 -070012289 if (m_bForceInfinityAf == 0) {
Thierry Strudel2896d122017-02-23 19:18:03 -080012290 if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) {
Jason Lee84ae9972017-02-24 13:24:24 -080012291 fwk_focusMode = frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0];
Thierry Strudel2896d122017-02-23 19:18:03 -080012292 int val = lookupHalName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
12293 fwk_focusMode);
12294 if (NAME_NOT_FOUND != val) {
12295 uint8_t focusMode = (uint8_t)val;
12296 LOGD("set focus mode %d", focusMode);
12297 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12298 CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
12299 rc = BAD_VALUE;
12300 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012301 }
12302 }
Thierry Strudel2896d122017-02-23 19:18:03 -080012303 } else {
12304 uint8_t focusMode = (uint8_t)CAM_FOCUS_MODE_INFINITY;
12305 LOGE("Focus forced to infinity %d", focusMode);
12306 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
12307 rc = BAD_VALUE;
12308 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012309 }
12310
Jason Lee84ae9972017-02-24 13:24:24 -080012311 if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE) &&
12312 fwk_focusMode == ANDROID_CONTROL_AF_MODE_OFF) {
Thierry Strudel3d639192016-09-09 11:52:26 -070012313 float focalDistance = frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0];
12314 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCUS_DISTANCE,
12315 focalDistance)) {
12316 rc = BAD_VALUE;
12317 }
12318 }
12319
12320 if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) {
12321 uint8_t fwk_antibandingMode =
12322 frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.u8[0];
12323 int val = lookupHalName(ANTIBANDING_MODES_MAP,
12324 METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP), fwk_antibandingMode);
12325 if (NAME_NOT_FOUND != val) {
12326 uint32_t hal_antibandingMode = (uint32_t)val;
Shuzhen Wangf6890e02016-08-12 14:28:54 -070012327 if (hal_antibandingMode == CAM_ANTIBANDING_MODE_AUTO) {
12328 if (m60HzZone) {
12329 hal_antibandingMode = CAM_ANTIBANDING_MODE_AUTO_60HZ;
12330 } else {
12331 hal_antibandingMode = CAM_ANTIBANDING_MODE_AUTO_50HZ;
12332 }
12333 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012334 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ANTIBANDING,
12335 hal_antibandingMode)) {
12336 rc = BAD_VALUE;
12337 }
12338 }
12339 }
12340
12341 if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
12342 int32_t expCompensation = frame_settings.find(
12343 ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
12344 if (expCompensation < gCamCapability[mCameraId]->exposure_compensation_min)
12345 expCompensation = gCamCapability[mCameraId]->exposure_compensation_min;
12346 if (expCompensation > gCamCapability[mCameraId]->exposure_compensation_max)
12347 expCompensation = gCamCapability[mCameraId]->exposure_compensation_max;
Thierry Strudel54dc9782017-02-15 12:12:10 -080012348 LOGD("Setting compensation:%d", expCompensation);
Thierry Strudel3d639192016-09-09 11:52:26 -070012349 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_COMPENSATION,
12350 expCompensation)) {
12351 rc = BAD_VALUE;
12352 }
12353 }
12354
12355 if (frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) {
12356 uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0];
12357 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_LOCK, aeLock)) {
12358 rc = BAD_VALUE;
12359 }
12360 }
12361 if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
12362 rc = setHalFpsRange(frame_settings, hal_metadata);
12363 if (rc != NO_ERROR) {
12364 LOGE("setHalFpsRange failed");
12365 }
12366 }
12367
12368 if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) {
12369 uint8_t awbLock = frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0];
12370 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AWB_LOCK, awbLock)) {
12371 rc = BAD_VALUE;
12372 }
12373 }
12374
12375 if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) {
12376 uint8_t fwk_effectMode = frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0];
12377 int val = lookupHalName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
12378 fwk_effectMode);
12379 if (NAME_NOT_FOUND != val) {
12380 uint8_t effectMode = (uint8_t)val;
12381 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EFFECT, effectMode)) {
12382 rc = BAD_VALUE;
12383 }
12384 }
12385 }
12386
12387 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) {
12388 uint8_t colorCorrectMode = frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0];
12389 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_MODE,
12390 colorCorrectMode)) {
12391 rc = BAD_VALUE;
12392 }
12393 }
12394
12395 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_GAINS)) {
12396 cam_color_correct_gains_t colorCorrectGains;
12397 for (size_t i = 0; i < CC_GAIN_MAX; i++) {
12398 colorCorrectGains.gains[i] =
12399 frame_settings.find(ANDROID_COLOR_CORRECTION_GAINS).data.f[i];
12400 }
12401 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_GAINS,
12402 colorCorrectGains)) {
12403 rc = BAD_VALUE;
12404 }
12405 }
12406
12407 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_TRANSFORM)) {
12408 cam_color_correct_matrix_t colorCorrectTransform;
12409 cam_rational_type_t transform_elem;
12410 size_t num = 0;
12411 for (size_t i = 0; i < CC_MATRIX_ROWS; i++) {
12412 for (size_t j = 0; j < CC_MATRIX_COLS; j++) {
12413 transform_elem.numerator =
12414 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].numerator;
12415 transform_elem.denominator =
12416 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].denominator;
12417 colorCorrectTransform.transform_matrix[i][j] = transform_elem;
12418 num++;
12419 }
12420 }
12421 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_TRANSFORM,
12422 colorCorrectTransform)) {
12423 rc = BAD_VALUE;
12424 }
12425 }
12426
12427 cam_trigger_t aecTrigger;
12428 aecTrigger.trigger = CAM_AEC_TRIGGER_IDLE;
12429 aecTrigger.trigger_id = -1;
12430 if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)&&
12431 frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_ID)) {
12432 aecTrigger.trigger =
12433 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0];
12434 aecTrigger.trigger_id =
12435 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID).data.i32[0];
12436 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER,
12437 aecTrigger)) {
12438 rc = BAD_VALUE;
12439 }
12440 LOGD("precaptureTrigger: %d precaptureTriggerID: %d",
12441 aecTrigger.trigger, aecTrigger.trigger_id);
12442 }
12443
12444 /*af_trigger must come with a trigger id*/
12445 if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER) &&
12446 frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER_ID)) {
12447 cam_trigger_t af_trigger;
12448 af_trigger.trigger =
12449 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0];
12450 af_trigger.trigger_id =
12451 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER_ID).data.i32[0];
12452 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_TRIGGER, af_trigger)) {
12453 rc = BAD_VALUE;
12454 }
12455 LOGD("AfTrigger: %d AfTriggerID: %d",
12456 af_trigger.trigger, af_trigger.trigger_id);
12457 }
12458
12459 if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) {
12460 int32_t demosaic = frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0];
12461 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_DEMOSAIC, demosaic)) {
12462 rc = BAD_VALUE;
12463 }
12464 }
12465 if (frame_settings.exists(ANDROID_EDGE_MODE)) {
12466 cam_edge_application_t edge_application;
12467 edge_application.edge_mode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0];
Thierry Strudel54dc9782017-02-15 12:12:10 -080012468
Thierry Strudel3d639192016-09-09 11:52:26 -070012469 if (edge_application.edge_mode == CAM_EDGE_MODE_OFF) {
12470 edge_application.sharpness = 0;
12471 } else {
Thierry Strudel54dc9782017-02-15 12:12:10 -080012472 edge_application.sharpness =
12473 gCamCapability[mCameraId]->sharpness_ctrl.def_value; //default
12474 if (frame_settings.exists(QCAMERA3_SHARPNESS_STRENGTH)) {
12475 int32_t sharpness =
12476 frame_settings.find(QCAMERA3_SHARPNESS_STRENGTH).data.i32[0];
12477 if (sharpness >= gCamCapability[mCameraId]->sharpness_ctrl.min_value &&
12478 sharpness <= gCamCapability[mCameraId]->sharpness_ctrl.max_value) {
12479 LOGD("Setting edge mode sharpness %d", sharpness);
12480 edge_application.sharpness = sharpness;
12481 }
12482 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012483 }
12484 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EDGE_MODE, edge_application)) {
12485 rc = BAD_VALUE;
12486 }
12487 }
12488
12489 if (frame_settings.exists(ANDROID_FLASH_MODE)) {
12490 int32_t respectFlashMode = 1;
12491 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
12492 uint8_t fwk_aeMode =
12493 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -080012494 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH ||
12495 fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH ||
12496 fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
Thierry Strudel3d639192016-09-09 11:52:26 -070012497 respectFlashMode = 0;
12498 LOGH("AE Mode controls flash, ignore android.flash.mode");
12499 }
12500 }
12501 if (respectFlashMode) {
12502 int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
12503 (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
12504 LOGH("flash mode after mapping %d", val);
12505 // To check: CAM_INTF_META_FLASH_MODE usage
12506 if (NAME_NOT_FOUND != val) {
12507 uint8_t flashMode = (uint8_t)val;
12508 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode)) {
12509 rc = BAD_VALUE;
12510 }
12511 }
12512 }
12513 }
12514
12515 if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) {
12516 uint8_t flashPower = frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0];
12517 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_POWER, flashPower)) {
12518 rc = BAD_VALUE;
12519 }
12520 }
12521
12522 if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) {
12523 int64_t flashFiringTime = frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0];
12524 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_FIRING_TIME,
12525 flashFiringTime)) {
12526 rc = BAD_VALUE;
12527 }
12528 }
12529
12530 if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) {
12531 uint8_t hotPixelMode = frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0];
12532 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_HOTPIXEL_MODE,
12533 hotPixelMode)) {
12534 rc = BAD_VALUE;
12535 }
12536 }
12537
12538 if (frame_settings.exists(ANDROID_LENS_APERTURE)) {
12539 float lensAperture = frame_settings.find( ANDROID_LENS_APERTURE).data.f[0];
12540 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_APERTURE,
12541 lensAperture)) {
12542 rc = BAD_VALUE;
12543 }
12544 }
12545
12546 if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) {
12547 float filterDensity = frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0];
12548 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FILTERDENSITY,
12549 filterDensity)) {
12550 rc = BAD_VALUE;
12551 }
12552 }
12553
12554 if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
12555 float focalLength = frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
12556 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCAL_LENGTH,
12557 focalLength)) {
12558 rc = BAD_VALUE;
12559 }
12560 }
12561
12562 if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) {
12563 uint8_t optStabMode =
12564 frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0];
12565 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_OPT_STAB_MODE,
12566 optStabMode)) {
12567 rc = BAD_VALUE;
12568 }
12569 }
12570
12571 if (frame_settings.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
12572 uint8_t videoStabMode =
12573 frame_settings.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
12574 LOGD("videoStabMode from APP = %d", videoStabMode);
12575 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_VIDEO_STAB_MODE,
12576 videoStabMode)) {
12577 rc = BAD_VALUE;
12578 }
12579 }
12580
12581
12582 if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) {
12583 uint8_t noiseRedMode = frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
12584 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_NOISE_REDUCTION_MODE,
12585 noiseRedMode)) {
12586 rc = BAD_VALUE;
12587 }
12588 }
12589
12590 if (frame_settings.exists(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR)) {
12591 float reprocessEffectiveExposureFactor =
12592 frame_settings.find(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR).data.f[0];
12593 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR,
12594 reprocessEffectiveExposureFactor)) {
12595 rc = BAD_VALUE;
12596 }
12597 }
12598
12599 cam_crop_region_t scalerCropRegion;
12600 bool scalerCropSet = false;
12601 if (frame_settings.exists(ANDROID_SCALER_CROP_REGION)) {
12602 scalerCropRegion.left = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0];
12603 scalerCropRegion.top = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1];
12604 scalerCropRegion.width = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2];
12605 scalerCropRegion.height = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[3];
12606
12607 // Map coordinate system from active array to sensor output.
12608 mCropRegionMapper.toSensor(scalerCropRegion.left, scalerCropRegion.top,
12609 scalerCropRegion.width, scalerCropRegion.height);
12610
12611 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SCALER_CROP_REGION,
12612 scalerCropRegion)) {
12613 rc = BAD_VALUE;
12614 }
12615 scalerCropSet = true;
12616 }
12617
12618 if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
12619 int64_t sensorExpTime =
12620 frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
12621 LOGD("setting sensorExpTime %lld", sensorExpTime);
12622 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_EXPOSURE_TIME,
12623 sensorExpTime)) {
12624 rc = BAD_VALUE;
12625 }
12626 }
12627
12628 if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) {
12629 int64_t sensorFrameDuration =
12630 frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
Thierry Strudel3d639192016-09-09 11:52:26 -070012631 sensorFrameDuration = MAX(sensorFrameDuration, minFrameDuration);
12632 if (sensorFrameDuration > gCamCapability[mCameraId]->max_frame_duration)
12633 sensorFrameDuration = gCamCapability[mCameraId]->max_frame_duration;
12634 LOGD("clamp sensorFrameDuration to %lld", sensorFrameDuration);
12635 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_FRAME_DURATION,
12636 sensorFrameDuration)) {
12637 rc = BAD_VALUE;
12638 }
12639 }
12640
12641 if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
12642 int32_t sensorSensitivity = frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
12643 if (sensorSensitivity < gCamCapability[mCameraId]->sensitivity_range.min_sensitivity)
12644 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.min_sensitivity;
12645 if (sensorSensitivity > gCamCapability[mCameraId]->sensitivity_range.max_sensitivity)
12646 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.max_sensitivity;
12647 LOGD("clamp sensorSensitivity to %d", sensorSensitivity);
12648 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_SENSITIVITY,
12649 sensorSensitivity)) {
12650 rc = BAD_VALUE;
12651 }
12652 }
12653
Thierry Strudel9e74aae2016-09-22 17:10:18 -070012654#ifndef USE_HAL_3_3
12655 if (frame_settings.exists(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST)) {
12656 int32_t ispSensitivity =
12657 frame_settings.find(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST).data.i32[0];
12658 if (ispSensitivity <
12659 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity) {
12660 ispSensitivity =
12661 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
12662 LOGD("clamp ispSensitivity to %d", ispSensitivity);
12663 }
12664 if (ispSensitivity >
12665 gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity) {
12666 ispSensitivity =
12667 gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity;
12668 LOGD("clamp ispSensitivity to %d", ispSensitivity);
12669 }
12670 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_ISP_SENSITIVITY,
12671 ispSensitivity)) {
12672 rc = BAD_VALUE;
12673 }
12674 }
12675#endif
12676
Thierry Strudel3d639192016-09-09 11:52:26 -070012677 if (frame_settings.exists(ANDROID_SHADING_MODE)) {
12678 uint8_t shadingMode = frame_settings.find(ANDROID_SHADING_MODE).data.u8[0];
12679 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SHADING_MODE, shadingMode)) {
12680 rc = BAD_VALUE;
12681 }
12682 }
12683
12684 if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
12685 uint8_t fwk_facedetectMode =
12686 frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];
12687
12688 int val = lookupHalName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
12689 fwk_facedetectMode);
12690
12691 if (NAME_NOT_FOUND != val) {
12692 uint8_t facedetectMode = (uint8_t)val;
12693 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_FACEDETECT_MODE,
12694 facedetectMode)) {
12695 rc = BAD_VALUE;
12696 }
12697 }
12698 }
12699
Thierry Strudel54dc9782017-02-15 12:12:10 -080012700 if (frame_settings.exists(QCAMERA3_HISTOGRAM_MODE)) {
Thierry Strudel3d639192016-09-09 11:52:26 -070012701 uint8_t histogramMode =
Thierry Strudel54dc9782017-02-15 12:12:10 -080012702 frame_settings.find(QCAMERA3_HISTOGRAM_MODE).data.u8[0];
Thierry Strudel3d639192016-09-09 11:52:26 -070012703 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
12704 histogramMode)) {
12705 rc = BAD_VALUE;
12706 }
12707 }
12708
12709 if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) {
12710 uint8_t sharpnessMapMode =
12711 frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0];
12712 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,
12713 sharpnessMapMode)) {
12714 rc = BAD_VALUE;
12715 }
12716 }
12717
12718 if (frame_settings.exists(ANDROID_TONEMAP_MODE)) {
12719 uint8_t tonemapMode =
12720 frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0];
12721 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_MODE, tonemapMode)) {
12722 rc = BAD_VALUE;
12723 }
12724 }
12725    /* Tonemap curve channels: ch0 = G, ch1 = B, ch2 = R */
12726    /* All tonemap channels have the same number of points */
12727 if (frame_settings.exists(ANDROID_TONEMAP_CURVE_GREEN) &&
12728 frame_settings.exists(ANDROID_TONEMAP_CURVE_BLUE) &&
12729 frame_settings.exists(ANDROID_TONEMAP_CURVE_RED)) {
12730 cam_rgb_tonemap_curves tonemapCurves;
12731 tonemapCurves.tonemap_points_cnt = frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).count/2;
12732 if (tonemapCurves.tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
12733 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
12734 tonemapCurves.tonemap_points_cnt,
12735 CAM_MAX_TONEMAP_CURVE_SIZE);
12736 tonemapCurves.tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
12737 }
12738
12739 /* ch0 = G*/
12740 size_t point = 0;
12741 cam_tonemap_curve_t tonemapCurveGreen;
12742 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12743 for (size_t j = 0; j < 2; j++) {
12744 tonemapCurveGreen.tonemap_points[i][j] =
12745 frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).data.f[point];
12746 point++;
12747 }
12748 }
12749 tonemapCurves.curves[0] = tonemapCurveGreen;
12750
12751 /* ch 1 = B */
12752 point = 0;
12753 cam_tonemap_curve_t tonemapCurveBlue;
12754 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12755 for (size_t j = 0; j < 2; j++) {
12756 tonemapCurveBlue.tonemap_points[i][j] =
12757 frame_settings.find(ANDROID_TONEMAP_CURVE_BLUE).data.f[point];
12758 point++;
12759 }
12760 }
12761 tonemapCurves.curves[1] = tonemapCurveBlue;
12762
12763 /* ch 2 = R */
12764 point = 0;
12765 cam_tonemap_curve_t tonemapCurveRed;
12766 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12767 for (size_t j = 0; j < 2; j++) {
12768 tonemapCurveRed.tonemap_points[i][j] =
12769 frame_settings.find(ANDROID_TONEMAP_CURVE_RED).data.f[point];
12770 point++;
12771 }
12772 }
12773 tonemapCurves.curves[2] = tonemapCurveRed;
12774
12775 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_CURVES,
12776 tonemapCurves)) {
12777 rc = BAD_VALUE;
12778 }
12779 }
12780
12781 if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
12782 uint8_t captureIntent = frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
12783 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_CAPTURE_INTENT,
12784 captureIntent)) {
12785 rc = BAD_VALUE;
12786 }
12787 }
12788
12789 if (frame_settings.exists(ANDROID_BLACK_LEVEL_LOCK)) {
12790 uint8_t blackLevelLock = frame_settings.find(ANDROID_BLACK_LEVEL_LOCK).data.u8[0];
12791 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_BLACK_LEVEL_LOCK,
12792 blackLevelLock)) {
12793 rc = BAD_VALUE;
12794 }
12795 }
12796
12797 if (frame_settings.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
12798 uint8_t lensShadingMapMode =
12799 frame_settings.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
12800 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_SHADING_MAP_MODE,
12801 lensShadingMapMode)) {
12802 rc = BAD_VALUE;
12803 }
12804 }
12805
12806 if (frame_settings.exists(ANDROID_CONTROL_AE_REGIONS)) {
12807 cam_area_t roi;
12808 bool reset = true;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080012809 convertFromRegions(roi, frame_settings, ANDROID_CONTROL_AE_REGIONS);
Thierry Strudel3d639192016-09-09 11:52:26 -070012810
12811 // Map coordinate system from active array to sensor output.
12812 mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
12813 roi.rect.height);
12814
12815 if (scalerCropSet) {
12816 reset = resetIfNeededROI(&roi, &scalerCropRegion);
12817 }
12818 if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_ROI, roi)) {
12819 rc = BAD_VALUE;
12820 }
12821 }
12822
12823 if (frame_settings.exists(ANDROID_CONTROL_AF_REGIONS)) {
12824 cam_area_t roi;
12825 bool reset = true;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080012826 convertFromRegions(roi, frame_settings, ANDROID_CONTROL_AF_REGIONS);
Thierry Strudel3d639192016-09-09 11:52:26 -070012827
12828 // Map coordinate system from active array to sensor output.
12829 mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
12830 roi.rect.height);
12831
12832 if (scalerCropSet) {
12833 reset = resetIfNeededROI(&roi, &scalerCropRegion);
12834 }
12835 if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_ROI, roi)) {
12836 rc = BAD_VALUE;
12837 }
12838 }
12839
12840 // CDS for non-HFR non-video mode
12841 if ((mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
12842 !(m_bIsVideo) && frame_settings.exists(QCAMERA3_CDS_MODE)) {
12843 int32_t *fwk_cds = frame_settings.find(QCAMERA3_CDS_MODE).data.i32;
12844 if ((CAM_CDS_MODE_MAX <= *fwk_cds) || (0 > *fwk_cds)) {
12845 LOGE("Invalid CDS mode %d!", *fwk_cds);
12846 } else {
12847 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12848 CAM_INTF_PARM_CDS_MODE, *fwk_cds)) {
12849 rc = BAD_VALUE;
12850 }
12851 }
12852 }
12853
Thierry Strudel04e026f2016-10-10 11:27:36 -070012854 // Video HDR
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012855 cam_video_hdr_mode_t vhdr = CAM_VIDEO_HDR_MODE_OFF;
Thierry Strudel04e026f2016-10-10 11:27:36 -070012856 if (frame_settings.exists(QCAMERA3_VIDEO_HDR_MODE)) {
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012857 vhdr = (cam_video_hdr_mode_t) frame_settings.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
12858 }
12859 if (m_bVideoHdrEnabled)
12860 vhdr = CAM_VIDEO_HDR_MODE_ON;
12861
Thierry Strudel54dc9782017-02-15 12:12:10 -080012862 int8_t curr_hdr_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) != 0);
12863
12864 if(vhdr != curr_hdr_state)
12865 LOGH("PROFILE_SET_HDR_MODE %d" ,vhdr);
12866
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012867 rc = setVideoHdrMode(mParameters, vhdr);
12868 if (rc != NO_ERROR) {
12869        LOGE("setVideoHdrMode failed");
Thierry Strudel04e026f2016-10-10 11:27:36 -070012870 }
12871
12872 //IR
12873 if(frame_settings.exists(QCAMERA3_IR_MODE)) {
12874 cam_ir_mode_type_t fwk_ir = (cam_ir_mode_type_t)
12875 frame_settings.find(QCAMERA3_IR_MODE).data.i32[0];
Thierry Strudel54dc9782017-02-15 12:12:10 -080012876 uint8_t curr_ir_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_IR) != 0);
12877 uint8_t isIRon = 0;
12878
12879        isIRon = (fwk_ir > 0) ? 1 : 0;
Thierry Strudel04e026f2016-10-10 11:27:36 -070012880 if ((CAM_IR_MODE_MAX <= fwk_ir) || (0 > fwk_ir)) {
12881 LOGE("Invalid IR mode %d!", fwk_ir);
12882 } else {
Thierry Strudel54dc9782017-02-15 12:12:10 -080012883 if(isIRon != curr_ir_state )
12884 LOGH("PROFILE_SET_IR_MODE %d" ,isIRon);
12885
Thierry Strudel04e026f2016-10-10 11:27:36 -070012886 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12887 CAM_INTF_META_IR_MODE, fwk_ir)) {
12888 rc = BAD_VALUE;
12889 }
12890 }
12891 }
12892
Thierry Strudel54dc9782017-02-15 12:12:10 -080012893 //Binning Correction Mode
12894 if(frame_settings.exists(QCAMERA3_BINNING_CORRECTION_MODE)) {
12895 cam_binning_correction_mode_t fwk_binning_correction = (cam_binning_correction_mode_t)
12896 frame_settings.find(QCAMERA3_BINNING_CORRECTION_MODE).data.i32[0];
12897 if ((CAM_BINNING_CORRECTION_MODE_MAX <= fwk_binning_correction)
12898 || (0 > fwk_binning_correction)) {
12899 LOGE("Invalid binning correction mode %d!", fwk_binning_correction);
12900 } else {
12901 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12902 CAM_INTF_META_BINNING_CORRECTION_MODE, fwk_binning_correction)) {
12903 rc = BAD_VALUE;
12904 }
12905 }
12906 }
12907
Thierry Strudel269c81a2016-10-12 12:13:59 -070012908 if (frame_settings.exists(QCAMERA3_AEC_CONVERGENCE_SPEED)) {
12909 float aec_speed;
12910 aec_speed = frame_settings.find(QCAMERA3_AEC_CONVERGENCE_SPEED).data.f[0];
12911 LOGD("AEC Speed :%f", aec_speed);
12912 if ( aec_speed < 0 ) {
12913 LOGE("Invalid AEC mode %f!", aec_speed);
12914 } else {
12915 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_CONVERGENCE_SPEED,
12916 aec_speed)) {
12917 rc = BAD_VALUE;
12918 }
12919 }
12920 }
12921
12922 if (frame_settings.exists(QCAMERA3_AWB_CONVERGENCE_SPEED)) {
12923 float awb_speed;
12924 awb_speed = frame_settings.find(QCAMERA3_AWB_CONVERGENCE_SPEED).data.f[0];
12925 LOGD("AWB Speed :%f", awb_speed);
12926 if ( awb_speed < 0 ) {
12927 LOGE("Invalid AWB mode %f!", awb_speed);
12928 } else {
12929 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AWB_CONVERGENCE_SPEED,
12930 awb_speed)) {
12931 rc = BAD_VALUE;
12932 }
12933 }
12934 }
12935
Thierry Strudel3d639192016-09-09 11:52:26 -070012936 // TNR
12937 if (frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_ENABLE) &&
12938 frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE)) {
12939 uint8_t b_TnrRequested = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -080012940 uint8_t curr_tnr_state = ((mCurrFeatureState & CAM_QTI_FEATURE_SW_TNR) != 0);
Thierry Strudel3d639192016-09-09 11:52:26 -070012941 cam_denoise_param_t tnr;
12942 tnr.denoise_enable = frame_settings.find(QCAMERA3_TEMPORAL_DENOISE_ENABLE).data.u8[0];
12943 tnr.process_plates =
12944 (cam_denoise_process_type_t)frame_settings.find(
12945 QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE).data.i32[0];
12946 b_TnrRequested = tnr.denoise_enable;
Thierry Strudel54dc9782017-02-15 12:12:10 -080012947
12948 if(b_TnrRequested != curr_tnr_state)
12949 LOGH("PROFILE_SET_TNR_MODE %d" ,b_TnrRequested);
12950
Thierry Strudel3d639192016-09-09 11:52:26 -070012951 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_TEMPORAL_DENOISE, tnr)) {
12952 rc = BAD_VALUE;
12953 }
12954 }
12955
Thierry Strudel54dc9782017-02-15 12:12:10 -080012956 if (frame_settings.exists(QCAMERA3_EXPOSURE_METER)) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012957 int32_t* exposure_metering_mode =
Thierry Strudel54dc9782017-02-15 12:12:10 -080012958 frame_settings.find(QCAMERA3_EXPOSURE_METER).data.i32;
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012959 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_ALGO_TYPE,
12960 *exposure_metering_mode)) {
12961 rc = BAD_VALUE;
12962 }
12963 }
12964
Thierry Strudel3d639192016-09-09 11:52:26 -070012965 if (frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_MODE)) {
12966 int32_t fwk_testPatternMode =
12967 frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_MODE).data.i32[0];
12968 int testPatternMode = lookupHalName(TEST_PATTERN_MAP,
12969 METADATA_MAP_SIZE(TEST_PATTERN_MAP), fwk_testPatternMode);
12970
12971 if (NAME_NOT_FOUND != testPatternMode) {
12972 cam_test_pattern_data_t testPatternData;
12973 memset(&testPatternData, 0, sizeof(testPatternData));
12974 testPatternData.mode = (cam_test_pattern_mode_t)testPatternMode;
12975 if (testPatternMode == CAM_TEST_PATTERN_SOLID_COLOR &&
12976 frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_DATA)) {
12977 int32_t *fwk_testPatternData =
12978 frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_DATA).data.i32;
12979 testPatternData.r = fwk_testPatternData[0];
12980 testPatternData.b = fwk_testPatternData[3];
12981 switch (gCamCapability[mCameraId]->color_arrangement) {
12982 case CAM_FILTER_ARRANGEMENT_RGGB:
12983 case CAM_FILTER_ARRANGEMENT_GRBG:
12984 testPatternData.gr = fwk_testPatternData[1];
12985 testPatternData.gb = fwk_testPatternData[2];
12986 break;
12987 case CAM_FILTER_ARRANGEMENT_GBRG:
12988 case CAM_FILTER_ARRANGEMENT_BGGR:
12989 testPatternData.gr = fwk_testPatternData[2];
12990 testPatternData.gb = fwk_testPatternData[1];
12991 break;
12992 default:
12993 LOGE("color arrangement %d is not supported",
12994 gCamCapability[mCameraId]->color_arrangement);
12995 break;
12996 }
12997 }
12998 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TEST_PATTERN_DATA,
12999 testPatternData)) {
13000 rc = BAD_VALUE;
13001 }
13002 } else {
13003 LOGE("Invalid framework sensor test pattern mode %d",
13004 fwk_testPatternMode);
13005 }
13006 }
13007
13008 if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
13009 size_t count = 0;
13010 camera_metadata_entry_t gps_coords = frame_settings.find(ANDROID_JPEG_GPS_COORDINATES);
13011 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_COORDINATES,
13012 gps_coords.data.d, gps_coords.count, count);
13013 if (gps_coords.count != count) {
13014 rc = BAD_VALUE;
13015 }
13016 }
13017
13018 if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
13019 char gps_methods[GPS_PROCESSING_METHOD_SIZE];
13020 size_t count = 0;
13021 const char *gps_methods_src = (const char *)
13022 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8;
13023 memset(gps_methods, '\0', sizeof(gps_methods));
13024 strlcpy(gps_methods, gps_methods_src, sizeof(gps_methods));
13025 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_PROC_METHODS,
13026 gps_methods, GPS_PROCESSING_METHOD_SIZE, count);
13027 if (GPS_PROCESSING_METHOD_SIZE != count) {
13028 rc = BAD_VALUE;
13029 }
13030 }
13031
13032 if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
13033 int64_t gps_timestamp = frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
13034 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_TIMESTAMP,
13035 gps_timestamp)) {
13036 rc = BAD_VALUE;
13037 }
13038 }
13039
13040 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
13041 int32_t orientation = frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
13042 cam_rotation_info_t rotation_info;
13043 if (orientation == 0) {
13044 rotation_info.rotation = ROTATE_0;
13045 } else if (orientation == 90) {
13046 rotation_info.rotation = ROTATE_90;
13047 } else if (orientation == 180) {
13048 rotation_info.rotation = ROTATE_180;
13049 } else if (orientation == 270) {
13050 rotation_info.rotation = ROTATE_270;
13051 }
Shuzhen Wang6ec8eac2016-07-28 23:09:23 -070013052 rotation_info.device_rotation = ROTATE_0;
Thierry Strudel3d639192016-09-09 11:52:26 -070013053 rotation_info.streamId = snapshotStreamId;
13054 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_ORIENTATION, orientation);
13055 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ROTATION, rotation_info)) {
13056 rc = BAD_VALUE;
13057 }
13058 }
13059
13060 if (frame_settings.exists(ANDROID_JPEG_QUALITY)) {
13061 uint32_t quality = (uint32_t) frame_settings.find(ANDROID_JPEG_QUALITY).data.u8[0];
13062 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_QUALITY, quality)) {
13063 rc = BAD_VALUE;
13064 }
13065 }
13066
13067 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY)) {
13068 uint32_t thumb_quality = (uint32_t)
13069 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8[0];
13070 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_QUALITY,
13071 thumb_quality)) {
13072 rc = BAD_VALUE;
13073 }
13074 }
13075
13076 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
13077 cam_dimension_t dim;
13078 dim.width = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
13079 dim.height = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
13080 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_SIZE, dim)) {
13081 rc = BAD_VALUE;
13082 }
13083 }
13084
13085 // Internal metadata
13086 if (frame_settings.exists(QCAMERA3_PRIVATEDATA_REPROCESS)) {
13087 size_t count = 0;
13088 camera_metadata_entry_t privatedata = frame_settings.find(QCAMERA3_PRIVATEDATA_REPROCESS);
13089 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_PRIVATE_DATA,
13090 privatedata.data.i32, privatedata.count, count);
13091 if (privatedata.count != count) {
13092 rc = BAD_VALUE;
13093 }
13094 }
13095
Thierry Strudel295a0ca2016-11-03 18:38:47 -070013096 // ISO/Exposure Priority
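    // QCAMERA3_SELECT_PRIORITY chooses between ISO priority and exposure-time
    // priority, while QCAMERA3_USE_ISO_EXP_PRIORITY carries the corresponding
    // 64-bit manual value. ZSL is enabled while either priority mode is active
    // and disabled otherwise (see the else branch below).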
13097 if (frame_settings.exists(QCAMERA3_USE_ISO_EXP_PRIORITY) &&
13098 frame_settings.exists(QCAMERA3_SELECT_PRIORITY)) {
13099 cam_priority_mode_t mode =
13100 (cam_priority_mode_t)frame_settings.find(QCAMERA3_SELECT_PRIORITY).data.i32[0];
13101 if((CAM_ISO_PRIORITY == mode) || (CAM_EXP_PRIORITY == mode)) {
13102 cam_intf_parm_manual_3a_t use_iso_exp_pty;
13103 use_iso_exp_pty.previewOnly = FALSE;
13104 uint64_t* ptr = (uint64_t*)frame_settings.find(QCAMERA3_USE_ISO_EXP_PRIORITY).data.i64;
13105 use_iso_exp_pty.value = *ptr;
13106
13107 if(CAM_ISO_PRIORITY == mode) {
13108 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ISO,
13109 use_iso_exp_pty)) {
13110 rc = BAD_VALUE;
13111 }
13112 }
13113 else {
13114 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_TIME,
13115 use_iso_exp_pty)) {
13116 rc = BAD_VALUE;
13117 }
13118 }
Thierry Strudel54dc9782017-02-15 12:12:10 -080013119
13120 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ZSL_MODE, 1)) {
13121 rc = BAD_VALUE;
13122 }
13123 }
13124 } else {
13125 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ZSL_MODE, 0)) {
13126 rc = BAD_VALUE;
Thierry Strudel295a0ca2016-11-03 18:38:47 -070013127 }
13128 }
13129
13130 // Saturation
13131 if (frame_settings.exists(QCAMERA3_USE_SATURATION)) {
13132 int32_t* use_saturation =
13133 frame_settings.find(QCAMERA3_USE_SATURATION).data.i32;
13134 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_SATURATION, *use_saturation)) {
13135 rc = BAD_VALUE;
13136 }
13137 }
13138
Thierry Strudel3d639192016-09-09 11:52:26 -070013139 // EV step
13140 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EV_STEP,
13141 gCamCapability[mCameraId]->exp_compensation_step)) {
13142 rc = BAD_VALUE;
13143 }
13144
13145 // CDS info
13146 if (frame_settings.exists(QCAMERA3_CDS_INFO)) {
13147 cam_cds_data_t *cdsData = (cam_cds_data_t *)
13148 frame_settings.find(QCAMERA3_CDS_INFO).data.u8;
13149
13150 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13151 CAM_INTF_META_CDS_DATA, *cdsData)) {
13152 rc = BAD_VALUE;
13153 }
13154 }
13155
Shuzhen Wang19463d72016-03-08 11:09:52 -080013156 // Hybrid AE
13157 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
13158 uint8_t *hybrid_ae = (uint8_t *)
13159 frame_settings.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8;
Shuzhen Wang77b049a2017-08-30 12:24:36 -070013160 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_HYBRID_AE, *hybrid_ae)) {
13161 rc = BAD_VALUE;
13162 }
Shuzhen Wang19463d72016-03-08 11:09:52 -080013163 }
13164
Shuzhen Wang14415f52016-11-16 18:26:18 -080013165 // Histogram
13166 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE)) {
13167 uint8_t histogramMode =
13168 frame_settings.find(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE).data.u8[0];
13169 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
13170 histogramMode)) {
13171 rc = BAD_VALUE;
13172 }
13173 }
13174
13175 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS)) {
13176 int32_t histogramBins =
13177 frame_settings.find(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS).data.i32[0];
13178 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_BINS,
13179 histogramBins)) {
13180 rc = BAD_VALUE;
13181 }
13182 }
13183
Shuzhen Wangcc386c52017-03-29 09:28:08 -070013184 // Tracking AF
13185 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER)) {
13186 uint8_t trackingAfTrigger =
13187 frame_settings.find(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER).data.u8[0];
13188 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TRACKING_AF_TRIGGER,
13189 trackingAfTrigger)) {
13190 rc = BAD_VALUE;
13191 }
13192 }
13193
Chien-Yu Chendbd619b2017-08-04 17:50:11 -070013194 // Makernote
13195 camera_metadata_entry entry = frame_settings.find(NEXUS_EXPERIMENTAL_2017_EXIF_MAKERNOTE);
13196 if (entry.count != 0) {
13197 if (entry.count <= MAX_MAKERNOTE_LENGTH) {
13198 cam_makernote_t makernote;
13199 makernote.length = entry.count;
13200 memcpy(makernote.data, entry.data.u8, makernote.length);
13201 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_MAKERNOTE, makernote)) {
13202 rc = BAD_VALUE;
13203 }
13204 } else {
13205 ALOGE("%s: Makernote length %u is larger than %d", __FUNCTION__, entry.count,
13206 MAX_MAKERNOTE_LENGTH);
13207 rc = BAD_VALUE;
13208 }
13209 }
13210
Thierry Strudel3d639192016-09-09 11:52:26 -070013211 return rc;
13212}
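/*
 * Illustrative sketch only (not part of the HAL build): the translation routine
 * above consumes settings that a framework-side client populates through
 * CameraMetadata. Assuming the vendor tag IDs below have been resolved (the
 * HAL-side enum names are used here for brevity), a request could carry, for
 * example:
 *
 *   CameraMetadata settings;
 *   int32_t saturation = 5;                  // hypothetical in-range value
 *   settings.update(QCAMERA3_USE_SATURATION, &saturation, 1);
 *   int32_t cds = 0;                         // a valid mode below CAM_CDS_MODE_MAX
 *   settings.update(QCAMERA3_CDS_MODE, &cds, 1);
 *   int64_t frameDuration = 33333333;        // ~30fps; clamped above if needed
 *   settings.update(ANDROID_SENSOR_FRAME_DURATION, &frameDuration, 1);
 */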
13213
13214/*===========================================================================
13215 * FUNCTION : captureResultCb
13216 *
13217 * DESCRIPTION: Callback handler for all channels (streams, as well as metadata)
13218 *
13219 * PARAMETERS :
13220 * @frame : frame information from mm-camera-interface
13221 * @buffer : actual gralloc buffer to be returned to frameworks. NULL if metadata.
13222 * @userdata: userdata
13223 *
13224 * RETURN : NONE
13225 *==========================================================================*/
13226void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata,
13227 camera3_stream_buffer_t *buffer,
13228 uint32_t frame_number, bool isInputBuffer, void *userdata)
13229{
13230 QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
13231 if (hw == NULL) {
13232 LOGE("Invalid hw %p", hw);
13233 return;
13234 }
13235
13236 hw->captureResultCb(metadata, buffer, frame_number, isInputBuffer);
13237 return;
13238}
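/*
 * Illustrative sketch only: channels are handed the static wrapper above plus
 * the QCamera3HardwareInterface instance as opaque userdata when they are
 * created (see addOfflineReprocChannel() below for an in-tree example), so a
 * channel completing a buffer ends up doing roughly:
 *
 *   mUserCb(metadata, &streamBuffer, frameNumber, false, mUserData);
 *
 * where the fourth argument is isInputBuffer, and mUserCb/mUserData are
 * hypothetical channel members holding the registered callback and the HWI
 * pointer.
 */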
13239
Thierry Strudelc2ee3302016-11-17 12:33:12 -080013240/*===========================================================================
13241 * FUNCTION : setBufferErrorStatus
13242 *
13243 * DESCRIPTION: Callback handler for channels to report any buffer errors
13244 *
13245 * PARAMETERS :
13246 * @ch : Channel on which buffer error is reported from
13247 * @frame_number : frame number on which buffer error is reported on
13248 * @buffer_status : buffer error status
13249 * @userdata: userdata
13250 *
13251 * RETURN : NONE
13252 *==========================================================================*/
13253void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
13254 uint32_t frame_number, camera3_buffer_status_t err, void *userdata)
13255{
13256 QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
13257 if (hw == NULL) {
13258 LOGE("Invalid hw %p", hw);
13259 return;
13260 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013261
Thierry Strudelc2ee3302016-11-17 12:33:12 -080013262 hw->setBufferErrorStatus(ch, frame_number, err);
13263 return;
13264}
13265
13266void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
13267 uint32_t frameNumber, camera3_buffer_status_t err)
13268{
13269 LOGD("channel: %p, frame# %d, buf err: %d", ch, frameNumber, err);
13270 pthread_mutex_lock(&mMutex);
13271
13272 for (auto& req : mPendingBuffersMap.mPendingBuffersInRequest) {
13273 if (req.frame_number != frameNumber)
13274 continue;
13275 for (auto& k : req.mPendingBufferList) {
13276 if(k.stream->priv == ch) {
13277 k.bufStatus = CAMERA3_BUFFER_STATUS_ERROR;
13278 }
13279 }
13280 }
13281
13282 pthread_mutex_unlock(&mMutex);
13283 return;
13284}
Thierry Strudel3d639192016-09-09 11:52:26 -070013285/*===========================================================================
13286 * FUNCTION : initialize
13287 *
13288 * DESCRIPTION: Pass framework callback pointers to HAL
13289 *
13290 * PARAMETERS :
13291 *
13292 *
13293 * RETURN : Success : 0
13294 * Failure: -ENODEV
13295 *==========================================================================*/
13296
13297int QCamera3HardwareInterface::initialize(const struct camera3_device *device,
13298 const camera3_callback_ops_t *callback_ops)
13299{
13300 LOGD("E");
13301 QCamera3HardwareInterface *hw =
13302 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13303 if (!hw) {
13304 LOGE("NULL camera device");
13305 return -ENODEV;
13306 }
13307
13308 int rc = hw->initialize(callback_ops);
13309 LOGD("X");
13310 return rc;
13311}
13312
13313/*===========================================================================
13314 * FUNCTION : configure_streams
13315 *
13316 * DESCRIPTION: Configure streams as per the framework request
13317 *
13318 * PARAMETERS :
13319 *
13320 *
13321 * RETURN : Success: 0
13322 * Failure: -EINVAL (if stream configuration is invalid)
13323 * -ENODEV (fatal error)
13324 *==========================================================================*/
13325
13326int QCamera3HardwareInterface::configure_streams(
13327 const struct camera3_device *device,
13328 camera3_stream_configuration_t *stream_list)
13329{
13330 LOGD("E");
13331 QCamera3HardwareInterface *hw =
13332 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13333 if (!hw) {
13334 LOGE("NULL camera device");
13335 return -ENODEV;
13336 }
13337 int rc = hw->configureStreams(stream_list);
13338 LOGD("X");
13339 return rc;
13340}
13341
13342/*===========================================================================
13343 * FUNCTION : construct_default_request_settings
13344 *
13345 * DESCRIPTION: Configure a settings buffer to meet the required use case
13346 *
13347 * PARAMETERS :
13348 *
13349 *
13350 * RETURN : Success: Return valid metadata
13351 * Failure: Return NULL
13352 *==========================================================================*/
13353const camera_metadata_t* QCamera3HardwareInterface::
13354 construct_default_request_settings(const struct camera3_device *device,
13355 int type)
13356{
13357
13358 LOGD("E");
13359 camera_metadata_t* fwk_metadata = NULL;
13360 QCamera3HardwareInterface *hw =
13361 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13362 if (!hw) {
13363 LOGE("NULL camera device");
13364 return NULL;
13365 }
13366
13367 fwk_metadata = hw->translateCapabilityToMetadata(type);
13368
13369 LOGD("X");
13370 return fwk_metadata;
13371}
13372
13373/*===========================================================================
13374 * FUNCTION : process_capture_request
13375 *
13376 * DESCRIPTION: Process a capture request submitted by the framework
13377 *
13378 * PARAMETERS :
13379 *
13380 *
13381 * RETURN :
13382 *==========================================================================*/
13383int QCamera3HardwareInterface::process_capture_request(
13384 const struct camera3_device *device,
13385 camera3_capture_request_t *request)
13386{
13387 LOGD("E");
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013388 CAMSCOPE_UPDATE_FLAGS(CAMSCOPE_SECTION_HAL, kpi_camscope_flags);
Thierry Strudel3d639192016-09-09 11:52:26 -070013389 QCamera3HardwareInterface *hw =
13390 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13391 if (!hw) {
13392 LOGE("NULL camera device");
13393 return -EINVAL;
13394 }
13395
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013396 int rc = hw->orchestrateRequest(request);
Thierry Strudel3d639192016-09-09 11:52:26 -070013397 LOGD("X");
13398 return rc;
13399}
13400
13401/*===========================================================================
13402 * FUNCTION : dump
13403 *
13404 * DESCRIPTION: Dump HAL state for this camera device to the given fd
13405 *
13406 * PARAMETERS :
13407 *
13408 *
13409 * RETURN :
13410 *==========================================================================*/
13411
13412void QCamera3HardwareInterface::dump(
13413 const struct camera3_device *device, int fd)
13414{
13415 /* Log level property is read when "adb shell dumpsys media.camera" is
13416 called so that the log level can be controlled without restarting
13417 the media server */
13418 getLogLevel();
13419
13420 LOGD("E");
13421 QCamera3HardwareInterface *hw =
13422 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13423 if (!hw) {
13424 LOGE("NULL camera device");
13425 return;
13426 }
13427
13428 hw->dump(fd);
13429 LOGD("X");
13430 return;
13431}
13432
13433/*===========================================================================
13434 * FUNCTION : flush
13435 *
13436 * DESCRIPTION: Flush all in-flight requests for this camera device
13437 *
13438 * PARAMETERS :
13439 *
13440 *
13441 * RETURN :
13442 *==========================================================================*/
13443
13444int QCamera3HardwareInterface::flush(
13445 const struct camera3_device *device)
13446{
13447 int rc;
13448 LOGD("E");
13449 QCamera3HardwareInterface *hw =
13450 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13451 if (!hw) {
13452 LOGE("NULL camera device");
13453 return -EINVAL;
13454 }
13455
13456 pthread_mutex_lock(&hw->mMutex);
13457 // Validate current state
13458 switch (hw->mState) {
13459 case STARTED:
13460 /* valid state */
13461 break;
13462
13463 case ERROR:
13464 pthread_mutex_unlock(&hw->mMutex);
13465 hw->handleCameraDeviceError();
13466 return -ENODEV;
13467
13468 default:
13469 LOGI("Flush returned during state %d", hw->mState);
13470 pthread_mutex_unlock(&hw->mMutex);
13471 return 0;
13472 }
13473 pthread_mutex_unlock(&hw->mMutex);
13474
13475 rc = hw->flush(true /* restart channels */ );
13476 LOGD("X");
13477 return rc;
13478}
13479
13480/*===========================================================================
13481 * FUNCTION : close_camera_device
13482 *
13483 * DESCRIPTION: Close the camera device and release its resources
13484 *
13485 * PARAMETERS :
13486 *
13487 *
13488 * RETURN :
13489 *==========================================================================*/
13490int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device)
13491{
13492 int ret = NO_ERROR;
13493 QCamera3HardwareInterface *hw =
13494 reinterpret_cast<QCamera3HardwareInterface *>(
13495 reinterpret_cast<camera3_device_t *>(device)->priv);
13496 if (!hw) {
13497 LOGE("NULL camera device");
13498 return BAD_VALUE;
13499 }
13500
13501 LOGI("[KPI Perf]: E camera id %d", hw->mCameraId);
13502 delete hw;
13503 LOGI("[KPI Perf]: X");
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013504 CAMSCOPE_DESTROY(CAMSCOPE_SECTION_HAL);
Thierry Strudel3d639192016-09-09 11:52:26 -070013505 return ret;
13506}
13507
13508/*===========================================================================
13509 * FUNCTION : getWaveletDenoiseProcessPlate
13510 *
13511 * DESCRIPTION: query wavelet denoise process plate
13512 *
13513 * PARAMETERS : None
13514 *
13515 * RETURN     : WNR process plate value
13516 *==========================================================================*/
13517cam_denoise_process_type_t QCamera3HardwareInterface::getWaveletDenoiseProcessPlate()
13518{
13519 char prop[PROPERTY_VALUE_MAX];
13520 memset(prop, 0, sizeof(prop));
13521 property_get("persist.denoise.process.plates", prop, "0");
13522 int processPlate = atoi(prop);
13523 switch(processPlate) {
13524 case 0:
13525 return CAM_WAVELET_DENOISE_YCBCR_PLANE;
13526 case 1:
13527 return CAM_WAVELET_DENOISE_CBCR_ONLY;
13528 case 2:
13529 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13530 case 3:
13531 return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
13532 default:
13533 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13534 }
13535}
13536
13537
13538/*===========================================================================
13539 * FUNCTION : getTemporalDenoiseProcessPlate
13540 *
13541 * DESCRIPTION: query temporal denoise process plate
13542 *
13543 * PARAMETERS : None
13544 *
13545 * RETURN     : TNR process plate value
13546 *==========================================================================*/
13547cam_denoise_process_type_t QCamera3HardwareInterface::getTemporalDenoiseProcessPlate()
13548{
13549 char prop[PROPERTY_VALUE_MAX];
13550 memset(prop, 0, sizeof(prop));
13551 property_get("persist.tnr.process.plates", prop, "0");
13552 int processPlate = atoi(prop);
13553 switch(processPlate) {
13554 case 0:
13555 return CAM_WAVELET_DENOISE_YCBCR_PLANE;
13556 case 1:
13557 return CAM_WAVELET_DENOISE_CBCR_ONLY;
13558 case 2:
13559 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13560 case 3:
13561 return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
13562 default:
13563 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13564 }
13565}
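/*
 * Illustrative sketch only: both plate queries above are consumed when WNR or
 * TNR is programmed into a parameter batch, along the same lines as the TNR
 * handling earlier in this file (CAM_INTF_PARM_TEMPORAL_DENOISE), e.g.:
 *
 *   cam_denoise_param_t tnr;
 *   memset(&tnr, 0, sizeof(tnr));
 *   tnr.denoise_enable = 1;
 *   tnr.process_plates = getTemporalDenoiseProcessPlate();
 *   if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_TEMPORAL_DENOISE, tnr)) {
 *       rc = BAD_VALUE;
 *   }
 *
 * The default plate choice can be overridden at runtime through the
 * persist.denoise.process.plates and persist.tnr.process.plates properties.
 */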
13566
13567
13568/*===========================================================================
13569 * FUNCTION : extractSceneMode
13570 *
13571 * DESCRIPTION: Extract scene mode from frameworks set metadata
13572 *
13573 * PARAMETERS :
13574 * @frame_settings: CameraMetadata reference
13575 * @metaMode: ANDROID_CONTROL_MODE value
13576 * @hal_metadata: hal metadata structure
13577 *
13578 * RETURN     : int32_t type of status (NO_ERROR on success)
13579 *==========================================================================*/
13580int32_t QCamera3HardwareInterface::extractSceneMode(
13581 const CameraMetadata &frame_settings, uint8_t metaMode,
13582 metadata_buffer_t *hal_metadata)
13583{
13584 int32_t rc = NO_ERROR;
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013585 uint8_t sceneMode = CAM_SCENE_MODE_OFF;
13586
13587 if (ANDROID_CONTROL_MODE_OFF_KEEP_STATE == metaMode) {
13588 LOGD("Ignoring control mode OFF_KEEP_STATE");
13589 return NO_ERROR;
13590 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013591
13592 if (metaMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
13593 camera_metadata_ro_entry entry =
13594 frame_settings.find(ANDROID_CONTROL_SCENE_MODE);
13595 if (0 == entry.count)
13596 return rc;
13597
13598 uint8_t fwk_sceneMode = entry.data.u8[0];
13599
13600 int val = lookupHalName(SCENE_MODES_MAP,
13601 sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
13602 fwk_sceneMode);
13603 if (NAME_NOT_FOUND != val) {
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013604 sceneMode = (uint8_t)val;
Thierry Strudel3d639192016-09-09 11:52:26 -070013605 LOGD("sceneMode: %d", sceneMode);
Thierry Strudel3d639192016-09-09 11:52:26 -070013606 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013607 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013608
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013609 if ((sceneMode == CAM_SCENE_MODE_HDR) || m_bSensorHDREnabled) {
13610 rc = setSensorHDR(hal_metadata, (sceneMode == CAM_SCENE_MODE_HDR));
13611 }
13612
13613 if ((rc == NO_ERROR) && !m_bSensorHDREnabled) {
13614 if (sceneMode == ANDROID_CONTROL_SCENE_MODE_HDR) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013615 cam_hdr_param_t hdr_params;
13616 hdr_params.hdr_enable = 1;
13617 hdr_params.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13618 hdr_params.hdr_need_1x = false;
13619 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13620 CAM_INTF_PARM_HAL_BRACKETING_HDR, hdr_params)) {
13621 rc = BAD_VALUE;
13622 }
13623 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013624
Thierry Strudel3d639192016-09-09 11:52:26 -070013625 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13626 CAM_INTF_PARM_BESTSHOT_MODE, sceneMode)) {
13627 rc = BAD_VALUE;
13628 }
13629 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013630
13631 if (mForceHdrSnapshot) {
13632 cam_hdr_param_t hdr_params;
13633 hdr_params.hdr_enable = 1;
13634 hdr_params.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13635 hdr_params.hdr_need_1x = false;
13636 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13637 CAM_INTF_PARM_HAL_BRACKETING_HDR, hdr_params)) {
13638 rc = BAD_VALUE;
13639 }
13640 }
13641
Thierry Strudel3d639192016-09-09 11:52:26 -070013642 return rc;
13643}
13644
13645/*===========================================================================
Thierry Strudel04e026f2016-10-10 11:27:36 -070013646 * FUNCTION : setVideoHdrMode
13647 *
13648 * DESCRIPTION: Set Video HDR mode from frameworks set metadata
13649 *
13650 * PARAMETERS :
13651 * @hal_metadata: hal metadata structure
13652 * @vhdr: video HDR mode (QCAMERA3_VIDEO_HDR_MODE)
13653 *
13654 * RETURN     : int32_t type of status (NO_ERROR on success)
13655 *==========================================================================*/
13656int32_t QCamera3HardwareInterface::setVideoHdrMode(
13657 metadata_buffer_t *hal_metadata, cam_video_hdr_mode_t vhdr)
13658{
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013659 if ( (vhdr >= CAM_VIDEO_HDR_MODE_OFF) && (vhdr < CAM_VIDEO_HDR_MODE_MAX)) {
13660 return setSensorHDR(hal_metadata, (vhdr == CAM_VIDEO_HDR_MODE_ON), true);
13661 }
13662
13663 LOGE("Invalid Video HDR mode %d!", vhdr);
13664 return BAD_VALUE;
13665}
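/*
 * Minimal usage sketch (illustrative only): the video HDR request path simply
 * validates the mode and forwards to setSensorHDR() with isVideoHdrEnable set,
 * mirroring the call made while translating request settings:
 *
 *   cam_video_hdr_mode_t vhdr = CAM_VIDEO_HDR_MODE_ON;
 *   int32_t rc = setVideoHdrMode(mParameters, vhdr);
 *   if (rc != NO_ERROR) {
 *       LOGE("setVideoHdrMode failed");
 *   }
 */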
13666
13667/*===========================================================================
13668 * FUNCTION : setSensorHDR
13669 *
13670 * DESCRIPTION: Enable/disable sensor HDR.
13671 *
13672 * PARAMETERS :
13673 * @hal_metadata: hal metadata structure
13674 * @enable: whether to enable or disable sensor HDR
13675 *
13676 * RETURN     : int32_t type of status (NO_ERROR on success)
13677 *==========================================================================*/
13678int32_t QCamera3HardwareInterface::setSensorHDR(
13679 metadata_buffer_t *hal_metadata, bool enable, bool isVideoHdrEnable)
13680{
Thierry Strudel04e026f2016-10-10 11:27:36 -070013681 int32_t rc = NO_ERROR;
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013682 cam_sensor_hdr_type_t sensor_hdr = CAM_SENSOR_HDR_OFF;
13683
13684 if (enable) {
13685 char sensor_hdr_prop[PROPERTY_VALUE_MAX];
13686 memset(sensor_hdr_prop, 0, sizeof(sensor_hdr_prop));
13687 #ifdef _LE_CAMERA_
13688 //Default to staggered HDR for IOT
13689 property_get("persist.camera.sensor.hdr", sensor_hdr_prop, "3");
13690 #else
13691 property_get("persist.camera.sensor.hdr", sensor_hdr_prop, "0");
13692 #endif
13693 sensor_hdr = (cam_sensor_hdr_type_t) atoi(sensor_hdr_prop);
13694 }
13695
13696 bool isSupported = false;
13697 switch (sensor_hdr) {
13698 case CAM_SENSOR_HDR_IN_SENSOR:
13699 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13700 CAM_QCOM_FEATURE_SENSOR_HDR) {
13701 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070013702 LOGD("Setting HDR mode In Sensor");
Thierry Strudel04e026f2016-10-10 11:27:36 -070013703 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013704 break;
13705 case CAM_SENSOR_HDR_ZIGZAG:
13706 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13707 CAM_QCOM_FEATURE_ZIGZAG_HDR) {
13708 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070013709 LOGD("Setting HDR mode Zigzag");
Thierry Strudel04e026f2016-10-10 11:27:36 -070013710 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013711 break;
13712 case CAM_SENSOR_HDR_STAGGERED:
13713 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13714 CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) {
13715 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070013716 LOGD("Setting HDR mode Staggered");
Thierry Strudel04e026f2016-10-10 11:27:36 -070013717 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013718 break;
13719 case CAM_SENSOR_HDR_OFF:
13720 isSupported = true;
13721 LOGD("Turning off sensor HDR");
13722 break;
13723 default:
13724 LOGE("HDR mode %d not supported", sensor_hdr);
13725 rc = BAD_VALUE;
13726 break;
13727 }
13728
13729 if(isSupported) {
13730 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13731 CAM_INTF_PARM_SENSOR_HDR, sensor_hdr)) {
13732 rc = BAD_VALUE;
13733 } else {
13734 if(!isVideoHdrEnable)
13735 m_bSensorHDREnabled = (sensor_hdr != CAM_SENSOR_HDR_OFF);
Thierry Strudel04e026f2016-10-10 11:27:36 -070013736 }
13737 }
13738 return rc;
13739}
13740
13741/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070013742 * FUNCTION : needRotationReprocess
13743 *
13744 * DESCRIPTION: if rotation needs to be done by reprocess in pp
13745 *
13746 * PARAMETERS : none
13747 *
13748 * RETURN : true: needed
13749 * false: no need
13750 *==========================================================================*/
13751bool QCamera3HardwareInterface::needRotationReprocess()
13752{
13753 if ((gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION) > 0) {
13754 // current rotation is not zero, and pp has the capability to process rotation
13755 LOGH("need do reprocess for rotation");
13756 return true;
13757 }
13758
13759 return false;
13760}
13761
13762/*===========================================================================
13763 * FUNCTION : needReprocess
13764 *
13765 * DESCRIPTION: if reprocess is needed
13766 *
13767 * PARAMETERS : none
13768 *
13769 * RETURN : true: needed
13770 * false: no need
13771 *==========================================================================*/
13772bool QCamera3HardwareInterface::needReprocess(cam_feature_mask_t postprocess_mask)
13773{
13774 if (gCamCapability[mCameraId]->qcom_supported_feature_mask > 0) {
13775 // TODO: add for ZSL HDR later
13776 // pp module has min requirement for zsl reprocess, or WNR in ZSL mode
13777 if(postprocess_mask == CAM_QCOM_FEATURE_NONE){
13778 LOGH("need do reprocess for ZSL WNR or min PP reprocess");
13779 return true;
13780 } else {
13781 LOGH("already post processed frame");
13782 return false;
13783 }
13784 }
13785 return needRotationReprocess();
13786}
13787
13788/*===========================================================================
13789 * FUNCTION : needJpegExifRotation
13790 *
13791 * DESCRIPTION: if rotation from jpeg is needed
13792 *
13793 * PARAMETERS : none
13794 *
13795 * RETURN : true: needed
13796 * false: no need
13797 *==========================================================================*/
13798bool QCamera3HardwareInterface::needJpegExifRotation()
13799{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013800 /*If the pp does not have the ability to do rotation, enable jpeg rotation*/
Thierry Strudel3d639192016-09-09 11:52:26 -070013801 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
13802 LOGD("Need use Jpeg EXIF Rotation");
13803 return true;
13804 }
13805 return false;
13806}
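/*
 * Illustrative sketch only: the two rotation predicates above are mutually
 * exclusive by construction (both key off CAM_QCOM_FEATURE_ROTATION), so a
 * JPEG rotation request is typically handled as:
 *
 *   if (needRotationReprocess()) {
 *       // CPP rotates the frame during offline reprocess
 *   } else if (needJpegExifRotation()) {
 *       // no PP rotation capability; record orientation in EXIF instead
 *   }
 */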
13807
13808/*===========================================================================
13809 * FUNCTION : addOfflineReprocChannel
13810 *
13811 * DESCRIPTION: add a reprocess channel that will do reprocess on frames
13812 * coming from input channel
13813 *
13814 * PARAMETERS :
13815 * @config : reprocess configuration
13816 * @inputChHandle : pointer to the input (source) channel
13817 *
13818 *
13819 * RETURN : Ptr to the newly created channel obj. NULL if failed.
13820 *==========================================================================*/
13821QCamera3ReprocessChannel *QCamera3HardwareInterface::addOfflineReprocChannel(
13822 const reprocess_config_t &config, QCamera3ProcessingChannel *inputChHandle)
13823{
13824 int32_t rc = NO_ERROR;
13825 QCamera3ReprocessChannel *pChannel = NULL;
13826
13827 pChannel = new QCamera3ReprocessChannel(mCameraHandle->camera_handle,
Thierry Strudelc2ee3302016-11-17 12:33:12 -080013828 mChannelHandle, mCameraHandle->ops, captureResultCb, setBufferErrorStatus,
13829 config.padding, CAM_QCOM_FEATURE_NONE, this, inputChHandle);
Thierry Strudel3d639192016-09-09 11:52:26 -070013830 if (NULL == pChannel) {
13831 LOGE("no mem for reprocess channel");
13832 return NULL;
13833 }
13834
13835 rc = pChannel->initialize(IS_TYPE_NONE);
13836 if (rc != NO_ERROR) {
13837 LOGE("init reprocess channel failed, ret = %d", rc);
13838 delete pChannel;
13839 return NULL;
13840 }
13841
13842 // pp feature config
13843 cam_pp_feature_config_t pp_config;
13844 memset(&pp_config, 0, sizeof(cam_pp_feature_config_t));
13845
13846 pp_config.feature_mask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
13847 if (gCamCapability[mCameraId]->qcom_supported_feature_mask
13848 & CAM_QCOM_FEATURE_DSDN) {
13849        // Use CPP CDS in case h/w supports it.
13850 pp_config.feature_mask &= ~CAM_QCOM_FEATURE_CDS;
13851 pp_config.feature_mask |= CAM_QCOM_FEATURE_DSDN;
13852 }
13853 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
13854 pp_config.feature_mask &= ~CAM_QCOM_FEATURE_ROTATION;
13855 }
13856
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013857 if (config.hdr_param.hdr_enable) {
13858 pp_config.feature_mask |= CAM_QCOM_FEATURE_HDR;
13859 pp_config.hdr_param = config.hdr_param;
13860 }
13861
13862 if (mForceHdrSnapshot) {
13863 pp_config.feature_mask |= CAM_QCOM_FEATURE_HDR;
13864 pp_config.hdr_param.hdr_enable = 1;
13865 pp_config.hdr_param.hdr_need_1x = 0;
13866 pp_config.hdr_param.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13867 }
13868
Thierry Strudel3d639192016-09-09 11:52:26 -070013869 rc = pChannel->addReprocStreamsFromSource(pp_config,
13870 config,
13871 IS_TYPE_NONE,
13872 mMetadataChannel);
13873
13874 if (rc != NO_ERROR) {
13875 delete pChannel;
13876 return NULL;
13877 }
13878 return pChannel;
13879}
13880
13881/*===========================================================================
13882 * FUNCTION : getMobicatMask
13883 *
13884 * DESCRIPTION: returns mobicat mask
13885 *
13886 * PARAMETERS : none
13887 *
13888 * RETURN : mobicat mask
13889 *
13890 *==========================================================================*/
13891uint8_t QCamera3HardwareInterface::getMobicatMask()
13892{
13893 return m_MobicatMask;
13894}
13895
13896/*===========================================================================
13897 * FUNCTION : setMobicat
13898 *
13899 * DESCRIPTION: set Mobicat on/off.
13900 *
13901 * PARAMETERS :
13902 * @params : none
13903 *
13904 * RETURN : int32_t type of status
13905 * NO_ERROR -- success
13906 * none-zero failure code
13907 *==========================================================================*/
13908int32_t QCamera3HardwareInterface::setMobicat()
13909{
Thierry Strudel3d639192016-09-09 11:52:26 -070013910 int32_t ret = NO_ERROR;
Thierry Strudel3d639192016-09-09 11:52:26 -070013911
Shuzhen Wangb57ec912017-07-31 13:24:27 -070013912 if (m_MobicatMask) {
Thierry Strudel3d639192016-09-09 11:52:26 -070013913 tune_cmd_t tune_cmd;
13914 tune_cmd.type = SET_RELOAD_CHROMATIX;
13915 tune_cmd.module = MODULE_ALL;
13916 tune_cmd.value = TRUE;
13917 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
13918 CAM_INTF_PARM_SET_VFE_COMMAND,
13919 tune_cmd);
13920
13921 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
13922 CAM_INTF_PARM_SET_PP_COMMAND,
13923 tune_cmd);
13924 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013925
13926 return ret;
13927}
13928
13929/*===========================================================================
13930* FUNCTION : getLogLevel
13931*
13932* DESCRIPTION: Reads the log level property into a variable
13933*
13934* PARAMETERS :
13935* None
13936*
13937* RETURN :
13938* None
13939*==========================================================================*/
13940void QCamera3HardwareInterface::getLogLevel()
13941{
13942 char prop[PROPERTY_VALUE_MAX];
13943 uint32_t globalLogLevel = 0;
13944
13945 property_get("persist.camera.hal.debug", prop, "0");
13946 int val = atoi(prop);
13947 if (0 <= val) {
13948 gCamHal3LogLevel = (uint32_t)val;
13949 }
13950
Thierry Strudel9ec39c62016-12-28 11:30:05 -080013951 property_get("persist.camera.kpi.debug", prop, "0");
Thierry Strudel3d639192016-09-09 11:52:26 -070013952 gKpiDebugLevel = atoi(prop);
13953
13954 property_get("persist.camera.global.debug", prop, "0");
13955 val = atoi(prop);
13956 if (0 <= val) {
13957 globalLogLevel = (uint32_t)val;
13958 }
13959
13960 /* Highest log level among hal.logs and global.logs is selected */
13961 if (gCamHal3LogLevel < globalLogLevel)
13962 gCamHal3LogLevel = globalLogLevel;
13963
13964 return;
13965}
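/*
 * Usage note (illustrative only): because dump() re-reads these properties,
 * logging can be raised at runtime without restarting the camera process,
 * e.g.:
 *
 *   adb shell setprop persist.camera.hal.debug <level>
 *   adb shell setprop persist.camera.global.debug <level>
 *   adb shell dumpsys media.camera
 */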
13966
13967/*===========================================================================
13968 * FUNCTION : validateStreamRotations
13969 *
13970 * DESCRIPTION: Check if the rotations requested are supported
13971 *
13972 * PARAMETERS :
13973 * @stream_list : streams to be configured
13974 *
13975 * RETURN : NO_ERROR on success
13976 * -EINVAL on failure
13977 *
13978 *==========================================================================*/
13979int QCamera3HardwareInterface::validateStreamRotations(
13980 camera3_stream_configuration_t *streamList)
13981{
13982 int rc = NO_ERROR;
13983
13984 /*
13985 * Loop through all streams requested in configuration
13986 * Check if unsupported rotations have been requested on any of them
13987 */
13988 for (size_t j = 0; j < streamList->num_streams; j++){
13989 camera3_stream_t *newStream = streamList->streams[j];
13990
Emilian Peev35ceeed2017-06-29 11:58:56 -070013991 switch(newStream->rotation) {
13992 case CAMERA3_STREAM_ROTATION_0:
13993 case CAMERA3_STREAM_ROTATION_90:
13994 case CAMERA3_STREAM_ROTATION_180:
13995 case CAMERA3_STREAM_ROTATION_270:
13996 //Expected values
13997 break;
13998 default:
13999 ALOGE("%s: Error: Unsupported rotation of %d requested for stream"
14000 "type:%d and stream format:%d", __func__,
14001 newStream->rotation, newStream->stream_type,
14002 newStream->format);
14003 return -EINVAL;
14004 }
14005
Thierry Strudel3d639192016-09-09 11:52:26 -070014006 bool isRotated = (newStream->rotation != CAMERA3_STREAM_ROTATION_0);
14007 bool isImplDef = (newStream->format ==
14008 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED);
14009 bool isZsl = (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
14010 isImplDef);
14011
14012 if (isRotated && (!isImplDef || isZsl)) {
14013 LOGE("Error: Unsupported rotation of %d requested for stream"
14014 "type:%d and stream format:%d",
14015 newStream->rotation, newStream->stream_type,
14016 newStream->format);
14017 rc = -EINVAL;
14018 break;
14019 }
14020 }
14021
14022 return rc;
14023}
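/*
 * Illustrative sketch only: per the checks above, a rotated output has to be
 * an implementation-defined, non-bidirectional stream, e.g. a framework
 * configuration entry along the lines of:
 *
 *   camera3_stream_t stream = {};
 *   stream.stream_type = CAMERA3_STREAM_OUTPUT;
 *   stream.format = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
 *   stream.rotation = CAMERA3_STREAM_ROTATION_90;
 */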
14024
14025/*===========================================================================
14026* FUNCTION : getFlashInfo
14027*
14028* DESCRIPTION: Retrieve information about whether the device has a flash.
14029*
14030* PARAMETERS :
14031* @cameraId : Camera id to query
14032* @hasFlash : Boolean indicating whether there is a flash device
14033* associated with given camera
14034* @flashNode : If a flash device exists, this will be its device node.
14035*
14036* RETURN :
14037* None
14038*==========================================================================*/
14039void QCamera3HardwareInterface::getFlashInfo(const int cameraId,
14040 bool& hasFlash,
14041 char (&flashNode)[QCAMERA_MAX_FILEPATH_LENGTH])
14042{
14043 cam_capability_t* camCapability = gCamCapability[cameraId];
14044 if (NULL == camCapability) {
14045 hasFlash = false;
14046 flashNode[0] = '\0';
14047 } else {
14048 hasFlash = camCapability->flash_available;
14049 strlcpy(flashNode,
14050 (char*)camCapability->flash_dev_name,
14051 QCAMERA_MAX_FILEPATH_LENGTH);
14052 }
14053}
14054
14055/*===========================================================================
14056* FUNCTION : getEepromVersionInfo
14057*
14058* DESCRIPTION: Retrieve version info of the sensor EEPROM data
14059*
14060* PARAMETERS : None
14061*
14062* RETURN : string describing EEPROM version
14063* "\0" if no such info available
14064*==========================================================================*/
14065const char *QCamera3HardwareInterface::getEepromVersionInfo()
14066{
14067 return (const char *)&gCamCapability[mCameraId]->eeprom_version_info[0];
14068}
14069
14070/*===========================================================================
14071* FUNCTION : getLdafCalib
14072*
14073* DESCRIPTION: Retrieve Laser AF calibration data
14074*
14075* PARAMETERS : None
14076*
14077* RETURN : Two uint32_t describing laser AF calibration data
14078* NULL if none is available.
14079*==========================================================================*/
14080const uint32_t *QCamera3HardwareInterface::getLdafCalib()
14081{
14082 if (mLdafCalibExist) {
14083 return &mLdafCalib[0];
14084 } else {
14085 return NULL;
14086 }
14087}
14088
14089/*===========================================================================
Arnd Geis082a4d72017-08-24 10:33:07 -070014090* FUNCTION : getEaselFwVersion
14091*
14092* DESCRIPTION: Retrieve Easel firmware version
14093*
14094* PARAMETERS : None
14095*
14096* RETURN : string describing Firmware version
14097* "\0" if Easel manager client is not open
14098*==========================================================================*/
14099const char *QCamera3HardwareInterface::getEaselFwVersion()
14100{
14101 int rc = NO_ERROR;
14102
14103 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
14104 ALOGD("%s: Querying Easel firmware version", __FUNCTION__);
14105 if (EaselManagerClientOpened) {
14106 rc = gEaselManagerClient->getFwVersion(mEaselFwVersion);
14107 if (rc != OK)
14108 ALOGD("%s: Failed to query Easel firmware version", __FUNCTION__);
14109 else
14110 return (const char *)&mEaselFwVersion[0];
14111 }
14112 return NULL;
14113}
14114
14115/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070014116 * FUNCTION : dynamicUpdateMetaStreamInfo
14117 *
14118 * DESCRIPTION: This function:
14119 * (1) stops all the channels
14120 * (2) returns error on pending requests and buffers
14121 * (3) sends metastream_info in setparams
14122 * (4) starts all channels
14123 * This is useful when the sensor has to be restarted to apply
14124 * settings, such as frame rate, that require a different sensor mode
14125 *
14126 * PARAMETERS : None
14127 *
14128 * RETURN : NO_ERROR on success
14129 * Error codes on failure
14130 *
14131 *==========================================================================*/
14132int32_t QCamera3HardwareInterface::dynamicUpdateMetaStreamInfo()
14133{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014134 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_DYN_UPDATE_META_STRM_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -070014135 int rc = NO_ERROR;
14136
14137 LOGD("E");
14138
14139 rc = stopAllChannels();
14140 if (rc < 0) {
14141 LOGE("stopAllChannels failed");
14142 return rc;
14143 }
14144
14145 rc = notifyErrorForPendingRequests();
14146 if (rc < 0) {
14147 LOGE("notifyErrorForPendingRequests failed");
14148 return rc;
14149 }
14150
14151 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
14152 LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%x"
14153 "Format:%d",
14154 mStreamConfigInfo.type[i],
14155 mStreamConfigInfo.stream_sizes[i].width,
14156 mStreamConfigInfo.stream_sizes[i].height,
14157 mStreamConfigInfo.postprocess_mask[i],
14158 mStreamConfigInfo.format[i]);
14159 }
14160
14161 /* Send meta stream info once again so that ISP can start */
14162 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
14163 CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
14164 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
14165 mParameters);
14166 if (rc < 0) {
14167 LOGE("set Metastreaminfo failed. Sensor mode does not change");
14168 }
14169
14170 rc = startAllChannels();
14171 if (rc < 0) {
14172 LOGE("startAllChannels failed");
14173 return rc;
14174 }
14175
14176 LOGD("X");
14177 return rc;
14178}
14179
14180/*===========================================================================
14181 * FUNCTION : stopAllChannels
14182 *
14183 * DESCRIPTION: This function stops (equivalent to stream-off) all channels
14184 *
14185 * PARAMETERS : None
14186 *
14187 * RETURN : NO_ERROR on success
14188 * Error codes on failure
14189 *
14190 *==========================================================================*/
14191int32_t QCamera3HardwareInterface::stopAllChannels()
14192{
14193 int32_t rc = NO_ERROR;
14194
14195 LOGD("Stopping all channels");
14196 // Stop the Streams/Channels
14197 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
14198 it != mStreamInfo.end(); it++) {
14199 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
14200 if (channel) {
14201 channel->stop();
14202 }
14203 (*it)->status = INVALID;
14204 }
14205
14206 if (mSupportChannel) {
14207 mSupportChannel->stop();
14208 }
14209 if (mAnalysisChannel) {
14210 mAnalysisChannel->stop();
14211 }
14212 if (mRawDumpChannel) {
14213 mRawDumpChannel->stop();
14214 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014215 if (mHdrPlusRawSrcChannel) {
14216 mHdrPlusRawSrcChannel->stop();
14217 }
Thierry Strudel3d639192016-09-09 11:52:26 -070014218 if (mMetadataChannel) {
14219 /* If content of mStreamInfo is not 0, there is metadata stream */
14220 mMetadataChannel->stop();
14221 }
14222
14223 LOGD("All channels stopped");
14224 return rc;
14225}
14226
14227/*===========================================================================
14228 * FUNCTION : startAllChannels
14229 *
14230 * DESCRIPTION: This function starts (equivalent to stream-on) all channels
14231 *
14232 * PARAMETERS : None
14233 *
14234 * RETURN : NO_ERROR on success
14235 * Error codes on failure
14236 *
14237 *==========================================================================*/
14238int32_t QCamera3HardwareInterface::startAllChannels()
14239{
14240 int32_t rc = NO_ERROR;
14241
14242 LOGD("Start all channels ");
14243 // Start the Streams/Channels
14244 if (mMetadataChannel) {
14245 /* If content of mStreamInfo is not 0, there is metadata stream */
14246 rc = mMetadataChannel->start();
14247 if (rc < 0) {
14248 LOGE("META channel start failed");
14249 return rc;
14250 }
14251 }
14252 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
14253 it != mStreamInfo.end(); it++) {
14254 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
14255 if (channel) {
14256 rc = channel->start();
14257 if (rc < 0) {
14258 LOGE("channel start failed");
14259 return rc;
14260 }
14261 }
14262 }
14263 if (mAnalysisChannel) {
14264 mAnalysisChannel->start();
14265 }
14266 if (mSupportChannel) {
14267 rc = mSupportChannel->start();
14268 if (rc < 0) {
14269 LOGE("Support channel start failed");
14270 return rc;
14271 }
14272 }
14273 if (mRawDumpChannel) {
14274 rc = mRawDumpChannel->start();
14275 if (rc < 0) {
14276 LOGE("RAW dump channel start failed");
14277 return rc;
14278 }
14279 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014280 if (mHdrPlusRawSrcChannel) {
14281 rc = mHdrPlusRawSrcChannel->start();
14282 if (rc < 0) {
14283 LOGE("HDR+ RAW channel start failed");
14284 return rc;
14285 }
14286 }
Thierry Strudel3d639192016-09-09 11:52:26 -070014287
14288 LOGD("All channels started");
14289 return rc;
14290}
14291
14292/*===========================================================================
14293 * FUNCTION : notifyErrorForPendingRequests
14294 *
14295 * DESCRIPTION: This function sends error for all the pending requests/buffers
14296 *
14297 * PARAMETERS : None
14298 *
14299 * RETURN : Error codes
14300 * NO_ERROR on success
14301 *
14302 *==========================================================================*/
14303int32_t QCamera3HardwareInterface::notifyErrorForPendingRequests()
14304{
Emilian Peev7650c122017-01-19 08:24:33 -080014305 notifyErrorFoPendingDepthData(mDepthChannel);
14306
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014307 auto pendingRequest = mPendingRequestsList.begin();
14308 auto pendingBuffer = mPendingBuffersMap.mPendingBuffersInRequest.begin();
Thierry Strudel3d639192016-09-09 11:52:26 -070014309
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014310 // Iterate through pending requests (for which result metadata isn't sent yet) and pending
14311 // buffers (for which buffers aren't sent yet).
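    // Both lists are kept in ascending frame-number order (assumption), so this loop is a
    // merge walk: buffer-only entries get ERROR_BUFFER, metadata-only entries get
    // ERROR_RESULT, and entries still pending in both get ERROR_REQUEST.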
14312 while (pendingRequest != mPendingRequestsList.end() ||
14313 pendingBuffer != mPendingBuffersMap.mPendingBuffersInRequest.end()) {
14314 if (pendingRequest == mPendingRequestsList.end() ||
14315 pendingBuffer->frame_number < pendingRequest->frame_number) {
14316            // If metadata for this frame was sent, notify about a buffer error and return the
14317            // buffers with an error status.
14318 for (auto &info : pendingBuffer->mPendingBufferList) {
14319 // Send a buffer error for this frame number.
Thierry Strudel3d639192016-09-09 11:52:26 -070014320 camera3_notify_msg_t notify_msg;
14321 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
14322 notify_msg.type = CAMERA3_MSG_ERROR;
14323 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014324 notify_msg.message.error.error_stream = info.stream;
14325 notify_msg.message.error.frame_number = pendingBuffer->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014326 orchestrateNotify(&notify_msg);
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014327
14328 camera3_stream_buffer_t buffer = {};
14329 buffer.acquire_fence = -1;
14330 buffer.release_fence = -1;
14331 buffer.buffer = info.buffer;
14332 buffer.status = CAMERA3_BUFFER_STATUS_ERROR;
14333 buffer.stream = info.stream;
14334 mOutputBufferDispatcher.markBufferReady(pendingBuffer->frame_number, buffer);
Thierry Strudel3d639192016-09-09 11:52:26 -070014335 }
14336
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014337 pendingBuffer = mPendingBuffersMap.mPendingBuffersInRequest.erase(pendingBuffer);
14338 } else if (pendingBuffer == mPendingBuffersMap.mPendingBuffersInRequest.end() ||
14339 pendingBuffer->frame_number > pendingRequest->frame_number) {
14340 // If the buffers for this frame were sent already, notify about a result error.
Thierry Strudel3d639192016-09-09 11:52:26 -070014341 camera3_notify_msg_t notify_msg;
14342 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
14343 notify_msg.type = CAMERA3_MSG_ERROR;
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014344 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_RESULT;
14345 notify_msg.message.error.error_stream = nullptr;
14346 notify_msg.message.error.frame_number = pendingRequest->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014347 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -070014348
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014349 if (pendingRequest->input_buffer != nullptr) {
14350 camera3_capture_result result = {};
14351 result.frame_number = pendingRequest->frame_number;
14352 result.result = nullptr;
14353 result.input_buffer = pendingRequest->input_buffer;
14354 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -070014355 }
14356
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014357 mShutterDispatcher.clear(pendingRequest->frame_number);
14358 pendingRequest = mPendingRequestsList.erase(pendingRequest);
14359 } else {
14360 // If both buffers and result metadata weren't sent yet, notify about a request error
14361 // and return buffers with error.
14362 for (auto &info : pendingBuffer->mPendingBufferList) {
14363 camera3_notify_msg_t notify_msg;
14364 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
14365 notify_msg.type = CAMERA3_MSG_ERROR;
14366 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_REQUEST;
14367 notify_msg.message.error.error_stream = info.stream;
14368 notify_msg.message.error.frame_number = pendingBuffer->frame_number;
14369 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -070014370
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014371 camera3_stream_buffer_t buffer = {};
14372 buffer.acquire_fence = -1;
14373 buffer.release_fence = -1;
14374 buffer.buffer = info.buffer;
14375 buffer.status = CAMERA3_BUFFER_STATUS_ERROR;
14376 buffer.stream = info.stream;
14377 mOutputBufferDispatcher.markBufferReady(pendingBuffer->frame_number, buffer);
14378 }
14379
14380 if (pendingRequest->input_buffer != nullptr) {
14381 camera3_capture_result result = {};
14382 result.frame_number = pendingRequest->frame_number;
14383 result.result = nullptr;
14384 result.input_buffer = pendingRequest->input_buffer;
14385 orchestrateResult(&result);
14386 }
14387
14388 mShutterDispatcher.clear(pendingRequest->frame_number);
14389 pendingBuffer = mPendingBuffersMap.mPendingBuffersInRequest.erase(pendingBuffer);
14390 pendingRequest = mPendingRequestsList.erase(pendingRequest);
Thierry Strudel3d639192016-09-09 11:52:26 -070014391 }
14392 }
14393
14394 /* Reset pending frame Drop list and requests list */
14395 mPendingFrameDropList.clear();
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014396 mShutterDispatcher.clear();
14397 mOutputBufferDispatcher.clear(/*clearConfiguredStreams*/false);
Thierry Strudel3d639192016-09-09 11:52:26 -070014398 mPendingBuffersMap.mPendingBuffersInRequest.clear();
Emilian Peev30522a12017-08-03 14:36:33 +010014399 mExpectedFrameDuration = 0;
14400 mExpectedInflightDuration = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -070014401 LOGH("Cleared all the pending buffers ");
14402
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014403 return NO_ERROR;
Thierry Strudel3d639192016-09-09 11:52:26 -070014404}
14405
14406bool QCamera3HardwareInterface::isOnEncoder(
14407 const cam_dimension_t max_viewfinder_size,
14408 uint32_t width, uint32_t height)
14409{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014410 return ((width > (uint32_t)max_viewfinder_size.width) ||
14411 (height > (uint32_t)max_viewfinder_size.height) ||
14412 (width > (uint32_t)VIDEO_4K_WIDTH) ||
14413 (height > (uint32_t)VIDEO_4K_HEIGHT));
Thierry Strudel3d639192016-09-09 11:52:26 -070014414}
14415
14416/*===========================================================================
14417 * FUNCTION : setBundleInfo
14418 *
14419 * DESCRIPTION: Set bundle info for all streams that are bundled.
14420 *
14421 * PARAMETERS : None
14422 *
14423 * RETURN : NO_ERROR on success
14424 * Error codes on failure
14425 *==========================================================================*/
14426int32_t QCamera3HardwareInterface::setBundleInfo()
14427{
14428 int32_t rc = NO_ERROR;
14429
14430 if (mChannelHandle) {
14431 cam_bundle_config_t bundleInfo;
14432 memset(&bundleInfo, 0, sizeof(bundleInfo));
14433 rc = mCameraHandle->ops->get_bundle_info(
14434 mCameraHandle->camera_handle, mChannelHandle, &bundleInfo);
14435 if (rc != NO_ERROR) {
14436 LOGE("get_bundle_info failed");
14437 return rc;
14438 }
14439 if (mAnalysisChannel) {
14440 mAnalysisChannel->setBundleInfo(bundleInfo);
14441 }
14442 if (mSupportChannel) {
14443 mSupportChannel->setBundleInfo(bundleInfo);
14444 }
14445 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
14446 it != mStreamInfo.end(); it++) {
14447 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
14448 channel->setBundleInfo(bundleInfo);
14449 }
14450 if (mRawDumpChannel) {
14451 mRawDumpChannel->setBundleInfo(bundleInfo);
14452 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014453 if (mHdrPlusRawSrcChannel) {
14454 mHdrPlusRawSrcChannel->setBundleInfo(bundleInfo);
14455 }
Thierry Strudel3d639192016-09-09 11:52:26 -070014456 }
14457
14458 return rc;
14459}
14460
14461/*===========================================================================
Thierry Strudel295a0ca2016-11-03 18:38:47 -070014462 * FUNCTION : setInstantAEC
14463 *
14464 * DESCRIPTION: Set Instant AEC related params.
14465 *
14466 * PARAMETERS :
14467 * @meta: CameraMetadata reference
14468 *
14469 * RETURN : NO_ERROR on success
14470 * Error codes on failure
14471 *==========================================================================*/
14472int32_t QCamera3HardwareInterface::setInstantAEC(const CameraMetadata &meta)
14473{
14474 int32_t rc = NO_ERROR;
14475 uint8_t val = 0;
14476 char prop[PROPERTY_VALUE_MAX];
14477
14478 // First try to configure instant AEC from framework metadata
14479 if (meta.exists(QCAMERA3_INSTANT_AEC_MODE)) {
14480 val = (uint8_t)meta.find(QCAMERA3_INSTANT_AEC_MODE).data.i32[0];
14481 }
14482
14483 // If framework did not set this value, try to read from set prop.
14484 if (val == 0) {
14485 memset(prop, 0, sizeof(prop));
14486 property_get("persist.camera.instant.aec", prop, "0");
14487 val = (uint8_t)atoi(prop);
14488 }
14489
14490 if ((val >= (uint8_t)CAM_AEC_NORMAL_CONVERGENCE) &&
14491 ( val < (uint8_t)CAM_AEC_CONVERGENCE_MAX)) {
14492 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_INSTANT_AEC, val);
14493 mInstantAEC = val;
14494 mInstantAECSettledFrameNumber = 0;
14495 mInstantAecFrameIdxCount = 0;
14496        LOGH("instantAEC value set %d", val);
14497 if (mInstantAEC) {
14498 memset(prop, 0, sizeof(prop));
14499 property_get("persist.camera.ae.instant.bound", prop, "10");
14500 int32_t aec_frame_skip_cnt = atoi(prop);
14501 if (aec_frame_skip_cnt >= 0) {
14502 mAecSkipDisplayFrameBound = (uint8_t)aec_frame_skip_cnt;
14503 } else {
14504 LOGE("Invalid prop for aec frame bound %d", aec_frame_skip_cnt);
14505 rc = BAD_VALUE;
14506 }
14507 }
14508 } else {
14509 LOGE("Bad instant aec value set %d", val);
14510 rc = BAD_VALUE;
14511 }
14512 return rc;
14513}
14514
14515/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070014516 * FUNCTION : get_num_overall_buffers
14517 *
14518 * DESCRIPTION: Return the total number of pending buffers across all requests.
14519 *
14520 * PARAMETERS : None
14521 *
14522 * RETURN : Number of overall pending buffers
14523 *
14524 *==========================================================================*/
14525uint32_t PendingBuffersMap::get_num_overall_buffers()
14526{
14527 uint32_t sum_buffers = 0;
14528 for (auto &req : mPendingBuffersInRequest) {
14529 sum_buffers += req.mPendingBufferList.size();
14530 }
14531 return sum_buffers;
14532}
14533
14534/*===========================================================================
14535 * FUNCTION : removeBuf
14536 *
14537 * DESCRIPTION: Remove a matching buffer from tracker.
14538 *
14539 * PARAMETERS : @buffer: image buffer for the callback
14540 *
14541 * RETURN : None
14542 *
14543 *==========================================================================*/
14544void PendingBuffersMap::removeBuf(buffer_handle_t *buffer)
14545{
14546 bool buffer_found = false;
14547 for (auto req = mPendingBuffersInRequest.begin();
14548 req != mPendingBuffersInRequest.end(); req++) {
14549 for (auto k = req->mPendingBufferList.begin();
14550 k != req->mPendingBufferList.end(); k++ ) {
14551 if (k->buffer == buffer) {
14552                LOGD("Frame %d: Found frame buffer %p, removing it from mPendingBufferList",
14553 req->frame_number, buffer);
14554 k = req->mPendingBufferList.erase(k);
14555 if (req->mPendingBufferList.empty()) {
14556 // Remove this request from Map
14557 req = mPendingBuffersInRequest.erase(req);
14558 }
14559 buffer_found = true;
14560 break;
14561 }
14562 }
14563 if (buffer_found) {
14564 break;
14565 }
14566 }
14567 LOGD("mPendingBuffersMap.num_overall_buffers = %d",
14568 get_num_overall_buffers());
14569}
14570
14571/*===========================================================================
Thierry Strudelc2ee3302016-11-17 12:33:12 -080014572 * FUNCTION : getBufErrStatus
14573 *
14574 * DESCRIPTION: get buffer error status
14575 *
14576 * PARAMETERS : @buffer: buffer handle
14577 *
14578 * RETURN : Error status
14579 *
14580 *==========================================================================*/
14581int32_t PendingBuffersMap::getBufErrStatus(buffer_handle_t *buffer)
14582{
14583 for (auto& req : mPendingBuffersInRequest) {
14584 for (auto& k : req.mPendingBufferList) {
14585 if (k.buffer == buffer)
14586 return k.bufStatus;
14587 }
14588 }
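    // The buffer is not tracked by any pending request; report OK since no error was recorded.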
14589 return CAMERA3_BUFFER_STATUS_OK;
14590}
14591
14592/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070014593 * FUNCTION : setPAAFSupport
14594 *
14595 * DESCRIPTION: Set the preview-assisted auto focus support bit in
14596 * feature mask according to stream type and filter
14597 * arrangement
14598 *
14599 * PARAMETERS : @feature_mask: current feature mask, which may be modified
14600 * @stream_type: stream type
14601 * @filter_arrangement: filter arrangement
14602 *
14603 * RETURN : None
14604 *==========================================================================*/
14605void QCamera3HardwareInterface::setPAAFSupport(
14606 cam_feature_mask_t& feature_mask,
14607 cam_stream_type_t stream_type,
14608 cam_color_filter_arrangement_t filter_arrangement)
14609{
Thierry Strudel3d639192016-09-09 11:52:26 -070014610 switch (filter_arrangement) {
14611 case CAM_FILTER_ARRANGEMENT_RGGB:
14612 case CAM_FILTER_ARRANGEMENT_GRBG:
14613 case CAM_FILTER_ARRANGEMENT_GBRG:
14614 case CAM_FILTER_ARRANGEMENT_BGGR:
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014615 if ((stream_type == CAM_STREAM_TYPE_PREVIEW) ||
14616 (stream_type == CAM_STREAM_TYPE_ANALYSIS) ||
Thierry Strudel3d639192016-09-09 11:52:26 -070014617 (stream_type == CAM_STREAM_TYPE_VIDEO)) {
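            // Skip PAAF when the QTI EIS post-processing core is enabled for this stream
            // (assumption: the two features are mutually exclusive on Bayer preview/video streams).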
Thierry Strudel2896d122017-02-23 19:18:03 -080014618 if (!(feature_mask & CAM_QTI_FEATURE_PPEISCORE))
14619 feature_mask |= CAM_QCOM_FEATURE_PAAF;
Thierry Strudel3d639192016-09-09 11:52:26 -070014620 }
14621 break;
14622 case CAM_FILTER_ARRANGEMENT_Y:
14623 if (stream_type == CAM_STREAM_TYPE_ANALYSIS) {
14624 feature_mask |= CAM_QCOM_FEATURE_PAAF;
14625 }
14626 break;
14627 default:
14628 break;
14629 }
Shuzhen Wang3b457d92016-08-03 08:46:59 -070014630 LOGD("feature_mask=0x%llx; stream_type=%d, filter_arrangement=%d",
14631 feature_mask, stream_type, filter_arrangement);
14632
14633
Thierry Strudel3d639192016-09-09 11:52:26 -070014634}
14635
14636/*===========================================================================
14637* FUNCTION : getSensorMountAngle
14638*
14639* DESCRIPTION: Retrieve sensor mount angle
14640*
14641* PARAMETERS : None
14642*
14643* RETURN : sensor mount angle in uint32_t
14644*==========================================================================*/
14645uint32_t QCamera3HardwareInterface::getSensorMountAngle()
14646{
14647 return gCamCapability[mCameraId]->sensor_mount_angle;
14648}
14649
14650/*===========================================================================
14651* FUNCTION : getRelatedCalibrationData
14652*
14653* DESCRIPTION: Retrieve related system calibration data
14654*
14655* PARAMETERS : None
14656*
14657* RETURN : Pointer of related system calibration data
14658*==========================================================================*/
14659const cam_related_system_calibration_data_t *QCamera3HardwareInterface::getRelatedCalibrationData()
14660{
14661 return (const cam_related_system_calibration_data_t *)
14662 &(gCamCapability[mCameraId]->related_cam_calibration);
14663}
Shuzhen Wangf6890e02016-08-12 14:28:54 -070014664
14665/*===========================================================================
14666 * FUNCTION : is60HzZone
14667 *
14668 * DESCRIPTION: Whether the phone is in a zone with 60Hz mains electricity frequency
14669 *
14670 * PARAMETERS : None
14671 *
14672 * RETURN : True if in 60Hz zone, False otherwise
14673 *==========================================================================*/
14674bool QCamera3HardwareInterface::is60HzZone()
14675{
14676 time_t t = time(NULL);
14677 struct tm lt;
14678
14679 struct tm* r = localtime_r(&t, &lt);
14680
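    // Heuristic based only on the UTC offset (assumption): offsets at or west of UTC-2 (the
    // Americas) and at or east of UTC+8 (East Asia, Oceania) are treated as 60Hz regions;
    // everything in between defaults to 50Hz. If local time is unavailable, assume 60Hz.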
14681 if (r == NULL || lt.tm_gmtoff <= -2*60*60 || lt.tm_gmtoff >= 8*60*60)
14682 return true;
14683 else
14684 return false;
14685}
Shuzhen Wanga5da1022016-07-13 20:18:42 -070014686
14687/*===========================================================================
14688 * FUNCTION : adjustBlackLevelForCFA
14689 *
14690 * DESCRIPTION: Adjust the black level pattern given in RGGB order to the order
14691 *              of the Bayer CFA (Color Filter Array).
14692 *
14693 * PARAMETERS : @input: black level pattern in the order of RGGB
14694 * @output: black level pattern in the order of CFA
14695 * @color_arrangement: CFA color arrangement
14696 *
14697 * RETURN : None
14698 *==========================================================================*/
14699template<typename T>
14700void QCamera3HardwareInterface::adjustBlackLevelForCFA(
14701 T input[BLACK_LEVEL_PATTERN_CNT],
14702 T output[BLACK_LEVEL_PATTERN_CNT],
14703 cam_color_filter_arrangement_t color_arrangement)
14704{
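    // Index convention (assumption): input[] holds the RGGB-ordered pattern {R, Gr, Gb, B};
    // output[] is reordered to match the sensor's 2x2 CFA readout, e.g. GRBG yields
    // {Gr, R, B, Gb}.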
14705 switch (color_arrangement) {
14706 case CAM_FILTER_ARRANGEMENT_GRBG:
14707 output[0] = input[1];
14708 output[1] = input[0];
14709 output[2] = input[3];
14710 output[3] = input[2];
14711 break;
14712 case CAM_FILTER_ARRANGEMENT_GBRG:
14713 output[0] = input[2];
14714 output[1] = input[3];
14715 output[2] = input[0];
14716 output[3] = input[1];
14717 break;
14718 case CAM_FILTER_ARRANGEMENT_BGGR:
14719 output[0] = input[3];
14720 output[1] = input[2];
14721 output[2] = input[1];
14722 output[3] = input[0];
14723 break;
14724 case CAM_FILTER_ARRANGEMENT_RGGB:
14725 output[0] = input[0];
14726 output[1] = input[1];
14727 output[2] = input[2];
14728 output[3] = input[3];
14729 break;
14730 default:
14731 LOGE("Invalid color arrangement to derive dynamic blacklevel");
14732 break;
14733 }
14734}
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014735
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014736void QCamera3HardwareInterface::updateHdrPlusResultMetadata(
14737 CameraMetadata &resultMetadata,
14738 std::shared_ptr<metadata_buffer_t> settings)
14739{
14740 if (settings == nullptr) {
14741 ALOGE("%s: settings is nullptr.", __FUNCTION__);
14742 return;
14743 }
14744
14745 IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, settings) {
14746 resultMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
14747 }
14748
14749 IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, settings) {
14750 String8 str((const char *)gps_methods);
14751 resultMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
14752 }
14753
14754 IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, settings) {
14755 resultMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
14756 }
14757
14758 IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, settings) {
14759 resultMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
14760 }
14761
14762 IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, settings) {
14763 uint8_t fwk_jpeg_quality = static_cast<uint8_t>(*jpeg_quality);
14764 resultMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
14765 }
14766
14767 IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, settings) {
14768 uint8_t fwk_thumb_quality = static_cast<uint8_t>(*thumb_quality);
14769 resultMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
14770 }
14771
14772 IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, settings) {
14773 int32_t fwk_thumb_size[2];
14774 fwk_thumb_size[0] = thumb_size->width;
14775 fwk_thumb_size[1] = thumb_size->height;
14776 resultMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
14777 }
14778
14779 IF_META_AVAILABLE(uint32_t, intent, CAM_INTF_META_CAPTURE_INTENT, settings) {
14780 uint8_t fwk_intent = intent[0];
14781 resultMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &fwk_intent, 1);
14782 }
14783}
14784
Chien-Yu Chenad9b6632017-08-22 19:09:23 -070014785bool QCamera3HardwareInterface::isRequestHdrPlusCompatible(
14786 const camera3_capture_request_t &request, const CameraMetadata &metadata) {
Chien-Yu Chenec328c82017-08-30 16:41:08 -070014787 if (metadata.exists(NEXUS_EXPERIMENTAL_2017_DISABLE_HDRPLUS) &&
14788 metadata.find(NEXUS_EXPERIMENTAL_2017_DISABLE_HDRPLUS).data.i32[0] == 1) {
14789 ALOGV("%s: NEXUS_EXPERIMENTAL_2017_DISABLE_HDRPLUS is 1", __FUNCTION__);
14790 return false;
14791 }
14792
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014793 if (!metadata.exists(ANDROID_NOISE_REDUCTION_MODE) ||
14794 metadata.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0] !=
14795 ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY) {
Chien-Yu Chenad9b6632017-08-22 19:09:23 -070014796 ALOGV("%s: ANDROID_NOISE_REDUCTION_MODE is not HQ: %d", __FUNCTION__,
Chien-Yu Chenee335912017-02-09 17:53:20 -080014797 metadata.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0]);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014798 return false;
14799 }
14800
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014801 if (!metadata.exists(ANDROID_EDGE_MODE) ||
Chien-Yu Chenad9b6632017-08-22 19:09:23 -070014802 metadata.find(ANDROID_EDGE_MODE).data.u8[0] != ANDROID_EDGE_MODE_HIGH_QUALITY) {
14803 ALOGV("%s: ANDROID_EDGE_MODE is not HQ.", __FUNCTION__);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014804 return false;
14805 }
14806
Chien-Yu Chenad9b6632017-08-22 19:09:23 -070014807 if (!metadata.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE) ||
14808 metadata.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0] !=
14809 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY) {
14810 ALOGV("%s: ANDROID_COLOR_CORRECTION_ABERRATION_MODE is not HQ.", __FUNCTION__);
14811 return false;
14812 }
14813
14814 if (!metadata.exists(ANDROID_CONTROL_AE_MODE) ||
14815 (metadata.find(ANDROID_CONTROL_AE_MODE).data.u8[0] != ANDROID_CONTROL_AE_MODE_ON &&
14816 metadata.find(ANDROID_CONTROL_AE_MODE).data.u8[0] !=
14817 ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH)) {
14818 ALOGV("%s: ANDROID_CONTROL_AE_MODE is not ON or ON_AUTO_FLASH.", __FUNCTION__);
14819 return false;
14820 }
14821
14822 if (!metadata.exists(ANDROID_CONTROL_AWB_MODE) ||
14823 metadata.find(ANDROID_CONTROL_AWB_MODE).data.u8[0] != ANDROID_CONTROL_AWB_MODE_AUTO) {
14824 ALOGV("%s: ANDROID_CONTROL_AWB_MODE is not AUTO.", __FUNCTION__);
14825 return false;
14826 }
14827
14828 if (!metadata.exists(ANDROID_CONTROL_EFFECT_MODE) ||
14829 metadata.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0] !=
14830 ANDROID_CONTROL_EFFECT_MODE_OFF) {
14831        ALOGV("%s: ANDROID_CONTROL_EFFECT_MODE is not OFF.", __FUNCTION__);
14832 return false;
14833 }
14834
14835 if (!metadata.exists(ANDROID_CONTROL_MODE) ||
14836 (metadata.find(ANDROID_CONTROL_MODE).data.u8[0] != ANDROID_CONTROL_MODE_AUTO &&
14837 metadata.find(ANDROID_CONTROL_MODE).data.u8[0] !=
14838 ANDROID_CONTROL_MODE_USE_SCENE_MODE)) {
14839 ALOGV("%s: ANDROID_CONTROL_MODE is not AUTO or USE_SCENE_MODE.", __FUNCTION__);
14840 return false;
14841 }
14842
14843 // TODO (b/32585046): support non-ZSL.
14844 if (!metadata.exists(ANDROID_CONTROL_ENABLE_ZSL) ||
14845 metadata.find(ANDROID_CONTROL_ENABLE_ZSL).data.u8[0] != ANDROID_CONTROL_ENABLE_ZSL_TRUE) {
14846 ALOGV("%s: ANDROID_CONTROL_ENABLE_ZSL is not true.", __FUNCTION__);
14847 return false;
14848 }
14849
14850 // TODO (b/32586081): support flash.
14851 if (!metadata.exists(ANDROID_FLASH_MODE) ||
14852 metadata.find(ANDROID_FLASH_MODE).data.u8[0] != ANDROID_FLASH_MODE_OFF) {
14853 ALOGV("%s: ANDROID_FLASH_MODE is not OFF.", __FUNCTION__);
14854 return false;
14855 }
14856
14857 // TODO (b/36492953): support digital zoom.
14858 if (!metadata.exists(ANDROID_SCALER_CROP_REGION) ||
14859 metadata.find(ANDROID_SCALER_CROP_REGION).data.i32[0] != 0 ||
14860 metadata.find(ANDROID_SCALER_CROP_REGION).data.i32[1] != 0 ||
14861 metadata.find(ANDROID_SCALER_CROP_REGION).data.i32[2] !=
14862 gCamCapability[mCameraId]->active_array_size.width ||
14863 metadata.find(ANDROID_SCALER_CROP_REGION).data.i32[3] !=
14864 gCamCapability[mCameraId]->active_array_size.height) {
14865 ALOGV("%s: ANDROID_SCALER_CROP_REGION is not the same as active array region.",
14866 __FUNCTION__);
14867 return false;
14868 }
14869
14870 if (!metadata.exists(ANDROID_TONEMAP_MODE) ||
14871 metadata.find(ANDROID_TONEMAP_MODE).data.u8[0] != ANDROID_TONEMAP_MODE_HIGH_QUALITY) {
14872 ALOGV("%s: ANDROID_TONEMAP_MODE is not HQ.", __FUNCTION__);
14873 return false;
14874 }
14875
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070014876
Chien-Yu Chenad9b6632017-08-22 19:09:23 -070014877 // TODO (b/36693254, b/36690506): support other outputs.
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070014878 if (!gEnableMultipleHdrplusOutputs && request.num_output_buffers != 1) {
14879 ALOGV("%s: Only support 1 output: %d", __FUNCTION__, request.num_output_buffers);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014880 return false;
14881 }
14882
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070014883 switch (request.output_buffers[0].stream->format) {
14884 case HAL_PIXEL_FORMAT_BLOB:
14885 break;
14886 case HAL_PIXEL_FORMAT_YCbCr_420_888:
14887 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
14888 // TODO (b/36693254): Only support full size.
14889 if (!gEnableMultipleHdrplusOutputs) {
14890 if (static_cast<int>(request.output_buffers[0].stream->width) !=
14891 gCamCapability[mCameraId]->picture_sizes_tbl[0].width ||
14892 static_cast<int>(request.output_buffers[0].stream->height) !=
14893 gCamCapability[mCameraId]->picture_sizes_tbl[0].height) {
14894 ALOGV("%s: Only full size is supported.", __FUNCTION__);
14895 return false;
14896 }
14897 }
14898 break;
14899 default:
14900            ALOGV("%s: Not an HDR+ request: Only JPEG and YUV outputs are supported.", __FUNCTION__);
14901 for (uint32_t i = 0; i < request.num_output_buffers; i++) {
14902 ALOGV("%s: output_buffers[%u]: %dx%d format %d", __FUNCTION__, i,
14903                    request.output_buffers[i].stream->width,
14904                    request.output_buffers[i].stream->height,
14905                    request.output_buffers[i].stream->format);
14906 }
14907 return false;
14908 }
14909
Chien-Yu Chenad9b6632017-08-22 19:09:23 -070014910 return true;
14911}
14912
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070014913void QCamera3HardwareInterface::abortPendingHdrplusRequest(HdrPlusPendingRequest *hdrPlusRequest) {
14914 if (hdrPlusRequest == nullptr) return;
14915
14916 for (auto & outputBufferIter : hdrPlusRequest->outputBuffers) {
14917 // Find the stream for this buffer.
14918 for (auto streamInfo : mStreamInfo) {
14919 if (streamInfo->id == outputBufferIter.first) {
14920 if (streamInfo->channel == mPictureChannel) {
14921 // For picture channel, this buffer is internally allocated so return this
14922 // buffer to picture channel.
14923 mPictureChannel->returnYuvBuffer(outputBufferIter.second.get());
14924 } else {
14925 // Unregister this buffer for other channels.
14926 streamInfo->channel->unregisterBuffer(outputBufferIter.second.get());
14927 }
14928 break;
14929 }
14930 }
14931 }
14932
14933 hdrPlusRequest->outputBuffers.clear();
14934 hdrPlusRequest->frameworkOutputBuffers.clear();
14935}
14936
Chien-Yu Chenad9b6632017-08-22 19:09:23 -070014937bool QCamera3HardwareInterface::trySubmittingHdrPlusRequestLocked(
14938 HdrPlusPendingRequest *hdrPlusRequest, const camera3_capture_request_t &request,
14939 const CameraMetadata &metadata)
14940{
14941 if (hdrPlusRequest == nullptr) return false;
14942 if (!isRequestHdrPlusCompatible(request, metadata)) return false;
14943
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070014944 status_t res = OK;
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014945 pbcamera::CaptureRequest pbRequest;
14946 pbRequest.id = request.frame_number;
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070014947 // Iterate through all requested output buffers and add them to an HDR+ request.
14948 for (uint32_t i = 0; i < request.num_output_buffers; i++) {
14949 // Find the index of the stream in mStreamInfo.
14950 uint32_t pbStreamId = 0;
14951 bool found = false;
14952 for (auto streamInfo : mStreamInfo) {
14953 if (streamInfo->stream == request.output_buffers[i].stream) {
14954 pbStreamId = streamInfo->id;
14955 found = true;
14956 break;
14957 }
14958 }
14959
14960 if (!found) {
14961 ALOGE("%s: requested stream was not configured.", __FUNCTION__);
14962 abortPendingHdrplusRequest(hdrPlusRequest);
14963 return false;
14964 }
14965 auto outBuffer = std::make_shared<mm_camera_buf_def_t>();
14966 switch (request.output_buffers[i].stream->format) {
14967 case HAL_PIXEL_FORMAT_BLOB:
14968 {
14969 // For jpeg output, get a YUV buffer from pic channel.
14970 QCamera3PicChannel *picChannel =
14971 (QCamera3PicChannel*)request.output_buffers[i].stream->priv;
14972 res = picChannel->getYuvBufferForRequest(outBuffer.get(), request.frame_number);
14973 if (res != OK) {
14974 ALOGE("%s: Getting an available YUV buffer from pic channel failed: %s (%d)",
14975 __FUNCTION__, strerror(-res), res);
14976 abortPendingHdrplusRequest(hdrPlusRequest);
14977 return false;
14978 }
14979 break;
14980 }
14981 case HAL_PIXEL_FORMAT_YCbCr_420_888:
14982 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
14983 {
14984 // For YUV output, register the buffer and get the buffer def from the channel.
14985 QCamera3ProcessingChannel *channel =
14986 (QCamera3ProcessingChannel*)request.output_buffers[i].stream->priv;
14987 res = channel->registerBufferAndGetBufDef(request.output_buffers[i].buffer,
14988 outBuffer.get());
14989 if (res != OK) {
14990 ALOGE("%s: Getting the buffer def failed: %s (%d)", __FUNCTION__,
14991 strerror(-res), res);
14992 abortPendingHdrplusRequest(hdrPlusRequest);
14993 return false;
14994 }
14995 break;
14996 }
14997 default:
14998 abortPendingHdrplusRequest(hdrPlusRequest);
14999 return false;
15000 }
15001
15002 pbcamera::StreamBuffer buffer;
15003 buffer.streamId = pbStreamId;
15004 buffer.dmaBufFd = outBuffer->fd;
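        // Prefer sharing the output with the HDR+ service via its DMA-buf fd; pass a CPU
        // pointer only when no fd is available (assumption: heap-backed buffers only).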
15005 buffer.data = outBuffer->fd == -1 ? outBuffer->buffer : nullptr;
15006 buffer.dataSize = outBuffer->frame_len;
15007
15008 pbRequest.outputBuffers.push_back(buffer);
15009
15010 hdrPlusRequest->outputBuffers.emplace(pbStreamId, outBuffer);
15011 hdrPlusRequest->frameworkOutputBuffers.emplace(pbStreamId, request.output_buffers[i]);
15012 }
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080015013
15014 // Submit an HDR+ capture request to HDR+ service.
Chien-Yu Chen17cec362017-07-05 17:10:31 -070015015 res = gHdrPlusClient->submitCaptureRequest(&pbRequest, metadata);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080015016 if (res != OK) {
15017 ALOGE("%s: %d: Submitting a capture request failed: %s (%d)", __FUNCTION__, __LINE__,
15018 strerror(-res), res);
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070015019 abortPendingHdrplusRequest(hdrPlusRequest);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080015020 return false;
15021 }
15022
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080015023 return true;
15024}
15025
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070015026status_t QCamera3HardwareInterface::openHdrPlusClientAsyncLocked()
15027{
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015028 if (gHdrPlusClientOpening || gHdrPlusClient != nullptr) {
15029 return OK;
15030 }
15031
Chien-Yu Chend77a5462017-06-02 18:00:38 -070015032 status_t res = gEaselManagerClient->openHdrPlusClientAsync(this);
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015033 if (res != OK) {
15034 ALOGE("%s: Opening HDR+ client asynchronously failed: %s (%d)", __FUNCTION__,
15035 strerror(-res), res);
15036 return res;
15037 }
15038 gHdrPlusClientOpening = true;
15039
15040 return OK;
15041}
15042
Chien-Yu Chenee335912017-02-09 17:53:20 -080015043status_t QCamera3HardwareInterface::enableHdrPlusModeLocked()
15044{
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070015045 status_t res;
Chien-Yu Chenee335912017-02-09 17:53:20 -080015046
Chien-Yu Chena6c99062017-05-23 13:45:06 -070015047 if (mHdrPlusModeEnabled) {
15048 return OK;
15049 }
15050
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015051 // Check if gHdrPlusClient is opened or being opened.
15052 if (gHdrPlusClient == nullptr) {
15053 if (gHdrPlusClientOpening) {
15054 // HDR+ client is being opened. HDR+ mode will be enabled when it's opened.
15055 return OK;
15056 }
15057
15058 res = openHdrPlusClientAsyncLocked();
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070015059 if (res != OK) {
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015060 ALOGE("%s: Failed to open HDR+ client asynchronously: %s (%d).", __FUNCTION__,
15061 strerror(-res), res);
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070015062 return res;
15063 }
15064
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015065 // When opening HDR+ client completes, HDR+ mode will be enabled.
15066 return OK;
15067
Chien-Yu Chenee335912017-02-09 17:53:20 -080015068 }
15069
15070 // Configure stream for HDR+.
15071 res = configureHdrPlusStreamsLocked();
15072 if (res != OK) {
15073 LOGE("%s: Failed to configure HDR+ streams: %s (%d)", __FUNCTION__, strerror(-res), res);
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070015074 return res;
15075 }
15076
15077 // Enable HDR+ mode so Easel will start capturing ZSL raw buffers.
15078 res = gHdrPlusClient->setZslHdrPlusMode(true);
15079 if (res != OK) {
15080 LOGE("%s: Failed to enable HDR+ mode: %s (%d)", __FUNCTION__, strerror(-res), res);
Chien-Yu Chenee335912017-02-09 17:53:20 -080015081 return res;
15082 }
15083
15084 mHdrPlusModeEnabled = true;
15085 ALOGD("%s: HDR+ mode enabled", __FUNCTION__);
15086
15087 return OK;
15088}
15089
Chien-Yu Chen77ccd022017-06-23 12:00:36 -070015090void QCamera3HardwareInterface::finishHdrPlusClientOpeningLocked(std::unique_lock<std::mutex> &lock)
15091{
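    // Block until the asynchronous open started by openHdrPlusClientAsyncLocked() completes;
    // both onOpened() and onOpenFailed() clear gHdrPlusClientOpening and signal the condition.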
15092 if (gHdrPlusClientOpening) {
15093 gHdrPlusClientOpenCond.wait(lock, [&] { return !gHdrPlusClientOpening; });
15094 }
15095}
15096
Chien-Yu Chenee335912017-02-09 17:53:20 -080015097void QCamera3HardwareInterface::disableHdrPlusModeLocked()
15098{
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070015099 // Disable HDR+ mode.
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080015100 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070015101 status_t res = gHdrPlusClient->setZslHdrPlusMode(false);
15102 if (res != OK) {
15103 ALOGE("%s: Failed to disable HDR+ mode: %s (%d)", __FUNCTION__, strerror(-res), res);
15104 }
Chien-Yu Chen3b17c672017-04-24 12:49:52 -070015105
15106 // Close HDR+ client so Easel can enter low power mode.
Chien-Yu Chend77a5462017-06-02 18:00:38 -070015107 gEaselManagerClient->closeHdrPlusClient(std::move(gHdrPlusClient));
Chien-Yu Chen3b17c672017-04-24 12:49:52 -070015108 gHdrPlusClient = nullptr;
Chien-Yu Chenee335912017-02-09 17:53:20 -080015109 }
15110
15111 mHdrPlusModeEnabled = false;
15112 ALOGD("%s: HDR+ mode disabled", __FUNCTION__);
15113}
15114
Chien-Yu Chendeaebad2017-06-30 11:46:34 -070015115bool QCamera3HardwareInterface::isSessionHdrPlusModeCompatible()
15116{
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070015117 // Check that at least one YUV or one JPEG output is configured.
15118 // TODO: Support RAW (b/36690506)
15119 for (auto streamInfo : mStreamInfo) {
15120 if (streamInfo != nullptr && streamInfo->stream != nullptr) {
15121 if (streamInfo->stream->stream_type == CAMERA3_STREAM_OUTPUT &&
15122 (streamInfo->stream->format == HAL_PIXEL_FORMAT_BLOB ||
15123 streamInfo->stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888 ||
15124 streamInfo->stream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED)) {
15125 return true;
15126 }
15127 }
Chien-Yu Chendeaebad2017-06-30 11:46:34 -070015128 }
15129
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070015130 return false;
Chien-Yu Chendeaebad2017-06-30 11:46:34 -070015131}
15132
Chien-Yu Chenee335912017-02-09 17:53:20 -080015133status_t QCamera3HardwareInterface::configureHdrPlusStreamsLocked()
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080015134{
15135 pbcamera::InputConfiguration inputConfig;
15136 std::vector<pbcamera::StreamConfiguration> outputStreamConfigs;
15137 status_t res = OK;
15138
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070015139 // Sensor MIPI will send data to Easel.
15140 inputConfig.isSensorInput = true;
15141 inputConfig.sensorMode.cameraId = mCameraId;
15142 inputConfig.sensorMode.pixelArrayWidth = mSensorModeInfo.pixel_array_size.width;
15143 inputConfig.sensorMode.pixelArrayHeight = mSensorModeInfo.pixel_array_size.height;
15144 inputConfig.sensorMode.activeArrayWidth = mSensorModeInfo.active_array_size.width;
15145 inputConfig.sensorMode.activeArrayHeight = mSensorModeInfo.active_array_size.height;
15146 inputConfig.sensorMode.outputPixelClkHz = mSensorModeInfo.op_pixel_clk;
15147 inputConfig.sensorMode.timestampOffsetNs = mSensorModeInfo.timestamp_offset;
15148 if (mSensorModeInfo.num_raw_bits != 10) {
15149 ALOGE("%s: Only RAW10 is supported but this sensor mode has %d raw bits.", __FUNCTION__,
15150 mSensorModeInfo.num_raw_bits);
15151 return BAD_VALUE;
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080015152 }
15153
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070015154 inputConfig.sensorMode.format = HAL_PIXEL_FORMAT_RAW10;
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080015155
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070015156 // Iterate through configured output streams in HAL and configure those streams in HDR+
15157 // service.
15158 for (auto streamInfo : mStreamInfo) {
15159 pbcamera::StreamConfiguration outputConfig;
15160 if (streamInfo->stream->stream_type == CAMERA3_STREAM_OUTPUT) {
15161 switch (streamInfo->stream->format) {
15162 case HAL_PIXEL_FORMAT_BLOB:
15163 case HAL_PIXEL_FORMAT_YCbCr_420_888:
15164 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
15165 res = fillPbStreamConfig(&outputConfig, streamInfo->id,
15166 streamInfo->channel, /*stream index*/0);
15167 if (res != OK) {
15168                        LOGE("%s: Failed to fill stream config for YUV stream: %s (%d)",
15169 __FUNCTION__, strerror(-res), res);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080015170
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070015171 return res;
15172 }
15173
15174 outputStreamConfigs.push_back(outputConfig);
15175 break;
15176 default:
15177 // TODO: handle RAW16 outputs if mRawChannel was created. (b/36690506)
15178 break;
15179 }
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080015180 }
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080015181 }
15182
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080015183 res = gHdrPlusClient->configureStreams(inputConfig, outputStreamConfigs);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080015184 if (res != OK) {
15185 LOGE("%d: Failed to configure streams with HDR+ client: %s (%d)", __FUNCTION__,
15186 strerror(-res), res);
15187 return res;
15188 }
15189
15190 return OK;
15191}
15192
Chien-Yu Chen90f1fc12017-07-14 14:31:53 -070015193void QCamera3HardwareInterface::onEaselFatalError(std::string errMsg)
15194{
15195 ALOGE("%s: Got an Easel fatal error: %s", __FUNCTION__, errMsg.c_str());
15196 // Set HAL state to error.
15197 pthread_mutex_lock(&mMutex);
15198 mState = ERROR;
15199 pthread_mutex_unlock(&mMutex);
15200
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -070015201 handleCameraDeviceError(/*stopChannelImmediately*/true);
Chien-Yu Chen90f1fc12017-07-14 14:31:53 -070015202}
15203
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070015204void QCamera3HardwareInterface::onOpened(std::unique_ptr<HdrPlusClient> client)
15205{
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015206 if (client == nullptr) {
15207 ALOGE("%s: Opened client is null.", __FUNCTION__);
15208 return;
15209 }
15210
Chien-Yu Chene96475e2017-04-11 11:53:26 -070015211 logEaselEvent("EASEL_STARTUP_LATENCY", "HDR+ client opened.");
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015212 ALOGI("%s: HDR+ client opened.", __FUNCTION__);
15213
Chien-Yu Chen77ccd022017-06-23 12:00:36 -070015214 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070015215 if (!gHdrPlusClientOpening) {
15216        ALOGW("%s: HDR+ was disabled while the HDR+ client was being opened.", __FUNCTION__);
15217 return;
15218 }
15219
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015220 gHdrPlusClient = std::move(client);
15221 gHdrPlusClientOpening = false;
Chien-Yu Chen77ccd022017-06-23 12:00:36 -070015222 gHdrPlusClientOpenCond.notify_one();
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015223
15224 // Set static metadata.
15225 status_t res = gHdrPlusClient->setStaticMetadata(*gStaticMetadata[mCameraId]);
15226 if (res != OK) {
15227 LOGE("%s: Failed to set static metadata in HDR+ client: %s (%d). Closing HDR+ client.",
15228 __FUNCTION__, strerror(-res), res);
Chien-Yu Chend77a5462017-06-02 18:00:38 -070015229 gEaselManagerClient->closeHdrPlusClient(std::move(gHdrPlusClient));
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015230 gHdrPlusClient = nullptr;
15231 return;
15232 }
15233
15234 // Enable HDR+ mode.
15235 res = enableHdrPlusModeLocked();
15236 if (res != OK) {
15237 LOGE("%s: Failed to configure HDR+ streams.", __FUNCTION__);
15238 }
15239}
15240
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070015241void QCamera3HardwareInterface::onOpenFailed(status_t err)
15242{
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015243 ALOGE("%s: Opening HDR+ client failed: %s (%d)", __FUNCTION__, strerror(-err), err);
Chien-Yu Chen77ccd022017-06-23 12:00:36 -070015244 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015245 gHdrPlusClientOpening = false;
Chien-Yu Chen77ccd022017-06-23 12:00:36 -070015246 gHdrPlusClientOpenCond.notify_one();
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015247}
15248
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070015249void QCamera3HardwareInterface::onFatalError()
15250{
15251 ALOGE("%s: HDR+ client has a fatal error.", __FUNCTION__);
15252
15253 // Set HAL state to error.
15254 pthread_mutex_lock(&mMutex);
15255 mState = ERROR;
15256 pthread_mutex_unlock(&mMutex);
15257
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -070015258 handleCameraDeviceError(/*stopChannelImmediately*/true);
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070015259}
15260
Chien-Yu Chen29fd1d72017-04-27 18:42:09 -070015261void QCamera3HardwareInterface::onShutter(uint32_t requestId, int64_t apSensorTimestampNs)
15262{
15263 ALOGV("%s: %d: Received a shutter for HDR+ request %d timestamp %" PRId64, __FUNCTION__,
15264 __LINE__, requestId, apSensorTimestampNs);
15265
15266 mShutterDispatcher.markShutterReady(requestId, apSensorTimestampNs);
15267}
15268
Chien-Yu Chendaf68892017-08-25 12:56:40 -070015269void QCamera3HardwareInterface::onNextCaptureReady(uint32_t requestId)
15270{
15271 pthread_mutex_lock(&mMutex);
15272
15273 // Find the pending request for this result metadata.
15274 auto requestIter = mPendingRequestsList.begin();
15275 while (requestIter != mPendingRequestsList.end() && requestIter->frame_number != requestId) {
15276 requestIter++;
15277 }
15278
15279 if (requestIter == mPendingRequestsList.end()) {
15280 ALOGE("%s: Cannot find a pending request for frame number %u.", __FUNCTION__, requestId);
15281 pthread_mutex_unlock(&mMutex);
15282 return;
15283 }
15284
15285 requestIter->partial_result_cnt++;
15286
15287 CameraMetadata metadata;
15288 uint8_t ready = true;
15289 metadata.update(NEXUS_EXPERIMENTAL_2017_NEXT_STILL_INTENT_REQUEST_READY, &ready, 1);
15290
15291 // Send it to framework.
15292 camera3_capture_result_t result = {};
15293
15294 result.result = metadata.getAndLock();
15295 // Populate metadata result
15296 result.frame_number = requestId;
15297 result.num_output_buffers = 0;
15298 result.output_buffers = NULL;
15299 result.partial_result = requestIter->partial_result_cnt;
15300
15301 orchestrateResult(&result);
15302 metadata.unlock(result.result);
15303
15304 pthread_mutex_unlock(&mMutex);
15305}
15306
Chien-Yu Chen0a921f92017-08-27 17:25:33 -070015307void QCamera3HardwareInterface::onPostview(uint32_t requestId,
15308 std::unique_ptr<std::vector<uint8_t>> postview, uint32_t width, uint32_t height,
15309 uint32_t stride, int32_t format)
15310{
15311 if (property_get_bool("persist.camera.hdrplus.dump_postview", false)) {
15312 ALOGI("%s: %d: Received a postview %dx%d for HDR+ request %d", __FUNCTION__,
15313 __LINE__, width, height, requestId);
15314 char buf[FILENAME_MAX] = {};
15315 snprintf(buf, sizeof(buf), QCAMERA_DUMP_FRM_LOCATION"postview_%d_%dx%d.ppm",
15316 requestId, width, height);
15317
15318 pbcamera::StreamConfiguration config = {};
15319 config.image.width = width;
15320 config.image.height = height;
15321 config.image.format = format;
15322
15323 pbcamera::PlaneConfiguration plane = {};
15324 plane.stride = stride;
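        // The postview is treated as a single tightly packed plane, so the scanline count
        // equals the image height (assumption).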
15325 plane.scanline = height;
15326
15327 config.image.planes.push_back(plane);
15328
15329 pbcamera::StreamBuffer buffer = {};
15330 buffer.streamId = 0;
15331 buffer.dmaBufFd = -1;
15332 buffer.data = postview->data();
15333 buffer.dataSize = postview->size();
15334
15335 hdrplus_client_utils::writePpm(buf, config, buffer);
15336 }
15337
15338 pthread_mutex_lock(&mMutex);
15339
15340 // Find the pending request for this result metadata.
15341 auto requestIter = mPendingRequestsList.begin();
15342 while (requestIter != mPendingRequestsList.end() && requestIter->frame_number != requestId) {
15343 requestIter++;
15344 }
15345
15346 if (requestIter == mPendingRequestsList.end()) {
15347 ALOGE("%s: Cannot find a pending request for frame number %u.", __FUNCTION__, requestId);
15348 pthread_mutex_unlock(&mMutex);
15349 return;
15350 }
15351
15352 requestIter->partial_result_cnt++;
15353
15354 CameraMetadata metadata;
15355 int32_t config[3] = {static_cast<int32_t>(width), static_cast<int32_t>(height),
15356 static_cast<int32_t>(stride)};
15357 metadata.update(NEXUS_EXPERIMENTAL_2017_POSTVIEW_CONFIG, config, 3);
15358 metadata.update(NEXUS_EXPERIMENTAL_2017_POSTVIEW_DATA, postview->data(), postview->size());
15359
15360 // Send it to framework.
15361 camera3_capture_result_t result = {};
15362
15363 result.result = metadata.getAndLock();
15364 // Populate metadata result
15365 result.frame_number = requestId;
15366 result.num_output_buffers = 0;
15367 result.output_buffers = NULL;
15368 result.partial_result = requestIter->partial_result_cnt;
15369
15370 orchestrateResult(&result);
15371 metadata.unlock(result.result);
15372
15373 pthread_mutex_unlock(&mMutex);
15374}
15375
Chien-Yu Chene687bd02016-12-07 18:30:26 -080015376void QCamera3HardwareInterface::onCaptureResult(pbcamera::CaptureResult *result,
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070015377 const camera_metadata_t &resultMetadata)
15378{
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070015379 if (result == nullptr) {
15380 ALOGE("%s: result is nullptr.", __FUNCTION__);
15381 return;
15382 }
15383
15384
15385 // TODO (b/34854987): initiate this from HDR+ service.
15386 onNextCaptureReady(result->requestId);
15387
15388 // Find the pending HDR+ request.
15389 HdrPlusPendingRequest pendingRequest;
15390 {
15391 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
15392 auto req = mHdrPlusPendingRequests.find(result->requestId);
15393 pendingRequest = req->second;
15394 }
15395
15396 // Update the result metadata with the settings of the HDR+ still capture request because
15397 // the result metadata belongs to a ZSL buffer.
15398 CameraMetadata metadata;
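    // Assigning a camera_metadata_t* to CameraMetadata clones the buffer, so the ZSL result
    // metadata below is updated on a private copy.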
15399 metadata = &resultMetadata;
15400 updateHdrPlusResultMetadata(metadata, pendingRequest.settings);
15401 camera_metadata_t* updatedResultMetadata = metadata.release();
15402
15403 uint32_t halSnapshotStreamId = 0;
15404 if (mPictureChannel != nullptr) {
15405 halSnapshotStreamId = mPictureChannel->getStreamID(mPictureChannel->getStreamTypeMask());
15406 }
15407
15408 auto halMetadata = std::make_shared<metadata_buffer_t>();
15409 clear_metadata_buffer(halMetadata.get());
15410
15411 // Convert updated result metadata to HAL metadata.
15412 status_t res = translateFwkMetadataToHalMetadata(updatedResultMetadata, halMetadata.get(),
15413 halSnapshotStreamId, /*minFrameDuration*/0);
15414 if (res != 0) {
15415 ALOGE("%s: Translating metadata failed: %s (%d)", __FUNCTION__, strerror(-res), res);
15416 }
15417
15418 for (auto &outputBuffer : result->outputBuffers) {
15419 uint32_t streamId = outputBuffer.streamId;
15420
15421 // Find the framework output buffer in the pending request.
15422 auto frameworkOutputBufferIter = pendingRequest.frameworkOutputBuffers.find(streamId);
15423 if (frameworkOutputBufferIter == pendingRequest.frameworkOutputBuffers.end()) {
15424 ALOGE("%s: Couldn't find framework output buffers for stream id %u", __FUNCTION__,
15425 streamId);
15426 continue;
Chien-Yu Chene687bd02016-12-07 18:30:26 -080015427 }
15428
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070015429 camera3_stream_buffer_t *frameworkOutputBuffer = &frameworkOutputBufferIter->second;
15430
15431 // Find the channel for the output buffer.
15432 QCamera3ProcessingChannel *channel =
15433 (QCamera3ProcessingChannel*)frameworkOutputBuffer->stream->priv;
15434
15435 // Find the output buffer def.
15436 auto outputBufferIter = pendingRequest.outputBuffers.find(streamId);
15437 if (outputBufferIter == pendingRequest.outputBuffers.end()) {
15438 ALOGE("%s: Cannot find output buffer", __FUNCTION__);
15439 continue;
Chien-Yu Chene687bd02016-12-07 18:30:26 -080015440 }
15441
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070015442 std::shared_ptr<mm_camera_buf_def_t> outputBufferDef = outputBufferIter->second;
Chien-Yu Chendaf68892017-08-25 12:56:40 -070015443
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070015444 // Check whether to dump the buffer.
15445 if (frameworkOutputBuffer->stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888 ||
15446 frameworkOutputBuffer->stream->format == HAL_PIXEL_FORMAT_BLOB) {
15447 // If the stream format is YUV or jpeg, check if dumping HDR+ YUV output is enabled.
15448 char prop[PROPERTY_VALUE_MAX];
15449 property_get("persist.camera.hdrplus.dump_yuv", prop, "0");
15450 bool dumpYuvOutput = atoi(prop);
Chien-Yu Chene687bd02016-12-07 18:30:26 -080015451
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070015452 if (dumpYuvOutput) {
15453 // Dump yuv buffer to a ppm file.
15454 pbcamera::StreamConfiguration outputConfig;
15455 status_t rc = fillPbStreamConfig(&outputConfig, streamId,
15456 channel, /*stream index*/0);
15457 if (rc == OK) {
15458 char buf[FILENAME_MAX] = {};
15459 snprintf(buf, sizeof(buf), QCAMERA_DUMP_FRM_LOCATION"s_%d_%d_%dx%d.ppm",
15460 result->requestId, streamId,
15461 outputConfig.image.width, outputConfig.image.height);
Chien-Yu Chen92724a82017-01-06 11:50:30 -080015462
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070015463 hdrplus_client_utils::writePpm(buf, outputConfig, outputBuffer);
15464 } else {
15465 LOGW("%s: Couldn't dump YUV buffer because getting stream config failed: "
15466 "%s (%d).", __FUNCTION__, strerror(-rc), rc);
15467 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -080015468 }
15469 }
15470
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070015471 if (channel == mPictureChannel) {
Chien-Yu Chen92724a82017-01-06 11:50:30 -080015472 // Return the buffer to pic channel for encoding.
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070015473 mPictureChannel->returnYuvBufferAndEncode(outputBufferDef.get(),
15474 frameworkOutputBuffer->buffer, result->requestId,
Chien-Yu Chen92724a82017-01-06 11:50:30 -080015475 halMetadata);
15476 } else {
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070015477 // Return the buffer to camera framework.
Chien-Yu Chen92724a82017-01-06 11:50:30 -080015478 pthread_mutex_lock(&mMutex);
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070015479 handleBufferWithLock(frameworkOutputBuffer, result->requestId);
Chien-Yu Chen92724a82017-01-06 11:50:30 -080015480 pthread_mutex_unlock(&mMutex);
Chien-Yu Chene687bd02016-12-07 18:30:26 -080015481
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070015482 channel->unregisterBuffer(outputBufferDef.get());
Chien-Yu Chene687bd02016-12-07 18:30:26 -080015483 }
15484 }
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070015485
15486 // Send HDR+ metadata to framework.
15487 {
15488 pthread_mutex_lock(&mMutex);
15489
15490 // updatedResultMetadata will be freed in handlePendingResultMetadataWithLock.
15491 handlePendingResultMetadataWithLock(result->requestId, updatedResultMetadata);
15492 pthread_mutex_unlock(&mMutex);
15493 }
15494
15495 // Remove the HDR+ pending request.
15496 {
15497 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
15498 auto req = mHdrPlusPendingRequests.find(result->requestId);
15499 mHdrPlusPendingRequests.erase(req);
15500 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070015501}
15502
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070015503void QCamera3HardwareInterface::onFailedCaptureResult(pbcamera::CaptureResult *failedResult)
15504{
15505 if (failedResult == nullptr) {
15506 ALOGE("%s: Got an empty failed result.", __FUNCTION__);
15507 return;
15508 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -080015509
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070015510 ALOGE("%s: Got a failed HDR+ result for request %d", __FUNCTION__, failedResult->requestId);
Chien-Yu Chene687bd02016-12-07 18:30:26 -080015511
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070015512 // Find the pending HDR+ request.
15513 HdrPlusPendingRequest pendingRequest;
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070015514 {
15515 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070015516 auto req = mHdrPlusPendingRequests.find(failedResult->requestId);
15517 if (req == mHdrPlusPendingRequests.end()) {
15518 ALOGE("%s: Couldn't find pending request %d", __FUNCTION__, failedResult->requestId);
15519 return;
15520 }
15521 pendingRequest = req->second;
15522 }
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070015523
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070015524 for (auto &outputBuffer : failedResult->outputBuffers) {
15525 uint32_t streamId = outputBuffer.streamId;
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070015526
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070015527        // Find the framework output buffer in the pending request so its channel and
15528        // buffer def can be returned to the right owner.
15529 auto frameworkOutputBufferIter = pendingRequest.frameworkOutputBuffers.find(streamId);
15530 if (frameworkOutputBufferIter == pendingRequest.frameworkOutputBuffers.end()) {
15531 ALOGE("%s: Couldn't find framework output buffers for stream id %u", __FUNCTION__,
15532 streamId);
15533 continue;
15534 }
15535
15536 camera3_stream_buffer_t *frameworkOutputBuffer = &frameworkOutputBufferIter->second;
15537
15538 // Find the channel for the output buffer.
15539 QCamera3ProcessingChannel *channel =
15540 (QCamera3ProcessingChannel*)frameworkOutputBuffer->stream->priv;
15541
15542 // Find the output buffer def.
15543 auto outputBufferIter = pendingRequest.outputBuffers.find(streamId);
15544 if (outputBufferIter == pendingRequest.outputBuffers.end()) {
15545 ALOGE("%s: Cannot find output buffer", __FUNCTION__);
15546 continue;
15547 }
15548
15549 std::shared_ptr<mm_camera_buf_def_t> outputBufferDef = outputBufferIter->second;
15550
15551 if (channel == mPictureChannel) {
15552 // Return the buffer to pic channel.
15553 mPictureChannel->returnYuvBuffer(outputBufferDef.get());
15554 } else {
15555 channel->unregisterBuffer(outputBufferDef.get());
15556 }
15557 }
15558
15559 // Remove the HDR+ pending request.
15560 {
15561 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
15562 auto req = mHdrPlusPendingRequests.find(failedResult->requestId);
15563 mHdrPlusPendingRequests.erase(req);
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070015564 }
15565
15566 pthread_mutex_lock(&mMutex);
15567
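    // With mMutex held, report every outstanding framework buffer of this request as a
    // buffer error and then drop the HAL's bookkeeping for the request.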
15568 // Find the pending buffers.
15569 auto pendingBuffers = mPendingBuffersMap.mPendingBuffersInRequest.begin();
15570 while (pendingBuffers != mPendingBuffersMap.mPendingBuffersInRequest.end()) {
15571 if (pendingBuffers->frame_number == failedResult->requestId) {
15572 break;
15573 }
15574 pendingBuffers++;
15575 }
15576
15577 // Send out buffer errors for the pending buffers.
15578 if (pendingBuffers != mPendingBuffersMap.mPendingBuffersInRequest.end()) {
15579 std::vector<camera3_stream_buffer_t> streamBuffers;
15580 for (auto &buffer : pendingBuffers->mPendingBufferList) {
15581 // Prepare a stream buffer.
15582 camera3_stream_buffer_t streamBuffer = {};
15583 streamBuffer.stream = buffer.stream;
15584 streamBuffer.buffer = buffer.buffer;
15585 streamBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
15586 streamBuffer.acquire_fence = -1;
15587 streamBuffer.release_fence = -1;
15588
15589 streamBuffers.push_back(streamBuffer);
15590
15591 // Send out error buffer event.
15592 camera3_notify_msg_t notify_msg = {};
15593 notify_msg.type = CAMERA3_MSG_ERROR;
15594 notify_msg.message.error.frame_number = pendingBuffers->frame_number;
15595 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
15596 notify_msg.message.error.error_stream = buffer.stream;
15597
15598 orchestrateNotify(&notify_msg);
15599 }
15600
15601 camera3_capture_result_t result = {};
15602 result.frame_number = pendingBuffers->frame_number;
15603 result.num_output_buffers = streamBuffers.size();
15604 result.output_buffers = &streamBuffers[0];
15605
15606 // Send out result with buffer errors.
15607 orchestrateResult(&result);
15608
15609 // Remove pending buffers.
15610 mPendingBuffersMap.mPendingBuffersInRequest.erase(pendingBuffers);
15611 }
15612
15613 // Remove pending request.
15614 auto halRequest = mPendingRequestsList.begin();
15615 while (halRequest != mPendingRequestsList.end()) {
15616 if (halRequest->frame_number == failedResult->requestId) {
15617 mPendingRequestsList.erase(halRequest);
15618 break;
15619 }
15620 halRequest++;
15621 }
15622
15623 pthread_mutex_unlock(&mMutex);
15624}
15625
15626
15627ShutterDispatcher::ShutterDispatcher(QCamera3HardwareInterface *parent) :
15628 mParent(parent) {}
15629
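// Register a placeholder shutter for a frame. Regular and reprocess requests are
// tracked in separate maps, so each set is dispatched in its own frame-number order.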
15630void ShutterDispatcher::expectShutter(uint32_t frameNumber, bool isReprocess)
15631{
15632 std::lock_guard<std::mutex> lock(mLock);
15633
15634 if (isReprocess) {
15635 mReprocessShutters.emplace(frameNumber, Shutter());
15636 } else {
15637 mShutters.emplace(frameNumber, Shutter());
15638 }
15639}
15640
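// Record the timestamp for a frame's shutter, then send out, in order, every shutter
// that is now ready.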
15641void ShutterDispatcher::markShutterReady(uint32_t frameNumber, uint64_t timestamp)
15642{
15643 std::lock_guard<std::mutex> lock(mLock);
15644
15645 std::map<uint32_t, Shutter> *shutters = nullptr;
15646
15647 // Find the shutter entry.
15648 auto shutter = mShutters.find(frameNumber);
15649 if (shutter == mShutters.end()) {
15650 shutter = mReprocessShutters.find(frameNumber);
15651 if (shutter == mReprocessShutters.end()) {
15652 // The shutter was already sent, or was never expected for this frame.
15653 return;
15654 }
15655 shutters = &mReprocessShutters;
15656 } else {
15657 shutters = &mShutters;
15658 }
15659
15660 // Make this frame's shutter ready.
15661 shutter->second.ready = true;
15662 shutter->second.timestamp = timestamp;
15663
15664 // Iterate through the shutters and send out shutters up to the first one that's not ready yet.
15665 shutter = shutters->begin();
15666 while (shutter != shutters->end()) {
15667 if (!shutter->second.ready) {
15668 // If this shutter is not ready, the following shutters can't be sent.
15669 break;
15670 }
15671
15672 camera3_notify_msg_t msg = {};
15673 msg.type = CAMERA3_MSG_SHUTTER;
15674 msg.message.shutter.frame_number = shutter->first;
15675 msg.message.shutter.timestamp = shutter->second.timestamp;
15676 mParent->orchestrateNotify(&msg);
15677
15678 shutter = shutters->erase(shutter);
15679 }
15680}
15681
15682void ShutterDispatcher::clear(uint32_t frameNumber)
15683{
15684 std::lock_guard<std::mutex> lock(mLock);
15685 mShutters.erase(frameNumber);
15686 mReprocessShutters.erase(frameNumber);
15687}
15688
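// Drop all pending shutters, logging any that were expected but never dispatched.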
15689void ShutterDispatcher::clear()
15690{
15691 std::lock_guard<std::mutex> lock(mLock);
15692
15693 // Log errors for stale shutters.
15694 for (auto &shutter : mShutters) {
15695 ALOGE("%s: stale shutter: frame number %u, ready %d, timestamp %" PRId64,
15696 __FUNCTION__, shutter.first, shutter.second.ready,
15697 shutter.second.timestamp);
15698 }
15699
15700 // Log errors for stale reprocess shutters.
15701 for (auto &shutter : mReprocessShutters) {
15702 ALOGE("%s: stale reprocess shutter: frame number %u, ready %d, timestamp %" PRId64,
15703 __FUNCTION__, shutter.first, shutter.second.ready,
15704 shutter.second.timestamp);
15705 }
15706
15707 mShutters.clear();
15708 mReprocessShutters.clear();
15709}
15710
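// OutputBufferDispatcher mirrors ShutterDispatcher for output buffers: each stream's
// buffers are returned to the framework in frame-number order.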
15711OutputBufferDispatcher::OutputBufferDispatcher(QCamera3HardwareInterface *parent) :
15712 mParent(parent) {}
15713
15714status_t OutputBufferDispatcher::configureStreams(camera3_stream_configuration_t *streamList)
15715{
15716 std::lock_guard<std::mutex> lock(mLock);
15717 mStreamBuffers.clear();
15718 if (!streamList) {
15719 ALOGE("%s: streamList is nullptr.", __FUNCTION__);
15720 return -EINVAL;
15721 }
15722
15723 // Create a "frame-number -> buffer" map for each stream.
15724 for (uint32_t i = 0; i < streamList->num_streams; i++) {
15725 mStreamBuffers.emplace(streamList->streams[i], std::map<uint32_t, Buffer>());
15726 }
15727
15728 return OK;
15729}
15730
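// Register an unready placeholder so markBufferReady() can later deliver this
// stream's buffers in frame-number order.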
15731status_t OutputBufferDispatcher::expectBuffer(uint32_t frameNumber, camera3_stream_t *stream)
15732{
15733 std::lock_guard<std::mutex> lock(mLock);
15734
15735 // Find the "frame-number -> buffer" map for the stream.
15736 auto buffers = mStreamBuffers.find(stream);
15737 if (buffers == mStreamBuffers.end()) {
15738 ALOGE("%s: Stream %p was not configured.", __FUNCTION__, stream);
15739 return -EINVAL;
15740 }
15741
15742 // Create an unready buffer for this frame number.
15743 buffers->second.emplace(frameNumber, Buffer());
15744 return OK;
15745}
15746
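// Attach the returned buffer to its placeholder, then send out, in order, every
// buffer of this stream that is now ready.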
15747void OutputBufferDispatcher::markBufferReady(uint32_t frameNumber,
15748 const camera3_stream_buffer_t &buffer)
15749{
15750 std::lock_guard<std::mutex> lock(mLock);
15751
15752 // Find the frame number -> buffer map for the stream.
15753 auto buffers = mStreamBuffers.find(buffer.stream);
15754 if (buffers == mStreamBuffers.end()) {
15755 ALOGE("%s: Cannot find pending buffers for stream %p.", __FUNCTION__, buffer.stream);
15756 return;
15757 }
15758
15759 // Find the unready buffer for this frame number and mark it ready.
15760 auto pendingBuffer = buffers->second.find(frameNumber);
15761 if (pendingBuffer == buffers->second.end()) {
15762 ALOGE("%s: Cannot find the pending buffer for frame number %u.", __FUNCTION__, frameNumber);
15763 return;
15764 }
15765
15766 pendingBuffer->second.ready = true;
15767 pendingBuffer->second.buffer = buffer;
15768
15769 // Iterate through the buffers and send out buffers up to the first one that's not ready yet.
15770 pendingBuffer = buffers->second.begin();
15771 while (pendingBuffer != buffers->second.end()) {
15772 if (!pendingBuffer->second.ready) {
15773 // If this buffer is not ready, the following buffers can't be sent.
15774 break;
15775 }
15776
15777 camera3_capture_result_t result = {};
15778 result.frame_number = pendingBuffer->first;
15779 result.num_output_buffers = 1;
15780 result.output_buffers = &pendingBuffer->second.buffer;
15781
15782 // Send out a result containing only this ready output buffer.
15783 mParent->orchestrateResult(&result);
15784
15785 pendingBuffer = buffers->second.erase(pendingBuffer);
15786 }
15787}
15788
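// Log and drop any stale per-stream entries; optionally forget the configured
// streams as well.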
15789void OutputBufferDispatcher::clear(bool clearConfiguredStreams)
15790{
15791 std::lock_guard<std::mutex> lock(mLock);
15792
15793 // Log errors for stale buffers.
15794 for (auto &buffers : mStreamBuffers) {
15795 for (auto &buffer : buffers.second) {
15796 ALOGE("%s: stale buffer: stream %p, frame number %u, ready %d",
15797 __FUNCTION__, buffers.first, buffer.first, buffer.second.ready);
15798 }
15799 buffers.second.clear();
15800 }
15801
15802 if (clearConfiguredStreams) {
15803 mStreamBuffers.clear();
15804 }
15805}
15806
15807}; //end namespace qcamera