Merge "Some fixes for saved surfaces"
diff --git a/core/java/android/content/Intent.java b/core/java/android/content/Intent.java
index ee469da..f486bd4 100644
--- a/core/java/android/content/Intent.java
+++ b/core/java/android/content/Intent.java
@@ -2575,16 +2575,6 @@
@SdkConstant(SdkConstantType.BROADCAST_INTENT_ACTION)
public static final String ACTION_MEDIA_BUTTON = "android.intent.action.MEDIA_BUTTON";
- /**
- * Broadcast Action: The "Picture-in-picture (PIP) Button" was pressed.
- * Includes a single extra field, {@link #EXTRA_KEY_EVENT}, containing the key event that
- * caused the broadcast.
- * @hide
- */
- @SdkConstant(SdkConstantType.BROADCAST_INTENT_ACTION)
- public static final String ACTION_PICTURE_IN_PICTURE_BUTTON =
- "android.intent.action.PICTURE_IN_PICTURE_BUTTON";
-
/**
* Broadcast Action: The "Camera Button" was pressed. Includes a single
* extra field, {@link #EXTRA_KEY_EVENT}, containing the key event that
diff --git a/core/java/com/android/internal/statusbar/IStatusBar.aidl b/core/java/com/android/internal/statusbar/IStatusBar.aidl
index 632285c..3854923 100644
--- a/core/java/com/android/internal/statusbar/IStatusBar.aidl
+++ b/core/java/com/android/internal/statusbar/IStatusBar.aidl
@@ -78,5 +78,14 @@
* @param source the identifier for the gesture, see {@link StatusBarManager}
*/
void onCameraLaunchGestureDetected(int source);
-}
+ /**
+ * Request picture-in-picture.
+ *
+ * <p>
+     * This is called when a user presses the picture-in-picture key or an equivalent.
+     * A TV device may start picture-in-picture from the foreground activity if there is
+     * no current picture-in-picture; otherwise, the picture-in-picture overlay menu is shown.
+ */
+ void requestTvPictureInPicture();
+}
diff --git a/core/java/com/android/internal/statusbar/IStatusBarService.aidl b/core/java/com/android/internal/statusbar/IStatusBarService.aidl
index 32de45c..a6fd9f9 100644
--- a/core/java/com/android/internal/statusbar/IStatusBarService.aidl
+++ b/core/java/com/android/internal/statusbar/IStatusBarService.aidl
@@ -93,4 +93,14 @@
void appTransitionStarting(long statusBarAnimationsStartTime, long statusBarAnimationsDuration);
void startAssist(in Bundle args);
+
+ /**
+ * Request picture-in-picture.
+ *
+ * <p>
+     * This is called when a user presses the picture-in-picture key or an equivalent.
+     * A TV device may start picture-in-picture from the foreground activity if there is
+     * no current picture-in-picture; otherwise, the picture-in-picture overlay menu is shown.
+ */
+ void requestTvPictureInPicture();
}
diff --git a/core/res/AndroidManifest.xml b/core/res/AndroidManifest.xml
index 1c3db10..a04146f 100644
--- a/core/res/AndroidManifest.xml
+++ b/core/res/AndroidManifest.xml
@@ -226,8 +226,6 @@
<protected-broadcast android:name="android.intent.action.MEDIA_UNMOUNTABLE" />
<protected-broadcast android:name="android.intent.action.MEDIA_EJECT" />
- <protected-broadcast android:name="android.intent.action.PICTURE_IN_PICTURE_BUTTON" />
-
<protected-broadcast android:name="android.net.conn.CAPTIVE_PORTAL" />
<protected-broadcast android:name="android.net.conn.CONNECTIVITY_CHANGE" />
<!-- @deprecated. Only {@link android.net.ConnectivityManager.CONNECTIVITY_ACTION} is sent. -->
@@ -724,7 +722,7 @@
android:protectionLevel="dangerous" />
<!-- Allows an application to write (but not read) the user's
- contacts data.
+ call log data.
<p class="note"><strong>Note:</strong> If your app uses the
{@link #WRITE_CONTACTS} permission and <em>both</em> your <a
href="{@docRoot}guide/topics/manifest/uses-sdk-element.html#min">{@code
diff --git a/media/tests/MediaFrameworkTest/Android.mk b/media/tests/MediaFrameworkTest/Android.mk
index 42da48d..6b9fdb6 100644
--- a/media/tests/MediaFrameworkTest/Android.mk
+++ b/media/tests/MediaFrameworkTest/Android.mk
@@ -7,7 +7,11 @@
LOCAL_JAVA_LIBRARIES := android.test.runner
-LOCAL_STATIC_JAVA_LIBRARIES := easymocklib mockito-target core-tests android-support-test
+LOCAL_STATIC_JAVA_LIBRARIES := easymocklib \
+ mockito-target \
+ core-tests \
+ android-support-test \
+ android-ex-camera2
LOCAL_PACKAGE_NAME := mediaframeworktest
diff --git a/media/tests/MediaFrameworkTest/AndroidManifest.xml b/media/tests/MediaFrameworkTest/AndroidManifest.xml
index c62199f..3185ea2 100644
--- a/media/tests/MediaFrameworkTest/AndroidManifest.xml
+++ b/media/tests/MediaFrameworkTest/AndroidManifest.xml
@@ -34,50 +34,61 @@
<category android:name="android.intent.category.LAUNCHER"/>
</intent-filter>
</activity>
+ <activity android:label="Camera2CtsActivity"
+ android:name="Camera2SurfaceViewActivity"
+ android:screenOrientation="landscape"
+ android:configChanges="keyboardHidden|orientation|screenSize">
+ </activity>
</application>
<instrumentation android:name=".CameraStressTestRunner"
- android:targetPackage="com.android.mediaframeworktest"
- android:label="Camera stress tests InstrumentationRunner">
+ android:targetPackage="com.android.mediaframeworktest"
+ android:label="Camera stress tests InstrumentationRunner">
+ </instrumentation>
+
+ <instrumentation android:name=".Camera2InstrumentationTestRunner"
+ android:targetPackage="com.android.mediaframeworktest"
+ android:label="Camera2 InstrumentationTestRunner">
</instrumentation>
<instrumentation android:name=".MediaFrameworkTestRunner"
- android:targetPackage="com.android.mediaframeworktest"
- android:label="MediaFramework tests InstrumentationRunner">
- </instrumentation>
+ android:targetPackage="com.android.mediaframeworktest"
+ android:label="MediaFramework tests InstrumentationRunner">
+ </instrumentation>
- <instrumentation android:name=".MediaFrameworkPerfTestRunner"
- android:targetPackage="com.android.mediaframeworktest"
- android:label="MediaFramework Performance tests InstrumentationRunner">
- </instrumentation>
+ <instrumentation android:name=".MediaFrameworkPerfTestRunner"
+ android:targetPackage="com.android.mediaframeworktest"
+ android:label="MediaFramework Performance tests InstrumentationRunner">
+ </instrumentation>
- <instrumentation android:name=".MediaFrameworkUnitTestRunner"
- android:targetPackage="com.android.mediaframeworktest"
- android:label="MediaFramework unit tests InstrumentationRunner">
- </instrumentation>
+ <instrumentation android:name=".MediaFrameworkUnitTestRunner"
+ android:targetPackage="com.android.mediaframeworktest"
+ android:label="MediaFramework unit tests InstrumentationRunner">
+ </instrumentation>
- <instrumentation android:name=".MediaRecorderStressTestRunner"
- android:targetPackage="com.android.mediaframeworktest"
- android:label="MediaRecorder stress tests InstrumentationRunner">
- </instrumentation>
+ <instrumentation android:name=".MediaRecorderStressTestRunner"
+ android:targetPackage="com.android.mediaframeworktest"
+ android:label="MediaRecorder stress tests InstrumentationRunner">
+ </instrumentation>
- <instrumentation android:name=".MediaFrameworkPowerTestRunner"
- android:targetPackage="com.android.mediaframeworktest"
- android:label="Media Power tests InstrumentationRunner">
- </instrumentation>
+ <instrumentation android:name=".MediaFrameworkPowerTestRunner"
+ android:targetPackage="com.android.mediaframeworktest"
+ android:label="Media Power tests InstrumentationRunner">
+ </instrumentation>
- <instrumentation android:name=".MediaPlayerStressTestRunner"
- android:targetPackage="com.android.mediaframeworktest"
- android:label="Media Power tests InstrumentationRunner">
- </instrumentation>
+ <instrumentation android:name=".MediaPlayerStressTestRunner"
+ android:targetPackage="com.android.mediaframeworktest"
+ android:label="Media Power tests InstrumentationRunner">
+ </instrumentation>
<instrumentation android:name=".MediaFrameworkIntegrationTestRunner"
- android:targetPackage="com.android.mediaframeworktest"
- android:label="MediaFramework integration tests InstrumentationRunner">
- </instrumentation>
+ android:targetPackage="com.android.mediaframeworktest"
+ android:label="MediaFramework integration tests InstrumentationRunner">
+ </instrumentation>
- <instrumentation android:name="android.support.test.runner.AndroidJUnitRunner"
- android:targetPackage="com.android.mediaframeworktest"
- android:label="media framework tests"/>
+ <instrumentation android:name="android.support.test.runner.AndroidJUnitRunner"
+ android:targetPackage="com.android.mediaframeworktest"
+ android:label="media framework tests">
+ </instrumentation>
</manifest>
diff --git a/media/tests/MediaFrameworkTest/res/layout/surface_view_2.xml b/media/tests/MediaFrameworkTest/res/layout/surface_view_2.xml
new file mode 100644
index 0000000..fe53c71
--- /dev/null
+++ b/media/tests/MediaFrameworkTest/res/layout/surface_view_2.xml
@@ -0,0 +1,28 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+ Copyright (C) 2014 The Android Open Source Project
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+
+<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
+ android:orientation="horizontal"
+ android:layout_width="match_parent"
+ android:layout_height="match_parent">
+
+ <SurfaceView
+ android:id="@+id/surface_view"
+ android:layout_width="320dp"
+ android:layout_height="240dp"/>
+
+</LinearLayout>
diff --git a/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/Camera2InstrumentationTestRunner.java b/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/Camera2InstrumentationTestRunner.java
new file mode 100644
index 0000000..d9228dc
--- /dev/null
+++ b/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/Camera2InstrumentationTestRunner.java
@@ -0,0 +1,50 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.mediaframeworktest;
+
+import com.android.mediaframeworktest.stress.Camera2CaptureRequestTest;
+import com.android.mediaframeworktest.stress.Camera2RecordingTest;
+import com.android.mediaframeworktest.stress.Camera2ReprocessCaptureTest;
+import com.android.mediaframeworktest.stress.Camera2StillCaptureTest;
+
+import junit.framework.TestSuite;
+
+import android.test.InstrumentationTestRunner;
+import android.test.InstrumentationTestSuite;
+
+/**
+ * This is the Camera2 framework test runner; it executes the test classes below when no
+ * target class is specified in the meta-data or command-line argument parameters.
+ */
+public class Camera2InstrumentationTestRunner extends InstrumentationTestRunner {
+
+ @Override
+ public TestSuite getAllTests() {
+ TestSuite suite = new InstrumentationTestSuite(this);
+ // Note the following test cases are compatible with Camera API2
+ suite.addTestSuite(Camera2StillCaptureTest.class);
+ suite.addTestSuite(Camera2RecordingTest.class);
+ suite.addTestSuite(Camera2ReprocessCaptureTest.class);
+ suite.addTestSuite(Camera2CaptureRequestTest.class);
+ return suite;
+ }
+
+ @Override
+ public ClassLoader getLoader() {
+ return Camera2InstrumentationTestRunner.class.getClassLoader();
+ }
+}
diff --git a/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/Camera2SurfaceViewActivity.java b/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/Camera2SurfaceViewActivity.java
new file mode 100644
index 0000000..c3dd842
--- /dev/null
+++ b/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/Camera2SurfaceViewActivity.java
@@ -0,0 +1,114 @@
+/*
+ * Copyright 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.mediaframeworktest;
+
+import android.app.Activity;
+import android.os.Bundle;
+import android.os.ConditionVariable;
+import android.os.SystemClock;
+import android.util.Log;
+import android.view.SurfaceHolder;
+import android.view.SurfaceView;
+import android.view.WindowManager;
+
+/**
+ * (non-Javadoc)
+ * @see android.hardware.camera2.cts.Camera2SurfaceViewCtsActivity
+ */
+public class Camera2SurfaceViewActivity extends Activity implements SurfaceHolder.Callback {
+ private static final String TAG = "SurfaceViewActivity";
+ private final ConditionVariable surfaceChangedDone = new ConditionVariable();
+
+ private SurfaceView mSurfaceView;
+ private int currentWidth = 0;
+ private int currentHeight = 0;
+ private final Object sizeLock = new Object();
+
+ @Override
+ protected void onCreate(Bundle savedInstanceState) {
+ super.onCreate(savedInstanceState);
+
+ setContentView(R.layout.surface_view_2);
+ mSurfaceView = (SurfaceView) findViewById(R.id.surface_view);
+ mSurfaceView.getHolder().addCallback(this);
+
+ //Acquire the full wake lock to keep the device up
+ getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
+ }
+
+ public SurfaceView getSurfaceView() {
+ return mSurfaceView;
+ }
+
+ public boolean waitForSurfaceSizeChanged(int timeOutMs, int expectWidth, int expectHeight) {
+ if (timeOutMs <= 0 || expectWidth <= 0 || expectHeight <= 0) {
+ throw new IllegalArgumentException(
+ String.format(
+ "timeout(%d), expectWidth(%d), and expectHeight(%d) " +
+ "should all be positive numbers",
+ timeOutMs, expectWidth, expectHeight));
+ }
+
+ synchronized(sizeLock) {
+ if (expectWidth == currentWidth && expectHeight == currentHeight) {
+ return true;
+ }
+ }
+
+ int waitTimeMs = timeOutMs;
+ boolean changeSucceeded = false;
+ while (!changeSucceeded && waitTimeMs > 0) {
+ long startTimeMs = SystemClock.elapsedRealtime();
+ changeSucceeded = surfaceChangedDone.block(waitTimeMs);
+ if (!changeSucceeded) {
+ Log.e(TAG, "Wait for surface change timed out after " + timeOutMs + " ms");
+ return changeSucceeded;
+ } else {
+ // Get a surface change callback, need to check if the size is expected.
+ surfaceChangedDone.close();
+ if (currentWidth == expectWidth && currentHeight == expectHeight) {
+ return changeSucceeded;
+ }
+ // Do a further iteration surface change check as surfaceChanged could be called
+ // again.
+ changeSucceeded = false;
+ }
+ waitTimeMs -= (SystemClock.elapsedRealtime() - startTimeMs);
+ }
+
+ // Couldn't get expected surface size change.
+ return false;
+ }
+
+ @Override
+ public void surfaceCreated(SurfaceHolder holder) {
+ }
+
+ @Override
+ public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
+ Log.i(TAG, "Surface Changed to: " + width + "x" + height);
+ synchronized (sizeLock) {
+ currentWidth = width;
+ currentHeight = height;
+ }
+ surfaceChangedDone.open();
+ }
+
+ @Override
+ public void surfaceDestroyed(SurfaceHolder holder) {
+ }
+}
diff --git a/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/Camera2SurfaceViewTestCase.java b/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/Camera2SurfaceViewTestCase.java
new file mode 100644
index 0000000..74da2c9
--- /dev/null
+++ b/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/Camera2SurfaceViewTestCase.java
@@ -0,0 +1,804 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.mediaframeworktest;
+
+import com.android.ex.camera2.blocking.BlockingSessionCallback;
+import com.android.ex.camera2.blocking.BlockingStateCallback;
+import com.android.ex.camera2.exceptions.TimeoutRuntimeException;
+import com.android.mediaframeworktest.helpers.CameraErrorCollector;
+import com.android.mediaframeworktest.helpers.CameraTestResultPrinter;
+import com.android.mediaframeworktest.helpers.CameraTestUtils;
+import com.android.mediaframeworktest.helpers.CameraTestUtils.SimpleCaptureCallback;
+import com.android.mediaframeworktest.helpers.StaticMetadata;
+import com.android.mediaframeworktest.helpers.StaticMetadata.CheckLevel;
+
+import android.content.Context;
+import android.graphics.ImageFormat;
+import android.hardware.camera2.CameraAccessException;
+import android.hardware.camera2.CameraCaptureSession;
+import android.hardware.camera2.CameraCaptureSession.CaptureCallback;
+import android.hardware.camera2.CameraCharacteristics;
+import android.hardware.camera2.CameraDevice;
+import android.hardware.camera2.CameraManager;
+import android.hardware.camera2.CameraMetadata;
+import android.hardware.camera2.CaptureRequest;
+import android.hardware.camera2.CaptureResult;
+import android.media.ImageReader;
+import android.os.Bundle;
+import android.os.Environment;
+import android.os.Handler;
+import android.os.HandlerThread;
+import android.os.Looper;
+import android.test.ActivityInstrumentationTestCase2;
+import android.test.InstrumentationTestRunner;
+import android.util.Log;
+import android.util.Range;
+import android.util.Size;
+import android.view.Surface;
+import android.view.SurfaceHolder;
+import android.view.WindowManager;
+
+import java.text.NumberFormat;
+import java.text.ParseException;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+
+import static com.android.ex.camera2.blocking.BlockingStateCallback.STATE_CLOSED;
+import static com.android.mediaframeworktest.helpers.CameraTestUtils.CAMERA_CLOSE_TIMEOUT_MS;
+import static com.android.mediaframeworktest.helpers.CameraTestUtils.MAX_READER_IMAGES;
+import static com.android.mediaframeworktest.helpers.CameraTestUtils.PREVIEW_SIZE_BOUND;
+import static com.android.mediaframeworktest.helpers.CameraTestUtils.configureCameraSession;
+import static com.android.mediaframeworktest.helpers.CameraTestUtils.getPreviewSizeBound;
+import static com.android.mediaframeworktest.helpers.CameraTestUtils.getSupportedPreviewSizes;
+import static com.android.mediaframeworktest.helpers.CameraTestUtils.getSupportedStillSizes;
+import static com.android.mediaframeworktest.helpers.CameraTestUtils.getSupportedVideoSizes;
+import static com.android.mediaframeworktest.helpers.CameraTestUtils.makeImageReader;
+
+/**
+ * Camera2 Preview test case base class by using SurfaceView as rendering target.
+ *
+ * <p>This class encapsulates the SurfaceView based preview common functionalities.
+ * The setup and teardown of CameraManager, test HandlerThread, Activity, Camera IDs
+ * and CameraStateCallback are handled in this class. Some basic preview related utility
+ * functions are provided to facilitate the derived preview-based test classes.
+ * </p>
+ */
+/**
+ * (non-Javadoc)
+ * @see android.hardware.camera2.cts.Camera2SurfaceViewTestCase
+ */
+public class Camera2SurfaceViewTestCase extends
+ ActivityInstrumentationTestCase2<Camera2SurfaceViewActivity> {
+
+ private static final String TAG = "SurfaceViewTestCase";
+ private static final boolean VERBOSE = Log.isLoggable(TAG, Log.VERBOSE);
+ private static final int WAIT_FOR_SURFACE_CHANGE_TIMEOUT_MS = 1000;
+
+ // Instrumentation arguments
+ protected static final String ARG_KEY_REPEAT = "repeat";
+ protected static final String ARG_KEY_WAIT_INTERVAL_MS = "waitIntervalMs";
+ protected static final String ARG_KEY_RESULT_TO_FILE = "resultToFile";
+
+ // TODO: Use internal storage for this to make sure the file is only visible to test.
+ protected static final String DEBUG_FILE_NAME_BASE =
+ Environment.getExternalStorageDirectory().getPath();
+ protected static final int WAIT_FOR_RESULT_TIMEOUT_MS = 3000;
+ protected static final float FRAME_DURATION_ERROR_MARGIN = 0.005f; // 0.5 percent error margin.
+ protected static final int NUM_RESULTS_WAIT_TIMEOUT = 100;
+ protected static final int NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY = 8;
+ protected static final int MIN_FRAME_DURATION_ERROR_MARGIN = 100; // ns
+
+ protected Context mContext;
+ protected CameraManager mCameraManager;
+ protected String[] mCameraIds;
+ protected HandlerThread mHandlerThread;
+ protected Handler mHandler;
+ protected BlockingStateCallback mCameraListener;
+ protected BlockingSessionCallback mSessionListener;
+ protected CameraErrorCollector mCollector;
+ // Per device fields:
+ protected StaticMetadata mStaticInfo;
+ protected CameraDevice mCamera;
+ protected CameraCaptureSession mSession;
+ protected ImageReader mReader;
+ protected Surface mReaderSurface;
+ protected Surface mPreviewSurface;
+ protected Size mPreviewSize;
+ protected List<Size> mOrderedPreviewSizes; // In descending order.
+ protected List<Size> mOrderedVideoSizes; // In descending order.
+ protected List<Size> mOrderedStillSizes; // In descending order.
+ protected HashMap<Size, Long> mMinPreviewFrameDurationMap;
+
+ protected WindowManager mWindowManager;
+
+ // Repeat tests a given times. Default to 1.
+ protected int mRepeat = 1;
+ // The interval between test iterations used for stress test.
+ protected long mTestWaitIntervalMs = 1 * 1000; // 1 sec
+ protected boolean mWriteToFile = true;
+ protected CameraTestResultPrinter mResultPrinter;
+
+
+ public Camera2SurfaceViewTestCase() {
+ super(Camera2SurfaceViewActivity.class);
+ }
+
+ @Override
+ protected void setUp() throws Exception {
+ /**
+ * Set up the camera preview required environments, including activity,
+ * CameraManager, HandlerThread, Camera IDs, and CameraStateCallback.
+ */
+ super.setUp();
+ mContext = getActivity();
+ /**
+ * Workaround for mockito and JB-MR2 incompatibility
+ *
+ * Avoid java.lang.IllegalArgumentException: dexcache == null
+ * https://code.google.com/p/dexmaker/issues/detail?id=2
+ */
+ System.setProperty("dexmaker.dexcache", mContext.getCacheDir().toString());
+ mCameraManager = (CameraManager) mContext.getSystemService(Context.CAMERA_SERVICE);
+ assertNotNull("Unable to get CameraManager", mCameraManager);
+ mCameraIds = mCameraManager.getCameraIdList();
+ assertNotNull("Unable to get camera ids", mCameraIds);
+ mHandlerThread = new HandlerThread(TAG);
+ mHandlerThread.start();
+ mHandler = new Handler(mHandlerThread.getLooper());
+ mCameraListener = new BlockingStateCallback();
+ mCollector = new CameraErrorCollector();
+
+ mWindowManager = (WindowManager) mContext.getSystemService(Context.WINDOW_SERVICE);
+
+ mRepeat = getArgumentsAsNumber(ARG_KEY_REPEAT, 1).intValue();
+ mTestWaitIntervalMs = getArgumentsAsNumber(ARG_KEY_WAIT_INTERVAL_MS, 1000).longValue();
+ mWriteToFile = getArgumentsAsBoolean(ARG_KEY_RESULT_TO_FILE, true);
+ Log.i(TAG, "Argument: repeat count=" + mRepeat);
+ Log.i(TAG, "Argument: interval (ms)=" + mTestWaitIntervalMs);
+ Log.i(TAG, "Argument: result to file=" + (mWriteToFile ? "true" : "false"));
+ mResultPrinter = new CameraTestResultPrinter(getInstrumentation(), mWriteToFile);
+ }
+
+ @Override
+ protected void tearDown() throws Exception {
+ // Teardown the camera preview required environments.
+ mHandlerThread.quitSafely();
+ mHandler = null;
+ mCameraListener = null;
+
+ try {
+ mCollector.verify();
+ } catch (Throwable e) {
+ // When new Exception(e) is used, exception info will be printed twice.
+ throw new Exception(e.getMessage());
+ } finally {
+ super.tearDown();
+ }
+ }
+
+ /**
+ * Start camera preview by using the given request, preview size and capture
+ * listener.
+ * <p>
+ * If preview is already started, calling this function will stop the
+ * current preview stream and start a new preview stream with given
+ * parameters. No need to call stopPreview between two startPreview calls.
+ * </p>
+ *
+ * @param request The request builder used to start the preview.
+ * @param previewSz The size of the camera device output preview stream.
+ * @param listener The callbacks the camera device will notify when preview
+ * capture is available.
+ */
+ protected void startPreview(CaptureRequest.Builder request, Size previewSz,
+ CaptureCallback listener) throws Exception {
+ // Update preview size.
+ updatePreviewSurface(previewSz);
+ if (VERBOSE) {
+ Log.v(TAG, "start preview with size " + mPreviewSize.toString());
+ }
+
+ configurePreviewOutput(request);
+
+ mSession.setRepeatingRequest(request.build(), listener, mHandler);
+ }
+
+ /**
+ * Configure the preview output stream.
+ *
+ * @param request The request to be configured with preview surface
+ */
+ protected void configurePreviewOutput(CaptureRequest.Builder request)
+ throws CameraAccessException {
+ List<Surface> outputSurfaces = new ArrayList<Surface>(/*capacity*/1);
+ outputSurfaces.add(mPreviewSurface);
+ mSessionListener = new BlockingSessionCallback();
+ mSession = configureCameraSession(mCamera, outputSurfaces, mSessionListener, mHandler);
+
+ request.addTarget(mPreviewSurface);
+ }
+
+ /**
+ * Create a {@link CaptureRequest#Builder} and add the default preview surface.
+ *
+ * @return The {@link CaptureRequest#Builder} to be created
+ * @throws CameraAccessException When create capture request from camera fails
+ */
+ protected CaptureRequest.Builder createRequestForPreview() throws CameraAccessException {
+ if (mPreviewSurface == null) {
+ throw new IllegalStateException(
+ "Preview surface is not set yet, call updatePreviewSurface or startPreview"
+ + "first to set the preview surface properly.");
+ }
+ CaptureRequest.Builder requestBuilder =
+ mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
+ requestBuilder.addTarget(mPreviewSurface);
+ return requestBuilder;
+ }
+
+ /**
+ * Stop preview for current camera device.
+ */
+ protected void stopPreview() throws Exception {
+ if (VERBOSE) Log.v(TAG, "Stopping preview and waiting for idle");
+ // Stop repeat, wait for captures to complete, and disconnect from surfaces
+ mSession.close();
+ }
+
+ /**
+ * Setup still (JPEG) capture configuration and start preview.
+ * <p>
+ * The default max number of image is set to image reader.
+ * </p>
+ *
+ * @param previewRequest The capture request to be used for preview
+ * @param stillRequest The capture request to be used for still capture
+ * @param previewSz Preview size
+ * @param stillSz The still capture size
+ * @param resultListener Capture result listener
+ * @param imageListener The still capture image listener
+ */
+ protected void prepareStillCaptureAndStartPreview(CaptureRequest.Builder previewRequest,
+ CaptureRequest.Builder stillRequest, Size previewSz, Size stillSz,
+ CaptureCallback resultListener,
+ ImageReader.OnImageAvailableListener imageListener) throws Exception {
+ prepareCaptureAndStartPreview(previewRequest, stillRequest, previewSz, stillSz,
+ ImageFormat.JPEG, resultListener, MAX_READER_IMAGES, imageListener);
+ }
+
+ /**
+ * Setup still (JPEG) capture configuration and start preview.
+ *
+ * @param previewRequest The capture request to be used for preview
+ * @param stillRequest The capture request to be used for still capture
+ * @param previewSz Preview size
+ * @param stillSz The still capture size
+ * @param resultListener Capture result listener
+ * @param maxNumImages The max number of images set to the image reader
+ * @param imageListener The still capture image listener
+ */
+ protected void prepareStillCaptureAndStartPreview(CaptureRequest.Builder previewRequest,
+ CaptureRequest.Builder stillRequest, Size previewSz, Size stillSz,
+ CaptureCallback resultListener, int maxNumImages,
+ ImageReader.OnImageAvailableListener imageListener) throws Exception {
+ prepareCaptureAndStartPreview(previewRequest, stillRequest, previewSz, stillSz,
+ ImageFormat.JPEG, resultListener, maxNumImages, imageListener);
+ }
+
+ /**
+ * Setup raw capture configuration and start preview.
+ *
+ * <p>
+ * The default max number of image is set to image reader.
+ * </p>
+ *
+ * @param previewRequest The capture request to be used for preview
+ * @param rawRequest The capture request to be used for raw capture
+ * @param previewSz Preview size
+ * @param rawSz The raw capture size
+ * @param resultListener Capture result listener
+ * @param imageListener The raw capture image listener
+ */
+ protected void prepareRawCaptureAndStartPreview(CaptureRequest.Builder previewRequest,
+ CaptureRequest.Builder rawRequest, Size previewSz, Size rawSz,
+ CaptureCallback resultListener,
+ ImageReader.OnImageAvailableListener imageListener) throws Exception {
+ prepareCaptureAndStartPreview(previewRequest, rawRequest, previewSz, rawSz,
+ ImageFormat.RAW_SENSOR, resultListener, MAX_READER_IMAGES, imageListener);
+ }
+
+ /**
+ * Wait for expected result key value available in a certain number of results.
+ *
+ * <p>
+ * Check the result immediately if numFramesWait is 0.
+ * </p>
+ *
+ * @param listener The capture listener to get capture result
+ * @param resultKey The capture result key associated with the result value
+ * @param expectedValue The result value need to be waited for
+ * @param numResultsWait Number of frame to wait before times out
+ * @throws TimeoutRuntimeException If more than numResultsWait results are
+ * seen before the result matching myRequest arrives, or each individual wait
+ * for result times out after {@value #WAIT_FOR_RESULT_TIMEOUT_MS}ms.
+ */
+ protected static <T> void waitForResultValue(SimpleCaptureCallback listener,
+ CaptureResult.Key<T> resultKey,
+ T expectedValue, int numResultsWait) {
+ List<T> expectedValues = new ArrayList<T>();
+ expectedValues.add(expectedValue);
+ waitForAnyResultValue(listener, resultKey, expectedValues, numResultsWait);
+ }
+
+    /**
+     * Wait for any expected result key values available in a certain number of results.
+     *
+     * <p>
+     * Check the result immediately if numFramesWait is 0.
+     * </p>
+     *
+     * @param listener The capture listener to get capture result.
+     * @param resultKey The capture result key associated with the result value.
+     * @param expectedValues The list of result value need to be waited for,
+     * return immediately if the list is empty.
+     * @param numResultsWait Number of frame to wait before times out.
+     * @throws TimeoutRuntimeException If more than numResultsWait results are
+     * seen before the result matching myRequest arrives, or each individual wait
+     * for result times out after {@value #WAIT_FOR_RESULT_TIMEOUT_MS}ms.
+     */
+    protected static <T> void waitForAnyResultValue(SimpleCaptureCallback listener,
+            CaptureResult.Key<T> resultKey,
+            List<T> expectedValues, int numResultsWait) {
+        if (numResultsWait < 0 || listener == null || expectedValues == null) {
+            throw new IllegalArgumentException(
+                    "Input must be non-negative number and listener/expectedValues "
+                    + "must be non-null");
+        }
+
+        int i = 0;
+        CaptureResult result;
+        do {
+            result = listener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS);
+            T value = result.get(resultKey);
+            // The key may be absent from a given result; skip it instead of
+            // hitting an NPE in toString()/equals() below.
+            if (value == null) {
+                continue;
+            }
+            if (VERBOSE) {
+                // Log once per result, not once per candidate value.
+                Log.v(TAG, "Current result value for key " + resultKey.getName() + " is: "
+                        + value.toString());
+            }
+            for (T expectedValue : expectedValues) {
+                if (value.equals(expectedValue)) {
+                    return;
+                }
+            }
+        } while (i++ < numResultsWait);
+
+        throw new TimeoutRuntimeException(
+                "Unable to get the expected result value " + expectedValues + " for key " +
+                resultKey.getName() + " after waiting for " + numResultsWait + " results");
+    }
+
+    /**
+     * Submit a capture once, then submit additional captures in order to ensure that
+     * the camera will be synchronized.
+     *
+     * <p>
+     * The additional capture count is determined by android.sync.maxLatency (or
+     * a fixed {@value #NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY} captures if maxLatency is unknown).
+     * </p>
+     *
+     * <p>Returns the number of captures that were submitted (at least 1), which is useful
+     * with {@link #waitForNumResults}.</p>
+     *
+     * @param request capture request to forward to {@link CameraDevice#capture}
+     * @param listener request listener to forward to {@link CameraDevice#capture}
+     * @param handler handler to forward to {@link CameraDevice#capture}
+     *
+     * @return the number of captures that were submitted
+     *
+     * @throws CameraAccessException if capturing failed
+     */
+    protected int captureRequestsSynchronized(
+            CaptureRequest request, CaptureCallback listener, Handler handler)
+            throws CameraAccessException {
+        // Convenience overload: submit the request a minimum of one time.
+        return captureRequestsSynchronized(request, /*count*/1, listener, handler);
+    }
+
+    /**
+     * Submit a capture {@code count} times, then submit additional captures in order to ensure that
+     * the camera will be synchronized.
+     *
+     * <p>
+     * The additional capture count is determined by android.sync.maxLatency (or
+     * a fixed {@value #NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY} captures if maxLatency is unknown).
+     * </p>
+     *
+     * <p>Returns the number of captures that were submitted (at least 1), which is useful
+     * with {@link #waitForNumResults}.</p>
+     *
+     * @param request capture request to forward to {@link CameraDevice#capture}
+     * @param count the number of times to submit the request (minimally), must be at least 1
+     * @param listener request listener to forward to {@link CameraDevice#capture}
+     * @param handler handler to forward to {@link CameraDevice#capture}
+     *
+     * @return the number of captures that were submitted
+     *
+     * @throws IllegalArgumentException if {@code count} was not at least 1
+     * @throws CameraAccessException if capturing failed
+     */
+    protected int captureRequestsSynchronized(
+            CaptureRequest request, int count, CaptureCallback listener, Handler handler)
+            throws CameraAccessException {
+        if (count < 1) {
+            throw new IllegalArgumentException("count must be positive");
+        }
+
+        // Pad with android.sync.maxLatency extra captures so the last result is synchronized.
+        int maxLatency = mStaticInfo.getSyncMaxLatency();
+        if (maxLatency == CameraMetadata.SYNC_MAX_LATENCY_UNKNOWN) {
+            maxLatency = NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY;
+        }
+
+        assertTrue("maxLatency is non-negative", maxLatency >= 0);
+
+        int numCaptures = maxLatency + count;
+
+        for (int i = 0; i < numCaptures; ++i) {
+            mSession.capture(request, listener, handler);
+        }
+
+        return numCaptures;
+    }
+
+    /**
+     * Wait for numResultWait frames
+     *
+     * @param resultListener The capture listener to get capture result back.
+     * @param numResultsWait Number of frame to wait; may be 0, in which case no wait
+     * happens and {@code null} is returned.
+     *
+     * @return the last result, or {@code null} if there was none
+     */
+    protected static CaptureResult waitForNumResults(SimpleCaptureCallback resultListener,
+            int numResultsWait) {
+        if (numResultsWait < 0 || resultListener == null) {
+            // Message now matches the actual check: zero is allowed, negatives are not.
+            throw new IllegalArgumentException(
+                    "Input must be non-negative number and listener must be non-null");
+        }
+
+        CaptureResult result = null;
+        for (int i = 0; i < numResultsWait; i++) {
+            result = resultListener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS);
+        }
+
+        return result;
+    }
+
+    /**
+     * Wait for enough results for settings to be applied
+     *
+     * @param resultListener The capture listener to get capture result back.
+     * @param numResultWaitForUnknownLatency Number of frame to wait if camera device latency is
+     * unknown.
+     */
+    protected void waitForSettingsApplied(SimpleCaptureCallback resultListener,
+            int numResultWaitForUnknownLatency) {
+        int maxLatency = mStaticInfo.getSyncMaxLatency();
+        boolean latencyUnknown = (maxLatency == CameraMetadata.SYNC_MAX_LATENCY_UNKNOWN);
+        // Drain enough results for new settings to take effect before returning.
+        waitForNumResults(resultListener,
+                latencyUnknown ? numResultWaitForUnknownLatency : maxLatency);
+    }
+
+
+    /**
+     * Wait for AE to be stabilized before capture: CONVERGED or FLASH_REQUIRED.
+     *
+     * <p>Waits for {@code android.sync.maxLatency} number of results first, to make sure
+     * that the result is synchronized (or {@code numResultWaitForUnknownLatency} if the latency
+     * is unknown.</p>
+     *
+     * <p>This is a no-op for {@code LEGACY} devices since they don't report
+     * the {@code aeState} result.</p>
+     *
+     * @param resultListener The capture listener to get capture result back.
+     * @param numResultWaitForUnknownLatency Number of frame to wait if camera device latency is
+     * unknown.
+     */
+    protected void waitForAeStable(SimpleCaptureCallback resultListener,
+            int numResultWaitForUnknownLatency) {
+        waitForSettingsApplied(resultListener, numResultWaitForUnknownLatency);
+
+        if (!mStaticInfo.isHardwareLevelLimitedOrBetter()) {
+            // No-op for legacy devices: they do not report aeState.
+            return;
+        }
+        List<Integer> expectedAeStates = new ArrayList<Integer>();
+        // Autoboxing instead of the deprecated new Integer(int) constructor.
+        expectedAeStates.add(CaptureResult.CONTROL_AE_STATE_CONVERGED);
+        expectedAeStates.add(CaptureResult.CONTROL_AE_STATE_FLASH_REQUIRED);
+        waitForAnyResultValue(resultListener, CaptureResult.CONTROL_AE_STATE, expectedAeStates,
+                NUM_RESULTS_WAIT_TIMEOUT);
+    }
+
+    /**
+     * Wait for AE to be: LOCKED
+     *
+     * <p>Waits for {@code android.sync.maxLatency} number of results first, to make sure
+     * that the result is synchronized (or {@code numResultWaitForUnknownLatency} if the latency
+     * is unknown.</p>
+     *
+     * <p>This is a no-op for {@code LEGACY} devices since they don't report
+     * the {@code aeState} result.</p>
+     *
+     * @param resultListener The capture listener to get capture result back.
+     * @param numResultWaitForUnknownLatency Number of frame to wait if camera device latency is
+     * unknown.
+     */
+    protected void waitForAeLocked(SimpleCaptureCallback resultListener,
+            int numResultWaitForUnknownLatency) {
+
+        waitForSettingsApplied(resultListener, numResultWaitForUnknownLatency);
+
+        if (!mStaticInfo.isHardwareLevelLimitedOrBetter()) {
+            // No-op for legacy devices
+            return;
+        }
+
+        List<Integer> expectedAeStates = new ArrayList<Integer>();
+        // Autoboxing instead of the deprecated new Integer(int) constructor.
+        expectedAeStates.add(CaptureResult.CONTROL_AE_STATE_LOCKED);
+        waitForAnyResultValue(resultListener, CaptureResult.CONTROL_AE_STATE, expectedAeStates,
+                NUM_RESULTS_WAIT_TIMEOUT);
+    }
+
+    /**
+     * Create an {@link ImageReader} object and get the surface.
+     *
+     * <p>Any previously active reader is closed first.</p>
+     *
+     * @param size The size of this ImageReader to be created.
+     * @param format The format of this ImageReader to be created
+     * @param maxNumImages The max number of images that can be acquired simultaneously.
+     * @param listener The listener used by this ImageReader to notify callbacks.
+     */
+    protected void createImageReader(Size size, int format, int maxNumImages,
+            ImageReader.OnImageAvailableListener listener) throws Exception {
+        closeImageReader();
+
+        mReader = makeImageReader(size, format, maxNumImages, listener, mHandler);
+        mReaderSurface = mReader.getSurface();
+    }
+
+    /**
+     * Close the pending images then close current active {@link ImageReader} object.
+     */
+    protected void closeImageReader() {
+        CameraTestUtils.closeImageReader(mReader);
+        // Drop both references so a stale surface can never be reused.
+        mReaderSurface = null;
+        mReader = null;
+    }
+
+    /**
+     * Open a camera device and get the StaticMetadata for a given camera id.
+     *
+     * <p>Also primes the supported preview/video/still size lists and the minimum
+     * preview frame duration map when the device supports color output.</p>
+     *
+     * @param cameraId The id of the camera device to be opened.
+     */
+    protected void openDevice(String cameraId) throws Exception {
+        mCamera = CameraTestUtils.openCamera(
+                mCameraManager, cameraId, mCameraListener, mHandler);
+        mCollector.setCameraId(cameraId);
+        // ASSERT check level: any metadata validation failure aborts the test immediately.
+        mStaticInfo = new StaticMetadata(mCameraManager.getCameraCharacteristics(cameraId),
+                CheckLevel.ASSERT, /*collector*/null);
+        if (mStaticInfo.isColorOutputSupported()) {
+            mOrderedPreviewSizes = getSupportedPreviewSizes(cameraId, mCameraManager,
+                    getPreviewSizeBound(mWindowManager, PREVIEW_SIZE_BOUND));
+            mOrderedVideoSizes = getSupportedVideoSizes(cameraId, mCameraManager, PREVIEW_SIZE_BOUND);
+            mOrderedStillSizes = getSupportedStillSizes(cameraId, mCameraManager, null);
+            // Use ImageFormat.YUV_420_888 for now. TODO: need figure out what's format for preview
+            // in public API side.
+            mMinPreviewFrameDurationMap =
+                mStaticInfo.getAvailableMinFrameDurationsForFormatChecked(ImageFormat.YUV_420_888);
+        }
+    }
+
+    /**
+     * Close the current actively used camera device.
+     */
+    protected void closeDevice() {
+        if (mCamera == null) {
+            // Nothing to close.
+            return;
+        }
+        mCamera.close();
+        // Block until the device reports closed, then drop all per-device state.
+        mCameraListener.waitForState(STATE_CLOSED, CAMERA_CLOSE_TIMEOUT_MS);
+        mCamera = null;
+        mSession = null;
+        mSessionListener = null;
+        mStaticInfo = null;
+        mOrderedPreviewSizes = null;
+        mOrderedVideoSizes = null;
+        mOrderedStillSizes = null;
+    }
+
+    /**
+     * Update the preview surface size.
+     *
+     * <p>Posts the {@link SurfaceHolder} resize to the main looper and blocks until
+     * the activity reports the surface change, or fails after the timeout.</p>
+     *
+     * @param size The preview size to be updated.
+     */
+    protected void updatePreviewSurface(Size size) {
+        if (size.equals(mPreviewSize) && mPreviewSurface != null) {
+            // Same size with a live surface already in place; nothing to do.
+            Log.w(TAG, "Skipping update preview surface size...");
+            return;
+        }
+
+        mPreviewSize = size;
+        Camera2SurfaceViewActivity ctsActivity = getActivity();
+        final SurfaceHolder holder = ctsActivity.getSurfaceView().getHolder();
+        // The SurfaceHolder must be resized on the main (UI) thread.
+        Handler handler = new Handler(Looper.getMainLooper());
+        handler.post(new Runnable() {
+            @Override
+            public void run() {
+                holder.setFixedSize(mPreviewSize.getWidth(), mPreviewSize.getHeight());
+            }
+        });
+
+        // Block until the surface change propagates back to the activity.
+        boolean res = ctsActivity.waitForSurfaceSizeChanged(
+                WAIT_FOR_SURFACE_CHANGE_TIMEOUT_MS, mPreviewSize.getWidth(),
+                mPreviewSize.getHeight());
+        assertTrue("wait for surface change to " + mPreviewSize.toString() + " timed out", res);
+        mPreviewSurface = holder.getSurface();
+        assertNotNull("Preview surface is null", mPreviewSurface);
+        assertTrue("Preview surface is invalid", mPreviewSurface.isValid());
+    }
+
+    /**
+     * Setup single capture configuration and start preview.
+     *
+     * @param previewRequest The capture request to be used for preview
+     * @param stillRequest The capture request to be used for still capture
+     * @param previewSz Preview size
+     * @param captureSz Still capture size
+     * @param format The single capture image format
+     * @param resultListener Capture result listener
+     * @param maxNumImages The max number of images set to the image reader
+     * @param imageListener The single capture capture image listener
+     */
+    protected void prepareCaptureAndStartPreview(CaptureRequest.Builder previewRequest,
+            CaptureRequest.Builder stillRequest, Size previewSz, Size captureSz, int format,
+            CaptureCallback resultListener, int maxNumImages,
+            ImageReader.OnImageAvailableListener imageListener) throws Exception {
+        if (VERBOSE) {
+            Log.v(TAG, String.format("Prepare single capture (%s) and preview (%s)",
+                    captureSz.toString(), previewSz.toString()));
+        }
+
+        // Resize the preview surface, then create the still-capture reader.
+        updatePreviewSurface(previewSz);
+        createImageReader(captureSz, format, maxNumImages, imageListener);
+
+        // Configure one session with both output streams attached.
+        List<Surface> outputTargets = new ArrayList<Surface>();
+        outputTargets.add(mPreviewSurface);
+        outputTargets.add(mReaderSurface);
+        mSessionListener = new BlockingSessionCallback();
+        mSession = configureCameraSession(mCamera, outputTargets, mSessionListener, mHandler);
+
+        // Preview draws only to the preview surface; still capture feeds both streams.
+        previewRequest.addTarget(mPreviewSurface);
+        stillRequest.addTarget(mPreviewSurface);
+        stillRequest.addTarget(mReaderSurface);
+
+        // Kick off the repeating preview request.
+        mSession.setRepeatingRequest(previewRequest.build(), resultListener, mHandler);
+    }
+
+    /**
+     * Get the max preview size that supports the given fpsRange.
+     *
+     * @param fpsRange The fps range the returned size must support.
+     * @return max size that support the given fps range; {@code null} if none qualifies.
+     * @throws IllegalArgumentException if the fps range is null/non-positive, or a
+     * MANUAL_SENSOR device is missing a min frame duration for a listed size.
+     * @throws IllegalStateException if the size/duration tables were not initialized.
+     */
+    protected Size getMaxPreviewSizeForFpsRange(Range<Integer> fpsRange) {
+        if (fpsRange == null || fpsRange.getLower() <= 0 || fpsRange.getUpper() <= 0) {
+            throw new IllegalArgumentException("Invalid fps range argument");
+        }
+        if (mOrderedPreviewSizes == null || mMinPreviewFrameDurationMap == null) {
+            throw new IllegalStateException("mOrderedPreviewSizes and mMinPreviewFrameDurationMap"
+                    + " must be initialized");
+        }
+
+        // Only the upper fps bound constrains the choice: a size qualifies when its
+        // minimum frame duration is short enough to reach the range's max fps.
+        // (The previous code also computed 1e9 / fpsRange.getLower() but never used it.)
+        long maxFpsMinDurationNs = (long) (1e9 / fpsRange.getUpper());
+        for (Size size : mOrderedPreviewSizes) {
+            Long minDuration = mMinPreviewFrameDurationMap.get(size);
+            if (minDuration == null ||
+                    minDuration == 0) {
+                if (mStaticInfo.isCapabilitySupported(
+                        CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR)) {
+                    throw new IllegalArgumentException(
+                            "No min frame duration available for the size " + size);
+                }
+                // Non-manual-sensor devices may legitimately omit entries; keep scanning.
+                continue;
+            }
+            if (minDuration <= (maxFpsMinDurationNs + MIN_FRAME_DURATION_ERROR_MARGIN)) {
+                // NOTE(review): assumes mOrderedPreviewSizes is sorted largest-first so the
+                // first qualifying size is the max -- confirm against the list's producer.
+                return size;
+            }
+        }
+
+        return null;
+    }
+
+    /**
+     * Check whether reprocessing is supported for the given camera and format.
+     *
+     * @param cameraId id of the camera to query.
+     * @param format either {@link ImageFormat#YUV_420_888} or {@link ImageFormat#PRIVATE}.
+     * @return true when the matching reprocessing capability is advertised.
+     * @throws IllegalArgumentException for any other format.
+     */
+    protected boolean isReprocessSupported(String cameraId, int format)
+            throws CameraAccessException {
+        if (format != ImageFormat.YUV_420_888 && format != ImageFormat.PRIVATE) {
+            throw new IllegalArgumentException(
+                    "format " + format + " is not supported for reprocessing");
+        }
+
+        StaticMetadata info =
+                new StaticMetadata(mCameraManager.getCameraCharacteristics(cameraId),
+                        CheckLevel.ASSERT, /*collector*/ null);
+        int cap = (format == ImageFormat.PRIVATE)
+                ? CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING
+                : CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING;
+        return info.isCapabilitySupported(cap);
+    }
+
+ //--------------------------------------------------------------------------------
+ //---------Below are common functions for Camera framework test run.--------------
+ //--------------------------------------------------------------------------------
+
+    /** @return the instrumentation arguments supplied to this test run. */
+    protected Bundle getArguments() {
+        InstrumentationTestRunner runner = (InstrumentationTestRunner) getInstrumentation();
+        return runner.getArguments();
+    }
+
+    /**
+     * Read an instrumentation argument as a number.
+     *
+     * @param key argument name.
+     * @param defaultValue value returned when the argument is absent or unparsable.
+     * @return the parsed number, or {@code defaultValue}.
+     */
+    protected <E extends Number> Number getArgumentsAsNumber(String key, E defaultValue) {
+        String stringValue = getArguments().getString(key);
+        if (stringValue != null) {
+            try {
+                return NumberFormat.getInstance().parse(stringValue);
+            } catch (ParseException e) {
+                // Fixed message: NumberFormat parses any number, not only integers.
+                Log.w(TAG, "Unable to parse arg " + key + " with value " + stringValue
+                        + " to a number.", e);
+            }
+        }
+        return defaultValue;
+    }
+
+    /**
+     * Read an instrumentation argument as a boolean.
+     *
+     * <p>{@link Boolean#parseBoolean} never throws: any present value other than
+     * "true" (case-insensitive) yields {@code false}. The previous try/catch was
+     * dead code and has been removed; behavior is unchanged.</p>
+     *
+     * @param key argument name.
+     * @param defaultValue value returned when the argument is absent.
+     * @return the parsed boolean, or {@code defaultValue} when the key is missing.
+     */
+    protected boolean getArgumentsAsBoolean(String key, boolean defaultValue) {
+        String stringValue = getArguments().getString(key);
+        if (stringValue != null) {
+            return Boolean.parseBoolean(stringValue);
+        }
+        return defaultValue;
+    }
+
+    /** @return the configured repeat count ({@code mRepeat}). */
+    protected int getRepeatCount() {
+        return mRepeat;
+    }
+
+    /** @return the configured wait interval between test iterations, in milliseconds. */
+    protected long getTestWaitIntervalMs() {
+        return mTestWaitIntervalMs;
+    }
+
+    /** @return the printer used to report camera test results. */
+    public CameraTestResultPrinter getResultPrinter() {
+        return mResultPrinter;
+    }
+}
diff --git a/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/CameraStressTestRunner.java b/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/CameraStressTestRunner.java
index 23b9705..ddc5ded 100644
--- a/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/CameraStressTestRunner.java
+++ b/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/CameraStressTestRunner.java
@@ -18,6 +18,7 @@
import android.test.InstrumentationTestRunner;
import android.test.InstrumentationTestSuite;
+
import com.android.mediaframeworktest.stress.CameraStressTest;
import com.android.mediaframeworktest.functional.camera.CameraFunctionalTest;
import com.android.mediaframeworktest.functional.camera.CameraPairwiseTest;
diff --git a/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/functional/camera/CameraFunctionalTest.java b/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/functional/camera/CameraFunctionalTest.java
index d12ef2e..9c08d48 100644
--- a/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/functional/camera/CameraFunctionalTest.java
+++ b/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/functional/camera/CameraFunctionalTest.java
@@ -17,31 +17,20 @@
package com.android.mediaframeworktest.functional.camera;
import com.android.mediaframeworktest.MediaFrameworkTest;
-import com.android.mediaframeworktest.CameraTestHelper;
+import com.android.mediaframeworktest.helpers.CameraTestHelper;
-import java.io.BufferedWriter;
-import java.io.File;
-import java.io.FileWriter;
-import java.io.IOException;
import java.io.Writer;
import java.util.concurrent.Semaphore;
import java.util.concurrent.TimeUnit;
-import java.util.HashMap;
import java.util.List;
-import java.util.Map;
-import android.hardware.Camera;
import android.hardware.Camera.Parameters;
-import android.os.Environment;
import android.os.Handler;
import android.os.Looper;
import android.test.ActivityInstrumentationTestCase2;
import android.test.suitebuilder.annotation.LargeTest;
import android.util.Log;
import android.view.SurfaceHolder;
-import com.android.mediaframeworktest.CameraStressTestRunner;
-
-import junit.framework.Assert;
/**
* Junit / Instrumentation test case for the following camera APIs:
diff --git a/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/functional/camera/CameraPairwiseTest.java b/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/functional/camera/CameraPairwiseTest.java
index 8f67598..f9d4964 100644
--- a/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/functional/camera/CameraPairwiseTest.java
+++ b/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/functional/camera/CameraPairwiseTest.java
@@ -17,9 +17,6 @@
package com.android.mediaframeworktest.functional.camera;
import android.hardware.Camera;
-import android.hardware.Camera.PictureCallback;
-import android.hardware.Camera.ShutterCallback;
-import android.os.Environment;
import android.os.Handler;
import android.os.Looper;
import android.test.ActivityInstrumentationTestCase2;
@@ -32,9 +29,7 @@
import java.util.List;
import com.android.mediaframeworktest.MediaFrameworkTest;
-import com.android.mediaframeworktest.CameraStressTestRunner;
-import com.android.mediaframeworktest.CameraTestHelper;
-import junit.framework.Assert;
+import com.android.mediaframeworktest.helpers.CameraTestHelper;
/**
* Junit / Instrumentation test case for camera API pairwise testing
diff --git a/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/helpers/AssertHelpers.java b/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/helpers/AssertHelpers.java
new file mode 100644
index 0000000..e7b4275
--- /dev/null
+++ b/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/helpers/AssertHelpers.java
@@ -0,0 +1,239 @@
+/*
+ * Copyright 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.mediaframeworktest.helpers;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.List;
+
+import static junit.framework.Assert.assertEquals;
+import static junit.framework.Assert.assertTrue;
+import static junit.framework.Assert.fail;
+
+/**
+ * Helper set of methods to add extra useful assert functionality missing in junit.
+ */
+/**
+ * (non-Javadoc)
+ * @see android.hardware.camera2.cts.helpers.AssertHelpers
+ */
+public class AssertHelpers {
+
+ private static final int MAX_FORMAT_STRING = 50;
+
+ /**
+ * Assert that at least one of the elements in data is non-zero.
+ *
+ * <p>An empty or a null array always fails.</p>
+ */
+ public static void assertArrayNotAllZeroes(String message, byte[] data) {
+ int size = data.length;
+
+ int i = 0;
+ for (i = 0; i < size; ++i) {
+ if (data[i] != 0) {
+ break;
+ }
+ }
+
+ assertTrue(message, i < size);
+ }
+
+ /**
+ * Assert that every element in left is less than or equals to the corresponding element in
+ * right.
+ *
+ * <p>Array sizes must match.</p>
+ *
+ * @param message Message to use in case the assertion fails
+ * @param left Left array
+ * @param right Right array
+ */
+ public static void assertArrayNotGreater(String message, float[] left, float[] right) {
+ assertEquals("Array lengths did not match", left.length, right.length);
+
+ String leftString = Arrays.toString(left);
+ String rightString = Arrays.toString(right);
+
+ for (int i = 0; i < left.length; ++i) {
+ String msg = String.format(
+ "%s: (%s should be less than or equals than %s; item index %d; left = %s; " +
+ "right = %s)",
+ message, left[i], right[i], i, leftString, rightString);
+
+ assertTrue(msg, left[i] <= right[i]);
+ }
+ }
+
+ /**
+ * Assert that every element in the value array is greater than the lower bound (exclusive).
+ *
+ * @param value an array of items
+ * @param lowerBound the exclusive lower bound
+ */
+ public static void assertArrayWithinLowerBound(String message, float[] value, float lowerBound)
+ {
+ for (int i = 0; i < value.length; ++i) {
+ assertTrue(
+ String.format("%s: (%s should be greater than than %s; item index %d in %s)",
+ message, value[i], lowerBound, i, Arrays.toString(value)),
+ value[i] > lowerBound);
+ }
+ }
+
+ /**
+ * Assert that every element in the value array is less than the upper bound (exclusive).
+ *
+ * @param value an array of items
+ * @param upperBound the exclusive upper bound
+ */
+ public static void assertArrayWithinUpperBound(String message, float[] value, float upperBound)
+ {
+ for (int i = 0; i < value.length; ++i) {
+ assertTrue(
+ String.format("%s: (%s should be less than than %s; item index %d in %s)",
+ message, value[i], upperBound, i, Arrays.toString(value)),
+ value[i] < upperBound);
+ }
+ }
+
+ /**
+ * Assert that {@code low <= value <= high}
+ */
+ public static void assertInRange(float value, float low, float high) {
+ assertTrue(
+ String.format("Value %s must be greater or equal to %s, but was lower", value, low),
+ value >= low);
+ assertTrue(
+ String.format("Value %s must be less than or equal to %s, but was higher",
+ value, high),
+ value <= high);
+
+ // TODO: generic by using comparators
+ }
+
+ /**
+ * Assert that the given array contains the given value.
+ *
+ * @param message message to print on failure.
+ * @param actual array to test.
+ * @param checkVals value to check for array membership.
+ */
+ public static <T> void assertArrayContains(String message, T[] actual, T checkVals) {
+ assertCollectionContainsAnyOf(message, buildList(actual), Arrays.asList(checkVals));
+ }
+
+
+ /**
+ * Assert that the given array contains the given value.
+ *
+ * @param message message to print on failure.
+ * @param actual array to test.
+ * @param checkVals value to check for array membership.
+ */
+ public static void assertArrayContains(String message, int[] actual, int checkVals) {
+ assertCollectionContainsAnyOf(message, buildList(actual), Arrays.asList(checkVals));
+ }
+
+ /**
+ * Assert that the given array contains at least one of the given values.
+ *
+ * @param message message to print on failure.
+ * @param actual array to test
+ * @param checkVals values to check for array membership.
+ * @return the value contained, or null.
+ */
+ public static <T> T assertArrayContainsAnyOf(String message, T[] actual, T[] checkVals) {
+ return assertCollectionContainsAnyOf(message, buildList(actual), buildList(checkVals));
+ }
+
+ /**
+ * Assert that the given array contains at least one of the given values.
+ *
+ * @param message message to print on failure.
+ * @param actual array to test
+ * @param checkVals values to check for array membership.
+ * @return the value contained.
+ */
+ public static int assertArrayContainsAnyOf(String message, int[] actual, int[] checkVals) {
+ return assertCollectionContainsAnyOf(message, buildList(actual), buildList(checkVals));
+ }
+
+ /**
+ * Assert that the given {@link Collection} contains at least one of the given values.
+ *
+ * @param message message to print on failure.
+ * @param actual {@link Collection} to test.
+ * @param checkVals a {@link Collection} of values to check for membership.
+ * @return the value contained, or null.
+ */
+ public static <T> T assertCollectionContainsAnyOf(String message, Collection<T> actual,
+ Collection<T> checkVals) {
+ boolean contains = false;
+ T selected = null;
+ for (T check : checkVals) {
+ contains = actual.contains(check);
+ if (contains) {
+ selected = check;
+ break;
+ }
+ }
+
+ if (!contains) {
+ fail(String.format("%s : No elements from %s in %s", message,
+ formatCollection(actual, MAX_FORMAT_STRING),
+ formatCollection(checkVals, MAX_FORMAT_STRING)));
+ }
+ return selected;
+ }
+
+ private static <T> List<T> buildList(T[] array) {
+ return new ArrayList<T>(Arrays.asList(array));
+ }
+
+ private static List<Integer> buildList(int[] array) {
+ List<Integer> list = new ArrayList<Integer>(array.length);
+ for (Integer val : array) {
+ list.add(val);
+ }
+ return list;
+ }
+
+ private static <T> String formatCollection(Collection<T> collection, int maxLen) {
+ StringBuilder builder = new StringBuilder();
+ builder.append("[");
+
+ boolean first = true;
+ for (T elem : collection) {
+ String val = ((first) ? ", " : "") + ((elem != null) ? elem.toString() : "null");
+ first = false;
+ if ((builder.length() + val.length()) > maxLen - "...]".length()) {
+ builder.append("...");
+ break;
+ } else {
+ builder.append(val);
+ }
+ }
+ builder.append("]");
+ return builder.toString();
+ }
+
+
+ // Suppress default constructor for noninstantiability
+ private AssertHelpers() { throw new AssertionError(); }
+}
diff --git a/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/helpers/Camera2Focuser.java b/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/helpers/Camera2Focuser.java
new file mode 100644
index 0000000..3f792e0
--- /dev/null
+++ b/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/helpers/Camera2Focuser.java
@@ -0,0 +1,395 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.mediaframeworktest.helpers;
+
+import com.android.ex.camera2.pos.AutoFocusStateMachine;
+import com.android.ex.camera2.pos.AutoFocusStateMachine.AutoFocusStateListener;
+
+import android.hardware.camera2.CameraAccessException;
+import android.hardware.camera2.CameraCaptureSession;
+import android.hardware.camera2.CameraCaptureSession.CaptureCallback;
+import android.hardware.camera2.CameraCharacteristics;
+import android.hardware.camera2.CameraDevice;
+import android.hardware.camera2.CaptureRequest;
+import android.hardware.camera2.CaptureResult;
+import android.hardware.camera2.TotalCaptureResult;
+import android.hardware.camera2.params.MeteringRectangle;
+import android.os.Handler;
+import android.util.Log;
+import android.view.Surface;
+
+/**
+ * A focuser utility class to assist the camera in doing auto focus.
+ * <p>
+ * This class needs to create a repeating request and a single request to do auto focus.
+ * The repeating request is used to get the auto focus states; the single
+ * request is used to trigger the auto focus. This class assumes the camera device
+ * supports auto-focus. Don't use this class if the camera device doesn't have a focuser
+ * unit.
+ * </p>
+ */
+/**
+ * (non-Javadoc)
+ * @see android.hardware.camera2.cts.helpers.Camera2Focuser
+ */
+public class Camera2Focuser implements AutoFocusStateListener {
+    private static final String TAG = "Focuser";
+    private static final boolean VERBOSE = Log.isLoggable(TAG, Log.VERBOSE);
+
+    // State machine that interprets AF_STATE transitions from capture results; it
+    // calls back into this class through the AutoFocusStateListener interface.
+    private final AutoFocusStateMachine mAutoFocus = new AutoFocusStateMachine(this);
+    private final Handler mHandler;
+    private final AutoFocusListener mAutoFocusListener;
+    private final CameraDevice mCamera;
+    private final CameraCaptureSession mSession;
+    private final Surface mRequestSurface;
+    private final StaticMetadata mStaticInfo;
+
+    // Id of the current AF run; bumped on each new scan so capture callbacks
+    // belonging to an older run can be ignored. Guarded by "this".
+    private int mAfRun = 0;
+    // AF metering regions applied to every request builder. Guarded by "this".
+    private MeteringRectangle[] mAfRegions;
+    // Latest AF state reported by the state machine. Guarded by "this".
+    private boolean mLocked = false;
+    private boolean mSuccess = false;
+    // Builder for the repeating (preview) request; also used to read the AF mode.
+    private CaptureRequest.Builder mRepeatingBuilder;
+
+    /**
+     * The callback interface to notify auto focus result.
+     */
+    public interface AutoFocusListener {
+        /**
+         * This callback is called when auto focus completes and locked.
+         *
+         * @param success true if focus was successful, false if otherwise
+         */
+        void onAutoFocusLocked(boolean success);
+    }
+
+    /**
+     * Construct a focuser object, with given capture requestSurface, listener
+     * and handler.
+     * <p>
+     * The focuser object will use camera and requestSurface to submit capture
+     * request and receive focus state changes. The {@link AutoFocusListener} is
+     * used to notify the auto focus callback.
+     * </p>
+     *
+     * @param camera The camera device associated with this focuser
+     * @param session The camera capture session associated with this focuser
+     * @param requestSurface The surface to issue the capture request with
+     * @param listener The auto focus listener to notify AF result
+     * @param staticInfo The CameraCharacteristics of the camera device
+     * @param handler The handler used to post auto focus callbacks
+     * @throws CameraAccessException
+     */
+    public Camera2Focuser(CameraDevice camera, CameraCaptureSession session, Surface requestSurface,
+            AutoFocusListener listener, CameraCharacteristics staticInfo, Handler handler)
+            throws CameraAccessException {
+        if (camera == null) {
+            throw new IllegalArgumentException("camera must not be null");
+        }
+        if (session == null) {
+            throw new IllegalArgumentException("session must not be null");
+        }
+        if (listener == null) {
+            throw new IllegalArgumentException("listener must not be null");
+        }
+        if (handler == null) {
+            throw new IllegalArgumentException("handler must not be null");
+        }
+        if (requestSurface == null) {
+            throw new IllegalArgumentException("requestSurface must not be null");
+        }
+        if (staticInfo == null) {
+            throw new IllegalArgumentException("staticInfo must not be null");
+        }
+
+        mCamera = camera;
+        mSession = session;
+        mRequestSurface = requestSurface;
+        mAutoFocusListener = listener;
+        // CheckLevel.ASSERT makes metadata problems throw rather than collect.
+        mStaticInfo = new StaticMetadata(staticInfo,
+                StaticMetadata.CheckLevel.ASSERT, /*collector*/null);
+        mHandler = handler;
+
+        if (!mStaticInfo.hasFocuser()) {
+            throw new IllegalArgumentException("this camera doesn't have a focuser");
+        }
+
+        /**
+         * Begin by always being in passive auto focus.
+         */
+        cancelAutoFocus();
+    }
+
+    @Override
+    public synchronized void onAutoFocusSuccess(CaptureResult result, boolean locked) {
+        mSuccess = true;
+        mLocked = locked;
+
+        // Only notify the client once the AF run has locked; a transient
+        // (passive) success is just recorded for startAutoFocus() to inspect.
+        if (locked) {
+            dispatchAutoFocusStatusLocked(/*success*/true);
+        }
+    }
+
+    @Override
+    public synchronized void onAutoFocusFail(CaptureResult result, boolean locked) {
+        mSuccess = false;
+        mLocked = locked;
+
+        if (locked) {
+            dispatchAutoFocusStatusLocked(/*success*/false);
+        }
+    }
+
+    @Override
+    public synchronized void onAutoFocusScan(CaptureResult result) {
+        // A scan in progress invalidates any previous success/lock state.
+        mSuccess = false;
+        mLocked = false;
+    }
+
+    @Override
+    public synchronized void onAutoFocusInactive(CaptureResult result) {
+        mSuccess = false;
+        mLocked = false;
+    }
+
+    /**
+     * Start an active auto focus scan based on the given regions.
+     *
+     * <p>This is usually used for touch for focus, it can make the auto-focus converge based
+     * on some particular region aggressively. But it is usually slow as a full active scan
+     * is initiated. After the auto focus is converged, the {@link #cancelAutoFocus} must be called
+     * to resume the continuous auto-focus.</p>
+     *
+     * @param afRegions The AF regions used by focuser auto focus, full active
+     *            array size is used if afRegions is null.
+     * @throws CameraAccessException
+     */
+    public synchronized void touchForAutoFocus(MeteringRectangle[] afRegions)
+            throws CameraAccessException {
+        startAutoFocusLocked(/*active*/true, afRegions);
+    }
+
+    /**
+     * Start auto focus scan.
+     * <p>
+     * Start an auto focus scan if it was not done yet. If AF passively focused,
+     * lock it. If AF is already locked, return. Otherwise, initiate a full
+     * active scan. This is suitable for still capture: focus should need to be
+     * accurate, but the AF latency also need to be as short as possible.
+     * </p>
+     *
+     * @param afRegions The AF regions used by focuser auto focus, full active
+     *            array size is used if afRegions is null.
+     * @throws CameraAccessException
+     */
+    public synchronized void startAutoFocus(MeteringRectangle[] afRegions)
+            throws CameraAccessException {
+        startAutoFocusLocked(/*forceActive*/false, afRegions);
+    }
+
+    /**
+     * Cancel ongoing auto focus, unlock the auto-focus if it was locked, and
+     * resume to passive continuous auto focus.
+     *
+     * @throws CameraAccessException
+     */
+    public synchronized void cancelAutoFocus() throws CameraAccessException {
+        mSuccess = false;
+        mLocked = false;
+
+        // reset the AF regions:
+        setAfRegions(null);
+
+        // Create request builders, the af regions are automatically updated.
+        mRepeatingBuilder = createRequestBuilder();
+        CaptureRequest.Builder requestBuilder = createRequestBuilder();
+        mAutoFocus.setPassiveAutoFocus(/*picture*/true, mRepeatingBuilder);
+        mAutoFocus.unlockAutoFocus(mRepeatingBuilder, requestBuilder);
+        CaptureCallback listener = createCaptureListener();
+        // The repeating request keeps AF state flowing; the single capture
+        // carries the unlock trigger.
+        mSession.setRepeatingRequest(mRepeatingBuilder.build(), listener, mHandler);
+        mSession.capture(requestBuilder.build(), listener, mHandler);
+    }
+
+    /**
+     * Get current AF mode.
+     * @return current AF mode
+     * @throws IllegalStateException if the auto focus is not running.
+     */
+    public synchronized int getCurrentAfMode() {
+        if (mRepeatingBuilder == null) {
+            throw new IllegalStateException("Auto focus is not running, unable to get AF mode");
+        }
+
+        // NOTE(review): unboxes the builder value; assumes CONTROL_AF_MODE was
+        // set by the state machine when the run started — confirm.
+        return mRepeatingBuilder.get(CaptureRequest.CONTROL_AF_MODE);
+    }
+
+    // Core of startAutoFocus/touchForAutoFocus. Caller must hold "this".
+    private void startAutoFocusLocked(
+            boolean forceActive, MeteringRectangle[] afRegions) throws CameraAccessException {
+
+        setAfRegions(afRegions);
+        // New run: stale capture callbacks compare against this id and bail out.
+        mAfRun++;
+
+        // Create request builders, the af regions are automatically updated.
+        // NOTE: startAutoFocusFullActiveLocked() recreates its own builders; the
+        // ones built here are only used by the lock path below.
+        mRepeatingBuilder = createRequestBuilder();
+        CaptureRequest.Builder requestBuilder = createRequestBuilder();
+        if (forceActive) {
+            startAutoFocusFullActiveLocked();
+        } else {
+            // Not forcing a full active scan. If AF passively focused, lock it. If AF is already
+            // locked, return. Otherwise, initiate a full active scan.
+            if (mSuccess && mLocked) {
+                dispatchAutoFocusStatusLocked(/*success*/true);
+                return;
+            } else if (mSuccess) {
+                mAutoFocus.lockAutoFocus(mRepeatingBuilder, requestBuilder);
+                CaptureCallback listener = createCaptureListener();
+                mSession.setRepeatingRequest(mRepeatingBuilder.build(), listener, mHandler);
+                mSession.capture(requestBuilder.build(), listener, mHandler);
+            } else {
+                startAutoFocusFullActiveLocked();
+            }
+        }
+    }
+
+    // Kick off a full active AF scan. Caller must hold "this".
+    private void startAutoFocusFullActiveLocked() throws CameraAccessException {
+        // Create request builders, the af regions are automatically updated.
+        mRepeatingBuilder = createRequestBuilder();
+        CaptureRequest.Builder requestBuilder = createRequestBuilder();
+        mAutoFocus.setActiveAutoFocus(mRepeatingBuilder, requestBuilder);
+        // Sanity-check the state machine's trigger placement: the repeating
+        // request must stay IDLE, the one-shot capture carries the START trigger.
+        if (mRepeatingBuilder.get(CaptureRequest.CONTROL_AF_TRIGGER)
+                != CaptureRequest.CONTROL_AF_TRIGGER_IDLE) {
+            throw new AssertionError("Wrong trigger set in repeating request");
+        }
+        if (requestBuilder.get(CaptureRequest.CONTROL_AF_TRIGGER)
+                != CaptureRequest.CONTROL_AF_TRIGGER_START) {
+            throw new AssertionError("Wrong trigger set in queued request");
+        }
+        mAutoFocus.resetState();
+
+        CaptureCallback listener = createCaptureListener();
+        mSession.setRepeatingRequest(mRepeatingBuilder.build(), listener, mHandler);
+        mSession.capture(requestBuilder.build(), listener, mHandler);
+    }
+
+    // Post the AF result to the client on the handler thread (never synchronously,
+    // to avoid calling out to alien code while holding "this").
+    private void dispatchAutoFocusStatusLocked(final boolean success) {
+        mHandler.post(new Runnable() {
+            @Override
+            public void run() {
+                mAutoFocusListener.onAutoFocusLocked(success);
+            }
+        });
+    }
+
+    /**
+     * Create request builder, set the af regions.
+     * @throws CameraAccessException
+     */
+    private CaptureRequest.Builder createRequestBuilder() throws CameraAccessException {
+        CaptureRequest.Builder requestBuilder =
+                mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
+
+        requestBuilder.set(CaptureRequest.CONTROL_AF_REGIONS, mAfRegions);
+        requestBuilder.addTarget(mRequestSurface);
+
+        return requestBuilder;
+    }
+
+    /**
+     * Set AF regions, fall back to default region if afRegions is null.
+     *
+     * @param afRegions The AF regions to set
+     * @throws IllegalArgumentException if the region is malformed (length is 0).
+     */
+    private void setAfRegions(MeteringRectangle[] afRegions) {
+        if (afRegions == null) {
+            setDefaultAfRegions();
+            return;
+        }
+        // Throw IAE if AF regions are malformed.
+        if (afRegions.length == 0) {
+            throw new IllegalArgumentException("afRegions is malformed, length: 0");
+        }
+
+        mAfRegions = afRegions;
+    }
+
+    /**
+     * Set default AF region to full active array size.
+     */
+    private void setDefaultAfRegions() {
+        // Initialize AF regions with all zeros, meaning that it is up to the camera device to
+        // decide the regions used by AF.
+        mAfRegions = new MeteringRectangle[] {
+                new MeteringRectangle(0, 0, 0, 0, MeteringRectangle.METERING_WEIGHT_DONT_CARE)};
+    }
+    // Build a CaptureCallback bound to the current AF run; results belonging to
+    // an older run are dropped inside dispatchToFocuser().
+    private CaptureCallback createCaptureListener() {
+
+        int thisAfRun;
+        synchronized (this) {
+            thisAfRun = mAfRun;
+        }
+
+        final int finalAfRun = thisAfRun;
+
+        return new CaptureCallback() {
+            // Frame number of the newest result already forwarded; used to avoid
+            // sending both the partial and the total result of the same frame.
+            private long mLatestFrameCount = -1;
+
+            @Override
+            public void onCaptureProgressed(CameraCaptureSession session, CaptureRequest request,
+                    CaptureResult result) {
+                // In case of a partial result, send to focuser if necessary
+                // 3A fields are present
+                if (result.get(CaptureResult.CONTROL_AF_STATE) != null &&
+                        result.get(CaptureResult.CONTROL_AF_MODE) != null) {
+                    if (VERBOSE) {
+                        Log.v(TAG, "Focuser - got early AF state");
+                    }
+
+                    dispatchToFocuser(result);
+                }
+            }
+
+            @Override
+            public void onCaptureCompleted(CameraCaptureSession session, CaptureRequest request,
+                    TotalCaptureResult result) {
+                dispatchToFocuser(result);
+            }
+
+            // Forward the result to the AF state machine, de-duplicating frames and
+            // dropping results that belong to a superseded AF run.
+            private void dispatchToFocuser(CaptureResult result) {
+                int afRun;
+                synchronized (Camera2Focuser.this) {
+                    // In case of partial results, don't send AF update twice
+                    long frameCount = result.getFrameNumber();
+                    if (frameCount <= mLatestFrameCount) return;
+                    mLatestFrameCount = frameCount;
+
+                    afRun = mAfRun;
+                }
+
+                if (afRun != finalAfRun) {
+                    if (VERBOSE) {
+                        Log.w(TAG,
+                                "onCaptureCompleted - Ignoring results from previous AF run "
+                                        + finalAfRun);
+                    }
+                    return;
+                }
+
+                mAutoFocus.onCaptureCompleted(result);
+            }
+        };
+    }
+}
diff --git a/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/helpers/CameraErrorCollector.java b/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/helpers/CameraErrorCollector.java
new file mode 100644
index 0000000..6facec4
--- /dev/null
+++ b/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/helpers/CameraErrorCollector.java
@@ -0,0 +1,1066 @@
+/*
+ * Copyright 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.mediaframeworktest.helpers;
+
+import org.hamcrest.CoreMatchers;
+import org.hamcrest.Matcher;
+import org.junit.rules.ErrorCollector;
+
+import android.graphics.Rect;
+import android.hardware.camera2.CameraCharacteristics;
+import android.hardware.camera2.CaptureRequest;
+import android.hardware.camera2.CaptureRequest.Builder;
+import android.hardware.camera2.CaptureResult;
+import android.hardware.camera2.params.MeteringRectangle;
+import android.media.Image;
+import android.util.Log;
+import android.util.Size;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Objects;
+import java.util.Set;
+
+/**
+ * A camera test ErrorCollector class to gather the test failures during a test,
+ * instead of failing the test immediately for each failure.
+ */
+/**
+ * (non-Javadoc)
+ * @see android.hardware.camera2.cts.helpers.CameraErrorCollector
+ */
+public class CameraErrorCollector extends ErrorCollector {
+
+    private static final String TAG = "CameraErrorCollector";
+    // Whether collected errors are also echoed to logcat at ERROR priority.
+    private static final boolean LOG_ERRORS = Log.isLoggable(TAG, Log.ERROR);
+
+    // Prefix prepended to every failure message; set via setCameraId(), "" by default.
+    private String mCameraMsg = "";
+
+    /**
+     * Throw if any errors were added during the test.
+     *
+     * @throws Throwable aggregating the collected failures, if any.
+     */
+    @Override
+    public void verify() throws Throwable {
+        // Do not remove if using JUnit 3 test runners. super.verify() is protected.
+        super.verify();
+    }
+
+ /**
+ * Adds an unconditional error to the table.
+ *
+ * <p>Execution continues, but test will fail at the end.</p>
+ *
+ * @param message A string containing the failure reason.
+ */
+ public void addMessage(String message) {
+ addErrorSuper(new Throwable(mCameraMsg + message));
+ }
+
+ /**
+ * Adds a Throwable to the table. <p>Execution continues, but the test will fail at the end.</p>
+ */
+ @Override
+ public void addError(Throwable error) {
+ addErrorSuper(new Throwable(mCameraMsg + error.getMessage(), error));
+ }
+
+ private void addErrorSuper(Throwable error) {
+ if (LOG_ERRORS) Log.e(TAG, error.getMessage());
+ super.addError(error);
+ }
+
+    /**
+     * Adds a failure to the table if {@code matcher} does not match {@code value}.
+     * Execution continues, but the test will fail at the end if the match fails.
+     * The camera id is included into the failure log.
+     */
+    @Override
+    public <T> void checkThat(final T value, final Matcher<T> matcher) {
+        // Delegate with the camera id prefix as the reason string.
+        super.checkThat(mCameraMsg, value, matcher);
+    }
+
+    /**
+     * Adds a failure with the given {@code reason} to the table if
+     * {@code matcher} does not match {@code value}. Execution continues, but
+     * the test will fail at the end if the match fails. The camera id is
+     * included into the failure log.
+     */
+    @Override
+    public <T> void checkThat(final String reason, final T value, final Matcher<T> matcher) {
+        // Prepend the camera id prefix to the caller-supplied reason.
+        super.checkThat(mCameraMsg + reason, value, matcher);
+    }
+
+    /**
+     * Set the camera id to this error collector object for logging purpose.
+     *
+     * @param id The camera id to be set; null clears the prefix.
+     */
+    public void setCameraId(String id) {
+        mCameraMsg = (id == null) ? "" : "Test failed for camera " + id + ": ";
+    }
+
+    /**
+     * Adds a failure to the table if {@code condition} is not {@code true}.
+     * <p>
+     * Execution continues, but the test will fail at the end if the condition
+     * failed.
+     * </p>
+     *
+     * @param msg Message to be logged when check fails.
+     * @param condition Log the failure if it is not true.
+     * @return the condition that was checked.
+     */
+    public boolean expectTrue(String msg, boolean condition) {
+        if (condition) {
+            return true;
+        }
+        addMessage(msg);
+        return false;
+    }
+
+ /**
+ * Check if the two values are equal.
+ *
+ * @param msg Message to be logged when check fails.
+ * @param expected Expected value to be checked against.
+ * @param actual Actual value to be checked.
+ * @return {@code true} if the two values are equal, {@code false} otherwise.
+ *
+ * @throws IllegalArgumentException if {@code expected} was {@code null}
+ */
+ public <T> boolean expectEquals(String msg, T expected, T actual) {
+ if (expected == null) {
+ throw new IllegalArgumentException("expected value shouldn't be null");
+ }
+
+ if (!Objects.equals(expected, actual)) {
+ addMessage(String.format("%s (expected = %s, actual = %s) ", msg, expected,
+ actual));
+ return false;
+ }
+
+ return true;
+ }
+
+ /**
+ * Check if the two values are not equal.
+ *
+ * @param msg Message to be logged when check fails.
+ * @param expected Expected value to be checked against.
+ * @param actual Actual value to be checked.
+ * @return {@code true} if the two values are not equal, {@code false} otherwise.
+ */
+ public <T> boolean expectNotEquals(String msg, T expected, T actual) {
+ if (Objects.equals(expected, actual)) {
+ addMessage(String.format("%s (expected = %s, actual = %s) ", msg, expected,
+ actual));
+ return false;
+ }
+
+ return true;
+ }
+
+ /**
+ * Check if the two arrays of values are deeply equal.
+ *
+ * @param msg Message to be logged when check fails.
+ * @param expected Expected array of values to be checked against.
+ * @param actual Actual array of values to be checked.
+ * @return {@code true} if the two arrays of values are deeply equal, {@code false} otherwise.
+ *
+ * @throws IllegalArgumentException if {@code expected} was {@code null}
+ */
+ public <T> boolean expectEquals(String msg, T[] expected, T[] actual) {
+ if (expected == null) {
+ throw new IllegalArgumentException("expected value shouldn't be null");
+ }
+
+ if (!Arrays.deepEquals(expected, actual)) {
+ addMessage(String.format("%s (expected = %s, actual = %s) ", msg,
+ Arrays.deepToString(expected), Arrays.deepToString(actual)));
+ return false;
+ }
+
+ return true;
+ }
+
+ /**
+ * Check if the two arrays of values are not deeply equal.
+ *
+ * @param msg Message to be logged when check fails.
+ * @param expected Expected array of values to be checked against.
+ * @param actual Actual array of values to be checked.
+ * @return {@code true} if the two arrays of values are not deeply equal, {@code false}
+ * otherwise.
+ *
+ * @throws IllegalArgumentException if {@code expected} was {@code null}
+ */
+ public <T> boolean expectNotEquals(String msg, T[] expected, T[] actual) {
+ if (expected == null) {
+ throw new IllegalArgumentException("expected value shouldn't be null");
+ }
+
+ if (Arrays.deepEquals(expected, actual)) {
+ addMessage(String.format("%s (expected = %s, actual = %s) ", msg,
+ Arrays.deepToString(expected), Arrays.deepToString(actual)));
+ return false;
+ }
+
+ return true;
+ }
+
+    /**
+     * Check that the {@code actual} value is greater than the {@code expected} value.
+     *
+     * @param msg Message to be logged when check fails.
+     * @param expected The expected value to check that the actual value is larger than.
+     * @param actual Actual value to check.
+     * @return {@code true} if {@code actual} is greater than {@code expected}.
+     */
+    public <T extends Comparable<? super T>> boolean expectGreater(String msg, T expected,
+            T actual) {
+        // Bug fix: the failure message now states the relation actually checked
+        // (the old copy-pasted text reversed the operands).
+        return expectTrue(String.format("%s: (actual = %s was not greater than expected = %s) ",
+                msg, actual, expected), actual.compareTo(expected) > 0);
+    }
+
+    /**
+     * Check that the {@code actual} value is greater than or equal to the {@code expected} value.
+     *
+     * @param msg Message to be logged when check fails.
+     * @param expected The expected value to check that the actual value is larger than or equal to.
+     * @param actual Actual value to check.
+     * @return {@code true} if {@code actual} is greater than or equal to {@code expected}.
+     */
+    public <T extends Comparable<? super T>> boolean expectGreaterOrEqual(String msg, T expected,
+            T actual) {
+        // Bug fix: message previously said "not greater than" for a >= check.
+        return expectTrue(String.format(
+                "%s: (actual = %s was not greater than or equal to expected = %s) ",
+                msg, actual, expected), actual.compareTo(expected) >= 0);
+    }
+
+    /**
+     * Check that the {@code actual} value is less than the {@code expected} value.
+     *
+     * @param msg Message to be logged when check fails.
+     * @param expected The expected value to check that the actual value is less than.
+     * @param actual Actual value to check.
+     * @return {@code true} if {@code actual} is less than {@code expected}.
+     */
+    public <T extends Comparable<? super T>> boolean expectLess(String msg, T expected,
+            T actual) {
+        // Bug fix: message previously said "not greater than" for a < check.
+        return expectTrue(String.format("%s: (actual = %s was not less than expected = %s) ",
+                msg, actual, expected), actual.compareTo(expected) < 0);
+    }
+
+    /**
+     * Check that the {@code actual} value is less than or equal to the {@code expected} value.
+     *
+     * @param msg Message to be logged when check fails.
+     * @param expected The expected value to check that the actual value is less than or equal to.
+     * @param actual Actual value to check.
+     * @return {@code true} if {@code actual} is less than or equal to {@code expected}.
+     */
+    public <T extends Comparable<? super T>> boolean expectLessOrEqual(String msg, T expected,
+            T actual) {
+        // Bug fix: message previously said "not greater than" for a <= check.
+        return expectTrue(String.format(
+                "%s: (actual = %s was not less than or equal to expected = %s) ",
+                msg, actual, expected), actual.compareTo(expected) <= 0);
+    }
+
+    /**
+     * Check if the two float values are equal with given error tolerance.
+     *
+     * @param msg Message to be logged when check fails.
+     * @param expected Expected value to be checked against.
+     * @param actual Actual value to be checked.
+     * @param tolerance The error margin for the equality check.
+     * @return {@code true} if the two values are equal, {@code false} otherwise.
+     */
+    public boolean expectEquals(String msg, float expected, float actual, float tolerance) {
+        // (Removed an unused type parameter <T> — this overload is not generic.)
+        // Exact match fast path, also covering equal infinities.
+        if (expected == actual) {
+            return true;
+        }
+
+        // Negated <= form so that NaN operands count as a failure.
+        if (!(Math.abs(expected - actual) <= tolerance)) {
+            addMessage(String.format("%s (expected = %s, actual = %s, tolerance = %s) ", msg,
+                    expected, actual, tolerance));
+            return false;
+        }
+
+        return true;
+    }
+
+    /**
+     * Check if the two double values are equal with given error tolerance.
+     *
+     * @param msg Message to be logged when check fails.
+     * @param expected Expected value to be checked against.
+     * @param actual Actual value to be checked.
+     * @param tolerance The error margin for the equality check
+     * @return {@code true} if the two values are equal, {@code false} otherwise.
+     */
+    public boolean expectEquals(String msg, double expected, double actual, double tolerance) {
+        // (Removed an unused type parameter <T> — this overload is not generic.)
+        // Exact match fast path, also covering equal infinities.
+        if (expected == actual) {
+            return true;
+        }
+
+        // Negated <= form so that NaN operands count as a failure.
+        if (!(Math.abs(expected - actual) <= tolerance)) {
+            addMessage(String.format("%s (expected = %s, actual = %s, tolerance = %s) ", msg,
+                    expected, actual, tolerance));
+            return false;
+        }
+
+        return true;
+    }
+
+    /**
+     * Check that all values in the list are greater than or equal to the min value.
+     *
+     * @param msg Message to be logged when check fails
+     * @param list The list of values to be checked
+     * @param min The smallest allowed value
+     */
+    public <T extends Comparable<? super T>> void expectValuesGreaterOrEqual(String msg,
+            List<T> list, T min) {
+        for (T value : list) {
+            String failure = msg + String.format(", array value " + value.toString() +
+                    " is less than %s",
+                    min.toString());
+            expectTrue(failure, value.compareTo(min) >= 0);
+        }
+    }
+
+    /**
+     * Check that all values in the array are greater than or equal to the min value.
+     *
+     * @param msg Message to be logged when check fails
+     * @param array The array of values to be checked
+     * @param min The smallest allowed value
+     */
+    public <T extends Comparable<? super T>> void expectValuesGreaterOrEqual(String msg,
+            T[] array, T min) {
+        // Delegate to the List overload via a fixed-size view of the array.
+        expectValuesGreaterOrEqual(msg, Arrays.asList(array), min);
+    }
+
+ /**
+ * Expect the list of values are in the range.
+ *
+ * @param msg Message to be logged
+ * @param list The list of values to be checked
+ * @param min The min value of the range
+ * @param max The max value of the range
+ */
+ public <T extends Comparable<? super T>> void expectValuesInRange(String msg, List<T> list,
+ T min, T max) {
+ for (T value : list) {
+ expectTrue(msg + String.format(", array value " + value.toString() +
+ " is out of range [%s, %s]",
+ min.toString(), max.toString()),
+ value.compareTo(max)<= 0 && value.compareTo(min) >= 0);
+ }
+ }
+
+    /**
+     * Expect the array of values are in the range.
+     *
+     * @param msg Message to be logged
+     * @param array The array of values to be checked
+     * @param min The min value of the range
+     * @param max The max value of the range
+     */
+    public <T extends Comparable<? super T>> void expectValuesInRange(String msg, T[] array,
+            T min, T max) {
+        // Delegate to the List overload via a fixed-size view of the array.
+        expectValuesInRange(msg, Arrays.asList(array), min, max);
+    }
+
+    /**
+     * Expect the array of values are in the range.
+     *
+     * @param msg Message to be logged
+     * @param array The array of values to be checked
+     * @param min The min value of the range
+     * @param max The max value of the range
+     */
+    public void expectValuesInRange(String msg, int[] array, int min, int max) {
+        // Box into a list so the generic overload can do the comparisons.
+        ArrayList<Integer> boxed = new ArrayList<>(array.length);
+        for (int value : array) {
+            boxed.add(value);
+        }
+        expectValuesInRange(msg, boxed, min, max);
+    }
+
+ /**
+ * Expect the value is in the range.
+ *
+ * @param msg Message to be logged
+ * @param value The value to be checked
+ * @param min The min value of the range
+ * @param max The max value of the range
+ *
+ * @return {@code true} if the value was in range, {@code false} otherwise
+ */
+ public <T extends Comparable<? super T>> boolean expectInRange(String msg, T value,
+ T min, T max) {
+ return expectTrue(msg + String.format(", value " + value.toString()
+ + " is out of range [%s, %s]",
+ min.toString(), max.toString()),
+ value.compareTo(max)<= 0 && value.compareTo(min) >= 0);
+ }
+
+
+    /**
+     * Check that two metering region arrays are similar enough by ensuring that each of their width,
+     * height, and all corners are within {@code errorPercent} of each other.
+     *
+     * <p>Note that the length of the arrays must be the same, and each weight must be the same
+     * as well. We assume the order is also equivalent.</p>
+     *
+     * <p>At most 1 error per each dissimilar metering region is collected.</p>
+     *
+     * @param msg Message to be logged
+     * @param expected The reference 'expected' values to be used to check against
+     * @param actual The actual values that were received
+     * @param errorPercent Within how many percent the components should be
+     *
+     * @return {@code true} if all expects passed, {@code false} otherwise
+     */
+    public boolean expectMeteringRegionsAreSimilar(String msg,
+            MeteringRectangle[] expected, MeteringRectangle[] actual,
+            float errorPercent) {
+        String expectedActualMsg = String.format("expected (%s), actual (%s)",
+                Arrays.deepToString(expected), Arrays.deepToString(actual));
+
+        String differentSizesMsg = String.format(
+                "%s: rect lists are different sizes; %s",
+                msg, expectedActualMsg);
+
+        String differentWeightsMsg = String.format(
+                "%s: rect weights are different; %s",
+                msg, expectedActualMsg);
+
+        // A null actual array is reported as a size mismatch.
+        if (!expectTrue(differentSizesMsg, actual != null)) {
+            return false;
+        }
+
+        if (!expectEquals(differentSizesMsg, expected.length, actual.length)) return false;
+
+        boolean succ = true;
+        for (int i = 0; i < expected.length; ++i) {
+            // Lengths are equal at this point; the bound check is kept defensively.
+            if (i < actual.length) {
+                // Avoid printing multiple errors for the same rectangle
+                if (!expectRectsAreSimilar(
+                        msg, expected[i].getRect(), actual[i].getRect(), errorPercent)) {
+                    succ = false;
+                    continue;
+                }
+                if (!expectEquals(differentWeightsMsg,
+                        expected[i].getMeteringWeight(), actual[i].getMeteringWeight())) {
+                    succ = false;
+                    continue;
+                }
+            }
+        }
+
+        return succ;
+    }
+
+    /**
+     * Check that two rectangles are similar enough by ensuring that their width, height,
+     * and all corners are within {@code errorPercent} of each other.
+     *
+     * <p>Only the first error is collected, to avoid spamming several error messages when
+     * the rectangle is hugely dissimilar.</p>
+     *
+     * @param msg Message to be logged
+     * @param expected The reference 'expected' value to be used to check against
+     * @param actual The actual value that was received
+     * @param errorPercent Within how many percent the components should be
+     *
+     * @return {@code true} if all expects passed, {@code false} otherwise
+     */
+    public boolean expectRectsAreSimilar(String msg, Rect expected, Rect actual,
+            float errorPercent) {
+        String formattedMsg = String.format("%s: rects are not similar enough; expected (%s), " +
+                "actual (%s), error percent (%s), reason: ",
+                msg, expected, actual, errorPercent);
+
+        if (!expectSimilarValues(
+                formattedMsg, "too wide", "too narrow", actual.width(), expected.width(),
+                errorPercent)) return false;
+
+        if (!expectSimilarValues(
+                formattedMsg, "too tall", "too short", actual.height(), expected.height(),
+                errorPercent)) return false;
+
+        if (!expectSimilarValues(
+                formattedMsg, "left pt too right", "left pt too left", actual.left, expected.left,
+                errorPercent)) return false;
+
+        if (!expectSimilarValues(
+                formattedMsg, "right pt too right", "right pt too left",
+                actual.right, expected.right, errorPercent)) return false;
+
+        if (!expectSimilarValues(
+                formattedMsg, "top pt too low", "top pt too high", actual.top, expected.top,
+                errorPercent)) return false;
+
+        // Bug fix: the bottom-edge check previously compared the TOP coordinates a
+        // second time, so a dissimilar bottom edge was never reported.
+        if (!expectSimilarValues(
+                formattedMsg, "bottom pt too low", "bottom pt too high", actual.bottom,
+                expected.bottom, errorPercent)) return false;
+
+        return true;
+    }
+
+    /**
+     * Check that two sizes are similar enough by ensuring that their width and height
+     * are within {@code errorPercent} of each other.
+     *
+     * <p>Only the first error is collected, to avoid spamming several error messages when
+     * the rectangle is hugely dissimilar.</p>
+     *
+     * @param msg Message to be logged
+     * @param expected The reference 'expected' value to be used to check against
+     * @param actual The actual value that was received
+     * @param errorPercent Within how many percent the components should be
+     *
+     * @return {@code true} if all expects passed, {@code false} otherwise
+     */
+    public boolean expectSizesAreSimilar(String msg, Size expected, Size actual,
+            float errorPercent) {
+        // Bug fix: message previously said "rects" even though Sizes are compared.
+        String formattedMsg = String.format("%s: sizes are not similar enough; expected (%s), " +
+                "actual (%s), error percent (%s), reason: ",
+                msg, expected, actual, errorPercent);
+
+        if (!expectSimilarValues(
+                formattedMsg, "too wide", "too narrow", actual.getWidth(), expected.getWidth(),
+                errorPercent)) return false;
+
+        if (!expectSimilarValues(
+                formattedMsg, "too tall", "too short", actual.getHeight(), expected.getHeight(),
+                errorPercent)) return false;
+
+        return true;
+    }
+
+ /**
+ * Check that the rectangle is centered within a certain tolerance of {@code errorPercent},
+ * with respect to the {@code bounds} bounding rectangle.
+ *
+ * @param msg Message to be logged
+ * @param expectedBounds The width/height of the bounding rectangle
+ * @param actual The actual value that was received
+ * @param errorPercent Within how many percent the centering should be
+ */
+ public void expectRectCentered(String msg, Size expectedBounds, Rect actual,
+ float errorPercent) {
+ String formattedMsg = String.format("%s: rect should be centered; expected bounds (%s), " +
+ "actual (%s), error percent (%s), reason: ",
+ msg, expectedBounds, actual, errorPercent);
+
+ // Integer division truncates; the expected center rounds down by design.
+ int centerBoundX = expectedBounds.getWidth() / 2;
+ int centerBoundY = expectedBounds.getHeight() / 2;
+
+ // Both axes are checked unconditionally (return values ignored) so a failure
+ // on the Y axis does not hide a simultaneous failure on the X axis.
+ expectSimilarValues(
+ formattedMsg, "too low", "too high", actual.centerY(), centerBoundY,
+ errorPercent);
+
+ expectSimilarValues(
+ formattedMsg, "too right", "too left", actual.centerX(), centerBoundX,
+ errorPercent);
+ }
+
+     /**
+      * Check that {@code actualValue} lies within {@code errorPercent} of
+      * {@code expectedValue}, collecting a failure message for each violated bound.
+      *
+      * @return {@code true} iff both bounds hold
+      */
+     private boolean expectSimilarValues(
+             String formattedMsg, String tooSmall, String tooLarge, int actualValue,
+             int expectedValue, float errorPercent) {
+         float upperBound = expectedValue * (1.0f + errorPercent);
+         float lowerBound = expectedValue * (1.0f - errorPercent);
+         // Evaluate both checks unconditionally so each failure is reported.
+         boolean withinUpper = expectTrue(formattedMsg + tooLarge, actualValue <= upperBound);
+         boolean withinLower = expectTrue(formattedMsg + tooSmall, actualValue >= lowerBound);
+         return withinUpper && withinLower;
+     }
+
+ /**
+ * Check that {@code obj} is not null; collects {@code msg} as a failure otherwise.
+ *
+ * @param msg Message to be logged on failure
+ * @param obj The object to be checked
+ */
+ public void expectNotNull(String msg, Object obj) {
+ checkThat(msg, obj, CoreMatchers.notNullValue());
+ }
+
+ /**
+ * Check that {@code obj} is null; collects {@code msg} as a failure otherwise.
+ *
+ * @param msg Message to be logged on failure
+ * @param obj The object expected to be null
+ */
+ public void expectNull(String msg, Object obj) {
+ if (obj != null) {
+ addMessage(msg);
+ }
+ }
+
+     /**
+      * Check if the values in the array are monotonically increasing (decreasing) and not all
+      * equal.
+      *
+      * @param array The array of values to be checked
+      * @param ascendingOrder The monotonicity ordering to be checked with
+      */
+     public <T extends Comparable<? super T>> void checkArrayMonotonicityAndNotAllEqual(T[] array,
+             boolean ascendingOrder) {
+         String orderMsg = ascendingOrder ? ("increasing order") : ("decreasing order");
+         for (int i = 0; i < array.length - 1; i++) {
+             int cmp = array[i + 1].compareTo(array[i]);
+             // Ascending requires each successor to compare >= 0; descending, <= 0.
+             boolean ordered = ascendingOrder ? (cmp >= 0) : (cmp <= 0);
+             expectTrue(String.format("Adjacent values (%s and %s) %s monotonicity is broken",
+                     array[i].toString(), array[i + 1].toString(), orderMsg), ordered);
+         }
+
+         // Monotonic and endpoints-unequal together imply not all elements are equal.
+         expectTrue("All values of this array are equal: " + array[0].toString(),
+                 array[0].compareTo(array[array.length - 1]) != 0);
+     }
+
+     /**
+      * Check that the given key has a non-null value and return the value.
+      *
+      * @param characteristics The {@link CameraCharacteristics} to get the key from.
+      * @param key The {@link CameraCharacteristics} key to be checked.
+      *
+      * @return The value of the key, or {@code null} if absent (a failure is collected).
+      */
+     public <T> T expectKeyValueNotNull(CameraCharacteristics characteristics,
+             CameraCharacteristics.Key<T> key) {
+         final T value = characteristics.get(key);
+         if (value != null) {
+             return value;
+         }
+         addMessage("Key " + key.getName() + " shouldn't be null");
+         return null;
+     }
+
+     /**
+      * Check that the given key has a non-null value and return the value.
+      *
+      * @param request The {@link CaptureRequest} to get the key from.
+      * @param key The {@link CaptureRequest} key to be checked.
+      *
+      * @return The value of the key, or {@code null} if absent (a failure is collected).
+      */
+     public <T> T expectKeyValueNotNull(CaptureRequest request,
+             CaptureRequest.Key<T> key) {
+         final T value = request.get(key);
+         if (value != null) {
+             return value;
+         }
+         addMessage("Key " + key.getName() + " shouldn't be null");
+         return null;
+     }
+
+     /**
+      * Check that the given key has a non-null value and return the value.
+      *
+      * @param request The {@link CaptureRequest.Builder} to get the key from.
+      * @param key The {@link CaptureRequest} key to be checked.
+      * @return The value of the key, or {@code null} if absent (a failure is collected).
+      */
+     public <T> T expectKeyValueNotNull(Builder request, CaptureRequest.Key<T> key) {
+         final T value = request.get(key);
+         if (value != null) {
+             return value;
+         }
+         addMessage("Key " + key.getName() + " shouldn't be null");
+         return null;
+     }
+
+ /**
+ * Check if the key value is not null and return the value.
+ *
+ * @param result The {@link CaptureResult} to get the key from.
+ * @param key The {@link CaptureResult} key to be checked.
+ * @return The value of the key.
+ */
+ public <T> T expectKeyValueNotNull(CaptureResult result, CaptureResult.Key<T> key) {
+ // Delegates to the message-taking overload with an empty message prefix.
+ return expectKeyValueNotNull("", result, key);
+ }
+
+     /**
+      * Check that the given key has a non-null value and return the value.
+      *
+      * @param msg The message to be logged.
+      * @param result The {@link CaptureResult} to get the key from.
+      * @param key The {@link CaptureResult} key to be checked.
+      * @return The value of the key, or {@code null} if absent (a failure is collected).
+      */
+     public <T> T expectKeyValueNotNull(String msg, CaptureResult result, CaptureResult.Key<T> key) {
+         final T value = result.get(key);
+         if (value != null) {
+             return value;
+         }
+         addMessage(msg + " Key " + key.getName() + " shouldn't be null");
+         return null;
+     }
+
+     /**
+      * Check that the key is non-null and its value is not equal to {@code expected}.
+      *
+      * @param request The {@link CaptureRequest.Builder} to get the key from.
+      * @param key The {@link CaptureRequest} key to be checked.
+      * @param expected The value the key must not have.
+      */
+     public <T> void expectKeyValueNotEquals(
+             Builder request, CaptureRequest.Key<T> key, T expected) {
+         if (request == null || key == null || expected == null) {
+             throw new IllegalArgumentException("request, key and expected shouldn't be null");
+         }
+
+         T value = expectKeyValueNotNull(request, key);
+         if (value == null) {
+             // Failure already collected by expectKeyValueNotNull.
+             return;
+         }
+
+         checkThat("Key " + key.getName() + " shouldn't have value " + value.toString(),
+                 value, CoreMatchers.not(expected));
+     }
+
+     /**
+      * Check that the key is non-null and its value is not equal to {@code expected}.
+      *
+      * @param result The {@link CaptureResult} to get the key from.
+      * @param key The {@link CaptureResult} key to be checked.
+      * @param expected The value the key must not have.
+      */
+     public <T> void expectKeyValueNotEquals(
+             CaptureResult result, CaptureResult.Key<T> key, T expected) {
+         if (result == null || key == null || expected == null) {
+             throw new IllegalArgumentException("result, key and expected shouldn't be null");
+         }
+
+         T value = expectKeyValueNotNull(result, key);
+         if (value == null) {
+             // Failure already collected by expectKeyValueNotNull.
+             return;
+         }
+
+         checkThat("Key " + key.getName() + " shouldn't have value " + value.toString(),
+                 value, CoreMatchers.not(expected));
+     }
+
+     /**
+      * Check if the value is non-null and the value is equal to target.
+      *
+      * @param result The {@link CaptureResult} to lookup the value in.
+      * @param key The {@link CaptureResult} key to be checked.
+      * @param expected The expected value of the {@link CaptureResult} key.
+      */
+     public <T> void expectKeyValueEquals(CaptureResult result, CaptureResult.Key<T> key,
+             T expected) {
+         if (result == null || key == null || expected == null) {
+             // Bug fix: the message named "request", but this overload takes a result.
+             throw new IllegalArgumentException("result, key and expected shouldn't be null");
+         }
+
+         T value;
+         if ((value = expectKeyValueNotNull(result, key)) == null) {
+             return;
+         }
+
+         String reason = "Key " + key.getName() + " value " + value.toString()
+                 + " doesn't match the expected value " + expected.toString();
+         checkThat(reason, value, CoreMatchers.equalTo(expected));
+     }
+
+     /**
+      * Check that the key is non-null and its value is equal to {@code expected}.
+      *
+      * <p>Only the non-null check is performed when the target is null.</p>
+      *
+      * @param request The {@link CaptureRequest.Builder} to get the key from.
+      * @param key The {@link CaptureRequest} key to be checked.
+      * @param expected The expected value of the CaptureRequest key.
+      */
+     public <T> void expectKeyValueEquals(Builder request, CaptureRequest.Key<T> key, T expected) {
+         if (request == null || key == null || expected == null) {
+             throw new IllegalArgumentException("request, key and expected shouldn't be null");
+         }
+
+         T value = expectKeyValueNotNull(request, key);
+         if (value == null) {
+             // Failure already collected by expectKeyValueNotNull.
+             return;
+         }
+
+         checkThat("Key " + key.getName() + " value " + value.toString()
+                 + " doesn't match the expected value " + expected.toString(),
+                 value, CoreMatchers.equalTo(expected));
+     }
+
+ /**
+ * Check if the key is non-null, and the key value is greater than or equal to the
+ * expected value.
+ *
+ * @param result {@link CaptureResult} to check.
+ * @param key The {@link CaptureResult} key to be checked.
+ * @param expected The expected to be compared to the value for the given key.
+ */
+ public <T extends Comparable<? super T>> void expectKeyValueGreaterOrEqual(
+ CaptureResult result, CaptureResult.Key<T> key, T expected) {
+ T value;
+ if ((value = expectKeyValueNotNull(result, key)) == null) {
+ return;
+ }
+
+ expectGreaterOrEqual(key.getName(), expected, value);
+ }
+
+ /**
+ * Check if the key is non-null, and the key value is strictly greater than the
+ * expected value.
+ *
+ * @param characteristics {@link CameraCharacteristics} to check.
+ * @param key The {@link CameraCharacteristics} key to be checked.
+ * @param expected The expected to be compared to the value for the given key.
+ */
+ public <T extends Comparable<? super T>> void expectKeyValueGreaterThan(
+ CameraCharacteristics characteristics, CameraCharacteristics.Key<T> key, T expected) {
+ T value;
+ if ((value = expectKeyValueNotNull(characteristics, key)) == null) {
+ return;
+ }
+
+ expectGreater(key.getName(), expected, value);
+ }
+
+     /**
+      * Check that the key is non-null and its value lies within {@code [min, max]}.
+      *
+      * @param characteristics {@link CameraCharacteristics} to check.
+      * @param key The {@link CameraCharacteristics} key to be checked.
+      * @param min The min value of the range
+      * @param max The max value of the range
+      */
+     public <T extends Comparable<? super T>> void expectKeyValueInRange(
+             CameraCharacteristics characteristics, CameraCharacteristics.Key<T> key, T min, T max) {
+         T value = expectKeyValueNotNull(characteristics, key);
+         if (value != null) {
+             expectInRange(key.getName(), value, min, max);
+         }
+     }
+
+     /**
+      * Check that the key is non-null and its value is one of the expected values.
+      *
+      * @param characteristics {@link CameraCharacteristics} to check.
+      * @param key The {@link CameraCharacteristics} key to be checked.
+      * @param expected The expected values for the given key.
+      */
+     public <T> void expectKeyValueIsIn(CameraCharacteristics characteristics,
+             CameraCharacteristics.Key<T> key, T... expected) {
+         T value = expectKeyValueNotNull(characteristics, key);
+         if (value == null) {
+             return;
+         }
+         expectContains("Key " + key.getName() + " value " + value
+                 + " isn't one of the expected values " + Arrays.deepToString(expected),
+                 expected, value);
+     }
+
+     /**
+      * Check that the key is non-null and its value is one of the expected values.
+      *
+      * @param request The {@link CaptureRequest.Builder} to get the key from.
+      * @param key The {@link CaptureRequest} key to be checked.
+      * @param expected The expected values of the CaptureRequest key.
+      */
+     public <T> void expectKeyValueIsIn(Builder request, CaptureRequest.Key<T> key, T... expected) {
+         T value = expectKeyValueNotNull(request, key);
+         if (value == null) {
+             return;
+         }
+         expectContains("Key " + key.getName() + " value " + value
+                 + " isn't one of the expected values " + Arrays.deepToString(expected),
+                 expected, value);
+     }
+
+     /**
+      * Check if the key is non-null, and the key value contains the expected element.
+      *
+      * @param characteristics {@link CameraCharacteristics} to check.
+      * @param key The {@link CameraCharacteristics} key to be checked.
+      * @param expected The expected element to be contained in the value for the given key.
+      */
+     public <T> void expectKeyValueContains(CameraCharacteristics characteristics,
+             CameraCharacteristics.Key<T[]> key, T expected) {
+         T[] value;
+         if ((value = expectKeyValueNotNull(characteristics, key)) == null) {
+             return;
+         }
+         // Bug fix: use Arrays.toString so the failure message shows the array's
+         // contents rather than its identity hash code (e.g. "[Ljava.lang.Integer;@1a2b").
+         String reason = "Key " + key.getName() + " value " + Arrays.toString(value)
+                 + " doesn't contain the expected value " + expected;
+         expectContains(reason, value, expected);
+     }
+
+     /**
+      * Check if the key is non-null, and the key value contains the expected element.
+      *
+      * @param characteristics {@link CameraCharacteristics} to check.
+      * @param key The {@link CameraCharacteristics} key to be checked.
+      * @param expected The expected element to be contained in the value for the given key.
+      */
+     public void expectKeyValueContains(CameraCharacteristics characteristics,
+             CameraCharacteristics.Key<int[]> key, int expected) {
+         int[] value;
+         if ((value = expectKeyValueNotNull(characteristics, key)) == null) {
+             return;
+         }
+         // Bug fix: use Arrays.toString so the failure message shows the array's
+         // contents rather than its identity hash code.
+         String reason = "Key " + key.getName() + " value " + Arrays.toString(value)
+                 + " doesn't contain the expected value " + expected;
+         expectContains(reason, value, expected);
+     }
+
+     /**
+      * Check if the key is non-null, and the key value contains the expected element.
+      *
+      * @param characteristics {@link CameraCharacteristics} to check.
+      * @param key The {@link CameraCharacteristics} key to be checked.
+      * @param expected The expected element to be contained in the value for the given key.
+      */
+     public void expectKeyValueContains(CameraCharacteristics characteristics,
+             CameraCharacteristics.Key<boolean[]> key, boolean expected) {
+         boolean[] value;
+         if ((value = expectKeyValueNotNull(characteristics, key)) == null) {
+             return;
+         }
+         // Bug fix: use Arrays.toString so the failure message shows the array's
+         // contents rather than its identity hash code.
+         String reason = "Key " + key.getName() + " value " + Arrays.toString(value)
+                 + " doesn't contain the expected value " + expected;
+         expectContains(reason, value, expected);
+     }
+
+     /**
+      * Check if the {@code values} array contains the expected element.
+      *
+      * @param reason reason to print for failure.
+      * @param values array to check for membership in.
+      * @param expected the value to check.
+      *
+      * @throws NullPointerException if {@code values} is null
+      */
+     public <T> void expectContains(String reason, T[] values, T expected) {
+         if (values == null) {
+             // Give the NPE a message so the failing argument is obvious in the stack trace.
+             throw new NullPointerException("values must not be null");
+         }
+         checkThat(reason, expected, InMatcher.in(values));
+     }
+
+     /**
+      * Check if the {@code values} array contains the expected element, with a default
+      * failure message.
+      *
+      * @param values array to check for membership in.
+      * @param expected the value to check.
+      */
+     public <T> void expectContains(T[] values, T expected) {
+         // Bug fix: Arrays.toString prints the contents; plain concatenation printed
+         // only the array's identity hash code.
+         String reason = "Expected value " + expected
+                 + " is not contained in the given values " + Arrays.toString(values);
+         expectContains(reason, values, expected);
+     }
+
+     /**
+      * Specialize {@link InMatcher} class for integer primitive array.
+      */
+     private static class IntInMatcher extends InMatcher<Integer> {
+         public IntInMatcher(int[] values) {
+             // Bug fix: the reference under test comes first, the description second.
+             // The original passed ("values", values), which null-checked the literal
+             // string and never validated the array.
+             Preconditions.checkNotNull(values, "values");
+             mValues = new ArrayList<>(values.length);
+             for (int i : values) {
+                 mValues.add(i);
+             }
+         }
+     }
+
+     /**
+      * Check if the {@code values} array contains the expected element.
+      *
+      * <p>Specialized for primitive int arrays</p>
+      *
+      * @param reason reason to print for failure.
+      * @param values array to check for membership in.
+      * @param expected the value to check.
+      */
+     public void expectContains(String reason, int[] values, int expected) {
+         if (values == null) {
+             throw new NullPointerException();
+         }
+         checkThat(reason, expected, new IntInMatcher(values));
+     }
+
+     /**
+      * Check if the {@code values} array contains the expected element, with a default
+      * failure message.
+      *
+      * <p>Specialized for primitive int arrays</p>
+      *
+      * @param values array to check for membership in.
+      * @param expected the value to check.
+      */
+     public void expectContains(int[] values, int expected) {
+         // Bug fix: Arrays.toString prints the contents; plain concatenation printed
+         // only the array's identity hash code.
+         String reason = "Expected value " + expected
+                 + " is not contained in the given values " + Arrays.toString(values);
+         expectContains(reason, values, expected);
+     }
+
+     /**
+      * Specialize {@link InMatcher} class for boolean primitive array.
+      */
+     private static class BooleanInMatcher extends InMatcher<Boolean> {
+         public BooleanInMatcher(boolean[] values) {
+             // Bug fix: the reference under test comes first, the description second.
+             // The original passed ("values", values), which null-checked the literal
+             // string and never validated the array. (Javadoc also linked this class
+             // itself instead of InMatcher.)
+             Preconditions.checkNotNull(values, "values");
+             mValues = new ArrayList<>(values.length);
+             for (boolean i : values) {
+                 mValues.add(i);
+             }
+         }
+     }
+
+     /**
+      * Check if the {@code values} array contains the expected element.
+      *
+      * <p>Specialized for primitive boolean arrays</p>
+      *
+      * @param reason reason to print for failure.
+      * @param values array to check for membership in.
+      * @param expected the value to check.
+      */
+     public void expectContains(String reason, boolean[] values, boolean expected) {
+         if (values == null) {
+             throw new NullPointerException();
+         }
+         checkThat(reason, expected, new BooleanInMatcher(values));
+     }
+
+     /**
+      * Check if the {@code values} array contains the expected element, with a default
+      * failure message.
+      *
+      * <p>Specialized for primitive boolean arrays</p>
+      *
+      * @param values array to check for membership in.
+      * @param expected the value to check.
+      */
+     public void expectContains(boolean[] values, boolean expected) {
+         // Bug fix: Arrays.toString prints the contents; plain concatenation printed
+         // only the array's identity hash code.
+         String reason = "Expected value " + expected
+                 + " is not contained in the given values " + Arrays.toString(values);
+         expectContains(reason, values, expected);
+     }
+
+     /**
+      * Check that all elements of the list are distinct.
+      *
+      * @param msg The message to be logged
+      * @param list The list of values to be checked
+      */
+     public <T> void expectValuesUnique(String msg, List<T> list) {
+         // A set collapses duplicates, so equal sizes imply all elements are distinct.
+         Set<T> distinct = new HashSet<T>(list);
+         expectTrue(msg + " each element must be distinct", distinct.size() == list.size());
+     }
+
+ /**
+ * Check that an {@link Image}'s format, dimensions and timestamp match the expectations.
+ *
+ * @param msg Message prefix to be logged on failure
+ * @param image The image to be checked
+ * @param format The expected image format
+ * @param size The expected width/height
+ * @param timestampNs The expected timestamp, in nanoseconds
+ */
+ public void expectImageProperties(String msg, Image image, int format, Size size,
+ long timestampNs) {
+ // NOTE(review): arguments are passed as (actual, expected) here — confirm this
+ // matches expectEquals' declared parameter order.
+ expectEquals(msg + "Image format is wrong.", image.getFormat(), format);
+ expectEquals(msg + "Image width is wrong.", image.getWidth(), size.getWidth());
+ expectEquals(msg + "Image height is wrong.", image.getHeight(), size.getHeight());
+ expectEquals(msg + "Image timestamp is wrong.", image.getTimestamp(), timestampNs);
+ }
+
+}
diff --git a/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/CameraTestHelper.java b/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/helpers/CameraTestHelper.java
similarity index 97%
rename from media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/CameraTestHelper.java
rename to media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/helpers/CameraTestHelper.java
index 8bf741c..84153d60 100644
--- a/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/CameraTestHelper.java
+++ b/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/helpers/CameraTestHelper.java
@@ -14,7 +14,9 @@
* limitations under the License.
*/
-package com.android.mediaframeworktest;
+package com.android.mediaframeworktest.helpers;
+
+import junit.framework.Assert;
import android.hardware.Camera;
import android.hardware.Camera.Parameters;
@@ -24,15 +26,11 @@
import android.util.Log;
import android.view.SurfaceHolder;
-import java.io.BufferedWriter;
import java.io.File;
-import java.io.FilenameFilter;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
-import junit.framework.Assert;
-
public class CameraTestHelper {
public Camera mCamera;
diff --git a/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/helpers/CameraTestResultPrinter.java b/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/helpers/CameraTestResultPrinter.java
new file mode 100644
index 0000000..2492109
--- /dev/null
+++ b/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/helpers/CameraTestResultPrinter.java
@@ -0,0 +1,139 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.mediaframeworktest.helpers;
+
+import android.app.Instrumentation;
+import android.os.Bundle;
+import android.os.Environment;
+import android.util.Log;
+
+import java.io.BufferedWriter;
+import java.io.File;
+import java.io.FileWriter;
+
+public class CameraTestResultPrinter {
+
+    private static final String TAG = CameraTestResultPrinter.class.getSimpleName();
+    private static final String RESULT_DIR = Environment.getExternalStorageDirectory() +
+            "/camera-out/";
+    private static final String RESULT_FILE_FORMAT = "fwk-stress_camera_%s.txt";
+    private static final String RESULT_SWAP_FILE = "fwk-stress.swp";
+    private static final String KEY_NUM_ATTEMPTS = "numAttempts"; // Total number of iterations
+    private static final String KEY_ITERATION = "iteration";
+    private static final String KEY_CAMERA_ID = "cameraId";
+    private static final int INST_STATUS_IN_PROGRESS = 2;
+
+    private Instrumentation mInst = null;
+    private boolean mWriteToFile = true;
+
+    public CameraTestResultPrinter(Instrumentation instrumentation, boolean writeToFile) {
+        mInst = instrumentation;
+        mWriteToFile = writeToFile;
+
+        // Create a log directory if not exists.
+        File baseDir = new File(RESULT_DIR);
+        if (!baseDir.exists() && !baseDir.mkdirs()) {
+            throw new IllegalStateException("Couldn't create directory for logs: " + baseDir);
+        }
+        Log.v(TAG, String.format("Saving test results under: %s", baseDir.getAbsolutePath()));
+        // Remove all logs but not the base directory before a test run.
+        purgeFiles(baseDir);
+    }
+
+    /**
+     * Report the test results to instrumentation status and, if enabled, a result file.
+     */
+    public void printStatus(int numAttempts, int iteration, String cameraId) throws Exception {
+        Log.v(TAG, String.format("Print status: numAttempts=%d iteration=%d cameraId=%s",
+                numAttempts, iteration, cameraId));
+        // Write stats to instrumentation results.
+        sendInstrumentationStatus(numAttempts, iteration, cameraId);
+
+        if (mWriteToFile) {
+            writeToFile(numAttempts, iteration, cameraId);
+        }
+    }
+
+    /**
+     * Report the test results to instrumentation status or a file.
+     */
+    public void printStatus(int numAttempts, int iteration) throws Exception {
+        printStatus(numAttempts, iteration, "");
+    }
+
+    /**
+     * Write stats to instrumentation results.
+     */
+    private void sendInstrumentationStatus(int numAttempts, int iteration, String cameraId)
+            throws Exception {
+        Bundle output = new Bundle();
+        output.putString(KEY_NUM_ATTEMPTS, String.valueOf(numAttempts));
+        output.putString(KEY_ITERATION, String.valueOf(iteration));
+        if (!"".equals(cameraId)) {
+            output.putString(KEY_CAMERA_ID, cameraId);
+        }
+        mInst.sendStatus(INST_STATUS_IN_PROGRESS, output);
+    }
+
+    private void writeToFile(final int numAttempts, final int iteration, String cameraId) {
+        // Format output in a form of pairs of key and value
+        // eg, "numAttempts=500|iteration=400[|cameraId=0]"
+        String results = String.format("%s=%d|%s=%d", KEY_NUM_ATTEMPTS, numAttempts,
+                KEY_ITERATION, iteration);
+        if (!"".equals(cameraId)) {
+            results += String.format("|%s=%s", KEY_CAMERA_ID, cameraId);
+        }
+        Log.v(TAG, String.format("Writing result to a file: %s", results));
+
+        // Write results to a swap file temporarily, then rename it to a text file when writing
+        // has successfully completed, so that process crash during file writing would
+        // not corrupt the file.
+        File swapFile = new File(RESULT_DIR, RESULT_SWAP_FILE);
+        BufferedWriter out = null;
+        try {
+            out = new BufferedWriter(new FileWriter(swapFile));
+            out.write(results);
+            out.flush();
+        } catch (Exception e) {
+            Log.w(TAG, String.format("Failed to write results to a file: %s", e));
+        } finally {
+            if (out != null) {
+                try {
+                    out.close();
+                    // Delete an old file just before renaming, instead of overwriting.
+                    String resultFileName = String.format(RESULT_FILE_FORMAT, cameraId);
+                    File txtFile = new File(RESULT_DIR, resultFileName);
+                    txtFile.delete();
+                    swapFile.renameTo(txtFile);
+                } catch (Exception e) {
+                    Log.w(TAG, String.format("Failed to write results to a file: %s", e));
+                }
+            }
+        }
+    }
+
+    // Remove sub directories and their contents, but not the given directory itself.
+    private void purgeFiles(File path) {
+        File[] files = path.listFiles();
+        if (files != null) {
+            for (File child : files) {
+                // Bug fix: recurse when the *child* is a directory. The original tested
+                // 'path.isDirectory()', which is always true here (listFiles() returned
+                // non-null), so the recursion guard was meaningless.
+                if (child.isDirectory()) {
+                    purgeFiles(child);
+                }
+                child.delete();
+            }
+        }
+    }
+}
diff --git a/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/helpers/CameraTestUtils.java b/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/helpers/CameraTestUtils.java
new file mode 100644
index 0000000..b2be464
--- /dev/null
+++ b/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/helpers/CameraTestUtils.java
@@ -0,0 +1,2158 @@
+/*
+ * Copyright 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.mediaframeworktest.helpers;
+
+import com.android.ex.camera2.blocking.BlockingCameraManager;
+import com.android.ex.camera2.blocking.BlockingCameraManager.BlockingOpenException;
+import com.android.ex.camera2.blocking.BlockingSessionCallback;
+import com.android.ex.camera2.blocking.BlockingStateCallback;
+import com.android.ex.camera2.exceptions.TimeoutRuntimeException;
+
+import junit.framework.Assert;
+
+import org.mockito.Mockito;
+
+import android.graphics.Bitmap;
+import android.graphics.BitmapFactory;
+import android.graphics.ImageFormat;
+import android.graphics.PointF;
+import android.graphics.Rect;
+import android.hardware.camera2.CameraAccessException;
+import android.hardware.camera2.CameraCaptureSession;
+import android.hardware.camera2.CameraCharacteristics;
+import android.hardware.camera2.CameraConstrainedHighSpeedCaptureSession;
+import android.hardware.camera2.CameraDevice;
+import android.hardware.camera2.CameraManager;
+import android.hardware.camera2.CaptureFailure;
+import android.hardware.camera2.CaptureRequest;
+import android.hardware.camera2.CaptureResult;
+import android.hardware.camera2.TotalCaptureResult;
+import android.hardware.camera2.params.InputConfiguration;
+import android.hardware.camera2.params.MeteringRectangle;
+import android.hardware.camera2.params.StreamConfigurationMap;
+import android.location.Location;
+import android.location.LocationManager;
+import android.media.ExifInterface;
+import android.media.Image;
+import android.media.Image.Plane;
+import android.media.ImageReader;
+import android.media.ImageWriter;
+import android.os.Build;
+import android.os.Environment;
+import android.os.Handler;
+import android.util.Log;
+import android.util.Pair;
+import android.util.Size;
+import android.view.Display;
+import android.view.Surface;
+import android.view.WindowManager;
+
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.lang.reflect.Array;
+import java.nio.ByteBuffer;
+import java.text.ParseException;
+import java.text.SimpleDateFormat;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.Comparator;
+import java.util.Date;
+import java.util.HashMap;
+import java.util.List;
+import java.util.concurrent.LinkedBlockingQueue;
+import java.util.concurrent.Semaphore;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.atomic.AtomicLong;
+
+/**
+ * A utility class wrapping up the camera2 framework test common utility functions.
+ * (Declared {@code public}, not package-private, so helpers in other test packages
+ * can use it.)
+ */
+/**
+ * (non-Javadoc)
+ * @see android.hardware.camera2.cts.CameraTestUtils
+ */
+public class CameraTestUtils extends Assert {
+ private static final String TAG = "CameraTestUtils";
+ private static final boolean VERBOSE = Log.isLoggable(TAG, Log.VERBOSE);
+ private static final boolean DEBUG = Log.isLoggable(TAG, Log.DEBUG);
+ public static final Size SIZE_BOUND_1080P = new Size(1920, 1088);
+ public static final Size SIZE_BOUND_2160P = new Size(3840, 2160);
+ // Only test the preview size that is no larger than 1080p.
+ public static final Size PREVIEW_SIZE_BOUND = SIZE_BOUND_1080P;
+ // Default timeouts for reaching various states
+ public static final int CAMERA_OPEN_TIMEOUT_MS = 3000;
+ public static final int CAMERA_CLOSE_TIMEOUT_MS = 3000;
+ public static final int CAMERA_IDLE_TIMEOUT_MS = 3000;
+ public static final int CAMERA_ACTIVE_TIMEOUT_MS = 1000;
+ public static final int CAMERA_BUSY_TIMEOUT_MS = 1000;
+ public static final int CAMERA_UNCONFIGURED_TIMEOUT_MS = 1000;
+ public static final int CAMERA_CONFIGURE_TIMEOUT_MS = 3000;
+ public static final int CAPTURE_RESULT_TIMEOUT_MS = 3000;
+ public static final int CAPTURE_IMAGE_TIMEOUT_MS = 3000;
+
+ public static final int SESSION_CONFIGURE_TIMEOUT_MS = 3000;
+ public static final int SESSION_CLOSE_TIMEOUT_MS = 3000;
+ public static final int SESSION_READY_TIMEOUT_MS = 3000;
+ public static final int SESSION_ACTIVE_TIMEOUT_MS = 1000;
+
+ public static final int MAX_READER_IMAGES = 5;
+
+ private static final int EXIF_DATETIME_LENGTH = 19;
+ private static final int EXIF_DATETIME_ERROR_MARGIN_SEC = 60;
+ private static final float EXIF_FOCAL_LENGTH_ERROR_MARGIN = 0.001f;
+ private static final float EXIF_EXPOSURE_TIME_ERROR_MARGIN_RATIO = 0.05f;
+ private static final float EXIF_EXPOSURE_TIME_MIN_ERROR_MARGIN_SEC = 0.002f;
+ private static final float EXIF_APERTURE_ERROR_MARGIN = 0.001f;
+
+ private static final Location sTestLocation0 = new Location(LocationManager.GPS_PROVIDER);
+ private static final Location sTestLocation1 = new Location(LocationManager.GPS_PROVIDER);
+ private static final Location sTestLocation2 = new Location(LocationManager.NETWORK_PROVIDER);
+
+ protected static final String DEBUG_FILE_NAME_BASE =
+ Environment.getExternalStorageDirectory().getPath();
+
+ static {
+ sTestLocation0.setTime(1199145600L);
+ sTestLocation0.setLatitude(37.736071);
+ sTestLocation0.setLongitude(-122.441983);
+ sTestLocation0.setAltitude(21.0);
+
+ sTestLocation1.setTime(1199145601L);
+ sTestLocation1.setLatitude(0.736071);
+ sTestLocation1.setLongitude(0.441983);
+ sTestLocation1.setAltitude(1.0);
+
+ sTestLocation2.setTime(1199145602L);
+ sTestLocation2.setLatitude(-89.736071);
+ sTestLocation2.setLongitude(-179.441983);
+ sTestLocation2.setAltitude(100000.0);
+ }
+
+ // Exif test data vectors.
+ public static final ExifTestData[] EXIF_TEST_DATA = {
+ new ExifTestData(
+ /*gpsLocation*/ sTestLocation0,
+ /* orientation */90,
+ /* jpgQuality */(byte) 80,
+ /* thumbQuality */(byte) 75),
+ new ExifTestData(
+ /*gpsLocation*/ sTestLocation1,
+ /* orientation */180,
+ /* jpgQuality */(byte) 90,
+ /* thumbQuality */(byte) 85),
+ new ExifTestData(
+ /*gpsLocation*/ sTestLocation2,
+ /* orientation */270,
+ /* jpgQuality */(byte) 100,
+ /* thumbQuality */(byte) 100)
+ };
+
+    /**
+     * Create an {@link ImageReader} and hook up the image-available listener.
+     *
+     * @param size The size of this ImageReader to be created.
+     * @param format The format of this ImageReader to be created
+     * @param maxNumImages The max number of images that can be acquired simultaneously.
+     * @param listener The listener used by this ImageReader to notify callbacks.
+     * @param handler The handler to use for any listener callbacks.
+     * @return the newly created ImageReader.
+     */
+    public static ImageReader makeImageReader(Size size, int format, int maxNumImages,
+            ImageReader.OnImageAvailableListener listener, Handler handler) {
+        ImageReader reader = ImageReader.newInstance(size.getWidth(), size.getHeight(), format,
+                maxNumImages);
+        reader.setOnImageAvailableListener(listener, handler);
+        if (VERBOSE) Log.v(TAG, "Created ImageReader size " + size);
+        return reader;
+    }
+
+    /**
+     * Create an {@link ImageWriter} on the given input surface and hook up the
+     * release listener.
+     *
+     * @param inputSurface The input surface of the ImageWriter.
+     * @param maxImages The max number of Images that can be dequeued simultaneously.
+     * @param listener The listener used by this ImageWriter to notify callbacks
+     * @param handler The handler to post listener callbacks.
+     * @return ImageWriter object created.
+     */
+    public static ImageWriter makeImageWriter(
+            Surface inputSurface, int maxImages,
+            ImageWriter.OnImageReleasedListener listener, Handler handler) {
+        final ImageWriter newWriter = ImageWriter.newInstance(inputSurface, maxImages);
+        newWriter.setOnImageReleasedListener(listener, handler);
+        return newWriter;
+    }
+
+    /**
+     * Close an {@link ImageReader}, releasing any images it still holds.
+     * A null reader is silently ignored.
+     *
+     * @param reader an {@link ImageReader} to close.
+     */
+    public static void closeImageReader(ImageReader reader) {
+        if (reader == null) {
+            return;
+        }
+        reader.close();
+    }
+
+    /**
+     * Close an {@link ImageWriter}, releasing any images it still holds.
+     * A null writer is silently ignored.
+     *
+     * @param writer an {@link ImageWriter} to close.
+     */
+    public static void closeImageWriter(ImageWriter writer) {
+        if (writer == null) {
+            return;
+        }
+        writer.close();
+    }
+
+    /**
+     * Dummy listener that release the image immediately once it is available.
+     *
+     * <p>
+     * It can be used for the case where we don't care the image data at all.
+     * </p>
+     */
+    public static class ImageDropperListener implements ImageReader.OnImageAvailableListener {
+        @Override
+        public void onImageAvailable(ImageReader reader) {
+            // Acquire and immediately discard; a null acquire means there is
+            // nothing to drop.
+            Image next = reader.acquireNextImage();
+            if (next != null) {
+                next.close();
+            }
+        }
+    }
+
+    /**
+     * Image listener that validates each image (size and format) and then
+     * releases it immediately.
+     */
+    public static class ImageVerifierListener implements ImageReader.OnImageAvailableListener {
+        // Expected dimensions and format used for validation.
+        private final Size mExpectedSize;
+        private final int mExpectedFormat;
+
+        public ImageVerifierListener(Size sz, int format) {
+            mExpectedSize = sz;
+            mExpectedFormat = format;
+        }
+
+        @Override
+        public void onImageAvailable(ImageReader reader) {
+            Image next = reader.acquireNextImage();
+            if (next != null) {
+                validateImage(next, mExpectedSize.getWidth(), mExpectedSize.getHeight(),
+                        mExpectedFormat, null);
+                next.close();
+            }
+        }
+    }
+
+    /**
+     * Image listener that queues available images so tests can consume them
+     * synchronously (or drop the oldest when operating asynchronously).
+     */
+    public static class SimpleImageReaderListener
+            implements ImageReader.OnImageAvailableListener {
+        private final LinkedBlockingQueue<Image> mQueue =
+                new LinkedBlockingQueue<Image>();
+        // Indicate whether this listener will drop images or not,
+        // when the queued images reaches the reader maxImages
+        private final boolean mAsyncMode;
+        // maxImages held by the queue in async mode.
+        private final int mMaxImages;
+
+        /**
+         * Create a synchronous SimpleImageReaderListener that queues the images
+         * automatically when they are available, no image will be dropped. If
+         * the caller doesn't call getImage(), the producer will eventually run
+         * into buffer starvation.
+         */
+        public SimpleImageReaderListener() {
+            mAsyncMode = false;
+            mMaxImages = 0;
+        }
+
+        /**
+         * Create a synchronous/asynchronous SimpleImageReaderListener that
+         * queues the images automatically when they are available. For
+         * asynchronous listener, image will be dropped if the queued images
+         * reach to maxImages queued. If the caller doesn't call getImage(), the
+         * producer will not be blocked. For synchronous listener, no image will
+         * be dropped. If the caller doesn't call getImage(), the producer will
+         * eventually run into buffer starvation.
+         *
+         * @param asyncMode If the listener is operating at asynchronous mode.
+         * @param maxImages The max number of images held by this listener.
+         */
+        public SimpleImageReaderListener(boolean asyncMode, int maxImages) {
+            mAsyncMode = asyncMode;
+            mMaxImages = maxImages;
+        }
+
+        @Override
+        public void onImageAvailable(ImageReader reader) {
+            try {
+                mQueue.put(reader.acquireNextImage());
+                if (mAsyncMode && mQueue.size() >= mMaxImages) {
+                    // Async mode: drop the oldest image to avoid blocking the
+                    // producer when the queue is full.
+                    Image img = mQueue.poll();
+                    img.close();
+                }
+            } catch (InterruptedException e) {
+                throw new UnsupportedOperationException(
+                        "Can't handle InterruptedException in onImageAvailable");
+            }
+        }
+
+        /**
+         * Get an image from the image reader.
+         *
+         * @param timeout Timeout value for the wait.
+         * @return The image from the image reader.
+         */
+        public Image getImage(long timeout) throws InterruptedException {
+            Image image = mQueue.poll(timeout, TimeUnit.MILLISECONDS);
+            assertNotNull("Wait for an image timed out in " + timeout + "ms", image);
+            return image;
+        }
+
+        /**
+         * Drain the pending images held by this listener currently.
+         */
+        public void drain() {
+            while (!mQueue.isEmpty()) {
+                Image image = mQueue.poll();
+                assertNotNull("Unable to get an image", image);
+                image.close();
+            }
+        }
+    }
+
+    /**
+     * Image writer listener that signals a semaphore whenever the tracked
+     * writer releases an input image back to the producer.
+     */
+    public static class SimpleImageWriterListener implements ImageWriter.OnImageReleasedListener {
+        private final Semaphore mImageReleasedSema = new Semaphore(0);
+        private final ImageWriter mWriter;
+
+        public SimpleImageWriterListener(ImageWriter writer) {
+            if (writer == null) {
+                throw new IllegalArgumentException("writer cannot be null");
+            }
+            mWriter = writer;
+        }
+
+        @Override
+        public void onImageReleased(ImageWriter writer) {
+            // Ignore callbacks from writers we are not tracking.
+            if (writer != mWriter) {
+                return;
+            }
+            if (VERBOSE) {
+                Log.v(TAG, "Input image is released");
+            }
+            mImageReleasedSema.release();
+        }
+
+        /** Block until an image is released, or fail after {@code timeoutMs}. */
+        public void waitForImageReleased(long timeoutMs) throws InterruptedException {
+            if (!mImageReleasedSema.tryAcquire(timeoutMs, TimeUnit.MILLISECONDS)) {
+                fail("wait for image available timed out after " + timeoutMs + "ms");
+            }
+        }
+    }
+
+ public static class SimpleCaptureCallback extends CameraCaptureSession.CaptureCallback {
+ private final LinkedBlockingQueue<TotalCaptureResult> mQueue =
+ new LinkedBlockingQueue<TotalCaptureResult>();
+ private final LinkedBlockingQueue<CaptureFailure> mFailureQueue =
+ new LinkedBlockingQueue<>();
+ // Pair<CaptureRequest, Long> is a pair of capture request and timestamp.
+ private final LinkedBlockingQueue<Pair<CaptureRequest, Long>> mCaptureStartQueue =
+ new LinkedBlockingQueue<>();
+
+ private AtomicLong mNumFramesArrived = new AtomicLong(0);
+
+ @Override
+ public void onCaptureStarted(CameraCaptureSession session, CaptureRequest request,
+ long timestamp, long frameNumber) {
+ try {
+ mCaptureStartQueue.put(new Pair(request, timestamp));
+ } catch (InterruptedException e) {
+ throw new UnsupportedOperationException(
+ "Can't handle InterruptedException in onCaptureStarted");
+ }
+ }
+
+ @Override
+ public void onCaptureCompleted(CameraCaptureSession session, CaptureRequest request,
+ TotalCaptureResult result) {
+ try {
+ mNumFramesArrived.incrementAndGet();
+ mQueue.put(result);
+ } catch (InterruptedException e) {
+ throw new UnsupportedOperationException(
+ "Can't handle InterruptedException in onCaptureCompleted");
+ }
+ }
+
+ @Override
+ public void onCaptureFailed(CameraCaptureSession session, CaptureRequest request,
+ CaptureFailure failure) {
+ try {
+ mFailureQueue.put(failure);
+ } catch (InterruptedException e) {
+ throw new UnsupportedOperationException(
+ "Can't handle InterruptedException in onCaptureFailed");
+ }
+ }
+
+ @Override
+ public void onCaptureSequenceCompleted(CameraCaptureSession session, int sequenceId,
+ long frameNumber) {
+ }
+
+ public long getTotalNumFrames() {
+ return mNumFramesArrived.get();
+ }
+
+ public CaptureResult getCaptureResult(long timeout) {
+ return getTotalCaptureResult(timeout);
+ }
+
+ public TotalCaptureResult getCaptureResult(long timeout, long timestamp) {
+ try {
+ long currentTs = -1L;
+ TotalCaptureResult result;
+ while (true) {
+ result = mQueue.poll(timeout, TimeUnit.MILLISECONDS);
+ if (result == null) {
+ throw new RuntimeException(
+ "Wait for a capture result timed out in " + timeout + "ms");
+ }
+ currentTs = result.get(CaptureResult.SENSOR_TIMESTAMP);
+ if (currentTs == timestamp) {
+ return result;
+ }
+ }
+
+ } catch (InterruptedException e) {
+ throw new UnsupportedOperationException("Unhandled interrupted exception", e);
+ }
+ }
+
+ public TotalCaptureResult getTotalCaptureResult(long timeout) {
+ try {
+ TotalCaptureResult result = mQueue.poll(timeout, TimeUnit.MILLISECONDS);
+ assertNotNull("Wait for a capture result timed out in " + timeout + "ms", result);
+ return result;
+ } catch (InterruptedException e) {
+ throw new UnsupportedOperationException("Unhandled interrupted exception", e);
+ }
+ }
+
+ /**
+ * Get the {@link #CaptureResult capture result} for a given
+ * {@link #CaptureRequest capture request}.
+ *
+ * @param myRequest The {@link #CaptureRequest capture request} whose
+ * corresponding {@link #CaptureResult capture result} was
+ * being waited for
+ * @param numResultsWait Number of frames to wait for the capture result
+ * before timeout.
+ * @throws TimeoutRuntimeException If more than numResultsWait results are
+ * seen before the result matching myRequest arrives, or each
+ * individual wait for result times out after
+ * {@value #CAPTURE_RESULT_TIMEOUT_MS}ms.
+ */
+ public CaptureResult getCaptureResultForRequest(CaptureRequest myRequest,
+ int numResultsWait) {
+ return getTotalCaptureResultForRequest(myRequest, numResultsWait);
+ }
+
+ /**
+ * Get the {@link #TotalCaptureResult total capture result} for a given
+ * {@link #CaptureRequest capture request}.
+ *
+ * @param myRequest The {@link #CaptureRequest capture request} whose
+ * corresponding {@link #TotalCaptureResult capture result} was
+ * being waited for
+ * @param numResultsWait Number of frames to wait for the capture result
+ * before timeout.
+ * @throws TimeoutRuntimeException If more than numResultsWait results are
+ * seen before the result matching myRequest arrives, or each
+ * individual wait for result times out after
+ * {@value #CAPTURE_RESULT_TIMEOUT_MS}ms.
+ */
+ public TotalCaptureResult getTotalCaptureResultForRequest(CaptureRequest myRequest,
+ int numResultsWait) {
+ ArrayList<CaptureRequest> captureRequests = new ArrayList<>(1);
+ captureRequests.add(myRequest);
+ return getTotalCaptureResultsForRequests(captureRequests, numResultsWait)[0];
+ }
+
+ /**
+ * Get an array of {@link #TotalCaptureResult total capture results} for a given list of
+ * {@link #CaptureRequest capture requests}. This can be used when the order of results
+ * may not the same as the order of requests.
+ *
+ * @param captureRequests The list of {@link #CaptureRequest capture requests} whose
+ * corresponding {@link #TotalCaptureResult capture results} are
+ * being waited for.
+ * @param numResultsWait Number of frames to wait for the capture results
+ * before timeout.
+ * @throws TimeoutRuntimeException If more than numResultsWait results are
+ * seen before all the results matching captureRequests arrives.
+ */
+ public TotalCaptureResult[] getTotalCaptureResultsForRequests(
+ List<CaptureRequest> captureRequests, int numResultsWait) {
+ if (numResultsWait < 0) {
+ throw new IllegalArgumentException("numResultsWait must be no less than 0");
+ }
+ if (captureRequests == null || captureRequests.size() == 0) {
+ throw new IllegalArgumentException("captureRequests must have at least 1 request.");
+ }
+
+ // Create a request -> a list of result indices map that it will wait for.
+ HashMap<CaptureRequest, ArrayList<Integer>> remainingResultIndicesMap = new HashMap<>();
+ for (int i = 0; i < captureRequests.size(); i++) {
+ CaptureRequest request = captureRequests.get(i);
+ ArrayList<Integer> indices = remainingResultIndicesMap.get(request);
+ if (indices == null) {
+ indices = new ArrayList<>();
+ remainingResultIndicesMap.put(request, indices);
+ }
+ indices.add(i);
+ }
+
+ TotalCaptureResult[] results = new TotalCaptureResult[captureRequests.size()];
+ int i = 0;
+ do {
+ TotalCaptureResult result = getTotalCaptureResult(CAPTURE_RESULT_TIMEOUT_MS);
+ CaptureRequest request = result.getRequest();
+ ArrayList<Integer> indices = remainingResultIndicesMap.get(request);
+ if (indices != null) {
+ results[indices.get(0)] = result;
+ indices.remove(0);
+
+ // Remove the entry if all results for this request has been fulfilled.
+ if (indices.isEmpty()) {
+ remainingResultIndicesMap.remove(request);
+ }
+ }
+
+ if (remainingResultIndicesMap.isEmpty()) {
+ return results;
+ }
+ } while (i++ < numResultsWait);
+
+ throw new TimeoutRuntimeException("Unable to get the expected capture result after "
+ + "waiting for " + numResultsWait + " results");
+ }
+
+ /**
+ * Get an array list of {@link #CaptureFailure capture failure} with maxNumFailures entries
+ * at most. If it times out before maxNumFailures failures are received, return the failures
+ * received so far.
+ *
+ * @param maxNumFailures The maximal number of failures to return. If it times out before
+ * the maximal number of failures are received, return the received
+ * failures so far.
+ * @throws UnsupportedOperationException If an error happens while waiting on the failure.
+ */
+ public ArrayList<CaptureFailure> getCaptureFailures(long maxNumFailures) {
+ ArrayList<CaptureFailure> failures = new ArrayList<>();
+ try {
+ for (int i = 0; i < maxNumFailures; i++) {
+ CaptureFailure failure = mFailureQueue.poll(CAPTURE_RESULT_TIMEOUT_MS,
+ TimeUnit.MILLISECONDS);
+ if (failure == null) {
+ // If waiting on a failure times out, return the failures so far.
+ break;
+ }
+ failures.add(failure);
+ }
+ } catch (InterruptedException e) {
+ throw new UnsupportedOperationException("Unhandled interrupted exception", e);
+ }
+
+ return failures;
+ }
+
+ /**
+ * Wait until the capture start of a request and expected timestamp arrives or it times
+ * out after a number of capture starts.
+ *
+ * @param request The request for the capture start to wait for.
+ * @param timestamp The timestamp for the capture start to wait for.
+ * @param numCaptureStartsWait The number of capture start events to wait for before timing
+ * out.
+ */
+ public void waitForCaptureStart(CaptureRequest request, Long timestamp,
+ int numCaptureStartsWait) throws Exception {
+ Pair<CaptureRequest, Long> expectedShutter = new Pair<>(request, timestamp);
+
+ int i = 0;
+ do {
+ Pair<CaptureRequest, Long> shutter = mCaptureStartQueue.poll(
+ CAPTURE_RESULT_TIMEOUT_MS, TimeUnit.MILLISECONDS);
+
+ if (shutter == null) {
+ throw new TimeoutRuntimeException("Unable to get any more capture start " +
+ "event after waiting for " + CAPTURE_RESULT_TIMEOUT_MS + " ms.");
+ } else if (expectedShutter.equals(shutter)) {
+ return;
+ }
+
+ } while (i++ < numCaptureStartsWait);
+
+ throw new TimeoutRuntimeException("Unable to get the expected capture start " +
+ "event after waiting for " + numCaptureStartsWait + " capture starts");
+ }
+
+ public boolean hasMoreResults()
+ {
+ return mQueue.isEmpty();
+ }
+
+ public void drain() {
+ mQueue.clear();
+ mNumFramesArrived.getAndSet(0);
+ mFailureQueue.clear();
+ mCaptureStartQueue.clear();
+ }
+ }
+
+ /**
+ * Block until the camera is opened.
+ *
+ * <p>Don't use this to test #onDisconnected/#onError since this will throw
+ * an AssertionError if it fails to open the camera device.</p>
+ *
+ * @return CameraDevice opened camera device
+ *
+ * @throws IllegalArgumentException
+ * If the handler is null, or if the handler's looper is current.
+ * @throws CameraAccessException
+ * If open fails immediately.
+ * @throws BlockingOpenException
+ * If open fails after blocking for some amount of time.
+ * @throws TimeoutRuntimeException
+ * If opening times out. Typically unrecoverable.
+ */
+ public static CameraDevice openCamera(CameraManager manager, String cameraId,
+ CameraDevice.StateCallback listener, Handler handler) throws CameraAccessException,
+ BlockingOpenException {
+
+ /**
+ * Although camera2 API allows 'null' Handler (it will just use the current
+ * thread's Looper), this is not what we want for CTS.
+ *
+ * In Camera framework test the default looper is used only to process events
+ * in between test runs,
+ * so anything sent there would not be executed inside a test and the test would fail.
+ *
+ * In this case, BlockingCameraManager#openCamera performs the check for us.
+ */
+ return (new BlockingCameraManager(manager)).openCamera(cameraId, listener, handler);
+ }
+
+
+    /**
+     * Block until the camera is opened, with no device state callback.
+     *
+     * <p>Don't use this to test #onDisconnected/#onError since this will throw
+     * an AssertionError if it fails to open the camera device.</p>
+     *
+     * @throws IllegalArgumentException
+     *             If the handler is null, or if the handler's looper is current.
+     * @throws CameraAccessException
+     *             If open fails immediately.
+     * @throws BlockingOpenException
+     *             If open fails after blocking for some amount of time.
+     * @throws TimeoutRuntimeException
+     *             If opening times out. Typically unrecoverable.
+     */
+    public static CameraDevice openCamera(CameraManager manager, String cameraId, Handler handler)
+            throws CameraAccessException, BlockingOpenException {
+        // Delegate to the full overload with no state callback.
+        return openCamera(manager, cameraId, /*listener*/null, handler);
+    }
+
+    /**
+     * Configure a new camera session with output surfaces and type.
+     *
+     * @param camera The CameraDevice to be configured.
+     * @param outputSurfaces The surface list that used for camera output.
+     * @param isHighSpeed Whether to create a constrained high speed session.
+     * @param listener The callback CameraDevice will notify when capture results are available.
+     * @param handler The handler on which the listener is invoked.
+     */
+    public static CameraCaptureSession configureCameraSession(CameraDevice camera,
+            List<Surface> outputSurfaces, boolean isHighSpeed,
+            CameraCaptureSession.StateCallback listener, Handler handler)
+            throws CameraAccessException {
+        BlockingSessionCallback sessionListener = new BlockingSessionCallback(listener);
+        if (isHighSpeed) {
+            camera.createConstrainedHighSpeedCaptureSession(outputSurfaces,
+                    sessionListener, handler);
+        } else {
+            camera.createCaptureSession(outputSurfaces, sessionListener, handler);
+        }
+        CameraCaptureSession session =
+                sessionListener.waitAndGetSession(SESSION_CONFIGURE_TIMEOUT_MS);
+        assertFalse("Camera session should not be a reprocessable session",
+                session.isReprocessable());
+        // The created session class must agree with the requested session type.
+        boolean isHighSpeedClass =
+                CameraConstrainedHighSpeedCaptureSession.class.isAssignableFrom(
+                        session.getClass());
+        String sessionType = isHighSpeed ? "High Speed" : "Normal";
+        assertTrue("Capture session type must be " + sessionType,
+                isHighSpeed == isHighSpeedClass);
+
+        return session;
+    }
+
+    /**
+     * Configure a new normal (non-high-speed) camera session with output surfaces.
+     *
+     * @param camera The CameraDevice to be configured.
+     * @param outputSurfaces The surface list that used for camera output.
+     * @param listener The callback CameraDevice will notify when capture results are available.
+     * @param handler The handler on which the listener is invoked.
+     */
+    public static CameraCaptureSession configureCameraSession(CameraDevice camera,
+            List<Surface> outputSurfaces,
+            CameraCaptureSession.StateCallback listener, Handler handler)
+            throws CameraAccessException {
+        // Delegate to the full overload with high speed disabled.
+        return configureCameraSession(camera, outputSurfaces, /*isHighSpeed*/false,
+                listener, handler);
+    }
+
+    /**
+     * Configure a new reprocessable camera session with the given input
+     * configuration and output surfaces, blocking until it is ready.
+     */
+    public static CameraCaptureSession configureReprocessableCameraSession(CameraDevice camera,
+            InputConfiguration inputConfiguration, List<Surface> outputSurfaces,
+            CameraCaptureSession.StateCallback listener, Handler handler)
+            throws CameraAccessException {
+        BlockingSessionCallback sessionListener = new BlockingSessionCallback(listener);
+        camera.createReprocessableCaptureSession(inputConfiguration, outputSurfaces,
+                sessionListener, handler);
+
+        // Wait until the session is either ready or has failed to configure.
+        int state = sessionListener.getStateWaiter().waitForAnyOfStates(
+                Arrays.asList(new Integer[] {BlockingSessionCallback.SESSION_READY,
+                        BlockingSessionCallback.SESSION_CONFIGURE_FAILED}),
+                SESSION_CONFIGURE_TIMEOUT_MS);
+
+        assertTrue("Creating a reprocessable session failed.",
+                state == BlockingSessionCallback.SESSION_READY);
+
+        CameraCaptureSession session =
+                sessionListener.waitAndGetSession(SESSION_CONFIGURE_TIMEOUT_MS);
+        assertTrue("Camera session should be a reprocessable session", session.isReprocessable());
+
+        return session;
+    }
+
+ public static <T> void assertArrayNotEmpty(T arr, String message) {
+ assertTrue(message, arr != null && Array.getLength(arr) > 0);
+ }
+
+ /**
+ * Check if the format is a legal YUV format camera supported.
+ */
+ public static void checkYuvFormat(int format) {
+ if ((format != ImageFormat.YUV_420_888) &&
+ (format != ImageFormat.NV21) &&
+ (format != ImageFormat.YV12)) {
+ fail("Wrong formats: " + format);
+ }
+ }
+
+ /**
+ * Check if image size and format match given size and format.
+ */
+ public static void checkImage(Image image, int width, int height, int format) {
+ // Image reader will wrap YV12/NV21 image by YUV_420_888
+ if (format == ImageFormat.NV21 || format == ImageFormat.YV12) {
+ format = ImageFormat.YUV_420_888;
+ }
+ assertNotNull("Input image is invalid", image);
+ assertEquals("Format doesn't match", format, image.getFormat());
+ assertEquals("Width doesn't match", width, image.getWidth());
+ assertEquals("Height doesn't match", height, image.getHeight());
+ }
+
+    /**
+     * <p>Read data from all planes of an Image into a contiguous unpadded, unpacked
+     * 1-D linear byte array, such that it can be write into disk, or accessed by
+     * software conveniently. It supports YUV_420_888/NV21/YV12 and JPEG input
+     * Image format.</p>
+     *
+     * <p>For YUV_420_888/NV21/YV12/Y8/Y16, it returns a byte array that contains
+     * the Y plane data first, followed by U(Cb), V(Cr) planes if there is any
+     * (xstride = width, ystride = height for chroma and luma components).</p>
+     *
+     * <p>For JPEG, it returns a 1-D byte array contains a complete JPEG image.</p>
+     */
+    public static byte[] getDataFromImage(Image image) {
+        assertNotNull("Invalid image:", image);
+        int format = image.getFormat();
+        int width = image.getWidth();
+        int height = image.getHeight();
+        int rowStride, pixelStride;
+        byte[] data = null;
+
+        // Read image data
+        Plane[] planes = image.getPlanes();
+        assertTrue("Fail to get image planes", planes != null && planes.length > 0);
+
+        // Check image validity
+        checkAndroidImageFormat(image);
+
+        ByteBuffer buffer = null;
+        // JPEG doesn't have pixelstride and rowstride, treat it as 1D buffer.
+        // Same goes for DEPTH_POINT_CLOUD
+        if (format == ImageFormat.JPEG || format == ImageFormat.DEPTH_POINT_CLOUD ||
+                format == ImageFormat.RAW_PRIVATE) {
+            buffer = planes[0].getBuffer();
+            assertNotNull("Fail to get jpeg or depth ByteBuffer", buffer);
+            data = new byte[buffer.remaining()];
+            buffer.get(data);
+            // Rewind so the caller can re-read the plane buffer if needed.
+            buffer.rewind();
+            return data;
+        }
+
+        // Multi-planar path: pack each plane tightly into one output array.
+        int offset = 0;
+        data = new byte[width * height * ImageFormat.getBitsPerPixel(format) / 8];
+        // Size the scratch row buffer for the widest row stride of any plane.
+        int maxRowSize = planes[0].getRowStride();
+        for (int i = 0; i < planes.length; i++) {
+            if (maxRowSize < planes[i].getRowStride()) {
+                maxRowSize = planes[i].getRowStride();
+            }
+        }
+        byte[] rowData = new byte[maxRowSize];
+        if(VERBOSE) Log.v(TAG, "get data from " + planes.length + " planes");
+        for (int i = 0; i < planes.length; i++) {
+            buffer = planes[i].getBuffer();
+            assertNotNull("Fail to get bytebuffer from plane", buffer);
+            rowStride = planes[i].getRowStride();
+            pixelStride = planes[i].getPixelStride();
+            assertTrue("pixel stride " + pixelStride + " is invalid", pixelStride > 0);
+            if (VERBOSE) {
+                Log.v(TAG, "pixelStride " + pixelStride);
+                Log.v(TAG, "rowStride " + rowStride);
+                Log.v(TAG, "width " + width);
+                Log.v(TAG, "height " + height);
+            }
+            // For multi-planar yuv images, assuming yuv420 with 2x2 chroma subsampling.
+            int w = (i == 0) ? width : width / 2;
+            int h = (i == 0) ? height : height / 2;
+            assertTrue("rowStride " + rowStride + " should be >= width " + w , rowStride >= w);
+            for (int row = 0; row < h; row++) {
+                int bytesPerPixel = ImageFormat.getBitsPerPixel(format) / 8;
+                int length;
+                if (pixelStride == bytesPerPixel) {
+                    // Special case: optimized read of the entire row
+                    length = w * bytesPerPixel;
+                    buffer.get(data, offset, length);
+                    offset += length;
+                } else {
+                    // Generic case: should work for any pixelStride but slower.
+                    // Use intermediate buffer to avoid read byte-by-byte from
+                    // DirectByteBuffer, which is very bad for performance
+                    length = (w - 1) * pixelStride + bytesPerPixel;
+                    buffer.get(rowData, 0, length);
+                    for (int col = 0; col < w; col++) {
+                        data[offset++] = rowData[col * pixelStride];
+                    }
+                }
+                // Advance buffer the remainder of the row stride
+                // (the last row may be shorter than rowStride, so skip it there).
+                if (row < h - 1) {
+                    buffer.position(buffer.position() + rowStride - length);
+                }
+            }
+            if (VERBOSE) Log.v(TAG, "Finished reading data from plane " + i);
+            buffer.rewind();
+        }
+        return data;
+    }
+
+    /**
+     * <p>Check android image format validity for an image, only support below formats:</p>
+     *
+     * <p>YUV_420_888/NV21/YV12, can add more for future</p>
+     */
+    public static void checkAndroidImageFormat(Image image) {
+        int format = image.getFormat();
+        Plane[] planes = image.getPlanes();
+        if (format == ImageFormat.YUV_420_888 || format == ImageFormat.NV21
+                || format == ImageFormat.YV12) {
+            assertEquals("YUV420 format Images should have 3 planes", 3, planes.length);
+        } else if (format == ImageFormat.JPEG || format == ImageFormat.RAW_SENSOR
+                || format == ImageFormat.RAW_PRIVATE || format == ImageFormat.DEPTH16
+                || format == ImageFormat.DEPTH_POINT_CLOUD) {
+            assertEquals("JPEG/RAW/depth Images should have one plane", 1, planes.length);
+        } else {
+            fail("Unsupported Image Format: " + format);
+        }
+    }
+
+    /**
+     * Write a {@link Bitmap} to the given file as JPEG (quality 90).
+     *
+     * <p>Uses try-with-resources so the stream is closed even if the write
+     * path throws, fixing a file-descriptor leak in the previous version.</p>
+     *
+     * @param fileName Destination path for the debug output file.
+     * @param data Bitmap to be compressed and written.
+     */
+    public static void dumpFile(String fileName, Bitmap data) {
+        FileOutputStream outStream;
+        try {
+            Log.v(TAG, "output will be saved as " + fileName);
+            outStream = new FileOutputStream(fileName);
+        } catch (IOException ioe) {
+            throw new RuntimeException("Unable to create debug output file " + fileName, ioe);
+        }
+
+        try (FileOutputStream out = outStream) {
+            data.compress(Bitmap.CompressFormat.JPEG, /*quality*/90, out);
+        } catch (IOException ioe) {
+            throw new RuntimeException("failed writing data to file " + fileName, ioe);
+        }
+    }
+
+    /**
+     * Write a raw byte array to the given file.
+     *
+     * <p>Uses try-with-resources so the stream is closed even if the write
+     * throws, fixing a file-descriptor leak in the previous version.</p>
+     *
+     * @param fileName Destination path for the debug output file.
+     * @param data Bytes to be written.
+     */
+    public static void dumpFile(String fileName, byte[] data) {
+        FileOutputStream outStream;
+        try {
+            Log.v(TAG, "output will be saved as " + fileName);
+            outStream = new FileOutputStream(fileName);
+        } catch (IOException ioe) {
+            throw new RuntimeException("Unable to create debug output file " + fileName, ioe);
+        }
+
+        try (FileOutputStream out = outStream) {
+            out.write(data);
+        } catch (IOException ioe) {
+            throw new RuntimeException("failed writing data to file " + fileName, ioe);
+        }
+    }
+
+    /**
+     * Get the available output sizes for the user-defined {@code format}.
+     *
+     * <p>High resolution output sizes, if any, are appended after the regular
+     * sizes. Implementation-defined/hidden formats are not supported.</p>
+     */
+    public static Size[] getSupportedSizeForFormat(int format, String cameraId,
+            CameraManager cameraManager) throws CameraAccessException {
+        CameraCharacteristics properties = cameraManager.getCameraCharacteristics(cameraId);
+        assertNotNull("Can't get camera characteristics!", properties);
+        if (VERBOSE) {
+            Log.v(TAG, "get camera characteristics for camera: " + cameraId);
+        }
+        StreamConfigurationMap configMap =
+                properties.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
+        Size[] availableSizes = configMap.getOutputSizes(format);
+        assertArrayNotEmpty(availableSizes, "availableSizes should not be empty for format: "
+                + format);
+        Size[] highResSizes = configMap.getHighResolutionOutputSizes(format);
+        if (highResSizes != null && highResSizes.length > 0) {
+            // Append the high resolution sizes after the regular ones.
+            Size[] combined = Arrays.copyOf(availableSizes,
+                    availableSizes.length + highResSizes.length);
+            System.arraycopy(highResSizes, 0, combined, availableSizes.length,
+                    highResSizes.length);
+            availableSizes = combined;
+        }
+        if (VERBOSE) Log.v(TAG, "Supported sizes are: " + Arrays.deepToString(availableSizes));
+        return availableSizes;
+    }
+
+    /**
+     * Get the available output sizes for the given class.
+     *
+     * <p>High resolution PRIVATE-format output sizes, if any, are appended
+     * after the regular sizes.</p>
+     */
+    public static Size[] getSupportedSizeForClass(Class klass, String cameraId,
+            CameraManager cameraManager) throws CameraAccessException {
+        CameraCharacteristics properties = cameraManager.getCameraCharacteristics(cameraId);
+        assertNotNull("Can't get camera characteristics!", properties);
+        if (VERBOSE) {
+            Log.v(TAG, "get camera characteristics for camera: " + cameraId);
+        }
+        StreamConfigurationMap configMap =
+                properties.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
+        Size[] availableSizes = configMap.getOutputSizes(klass);
+        assertArrayNotEmpty(availableSizes, "availableSizes should not be empty for class: "
+                + klass);
+        Size[] highResSizes = configMap.getHighResolutionOutputSizes(ImageFormat.PRIVATE);
+        if (highResSizes != null && highResSizes.length > 0) {
+            // Append the high resolution sizes after the regular ones.
+            Size[] combined = Arrays.copyOf(availableSizes,
+                    availableSizes.length + highResSizes.length);
+            System.arraycopy(highResSizes, 0, combined, availableSizes.length,
+                    highResSizes.length);
+            availableSizes = combined;
+        }
+        if (VERBOSE) Log.v(TAG, "Supported sizes are: " + Arrays.deepToString(availableSizes));
+        return availableSizes;
+    }
+
+    /**
+     * Size comparator that orders sizes by the number of pixels they cover;
+     * ties on area are broken by comparing widths.
+     */
+    public static class SizeComparator implements Comparator<Size> {
+        @Override
+        public int compare(Size lhs, Size rhs) {
+            // Delegate the area/width comparison to the shared helper.
+            return CameraUtils.compareSizes(
+                    lhs.getWidth(), lhs.getHeight(), rhs.getWidth(), rhs.getHeight());
+        }
+    }
+
+    /**
+     * Get sorted size list in descending order. Remove the sizes larger than
+     * the bound. If the bound is null, don't do the size bound filtering.
+     */
+    static public List<Size> getSupportedPreviewSizes(String cameraId,
+            CameraManager cameraManager, Size bound) throws CameraAccessException {
+
+        Size[] rawSizes = getSupportedSizeForClass(android.view.SurfaceHolder.class, cameraId,
+                cameraManager);
+        assertArrayNotEmpty(rawSizes,
+                "Available sizes for SurfaceHolder class should not be empty");
+        if (VERBOSE) {
+            Log.v(TAG, "Supported sizes are: " + Arrays.deepToString(rawSizes));
+        }
+
+        List<Size> filtered = new ArrayList<Size>();
+        for (Size candidate : rawSizes) {
+            // With no bound every size passes; otherwise keep sizes within bound.
+            if (bound == null || (candidate.getWidth() <= bound.getWidth()
+                    && candidate.getHeight() <= bound.getHeight())) {
+                filtered.add(candidate);
+            }
+        }
+        return getAscendingOrderSizes(filtered, /*ascending*/false);
+    }
+
+    /**
+     * Get a sorted list of sizes from a given size list.
+     *
+     * <p>
+     * The size is compare by area it covers, if the areas are same, then
+     * compare the widths.
+     * </p>
+     *
+     * @param sizeList The input size list to be sorted
+     * @param ascending True if the order is ascending, otherwise descending order
+     * @return The ordered list of sizes
+     */
+    static public List<Size> getAscendingOrderSizes(final List<Size> sizeList, boolean ascending) {
+        if (sizeList == null) {
+            throw new IllegalArgumentException("sizeList shouldn't be null");
+        }
+
+        List<Size> sorted = new ArrayList<Size>(sizeList);
+        Collections.sort(sorted, new SizeComparator());
+        if (!ascending) {
+            // Flip the ascending sort into descending order.
+            Collections.reverse(sorted);
+        }
+        return sorted;
+    }
+
+    /**
+     * Get sorted (descending order) size list for given format. Remove the sizes larger than
+     * the bound. If the bound is null, don't do the size bound filtering.
+     */
+    static public List<Size> getSortedSizesForFormat(String cameraId,
+            CameraManager cameraManager, int format, Size bound) throws CameraAccessException {
+        SizeComparator comparator = new SizeComparator();
+        Size[] sizes = getSupportedSizeForFormat(format, cameraId, cameraManager);
+        List<Size> result;
+        if (bound == null) {
+            result = Arrays.asList(sizes);
+        } else {
+            result = new ArrayList<Size>(/*capacity*/1);
+            for (Size sz : sizes) {
+                if (comparator.compare(sz, bound) <= 0) {
+                    result.add(sz);
+                }
+            }
+        }
+        assertTrue("Supported size list should have at least one element",
+                result.size() > 0);
+
+        // Sort ascending first, then flip to get descending order.
+        Collections.sort(result, comparator);
+        Collections.reverse(result);
+        return result;
+    }
+
+ /**
+ * Get supported video size list for a given camera device.
+ *
+ * <p>
+ * Filter out the sizes that are larger than the bound. If the bound is
+ * null, don't do the size bound filtering.
+ * </p>
+ */
+ static public List<Size> getSupportedVideoSizes(String cameraId,
+ CameraManager cameraManager, Size bound) throws CameraAccessException {
+
+ Size[] rawSizes = getSupportedSizeForClass(android.media.MediaRecorder.class,
+ cameraId, cameraManager);
+ assertArrayNotEmpty(rawSizes,
+ "Available sizes for MediaRecorder class should not be empty");
+ if (VERBOSE) {
+ Log.v(TAG, "Supported sizes are: " + Arrays.deepToString(rawSizes));
+ }
+
+ if (bound == null) {
+ return getAscendingOrderSizes(Arrays.asList(rawSizes), /*ascending*/false);
+ }
+
+ List<Size> sizes = new ArrayList<Size>();
+ for (Size sz: rawSizes) {
+ if (sz.getWidth() <= bound.getWidth() && sz.getHeight() <= bound.getHeight()) {
+ sizes.add(sz);
+ }
+ }
+ return getAscendingOrderSizes(sizes, /*ascending*/false);
+ }
+
    /**
     * Get supported still capture (JPEG) size list (descending order) for a given camera
     * device.
     *
     * <p>
     * Filter out the sizes that are larger than the bound. If the bound is
     * null, don't do the size bound filtering.
     * </p>
     */
    static public List<Size> getSupportedStillSizes(String cameraId,
            CameraManager cameraManager, Size bound) throws CameraAccessException {
        return getSortedSizesForFormat(cameraId, cameraManager, ImageFormat.JPEG, bound);
    }
+
+ static public Size getMinPreviewSize(String cameraId, CameraManager cameraManager)
+ throws CameraAccessException {
+ List<Size> sizes = getSupportedPreviewSizes(cameraId, cameraManager, null);
+ return sizes.get(sizes.size() - 1);
+ }
+
    /**
     * Get max supported preview size for a camera device.
     *
     * @param cameraId The id of the camera to query.
     * @param cameraManager The camera manager used to query the supported sizes.
     * @return The largest supported preview size.
     * @throws CameraAccessException if querying the camera fails.
     */
    static public Size getMaxPreviewSize(String cameraId, CameraManager cameraManager)
            throws CameraAccessException {
        // Delegate to the bounded variant with no upper size bound.
        return getMaxPreviewSize(cameraId, cameraManager, /*bound*/null);
    }
+
+ /**
+ * Get max preview size for a camera device in the supported sizes that are no larger
+ * than the bound.
+ */
+ static public Size getMaxPreviewSize(String cameraId, CameraManager cameraManager, Size bound)
+ throws CameraAccessException {
+ List<Size> sizes = getSupportedPreviewSizes(cameraId, cameraManager, bound);
+ return sizes.get(0);
+ }
+
+ /**
+ * Get max depth size for a camera device.
+ */
+ static public Size getMaxDepthSize(String cameraId, CameraManager cameraManager)
+ throws CameraAccessException {
+ List<Size> sizes = getSortedSizesForFormat(cameraId, cameraManager, ImageFormat.DEPTH16,
+ /*bound*/ null);
+ return sizes.get(0);
+ }
+
+ /**
+ * Get the largest size by area.
+ *
+ * @param sizes an array of sizes, must have at least 1 element
+ *
+ * @return Largest Size
+ *
+ * @throws IllegalArgumentException if sizes was null or had 0 elements
+ */
+ public static Size getMaxSize(Size... sizes) {
+ if (sizes == null || sizes.length == 0) {
+ throw new IllegalArgumentException("sizes was empty");
+ }
+
+ Size sz = sizes[0];
+ for (Size size : sizes) {
+ if (size.getWidth() * size.getHeight() > sz.getWidth() * sz.getHeight()) {
+ sz = size;
+ }
+ }
+
+ return sz;
+ }
+
+ /**
+ * Returns true if the given {@code array} contains the given element.
+ *
+ * @param array {@code array} to check for {@code elem}
+ * @param elem {@code elem} to test for
+ * @return {@code true} if the given element is contained
+ */
+ public static boolean contains(int[] array, int elem) {
+ if (array == null) return false;
+ for (int i = 0; i < array.length; i++) {
+ if (elem == array[i]) return true;
+ }
+ return false;
+ }
+
    /**
     * Get object array from byte array.
     *
     * @param array Input byte array to be converted; must be non-empty
     * @return Byte object array converted from input byte array
     * @throws IllegalArgumentException if {@code array} is empty
     * @throws NullPointerException if {@code array} is null
     */
    public static Byte[] toObject(byte[] array) {
        return convertPrimitiveArrayToObjectArray(array, Byte.class);
    }
+
    /**
     * Get object array from int array.
     *
     * @param array Input int array to be converted; must be non-empty
     * @return Integer object array converted from input int array
     * @throws IllegalArgumentException if {@code array} is empty
     * @throws NullPointerException if {@code array} is null
     */
    public static Integer[] toObject(int[] array) {
        return convertPrimitiveArrayToObjectArray(array, Integer.class);
    }
+
    /**
     * Get object array from float array.
     *
     * @param array Input float array to be converted; must be non-empty
     * @return Float object array converted from input float array
     * @throws IllegalArgumentException if {@code array} is empty
     * @throws NullPointerException if {@code array} is null
     */
    public static Float[] toObject(float[] array) {
        return convertPrimitiveArrayToObjectArray(array, Float.class);
    }
+
    /**
     * Get object array from double array.
     *
     * @param array Input double array to be converted; must be non-empty
     * @return Double object array converted from input double array
     * @throws IllegalArgumentException if {@code array} is empty
     * @throws NullPointerException if {@code array} is null
     */
    public static Double[] toObject(double[] array) {
        return convertPrimitiveArrayToObjectArray(array, Double.class);
    }
+
+ /**
+ * Convert a primitive input array into its object array version (e.g. from int[] to Integer[]).
+ *
+ * @param array Input array object
+ * @param wrapperClass The boxed class it converts to
+ * @return Boxed version of primitive array
+ */
+ private static <T> T[] convertPrimitiveArrayToObjectArray(final Object array,
+ final Class<T> wrapperClass) {
+ // getLength does the null check and isArray check already.
+ int arrayLength = Array.getLength(array);
+ if (arrayLength == 0) {
+ throw new IllegalArgumentException("Input array shouldn't be empty");
+ }
+
+ @SuppressWarnings("unchecked")
+ final T[] result = (T[]) Array.newInstance(wrapperClass, arrayLength);
+ for (int i = 0; i < arrayLength; i++) {
+ Array.set(result, i, Array.get(array, i));
+ }
+ return result;
+ }
+
+ /**
+ * Validate image based on format and size.
+ *
+ * @param image The image to be validated.
+ * @param width The image width.
+ * @param height The image height.
+ * @param format The image format.
+ * @param filePath The debug dump file path, null if don't want to dump to
+ * file.
+ * @throws UnsupportedOperationException if calling with an unknown format
+ */
+ public static void validateImage(Image image, int width, int height, int format,
+ String filePath) {
+ checkImage(image, width, height, format);
+
+ /**
+ * TODO: validate timestamp:
+ * 1. capture result timestamp against the image timestamp (need
+ * consider frame drops)
+ * 2. timestamps should be monotonically increasing for different requests
+ */
+ if(VERBOSE) Log.v(TAG, "validating Image");
+ byte[] data = getDataFromImage(image);
+ assertTrue("Invalid image data", data != null && data.length > 0);
+
+ switch (format) {
+ case ImageFormat.JPEG:
+ validateJpegData(data, width, height, filePath);
+ break;
+ case ImageFormat.YUV_420_888:
+ case ImageFormat.YV12:
+ validateYuvData(data, width, height, format, image.getTimestamp(), filePath);
+ break;
+ case ImageFormat.RAW_SENSOR:
+ validateRaw16Data(data, width, height, format, image.getTimestamp(), filePath);
+ break;
+ case ImageFormat.DEPTH16:
+ validateDepth16Data(data, width, height, format, image.getTimestamp(), filePath);
+ break;
+ case ImageFormat.DEPTH_POINT_CLOUD:
+ validateDepthPointCloudData(data, width, height, format, image.getTimestamp(), filePath);
+ break;
+ case ImageFormat.RAW_PRIVATE:
+ validateRawPrivateData(data, width, height, image.getTimestamp(), filePath);
+ break;
+ default:
+ throw new UnsupportedOperationException("Unsupported format for validation: "
+ + format);
+ }
+ }
+
+ /**
+ * Provide a mock for {@link CameraDevice.StateCallback}.
+ *
+ * <p>Only useful because mockito can't mock {@link CameraDevice.StateCallback} which is an
+ * abstract class.</p>
+ *
+ * <p>
+ * Use this instead of other classes when needing to verify interactions, since
+ * trying to spy on {@link BlockingStateCallback} (or others) will cause unnecessary extra
+ * interactions which will cause false test failures.
+ * </p>
+ *
+ */
+ public static class MockStateCallback extends CameraDevice.StateCallback {
+
+ @Override
+ public void onOpened(CameraDevice camera) {
+ }
+
+ @Override
+ public void onDisconnected(CameraDevice camera) {
+ }
+
+ @Override
+ public void onError(CameraDevice camera, int error) {
+ }
+
+ private MockStateCallback() {}
+
+ /**
+ * Create a Mockito-ready mocked StateCallback.
+ */
+ public static MockStateCallback mock() {
+ return Mockito.spy(new MockStateCallback());
+ }
+ }
+
+ private static void validateJpegData(byte[] jpegData, int width, int height, String filePath) {
+ BitmapFactory.Options bmpOptions = new BitmapFactory.Options();
+ // DecodeBound mode: only parse the frame header to get width/height.
+ // it doesn't decode the pixel.
+ bmpOptions.inJustDecodeBounds = true;
+ BitmapFactory.decodeByteArray(jpegData, 0, jpegData.length, bmpOptions);
+ assertEquals(width, bmpOptions.outWidth);
+ assertEquals(height, bmpOptions.outHeight);
+
+ // Pixel decoding mode: decode whole image. check if the image data
+ // is decodable here.
+ assertNotNull("Decoding jpeg failed",
+ BitmapFactory.decodeByteArray(jpegData, 0, jpegData.length));
+ if (DEBUG && filePath != null) {
+ String fileName =
+ filePath + "/" + width + "x" + height + ".jpeg";
+ dumpFile(fileName, jpegData);
+ }
+ }
+
+ private static void validateYuvData(byte[] yuvData, int width, int height, int format,
+ long ts, String filePath) {
+ checkYuvFormat(format);
+ if (VERBOSE) Log.v(TAG, "Validating YUV data");
+ int expectedSize = width * height * ImageFormat.getBitsPerPixel(format) / 8;
+ assertEquals("Yuv data doesn't match", expectedSize, yuvData.length);
+
+ // TODO: Can add data validation for test pattern.
+
+ if (DEBUG && filePath != null) {
+ String fileName =
+ filePath + "/" + width + "x" + height + "_" + ts / 1e6 + ".yuv";
+ dumpFile(fileName, yuvData);
+ }
+ }
+
+ private static void validateRaw16Data(byte[] rawData, int width, int height, int format,
+ long ts, String filePath) {
+ if (VERBOSE) Log.v(TAG, "Validating raw data");
+ int expectedSize = width * height * ImageFormat.getBitsPerPixel(format) / 8;
+ assertEquals("Raw data doesn't match", expectedSize, rawData.length);
+
+ // TODO: Can add data validation for test pattern.
+
+ if (DEBUG && filePath != null) {
+ String fileName =
+ filePath + "/" + width + "x" + height + "_" + ts / 1e6 + ".raw16";
+ dumpFile(fileName, rawData);
+ }
+
+ return;
+ }
+
+ private static void validateRawPrivateData(byte[] rawData, int width, int height,
+ long ts, String filePath) {
+ if (VERBOSE) Log.v(TAG, "Validating private raw data");
+ // Expect each RAW pixel should occupy at least one byte and no more than 2.5 bytes
+ int expectedSizeMin = width * height;
+ int expectedSizeMax = width * height * 5 / 2;
+
+ assertTrue("Opaque RAW size " + rawData.length + "out of normal bound [" +
+ expectedSizeMin + "," + expectedSizeMax + "]",
+ expectedSizeMin <= rawData.length && rawData.length <= expectedSizeMax);
+
+ if (DEBUG && filePath != null) {
+ String fileName =
+ filePath + "/" + width + "x" + height + "_" + ts / 1e6 + ".rawPriv";
+ dumpFile(fileName, rawData);
+ }
+
+ return;
+ }
+
+ private static void validateDepth16Data(byte[] depthData, int width, int height, int format,
+ long ts, String filePath) {
+
+ if (VERBOSE) Log.v(TAG, "Validating depth16 data");
+ int expectedSize = width * height * ImageFormat.getBitsPerPixel(format) / 8;
+ assertEquals("Depth data doesn't match", expectedSize, depthData.length);
+
+
+ if (DEBUG && filePath != null) {
+ String fileName =
+ filePath + "/" + width + "x" + height + "_" + ts / 1e6 + ".depth16";
+ dumpFile(fileName, depthData);
+ }
+
+ return;
+
+ }
+
+ private static void validateDepthPointCloudData(byte[] depthData, int width, int height, int format,
+ long ts, String filePath) {
+
+ if (VERBOSE) Log.v(TAG, "Validating depth point cloud data");
+
+ // Can't validate size since it is variable
+
+ if (DEBUG && filePath != null) {
+ String fileName =
+ filePath + "/" + width + "x" + height + "_" + ts / 1e6 + ".depth_point_cloud";
+ dumpFile(fileName, depthData);
+ }
+
+ return;
+
+ }
+
+ public static <T> T getValueNotNull(CaptureResult result, CaptureResult.Key<T> key) {
+ if (result == null) {
+ throw new IllegalArgumentException("Result must not be null");
+ }
+
+ T value = result.get(key);
+ assertNotNull("Value of Key " + key.getName() + "shouldn't be null", value);
+ return value;
+ }
+
+ public static <T> T getValueNotNull(CameraCharacteristics characteristics,
+ CameraCharacteristics.Key<T> key) {
+ if (characteristics == null) {
+ throw new IllegalArgumentException("Camera characteristics must not be null");
+ }
+
+ T value = characteristics.get(key);
+ assertNotNull("Value of Key " + key.getName() + "shouldn't be null", value);
+ return value;
+ }
+
+ /**
+ * Get a crop region for a given zoom factor and center position.
+ * <p>
+ * The center position is normalized position in range of [0, 1.0], where
+ * (0, 0) represents top left corner, (1.0. 1.0) represents bottom right
+ * corner. The center position could limit the effective minimal zoom
+ * factor, for example, if the center position is (0.75, 0.75), the
+ * effective minimal zoom position becomes 2.0. If the requested zoom factor
+ * is smaller than 2.0, a crop region with 2.0 zoom factor will be returned.
+ * </p>
+ * <p>
+ * The aspect ratio of the crop region is maintained the same as the aspect
+ * ratio of active array.
+ * </p>
+ *
+ * @param zoomFactor The zoom factor to generate the crop region, it must be
+ * >= 1.0
+ * @param center The normalized zoom center point that is in the range of [0, 1].
+ * @param maxZoom The max zoom factor supported by this device.
+ * @param activeArray The active array size of this device.
+ * @return crop region for the given normalized center and zoom factor.
+ */
+ public static Rect getCropRegionForZoom(float zoomFactor, final PointF center,
+ final float maxZoom, final Rect activeArray) {
+ if (zoomFactor < 1.0) {
+ throw new IllegalArgumentException("zoom factor " + zoomFactor + " should be >= 1.0");
+ }
+ if (center.x > 1.0 || center.x < 0) {
+ throw new IllegalArgumentException("center.x " + center.x
+ + " should be in range of [0, 1.0]");
+ }
+ if (center.y > 1.0 || center.y < 0) {
+ throw new IllegalArgumentException("center.y " + center.y
+ + " should be in range of [0, 1.0]");
+ }
+ if (maxZoom < 1.0) {
+ throw new IllegalArgumentException("max zoom factor " + maxZoom + " should be >= 1.0");
+ }
+ if (activeArray == null) {
+ throw new IllegalArgumentException("activeArray must not be null");
+ }
+
+ float minCenterLength = Math.min(Math.min(center.x, 1.0f - center.x),
+ Math.min(center.y, 1.0f - center.y));
+ float minEffectiveZoom = 0.5f / minCenterLength;
+ if (minEffectiveZoom > maxZoom) {
+ throw new IllegalArgumentException("Requested center " + center.toString() +
+ " has minimal zoomable factor " + minEffectiveZoom + ", which exceeds max"
+ + " zoom factor " + maxZoom);
+ }
+
+ if (zoomFactor < minEffectiveZoom) {
+ Log.w(TAG, "Requested zoomFactor " + zoomFactor + " > minimal zoomable factor "
+ + minEffectiveZoom + ". It will be overwritten by " + minEffectiveZoom);
+ zoomFactor = minEffectiveZoom;
+ }
+
+ int cropCenterX = (int)(activeArray.width() * center.x);
+ int cropCenterY = (int)(activeArray.height() * center.y);
+ int cropWidth = (int) (activeArray.width() / zoomFactor);
+ int cropHeight = (int) (activeArray.height() / zoomFactor);
+
+ return new Rect(
+ /*left*/cropCenterX - cropWidth / 2,
+ /*top*/cropCenterY - cropHeight / 2,
+ /*right*/ cropCenterX + cropWidth / 2 - 1,
+ /*bottom*/cropCenterY + cropHeight / 2 - 1);
+ }
+
+ /**
+ * Calculate output 3A region from the intersection of input 3A region and cropped region.
+ *
+ * @param requestRegions The input 3A regions
+ * @param cropRect The cropped region
+ * @return expected 3A regions output in capture result
+ */
+ public static MeteringRectangle[] getExpectedOutputRegion(
+ MeteringRectangle[] requestRegions, Rect cropRect){
+ MeteringRectangle[] resultRegions = new MeteringRectangle[requestRegions.length];
+ for (int i = 0; i < requestRegions.length; i++) {
+ Rect requestRect = requestRegions[i].getRect();
+ Rect resultRect = new Rect();
+ assertTrue("Input 3A region must intersect cropped region",
+ resultRect.setIntersect(requestRect, cropRect));
+ resultRegions[i] = new MeteringRectangle(
+ resultRect,
+ requestRegions[i].getMeteringWeight());
+ }
+ return resultRegions;
+ }
+
+ /**
+ * Copy source image data to destination image.
+ *
+ * @param src The source image to be copied from.
+ * @param dst The destination image to be copied to.
+ * @throws IllegalArgumentException If the source and destination images have
+ * different format, or one of the images is not copyable.
+ */
+ public static void imageCopy(Image src, Image dst) {
+ if (src == null || dst == null) {
+ throw new IllegalArgumentException("Images should be non-null");
+ }
+ if (src.getFormat() != dst.getFormat()) {
+ throw new IllegalArgumentException("Src and dst images should have the same format");
+ }
+ if (src.getFormat() == ImageFormat.PRIVATE ||
+ dst.getFormat() == ImageFormat.PRIVATE) {
+ throw new IllegalArgumentException("PRIVATE format images are not copyable");
+ }
+
+ // TODO: check the owner of the dst image, it must be from ImageWriter, other source may
+ // not be writable. Maybe we should add an isWritable() method in image class.
+
+ Plane[] srcPlanes = src.getPlanes();
+ Plane[] dstPlanes = dst.getPlanes();
+ ByteBuffer srcBuffer = null;
+ ByteBuffer dstBuffer = null;
+ for (int i = 0; i < srcPlanes.length; i++) {
+ srcBuffer = srcPlanes[i].getBuffer();
+ int srcPos = srcBuffer.position();
+ srcBuffer.rewind();
+ dstBuffer = dstPlanes[i].getBuffer();
+ dstBuffer.rewind();
+ dstBuffer.put(srcBuffer);
+ srcBuffer.position(srcPos);
+ dstBuffer.rewind();
+ }
+ }
+
+ /**
+ * <p>
+ * Checks whether the two images are strongly equal.
+ * </p>
+ * <p>
+ * Two images are strongly equal if and only if the data, formats, sizes,
+ * and timestamps are same. For {@link ImageFormat#PRIVATE PRIVATE} format
+ * images, the image data is not not accessible thus the data comparison is
+ * effectively skipped as the number of planes is zero.
+ * </p>
+ * <p>
+ * Note that this method compares the pixel data even outside of the crop
+ * region, which may not be necessary for general use case.
+ * </p>
+ *
+ * @param lhsImg First image to be compared with.
+ * @param rhsImg Second image to be compared with.
+ * @return true if the two images are equal, false otherwise.
+ * @throws IllegalArgumentException If either of image is null.
+ */
+ public static boolean isImageStronglyEqual(Image lhsImg, Image rhsImg) {
+ if (lhsImg == null || rhsImg == null) {
+ throw new IllegalArgumentException("Images should be non-null");
+ }
+
+ if (lhsImg.getFormat() != rhsImg.getFormat()) {
+ Log.i(TAG, "lhsImg format " + lhsImg.getFormat() + " is different with rhsImg format "
+ + rhsImg.getFormat());
+ return false;
+ }
+
+ if (lhsImg.getWidth() != rhsImg.getWidth()) {
+ Log.i(TAG, "lhsImg width " + lhsImg.getWidth() + " is different with rhsImg width "
+ + rhsImg.getWidth());
+ return false;
+ }
+
+ if (lhsImg.getHeight() != rhsImg.getHeight()) {
+ Log.i(TAG, "lhsImg height " + lhsImg.getHeight() + " is different with rhsImg height "
+ + rhsImg.getHeight());
+ return false;
+ }
+
+ if (lhsImg.getTimestamp() != rhsImg.getTimestamp()) {
+ Log.i(TAG, "lhsImg timestamp " + lhsImg.getTimestamp()
+ + " is different with rhsImg timestamp " + rhsImg.getTimestamp());
+ return false;
+ }
+
+ if (!lhsImg.getCropRect().equals(rhsImg.getCropRect())) {
+ Log.i(TAG, "lhsImg crop rect " + lhsImg.getCropRect()
+ + " is different with rhsImg crop rect " + rhsImg.getCropRect());
+ return false;
+ }
+
+ // Compare data inside of the image.
+ Plane[] lhsPlanes = lhsImg.getPlanes();
+ Plane[] rhsPlanes = rhsImg.getPlanes();
+ ByteBuffer lhsBuffer = null;
+ ByteBuffer rhsBuffer = null;
+ for (int i = 0; i < lhsPlanes.length; i++) {
+ lhsBuffer = lhsPlanes[i].getBuffer();
+ rhsBuffer = rhsPlanes[i].getBuffer();
+ if (!lhsBuffer.equals(rhsBuffer)) {
+ Log.i(TAG, "byte buffers for plane " + i + " don't matach.");
+ return false;
+ }
+ }
+
+ return true;
+ }
+
    /**
     * Set jpeg related keys in a capture request builder.
     *
     * @param builder The capture request builder to set the keys in
     * @param exifData The exif data to set.
     * @param thumbnailSize The thumbnail size to set.
     * @param collector The camera error collector to collect errors.
     */
    public static void setJpegKeys(CaptureRequest.Builder builder, ExifTestData exifData,
            Size thumbnailSize, CameraErrorCollector collector) {
        builder.set(CaptureRequest.JPEG_THUMBNAIL_SIZE, thumbnailSize);
        builder.set(CaptureRequest.JPEG_GPS_LOCATION, exifData.gpsLocation);
        builder.set(CaptureRequest.JPEG_ORIENTATION, exifData.jpegOrientation);
        builder.set(CaptureRequest.JPEG_QUALITY, exifData.jpegQuality);
        builder.set(CaptureRequest.JPEG_THUMBNAIL_QUALITY,
                exifData.thumbnailQuality);

        // Validate request set and get.
        collector.expectEquals("JPEG thumbnail size request set and get should match",
                thumbnailSize, builder.get(CaptureRequest.JPEG_THUMBNAIL_SIZE));
        collector.expectTrue("GPS locations request set and get should match.",
                areGpsFieldsEqual(exifData.gpsLocation,
                builder.get(CaptureRequest.JPEG_GPS_LOCATION)));
        collector.expectEquals("JPEG orientation request set and get should match",
                exifData.jpegOrientation,
                builder.get(CaptureRequest.JPEG_ORIENTATION));
        collector.expectEquals("JPEG quality request set and get should match",
                exifData.jpegQuality, builder.get(CaptureRequest.JPEG_QUALITY));
        collector.expectEquals("JPEG thumbnail quality request set and get should match",
                exifData.thumbnailQuality,
                builder.get(CaptureRequest.JPEG_THUMBNAIL_QUALITY));
    }
+
+ /**
+ * Simple validation of JPEG image size and format.
+ * <p>
+ * Only validate the image object sanity. It is fast, but doesn't actually
+ * check the buffer data. Assert is used here as it make no sense to
+ * continue the test if the jpeg image captured has some serious failures.
+ * </p>
+ *
+ * @param image The captured jpeg image
+ * @param expectedSize Expected capture jpeg size
+ */
+ public static void basicValidateJpegImage(Image image, Size expectedSize) {
+ Size imageSz = new Size(image.getWidth(), image.getHeight());
+ assertTrue(
+ String.format("Image size doesn't match (expected %s, actual %s) ",
+ expectedSize.toString(), imageSz.toString()), expectedSize.equals(imageSz));
+ assertEquals("Image format should be JPEG", ImageFormat.JPEG, image.getFormat());
+ assertNotNull("Image plane shouldn't be null", image.getPlanes());
+ assertEquals("Image plane number should be 1", 1, image.getPlanes().length);
+
+ // Jpeg decoding validate was done in ImageReaderTest, no need to duplicate the test here.
+ }
+
    /**
     * Verify the JPEG EXIF and JPEG related keys in a capture result are expected.
     * - Capture request get values are same as were set.
     * - capture result's exif data is the same as was set by
     *   the capture request.
     * - new tags in the result set by the camera service are
     *   present and semantically correct.
     *
     * <p>The image is dumped to a fixed file under {@code DEBUG_FILE_NAME_BASE} so that
     * {@link ExifInterface} (which only reads from files) can inspect it.</p>
     *
     * @param image The output JPEG image to verify.
     * @param captureResult The capture result to verify.
     * @param expectedSize The expected JPEG size.
     * @param expectedThumbnailSize The expected thumbnail size.
     * @param expectedExifData The expected EXIF data
     * @param staticInfo The static metadata for the camera device.
     * @param collector The camera error collector to collect errors.
     * @throws Exception if the dumped JPEG can't be read back for EXIF inspection.
     */
    public static void verifyJpegKeys(Image image, CaptureResult captureResult, Size expectedSize,
            Size expectedThumbnailSize, ExifTestData expectedExifData, StaticMetadata staticInfo,
            CameraErrorCollector collector) throws Exception {

        basicValidateJpegImage(image, expectedSize);

        byte[] jpegBuffer = getDataFromImage(image);
        // Have to dump into a file to be able to use ExifInterface
        String jpegFilename = DEBUG_FILE_NAME_BASE + "/verifyJpegKeys.jpeg";
        dumpFile(jpegFilename, jpegBuffer);
        ExifInterface exif = new ExifInterface(jpegFilename);

        if (expectedThumbnailSize.equals(new Size(0,0))) {
            collector.expectTrue("Jpeg shouldn't have thumbnail when thumbnail size is (0, 0)",
                    !exif.hasThumbnail());
        } else {
            collector.expectTrue("Jpeg must have thumbnail for thumbnail size " +
                    expectedThumbnailSize, exif.hasThumbnail());
        }

        // Validate capture result vs. request
        Size resultThumbnailSize = captureResult.get(CaptureResult.JPEG_THUMBNAIL_SIZE);
        int orientationTested = expectedExifData.jpegOrientation;
        // Legacy shim always doesn't rotate thumbnail size
        if ((orientationTested == 90 || orientationTested == 270) &&
                staticInfo.isHardwareLevelLimitedOrBetter()) {
            int exifOrientation = exif.getAttributeInt(ExifInterface.TAG_ORIENTATION,
                    /*defaultValue*/-1);
            if (exifOrientation == ExifInterface.ORIENTATION_UNDEFINED) {
                // Device physically rotated image+thumbnail data
                // Expect thumbnail size to be also rotated
                resultThumbnailSize = new Size(resultThumbnailSize.getHeight(),
                        resultThumbnailSize.getWidth());
            }
        }

        collector.expectEquals("JPEG thumbnail size result and request should match",
                expectedThumbnailSize, resultThumbnailSize);
        if (collector.expectKeyValueNotNull(captureResult, CaptureResult.JPEG_GPS_LOCATION) !=
                null) {
            collector.expectTrue("GPS location result and request should match.",
                    areGpsFieldsEqual(expectedExifData.gpsLocation,
                    captureResult.get(CaptureResult.JPEG_GPS_LOCATION)));
        }
        collector.expectEquals("JPEG orientation result and request should match",
                expectedExifData.jpegOrientation,
                captureResult.get(CaptureResult.JPEG_ORIENTATION));
        collector.expectEquals("JPEG quality result and request should match",
                expectedExifData.jpegQuality, captureResult.get(CaptureResult.JPEG_QUALITY));
        collector.expectEquals("JPEG thumbnail quality result and request should match",
                expectedExifData.thumbnailQuality,
                captureResult.get(CaptureResult.JPEG_THUMBNAIL_QUALITY));

        // Validate other exif tags for all non-legacy devices
        if (!staticInfo.isHardwareLevelLegacy()) {
            verifyJpegExifExtraTags(exif, expectedSize, captureResult, staticInfo, collector);
        }
    }
+
+ /**
+ * Get the degree of an EXIF orientation.
+ */
+ private static int getExifOrientationInDegree(int exifOrientation,
+ CameraErrorCollector collector) {
+ switch (exifOrientation) {
+ case ExifInterface.ORIENTATION_NORMAL:
+ return 0;
+ case ExifInterface.ORIENTATION_ROTATE_90:
+ return 90;
+ case ExifInterface.ORIENTATION_ROTATE_180:
+ return 180;
+ case ExifInterface.ORIENTATION_ROTATE_270:
+ return 270;
+ default:
+ collector.addMessage("It is impossible to get non 0, 90, 180, 270 degress exif" +
+ "info based on the request orientation range");
+ return 0;
+ }
+ }
+
+ /**
+ * Validate and return the focal length.
+ *
+ * @param result Capture result to get the focal length
+ * @return Focal length from capture result or -1 if focal length is not available.
+ */
+ private static float validateFocalLength(CaptureResult result, StaticMetadata staticInfo,
+ CameraErrorCollector collector) {
+ float[] focalLengths = staticInfo.getAvailableFocalLengthsChecked();
+ Float resultFocalLength = result.get(CaptureResult.LENS_FOCAL_LENGTH);
+ if (collector.expectTrue("Focal length is invalid",
+ resultFocalLength != null && resultFocalLength > 0)) {
+ List<Float> focalLengthList =
+ Arrays.asList(CameraTestUtils.toObject(focalLengths));
+ collector.expectTrue("Focal length should be one of the available focal length",
+ focalLengthList.contains(resultFocalLength));
+ return resultFocalLength;
+ }
+ return -1;
+ }
+
+ /**
+ * Validate and return the aperture.
+ *
+ * @param result Capture result to get the aperture
+ * @return Aperture from capture result or -1 if aperture is not available.
+ */
+ private static float validateAperture(CaptureResult result, StaticMetadata staticInfo,
+ CameraErrorCollector collector) {
+ float[] apertures = staticInfo.getAvailableAperturesChecked();
+ Float resultAperture = result.get(CaptureResult.LENS_APERTURE);
+ if (collector.expectTrue("Capture result aperture is invalid",
+ resultAperture != null && resultAperture > 0)) {
+ List<Float> apertureList =
+ Arrays.asList(CameraTestUtils.toObject(apertures));
+ collector.expectTrue("Aperture should be one of the available apertures",
+ apertureList.contains(resultAperture));
+ return resultAperture;
+ }
+ return -1;
+ }
+
+ /**
+ * Return the closest value in an array of floats.
+ */
+ private static float getClosestValueInArray(float[] values, float target) {
+ int minIdx = 0;
+ float minDistance = Math.abs(values[0] - target);
+ for(int i = 0; i < values.length; i++) {
+ float distance = Math.abs(values[i] - target);
+ if (minDistance > distance) {
+ minDistance = distance;
+ minIdx = i;
+ }
+ }
+
+ return values[minIdx];
+ }
+
+ /**
+ * Return if two Location's GPS field are the same.
+ */
+ private static boolean areGpsFieldsEqual(Location a, Location b) {
+ if (a == null || b == null) {
+ return false;
+ }
+
+ return a.getTime() == b.getTime() && a.getLatitude() == b.getLatitude() &&
+ a.getLongitude() == b.getLongitude() && a.getAltitude() == b.getAltitude() &&
+ a.getProvider() == b.getProvider();
+ }
+
+ /**
+ * Verify extra tags in JPEG EXIF
+ */
+ private static void verifyJpegExifExtraTags(ExifInterface exif, Size jpegSize,
+ CaptureResult result, StaticMetadata staticInfo, CameraErrorCollector collector)
+ throws ParseException {
+ /**
+ * TAG_IMAGE_WIDTH and TAG_IMAGE_LENGTH and TAG_ORIENTATION.
+ * Orientation and exif width/height need to be tested carefully, two cases:
+ *
+ * 1. Device rotate the image buffer physically, then exif width/height may not match
+ * the requested still capture size, we need swap them to check.
+ *
+ * 2. Device use the exif tag to record the image orientation, it doesn't rotate
+ * the jpeg image buffer itself. In this case, the exif width/height should always match
+ * the requested still capture size, and the exif orientation should always match the
+ * requested orientation.
+ *
+ */
+ int exifWidth = exif.getAttributeInt(ExifInterface.TAG_IMAGE_WIDTH, /*defaultValue*/0);
+ int exifHeight = exif.getAttributeInt(ExifInterface.TAG_IMAGE_LENGTH, /*defaultValue*/0);
+ Size exifSize = new Size(exifWidth, exifHeight);
+ // Orientation could be missing, which is ok, default to 0.
+ int exifOrientation = exif.getAttributeInt(ExifInterface.TAG_ORIENTATION,
+ /*defaultValue*/-1);
+ // Get requested orientation from result, because they should be same.
+ if (collector.expectKeyValueNotNull(result, CaptureResult.JPEG_ORIENTATION) != null) {
+ int requestedOrientation = result.get(CaptureResult.JPEG_ORIENTATION);
+ final int ORIENTATION_MIN = ExifInterface.ORIENTATION_UNDEFINED;
+ final int ORIENTATION_MAX = ExifInterface.ORIENTATION_ROTATE_270;
+ boolean orientationValid = collector.expectTrue(String.format(
+ "Exif orientation must be in range of [%d, %d]",
+ ORIENTATION_MIN, ORIENTATION_MAX),
+ exifOrientation >= ORIENTATION_MIN && exifOrientation <= ORIENTATION_MAX);
+ if (orientationValid) {
+ /**
+ * Device captured image doesn't respect the requested orientation,
+ * which means it rotates the image buffer physically. Then we
+ * should swap the exif width/height accordingly to compare.
+ */
+ boolean deviceRotatedImage = exifOrientation == ExifInterface.ORIENTATION_UNDEFINED;
+
+ if (deviceRotatedImage) {
+ // Case 1.
+ boolean needSwap = (requestedOrientation % 180 == 90);
+ if (needSwap) {
+ exifSize = new Size(exifHeight, exifWidth);
+ }
+ } else {
+ // Case 2.
+ collector.expectEquals("Exif orientaiton should match requested orientation",
+ requestedOrientation, getExifOrientationInDegree(exifOrientation,
+ collector));
+ }
+ }
+ }
+
+ /**
+ * Ideally, need check exifSize == jpegSize == actual buffer size. But
+ * jpegSize == jpeg decode bounds size(from jpeg jpeg frame
+ * header, not exif) was validated in ImageReaderTest, no need to
+ * validate again here.
+ */
+ collector.expectEquals("Exif size should match jpeg capture size", jpegSize, exifSize);
+
+ // TAG_DATETIME, it should be local time
+ long currentTimeInMs = System.currentTimeMillis();
+ long currentTimeInSecond = currentTimeInMs / 1000;
+ Date date = new Date(currentTimeInMs);
+ String localDatetime = new SimpleDateFormat("yyyy:MM:dd HH:").format(date);
+ String dateTime = exif.getAttribute(ExifInterface.TAG_DATETIME);
+ if (collector.expectTrue("Exif TAG_DATETIME shouldn't be null", dateTime != null)) {
+ collector.expectTrue("Exif TAG_DATETIME is wrong",
+ dateTime.length() == EXIF_DATETIME_LENGTH);
+ long exifTimeInSecond =
+ new SimpleDateFormat("yyyy:MM:dd HH:mm:ss").parse(dateTime).getTime() / 1000;
+ long delta = currentTimeInSecond - exifTimeInSecond;
+ collector.expectTrue("Capture time deviates too much from the current time",
+ Math.abs(delta) < EXIF_DATETIME_ERROR_MARGIN_SEC);
+ // It should be local time.
+ collector.expectTrue("Exif date time should be local time",
+ dateTime.startsWith(localDatetime));
+ }
+
+ // TAG_FOCAL_LENGTH.
+ float[] focalLengths = staticInfo.getAvailableFocalLengthsChecked();
+ float exifFocalLength = (float)exif.getAttributeDouble(ExifInterface.TAG_FOCAL_LENGTH, -1);
+ collector.expectEquals("Focal length should match",
+ getClosestValueInArray(focalLengths, exifFocalLength),
+ exifFocalLength, EXIF_FOCAL_LENGTH_ERROR_MARGIN);
+ // More checks for focal length.
+ collector.expectEquals("Exif focal length should match capture result",
+ validateFocalLength(result, staticInfo, collector), exifFocalLength);
+
+ // TAG_EXPOSURE_TIME
+ // ExifInterface API gives exposure time value in the form of float instead of rational
+ String exposureTime = exif.getAttribute(ExifInterface.TAG_EXPOSURE_TIME);
+ collector.expectNotNull("Exif TAG_EXPOSURE_TIME shouldn't be null", exposureTime);
+ if (staticInfo.areKeysAvailable(CaptureResult.SENSOR_EXPOSURE_TIME)) {
+ if (exposureTime != null) {
+ double exposureTimeValue = Double.parseDouble(exposureTime);
+ long expTimeResult = result.get(CaptureResult.SENSOR_EXPOSURE_TIME);
+ double expected = expTimeResult / 1e9;
+ double tolerance = expected * EXIF_EXPOSURE_TIME_ERROR_MARGIN_RATIO;
+ tolerance = Math.max(tolerance, EXIF_EXPOSURE_TIME_MIN_ERROR_MARGIN_SEC);
+ collector.expectEquals("Exif exposure time doesn't match", expected,
+ exposureTimeValue, tolerance);
+ }
+ }
+
+ // TAG_APERTURE
+ // ExifInterface API gives aperture value in the form of float instead of rational
+ String exifAperture = exif.getAttribute(ExifInterface.TAG_APERTURE);
+ collector.expectNotNull("Exif TAG_APERTURE shouldn't be null", exifAperture);
+ if (staticInfo.areKeysAvailable(CameraCharacteristics.LENS_INFO_AVAILABLE_APERTURES)) {
+ float[] apertures = staticInfo.getAvailableAperturesChecked();
+ if (exifAperture != null) {
+ float apertureValue = Float.parseFloat(exifAperture);
+ collector.expectEquals("Aperture value should match",
+ getClosestValueInArray(apertures, apertureValue),
+ apertureValue, EXIF_APERTURE_ERROR_MARGIN);
+ // More checks for aperture.
+ collector.expectEquals("Exif aperture length should match capture result",
+ validateAperture(result, staticInfo, collector), apertureValue);
+ }
+ }
+
+ /**
+ * TAG_FLASH. TODO: For full devices, can check a lot more info
+ * (http://www.sno.phy.queensu.ca/~phil/exiftool/TagNames/EXIF.html#Flash)
+ */
+ String flash = exif.getAttribute(ExifInterface.TAG_FLASH);
+ collector.expectNotNull("Exif TAG_FLASH shouldn't be null", flash);
+
+ /**
+ * TAG_WHITE_BALANCE. TODO: For full devices, with the DNG tags, we
+ * should be able to cross-check android.sensor.referenceIlluminant.
+ */
+ String whiteBalance = exif.getAttribute(ExifInterface.TAG_WHITE_BALANCE);
+ collector.expectNotNull("Exif TAG_WHITE_BALANCE shouldn't be null", whiteBalance);
+
+ // TAG_MAKE
+ String make = exif.getAttribute(ExifInterface.TAG_MAKE);
+ collector.expectEquals("Exif TAG_MAKE is incorrect", Build.MANUFACTURER, make);
+
+ // TAG_MODEL
+ String model = exif.getAttribute(ExifInterface.TAG_MODEL);
+ collector.expectEquals("Exif TAG_MODEL is incorrect", Build.MODEL, model);
+
+
+ // TAG_ISO
+ int iso = exif.getAttributeInt(ExifInterface.TAG_ISO, /*defaultValue*/-1);
+ if (staticInfo.areKeysAvailable(CaptureResult.SENSOR_SENSITIVITY)) {
+ int expectedIso = result.get(CaptureResult.SENSOR_SENSITIVITY);
+ collector.expectEquals("Exif TAG_ISO is incorrect", expectedIso, iso);
+ }
+
+ // TAG_DATETIME_DIGITIZED (a.k.a Create time for digital cameras).
+ String digitizedTime = exif.getAttribute(ExifInterface.TAG_DATETIME_DIGITIZED);
+ collector.expectNotNull("Exif TAG_DATETIME_DIGITIZED shouldn't be null", digitizedTime);
+ if (digitizedTime != null) {
+ String expectedDateTime = exif.getAttribute(ExifInterface.TAG_DATETIME);
+ collector.expectNotNull("Exif TAG_DATETIME shouldn't be null", expectedDateTime);
+ if (expectedDateTime != null) {
+ collector.expectEquals("dataTime should match digitizedTime",
+ expectedDateTime, digitizedTime);
+ }
+ }
+
+ /**
+ * TAG_SUBSEC_TIME. Since the sub second tag strings are truncated to at
+ * most 9 digits in ExifInterface implementation, use getAttributeInt to
+ * sanitize it. When the default value -1 is returned, it means that
+ * this exif tag either doesn't exist or is a non-numerical invalid
+ * string. Same rule applies to the rest of sub second tags.
+ */
+ int subSecTime = exif.getAttributeInt(ExifInterface.TAG_SUBSEC_TIME, /*defaultValue*/-1);
+ collector.expectTrue("Exif TAG_SUBSEC_TIME value is null or invalid!", subSecTime > 0);
+
+ // TAG_SUBSEC_TIME_ORIG
+ int subSecTimeOrig = exif.getAttributeInt(ExifInterface.TAG_SUBSEC_TIME_ORIG,
+ /*defaultValue*/-1);
+ collector.expectTrue("Exif TAG_SUBSEC_TIME_ORIG value is null or invalid!",
+ subSecTimeOrig > 0);
+
+ // TAG_SUBSEC_TIME_DIG
+ int subSecTimeDig = exif.getAttributeInt(ExifInterface.TAG_SUBSEC_TIME_DIG,
+ /*defaultValue*/-1);
+ collector.expectTrue(
+ "Exif TAG_SUBSEC_TIME_DIG value is null or invalid!", subSecTimeDig > 0);
+ }
+
+
+ /**
+ * Immutable class wrapping the exif test data.
+ *
+ * <p>Bundles the JPEG capture parameters that are later validated against the
+ * Exif metadata of the captured image. All fields are final, so instances may
+ * be freely shared between test cases.</p>
+ */
+ public static class ExifTestData {
+ // GPS location to attach to the capture request; may be checked against Exif GPS tags.
+ public final Location gpsLocation;
+ // Requested JPEG orientation in degrees (0/90/180/270 per JPEG_ORIENTATION).
+ public final int jpegOrientation;
+ // Requested JPEG compression quality (presumably 1-100 JPEG scale - confirm with callers).
+ public final byte jpegQuality;
+ // Requested JPEG thumbnail compression quality (same scale as jpegQuality).
+ public final byte thumbnailQuality;
+
+ /**
+ * Create an immutable bundle of Exif test parameters.
+ *
+ * @param location GPS location for the capture request
+ * @param orientation requested JPEG orientation in degrees
+ * @param jpgQuality requested JPEG compression quality
+ * @param thumbQuality requested thumbnail compression quality
+ */
+ public ExifTestData(Location location, int orientation,
+ byte jpgQuality, byte thumbQuality) {
+ gpsLocation = location;
+ jpegOrientation = orientation;
+ jpegQuality = jpgQuality;
+ thumbnailQuality = thumbQuality;
+ }
+ }
+
+ public static Size getPreviewSizeBound(WindowManager windowManager, Size bound) {
+ Display display = windowManager.getDefaultDisplay();
+
+ int width = display.getWidth();
+ int height = display.getHeight();
+
+ if (height > width) {
+ height = width;
+ width = display.getHeight();
+ }
+
+ if (bound.getWidth() <= width &&
+ bound.getHeight() <= height)
+ return bound;
+ else
+ return new Size(width, height);
+ }
+}
diff --git a/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/helpers/CameraUtils.java b/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/helpers/CameraUtils.java
new file mode 100644
index 0000000..dff20a6
--- /dev/null
+++ b/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/helpers/CameraUtils.java
@@ -0,0 +1,80 @@
+/*
+ * Copyright 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.mediaframeworktest.helpers;
+
+import android.content.Context;
+import android.hardware.Camera;
+import android.hardware.camera2.CameraCharacteristics;
+import android.hardware.camera2.CameraManager;
+
+import java.util.Comparator;
+
+/**
+ * Utility class containing helper functions for the Camera framework tests.
+ */
+/**
+ * (non-Javadoc)
+ * @see android.hardware.cts.helpers.CameraUtils
+ */
+public class CameraUtils {
+
+    // Static utility class; suppress instantiation.
+    private CameraUtils() {
+        throw new AssertionError();
+    }
+
+    /**
+     * Returns {@code true} if this device only supports {@code LEGACY} mode operation in the
+     * Camera2 API for the given camera ID.
+     *
+     * @param context {@link Context} to access the {@link CameraManager} in.
+     * @param cameraId the ID of the camera device to check.
+     * @return {@code true} if this device only supports {@code LEGACY} mode.
+     */
+    public static boolean isLegacyHAL(Context context, int cameraId) throws Exception {
+        CameraManager manager = (CameraManager) context.getSystemService(Context.CAMERA_SERVICE);
+        CameraCharacteristics characteristics =
+                manager.getCameraCharacteristics(Integer.toString(cameraId));
+
+        // Fetch into a local Integer first: a missing key would otherwise be unboxed
+        // directly in the comparison and throw an uninformative NullPointerException.
+        Integer hwLevel =
+                characteristics.get(CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL);
+        return hwLevel != null
+                && hwLevel == CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY;
+    }
+
+    /**
+     * Shared size comparison method used by size comparators.
+     *
+     * <p>Compares by the number of pixels each size covers. If the areas of the two
+     * sizes are equal, the widths are compared instead.</p>
+     *
+     * @return a negative value, zero, or a positive value if size A is smaller than,
+     *         equal to, or larger than size B respectively.
+     */
+    public static int compareSizes(int widthA, int heightA, int widthB, int heightB) {
+        // Multiply in long to avoid int overflow for very large dimensions.
+        long left = widthA * (long) heightA;
+        long right = widthB * (long) heightB;
+        if (left == right) {
+            // Tie-break equal areas on width.
+            left = widthA;
+            right = widthB;
+        }
+        return Long.compare(left, right);
+    }
+
+    /**
+     * Size comparator that orders {@link Camera.Size} values by covered pixel area,
+     * breaking ties by width.
+     */
+    public static class LegacySizeComparator implements Comparator<Camera.Size> {
+        @Override
+        public int compare(Camera.Size lhs, Camera.Size rhs) {
+            return compareSizes(lhs.width, lhs.height, rhs.width, rhs.height);
+        }
+    }
+
+}
diff --git a/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/helpers/InMatcher.java b/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/helpers/InMatcher.java
new file mode 100644
index 0000000..e25a140
--- /dev/null
+++ b/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/helpers/InMatcher.java
@@ -0,0 +1,74 @@
+/*
+ * Copyright 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.android.mediaframeworktest.helpers;
+
+import org.hamcrest.BaseMatcher;
+import org.hamcrest.Description;
+import org.hamcrest.Factory;
+import org.hamcrest.Matcher;
+
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.Objects;
+
+/**
+ * A {@link Matcher} class for checking if value contained in a {@link Collection} or array.
+ */
+/**
+ * (non-Javadoc)
+ * @see android.hardware.camera2.cts.helpers.InMatcher
+ */
+public class InMatcher<T> extends BaseMatcher<T> {
+
+    /** Candidate values; a target matches if it equals any element. */
+    protected Collection<T> mValues;
+
+    /**
+     * Construct a matcher from a collection of candidate values.
+     *
+     * @param values candidate values; must not be {@code null}
+     */
+    public InMatcher(Collection<T> values) {
+        Preconditions.checkNotNull("values", values);
+        mValues = values;
+    }
+
+    /**
+     * Construct a matcher from an explicit list of candidate values.
+     *
+     * @param values candidate values; the array itself must not be {@code null}
+     */
+    public InMatcher(T... values) {
+        Preconditions.checkNotNull("values", values);
+        mValues = Arrays.asList(values);
+    }
+
+    @Override
+    public boolean matches(Object o) {
+        // Objects.equals is null-safe, so a null target can match a null candidate.
+        // (The original code performed an unchecked cast of o into an unused local.)
+        for (T elem : mValues) {
+            if (Objects.equals(o, elem)) {
+                return true;
+            }
+        }
+        return false;
+    }
+
+    @Override
+    public void describeTo(Description description) {
+        description.appendText("in(").appendValue(mValues).appendText(")");
+    }
+
+    /** Factory method: matcher accepting any of the given values. */
+    @Factory
+    public static <T> Matcher<T> in(T... operand) {
+        return new InMatcher<T>(operand);
+    }
+
+    /** Factory method: matcher accepting any element of the given collection. */
+    @Factory
+    public static <T> Matcher<T> in(Collection<T> operand) {
+        return new InMatcher<T>(operand);
+    }
+}
diff --git a/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/helpers/Preconditions.java b/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/helpers/Preconditions.java
new file mode 100644
index 0000000..96b0424
--- /dev/null
+++ b/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/helpers/Preconditions.java
@@ -0,0 +1,184 @@
+/*
+ * Copyright 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.mediaframeworktest.helpers;
+
+import java.util.Collection;
+import java.util.Objects;
+
+/**
+ * Helper set of methods to perform precondition checks before starting method execution.
+ *
+ * <p>Typically used to sanity check arguments or the current object state.</p>
+ */
+/**
+ * (non-Javadoc)
+ * @see android.hardware.camera2.cts.helpers.Preconditions
+ */
+public final class Preconditions {
+
+    /**
+     * Checks that the value has the expected bitwise flags set.
+     *
+     * @param argName Name of the argument
+     * @param arg Argument to check
+     * @param flagsName Name of the bitwise flags
+     * @param flags Bit flags to check.
+     * @return arg
+     *
+     * @throws IllegalArgumentException if the bitwise flags weren't set
+     */
+    public static int checkBitFlags(String argName, int arg, String flagsName, int flags) {
+        if ((arg & flags) == 0) {
+            throw new IllegalArgumentException(
+                    String.format("Argument '%s' must have flags '%s' set", argName, flagsName));
+        }
+
+        return arg;
+    }
+
+    /**
+     * Checks that the value is {@link Object#equals equal} to the expected value.
+     *
+     * @param argName Name of the argument
+     * @param arg Argument to check
+     * @param expectedName Name of the expected value
+     * @param expectedValue Expected value
+     * @return arg
+     *
+     * @throws IllegalArgumentException if the values were not equal
+     */
+    public static <T> T checkEquals(String argName, T arg,
+            String expectedName, T expectedValue) {
+        if (!Objects.equals(arg, expectedValue)) {
+            throw new IllegalArgumentException(
+                    String.format(
+                            "Argument '%s' must be equal to '%s' (was '%s', but expected '%s')",
+                            argName, expectedName, arg, expectedValue));
+        }
+
+        return arg;
+    }
+
+    /**
+     * Checks that the value is not {@code null}.
+     *
+     * <p>
+     * Returns the value directly, so you can use {@code checkNotNull("value", value)} inline.
+     * </p>
+     *
+     * @param argName Name of the argument
+     * @param arg Argument to check
+     * @return arg
+     *
+     * @throws NullPointerException if arg was {@code null}
+     */
+    public static <T> T checkNotNull(String argName, T arg) {
+        if (arg == null) {
+            throw new NullPointerException("Argument '" + argName + "' must not be null");
+        }
+
+        return arg;
+    }
+
+    /**
+     * Checks that the value is not {@code null}.
+     *
+     * <p>
+     * Returns the value directly, so you can use {@code checkNotNull(value)} inline.
+     * </p>
+     *
+     * @param arg Argument to check
+     * @return arg
+     *
+     * @throws NullPointerException if arg was {@code null}
+     */
+    public static <T> T checkNotNull(T arg) {
+        // Delegates with an empty argument name.
+        return checkNotNull("", arg);
+    }
+
+    /**
+     * Checks that the state is currently {@code true}.
+     *
+     * @param message Message to raise an exception with if the state checking fails.
+     * @param state State to check
+     *
+     * @throws IllegalStateException if state was {@code false}
+     *
+     * @return The state value (always {@code true}).
+     */
+    public static boolean checkState(String message, boolean state) {
+        if (!state) {
+            throw new IllegalStateException(message);
+        }
+
+        return state;
+    }
+
+    /**
+     * Ensures that the {@link Collection} is not {@code null}, and none of its elements are
+     * {@code null}.
+     *
+     * @param value a {@link Collection} of boxed objects
+     * @param valueName the name of the argument to use if the check fails
+     *
+     * @return the validated {@link Collection}
+     *
+     * @throws NullPointerException if the {@code value} or any of its elements were {@code null}
+     */
+    public static <T> Collection<T> checkCollectionElementsNotNull(final Collection<T> value,
+            final String valueName) {
+        if (value == null) {
+            throw new NullPointerException(valueName + " must not be null");
+        }
+
+        // Track the index so the failure message can identify the offending element.
+        long ctr = 0;
+        for (T elem : value) {
+            if (elem == null) {
+                throw new NullPointerException(
+                        String.format("%s[%d] must not be null", valueName, ctr));
+            }
+            ++ctr;
+        }
+
+        return value;
+    }
+
+    /**
+     * Ensures that the {@link Collection} is not {@code null}, and contains at least one element.
+     *
+     * @param value a {@link Collection} of boxed elements.
+     * @param valueName the name of the argument to use if the check fails.
+     *
+     * @return the validated {@link Collection}
+     *
+     * @throws NullPointerException if the {@code value} was {@code null}
+     * @throws IllegalArgumentException if the {@code value} was empty
+     */
+    public static <T> Collection<T> checkCollectionNotEmpty(final Collection<T> value,
+            final String valueName) {
+        if (value == null) {
+            throw new NullPointerException(valueName + " must not be null");
+        }
+        if (value.isEmpty()) {
+            throw new IllegalArgumentException(valueName + " is empty");
+        }
+        return value;
+    }
+
+    // Suppress default constructor for noninstantiability
+    private Preconditions() { throw new AssertionError(); }
+}
diff --git a/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/helpers/StaticMetadata.java b/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/helpers/StaticMetadata.java
new file mode 100644
index 0000000..6678f8b
--- /dev/null
+++ b/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/helpers/StaticMetadata.java
@@ -0,0 +1,2393 @@
+/*
+ * Copyright 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.mediaframeworktest.helpers;
+
+import junit.framework.Assert;
+
+import android.graphics.ImageFormat;
+import android.graphics.Rect;
+import android.hardware.camera2.CameraCharacteristics;
+import android.hardware.camera2.CameraCharacteristics.Key;
+import android.hardware.camera2.CameraMetadata;
+import android.hardware.camera2.CaptureRequest;
+import android.hardware.camera2.CaptureResult;
+import android.hardware.camera2.params.StreamConfigurationMap;
+import android.util.Log;
+import android.util.Range;
+import android.util.Rational;
+import android.util.Size;
+
+import java.lang.reflect.Array;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Set;
+
+import static com.android.mediaframeworktest.helpers.AssertHelpers.assertArrayContainsAnyOf;
+
+/**
+ * Helpers to get common static info out of the camera.
+ *
+ * <p>Avoid boiler plate by putting repetitive get/set patterns in this class.</p>
+ *
+ * <p>Attempt to be durable against the camera device having bad or missing metadata
+ * by providing reasonable defaults and logging warnings when that happens.</p>
+ */
+/**
+ * (non-Javadoc)
+ * @see android.hardware.camera2.cts.helpers.StaticMetadata
+ */
+public class StaticMetadata {
+
+ private static final String TAG = "StaticMetadata";
+ private static final int IGNORE_SIZE_CHECK = -1;
+
+ private static final long SENSOR_INFO_EXPOSURE_TIME_RANGE_MIN_AT_MOST = 100000L; // 100us
+ private static final long SENSOR_INFO_EXPOSURE_TIME_RANGE_MAX_AT_LEAST = 100000000; // 100ms
+ private static final int SENSOR_INFO_SENSITIVITY_RANGE_MIN_AT_MOST = 100;
+ private static final int SENSOR_INFO_SENSITIVITY_RANGE_MAX_AT_LEAST = 800;
+ private static final int STATISTICS_INFO_MAX_FACE_COUNT_MIN_AT_LEAST = 4;
+ private static final int TONEMAP_MAX_CURVE_POINTS_AT_LEAST = 64;
+ private static final int CONTROL_AE_COMPENSATION_RANGE_DEFAULT_MIN = -2;
+ private static final int CONTROL_AE_COMPENSATION_RANGE_DEFAULT_MAX = 2;
+ private static final Rational CONTROL_AE_COMPENSATION_STEP_DEFAULT = new Rational(1, 2);
+ private static final byte REQUEST_PIPELINE_MAX_DEPTH_MAX = 8;
+ private static final int MAX_REPROCESS_MAX_CAPTURE_STALL = 4;
+
+ // TODO: Consider making this work across any metadata object, not just camera characteristics
+ private final CameraCharacteristics mCharacteristics;
+ private final CheckLevel mLevel;
+ private final CameraErrorCollector mCollector;
+
+ // Index with android.control.aeMode
+ public static final String[] AE_MODE_NAMES = new String[] {
+ "AE_MODE_OFF",
+ "AE_MODE_ON",
+ "AE_MODE_ON_AUTO_FLASH",
+ "AE_MODE_ON_ALWAYS_FLASH",
+ "AE_MODE_ON_AUTO_FLASH_REDEYE"
+ };
+
+ // Index with android.control.afMode
+ public static final String[] AF_MODE_NAMES = new String[] {
+ "AF_MODE_OFF",
+ "AF_MODE_AUTO",
+ "AF_MODE_MACRO",
+ "AF_MODE_CONTINUOUS_VIDEO",
+ "AF_MODE_CONTINUOUS_PICTURE",
+ "AF_MODE_EDOF"
+ };
+
+ // Index with android.control.aeState
+ public static final String[] AE_STATE_NAMES = new String[] {
+ "AE_STATE_INACTIVE",
+ "AE_STATE_SEARCHING",
+ "AE_STATE_CONVERGED",
+ "AE_STATE_LOCKED",
+ "AE_STATE_FLASH_REQUIRED",
+ "AE_STATE_PRECAPTURE"
+ };
+
+ // Index with android.control.afState
+ public static final String[] AF_STATE_NAMES = new String[] {
+ "AF_STATE_INACTIVE",
+ "AF_STATE_PASSIVE_SCAN",
+ "AF_STATE_PASSIVE_FOCUSED",
+ "AF_STATE_ACTIVE_SCAN",
+ "AF_STATE_FOCUSED_LOCKED",
+ "AF_STATE_NOT_FOCUSED_LOCKED",
+ "AF_STATE_PASSIVE_UNFOCUSED"
+ };
+
+ public enum CheckLevel {
+ /** Only log warnings for metadata check failures. Execution continues. */
+ WARN,
+ /**
+ * Use ErrorCollector to collect the metadata check failures. Execution
+ * continues.
+ */
+ COLLECT,
+ /** Assert the metadata check failures. Execution aborts. */
+ ASSERT
+ }
+
+ /**
+ * Construct a new StaticMetadata object.
+ *
+ * <p>Default constructor; only logs warnings for the static metadata check failures.</p>
+ *
+ * @param characteristics static info for a camera
+ * @throws IllegalArgumentException if characteristics was null
+ */
+ public StaticMetadata(CameraCharacteristics characteristics) {
+ this(characteristics, CheckLevel.WARN, /*collector*/null);
+ }
+
+ /**
+ * Construct a new StaticMetadata object with {@link CameraErrorCollector}.
+ * <p>
+ * This constructor always uses {@link CheckLevel#COLLECT}; the collector is used to
+ * log the check failures.
+ * </p>
+ *
+ * @param characteristics static info for a camera
+ * @param collector The {@link CameraErrorCollector} used by this StaticMetadata
+ * @throws IllegalArgumentException if characteristics or collector was null.
+ */
+ public StaticMetadata(CameraCharacteristics characteristics, CameraErrorCollector collector) {
+ this(characteristics, CheckLevel.COLLECT, collector);
+ }
+
+ /**
+ * Construct a new StaticMetadata object with {@link CheckLevel} and
+ * {@link CameraErrorCollector}.
+ * <p>
+ * When level is not {@link CheckLevel#COLLECT}, the {@link CameraErrorCollector} will be
+ * ignored, otherwise, it will be used to log the check failures.
+ * </p>
+ *
+ * @param characteristics static info for a camera
+ * @param level The {@link CheckLevel} of this StaticMetadata
+ * @param collector The {@link CameraErrorCollector} used by this StaticMetadata
+ * @throws IllegalArgumentException if characteristics was null or level was
+ * {@link CheckLevel#COLLECT} but collector was null.
+ */
+ public StaticMetadata(CameraCharacteristics characteristics, CheckLevel level,
+ CameraErrorCollector collector) {
+ if (characteristics == null) {
+ throw new IllegalArgumentException("characteristics was null");
+ }
+ if (level == CheckLevel.COLLECT && collector == null) {
+ throw new IllegalArgumentException("collector must valid when COLLECT level is set");
+ }
+
+ mCharacteristics = characteristics;
+ mLevel = level;
+ mCollector = collector;
+ }
+
+ /**
+ * Get the CameraCharacteristics associated with this StaticMetadata.
+ *
+ * @return A non-null CameraCharacteristics object
+ */
+ public CameraCharacteristics getCharacteristics() {
+ // Never null: every constructor rejects a null characteristics argument.
+ return mCharacteristics;
+ }
+
+    /**
+     * Whether the hardware level reported by android.info.supportedHardwareLevel
+     * is {@value CameraMetadata#INFO_SUPPORTED_HARDWARE_LEVEL_FULL}.
+     *
+     * <p>If the camera device is not reporting the hardwareLevel, this
+     * will cause the test to fail.</p>
+     *
+     * @return {@code true} if the device is {@code FULL}, {@code false} otherwise.
+     */
+    public boolean isHardwareLevelFull() {
+        final int hwLevel = getHardwareLevelChecked();
+        return hwLevel == CameraMetadata.INFO_SUPPORTED_HARDWARE_LEVEL_FULL;
+    }
+
+ /**
+ * Return the supported hardware level of the device, or fail if no value is reported.
+ *
+ * <p>Fails the running test (via {@link Assert#fail}) when the key is missing,
+ * so callers can rely on a valid value being returned.</p>
+ *
+ * @return the supported hardware level as a constant defined for
+ * {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL}.
+ */
+ public int getHardwareLevelChecked() {
+ Integer hwLevel = getValueFromKeyNonNull(
+ CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL);
+ if (hwLevel == null) {
+ Assert.fail("No supported hardware level reported.");
+ }
+ return hwLevel;
+ }
+
+    /**
+     * Whether the hardware level reported by android.info.supportedHardwareLevel
+     * is {@value CameraMetadata#INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY}.
+     *
+     * <p>If the camera device is not reporting the hardwareLevel, this
+     * will cause the test to fail.</p>
+     *
+     * @return {@code true} if the device is {@code LEGACY}, {@code false} otherwise.
+     */
+    public boolean isHardwareLevelLegacy() {
+        final int hwLevel = getHardwareLevelChecked();
+        return hwLevel == CameraMetadata.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY;
+    }
+
+    /**
+     * Whether per-frame control is supported by the camera device.
+     *
+     * @return {@code true} if per frame control is supported, {@code false} otherwise.
+     */
+    public boolean isPerFrameControlSupported() {
+        final int maxLatency = getSyncMaxLatency();
+        return maxLatency == CameraMetadata.SYNC_MAX_LATENCY_PER_FRAME_CONTROL;
+    }
+
+    /**
+     * Get the maximum number of frames to wait for request settings to be applied.
+     *
+     * @return {@link CameraMetadata#SYNC_MAX_LATENCY_UNKNOWN} for unknown latency,
+     *         {@link CameraMetadata#SYNC_MAX_LATENCY_PER_FRAME_CONTROL} for per-frame
+     *         control, or a positive frame count otherwise.
+     */
+    public int getSyncMaxLatency() {
+        final Integer latency = getValueFromKeyNonNull(CameraCharacteristics.SYNC_MAX_LATENCY);
+        // A missing value is reported as "unknown" rather than propagated as null.
+        return (latency == null) ? CameraMetadata.SYNC_MAX_LATENCY_UNKNOWN : latency;
+    }
+
+ /**
+ * Whether or not the hardware level reported by android.info.supportedHardwareLevel
+ * is {@value CameraMetadata#INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED}.
+ *
+ * <p>If the camera device is not reporting the hardwareLevel, this
+ * will cause the test to fail (see {@link #getHardwareLevelChecked}).</p>
+ *
+ * @return {@code true} if the device is {@code LIMITED}, {@code false} otherwise.
+ */
+ public boolean isHardwareLevelLimited() {
+ return getHardwareLevelChecked() == CameraMetadata.INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED;
+ }
+
+ /**
+ * Whether or not the hardware level reported by {@code android.info.supportedHardwareLevel}
+ * is at least {@link CameraMetadata#INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED}.
+ *
+ * <p>If the camera device is incorrectly reporting the hardwareLevel, this
+ * will always return {@code false}.</p>
+ *
+ * @return
+ * {@code true} if the device is {@code LIMITED} or {@code FULL},
+ * {@code false} otherwise (i.e. LEGACY).
+ */
+ public boolean isHardwareLevelLimitedOrBetter() {
+ Integer hwLevel = getValueFromKeyNonNull(
+ CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL);
+
+ if (hwLevel == null) {
+ return false;
+ }
+
+ // Normal. Device could be limited.
+ int hwLevelInt = hwLevel;
+ return hwLevelInt == CameraMetadata.INFO_SUPPORTED_HARDWARE_LEVEL_FULL ||
+ hwLevelInt == CameraMetadata.INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED;
+ }
+
+    /**
+     * Get the maximum number of partial results a request can expect.
+     *
+     * @return 1 if partial result is not supported;
+     *         an integer value larger than 1 if partial result is supported.
+     */
+    public int getPartialResultCount() {
+        final Integer count =
+                mCharacteristics.get(CameraCharacteristics.REQUEST_PARTIAL_RESULT_COUNT);
+        // Optional key: a missing value means a single (non-partial) result per request.
+        return (count == null) ? 1 : count;
+    }
+
+ /**
+ * Get the exposure time value and clamp to the range if needed.
+ *
+ * <p>If the advertised exposure range itself violates the spec limits
+ * (min too large / max too small), the violation is reported via failKeyCheck
+ * and the offending bound is replaced by the spec limit before clamping.</p>
+ *
+ * @param exposure Input exposure time value to check.
+ * @return Exposure value in the legal range.
+ */
+ public long getExposureClampToRange(long exposure) {
+ long minExposure = getExposureMinimumOrDefault(Long.MAX_VALUE);
+ long maxExposure = getExposureMaximumOrDefault(Long.MIN_VALUE);
+ if (minExposure > SENSOR_INFO_EXPOSURE_TIME_RANGE_MIN_AT_MOST) {
+ failKeyCheck(CameraCharacteristics.SENSOR_INFO_EXPOSURE_TIME_RANGE,
+ String.format(
+ "Min value %d is too large, set to maximal legal value %d",
+ minExposure, SENSOR_INFO_EXPOSURE_TIME_RANGE_MIN_AT_MOST));
+ minExposure = SENSOR_INFO_EXPOSURE_TIME_RANGE_MIN_AT_MOST;
+ }
+ if (maxExposure < SENSOR_INFO_EXPOSURE_TIME_RANGE_MAX_AT_LEAST) {
+ failKeyCheck(CameraCharacteristics.SENSOR_INFO_EXPOSURE_TIME_RANGE,
+ String.format(
+ "Max value %d is too small, set to minimal legal value %d",
+ maxExposure, SENSOR_INFO_EXPOSURE_TIME_RANGE_MAX_AT_LEAST));
+ maxExposure = SENSOR_INFO_EXPOSURE_TIME_RANGE_MAX_AT_LEAST;
+ }
+
+ // Clamp the requested exposure into [minExposure, maxExposure].
+ return Math.max(minExposure, Math.min(maxExposure, exposure));
+ }
+
+    /**
+     * Check if the camera device supports a focuser.
+     *
+     * @return true if camera device supports a focuser, false otherwise.
+     */
+    public boolean hasFocuser() {
+        if (areKeysAvailable(CameraCharacteristics.LENS_INFO_MINIMUM_FOCUS_DISTANCE)) {
+            // LEGACY devices don't have lens.info.minimumFocusDistance, so guard this query
+            return getMinimumFocusDistanceChecked() > 0;
+        }
+
+        // Fall back to the advertised AF modes.
+        int[] availableAfModes = mCharacteristics.get(
+                CameraCharacteristics.CONTROL_AF_AVAILABLE_MODES);
+        if (availableAfModes == null) {
+            return false;
+        }
+
+        // Assume that if we have an AF mode which doesn't ignore AF trigger, we have a focuser
+        for (int mode : availableAfModes) {
+            if (mode == CameraMetadata.CONTROL_AF_MODE_AUTO
+                    || mode == CameraMetadata.CONTROL_AF_MODE_CONTINUOUS_PICTURE
+                    || mode == CameraMetadata.CONTROL_AF_MODE_CONTINUOUS_VIDEO
+                    || mode == CameraMetadata.CONTROL_AF_MODE_MACRO) {
+                return true;
+            }
+        }
+
+        return false;
+    }
+
+ /**
+ * Check if the camera device has flash unit.
+ * @return true if flash unit is available, false otherwise.
+ */
+ public boolean hasFlash() {
+ // Thin wrapper; getFlashInfoChecked() performs the actual key lookup and validation.
+ return getFlashInfoChecked();
+ }
+
+ /**
+ * Get minimum focus distance.
+ *
+ * @return minimum focus distance, 0 if minimum focus distance is invalid.
+ */
+ public float getMinimumFocusDistanceChecked() {
+ Key<Float> key = CameraCharacteristics.LENS_INFO_MINIMUM_FOCUS_DISTANCE;
+ Float minFocusDistance;
+
+ /**
+ * android.lens.info.minimumFocusDistance - required for FULL and MANUAL_SENSOR-capable
+ * devices; optional for all other devices.
+ */
+ if (isHardwareLevelFull() || isCapabilitySupported(
+ CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR)) {
+ minFocusDistance = getValueFromKeyNonNull(key);
+ } else {
+ minFocusDistance = mCharacteristics.get(key);
+ }
+
+ // Missing optional value: report "no focuser" via 0.
+ if (minFocusDistance == null) {
+ return 0.0f;
+ }
+
+ checkTrueForKey(key, " minFocusDistance value shouldn't be negative",
+ minFocusDistance >= 0);
+ // Clamp negative (invalid) values to 0 after reporting the failure above.
+ if (minFocusDistance < 0) {
+ minFocusDistance = 0.0f;
+ }
+
+ return minFocusDistance;
+ }
+
+ /**
+ * Get focusDistanceCalibration.
+ *
+ * @return focusDistanceCalibration, UNCALIBRATED if value is invalid.
+ */
+ public int getFocusDistanceCalibrationChecked() {
+ Key<Integer> key = CameraCharacteristics.LENS_INFO_FOCUS_DISTANCE_CALIBRATION;
+ Integer calibration = getValueFromKeyNonNull(key);
+
+ if (calibration == null) {
+ return CameraMetadata.LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED;
+ }
+
+ checkTrueForKey(key, " value is out of range" ,
+ calibration >= CameraMetadata.LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED &&
+ calibration <= CameraMetadata.LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED);
+
+ return calibration;
+ }
+
+    /**
+     * Get the maximum number of AE metering regions.
+     *
+     * @return AE max regions supported by the camera device, 0 if unreported.
+     */
+    public int getAeMaxRegionsChecked() {
+        Integer regionCount = mCharacteristics.get(CameraCharacteristics.CONTROL_MAX_REGIONS_AE);
+        return (regionCount == null) ? 0 : regionCount;
+    }
+
+    /**
+     * Get the maximum number of AWB metering regions.
+     *
+     * @return AWB max regions supported by the camera device, 0 if unreported.
+     */
+    public int getAwbMaxRegionsChecked() {
+        Integer regionCount = mCharacteristics.get(CameraCharacteristics.CONTROL_MAX_REGIONS_AWB);
+        return (regionCount == null) ? 0 : regionCount;
+    }
+
+    /**
+     * Get the maximum number of AF metering regions.
+     *
+     * @return AF max regions supported by the camera device, 0 if unreported.
+     */
+    public int getAfMaxRegionsChecked() {
+        Integer regionCount = mCharacteristics.get(CameraCharacteristics.CONTROL_MAX_REGIONS_AF);
+        return (regionCount == null) ? 0 : regionCount;
+    }
+    /**
+     * Get the available AE anti-banding modes and sanity-check them.
+     *
+     * <p>Each mode must lie in [OFF, AUTO], and the list must include either AUTO
+     * or both of the 50Hz/60Hz modes.</p>
+     *
+     * @return The array of available anti-banding modes; empty if the key is absent.
+     */
+    public int[] getAeAvailableAntiBandingModesChecked() {
+        Key<int[]> key = CameraCharacteristics.CONTROL_AE_AVAILABLE_ANTIBANDING_MODES;
+        int[] modes = getValueFromKeyNonNull(key);
+        if (modes == null) {
+            // The key check may only log a warning; avoid an NPE below.
+            return new int[0];
+        }
+
+        boolean foundAuto = false;
+        boolean found50Hz = false;
+        boolean found60Hz = false;
+        for (int mode : modes) {
+            // Bug fix: the range check must be a conjunction; the original "||"
+            // made the condition trivially true for every int value.
+            checkTrueForKey(key, "mode value " + mode + " is out of range",
+                    mode >= CameraMetadata.CONTROL_AE_ANTIBANDING_MODE_OFF &&
+                    mode <= CameraMetadata.CONTROL_AE_ANTIBANDING_MODE_AUTO);
+            if (mode == CameraMetadata.CONTROL_AE_ANTIBANDING_MODE_AUTO) {
+                foundAuto = true;
+            } else if (mode == CameraMetadata.CONTROL_AE_ANTIBANDING_MODE_50HZ) {
+                found50Hz = true;
+            } else if (mode == CameraMetadata.CONTROL_AE_ANTIBANDING_MODE_60HZ) {
+                found60Hz = true;
+            }
+        }
+        // Must contain AUTO mode or one of 50/60Hz mode.
+        checkTrueForKey(key, "Either AUTO mode or both 50HZ/60HZ mode should present",
+                foundAuto || (found50Hz && found60Hz));
+
+        return modes;
+    }
+
+    /**
+     * Check if the anti-banding OFF mode is supported.
+     *
+     * @return true if anti-banding OFF mode is supported, false otherwise.
+     */
+    public boolean isAntiBandingOffModeSupported() {
+        // Scan the checked mode list directly rather than building a List view.
+        for (Integer mode : CameraTestUtils.toObject(getAeAvailableAntiBandingModesChecked())) {
+            if (mode == CameraMetadata.CONTROL_AE_ANTIBANDING_MODE_OFF) {
+                return true;
+            }
+        }
+        return false;
+    }
+
+    /**
+     * Read android.flash.info.available, sanity-checked.
+     *
+     * @return whether a flash unit is reported; FALSE when the value is absent.
+     */
+    public Boolean getFlashInfoChecked() {
+        Boolean hasFlash = getValueFromKeyNonNull(CameraCharacteristics.FLASH_INFO_AVAILABLE);
+        // The failed key check may only log a warning, so null is still possible.
+        return (hasFlash == null) ? Boolean.FALSE : hasFlash;
+    }
+
+    /**
+     * Get the available sensor test pattern modes, sanity-checked.
+     *
+     * @return the available test pattern modes; empty array if the key is absent.
+     */
+    public int[] getAvailableTestPatternModesChecked() {
+        Key<int[]> key = CameraCharacteristics.SENSOR_AVAILABLE_TEST_PATTERN_MODES;
+        int[] modes = getValueFromKeyNonNull(key);
+        if (modes == null) {
+            return new int[0];
+        }
+
+        // The OFF test pattern must always be advertised.
+        List<Integer> modeList = Arrays.asList(CameraTestUtils.toObject(modes));
+        checkTrueForKey(key, " value must contain OFF mode",
+                modeList.contains(CameraCharacteristics.SENSOR_TEST_PATTERN_MODE_OFF));
+        return modes;
+    }
+
+    /**
+     * Get available JPEG thumbnail sizes and do the sanity check.
+     *
+     * <p>The list must contain (0, 0), be duplicate-free, and be sorted in
+     * ascending order by area (by width when areas are equal).</p>
+     *
+     * @return The array of available thumbnail sizes; empty if the key is absent.
+     */
+    public Size[] getAvailableThumbnailSizesChecked() {
+        Key<Size[]> key = CameraCharacteristics.JPEG_AVAILABLE_THUMBNAIL_SIZES;
+        Size[] sizes = getValueFromKeyNonNull(key);
+        if (sizes == null) {
+            // Bug fix: the key check may only log a warning, in which case the
+            // original code dereferenced null below.
+            return new Size[0];
+        }
+        final List<Size> sizeList = Arrays.asList(sizes);
+
+        // Size must contain (0, 0).
+        checkTrueForKey(key, "size should contain (0, 0)", sizeList.contains(new Size(0, 0)));
+
+        // Each size must be distinct.
+        checkElementDistinct(key, sizeList);
+
+        // Must be sorted in ascending order by area, by width if areas are same.
+        List<Size> orderedSizes =
+                CameraTestUtils.getAscendingOrderSizes(sizeList, /*ascending*/true);
+        checkTrueForKey(key, "Sizes should be in ascending order: Original " + sizeList.toString()
+                + ", Expected " + orderedSizes.toString(), orderedSizes.equals(sizeList));
+
+        // TODO: Aspect ratio match, need wait for android.scaler.availableStreamConfigurations
+        // implementation see b/12958122.
+
+        return sizes;
+    }
+
+    /**
+     * Get available focal lengths and do the sanity check.
+     *
+     * <p>The array must be non-empty, strictly positive, and duplicate-free.</p>
+     *
+     * @return The array of available focal lengths; empty if the key is absent.
+     */
+    public float[] getAvailableFocalLengthsChecked() {
+        Key<float[]> key = CameraCharacteristics.LENS_INFO_AVAILABLE_FOCAL_LENGTHS;
+        float[] focalLengths = getValueFromKeyNonNull(key);
+        if (focalLengths == null) {
+            // Bug fix: the key check may only warn; avoid NPE on the checks below.
+            return new float[0];
+        }
+
+        checkTrueForKey(key, "Array should contain at least one element", focalLengths.length >= 1);
+
+        for (int i = 0; i < focalLengths.length; i++) {
+            checkTrueForKey(key,
+                    String.format("focalLength[%d] %f should be positive.", i, focalLengths[i]),
+                    focalLengths[i] > 0);
+        }
+        checkElementDistinct(key, Arrays.asList(CameraTestUtils.toObject(focalLengths)));
+
+        return focalLengths;
+    }
+
+    /**
+     * Get available apertures and do the sanity check.
+     *
+     * <p>The array must be non-empty, strictly positive, and duplicate-free.</p>
+     *
+     * @return The non-null array of available apertures; empty if the key is absent.
+     */
+    public float[] getAvailableAperturesChecked() {
+        Key<float[]> key = CameraCharacteristics.LENS_INFO_AVAILABLE_APERTURES;
+        float[] apertures = getValueFromKeyNonNull(key);
+        if (apertures == null) {
+            // Bug fix: the key check may only warn; avoid NPE on the checks below.
+            return new float[0];
+        }
+
+        checkTrueForKey(key, "Array should contain at least one element", apertures.length >= 1);
+
+        for (int i = 0; i < apertures.length; i++) {
+            checkTrueForKey(key,
+                    String.format("apertures[%d] %f should be positive.", i, apertures[i]),
+                    apertures[i] > 0);
+        }
+        checkElementDistinct(key, Arrays.asList(CameraTestUtils.toObject(apertures)));
+
+        return apertures;
+    }
+
+    /**
+     * Get and check the available hot pixel map modes.
+     *
+     * <p>FULL devices must advertise FAST; on LIMITED-or-better devices FAST and
+     * HIGH_QUALITY must be advertised together or not at all. Values must be
+     * distinct and within [OFF, HIGH_QUALITY].</p>
+     *
+     * @return the available hot pixel map modes; empty array if the key is absent.
+     */
+    public int[] getAvailableHotPixelModesChecked() {
+        Key<int[]> key = CameraCharacteristics.HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES;
+        int[] modes = getValueFromKeyNonNull(key);
+
+        if (modes == null) {
+            return new int[0];
+        }
+
+        List<Integer> modeList = Arrays.asList(CameraTestUtils.toObject(modes));
+        if (isHardwareLevelFull()) {
+            // FULL-capability devices are required to support the FAST mode.
+            checkTrueForKey(key, "Full-capability camera devices must support FAST mode",
+                    modeList.contains(CameraMetadata.HOT_PIXEL_MODE_FAST));
+        }
+
+        if (isHardwareLevelLimitedOrBetter()) {
+            // FAST and HIGH_QUALITY mode must be both present or both not present
+            List<Integer> coupledModes = Arrays.asList(new Integer[] {
+                    CameraMetadata.HOT_PIXEL_MODE_FAST,
+                    CameraMetadata.HOT_PIXEL_MODE_HIGH_QUALITY
+            });
+            checkTrueForKey(
+                    key, " FAST and HIGH_QUALITY mode must both present or both not present",
+                    containsAllOrNone(modeList, coupledModes));
+        }
+        // No duplicates, and every value within the legal range.
+        checkElementDistinct(key, modeList);
+        checkArrayValuesInRange(key, modes, CameraMetadata.HOT_PIXEL_MODE_OFF,
+                CameraMetadata.HOT_PIXEL_MODE_HIGH_QUALITY);
+
+        return modes;
+    }
+
+    /**
+     * Get and check available face detection modes.
+     *
+     * <p>The list must include OFF, contain no duplicates, and every value must
+     * lie within [OFF, FULL].</p>
+     *
+     * @return The array of available face detection modes; empty if the key is absent.
+     */
+    public int[] getAvailableFaceDetectModesChecked() {
+        Key<int[]> key = CameraCharacteristics.STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES;
+        int[] modes = getValueFromKeyNonNull(key);
+        if (modes == null) {
+            return new int[0];
+        }
+
+        List<Integer> modeList = Arrays.asList(CameraTestUtils.toObject(modes));
+        checkTrueForKey(key, "Array should contain OFF mode",
+                modeList.contains(CameraMetadata.STATISTICS_FACE_DETECT_MODE_OFF));
+        checkElementDistinct(key, modeList);
+        checkArrayValuesInRange(key, modes, CameraMetadata.STATISTICS_FACE_DETECT_MODE_OFF,
+                CameraMetadata.STATISTICS_FACE_DETECT_MODE_FULL);
+        return modes;
+    }
+
+    /**
+     * Get and check max face detected count.
+     *
+     * <p>Must be 0 when only the OFF face-detect mode is advertised; otherwise it
+     * must be at least the spec minimum (relaxed to 1 for LEGACY devices).</p>
+     *
+     * @return max number of faces that can be detected, 0 if the key is absent.
+     */
+    public int getMaxFaceCountChecked() {
+        Key<Integer> key = CameraCharacteristics.STATISTICS_INFO_MAX_FACE_COUNT;
+        Integer count = getValueFromKeyNonNull(key);
+
+        if (count == null) {
+            return 0;
+        }
+
+        List<Integer> faceDetectModes =
+                Arrays.asList(CameraTestUtils.toObject(getAvailableFaceDetectModesChecked()));
+        if (faceDetectModes.contains(CameraMetadata.STATISTICS_FACE_DETECT_MODE_OFF) &&
+                faceDetectModes.size() == 1) {
+            checkTrueForKey(key, " value must be 0 if only OFF mode is supported in "
+                    + "availableFaceDetectionModes", count == 0);
+        } else {
+            int maxFaceCountAtLeast = STATISTICS_INFO_MAX_FACE_COUNT_MIN_AT_LEAST;
+
+            // Legacy mode may support fewer than STATISTICS_INFO_MAX_FACE_COUNT_MIN_AT_LEAST faces.
+            if (isHardwareLevelLegacy()) {
+                maxFaceCountAtLeast = 1;
+            }
+            // Bug fix: the original message concatenated to "SIMPLEor FULL"
+            // (missing space between the two string fragments).
+            checkTrueForKey(key, " value must be no less than " + maxFaceCountAtLeast
+                    + " if SIMPLE or FULL is also supported in availableFaceDetectionModes",
+                    count >= maxFaceCountAtLeast);
+        }
+
+        return count;
+    }
+
+    /**
+     * Get and check the available tone map modes.
+     *
+     * <p>FAST is mandatory on all devices; on LIMITED-or-better devices FAST and
+     * HIGH_QUALITY must be advertised together or not at all. Values must be
+     * distinct and within [CONTRAST_CURVE, PRESET_CURVE].</p>
+     *
+     * @return the available tone map modes; empty array if the key is absent.
+     */
+    public int[] getAvailableToneMapModesChecked() {
+        Key<int[]> key = CameraCharacteristics.TONEMAP_AVAILABLE_TONE_MAP_MODES;
+        int[] modes = getValueFromKeyNonNull(key);
+
+        if (modes == null) {
+            return new int[0];
+        }
+
+        List<Integer> modeList = Arrays.asList(CameraTestUtils.toObject(modes));
+        checkTrueForKey(key, " Camera devices must always support FAST mode",
+                modeList.contains(CameraMetadata.TONEMAP_MODE_FAST));
+        // Qualification check for MANUAL_POSTPROCESSING capability is in
+        // StaticMetadataTest#testCapabilities
+
+        if (isHardwareLevelLimitedOrBetter()) {
+            // FAST and HIGH_QUALITY mode must be both present or both not present
+            List<Integer> coupledModes = Arrays.asList(new Integer[] {
+                    CameraMetadata.TONEMAP_MODE_FAST,
+                    CameraMetadata.TONEMAP_MODE_HIGH_QUALITY
+            });
+            checkTrueForKey(
+                    key, " FAST and HIGH_QUALITY mode must both present or both not present",
+                    containsAllOrNone(modeList, coupledModes));
+        }
+        // No duplicates, and every value within the legal range.
+        checkElementDistinct(key, modeList);
+        checkArrayValuesInRange(key, modes, CameraMetadata.TONEMAP_MODE_CONTRAST_CURVE,
+                CameraMetadata.TONEMAP_MODE_PRESET_CURVE);
+
+        return modes;
+    }
+
+    /**
+     * Get and check max tonemap curve points.
+     *
+     * <p>When any curve-output tonemap mode (CONTRAST_CURVE, GAMMA_VALUE, or
+     * PRESET_CURVE) is advertised, the key must be present and at least the
+     * spec minimum; otherwise 0 is an acceptable fallback.</p>
+     *
+     * @return Max tonemap curve points, 0 if the key is absent.
+     */
+    public int getMaxTonemapCurvePointChecked() {
+        Key<Integer> key = CameraCharacteristics.TONEMAP_MAX_CURVE_POINTS;
+        Integer count = getValueFromKeyNonNull(key);
+        List<Integer> modeList =
+                Arrays.asList(CameraTestUtils.toObject(getAvailableToneMapModesChecked()));
+        // Curve output is supported iff any of the curve-based modes is advertised.
+        boolean tonemapCurveOutputSupported =
+                modeList.contains(CameraMetadata.TONEMAP_MODE_CONTRAST_CURVE) ||
+                modeList.contains(CameraMetadata.TONEMAP_MODE_GAMMA_VALUE) ||
+                modeList.contains(CameraMetadata.TONEMAP_MODE_PRESET_CURVE);
+
+        if (count == null) {
+            if (tonemapCurveOutputSupported) {
+                // A curve-output mode without MAX_CURVE_POINTS is a hard failure.
+                Assert.fail("Tonemap curve output is supported but MAX_CURVE_POINTS is null");
+            }
+            return 0;
+        }
+
+        if (tonemapCurveOutputSupported) {
+            checkTrueForKey(key, "Tonemap curve output supported camera device must support "
+                    + "maxCurvePoints >= " + TONEMAP_MAX_CURVE_POINTS_AT_LEAST,
+                    count >= TONEMAP_MAX_CURVE_POINTS_AT_LEAST);
+        }
+
+        return count;
+    }
+
+    /**
+     * Get and check the sensor pixel array size.
+     *
+     * @return the pixel array size, or (0, 0) when the key is absent.
+     */
+    public Size getPixelArraySizeChecked() {
+        Size pixelArray = getValueFromKeyNonNull(
+                CameraCharacteristics.SENSOR_INFO_PIXEL_ARRAY_SIZE);
+        return (pixelArray == null) ? new Size(0, 0) : pixelArray;
+    }
+
+ /**
+ * Get and check pre-correction active array size.
+ */
+ public Rect getPreCorrectedActiveArraySizeChecked() {
+ Key<Rect> key = CameraCharacteristics.SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE;
+ Rect activeArray = getValueFromKeyNonNull(key);
+
+ if (activeArray == null) {
+ return new Rect(0, 0, 0, 0);
+ }
+
+ Size pixelArraySize = getPixelArraySizeChecked();
+ checkTrueForKey(key, "values left/top are invalid", activeArray.left >= 0 && activeArray.top >= 0);
+ checkTrueForKey(key, "values width/height are invalid",
+ activeArray.width() <= pixelArraySize.getWidth() &&
+ activeArray.height() <= pixelArraySize.getHeight());
+
+ return activeArray;
+ }
+
+ /**
+ * Get and check active array size.
+ */
+ public Rect getActiveArraySizeChecked() {
+ Key<Rect> key = CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE;
+ Rect activeArray = getValueFromKeyNonNull(key);
+
+ if (activeArray == null) {
+ return new Rect(0, 0, 0, 0);
+ }
+
+ Size pixelArraySize = getPixelArraySizeChecked();
+ checkTrueForKey(key, "values left/top are invalid", activeArray.left >= 0 && activeArray.top >= 0);
+ checkTrueForKey(key, "values width/height are invalid",
+ activeArray.width() <= pixelArraySize.getWidth() &&
+ activeArray.height() <= pixelArraySize.getHeight());
+
+ return activeArray;
+ }
+
+    /**
+     * Get the dimensions to use for RAW16 buffers.
+     *
+     * <p>RAW output sizes must include either the pre-corrected active array size
+     * or the full pixel array size; whichever is found is returned.</p>
+     *
+     * @return a valid RAW16 capture size.
+     * @throws Exception if no RAW sizes are available or neither allowed size matches.
+     */
+    public Size getRawDimensChecked() throws Exception {
+        Size[] targetCaptureSizes = getAvailableSizesForFormatChecked(ImageFormat.RAW_SENSOR,
+                StaticMetadata.StreamDirection.Output);
+        Assert.assertTrue("No capture sizes available for RAW format!",
+                targetCaptureSizes.length != 0);
+        Rect activeArray = getPreCorrectedActiveArraySizeChecked();
+        Size preCorrectionActiveArraySize =
+                new Size(activeArray.width(), activeArray.height());
+        Size pixelArraySize = getPixelArraySizeChecked();
+        // Both candidate sizes must be non-degenerate before matching.
+        Assert.assertTrue("Missing pre-correction active array size", activeArray.width() > 0 &&
+                activeArray.height() > 0);
+        Assert.assertTrue("Missing pixel array size", pixelArraySize.getWidth() > 0 &&
+                pixelArraySize.getHeight() > 0);
+        Size[] allowedArraySizes = new Size[] { preCorrectionActiveArraySize,
+                pixelArraySize };
+        return assertArrayContainsAnyOf("Available sizes for RAW format" +
+                " must include either the pre-corrected active array size, or the full " +
+                "pixel array size", targetCaptureSizes, allowedArraySizes);
+    }
+
+    /**
+     * Clamp a sensitivity value into the device's legal range.
+     *
+     * <p>If the reported range itself violates the spec bounds, the offending
+     * endpoint is flagged and replaced with the legal limit before clamping.</p>
+     *
+     * @param sensitivity Input sensitivity value to check.
+     * @return Sensitivity value in legal range.
+     */
+    public int getSensitivityClampToRange(int sensitivity) {
+        int lo = getSensitivityMinimumOrDefault(Integer.MAX_VALUE);
+        int hi = getSensitivityMaximumOrDefault(Integer.MIN_VALUE);
+        if (lo > SENSOR_INFO_SENSITIVITY_RANGE_MIN_AT_MOST) {
+            failKeyCheck(CameraCharacteristics.SENSOR_INFO_SENSITIVITY_RANGE,
+                    String.format(
+                    "Min value %d is too large, set to maximal legal value %d",
+                    lo, SENSOR_INFO_SENSITIVITY_RANGE_MIN_AT_MOST));
+            lo = SENSOR_INFO_SENSITIVITY_RANGE_MIN_AT_MOST;
+        }
+        if (hi < SENSOR_INFO_SENSITIVITY_RANGE_MAX_AT_LEAST) {
+            failKeyCheck(CameraCharacteristics.SENSOR_INFO_SENSITIVITY_RANGE,
+                    String.format(
+                    "Max value %d is too small, set to minimal legal value %d",
+                    hi, SENSOR_INFO_SENSITIVITY_RANGE_MAX_AT_LEAST));
+            hi = SENSOR_INFO_SENSITIVITY_RANGE_MAX_AT_LEAST;
+        }
+        // Standard clamp: lo <= result <= hi.
+        return Math.max(lo, Math.min(hi, sensitivity));
+    }
+
+    /**
+     * Get maxAnalogSensitivity for a camera device.
+     * <p>
+     * This is only available for FULL capability device, return 0 if it is unavailable.
+     * </p>
+     *
+     * @return maxAnalogSensitivity, 0 if it is not available.
+     */
+    public int getMaxAnalogSensitivityChecked() {
+
+        Key<Integer> key = CameraCharacteristics.SENSOR_MAX_ANALOG_SENSITIVITY;
+        Integer maxAnalogsensitivity = mCharacteristics.get(key);
+        if (maxAnalogsensitivity == null) {
+            if (isHardwareLevelFull()) {
+                // FULL devices must report this key.
+                Assert.fail("Full device should report max analog sensitivity");
+            }
+            return 0;
+        }
+
+        int minSensitivity = getSensitivityMinimumOrDefault();
+        int maxSensitivity = getSensitivityMaximumOrDefault();
+        checkTrueForKey(key, " Max analog sensitivity " + maxAnalogsensitivity
+                + " should be no larger than max sensitivity " + maxSensitivity,
+                maxAnalogsensitivity <= maxSensitivity);
+        // Bug fix: the original message printed maxSensitivity here even though the
+        // condition compares against minSensitivity.
+        checkTrueForKey(key, " Max analog sensitivity " + maxAnalogsensitivity
+                + " should be larger than min sensitivity " + minSensitivity,
+                maxAnalogsensitivity > minSensitivity);
+
+        return maxAnalogsensitivity;
+    }
+
+    /**
+     * Get hyperfocalDistance and do the sanity check.
+     * <p>
+     * Note that, this tag is optional, will return -1 if this tag is not
+     * available.
+     * </p>
+     *
+     * @return hyperfocalDistance of this device, -1 if this tag is not available.
+     */
+    public float getHyperfocalDistanceChecked() {
+        Key<Float> key = CameraCharacteristics.LENS_INFO_HYPERFOCAL_DISTANCE;
+        Float hyperfocalDistance = getValueFromKeyNonNull(key);
+        if (hyperfocalDistance == null) {
+            return -1;
+        }
+
+        if (hasFocuser()) {
+            float minFocusDistance = getMinimumFocusDistanceChecked();
+            // Bug fix: the original message duplicated "should be in the range of".
+            checkTrueForKey(key, String.format(" hyperfocal distance %f should be in the"
+                    + " range of (%f, %f]", hyperfocalDistance, 0.0f,
+                    minFocusDistance),
+                    hyperfocalDistance > 0 && hyperfocalDistance <= minFocusDistance);
+        }
+
+        return hyperfocalDistance;
+    }
+
+    /**
+     * Get the minimum value for a sensitivity range from android.sensor.info.sensitivityRange.
+     *
+     * <p>If the camera is incorrectly reporting values, log a warning and return
+     * the default value instead, which is the largest minimum value required to be supported
+     * by all camera devices.</p>
+     *
+     * @return The value reported by the camera device or the defaultValue otherwise.
+     */
+    public int getSensitivityMinimumOrDefault() {
+        // Delegate with the spec-mandated upper bound on the minimum.
+        return getSensitivityMinimumOrDefault(SENSOR_INFO_SENSITIVITY_RANGE_MIN_AT_MOST);
+    }
+
+    /**
+     * Get the minimum value for a sensitivity range from android.sensor.info.sensitivityRange.
+     *
+     * <p>When the range is missing, the failure is logged against the key and the
+     * supplied default is returned instead.</p>
+     *
+     * @param defaultValue Value to return if no legal value is available
+     * @return The value reported by the camera device or the defaultValue otherwise.
+     */
+    public int getSensitivityMinimumOrDefault(int defaultValue) {
+        Range<Integer> range = getValueFromKeyNonNull(
+                CameraCharacteristics.SENSOR_INFO_SENSITIVITY_RANGE);
+        if (range != null) {
+            return range.getLower();
+        }
+        failKeyCheck(CameraCharacteristics.SENSOR_INFO_SENSITIVITY_RANGE,
+                "had no valid minimum value; using default of " + defaultValue);
+        return defaultValue;
+    }
+
+    /**
+     * Get the maximum value for a sensitivity range from android.sensor.info.sensitivityRange.
+     *
+     * <p>If the camera is incorrectly reporting values, log a warning and return
+     * the default value instead, which is the smallest maximum value required to be supported
+     * by all camera devices.</p>
+     *
+     * @return The value reported by the camera device or the defaultValue otherwise.
+     */
+    public int getSensitivityMaximumOrDefault() {
+        // Delegate with the spec-mandated lower bound on the maximum.
+        return getSensitivityMaximumOrDefault(SENSOR_INFO_SENSITIVITY_RANGE_MAX_AT_LEAST);
+    }
+
+    /**
+     * Get the maximum value for a sensitivity range from android.sensor.info.sensitivityRange.
+     *
+     * <p>When the range is missing, the failure is logged against the key and the
+     * supplied default is returned instead.</p>
+     *
+     * @param defaultValue Value to return if no legal value is available
+     * @return The value reported by the camera device or the defaultValue otherwise.
+     */
+    public int getSensitivityMaximumOrDefault(int defaultValue) {
+        Range<Integer> range = getValueFromKeyNonNull(
+                CameraCharacteristics.SENSOR_INFO_SENSITIVITY_RANGE);
+        if (range != null) {
+            return range.getUpper();
+        }
+        failKeyCheck(CameraCharacteristics.SENSOR_INFO_SENSITIVITY_RANGE,
+                "had no valid maximum value; using default of " + defaultValue);
+        return defaultValue;
+    }
+
+    /**
+     * Get the minimum value for an exposure range from android.sensor.info.exposureTimeRange.
+     *
+     * <p>When the range is missing, the failure is logged against the key and the
+     * supplied default is returned instead.</p>
+     *
+     * @param defaultValue Value to return if no legal value is available
+     * @return The value reported by the camera device or the defaultValue otherwise.
+     */
+    public long getExposureMinimumOrDefault(long defaultValue) {
+        Range<Long> range = getValueFromKeyNonNull(
+                CameraCharacteristics.SENSOR_INFO_EXPOSURE_TIME_RANGE);
+        if (range != null) {
+            return range.getLower();
+        }
+        failKeyCheck(CameraCharacteristics.SENSOR_INFO_EXPOSURE_TIME_RANGE,
+                "had no valid minimum value; using default of " + defaultValue);
+        return defaultValue;
+    }
+
+    /**
+     * Get the minimum value for an exposure range from android.sensor.info.exposureTimeRange.
+     *
+     * <p>If the camera is incorrectly reporting values, log a warning and return
+     * the default value instead, which is the largest minimum value required to be supported
+     * by all camera devices.</p>
+     *
+     * @return The value reported by the camera device or the defaultValue otherwise.
+     */
+    public long getExposureMinimumOrDefault() {
+        // Delegate with the spec-mandated upper bound on the minimum exposure time.
+        return getExposureMinimumOrDefault(SENSOR_INFO_EXPOSURE_TIME_RANGE_MIN_AT_MOST);
+    }
+
+    /**
+     * Get the maximum value for an exposure range from android.sensor.info.exposureTimeRange.
+     *
+     * <p>When the range is missing, the failure is logged against the key and the
+     * supplied default is returned instead.</p>
+     *
+     * @param defaultValue Value to return if no legal value is available
+     * @return The value reported by the camera device or the defaultValue otherwise.
+     */
+    public long getExposureMaximumOrDefault(long defaultValue) {
+        Range<Long> range = getValueFromKeyNonNull(
+                CameraCharacteristics.SENSOR_INFO_EXPOSURE_TIME_RANGE);
+        if (range != null) {
+            return range.getUpper();
+        }
+        failKeyCheck(CameraCharacteristics.SENSOR_INFO_EXPOSURE_TIME_RANGE,
+                "had no valid maximum value; using default of " + defaultValue);
+        return defaultValue;
+    }
+
+    /**
+     * Get the maximum value for an exposure range from android.sensor.info.exposureTimeRange.
+     *
+     * <p>If the camera is incorrectly reporting values, log a warning and return
+     * the default value instead, which is the smallest maximum value required to be supported
+     * by all camera devices.</p>
+     *
+     * @return The value reported by the camera device or the defaultValue otherwise.
+     */
+    public long getExposureMaximumOrDefault() {
+        // Delegate with the spec-mandated lower bound on the maximum exposure time.
+        return getExposureMaximumOrDefault(SENSOR_INFO_EXPOSURE_TIME_RANGE_MAX_AT_LEAST);
+    }
+
+    /**
+     * Get android.control.availableModes and do the sanity check.
+     *
+     * <p>AUTO is mandatory on all devices; OFF is mandatory when AE, AF, and AWB
+     * all support their OFF modes; USE_SCENE_MODE is mandatory when any real scene
+     * mode is advertised.</p>
+     *
+     * @return available control modes; empty array if the key is absent.
+     */
+    public int[] getAvailableControlModesChecked() {
+        Key<int[]> modesKey = CameraCharacteristics.CONTROL_AVAILABLE_MODES;
+        int[] modes = getValueFromKeyNonNull(modesKey);
+        if (modes == null) {
+            modes = new int[0];
+        }
+
+        List<Integer> modeList = Arrays.asList(CameraTestUtils.toObject(modes));
+        checkTrueForKey(modesKey, "value is empty", !modeList.isEmpty());
+
+        // All camera device must support AUTO
+        checkTrueForKey(modesKey, "values " + modeList.toString() + " must contain AUTO mode",
+                modeList.contains(CameraMetadata.CONTROL_MODE_AUTO));
+
+        // Full manual (3A all OFF-capable) implies the top-level OFF control mode.
+        boolean isAeOffSupported = Arrays.asList(
+                CameraTestUtils.toObject(getAeAvailableModesChecked())).contains(
+                        CameraMetadata.CONTROL_AE_MODE_OFF);
+        boolean isAfOffSupported = Arrays.asList(
+                CameraTestUtils.toObject(getAfAvailableModesChecked())).contains(
+                        CameraMetadata.CONTROL_AF_MODE_OFF);
+        boolean isAwbOffSupported = Arrays.asList(
+                CameraTestUtils.toObject(getAwbAvailableModesChecked())).contains(
+                        CameraMetadata.CONTROL_AWB_MODE_OFF);
+        if (isAeOffSupported && isAfOffSupported && isAwbOffSupported) {
+            // 3A OFF controls are supported, OFF mode must be supported here.
+            checkTrueForKey(modesKey, "values " + modeList.toString() + " must contain OFF mode",
+                    modeList.contains(CameraMetadata.CONTROL_MODE_OFF));
+        }
+
+        if (isSceneModeSupported()) {
+            checkTrueForKey(modesKey, "values " + modeList.toString() + " must contain"
+                    + " USE_SCENE_MODE",
+                    modeList.contains(CameraMetadata.CONTROL_MODE_USE_SCENE_MODE));
+        }
+
+        return modes;
+    }
+
+ public boolean isSceneModeSupported() {
+ List<Integer> availableSceneModes = Arrays.asList(
+ CameraTestUtils.toObject(getAvailableSceneModesChecked()));
+
+ if (availableSceneModes.isEmpty()) {
+ return false;
+ }
+
+ // If sceneMode is not supported, camera device will contain single entry: DISABLED.
+ return availableSceneModes.size() > 1 ||
+ !availableSceneModes.contains(CameraMetadata.CONTROL_SCENE_MODE_DISABLED);
+ }
+
+    /**
+     * Get aeAvailableModes and do the sanity check.
+     *
+     * <p>Depending on the check level this class has, for WAR or COLLECT levels,
+     * if the aeMode list is invalid, return an empty mode array so the caller
+     * doesn't have to abort execution. ON must always be present; the flash-based
+     * modes must be present exactly when a flash unit is reported; FULL devices
+     * must additionally support OFF.</p>
+     *
+     * @return AE available modes
+     */
+    public int[] getAeAvailableModesChecked() {
+        Key<int[]> modesKey = CameraCharacteristics.CONTROL_AE_AVAILABLE_MODES;
+        int[] modes = getValueFromKeyNonNull(modesKey);
+        if (modes == null) {
+            modes = new int[0];
+        }
+        List<Integer> modeList = new ArrayList<Integer>();
+        for (int mode : modes) {
+            modeList.add(mode);
+        }
+        checkTrueForKey(modesKey, "value is empty", !modeList.isEmpty());
+
+        // All camera device must support ON
+        checkTrueForKey(modesKey, "values " + modeList.toString() + " must contain ON mode",
+                modeList.contains(CameraMetadata.CONTROL_AE_MODE_ON));
+
+        // All camera devices with flash units support ON_AUTO_FLASH and ON_ALWAYS_FLASH
+        Key<Boolean> flashKey= CameraCharacteristics.FLASH_INFO_AVAILABLE;
+        Boolean hasFlash = getValueFromKeyNonNull(flashKey);
+        if (hasFlash == null) {
+            hasFlash = false;
+        }
+        if (hasFlash) {
+            boolean flashModeConsistentWithFlash =
+                    modeList.contains(CameraMetadata.CONTROL_AE_MODE_ON_AUTO_FLASH) &&
+                    modeList.contains(CameraMetadata.CONTROL_AE_MODE_ON_ALWAYS_FLASH);
+            // Bug fix: original fragments concatenated to "flash isavailable".
+            checkTrueForKey(modesKey,
+                    "value must contain ON_AUTO_FLASH and ON_ALWAYS_FLASH when flash is " +
+                    "available", flashModeConsistentWithFlash);
+        } else {
+            boolean flashModeConsistentWithoutFlash =
+                    !(modeList.contains(CameraMetadata.CONTROL_AE_MODE_ON_AUTO_FLASH) ||
+                    modeList.contains(CameraMetadata.CONTROL_AE_MODE_ON_ALWAYS_FLASH) ||
+                    modeList.contains(CameraMetadata.CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE));
+            // Bug fix: original fragments concatenated to "andON_AUTO_FLASH_REDEYE".
+            checkTrueForKey(modesKey,
+                    "value must not contain ON_AUTO_FLASH, ON_ALWAYS_FLASH and " +
+                    "ON_AUTO_FLASH_REDEYE when flash is unavailable",
+                    flashModeConsistentWithoutFlash);
+        }
+
+        // FULL mode camera devices always support OFF mode.
+        boolean condition =
+                !isHardwareLevelFull() || modeList.contains(CameraMetadata.CONTROL_AE_MODE_OFF);
+        checkTrueForKey(modesKey, "Full capability device must have OFF mode", condition);
+
+        // Boundary check.
+        for (int mode : modes) {
+            checkTrueForKey(modesKey, "Value " + mode + " is out of bound",
+                    mode >= CameraMetadata.CONTROL_AE_MODE_OFF
+                    && mode <= CameraMetadata.CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE);
+        }
+
+        return modes;
+    }
+
+    /**
+     * Get available AWB modes and do the sanity check.
+     *
+     * <p>AUTO is mandatory everywhere; OFF is additionally mandatory for FULL
+     * capability devices.</p>
+     *
+     * @return array that contains available AWB modes, empty array if awbAvailableModes is
+     * unavailable.
+     */
+    public int[] getAwbAvailableModesChecked() {
+        Key<int[]> key = CameraCharacteristics.CONTROL_AWB_AVAILABLE_MODES;
+        int[] awbModes = getValueFromKeyNonNull(key);
+        if (awbModes == null) {
+            return new int[0];
+        }
+
+        List<Integer> modesList = Arrays.asList(CameraTestUtils.toObject(awbModes));
+        checkTrueForKey(key, " All camera devices must support AUTO mode",
+                modesList.contains(CameraMetadata.CONTROL_AWB_MODE_AUTO));
+        if (isHardwareLevelFull()) {
+            checkTrueForKey(key, " Full capability camera devices must support OFF mode",
+                    modesList.contains(CameraMetadata.CONTROL_AWB_MODE_OFF));
+        }
+        return awbModes;
+    }
+
+    /**
+     * Get available AF modes and do the sanity check.
+     *
+     * <p>LIMITED-or-better devices must advertise OFF (some LEGACY devices do
+     * not); devices with a focuser must advertise AUTO.</p>
+     *
+     * @return array that contains available AF modes, empty array if afAvailableModes is
+     * unavailable.
+     */
+    public int[] getAfAvailableModesChecked() {
+        Key<int[]> key = CameraCharacteristics.CONTROL_AF_AVAILABLE_MODES;
+        int[] afModes = getValueFromKeyNonNull(key);
+        if (afModes == null) {
+            return new int[0];
+        }
+
+        List<Integer> modesList = Arrays.asList(CameraTestUtils.toObject(afModes));
+        if (isHardwareLevelLimitedOrBetter()) {
+            // Some LEGACY mode devices do not support AF OFF
+            checkTrueForKey(key, " All camera devices must support OFF mode",
+                    modesList.contains(CameraMetadata.CONTROL_AF_MODE_OFF));
+        }
+        if (hasFocuser()) {
+            checkTrueForKey(key, " Camera devices that have focuser units must support AUTO mode",
+                    modesList.contains(CameraMetadata.CONTROL_AF_MODE_AUTO));
+        }
+        return afModes;
+    }
+
+    /**
+     * Get supported raw output sizes and do the check.
+     *
+     * <p>Convenience wrapper over {@link #getAvailableSizesForFormatChecked} for
+     * RAW_SENSOR output streams.</p>
+     *
+     * @return Empty size array if raw output is not supported
+     */
+    public Size[] getRawOutputSizesChecked() {
+        return getAvailableSizesForFormatChecked(ImageFormat.RAW_SENSOR,
+                StreamDirection.Output);
+    }
+
+    /**
+     * Get supported jpeg output sizes and do the check.
+     *
+     * <p>Convenience wrapper over {@link #getAvailableSizesForFormatChecked} for
+     * JPEG output streams.</p>
+     *
+     * @return Empty size array if jpeg output is not supported
+     */
+    public Size[] getJpegOutputSizesChecked() {
+        return getAvailableSizesForFormatChecked(ImageFormat.JPEG,
+                StreamDirection.Output);
+    }
+
+    /**
+     * Used to determine the stream direction for various helpers that look up
+     * format or size information.
+     */
+    public enum StreamDirection {
+        /** Stream is used with {@link android.hardware.camera2.CameraDevice#configureOutputs} */
+        Output,
+        /** Stream is used with {@code CameraDevice#configureInputs} -- NOT YET PUBLIC */
+        Input
+    }
+
+    /**
+     * Get available formats for a given direction.
+     *
+     * @param direction The stream direction, input or output.
+     * @return The formats of the given direction, empty array if no available format is found.
+     */
+    public int[] getAvailableFormats(StreamDirection direction) {
+        StreamConfigurationMap config = getValueFromKeyNonNull(
+                CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
+        if (config == null) {
+            return new int[0];
+        }
+
+        if (direction == StreamDirection.Output) {
+            return config.getOutputFormats();
+        }
+        if (direction == StreamDirection.Input) {
+            return config.getInputFormats();
+        }
+        // Defensive: unreachable for the two-value enum, kept for parity.
+        throw new IllegalArgumentException("direction must be output or input");
+    }
+
+    /**
+     * Get valid output formats for a given input format.
+     *
+     * @param inputFormat The input format used to produce the output images.
+     * @return The output formats for the given input format, empty array if
+     * no available format is found.
+     */
+    public int[] getValidOutputFormatsForInput(int inputFormat) {
+        StreamConfigurationMap config = getValueFromKeyNonNull(
+                CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
+        // Missing stream configuration map means no reprocessing paths at all.
+        return (config == null)
+                ? new int[0]
+                : config.getValidOutputFormatsForInput(inputFormat);
+    }
+
+    /**
+     * Get available sizes for given format and direction.
+     *
+     * <p>Includes both the regular and high-resolution (slow) size lists; see the
+     * four-argument overload for finer control.</p>
+     *
+     * @param format The format for the requested size array.
+     * @param direction The stream direction, input or output.
+     * @return The sizes of the given format, empty array if no available size is found.
+     */
+    public Size[] getAvailableSizesForFormatChecked(int format, StreamDirection direction) {
+        return getAvailableSizesForFormatChecked(format, direction,
+                /*fastSizes*/true, /*slowSizes*/true);
+    }
+
+ /**
+ * Get available sizes for given format and direction, and whether to limit to slow or fast
+ * resolutions.
+ *
+ * @param format The format for the requested size array.
+ * @param direction The stream direction, input or output.
+ * @param fastSizes whether to include getOutputSizes() sizes (generally faster)
+ * @param slowSizes whether to include getHighResolutionOutputSizes() sizes (generally slower)
+ * @return The sizes of the given format, empty array if no available size is found.
+ */
+ public Size[] getAvailableSizesForFormatChecked(int format, StreamDirection direction,
+ boolean fastSizes, boolean slowSizes) {
+ Key<StreamConfigurationMap> key =
+ CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP;
+ StreamConfigurationMap config = getValueFromKeyNonNull(key);
+
+ if (config == null) {
+ return new Size[0];
+ }
+
+ Size[] sizes = null;
+
+ switch (direction) {
+ case Output:
+ Size[] fastSizeList = null;
+ Size[] slowSizeList = null;
+ if (fastSizes) {
+ fastSizeList = config.getOutputSizes(format);
+ }
+ if (slowSizes) {
+ slowSizeList = config.getHighResolutionOutputSizes(format);
+ }
+ if (fastSizeList != null && slowSizeList != null) {
+ sizes = new Size[slowSizeList.length + fastSizeList.length];
+ System.arraycopy(fastSizeList, 0, sizes, 0, fastSizeList.length);
+ System.arraycopy(slowSizeList, 0, sizes, fastSizeList.length, slowSizeList.length);
+ } else if (fastSizeList != null) {
+ sizes = fastSizeList;
+ } else if (slowSizeList != null) {
+ sizes = slowSizeList;
+ }
+ break;
+ case Input:
+ sizes = config.getInputSizes(format);
+ break;
+ default:
+ throw new IllegalArgumentException("direction must be output or input");
+ }
+
+ if (sizes == null) {
+ sizes = new Size[0];
+ }
+
+ return sizes;
+ }
+
+ /**
+ * Get available AE target fps ranges.
+ *
+ * @return Empty int array if aeAvailableTargetFpsRanges is invalid.
+ */
+ @SuppressWarnings("raw")
+ public Range<Integer>[] getAeAvailableTargetFpsRangesChecked() {
+ Key<Range<Integer>[]> key =
+ CameraCharacteristics.CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES;
+ Range<Integer>[] fpsRanges = getValueFromKeyNonNull(key);
+
+ if (fpsRanges == null) {
+ return new Range[0];
+ }
+
+ // Round down to 2 boundary if it is not integer times of 2, to avoid array out of bound
+ // in case the above check fails.
+ int fpsRangeLength = fpsRanges.length;
+ int minFps, maxFps;
+ long maxFrameDuration = getMaxFrameDurationChecked();
+ for (int i = 0; i < fpsRangeLength; i += 1) {
+ minFps = fpsRanges[i].getLower();
+ maxFps = fpsRanges[i].getUpper();
+ checkTrueForKey(key, " min fps must be no larger than max fps!",
+ minFps > 0 && maxFps >= minFps);
+ long maxDuration = (long) (1e9 / minFps);
+ checkTrueForKey(key, String.format(
+ " the frame duration %d for min fps %d must smaller than maxFrameDuration %d",
+ maxDuration, minFps, maxFrameDuration), maxDuration <= maxFrameDuration);
+ }
+ return fpsRanges;
+ }
+
+ /**
+ * Get the highest supported target FPS range.
+ * Prioritizes maximizing the min FPS, then the max FPS without lowering min FPS.
+ */
+ public Range<Integer> getAeMaxTargetFpsRange() {
+ Range<Integer>[] fpsRanges = getAeAvailableTargetFpsRangesChecked();
+
+ Range<Integer> targetRange = fpsRanges[0];
+ // Assume unsorted list of target FPS ranges, so use two passes, first maximize min FPS
+ for (Range<Integer> candidateRange : fpsRanges) {
+ if (candidateRange.getLower() > targetRange.getLower()) {
+ targetRange = candidateRange;
+ }
+ }
+ // Then maximize max FPS while not lowering min FPS
+ for (Range<Integer> candidateRange : fpsRanges) {
+ if (candidateRange.getLower() >= targetRange.getLower() &&
+ candidateRange.getUpper() > targetRange.getUpper()) {
+ targetRange = candidateRange;
+ }
+ }
+ return targetRange;
+ }
+
+ /**
+ * Get max frame duration.
+ *
+ * @return 0 if maxFrameDuration is null
+ */
+ public long getMaxFrameDurationChecked() {
+ Key<Long> key =
+ CameraCharacteristics.SENSOR_INFO_MAX_FRAME_DURATION;
+ Long maxDuration = getValueFromKeyNonNull(key);
+
+ if (maxDuration == null) {
+ return 0;
+ }
+
+ return maxDuration;
+ }
+
+ /**
+ * Get available minimal frame durations for a given format.
+ *
+ * @param format One of the format from {@link ImageFormat}.
+ * @return HashMap of minimal frame durations for different sizes, empty HashMap
+ * if availableMinFrameDurations is null.
+ */
+ public HashMap<Size, Long> getAvailableMinFrameDurationsForFormatChecked(int format) {
+
+ HashMap<Size, Long> minDurationMap = new HashMap<Size, Long>();
+
+ Key<StreamConfigurationMap> key =
+ CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP;
+ StreamConfigurationMap config = getValueFromKeyNonNull(key);
+
+ if (config == null) {
+ return minDurationMap;
+ }
+
+ for (Size size : getAvailableSizesForFormatChecked(format,
+ StreamDirection.Output)) {
+ long minFrameDuration = config.getOutputMinFrameDuration(format, size);
+
+ if (minFrameDuration != 0) {
+ minDurationMap.put(new Size(size.getWidth(), size.getHeight()), minFrameDuration);
+ }
+ }
+
+ return minDurationMap;
+ }
+
+    /**
+     * Get available edge modes and do the sanity check.
+     *
+     * @return available edge modes, empty array if the key value is unavailable.
+     */
+    public int[] getAvailableEdgeModesChecked() {
+        Key<int[]> key = CameraCharacteristics.EDGE_AVAILABLE_EDGE_MODES;
+        int[] edgeModes = getValueFromKeyNonNull(key);
+
+        if (edgeModes == null) {
+            return new int[0];
+        }
+
+        List<Integer> modeList = Arrays.asList(CameraTestUtils.toObject(edgeModes));
+        // Full device should always include OFF and FAST
+        if (isHardwareLevelFull()) {
+            checkTrueForKey(key, "Full device must contain OFF and FAST edge modes",
+                    modeList.contains(CameraMetadata.EDGE_MODE_OFF) &&
+                    modeList.contains(CameraMetadata.EDGE_MODE_FAST));
+        }
+
+        if (isHardwareLevelLimitedOrBetter()) {
+            // FAST and HIGH_QUALITY mode must be both present or both not present
+            List<Integer> coupledModes = Arrays.asList(new Integer[] {
+                    CameraMetadata.EDGE_MODE_FAST,
+                    CameraMetadata.EDGE_MODE_HIGH_QUALITY
+            });
+            checkTrueForKey(
+                    key, " FAST and HIGH_QUALITY mode must both present or both not present",
+                    containsAllOrNone(modeList, coupledModes));
+        }
+
+        return edgeModes;
+    }
+
+    /**
+     * Get available noise reduction modes and do the sanity check.
+     *
+     * @return available noise reduction modes, empty array if the key value is unavailable.
+     */
+    public int[] getAvailableNoiseReductionModesChecked() {
+        Key<int[]> key =
+                CameraCharacteristics.NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES;
+        int[] noiseReductionModes = getValueFromKeyNonNull(key);
+
+        if (noiseReductionModes == null) {
+            return new int[0];
+        }
+
+        List<Integer> modeList = Arrays.asList(CameraTestUtils.toObject(noiseReductionModes));
+        // Full device should always include OFF and FAST
+        if (isHardwareLevelFull()) {
+
+            checkTrueForKey(key, "Full device must contain OFF and FAST noise reduction modes",
+                    modeList.contains(CameraMetadata.NOISE_REDUCTION_MODE_OFF) &&
+                    modeList.contains(CameraMetadata.NOISE_REDUCTION_MODE_FAST));
+        }
+
+        if (isHardwareLevelLimitedOrBetter()) {
+            // FAST and HIGH_QUALITY mode must be both present or both not present
+            List<Integer> coupledModes = Arrays.asList(new Integer[] {
+                    CameraMetadata.NOISE_REDUCTION_MODE_FAST,
+                    CameraMetadata.NOISE_REDUCTION_MODE_HIGH_QUALITY
+            });
+            checkTrueForKey(
+                    key, " FAST and HIGH_QUALITY mode must both present or both not present",
+                    containsAllOrNone(modeList, coupledModes));
+        }
+        return noiseReductionModes;
+    }
+
+ /**
+ * Get value of key android.control.aeCompensationStep and do the sanity check.
+ *
+ * @return default value if the value is null.
+ */
+ public Rational getAeCompensationStepChecked() {
+ Key<Rational> key =
+ CameraCharacteristics.CONTROL_AE_COMPENSATION_STEP;
+ Rational compensationStep = getValueFromKeyNonNull(key);
+
+ if (compensationStep == null) {
+ // Return default step.
+ return CONTROL_AE_COMPENSATION_STEP_DEFAULT;
+ }
+
+ // Legacy devices don't have a minimum step requirement
+ if (isHardwareLevelLimitedOrBetter()) {
+ float compensationStepF =
+ (float) compensationStep.getNumerator() / compensationStep.getDenominator();
+ checkTrueForKey(key, " value must be no more than 1/2", compensationStepF <= 0.5f);
+ }
+
+ return compensationStep;
+ }
+
+ /**
+ * Get value of key android.control.aeCompensationRange and do the sanity check.
+ *
+ * @return default value if the value is null or malformed.
+ */
+ public Range<Integer> getAeCompensationRangeChecked() {
+ Key<Range<Integer>> key =
+ CameraCharacteristics.CONTROL_AE_COMPENSATION_RANGE;
+ Range<Integer> compensationRange = getValueFromKeyNonNull(key);
+ Rational compensationStep = getAeCompensationStepChecked();
+ float compensationStepF = compensationStep.floatValue();
+ final Range<Integer> DEFAULT_RANGE = Range.create(
+ (int)(CONTROL_AE_COMPENSATION_RANGE_DEFAULT_MIN / compensationStepF),
+ (int)(CONTROL_AE_COMPENSATION_RANGE_DEFAULT_MAX / compensationStepF));
+ final Range<Integer> ZERO_RANGE = Range.create(0, 0);
+ if (compensationRange == null) {
+ return ZERO_RANGE;
+ }
+
+ // Legacy devices don't have a minimum range requirement
+ if (isHardwareLevelLimitedOrBetter() && !compensationRange.equals(ZERO_RANGE)) {
+ checkTrueForKey(key, " range value must be at least " + DEFAULT_RANGE
+ + ", actual " + compensationRange + ", compensation step " + compensationStep,
+ compensationRange.getLower() <= DEFAULT_RANGE.getLower() &&
+ compensationRange.getUpper() >= DEFAULT_RANGE.getUpper());
+ }
+
+ return compensationRange;
+ }
+
+ /**
+ * Get availableVideoStabilizationModes and do the sanity check.
+ *
+ * @return available video stabilization modes, empty array if it is unavailable.
+ */
+ public int[] getAvailableVideoStabilizationModesChecked() {
+ Key<int[]> key =
+ CameraCharacteristics.CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES;
+ int[] modes = getValueFromKeyNonNull(key);
+
+ if (modes == null) {
+ return new int[0];
+ }
+
+ List<Integer> modeList = Arrays.asList(CameraTestUtils.toObject(modes));
+ checkTrueForKey(key, " All device should support OFF mode",
+ modeList.contains(CameraMetadata.CONTROL_VIDEO_STABILIZATION_MODE_OFF));
+ checkArrayValuesInRange(key, modes,
+ CameraMetadata.CONTROL_VIDEO_STABILIZATION_MODE_OFF,
+ CameraMetadata.CONTROL_VIDEO_STABILIZATION_MODE_ON);
+
+ return modes;
+ }
+
+ public boolean isVideoStabilizationSupported() {
+ Integer[] videoStabModes =
+ CameraTestUtils.toObject(getAvailableVideoStabilizationModesChecked());
+ return Arrays.asList(videoStabModes).contains(
+ CameraMetadata.CONTROL_VIDEO_STABILIZATION_MODE_ON);
+ }
+
+ /**
+ * Get availableOpticalStabilization and do the sanity check.
+ *
+ * @return available optical stabilization modes, empty array if it is unavailable.
+ */
+ public int[] getAvailableOpticalStabilizationChecked() {
+ Key<int[]> key =
+ CameraCharacteristics.LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION;
+ int[] modes = getValueFromKeyNonNull(key);
+
+ if (modes == null) {
+ return new int[0];
+ }
+
+ checkArrayValuesInRange(key, modes,
+ CameraMetadata.LENS_OPTICAL_STABILIZATION_MODE_OFF,
+ CameraMetadata.LENS_OPTICAL_STABILIZATION_MODE_ON);
+
+ return modes;
+ }
+
+ /**
+ * Get the scaler's max digital zoom ({@code >= 1.0f}) ratio between crop and active array
+ * @return the max zoom ratio, or {@code 1.0f} if the value is unavailable
+ */
+ public float getAvailableMaxDigitalZoomChecked() {
+ Key<Float> key =
+ CameraCharacteristics.SCALER_AVAILABLE_MAX_DIGITAL_ZOOM;
+
+ Float maxZoom = getValueFromKeyNonNull(key);
+ if (maxZoom == null) {
+ return 1.0f;
+ }
+
+ checkTrueForKey(key, " max digital zoom should be no less than 1",
+ maxZoom >= 1.0f && !Float.isNaN(maxZoom) && !Float.isInfinite(maxZoom));
+
+ return maxZoom;
+ }
+
+    /**
+     * Get available scene modes and do the sanity check.
+     *
+     * @return available scene modes, empty array if the key value is unavailable.
+     */
+    public int[] getAvailableSceneModesChecked() {
+        Key<int[]> key =
+                CameraCharacteristics.CONTROL_AVAILABLE_SCENE_MODES;
+        int[] modes = getValueFromKeyNonNull(key);
+
+        if (modes == null) {
+            return new int[0];
+        }
+
+        List<Integer> modeList = Arrays.asList(CameraTestUtils.toObject(modes));
+        // FACE_PRIORITY must be included if face detection is supported.
+        if (areKeysAvailable(CameraCharacteristics.STATISTICS_INFO_MAX_FACE_COUNT) &&
+                getMaxFaceCountChecked() > 0) {
+            checkTrueForKey(key, " FACE_PRIORITY must be included if face detection is supported",
+                    modeList.contains(CameraMetadata.CONTROL_SCENE_MODE_FACE_PRIORITY));
+        }
+
+        return modes;
+    }
+
+    /**
+     * Get available effect modes and do the sanity check.
+     *
+     * @return available effect modes, empty array if the key value is unavailable.
+     */
+    public int[] getAvailableEffectModesChecked() {
+        Key<int[]> key =
+                CameraCharacteristics.CONTROL_AVAILABLE_EFFECTS;
+        int[] modes = getValueFromKeyNonNull(key);
+
+        if (modes == null) {
+            return new int[0];
+        }
+
+        List<Integer> modeList = Arrays.asList(CameraTestUtils.toObject(modes));
+        // OFF must be included.
+        checkTrueForKey(key, " OFF must be included",
+                modeList.contains(CameraMetadata.CONTROL_EFFECT_MODE_OFF));
+
+        return modes;
+    }
+
+ /**
+ * Get and check the available color aberration modes
+ *
+ * @return the available color aberration modes
+ */
+ public int[] getAvailableColorAberrationModesChecked() {
+ Key<int[]> key =
+ CameraCharacteristics.COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES;
+ int[] modes = getValueFromKeyNonNull(key);
+
+ if (modes == null) {
+ return new int[0];
+ }
+
+ List<Integer> modeList = Arrays.asList(CameraTestUtils.toObject(modes));
+ checkTrueForKey(key, " Camera devices must always support either OFF or FAST mode",
+ modeList.contains(CameraMetadata.COLOR_CORRECTION_ABERRATION_MODE_OFF) ||
+ modeList.contains(CameraMetadata.COLOR_CORRECTION_ABERRATION_MODE_FAST));
+
+ if (isHardwareLevelLimitedOrBetter()) {
+ // FAST and HIGH_QUALITY mode must be both present or both not present
+ List<Integer> coupledModes = Arrays.asList(new Integer[] {
+ CameraMetadata.COLOR_CORRECTION_ABERRATION_MODE_FAST,
+ CameraMetadata.COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY
+ });
+ checkTrueForKey(
+ key, " FAST and HIGH_QUALITY mode must both present or both not present",
+ containsAllOrNone(modeList, coupledModes));
+ }
+ checkElementDistinct(key, modeList);
+ checkArrayValuesInRange(key, modes,
+ CameraMetadata.COLOR_CORRECTION_ABERRATION_MODE_OFF,
+ CameraMetadata.COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY);
+
+ return modes;
+ }
+
+ /**
+ * Get max pipeline depth and do the sanity check.
+ *
+ * @return max pipeline depth, default value if it is not available.
+ */
+ public byte getPipelineMaxDepthChecked() {
+ Key<Byte> key =
+ CameraCharacteristics.REQUEST_PIPELINE_MAX_DEPTH;
+ Byte maxDepth = getValueFromKeyNonNull(key);
+
+ if (maxDepth == null) {
+ return REQUEST_PIPELINE_MAX_DEPTH_MAX;
+ }
+
+ checkTrueForKey(key, " max pipeline depth should be no larger than "
+ + REQUEST_PIPELINE_MAX_DEPTH_MAX, maxDepth <= REQUEST_PIPELINE_MAX_DEPTH_MAX);
+
+ return maxDepth;
+ }
+
+ /**
+ * Get available lens shading modes.
+ */
+ public int[] getAvailableLensShadingModesChecked() {
+ Key<int[]> key =
+ CameraCharacteristics.SHADING_AVAILABLE_MODES;
+ int[] modes = getValueFromKeyNonNull(key);
+ if (modes == null) {
+ return new int[0];
+ }
+
+ List<Integer> modeList = Arrays.asList(CameraTestUtils.toObject(modes));
+ // FAST must be included.
+ checkTrueForKey(key, " FAST must be included",
+ modeList.contains(CameraMetadata.SHADING_MODE_FAST));
+
+ if (isCapabilitySupported(
+ CameraMetadata.REQUEST_AVAILABLE_CAPABILITIES_MANUAL_POST_PROCESSING)) {
+ checkTrueForKey(key, " OFF must be included for MANUAL_POST_PROCESSING devices",
+ modeList.contains(CameraMetadata.SHADING_MODE_OFF));
+ }
+ return modes;
+ }
+
+ /**
+ * Get available lens shading map modes.
+ */
+ public int[] getAvailableLensShadingMapModesChecked() {
+ Key<int[]> key =
+ CameraCharacteristics.STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES;
+ int[] modes = getValueFromKeyNonNull(key);
+ if (modes == null) {
+ return new int[0];
+ }
+
+ List<Integer> modeList = Arrays.asList(CameraTestUtils.toObject(modes));
+
+ if (isCapabilitySupported(
+ CameraMetadata.REQUEST_AVAILABLE_CAPABILITIES_RAW)) {
+ checkTrueForKey(key, " ON must be included for RAW capability devices",
+ modeList.contains(CameraMetadata.STATISTICS_LENS_SHADING_MAP_MODE_ON));
+ }
+ return modes;
+ }
+
+
+ /**
+ * Get available capabilities and do the sanity check.
+ *
+ * @return reported available capabilities list, empty list if the value is unavailable.
+ */
+ public List<Integer> getAvailableCapabilitiesChecked() {
+ Key<int[]> key =
+ CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES;
+ int[] availableCaps = getValueFromKeyNonNull(key);
+ List<Integer> capList;
+
+ if (availableCaps == null) {
+ return new ArrayList<Integer>();
+ }
+
+ checkArrayValuesInRange(key, availableCaps,
+ CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE,
+ CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO);
+ capList = Arrays.asList(CameraTestUtils.toObject(availableCaps));
+ return capList;
+ }
+
+ /**
+ * Determine whether the current device supports a capability or not.
+ *
+ * @param capability (non-negative)
+ *
+ * @return {@code true} if the capability is supported, {@code false} otherwise.
+ *
+ * @throws IllegalArgumentException if {@code capability} was negative
+ *
+ * @see CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES
+ */
+ public boolean isCapabilitySupported(int capability) {
+ if (capability < 0) {
+ throw new IllegalArgumentException("capability must be non-negative");
+ }
+
+ List<Integer> availableCapabilities = getAvailableCapabilitiesChecked();
+
+ return availableCapabilities.contains(capability);
+ }
+
+ /**
+ * Determine whether or not all the {@code keys} are available characteristics keys
+ * (as in {@link CameraCharacteristics#getKeys}.
+ *
+ * <p>If this returns {@code true}, then querying for this key from a characteristics
+ * object will always return a non-{@code null} value.</p>
+ *
+ * @param keys collection of camera characteristics keys
+ * @return whether or not all characteristics keys are available
+ */
+ public final boolean areCharacteristicsKeysAvailable(
+ Collection<Key<?>> keys) {
+ return mCharacteristics.getKeys().containsAll(keys);
+ }
+
+ /**
+ * Determine whether or not all the {@code keys} are available result keys
+ * (as in {@link CameraCharacteristics#getAvailableCaptureResultKeys}.
+ *
+ * <p>If this returns {@code true}, then querying for this key from a result
+ * object will almost always return a non-{@code null} value.</p>
+ *
+ * <p>In some cases (e.g. lens shading map), the request must have additional settings
+ * configured in order for the key to correspond to a value.</p>
+ *
+ * @param keys collection of capture result keys
+ * @return whether or not all result keys are available
+ */
+ public final boolean areResultKeysAvailable(Collection<CaptureResult.Key<?>> keys) {
+ return mCharacteristics.getAvailableCaptureResultKeys().containsAll(keys);
+ }
+
+ /**
+ * Determine whether or not all the {@code keys} are available request keys
+ * (as in {@link CameraCharacteristics#getAvailableCaptureRequestKeys}.
+ *
+ * <p>If this returns {@code true}, then setting this key in the request builder
+ * may have some effect (and if it's {@code false}, then the camera device will
+ * definitely ignore it).</p>
+ *
+ * <p>In some cases (e.g. manual control of exposure), other keys must be also be set
+ * in order for a key to take effect (e.g. control.mode set to OFF).</p>
+ *
+ * @param keys collection of capture request keys
+ * @return whether or not all result keys are available
+ */
+ public final boolean areRequestKeysAvailable(Collection<CaptureRequest.Key<?>> keys) {
+ return mCharacteristics.getAvailableCaptureRequestKeys().containsAll(keys);
+ }
+
+ /**
+ * Determine whether or not all the {@code keys} are available characteristics keys
+ * (as in {@link CameraCharacteristics#getKeys}.
+ *
+ * <p>If this returns {@code true}, then querying for this key from a characteristics
+ * object will always return a non-{@code null} value.</p>
+ *
+ * @param keys one or more camera characteristic keys
+ * @return whether or not all characteristics keys are available
+ */
+ @SafeVarargs
+ public final boolean areKeysAvailable(Key<?>... keys) {
+ return areCharacteristicsKeysAvailable(Arrays.asList(keys));
+ }
+
+ /**
+ * Determine whether or not all the {@code keys} are available result keys
+ * (as in {@link CameraCharacteristics#getAvailableCaptureResultKeys}.
+ *
+ * <p>If this returns {@code true}, then querying for this key from a result
+ * object will almost always return a non-{@code null} value.</p>
+ *
+ * <p>In some cases (e.g. lens shading map), the request must have additional settings
+ * configured in order for the key to correspond to a value.</p>
+ *
+ * @param keys one or more capture result keys
+ * @return whether or not all result keys are available
+ */
+ @SafeVarargs
+ public final boolean areKeysAvailable(CaptureResult.Key<?>... keys) {
+ return areResultKeysAvailable(Arrays.asList(keys));
+ }
+
+ /**
+ * Determine whether or not all the {@code keys} are available request keys
+ * (as in {@link CameraCharacteristics#getAvailableCaptureRequestKeys}.
+ *
+ * <p>If this returns {@code true}, then setting this key in the request builder
+ * may have some effect (and if it's {@code false}, then the camera device will
+ * definitely ignore it).</p>
+ *
+ * <p>In some cases (e.g. manual control of exposure), other keys must be also be set
+ * in order for a key to take effect (e.g. control.mode set to OFF).</p>
+ *
+ * @param keys one or more capture request keys
+ * @return whether or not all result keys are available
+ */
+ @SafeVarargs
+ public final boolean areKeysAvailable(CaptureRequest.Key<?>... keys) {
+ return areRequestKeysAvailable(Arrays.asList(keys));
+ }
+
+ /*
+ * Determine if camera device support AE lock control
+ *
+ * @return {@code true} if AE lock control is supported
+ */
+ public boolean isAeLockSupported() {
+ return getValueFromKeyNonNull(CameraCharacteristics.CONTROL_AE_LOCK_AVAILABLE);
+ }
+
+ /*
+ * Determine if camera device support AWB lock control
+ *
+ * @return {@code true} if AWB lock control is supported
+ */
+ public boolean isAwbLockSupported() {
+ return getValueFromKeyNonNull(CameraCharacteristics.CONTROL_AWB_LOCK_AVAILABLE);
+ }
+
+
+ /*
+ * Determine if camera device support manual lens shading map control
+ *
+ * @return {@code true} if manual lens shading map control is supported
+ */
+ public boolean isManualLensShadingMapSupported() {
+ return areKeysAvailable(CaptureRequest.SHADING_MODE);
+ }
+
+ /**
+ * Determine if camera device support manual color correction control
+ *
+ * @return {@code true} if manual color correction control is supported
+ */
+ public boolean isColorCorrectionSupported() {
+ return areKeysAvailable(CaptureRequest.COLOR_CORRECTION_MODE);
+ }
+
+ /**
+ * Determine if camera device support manual tone mapping control
+ *
+ * @return {@code true} if manual tone mapping control is supported
+ */
+ public boolean isManualToneMapSupported() {
+ return areKeysAvailable(CaptureRequest.TONEMAP_MODE);
+ }
+
+ /**
+ * Determine if camera device support manual color aberration control
+ *
+ * @return {@code true} if manual color aberration control is supported
+ */
+ public boolean isManualColorAberrationControlSupported() {
+ return areKeysAvailable(CaptureRequest.COLOR_CORRECTION_ABERRATION_MODE);
+ }
+
+ /**
+ * Determine if camera device support edge mode control
+ *
+ * @return {@code true} if edge mode control is supported
+ */
+ public boolean isEdgeModeControlSupported() {
+ return areKeysAvailable(CaptureRequest.EDGE_MODE);
+ }
+
+ /**
+ * Determine if camera device support hot pixel mode control
+ *
+ * @return {@code true} if hot pixel mode control is supported
+ */
+ public boolean isHotPixelMapModeControlSupported() {
+ return areKeysAvailable(CaptureRequest.HOT_PIXEL_MODE);
+ }
+
+ /**
+ * Determine if camera device support noise reduction mode control
+ *
+ * @return {@code true} if noise reduction mode control is supported
+ */
+ public boolean isNoiseReductionModeControlSupported() {
+ return areKeysAvailable(CaptureRequest.NOISE_REDUCTION_MODE);
+ }
+
+ /**
+ * Get max number of output raw streams and do the basic sanity check.
+ *
+ * @return reported max number of raw output stream
+ */
+ public int getMaxNumOutputStreamsRawChecked() {
+ Integer maxNumStreams =
+ getValueFromKeyNonNull(CameraCharacteristics.REQUEST_MAX_NUM_OUTPUT_RAW);
+ if (maxNumStreams == null)
+ return 0;
+ return maxNumStreams;
+ }
+
+ /**
+ * Get max number of output processed streams and do the basic sanity check.
+ *
+ * @return reported max number of processed output stream
+ */
+ public int getMaxNumOutputStreamsProcessedChecked() {
+ Integer maxNumStreams =
+ getValueFromKeyNonNull(CameraCharacteristics.REQUEST_MAX_NUM_OUTPUT_PROC);
+ if (maxNumStreams == null)
+ return 0;
+ return maxNumStreams;
+ }
+
+ /**
+ * Get max number of output stalling processed streams and do the basic sanity check.
+ *
+ * @return reported max number of stalling processed output stream
+ */
+ public int getMaxNumOutputStreamsProcessedStallChecked() {
+ Integer maxNumStreams =
+ getValueFromKeyNonNull(CameraCharacteristics.REQUEST_MAX_NUM_OUTPUT_PROC_STALLING);
+ if (maxNumStreams == null)
+ return 0;
+ return maxNumStreams;
+ }
+
+ /**
+ * Get lens facing and do the sanity check
+ * @return lens facing, return default value (BACK) if value is unavailable.
+ */
+ public int getLensFacingChecked() {
+ Key<Integer> key =
+ CameraCharacteristics.LENS_FACING;
+ Integer facing = getValueFromKeyNonNull(key);
+
+ if (facing == null) {
+ return CameraCharacteristics.LENS_FACING_BACK;
+ }
+
+ checkTrueForKey(key, " value is out of range ",
+ facing >= CameraCharacteristics.LENS_FACING_FRONT &&
+ facing <= CameraCharacteristics.LENS_FACING_BACK);
+ return facing;
+ }
+
+ /**
+ * Get maxCaptureStall frames or default value (if value doesn't exist)
+ * @return maxCaptureStall frames or default value.
+ */
+ public int getMaxCaptureStallOrDefault() {
+ Key<Integer> key =
+ CameraCharacteristics.REPROCESS_MAX_CAPTURE_STALL;
+ Integer value = getValueFromKeyNonNull(key);
+
+ if (value == null) {
+ return MAX_REPROCESS_MAX_CAPTURE_STALL;
+ }
+
+ checkTrueForKey(key, " value is out of range ",
+ value >= 0 &&
+ value <= MAX_REPROCESS_MAX_CAPTURE_STALL);
+
+ return value;
+ }
+
+ /**
+ * Get the scaler's cropping type (center only or freeform)
+ * @return cropping type, return default value (CENTER_ONLY) if value is unavailable
+ */
+ public int getScalerCroppingTypeChecked() {
+ Key<Integer> key =
+ CameraCharacteristics.SCALER_CROPPING_TYPE;
+ Integer value = getValueFromKeyNonNull(key);
+
+ if (value == null) {
+ return CameraCharacteristics.SCALER_CROPPING_TYPE_CENTER_ONLY;
+ }
+
+ checkTrueForKey(key, " value is out of range ",
+ value >= CameraCharacteristics.SCALER_CROPPING_TYPE_CENTER_ONLY &&
+ value <= CameraCharacteristics.SCALER_CROPPING_TYPE_FREEFORM);
+
+ return value;
+ }
+
+ /**
+ * Check if the constrained high speed video is supported by the camera device.
+ * The high speed FPS ranges and sizes are sanitized in
+ * ExtendedCameraCharacteristicsTest#testConstrainedHighSpeedCapability.
+ *
+ * @return true if the constrained high speed video is supported, false otherwise.
+ */
+ public boolean isConstrainedHighSpeedVideoSupported() {
+ List<Integer> availableCapabilities = getAvailableCapabilitiesChecked();
+ return (availableCapabilities.contains(
+ CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO));
+ }
+
+ /**
+ * Check if high speed video is supported (HIGH_SPEED_VIDEO scene mode is
+ * supported, supported high speed fps ranges and sizes are valid).
+ *
+ * @return true if high speed video is supported.
+ */
+ public boolean isHighSpeedVideoSupported() {
+ List<Integer> sceneModes =
+ Arrays.asList(CameraTestUtils.toObject(getAvailableSceneModesChecked()));
+ if (sceneModes.contains(CameraCharacteristics.CONTROL_SCENE_MODE_HIGH_SPEED_VIDEO)) {
+ StreamConfigurationMap config =
+ getValueFromKeyNonNull(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
+ if (config == null) {
+ return false;
+ }
+ Size[] availableSizes = config.getHighSpeedVideoSizes();
+ if (availableSizes.length == 0) {
+ return false;
+ }
+
+ for (Size size : availableSizes) {
+ Range<Integer>[] availableFpsRanges = config.getHighSpeedVideoFpsRangesFor(size);
+ if (availableFpsRanges.length == 0) {
+ return false;
+ }
+ }
+
+ return true;
+ } else {
+ return false;
+ }
+ }
+
+ /**
+ * Check if depth output is supported, based on the depth capability
+ */
+ public boolean isDepthOutputSupported() {
+ return isCapabilitySupported(
+ CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_DEPTH_OUTPUT);
+ }
+
+ /**
+ * Check if standard outputs (PRIVATE, YUV, JPEG) outputs are supported, based on the
+ * backwards-compatible capability
+ */
+ public boolean isColorOutputSupported() {
+ return isCapabilitySupported(
+ CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE);
+ }
+
+ /**
+ * Check if optical black regions key is supported.
+ */
+ public boolean isOpticalBlackRegionSupported() {
+ return areKeysAvailable(CameraCharacteristics.SENSOR_OPTICAL_BLACK_REGIONS);
+ }
+
+ /**
+ * Check if the dynamic black level is supported.
+ *
+ * <p>
+ * Note that: This also indicates if the white level is supported, as dynamic black and white
+ * level must be all supported or none of them is supported.
+ * </p>
+ */
+ public boolean isDynamicBlackLevelSupported() {
+ return areKeysAvailable(CaptureResult.SENSOR_DYNAMIC_BLACK_LEVEL);
+ }
+
+ /**
+ * Get the value in index for a fixed-size array from a given key.
+ *
+ * <p>If the camera device is incorrectly reporting values, log a warning and return
+ * the default value instead.</p>
+ *
+ * @param key Key to fetch
+ * @param defaultValue Default value to return if camera device uses invalid values
+ * @param name Human-readable name for the array index (logging only)
+ * @param index Array index of the subelement
+ * @param size Expected fixed size of the array
+ *
+ * @return The value reported by the camera device, or the defaultValue otherwise.
+ */
+ private <T> T getArrayElementOrDefault(Key<?> key, T defaultValue, String name, int index,
+ int size) {
+ T elementValue = getArrayElementCheckRangeNonNull(
+ key,
+ index,
+ size);
+
+ if (elementValue == null) {
+ failKeyCheck(key,
+ "had no valid " + name + " value; using default of " + defaultValue);
+ elementValue = defaultValue;
+ }
+
+ return elementValue;
+ }
+
+ /**
+ * Fetch an array sub-element from an array value given by a key.
+ *
+ * <p>
+ * Prints a warning if the sub-element was null.
+ * </p>
+ *
+ * <p>Use for variable-size arrays since this does not check the array size.</p>
+ *
+ * @param key Metadata key to look up
+ * @param element A non-negative index value.
+ * @return The array sub-element, or null if the checking failed.
+ */
+ private <T> T getArrayElementNonNull(Key<?> key, int element) {
+ return getArrayElementCheckRangeNonNull(key, element, IGNORE_SIZE_CHECK);
+ }
+
+ /**
+ * Fetch an array sub-element from an array value given by a key.
+ *
+ * <p>
+ * Prints a warning if the array size does not match the size, or if the sub-element was null.
+ * </p>
+ *
+ * @param key Metadata key to look up
+ * @param element The index in [0,size)
+ * @param size A positive size value or otherwise {@value #IGNORE_SIZE_CHECK}
+ * @return The array sub-element, or null if the checking failed.
+ */
+ private <T> T getArrayElementCheckRangeNonNull(Key<?> key, int element, int size) {
+ Object array = getValueFromKeyNonNull(key);
+
+ if (array == null) {
+ // Warning already printed
+ return null;
+ }
+
+ if (size != IGNORE_SIZE_CHECK) {
+ int actualLength = Array.getLength(array);
+ if (actualLength != size) {
+ failKeyCheck(key,
+ String.format("had the wrong number of elements (%d), expected (%d)",
+ actualLength, size));
+ return null;
+ }
+ }
+
+ @SuppressWarnings("unchecked")
+ T val = (T) Array.get(array, element);
+
+ if (val == null) {
+ failKeyCheck(key, "had a null element at index" + element);
+ return null;
+ }
+
+ return val;
+ }
+
+ /**
+ * Gets the key, logging warnings for null values.
+ */
+ public <T> T getValueFromKeyNonNull(Key<T> key) {
+ if (key == null) {
+ throw new IllegalArgumentException("key was null");
+ }
+
+ T value = mCharacteristics.get(key);
+
+ if (value == null) {
+ failKeyCheck(key, "was null");
+ }
+
+ return value;
+ }
+
+    /** Check that every int in {@code array} lies within [min, max], inclusive. */
+    private void checkArrayValuesInRange(Key<int[]> key, int[] array, int min, int max) {
+        for (int value : array) {
+            checkTrueForKey(key, String.format(" value is out of range [%d, %d]", min, max),
+                    value <= max && value >= min);
+        }
+    }
+
+    /** Check that every byte in {@code array} lies within [min, max], inclusive. */
+    private void checkArrayValuesInRange(Key<byte[]> key, byte[] array, byte min, byte max) {
+        for (byte value : array) {
+            checkTrueForKey(key, String.format(" value is out of range [%d, %d]", min, max),
+                    value <= max && value >= min);
+        }
+    }
+
+    /**
+     * Check the uniqueness of the values in a list.
+     *
+     * @param key The key to be checked
+     * @param list The list contains the value of the key
+     */
+    private <U, T> void checkElementDistinct(Key<U> key, List<T> list) {
+        // Each size must be distinct.
+        Set<T> sizeSet = new HashSet<T>(list);
+        checkTrueForKey(key, "Each size must be distinct", sizeSet.size() == list.size());
+    }
+
+    /** Record a key-check failure with {@code message} when {@code condition} is false. */
+    private <T> void checkTrueForKey(Key<T> key, String message, boolean condition) {
+        if (!condition) {
+            failKeyCheck(key, message);
+        }
+    }
+
+    /**
+     * Helper function to check if the coupled modes are either all present or all non-present.
+     *
+     * @param observedModes the modes actually reported by the device
+     * @param coupledModes the modes that must appear together (or not at all)
+     * @return {@code true} if all coupled modes are present, or none of them are
+     */
+    private <T> boolean containsAllOrNone(Collection<T> observedModes, Collection<T> coupledModes) {
+        if (observedModes.containsAll(coupledModes)) {
+            return true;
+        }
+        // Not all present: any single match means a partial (invalid) coupling.
+        for (T mode : coupledModes) {
+            if (observedModes.contains(mode)) {
+                return false;
+            }
+        }
+        return true;
+    }
+
+    /**
+     * Report a failed key check according to the configured check level:
+     * WARN logs, COLLECT accumulates the message, ASSERT fails the test immediately.
+     *
+     * @param key the metadata key whose check failed
+     * @param message description of the failure, appended to the key name
+     */
+    private <T> void failKeyCheck(Key<T> key, String message) {
+        // TODO: Consider only warning once per key/message combination if it's too spammy.
+        // TODO: Consider offering other options such as throwing an assertion exception
+        String failureCause = String.format("The static info key '%s' %s", key.getName(), message);
+        switch (mLevel) {
+            case WARN:
+                Log.w(TAG, failureCause);
+                break;
+            case COLLECT:
+                mCollector.addMessage(failureCause);
+                break;
+            case ASSERT:
+                Assert.fail(failureCause);
+                // Assert.fail always throws, but break anyway so the ASSERT case can
+                // never fall through into the "unhandled level" default.
+                break;
+            default:
+                throw new UnsupportedOperationException("Unhandled level " + mLevel);
+        }
+    }
+}
diff --git a/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/stress/Camera2CaptureRequestTest.java b/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/stress/Camera2CaptureRequestTest.java
new file mode 100644
index 0000000..b152975
--- /dev/null
+++ b/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/stress/Camera2CaptureRequestTest.java
@@ -0,0 +1,441 @@
+/*
+ * Copyright 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.mediaframeworktest.stress;
+
+import com.android.mediaframeworktest.Camera2SurfaceViewTestCase;
+import com.android.mediaframeworktest.helpers.CameraTestUtils.SimpleCaptureCallback;
+
+import android.hardware.camera2.CameraCharacteristics;
+import android.hardware.camera2.CameraDevice;
+import android.hardware.camera2.CaptureRequest;
+import android.hardware.camera2.CaptureResult;
+import android.util.Log;
+import android.util.Rational;
+import android.util.Size;
+
+import java.util.Arrays;
+
+import static android.hardware.camera2.CameraCharacteristics.CONTROL_AE_MODE_OFF;
+import static android.hardware.camera2.CameraCharacteristics.CONTROL_AE_MODE_ON;
+import static android.hardware.camera2.CameraCharacteristics.CONTROL_AE_MODE_ON_ALWAYS_FLASH;
+import static android.hardware.camera2.CameraCharacteristics.CONTROL_AE_MODE_ON_AUTO_FLASH;
+import static android.hardware.camera2.CameraCharacteristics.CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
+import static com.android.mediaframeworktest.helpers.CameraTestUtils.getValueNotNull;
+
+/**
+ * <p>
+ * Basic test for camera CaptureRequest key controls.
+ * </p>
+ * <p>
+ * Several test categories are covered: manual sensor control, 3A control,
+ * manual ISP control and other per-frame control and synchronization.
+ * </p>
+ *
+ * adb shell am instrument \
+ * -e class com.android.mediaframeworktest.stress.Camera2CaptureRequestTest#testAeModeAndLock \
+ * -e repeat 10 \
+ * -e waitIntervalMs 1000 \
+ * -e resultToFile false \
+ * -r -w com.android.mediaframeworktest/.Camera2InstrumentationTestRunner
+ */
+public class Camera2CaptureRequestTest extends Camera2SurfaceViewTestCase {
+ private static final String TAG = "CaptureRequestTest";
+ private static final boolean VERBOSE = Log.isLoggable(TAG, Log.VERBOSE);
+ /** 30ms exposure time must be supported by full capability devices. */
+ private static final long DEFAULT_EXP_TIME_NS = 30000000L;
+ private static final int DEFAULT_SENSITIVITY = 100; // Fallback when the device's sensitivity range is unavailable.
+ private static final long EXPOSURE_TIME_ERROR_MARGIN_NS = 100000L; // 100us, Approximation.
+ private static final float EXPOSURE_TIME_ERROR_MARGIN_RATE = 0.03f; // 3%, Approximation.
+ private static final float SENSITIVITY_ERROR_MARGIN_RATE = 0.03f; // 3%, Approximation.
+ private static final int DEFAULT_NUM_EXPOSURE_TIME_STEPS = 3;
+ private static final int DEFAULT_NUM_SENSITIVITY_STEPS = 16; // Upper bound on sensitivity test steps.
+ private static final int DEFAULT_SENSITIVITY_STEP_SIZE = 100;
+
+ @Override
+ protected void setUp() throws Exception {
+ super.setUp(); // No per-test setup beyond the base class.
+ }
+
+ @Override
+ protected void tearDown() throws Exception {
+ super.tearDown(); // No per-test cleanup beyond the base class.
+ }
+
+ /**
+ * Test AE mode and lock.
+ * <p>Runs on every available camera, repeating per the configured repeat count.</p>
+ * <p>
+ * For AE lock, when it is locked, exposure parameters shouldn't be changed.
+ * For AE modes, each mode should satisfy the per frame controls defined in
+ * API specifications.
+ * </p>
+ */
+ public void testAeModeAndLock() throws Exception {
+ for (int i = 0; i < mCameraIds.length; i++) {
+ try {
+ openDevice(mCameraIds[i]);
+ if (!mStaticInfo.isColorOutputSupported()) {
+ Log.i(TAG, "Camera " + mCameraIds[i] +
+ " does not support color outputs, skipping");
+ continue;
+ }
+
+ Size maxPreviewSz = mOrderedPreviewSizes.get(0); // Max preview size.
+
+ // Update preview surface with given size for all sub-tests.
+ updatePreviewSurface(maxPreviewSz);
+
+ // Test iteration starts...
+ for (int repeat = 0; repeat < getRepeatCount(); ++repeat) {
+ Log.v(TAG, String.format("AE mode and lock: %d/%d", repeat + 1,
+ getRepeatCount()));
+
+ // Test aeMode and lock for every advertised AE mode.
+ int[] aeModes = mStaticInfo.getAeAvailableModesChecked();
+ for (int mode : aeModes) {
+ aeModeAndLockTestByMode(mode);
+ }
+ getResultPrinter().printStatus(getRepeatCount(), repeat + 1, mCameraIds[i]);
+ Thread.sleep(getTestWaitIntervalMs()); // Pause between iterations.
+ }
+ } finally {
+ closeDevice();
+ }
+ }
+ }
+
+ /**
+ * Test all the available AE modes and AE lock.
+ * <p>
+ * For manual AE mode, test iterates through different sensitivities and
+ * exposure times, validate the result exposure time correctness. For
+ * CONTROL_AE_MODE_ON_ALWAYS_FLASH mode, the AE lock and flash are tested.
+ * For the rest of the AUTO mode, AE lock is tested.
+ * </p>
+ *
+ * @param mode The AE mode to test; one of the CONTROL_AE_MODE_* constants.
+ */
+ private void aeModeAndLockTestByMode(int mode)
+ throws Exception {
+ switch (mode) {
+ case CONTROL_AE_MODE_OFF:
+ if (mStaticInfo.isCapabilitySupported(
+ CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR)) {
+ // Test manual exposure control.
+ aeManualControlTest();
+ } else {
+ Log.w(TAG,
+ "aeModeAndLockTestByMode - can't test AE mode OFF without " +
+ "manual sensor control");
+ }
+ break;
+ case CONTROL_AE_MODE_ON:
+ case CONTROL_AE_MODE_ON_AUTO_FLASH:
+ case CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE:
+ case CONTROL_AE_MODE_ON_ALWAYS_FLASH:
+ // Test AE lock for above AUTO modes.
+ aeAutoModeTestLock(mode);
+ break;
+ default:
+ throw new UnsupportedOperationException("Unhandled AE mode " + mode);
+ }
+ }
+
+ /**
+ * Test AE auto modes.
+ * <p>
+ * Use single request rather than repeating request to test AE lock per frame control.
+ * </p>
+ */
+ private void aeAutoModeTestLock(int mode) throws Exception {
+ CaptureRequest.Builder requestBuilder =
+ mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
+ if (mStaticInfo.isAeLockSupported()) {
+ requestBuilder.set(CaptureRequest.CONTROL_AE_LOCK, false); // Start unlocked.
+ }
+ requestBuilder.set(CaptureRequest.CONTROL_AE_MODE, mode);
+ configurePreviewOutput(requestBuilder);
+
+ final int MAX_NUM_CAPTURES_DURING_LOCK = 5;
+ for (int i = 1; i <= MAX_NUM_CAPTURES_DURING_LOCK; i++) {
+ autoAeMultipleCapturesThenTestLock(requestBuilder, mode, i); // 1..5 captures under lock.
+ }
+ }
+
+ /**
+ * Issue multiple auto AE captures, then lock AE, validate the AE lock vs.
+ * the first capture result after the AE lock. The right AE lock behavior is:
+ * When it is locked, it locks to the current exposure value, and all subsequent
+ * request with lock ON will have the same exposure value locked.
+ */
+ private void autoAeMultipleCapturesThenTestLock(
+ CaptureRequest.Builder requestBuilder, int aeMode, int numCapturesDuringLock)
+ throws Exception {
+ if (numCapturesDuringLock < 1) {
+ throw new IllegalArgumentException("numCapturesDuringLock must be no less than 1");
+ }
+ if (VERBOSE) {
+ Log.v(TAG, "Camera " + mCamera.getId() + ": Testing auto AE mode and lock for mode "
+ + aeMode + " with " + numCapturesDuringLock + " captures before lock");
+ }
+
+ final int NUM_CAPTURES_BEFORE_LOCK = 2;
+ SimpleCaptureCallback listener = new SimpleCaptureCallback();
+
+ CaptureResult[] resultsDuringLock = new CaptureResult[numCapturesDuringLock];
+ boolean canSetAeLock = mStaticInfo.isAeLockSupported();
+
+ // Reset the AE lock to OFF, since we are reusing this builder many times
+ if (canSetAeLock) {
+ requestBuilder.set(CaptureRequest.CONTROL_AE_LOCK, false);
+ }
+
+ // Just send several captures with auto AE, lock off.
+ CaptureRequest request = requestBuilder.build();
+ for (int i = 0; i < NUM_CAPTURES_BEFORE_LOCK; i++) {
+ mSession.capture(request, listener, mHandler);
+ }
+ waitForNumResults(listener, NUM_CAPTURES_BEFORE_LOCK);
+
+ if (!canSetAeLock) {
+ // Without AE lock, the remaining test items won't work
+ return;
+ }
+
+ // Then fire several captures to lock the AE.
+ requestBuilder.set(CaptureRequest.CONTROL_AE_LOCK, true);
+
+ int requestCount = captureRequestsSynchronized(
+ requestBuilder.build(), numCapturesDuringLock, listener, mHandler);
+
+ int[] sensitivities = new int[numCapturesDuringLock]; // NOTE(review): filled but never read below;
+ long[] expTimes = new long[numCapturesDuringLock]; // candidates for removal with the two fills.
+ Arrays.fill(sensitivities, -1);
+ Arrays.fill(expTimes, -1L);
+
+ // Drain the synchronization results, then collect the locked ones to validate.
+ waitForNumResults(listener, requestCount - numCapturesDuringLock);
+ for (int i = 0; i < resultsDuringLock.length; i++) {
+ resultsDuringLock[i] = listener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS);
+ }
+
+ for (int i = 0; i < numCapturesDuringLock; i++) {
+ mCollector.expectKeyValueEquals(
+ resultsDuringLock[i], CaptureResult.CONTROL_AE_LOCK, true);
+ }
+
+ // Can't read manual sensor/exposure settings without manual sensor
+ if (mStaticInfo.isCapabilitySupported(
+ CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS)) {
+ int sensitivityLocked =
+ getValueNotNull(resultsDuringLock[0], CaptureResult.SENSOR_SENSITIVITY);
+ long expTimeLocked =
+ getValueNotNull(resultsDuringLock[0], CaptureResult.SENSOR_EXPOSURE_TIME);
+ for (int i = 1; i < resultsDuringLock.length; i++) {
+ mCollector.expectKeyValueEquals(
+ resultsDuringLock[i], CaptureResult.SENSOR_EXPOSURE_TIME, expTimeLocked);
+ mCollector.expectKeyValueEquals(
+ resultsDuringLock[i], CaptureResult.SENSOR_SENSITIVITY, sensitivityLocked);
+ }
+ }
+ }
+
+ /**
+ * Iterate through exposure times and sensitivities for manual AE control.
+ * <p>
+ * Use single request rather than repeating request to test manual exposure
+ * value change per frame control.
+ * </p>
+ */
+ private void aeManualControlTest()
+ throws Exception {
+ CaptureRequest.Builder requestBuilder =
+ mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
+
+ requestBuilder.set(CaptureRequest.CONTROL_AE_MODE, CONTROL_AE_MODE_OFF);
+ configurePreviewOutput(requestBuilder);
+ SimpleCaptureCallback listener = new SimpleCaptureCallback();
+
+ long[] expTimes = getExposureTimeTestValues();
+ int[] sensitivities = getSensitivityTestValues();
+ // Submit single request at a time, then verify the result.
+ for (int i = 0; i < expTimes.length; i++) {
+ for (int j = 0; j < sensitivities.length; j++) {
+ if (VERBOSE) {
+ Log.v(TAG, "Camera " + mCamera.getId() + ": Testing sensitivity "
+ + sensitivities[j] + ", exposure time " + expTimes[i] + "ns");
+ }
+
+ changeExposure(requestBuilder, expTimes[i], sensitivities[j]);
+ mSession.capture(requestBuilder.build(), listener, mHandler);
+
+ // Extend the ms timeout by the exposure time, converting it from ns to ms.
+ long timeout = WAIT_FOR_RESULT_TIMEOUT_MS + expTimes[i] / 1000000;
+ CaptureResult result = listener.getCaptureResult(timeout);
+ long resultExpTime = getValueNotNull(result, CaptureResult.SENSOR_EXPOSURE_TIME);
+ int resultSensitivity = getValueNotNull(result, CaptureResult.SENSOR_SENSITIVITY);
+ validateExposureTime(expTimes[i], resultExpTime);
+ validateSensitivity(sensitivities[j], resultSensitivity);
+ validateFrameDurationForCapture(result);
+ }
+ }
+ // TODO: Add another case to test where we can submit all requests, then wait for
+ // results, which will hide the pipeline latency. this is not only faster, but also
+ // test high speed per frame control and synchronization.
+ }
+
+ //----------------------------------------------------------------
+ //---------Below are common functions for all tests.--------------
+ //----------------------------------------------------------------
+
+ /**
+ * Enable exposure manual control and change exposure and sensitivity and
+ * clamp the value into the supported range.
+ */
+ private void changeExposure(CaptureRequest.Builder requestBuilder,
+ long expTime, int sensitivity) {
+ // Check if the max analog sensitivity is available and no larger than max sensitivity.
+ // The max analog sensitivity is not actually used here. This is only an extra sanity check.
+ mStaticInfo.getMaxAnalogSensitivityChecked();
+
+ expTime = mStaticInfo.getExposureClampToRange(expTime);
+ sensitivity = mStaticInfo.getSensitivityClampToRange(sensitivity);
+
+ requestBuilder.set(CaptureRequest.CONTROL_AE_MODE, CONTROL_AE_MODE_OFF); // Manual AE.
+ requestBuilder.set(CaptureRequest.SENSOR_EXPOSURE_TIME, expTime);
+ requestBuilder.set(CaptureRequest.SENSOR_SENSITIVITY, sensitivity);
+ }
+
+ /**
+ * Get the exposure time array that contains multiple exposure time steps in
+ * the exposure time range.
+ */
+ private long[] getExposureTimeTestValues() {
+ long[] testValues = new long[DEFAULT_NUM_EXPOSURE_TIME_STEPS + 1];
+ long maxExpTime = mStaticInfo.getExposureMaximumOrDefault(DEFAULT_EXP_TIME_NS);
+ long minExpTime = mStaticInfo.getExposureMinimumOrDefault(DEFAULT_EXP_TIME_NS);
+
+ long range = maxExpTime - minExpTime;
+ double stepSize = range / (double)DEFAULT_NUM_EXPOSURE_TIME_STEPS;
+ for (int i = 0; i < testValues.length; i++) {
+ testValues[i] = maxExpTime - (long)(stepSize * i); // Step down from the maximum.
+ testValues[i] = mStaticInfo.getExposureClampToRange(testValues[i]);
+ }
+
+ return testValues;
+ }
+
+ /**
+ * Get the sensitivity array that contains multiple sensitivity steps in the
+ * sensitivity range.
+ * <p>
+ * Sensitivity number of test values is determined by
+ * {@value #DEFAULT_SENSITIVITY_STEP_SIZE} and sensitivity range, and
+ * bounded by {@value #DEFAULT_NUM_SENSITIVITY_STEPS}.
+ * </p>
+ */
+ private int[] getSensitivityTestValues() {
+ int maxSensitivity = mStaticInfo.getSensitivityMaximumOrDefault(
+ DEFAULT_SENSITIVITY);
+ int minSensitivity = mStaticInfo.getSensitivityMinimumOrDefault(
+ DEFAULT_SENSITIVITY);
+
+ int range = maxSensitivity - minSensitivity;
+ int stepSize = DEFAULT_SENSITIVITY_STEP_SIZE;
+ int numSteps = range / stepSize;
+ // Bound the number of steps to avoid an overly long test.
+ if (numSteps > DEFAULT_NUM_SENSITIVITY_STEPS) {
+ numSteps = DEFAULT_NUM_SENSITIVITY_STEPS;
+ stepSize = range / numSteps;
+ }
+ int[] testValues = new int[numSteps + 1];
+ for (int i = 0; i < testValues.length; i++) {
+ testValues[i] = maxSensitivity - stepSize * i; // Step down from the maximum.
+ testValues[i] = mStaticInfo.getSensitivityClampToRange(testValues[i]);
+ }
+
+ return testValues;
+ }
+
+ /**
+ * Validate the AE manual control exposure time.
+ *
+ * <p>Exposure should be close enough, and only round down if they are not equal.</p>
+ *
+ * @param request Request exposure time
+ * @param result Result exposure time
+ */
+ private void validateExposureTime(long request, long result) {
+ long expTimeDelta = request - result;
+ long expTimeErrorMargin = (long)(Math.max(EXPOSURE_TIME_ERROR_MARGIN_NS, request
+ * EXPOSURE_TIME_ERROR_MARGIN_RATE));
+ // First, round down not up, second, need close enough.
+ mCollector.expectTrue("Exposure time is invalid for AE manual control test, request: "
+ + request + " result: " + result,
+ expTimeDelta < expTimeErrorMargin && expTimeDelta >= 0);
+ }
+
+ /**
+ * Validate AE manual control sensitivity.
+ *
+ * @param request Request sensitivity
+ * @param result Result sensitivity
+ */
+ private void validateSensitivity(int request, int result) {
+ float sensitivityDelta = request - result;
+ float sensitivityErrorMargin = request * SENSITIVITY_ERROR_MARGIN_RATE;
+ // First, round down not up, second, need close enough.
+ mCollector.expectTrue("Sensitivity is invalid for AE manual control test, request: "
+ + request + " result: " + result,
+ sensitivityDelta < sensitivityErrorMargin && sensitivityDelta >= 0);
+ }
+
+ /**
+ * Validate frame duration for a given capture.
+ *
+ * <p>Frame duration should be longer than exposure time.</p>
+ *
+ * @param result The capture result for a given capture
+ */
+ private void validateFrameDurationForCapture(CaptureResult result) {
+ long expTime = getValueNotNull(result, CaptureResult.SENSOR_EXPOSURE_TIME);
+ long frameDuration = getValueNotNull(result, CaptureResult.SENSOR_FRAME_DURATION);
+ if (VERBOSE) {
+ Log.v(TAG, "frame duration: " + frameDuration + " Exposure time: " + expTime);
+ }
+
+ mCollector.expectTrue(String.format("Frame duration (%d) should be longer than exposure"
+ + " time (%d) for a given capture", frameDuration, expTime),
+ frameDuration >= expTime);
+
+ validatePipelineDepth(result); // Also sanity-check the reported pipeline depth.
+ }
+
+ /**
+ * Validate the pipeline depth result.
+ *
+ * @param result The capture result to get pipeline depth data
+ */
+ private void validatePipelineDepth(CaptureResult result) {
+ final byte MIN_PIPELINE_DEPTH = 1;
+ byte maxPipelineDepth = mStaticInfo.getPipelineMaxDepthChecked();
+ Byte pipelineDepth = getValueNotNull(result, CaptureResult.REQUEST_PIPELINE_DEPTH);
+ mCollector.expectInRange(String.format("Pipeline depth must be in the range of [%d, %d]",
+ MIN_PIPELINE_DEPTH, maxPipelineDepth), pipelineDepth, MIN_PIPELINE_DEPTH,
+ maxPipelineDepth);
+ }
+}
diff --git a/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/stress/Camera2RecordingTest.java b/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/stress/Camera2RecordingTest.java
new file mode 100644
index 0000000..e7c91cf
--- /dev/null
+++ b/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/stress/Camera2RecordingTest.java
@@ -0,0 +1,1082 @@
+/*
+ * Copyright 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.mediaframeworktest.stress;
+
+import com.android.ex.camera2.blocking.BlockingSessionCallback;
+import com.android.mediaframeworktest.Camera2SurfaceViewTestCase;
+import com.android.mediaframeworktest.helpers.CameraTestUtils;
+
+import junit.framework.AssertionFailedError;
+
+import android.graphics.ImageFormat;
+import android.hardware.camera2.CameraCaptureSession;
+import android.hardware.camera2.CameraCharacteristics;
+import android.hardware.camera2.CameraConstrainedHighSpeedCaptureSession;
+import android.hardware.camera2.CameraDevice;
+import android.hardware.camera2.CaptureRequest;
+import android.hardware.camera2.CaptureResult;
+import android.hardware.camera2.params.StreamConfigurationMap;
+import android.media.CamcorderProfile;
+import android.media.Image;
+import android.media.ImageReader;
+import android.media.MediaExtractor;
+import android.media.MediaFormat;
+import android.media.MediaRecorder;
+import android.os.Environment;
+import android.os.SystemClock;
+import android.test.suitebuilder.annotation.LargeTest;
+import android.util.Log;
+import android.util.Range;
+import android.util.Size;
+import android.view.Surface;
+
+import java.io.File;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.List;
+
+import static com.android.ex.camera2.blocking.BlockingSessionCallback.SESSION_CLOSED;
+import static com.android.mediaframeworktest.helpers.CameraTestUtils.CAPTURE_IMAGE_TIMEOUT_MS;
+import static com.android.mediaframeworktest.helpers.CameraTestUtils.SESSION_CLOSE_TIMEOUT_MS;
+import static com.android.mediaframeworktest.helpers.CameraTestUtils.SIZE_BOUND_1080P;
+import static com.android.mediaframeworktest.helpers.CameraTestUtils.SIZE_BOUND_2160P;
+import static com.android.mediaframeworktest.helpers.CameraTestUtils.SimpleCaptureCallback;
+import static com.android.mediaframeworktest.helpers.CameraTestUtils.SimpleImageReaderListener;
+import static com.android.mediaframeworktest.helpers.CameraTestUtils.configureCameraSession;
+import static com.android.mediaframeworktest.helpers.CameraTestUtils.getSupportedVideoSizes;
+import static com.android.mediaframeworktest.helpers.CameraTestUtils.getValueNotNull;
+
+/**
+ * CameraDevice video recording use case tests by using MediaRecorder and
+ * MediaCodec.
+ *
+ * adb shell am instrument \
+ * -e class com.android.mediaframeworktest.stress.Camera2RecordingTest#testBasicRecording \
+ * -e repeat 10 \
+ * -e waitIntervalMs 1000 \
+ * -e resultToFile false \
+ * -r -w com.android.mediaframeworktest/.Camera2InstrumentationTestRunner
+ */
+@LargeTest
+public class Camera2RecordingTest extends Camera2SurfaceViewTestCase {
+ private static final String TAG = "RecordingTest";
+ private static final boolean VERBOSE = Log.isLoggable(TAG, Log.VERBOSE);
+ private static final boolean DEBUG_DUMP = Log.isLoggable(TAG, Log.DEBUG); // Keeps per-camera/per-size output files when enabled.
+ private static final int RECORDING_DURATION_MS = 3000;
+ private static final float DURATION_MARGIN = 0.2f;
+ private static final double FRAME_DURATION_ERROR_TOLERANCE_MS = 3.0;
+ private static final int BIT_RATE_1080P = 16000000;
+ private static final int BIT_RATE_MIN = 64000;
+ private static final int BIT_RATE_MAX = 40000000;
+ private static final int VIDEO_FRAME_RATE = 30;
+ private final String VIDEO_FILE_PATH = Environment.getExternalStorageDirectory().getPath(); // Output directory for recorded videos.
+ private static final int[] mCamcorderProfileList = { // NOTE(review): constant with m-prefix; UPPER_SNAKE_CASE would match convention.
+ CamcorderProfile.QUALITY_HIGH,
+ CamcorderProfile.QUALITY_2160P,
+ CamcorderProfile.QUALITY_1080P,
+ CamcorderProfile.QUALITY_720P,
+ CamcorderProfile.QUALITY_480P,
+ CamcorderProfile.QUALITY_CIF,
+ CamcorderProfile.QUALITY_QCIF,
+ CamcorderProfile.QUALITY_QVGA,
+ CamcorderProfile.QUALITY_LOW,
+ };
+ private static final int MAX_VIDEO_SNAPSHOT_IMAGES = 5;
+ private static final int BURST_VIDEO_SNAPSHOT_NUM = 3;
+ private static final int SLOWMO_SLOW_FACTOR = 4;
+ private static final int MAX_NUM_FRAME_DROP_INTERVAL_ALLOWED = 4;
+ private List<Size> mSupportedVideoSizes; // Set by initSupportedVideoSize() for the open camera.
+ private Surface mRecordingSurface;
+ private Surface mPersistentSurface;
+ private MediaRecorder mMediaRecorder; // Re-created per camera in each test.
+ private String mOutMediaFileName;
+ private int mVideoFrameRate;
+ private Size mVideoSize;
+ private long mRecordingStartTime;
+
+ @Override
+ protected void setUp() throws Exception {
+ super.setUp(); // No per-test setup beyond the base class.
+ }
+
+ @Override
+ protected void tearDown() throws Exception {
+ super.tearDown(); // No per-test cleanup beyond the base class.
+ }
+
+ private void doBasicRecording(boolean useVideoStab) throws Exception { // Runs the basic recording test on every camera; useVideoStab toggles video stabilization.
+ for (int i = 0; i < mCameraIds.length; i++) {
+ try {
+ Log.i(TAG, "Testing basic recording for camera " + mCameraIds[i]);
+ // Re-use the MediaRecorder object for the same camera device.
+ mMediaRecorder = new MediaRecorder();
+ openDevice(mCameraIds[i]);
+ if (!mStaticInfo.isColorOutputSupported()) {
+ Log.i(TAG, "Camera " + mCameraIds[i] +
+ " does not support color outputs, skipping");
+ continue;
+ }
+
+ if (!mStaticInfo.isVideoStabilizationSupported() && useVideoStab) {
+ Log.i(TAG, "Camera " + mCameraIds[i] +
+ " does not support video stabilization, skipping the stabilization"
+ + " test");
+ continue;
+ }
+
+ initSupportedVideoSize(mCameraIds[i]);
+
+ // Test iteration starts...
+ for (int repeat = 0; repeat < getRepeatCount(); ++repeat) {
+ Log.v(TAG, String.format("Recording video: %d/%d", repeat + 1,
+ getRepeatCount()));
+ basicRecordingTestByCamera(mCamcorderProfileList, useVideoStab);
+ getResultPrinter().printStatus(getRepeatCount(), repeat + 1, mCameraIds[i]);
+ Thread.sleep(getTestWaitIntervalMs()); // Pause between iterations.
+ }
+ } finally {
+ closeDevice();
+ releaseRecorder();
+ }
+ }
+ }
+
+ /**
+ * <p>
+ * Test basic camera recording.
+ * </p>
+ * <p>
+ * This test covers the typical basic use case of camera recording.
+ * MediaRecorder is used to record the audio and video, CamcorderProfile is
+ * used to configure the MediaRecorder. It goes through the pre-defined
+ * CamcorderProfile list, test each profile configuration and validate the
+ * recorded video. Preview is set to the video size.
+ * </p>
+ */
+ public void testBasicRecording() throws Exception {
+ doBasicRecording(/*useVideoStab*/false); // Same flow without video stabilization.
+ }
+
+ /**
+ * <p>
+ * Test video snapshot for each camera.
+ * </p>
+ * <p>
+ * This test covers video snapshot typical use case. The MediaRecorder is used to record the
+ * video for each available video size. The largest still capture size is selected to
+ * capture the JPEG image. The still capture images are validated according to the capture
+ * configuration. The timestamp of capture result before and after video snapshot is also
+ * checked to make sure no frame drop caused by video snapshot.
+ * </p>
+ */
+ public void testVideoSnapshot() throws Exception {
+ videoSnapshotHelper(/*burstTest*/false); // Single snapshot, not a burst.
+ }
+
+ public void testConstrainedHighSpeedRecording() throws Exception { // Entry point; the work is in constrainedHighSpeedRecording().
+ constrainedHighSpeedRecording();
+ }
+
+ private void constrainedHighSpeedRecording() throws Exception { // Records at each supported high-speed size/FPS and validates the output.
+ for (String id : mCameraIds) {
+ try {
+ Log.i(TAG, "Testing constrained high speed recording for camera " + id);
+ // Re-use the MediaRecorder object for the same camera device.
+ mMediaRecorder = new MediaRecorder();
+ openDevice(id);
+
+ if (!mStaticInfo.isConstrainedHighSpeedVideoSupported()) {
+ Log.i(TAG, "Camera " + id + " doesn't support high speed recording, skipping.");
+ continue;
+ }
+
+ // Test iteration starts...
+ for (int repeat = 0; repeat < getRepeatCount(); ++repeat) {
+ Log.v(TAG, String.format("Constrained high speed recording: %d/%d", repeat + 1,
+ getRepeatCount()));
+
+ StreamConfigurationMap config =
+ mStaticInfo.getValueFromKeyNonNull(
+ CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
+ Size[] highSpeedVideoSizes = config.getHighSpeedVideoSizes();
+ for (Size size : highSpeedVideoSizes) {
+ List<Range<Integer>> fixedFpsRanges =
+ getHighSpeedFixedFpsRangeForSize(config, size);
+ mCollector.expectTrue("Unable to find the fixed frame rate fps range for " +
+ "size " + size, fixedFpsRanges.size() > 0);
+ // Test recording for each FPS range
+ for (Range<Integer> fpsRange : fixedFpsRanges) {
+ int captureRate = fpsRange.getLower();
+ final int VIDEO_FRAME_RATE = 30; // NOTE(review): shadows the class-level VIDEO_FRAME_RATE constant (same value).
+ // Skip if this range's upper FPS exceeds what CamcorderProfile supports for this size.
+ if (fpsRange.getUpper() > getFpsFromHighSpeedProfileForSize(size)) {
+ Log.w(TAG, "high speed recording " + size + "@" + captureRate + "fps"
+ + " is not supported by CamcorderProfile");
+ continue;
+ }
+
+ mOutMediaFileName = VIDEO_FILE_PATH + "/test_cslowMo_video_" + captureRate +
+ "fps_" + id + "_" + size.toString() + ".mp4";
+
+ prepareRecording(size, VIDEO_FRAME_RATE, captureRate);
+
+ // prepare preview surface by using video size.
+ updatePreviewSurfaceWithVideo(size, captureRate);
+
+ // Start recording
+ SimpleCaptureCallback resultListener = new SimpleCaptureCallback();
+ startSlowMotionRecording(/*useMediaRecorder*/true, VIDEO_FRAME_RATE,
+ captureRate, fpsRange, resultListener,
+ /*useHighSpeedSession*/true);
+
+ // Record certain duration.
+ SystemClock.sleep(RECORDING_DURATION_MS);
+
+ // Stop recording and preview
+ stopRecording(/*useMediaRecorder*/true);
+ // Convert number of frames camera produced into the duration in unit of ms.
+ int durationMs = (int) (resultListener.getTotalNumFrames() * 1000.0f /
+ VIDEO_FRAME_RATE);
+
+ // Validation.
+ validateRecording(size, durationMs);
+ }
+
+ getResultPrinter().printStatus(getRepeatCount(), repeat + 1, id);
+ Thread.sleep(getTestWaitIntervalMs()); // Pause between iterations.
+ }
+ }
+
+ } finally {
+ closeDevice();
+ releaseRecorder();
+ }
+ }
+ }
+
+ /**
+ * Get high speed FPS from CamcorderProfiles for a given size.
+ *
+ * @param size The size used to search the CamcorderProfiles for the FPS.
+ * @return high speed video FPS, 0 if the given size is not supported by the CamcorderProfiles.
+ */
+ private int getFpsFromHighSpeedProfileForSize(Size size) {
+ // Scan every high-speed quality level for a profile matching this frame size.
+ for (int quality = CamcorderProfile.QUALITY_HIGH_SPEED_480P;
+ quality <= CamcorderProfile.QUALITY_HIGH_SPEED_2160P; quality++) {
+ if (CamcorderProfile.hasProfile(quality)) {
+ CamcorderProfile profile = CamcorderProfile.get(quality);
+ if (size.equals(new Size(profile.videoFrameWidth, profile.videoFrameHeight))){
+ return profile.videoFrameRate;
+ }
+ }
+ }
+
+ return 0;
+ }
+
+ private List<Range<Integer>> getHighSpeedFixedFpsRangeForSize(StreamConfigurationMap config, // Returns only the ranges with lower == upper (fixed frame rate).
+ Size size) {
+ Range<Integer>[] availableFpsRanges = config.getHighSpeedVideoFpsRangesFor(size);
+ List<Range<Integer>> fixedRanges = new ArrayList<Range<Integer>>();
+ for (Range<Integer> range : availableFpsRanges) {
+ if (range.getLower().equals(range.getUpper())) {
+ fixedRanges.add(range);
+ }
+ }
+ return fixedRanges;
+ }
+
+ private void startSlowMotionRecording(boolean useMediaRecorder, int videoFrameRate, // Configures the session and starts a slow-motion repeating burst.
+ int captureRate, Range<Integer> fpsRange,
+ CameraCaptureSession.CaptureCallback listener, boolean useHighSpeedSession) throws Exception {
+ List<Surface> outputSurfaces = new ArrayList<Surface>(2);
+ assertTrue("Both preview and recording surfaces should be valid",
+ mPreviewSurface.isValid() && mRecordingSurface.isValid());
+ outputSurfaces.add(mPreviewSurface);
+ outputSurfaces.add(mRecordingSurface);
+ // Video snapshot surface
+ if (mReaderSurface != null) {
+ outputSurfaces.add(mReaderSurface);
+ }
+ mSessionListener = new BlockingSessionCallback();
+ mSession = configureCameraSession(mCamera, outputSurfaces, useHighSpeedSession,
+ mSessionListener, mHandler);
+
+ // Create slow motion request list
+ List<CaptureRequest> slowMoRequests = null;
+ if (useHighSpeedSession) {
+ // Constrained high-speed session builds the burst list itself.
+ CaptureRequest.Builder requestBuilder =
+ mCamera.createCaptureRequest(CameraDevice.TEMPLATE_RECORD);
+ requestBuilder.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, fpsRange);
+ requestBuilder.addTarget(mPreviewSurface);
+ requestBuilder.addTarget(mRecordingSurface);
+ slowMoRequests = ((CameraConstrainedHighSpeedCaptureSession) mSession).
+ createHighSpeedRequestList(requestBuilder.build());
+ } else {
+ // Legacy high-speed scene mode: hand-build the burst list.
+ CaptureRequest.Builder recordingRequestBuilder =
+ mCamera.createCaptureRequest(CameraDevice.TEMPLATE_RECORD);
+ recordingRequestBuilder.set(CaptureRequest.CONTROL_MODE,
+ CaptureRequest.CONTROL_MODE_USE_SCENE_MODE);
+ recordingRequestBuilder.set(CaptureRequest.CONTROL_SCENE_MODE,
+ CaptureRequest.CONTROL_SCENE_MODE_HIGH_SPEED_VIDEO);
+
+ CaptureRequest.Builder recordingOnlyBuilder =
+ mCamera.createCaptureRequest(CameraDevice.TEMPLATE_RECORD);
+ recordingOnlyBuilder.set(CaptureRequest.CONTROL_MODE,
+ CaptureRequest.CONTROL_MODE_USE_SCENE_MODE);
+ recordingOnlyBuilder.set(CaptureRequest.CONTROL_SCENE_MODE,
+ CaptureRequest.CONTROL_SCENE_MODE_HIGH_SPEED_VIDEO);
+ int slowMotionFactor = captureRate / videoFrameRate; // One preview frame per slowMotionFactor captures.
+
+ // Make sure camera output frame rate is set to correct value.
+ recordingRequestBuilder.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, fpsRange);
+ recordingRequestBuilder.addTarget(mRecordingSurface);
+ recordingRequestBuilder.addTarget(mPreviewSurface);
+ recordingOnlyBuilder.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, fpsRange);
+ recordingOnlyBuilder.addTarget(mRecordingSurface);
+
+ slowMoRequests = new ArrayList<CaptureRequest>();
+ slowMoRequests.add(recordingRequestBuilder.build());// Preview + recording.
+
+ for (int i = 0; i < slowMotionFactor - 1; i++) {
+ slowMoRequests.add(recordingOnlyBuilder.build()); // Recording only.
+ }
+ }
+
+ mSession.setRepeatingBurst(slowMoRequests, listener, mHandler);
+
+ if (useMediaRecorder) {
+ mMediaRecorder.start();
+ } else {
+ // TODO: need implement MediaCodec path.
+ }
+
+ }
+
+    /**
+     * Test camera recording by using each available CamcorderProfile for a
+     * given camera. Preview size is set to the video size.
+     *
+     * <p>Profiles that are unavailable (or allowed to be unsupported on LEGACY
+     * devices, see {@link #allowedUnsupported}) are skipped, as are profiles whose
+     * video size exceeds the max preview size on LEGACY devices. After the loop,
+     * at least one tested profile must support >= 24 FPS.</p>
+     *
+     * @param camcorderProfileList the {@link CamcorderProfile} quality IDs to test.
+     * @param useVideoStab whether to enable video stabilization while recording.
+     */
+    private void basicRecordingTestByCamera(int[] camcorderProfileList, boolean useVideoStab)
+            throws Exception {
+        Size maxPreviewSize = mOrderedPreviewSizes.get(0);
+        List<Range<Integer> > fpsRanges = Arrays.asList(
+                mStaticInfo.getAeAvailableTargetFpsRangesChecked());
+        int cameraId = Integer.valueOf(mCamera.getId());
+        int maxVideoFrameRate = -1;
+        for (int profileId : camcorderProfileList) {
+            if (!CamcorderProfile.hasProfile(cameraId, profileId) ||
+                    allowedUnsupported(cameraId, profileId)) {
+                continue;
+            }
+
+            CamcorderProfile profile = CamcorderProfile.get(cameraId, profileId);
+            Size videoSz = new Size(profile.videoFrameWidth, profile.videoFrameHeight);
+            // Use Range.create() instead of the raw-typed "new Range(...)" to avoid
+            // an unchecked conversion; consistent with startRecording().
+            Range<Integer> fpsRange =
+                    Range.create(profile.videoFrameRate, profile.videoFrameRate);
+            if (maxVideoFrameRate < profile.videoFrameRate) {
+                maxVideoFrameRate = profile.videoFrameRate;
+            }
+            if (mStaticInfo.isHardwareLevelLegacy() &&
+                    (videoSz.getWidth() > maxPreviewSize.getWidth() ||
+                     videoSz.getHeight() > maxPreviewSize.getHeight())) {
+                // Skip. Legacy mode can only do recording up to max preview size
+                continue;
+            }
+            assertTrue("Video size " + videoSz.toString() + " for profile ID " + profileId +
+                    " must be one of the camera device supported video size!",
+                    mSupportedVideoSizes.contains(videoSz));
+            assertTrue("Frame rate range " + fpsRange + " (for profile ID " + profileId +
+                    ") must be one of the camera device available FPS range!",
+                    fpsRanges.contains(fpsRange));
+
+            if (VERBOSE) {
+                Log.v(TAG, "Testing camera recording with video size " + videoSz.toString());
+            }
+
+            // Configure preview and recording surfaces.
+            mOutMediaFileName = VIDEO_FILE_PATH + "/test_video.mp4";
+            if (DEBUG_DUMP) {
+                mOutMediaFileName = VIDEO_FILE_PATH + "/test_video_" + cameraId + "_"
+                        + videoSz.toString() + ".mp4";
+            }
+
+            prepareRecordingWithProfile(profile);
+
+            // prepare preview surface by using video size.
+            updatePreviewSurfaceWithVideo(videoSz, profile.videoFrameRate);
+
+            // Start recording
+            SimpleCaptureCallback resultListener = new SimpleCaptureCallback();
+            startRecording(/* useMediaRecorder */true, resultListener, useVideoStab);
+
+            // Record certain duration.
+            SystemClock.sleep(RECORDING_DURATION_MS);
+
+            // Stop recording and preview
+            stopRecording(/* useMediaRecorder */true);
+            // Convert number of frames camera produced into the duration in unit of ms.
+            int durationMs = (int) (resultListener.getTotalNumFrames() * 1000.0f /
+                    profile.videoFrameRate);
+
+            if (VERBOSE) {
+                Log.v(TAG, "video frame rate: " + profile.videoFrameRate +
+                        ", num of frames produced: " + resultListener.getTotalNumFrames());
+            }
+
+            // Validation.
+            validateRecording(videoSz, durationMs);
+        }
+        if (maxVideoFrameRate != -1) {
+            // At least one CamcorderProfile is present, check FPS
+            assertTrue("At least one CamcorderProfile must support >= 24 FPS",
+                    maxVideoFrameRate >= 24);
+        }
+    }
+
+    /**
+     * Initialize {@link #mSupportedVideoSizes} for the given camera.
+     *
+     * <p>The size list is bounded by 1080p unless this camera advertises a 2160p
+     * camcorder profile, in which case the bound is raised to 2160p.</p>
+     *
+     * @param cameraId the ID of the camera whose profiles should be queried.
+     */
+    private void initSupportedVideoSize(String cameraId) throws Exception {
+        Size maxVideoSize = SIZE_BOUND_1080P;
+        // Query the 2160p profile for this specific camera. The single-argument
+        // hasProfile() overload only checks the default (first back-facing) camera,
+        // which would pick the wrong size bound for any other camera ID.
+        if (CamcorderProfile.hasProfile(
+                Integer.parseInt(cameraId), CamcorderProfile.QUALITY_2160P)) {
+            maxVideoSize = SIZE_BOUND_2160P;
+        }
+        mSupportedVideoSizes =
+                getSupportedVideoSizes(cameraId, mCameraManager, maxVideoSize);
+    }
+
+    /**
+     * Simple wrapper to wrap normal/burst video snapshot tests.
+     *
+     * <p>Iterates over every camera ID, skipping cameras without color output
+     * support, and runs {@link #videoSnapshotTestByCamera} for the configured
+     * number of repeat iterations. The camera device and MediaRecorder are
+     * released per camera in the finally block, even if an iteration throws or
+     * the camera is skipped.</p>
+     *
+     * @param burstTest {@code true} to take a burst of video snapshots per
+     *                  iteration, {@code false} for a single snapshot.
+     */
+    private void videoSnapshotHelper(boolean burstTest) throws Exception {
+        for (String id : mCameraIds) {
+            try {
+                Log.i(TAG, "Testing video snapshot for camera " + id);
+                // Re-use the MediaRecorder object for the same camera device.
+                mMediaRecorder = new MediaRecorder();
+
+                openDevice(id);
+
+                if (!mStaticInfo.isColorOutputSupported()) {
+                    Log.i(TAG, "Camera " + id +
+                            " does not support color outputs, skipping");
+                    continue;
+                }
+
+                initSupportedVideoSize(id);
+
+                // Test iteration starts...
+                for (int repeat = 0; repeat < getRepeatCount(); ++repeat) {
+                    Log.v(TAG, String.format("Video snapshot: %d/%d", repeat + 1,
+                            getRepeatCount()));
+                    videoSnapshotTestByCamera(burstTest);
+                    getResultPrinter().printStatus(getRepeatCount(), repeat + 1, id);
+                    Thread.sleep(getTestWaitIntervalMs());
+                }
+            } finally {
+                // Runs on normal completion, on the "continue" above, and on failure.
+                closeDevice();
+                releaseRecorder();
+            }
+        }
+    }
+
+    /**
+     * Returns {@code true} if the {@link CamcorderProfile} ID is allowed to be unsupported.
+     *
+     * <p>This only allows unsupported profiles when using the LEGACY mode of the Camera API.</p>
+     *
+     * @param cameraId the camera to check the profile against.
+     * @param profileId a {@link CamcorderProfile} ID to check.
+     * @return {@code true} if the profile is allowed to be unsupported on this device.
+     */
+    private boolean allowedUnsupported(int cameraId, int profileId) {
+        // Only LEGACY devices get any exemption.
+        if (!mStaticInfo.isHardwareLevelLegacy()) {
+            return false;
+        }
+
+        switch(profileId) {
+            case CamcorderProfile.QUALITY_2160P:
+            case CamcorderProfile.QUALITY_1080P:
+            case CamcorderProfile.QUALITY_HIGH:
+                // NOTE(review): this compares videoFrameWidth against 1080 even though a
+                // 1080p frame is 1920 wide -- presumably intended to exempt any
+                // 1080p-class-or-larger profile; confirm the threshold is deliberate.
+                return !CamcorderProfile.hasProfile(cameraId, profileId) ||
+                        CamcorderProfile.get(cameraId, profileId).videoFrameWidth >= 1080;
+        }
+        return false;
+    }
+
+    /**
+     * Test video snapshot for each available CamcorderProfile for a given camera.
+     *
+     * <p>
+     * Preview size is set to the video size. For the burst test, frame drop and jittering
+     * is not checked.
+     * </p>
+     *
+     * @param burstTest Perform burst capture or single capture. For burst capture
+     *                  {@value #BURST_VIDEO_SNAPSHOT_NUM} capture requests will be sent.
+     */
+    private void videoSnapshotTestByCamera(boolean burstTest)
+            throws Exception {
+        final int NUM_SINGLE_SHOT_TEST = 5;
+        final int FRAMEDROP_TOLERANCE = 8;
+        final int FRAME_SIZE_15M = 15000000;
+        final float FRAME_DROP_TOLERENCE_FACTOR = 1.5f;
+        int kFrameDrop_Tolerence = FRAMEDROP_TOLERANCE;
+
+        for (int profileId : mCamcorderProfileList) {
+            int cameraId = Integer.valueOf(mCamera.getId());
+            // Skip profiles that are unavailable, or that LEGACY devices are allowed
+            // to omit (see allowedUnsupported()).
+            if (!CamcorderProfile.hasProfile(cameraId, profileId) ||
+                    allowedUnsupported(cameraId, profileId)) {
+                continue;
+            }
+
+            CamcorderProfile profile = CamcorderProfile.get(cameraId, profileId);
+            Size videoSz = new Size(profile.videoFrameWidth, profile.videoFrameHeight);
+            Size maxPreviewSize = mOrderedPreviewSizes.get(0);
+
+            if (mStaticInfo.isHardwareLevelLegacy() &&
+                    (videoSz.getWidth() > maxPreviewSize.getWidth() ||
+                     videoSz.getHeight() > maxPreviewSize.getHeight())) {
+                // Skip. Legacy mode can only do recording up to max preview size
+                continue;
+            }
+
+            if (!mSupportedVideoSizes.contains(videoSz)) {
+                mCollector.addMessage("Video size " + videoSz.toString() + " for profile ID " +
+                        profileId + " must be one of the camera device supported video size!");
+                continue;
+            }
+
+            // For LEGACY, find closest supported smaller or equal JPEG size to the current video
+            // size; if no size is smaller than the video, pick the smallest JPEG size. The assert
+            // for video size above guarantees that for LIMITED or FULL, we select videoSz here.
+            // Also check for minFrameDuration here to make sure jpeg stream won't slow down
+            // video capture.
+            Size videoSnapshotSz = mOrderedStillSizes.get(mOrderedStillSizes.size() - 1);
+            // Allow a bit tolerance so we don't fail for a few nano seconds of difference
+            final float FRAME_DURATION_TOLERANCE = 0.01f;
+            long videoFrameDuration = (long) (1e9 / profile.videoFrameRate *
+                    (1.0 + FRAME_DURATION_TOLERANCE));
+            HashMap<Size, Long> minFrameDurationMap = mStaticInfo.
+                    getAvailableMinFrameDurationsForFormatChecked(ImageFormat.JPEG);
+            // Walk from the second-smallest still size towards the largest; the last
+            // qualifying candidate (largest size that fits and keeps up) wins.
+            for (int i = mOrderedStillSizes.size() - 2; i >= 0; i--) {
+                Size candidateSize = mOrderedStillSizes.get(i);
+                if (mStaticInfo.isHardwareLevelLegacy()) {
+                    // Legacy level doesn't report min frame duration
+                    if (candidateSize.getWidth() <= videoSz.getWidth() &&
+                            candidateSize.getHeight() <= videoSz.getHeight()) {
+                        videoSnapshotSz = candidateSize;
+                    }
+                } else {
+                    Long jpegFrameDuration = minFrameDurationMap.get(candidateSize);
+                    assertTrue("Cannot find minimum frame duration for jpeg size " + candidateSize,
+                            jpegFrameDuration != null);
+                    if (candidateSize.getWidth() <= videoSz.getWidth() &&
+                            candidateSize.getHeight() <= videoSz.getHeight() &&
+                            jpegFrameDuration <= videoFrameDuration) {
+                        videoSnapshotSz = candidateSize;
+                    }
+                }
+            }
+
+            /**
+             * Only test full res snapshot when below conditions are all true.
+             * 1. Camera is a FULL device
+             * 2. video size is up to max preview size, which will be bounded by 1080p.
+             * 3. Full resolution jpeg stream can keep up to video stream speed.
+             * When full res jpeg stream cannot keep up to video stream speed, search
+             * the largest jpeg size that can sustain video speed instead.
+             */
+            if (mStaticInfo.isHardwareLevelFull() &&
+                    videoSz.getWidth() <= maxPreviewSize.getWidth() &&
+                    videoSz.getHeight() <= maxPreviewSize.getHeight()) {
+                for (Size jpegSize : mOrderedStillSizes) {
+                    Long jpegFrameDuration = minFrameDurationMap.get(jpegSize);
+                    assertTrue("Cannot find minimum frame duration for jpeg size " + jpegSize,
+                            jpegFrameDuration != null);
+                    if (jpegFrameDuration <= videoFrameDuration) {
+                        videoSnapshotSz = jpegSize;
+                        break;
+                    }
+                    // Reaching the video size without finding a fast-enough JPEG size
+                    // means no adequate snapshot size exists -- fail the test.
+                    if (jpegSize.equals(videoSz)) {
+                        throw new AssertionFailedError(
+                                "Cannot find adequate video snapshot size for video size" +
+                                videoSz);
+                    }
+                }
+            }
+
+            Log.i(TAG, "Testing video snapshot size " + videoSnapshotSz +
+                    " for video size " + videoSz);
+            // Large (>15MP) snapshots get a proportionally larger frame-drop budget.
+            if (videoSnapshotSz.getWidth() * videoSnapshotSz.getHeight() > FRAME_SIZE_15M)
+                kFrameDrop_Tolerence = (int)(FRAMEDROP_TOLERANCE * FRAME_DROP_TOLERENCE_FACTOR);
+
+            createImageReader(
+                    videoSnapshotSz, ImageFormat.JPEG,
+                    MAX_VIDEO_SNAPSHOT_IMAGES, /*listener*/null);
+
+            if (VERBOSE) {
+                Log.v(TAG, "Testing camera recording with video size " + videoSz.toString());
+            }
+
+            // Configure preview and recording surfaces.
+            mOutMediaFileName = VIDEO_FILE_PATH + "/test_video.mp4";
+            if (DEBUG_DUMP) {
+                mOutMediaFileName = VIDEO_FILE_PATH + "/test_video_" + cameraId + "_"
+                        + videoSz.toString() + ".mp4";
+            }
+
+            int numTestIterations = burstTest ? 1 : NUM_SINGLE_SHOT_TEST;
+            int totalDroppedFrames = 0;
+
+            for (int numTested = 0; numTested < numTestIterations; numTested++) {
+                prepareRecordingWithProfile(profile);
+
+                // prepare video snapshot
+                SimpleCaptureCallback resultListener = new SimpleCaptureCallback();
+                SimpleImageReaderListener imageListener = new SimpleImageReaderListener();
+                // LEGACY devices don't support the video-snapshot template.
+                CaptureRequest.Builder videoSnapshotRequestBuilder =
+                        mCamera.createCaptureRequest((mStaticInfo.isHardwareLevelLegacy()) ?
+                                CameraDevice.TEMPLATE_RECORD :
+                                CameraDevice.TEMPLATE_VIDEO_SNAPSHOT);
+
+                // prepare preview surface by using video size.
+                updatePreviewSurfaceWithVideo(videoSz, profile.videoFrameRate);
+
+                prepareVideoSnapshot(videoSnapshotRequestBuilder, imageListener);
+                CaptureRequest request = videoSnapshotRequestBuilder.build();
+
+                // Start recording
+                startRecording(/* useMediaRecorder */true, resultListener, /*useVideoStab*/false);
+                // NOTE(review): startTime appears unused below -- confirm whether it
+                // was meant to feed a duration check.
+                long startTime = SystemClock.elapsedRealtime();
+
+                // Record certain duration.
+                SystemClock.sleep(RECORDING_DURATION_MS / 2);
+
+                // take video snapshot
+                if (burstTest) {
+                    List<CaptureRequest> requests =
+                            new ArrayList<CaptureRequest>(BURST_VIDEO_SNAPSHOT_NUM);
+                    for (int i = 0; i < BURST_VIDEO_SNAPSHOT_NUM; i++) {
+                        requests.add(request);
+                    }
+                    mSession.captureBurst(requests, resultListener, mHandler);
+                } else {
+                    mSession.capture(request, resultListener, mHandler);
+                }
+
+                // make sure recording is still going after video snapshot
+                SystemClock.sleep(RECORDING_DURATION_MS / 2);
+
+                // Stop recording and preview
+                int durationMs = stopRecording(/* useMediaRecorder */true);
+                // For non-burst test, use number of frames to also double check video frame rate.
+                // Burst video snapshot is allowed to cause frame rate drop, so do not use number
+                // of frames to estimate duration
+                if (!burstTest) {
+                    durationMs = (int) (resultListener.getTotalNumFrames() * 1000.0f /
+                            profile.videoFrameRate);
+                }
+
+                // Validate recorded video
+                validateRecording(videoSz, durationMs);
+
+                if (burstTest) {
+                    for (int i = 0; i < BURST_VIDEO_SNAPSHOT_NUM; i++) {
+                        Image image = imageListener.getImage(CAPTURE_IMAGE_TIMEOUT_MS);
+                        validateVideoSnapshotCapture(image, videoSnapshotSz);
+                        image.close();
+                    }
+                } else {
+                    // validate video snapshot image
+                    Image image = imageListener.getImage(CAPTURE_IMAGE_TIMEOUT_MS);
+                    validateVideoSnapshotCapture(image, videoSnapshotSz);
+
+                    // validate if there is framedrop around video snapshot
+                    totalDroppedFrames += validateFrameDropAroundVideoSnapshot(
+                            resultListener, image.getTimestamp());
+
+                    //TODO: validate jittering. Should move to PTS
+                    //validateJittering(resultListener);
+
+                    image.close();
+                }
+            }
+
+            if (!burstTest) {
+                Log.w(TAG, String.format("Camera %d Video size %s: Number of dropped frames " +
+                        "detected in %d trials is %d frames.", cameraId, videoSz.toString(),
+                        numTestIterations, totalDroppedFrames));
+                mCollector.expectLessOrEqual(
+                        String.format(
+                                "Camera %d Video size %s: Number of dropped frames %d must not"
+                                + " be larger than %d",
+                                cameraId, videoSz.toString(), totalDroppedFrames,
+                                kFrameDrop_Tolerence),
+                        kFrameDrop_Tolerence, totalDroppedFrames);
+            }
+            closeImageReader();
+        }
+    }
+
+    /**
+     * Configure a video snapshot request according to the still capture size.
+     *
+     * <p>Hooks the image listener up to the JPEG reader, then adds the recording,
+     * preview and reader surfaces as targets of the given request builder, asserting
+     * each surface is non-null first.</p>
+     *
+     * @param requestBuilder the builder to attach the output targets to.
+     * @param imageListener receives the JPEG snapshot images.
+     */
+    private void prepareVideoSnapshot(
+            CaptureRequest.Builder requestBuilder,
+            ImageReader.OnImageAvailableListener imageListener)
+            throws Exception {
+        mReader.setOnImageAvailableListener(imageListener, mHandler);
+        addRequestTarget(requestBuilder, mRecordingSurface,
+                "Recording surface must be non-null!");
+        addRequestTarget(requestBuilder, mPreviewSurface,
+                "Preview surface must be non-null!");
+        addRequestTarget(requestBuilder, mReaderSurface,
+                "Reader surface must be non-null!");
+    }
+
+    /** Assert that {@code surface} is non-null, then add it as a request target. */
+    private static void addRequestTarget(CaptureRequest.Builder builder, Surface surface,
+            String message) {
+        assertNotNull(message, surface);
+        builder.addTarget(surface);
+    }
+
+    /**
+     * Update preview size with video size.
+     *
+     * <p>Preview size will be capped with max preview size. If the video size
+     * exceeds the max preview size, the first (largest, given the descending
+     * ordering of {@code mOrderedPreviewSizes}) preview size that fits within the
+     * video size and whose min frame duration can sustain the video frame rate is
+     * used instead; when none qualifies, fall back to the video size itself.</p>
+     *
+     * @param videoSize The video size used for preview.
+     * @param videoFrameRate The video frame rate
+     * @throws IllegalStateException if the preview size list is not initialized.
+     */
+    private void updatePreviewSurfaceWithVideo(Size videoSize, int videoFrameRate) {
+        if (mOrderedPreviewSizes == null) {
+            throw new IllegalStateException("supported preview size list is not initialized yet");
+        }
+        // Allow a bit of tolerance so a few nanoseconds of difference doesn't
+        // disqualify a size.
+        final float FRAME_DURATION_TOLERANCE = 0.01f;
+        long videoFrameDuration = (long) (1e9 / videoFrameRate *
+                (1.0 + FRAME_DURATION_TOLERANCE));
+        HashMap<Size, Long> minFrameDurationMap = mStaticInfo.
+                getAvailableMinFrameDurationsForFormatChecked(ImageFormat.PRIVATE);
+        Size maxPreviewSize = mOrderedPreviewSizes.get(0);
+        Size previewSize = null;
+        if (videoSize.getWidth() > maxPreviewSize.getWidth() ||
+                videoSize.getHeight() > maxPreviewSize.getHeight()) {
+            for (Size s : mOrderedPreviewSizes) {
+                Long frameDuration = minFrameDurationMap.get(s);
+                if (mStaticInfo.isHardwareLevelLegacy()) {
+                    // Legacy doesn't report min frame duration
+                    frameDuration = 0L;  // plain autoboxing; "new Long(0)" is deprecated
+                }
+                assertTrue("Cannot find minimum frame duration for private size" + s,
+                        frameDuration != null);
+                if (frameDuration <= videoFrameDuration &&
+                        s.getWidth() <= videoSize.getWidth() &&
+                        s.getHeight() <= videoSize.getHeight()) {
+                    Log.w(TAG, "Overwrite preview size from " + videoSize.toString() +
+                            " to " + s.toString());
+                    previewSize = s;
+                    break;
+                }
+            }
+        }
+        // If no preview size works (or the video size already fits within the max
+        // preview size), fall back to the video size itself.
+        if (previewSize == null) {
+            previewSize = videoSize;
+        }
+        updatePreviewSurface(previewSize);
+    }
+
+    /**
+     * Configure MediaRecorder recording session with CamcorderProfile, prepare
+     * the recording surface.
+     *
+     * <p>Call order matters: the audio/video sources must be set before
+     * {@code setProfile()} per the MediaRecorder state machine. If a persistent
+     * input surface was supplied it is reused (and remains owned by the caller);
+     * otherwise the recorder's own surface is fetched after {@code prepare()}.</p>
+     *
+     * @param profile the camcorder profile supplying encoder/size/rate settings.
+     */
+    private void prepareRecordingWithProfile(CamcorderProfile profile)
+            throws Exception {
+        // Prepare MediaRecorder.
+        mMediaRecorder.setAudioSource(MediaRecorder.AudioSource.CAMCORDER);
+        mMediaRecorder.setVideoSource(MediaRecorder.VideoSource.SURFACE);
+        mMediaRecorder.setProfile(profile);
+        mMediaRecorder.setOutputFile(mOutMediaFileName);
+        if (mPersistentSurface != null) {
+            // Feed the recorder from the externally-owned persistent surface.
+            mMediaRecorder.setInputSurface(mPersistentSurface);
+            mRecordingSurface = mPersistentSurface;
+        }
+        mMediaRecorder.prepare();
+        if (mPersistentSurface == null) {
+            // With a SURFACE video source, getSurface() is valid only after prepare().
+            mRecordingSurface = mMediaRecorder.getSurface();
+        }
+        assertNotNull("Recording surface must be non-null!", mRecordingSurface);
+        mVideoFrameRate = profile.videoFrameRate;
+        mVideoSize = new Size(profile.videoFrameWidth, profile.videoFrameHeight);
+    }
+
+    /**
+     * Configure MediaRecorder recording session with CamcorderProfile, prepare
+     * the recording surface. Use AVC for video compression, AAC for audio compression.
+     * Both are required for android devices by android CDD.
+     *
+     * <p>Unlike {@link #prepareRecordingWithProfile}, this variant configures the
+     * recorder field-by-field and supports a capture rate different from the
+     * playback frame rate (e.g. slow-motion capture).</p>
+     *
+     * @param sz video frame size.
+     * @param videoFrameRate playback frame rate of the encoded video.
+     * @param captureRate rate at which frames are fed to the encoder.
+     */
+    private void prepareRecording(Size sz, int videoFrameRate, int captureRate)
+            throws Exception {
+        // Prepare MediaRecorder. Sources must be set before output format and
+        // encoder settings per the MediaRecorder state machine.
+        mMediaRecorder.setAudioSource(MediaRecorder.AudioSource.CAMCORDER);
+        mMediaRecorder.setVideoSource(MediaRecorder.VideoSource.SURFACE);
+        mMediaRecorder.setOutputFormat(MediaRecorder.OutputFormat.THREE_GPP);
+        mMediaRecorder.setOutputFile(mOutMediaFileName);
+        // Bit rate is scaled with the frame size; see getVideoBitRate().
+        mMediaRecorder.setVideoEncodingBitRate(getVideoBitRate(sz));
+        mMediaRecorder.setVideoFrameRate(videoFrameRate);
+        mMediaRecorder.setCaptureRate(captureRate);
+        mMediaRecorder.setVideoSize(sz.getWidth(), sz.getHeight());
+        mMediaRecorder.setVideoEncoder(MediaRecorder.VideoEncoder.H264);
+        mMediaRecorder.setAudioEncoder(MediaRecorder.AudioEncoder.AAC);
+        if (mPersistentSurface != null) {
+            // Feed the recorder from the externally-owned persistent surface.
+            mMediaRecorder.setInputSurface(mPersistentSurface);
+            mRecordingSurface = mPersistentSurface;
+        }
+        mMediaRecorder.prepare();
+        if (mPersistentSurface == null) {
+            // With a SURFACE video source, getSurface() is valid only after prepare().
+            mRecordingSurface = mMediaRecorder.getSurface();
+        }
+        assertNotNull("Recording surface must be non-null!", mRecordingSurface);
+        mVideoFrameRate = videoFrameRate;
+        mVideoSize = sz;
+    }
+
+    /**
+     * Configure the capture session and start the repeating recording request.
+     *
+     * <p>Outputs are the preview and recording surfaces, plus the snapshot reader
+     * surface when one has been created. The AE target FPS range is pinned to the
+     * video frame rate so the camera output rate matches the encoder rate.</p>
+     *
+     * @param useMediaRecorder whether MediaRecorder drives encoding (the MediaCodec
+     *                         path is not implemented yet).
+     * @param listener capture callback receiving the repeating request results.
+     * @param useVideoStab whether to request video stabilization.
+     * @throws IllegalArgumentException if stabilization is requested but unsupported.
+     */
+    private void startRecording(boolean useMediaRecorder,
+            CameraCaptureSession.CaptureCallback listener, boolean useVideoStab) throws Exception {
+        if (!mStaticInfo.isVideoStabilizationSupported() && useVideoStab) {
+            throw new IllegalArgumentException("Video stabilization is not supported");
+        }
+
+        List<Surface> outputSurfaces = new ArrayList<Surface>(2);
+        assertTrue("Both preview and recording surfaces should be valid",
+                mPreviewSurface.isValid() && mRecordingSurface.isValid());
+        outputSurfaces.add(mPreviewSurface);
+        outputSurfaces.add(mRecordingSurface);
+        // Video snapshot surface
+        if (mReaderSurface != null) {
+            outputSurfaces.add(mReaderSurface);
+        }
+        mSessionListener = new BlockingSessionCallback();
+        mSession = configureCameraSession(mCamera, outputSurfaces, mSessionListener, mHandler);
+
+        CaptureRequest.Builder recordingRequestBuilder =
+                mCamera.createCaptureRequest(CameraDevice.TEMPLATE_RECORD);
+        // Make sure camera output frame rate is set to correct value.
+        Range<Integer> fpsRange = Range.create(mVideoFrameRate, mVideoFrameRate);
+        recordingRequestBuilder.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, fpsRange);
+        if (useVideoStab) {
+            recordingRequestBuilder.set(CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE,
+                    CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE_ON);
+        }
+        recordingRequestBuilder.addTarget(mRecordingSurface);
+        recordingRequestBuilder.addTarget(mPreviewSurface);
+        mSession.setRepeatingRequest(recordingRequestBuilder.build(), listener, mHandler);
+
+        if (useMediaRecorder) {
+            mMediaRecorder.start();
+        } else {
+            // TODO: need implement MediaCodec path.
+        }
+        // Timestamp used by stopRecording() to estimate the recorded duration.
+        mRecordingStartTime = SystemClock.elapsedRealtime();
+    }
+
+    /**
+     * Stop the repeating request and block until the capture session is closed.
+     */
+    private void stopCameraStreaming() throws Exception {
+        if (VERBOSE) {
+            Log.v(TAG, "Stopping camera streaming and waiting for idle");
+        }
+        // Stop repeating, wait for captures to complete, and disconnect from
+        // surfaces
+        mSession.close();
+        mSessionListener.getStateWaiter().waitForState(SESSION_CLOSED, SESSION_CLOSE_TIMEOUT_MS);
+    }
+
+    /**
+     * Stop recording and return the wall-clock duration in milliseconds since
+     * {@link #startRecording} (not the encoded stream's duration).
+     *
+     * <p>The camera session is closed before {@code MediaRecorder.stop()} so the
+     * recorder stops receiving frames first. A persistent input surface is kept
+     * for reuse; a recorder-owned surface is released here.</p>
+     */
+    private int stopRecording(boolean useMediaRecorder) throws Exception {
+        long stopRecordingTime = SystemClock.elapsedRealtime();
+        if (useMediaRecorder) {
+            stopCameraStreaming();
+
+            mMediaRecorder.stop();
+            // Can reuse the MediaRecorder object after reset.
+            mMediaRecorder.reset();
+        } else {
+            // TODO: need implement MediaCodec path.
+        }
+        // Only release surfaces owned by the MediaRecorder; a persistent surface
+        // is owned by the caller and reused across recordings.
+        if (mPersistentSurface == null && mRecordingSurface != null) {
+            mRecordingSurface.release();
+            mRecordingSurface = null;
+        }
+        return (int) (stopRecordingTime - mRecordingStartTime);
+    }
+
+    /** Release the MediaRecorder if present; safe to call repeatedly. */
+    private void releaseRecorder() {
+        if (mMediaRecorder != null) {
+            mMediaRecorder.release();
+            mMediaRecorder = null;
+        }
+    }
+
+    /**
+     * Validate the recorded media file.
+     *
+     * <p>Checks that the output file exists, that the first track whose MIME type
+     * contains "video" has the expected frame size, and (on non-LEGACY devices)
+     * that its duration is within {@code DURATION_MARGIN} of the expected
+     * duration. The file is deleted afterwards unless {@code DEBUG_DUMP}.</p>
+     *
+     * @param sz expected video frame size.
+     * @param expectedDurationMs expected video duration in milliseconds.
+     */
+    private void validateRecording(Size sz, int expectedDurationMs) throws Exception {
+        File outFile = new File(mOutMediaFileName);
+        assertTrue("No video is recorded", outFile.exists());
+
+        MediaExtractor extractor = new MediaExtractor();
+        try {
+            extractor.setDataSource(mOutMediaFileName);
+            long durationUs = 0;
+            int width = -1, height = -1;
+            int numTracks = extractor.getTrackCount();
+            final String VIDEO_MIME_TYPE = "video";
+            // Find the first video track and read its duration and dimensions.
+            for (int i = 0; i < numTracks; i++) {
+                MediaFormat format = extractor.getTrackFormat(i);
+                String mime = format.getString(MediaFormat.KEY_MIME);
+                if (mime.contains(VIDEO_MIME_TYPE)) {
+                    Log.i(TAG, "video format is: " + format.toString());
+                    durationUs = format.getLong(MediaFormat.KEY_DURATION);
+                    width = format.getInteger(MediaFormat.KEY_WIDTH);
+                    height = format.getInteger(MediaFormat.KEY_HEIGHT);
+                    break;
+                }
+            }
+            Size videoSz = new Size(width, height);
+            assertTrue("Video size doesn't match, expected " + sz.toString() +
+                    " got " + videoSz.toString(), videoSz.equals(sz));
+            int duration = (int) (durationUs / 1000);
+            if (VERBOSE) {
+                Log.v(TAG, String.format("Video duration: recorded %dms, expected %dms",
+                        duration, expectedDurationMs));
+            }
+
+            // TODO: Don't skip this for video snapshot
+            if (!mStaticInfo.isHardwareLevelLegacy()) {
+                assertTrue(String.format(
+                        "Camera %s: Video duration doesn't match: recorded %dms, expected %dms.",
+                        mCamera.getId(), duration, expectedDurationMs),
+                        Math.abs(duration - expectedDurationMs) <
+                        DURATION_MARGIN * expectedDurationMs);
+            }
+        } finally {
+            extractor.release();
+            if (!DEBUG_DUMP) {
+                outFile.delete();
+            }
+        }
+    }
+
+    /**
+     * Validate a video snapshot capture image.
+     *
+     * <p>Checks size, format and JPEG decodability via
+     * {@code CameraTestUtils.validateImage}.</p>
+     *
+     * @param image The JPEG image to be verified.
+     * @param size The JPEG capture size to be verified against.
+     */
+    private void validateVideoSnapshotCapture(Image image, Size size) {
+        CameraTestUtils.validateImage(image, size.getWidth(), size.getHeight(),
+                ImageFormat.JPEG, /*filePath*/null);
+    }
+
+    /**
+     * Validate if video snapshot causes frame drop.
+     * Here frame drop is defined as frame duration >= 2 * expected frame duration.
+     * Return the estimated number of frames dropped during video snapshot.
+     *
+     * @param resultListener capture callback that collected the recording results.
+     * @param imageTimeStamp sensor timestamp of the snapshot image, used to locate
+     *                       the matching capture result.
+     * @return estimated number of dropped frames around the snapshot (0 on LEGACY,
+     *         whose snapshots briefly pause the preview by design).
+     */
+    private int validateFrameDropAroundVideoSnapshot(
+            SimpleCaptureCallback resultListener, long imageTimeStamp) {
+        double expectedDurationMs = 1000.0 / mVideoFrameRate;
+        CaptureResult prevResult = resultListener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS);
+        long prevTS = getValueNotNull(prevResult, CaptureResult.SENSOR_TIMESTAMP);
+        // NOTE(review): the negated condition reads inverted relative to the method
+        // name -- confirm hasMoreResults() semantics in CameraTestUtils; the loop is
+        // expected to run while queued results remain.
+        while (!resultListener.hasMoreResults()) {
+            CaptureResult currentResult =
+                    resultListener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS);
+            long currentTS = getValueNotNull(currentResult, CaptureResult.SENSOR_TIMESTAMP);
+            if (currentTS == imageTimeStamp) {
+                // validate the timestamp before and after, then return
+                CaptureResult nextResult =
+                        resultListener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS);
+                long nextTS = getValueNotNull(nextResult, CaptureResult.SENSOR_TIMESTAMP);
+                double durationMs = (currentTS - prevTS) / 1000000.0;
+                int totalFramesDropped = 0;
+
+                // Snapshots in legacy mode pause the preview briefly. Skip the duration
+                // requirements for legacy mode unless this is fixed.
+                if (!mStaticInfo.isHardwareLevelLegacy()) {
+                    mCollector.expectTrue(
+                            String.format(
+                                    "Video %dx%d Frame drop detected before video snapshot: " +
+                                            "duration %.2fms (expected %.2fms)",
+                                    mVideoSize.getWidth(), mVideoSize.getHeight(),
+                                    durationMs, expectedDurationMs
+                            ),
+                            durationMs <= (expectedDurationMs * MAX_NUM_FRAME_DROP_INTERVAL_ALLOWED)
+                    );
+                    // Log a warning if any frame drop is detected.
+                    if (durationMs >= expectedDurationMs * 2) {
+                        Log.w(TAG, String.format(
+                                "Video %dx%d Frame drop detected before video snapshot: " +
+                                        "duration %.2fms (expected %.2fms)",
+                                mVideoSize.getWidth(), mVideoSize.getHeight(),
+                                durationMs, expectedDurationMs
+                        ));
+                    }
+
+                    durationMs = (nextTS - currentTS) / 1000000.0;
+                    mCollector.expectTrue(
+                            String.format(
+                                    "Video %dx%d Frame drop detected after video snapshot: " +
+                                            "duration %.2fms (expected %.2fms)",
+                                    mVideoSize.getWidth(), mVideoSize.getHeight(),
+                                    durationMs, expectedDurationMs
+                            ),
+                            durationMs <= (expectedDurationMs * MAX_NUM_FRAME_DROP_INTERVAL_ALLOWED)
+                    );
+                    // Log a warning if any frame drop is detected.
+                    // NOTE(review): this message uses %f while its counterpart above
+                    // uses %.2f -- likely an oversight, harmless.
+                    if (durationMs >= expectedDurationMs * 2) {
+                        Log.w(TAG, String.format(
+                                "Video %dx%d Frame drop detected after video snapshot: " +
+                                        "duration %fms (expected %fms)",
+                                mVideoSize.getWidth(), mVideoSize.getHeight(),
+                                durationMs, expectedDurationMs
+                        ));
+                    }
+
+                    double totalDurationMs = (nextTS - prevTS) / 1000000.0;
+                    // Minus 2 for the expected 2 frames interval
+                    totalFramesDropped = (int) (totalDurationMs / expectedDurationMs) - 2;
+                    if (totalFramesDropped < 0) {
+                        Log.w(TAG, "totalFrameDropped is " + totalFramesDropped +
+                                ". Video frame rate might be too fast.");
+                    }
+                    totalFramesDropped = Math.max(0, totalFramesDropped);
+                }
+                return totalFramesDropped;
+            }
+            prevTS = currentTS;
+        }
+        throw new AssertionFailedError(
+                "Video snapshot timestamp does not match any of capture results!");
+    }
+
+ /**
+ * Calculate a video bit rate based on the size. The bit rate is scaled
+ * based on ratio of video size to 1080p size.
+ */
+ private int getVideoBitRate(Size sz) {
+ int rate = BIT_RATE_1080P;
+ float scaleFactor = sz.getHeight() * sz.getWidth() / (float)(1920 * 1080);
+ rate = (int)(rate * scaleFactor);
+
+ // Clamp to the MIN, MAX range.
+ return Math.max(BIT_RATE_MIN, Math.min(BIT_RATE_MAX, rate));
+ }
+}
diff --git a/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/stress/Camera2ReprocessCaptureTest.java b/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/stress/Camera2ReprocessCaptureTest.java
new file mode 100644
index 0000000..2dac371
--- /dev/null
+++ b/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/stress/Camera2ReprocessCaptureTest.java
@@ -0,0 +1,1162 @@
+/*
+ * Copyright 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.mediaframeworktest.stress;
+
+import com.android.ex.camera2.blocking.BlockingSessionCallback;
+import com.android.mediaframeworktest.Camera2SurfaceViewTestCase;
+import com.android.mediaframeworktest.helpers.CameraTestUtils;
+import com.android.mediaframeworktest.helpers.StaticMetadata;
+
+import android.graphics.ImageFormat;
+import android.hardware.camera2.CameraDevice;
+import android.hardware.camera2.CaptureFailure;
+import android.hardware.camera2.CaptureRequest;
+import android.hardware.camera2.CaptureResult;
+import android.hardware.camera2.TotalCaptureResult;
+import android.hardware.camera2.params.InputConfiguration;
+import android.media.Image;
+import android.media.ImageReader;
+import android.media.ImageWriter;
+import android.util.Log;
+import android.util.Size;
+import android.view.Surface;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+
+import static com.android.mediaframeworktest.helpers.CameraTestUtils.EXIF_TEST_DATA;
+import static com.android.mediaframeworktest.helpers.CameraTestUtils.SESSION_CLOSE_TIMEOUT_MS;
+import static com.android.mediaframeworktest.helpers.CameraTestUtils.SimpleCaptureCallback;
+import static com.android.mediaframeworktest.helpers.CameraTestUtils.SimpleImageReaderListener;
+import static com.android.mediaframeworktest.helpers.CameraTestUtils.SimpleImageWriterListener;
+import static com.android.mediaframeworktest.helpers.CameraTestUtils.configureReprocessableCameraSession;
+import static com.android.mediaframeworktest.helpers.CameraTestUtils.dumpFile;
+import static com.android.mediaframeworktest.helpers.CameraTestUtils.getAscendingOrderSizes;
+import static com.android.mediaframeworktest.helpers.CameraTestUtils.getDataFromImage;
+import static com.android.mediaframeworktest.helpers.CameraTestUtils.makeImageReader;
+import static com.android.mediaframeworktest.helpers.CameraTestUtils.setJpegKeys;
+import static com.android.mediaframeworktest.helpers.CameraTestUtils.verifyJpegKeys;
+
+/**
+ * <p>Tests for Reprocess API.</p>
+ *
+ * adb shell am instrument \
+ * -e class \
+ * com.android.mediaframeworktest.stress.Camera2ReprocessCaptureTest \
+ * -e repeat 1 \
+ * -e waitIntervalMs 1000 \
+ * -e resultToFile false \
+ * -r -w com.android.mediaframeworktest/.Camera2InstrumentationTestRunner
+ */
+public class Camera2ReprocessCaptureTest extends Camera2SurfaceViewTestCase {
+    private static final String TAG = "ReprocessCaptureTest";
+    private static final boolean VERBOSE = Log.isLoggable(TAG, Log.VERBOSE);
+    private static final boolean DEBUG = Log.isLoggable(TAG, Log.DEBUG);
+    // Max number of frames to wait for a capture result before timing out.
+    private static final int CAPTURE_TIMEOUT_FRAMES = 100;
+    // Max time (ms) to wait for an image or for an input buffer to be released.
+    private static final int CAPTURE_TIMEOUT_MS = 3000;
+    private static final int WAIT_FOR_SURFACE_CHANGE_TIMEOUT_MS = 1000;
+    private static final int CAPTURE_TEMPLATE = CameraDevice.TEMPLATE_ZERO_SHUTTER_LAG;
+    private static final int ZSL_TEMPLATE = CameraDevice.TEMPLATE_ZERO_SHUTTER_LAG;
+    private static final int NUM_REPROCESS_TEST_LOOP = 3;
+    private static final int NUM_REPROCESS_CAPTURES = 3;
+    private static final int NUM_REPROCESS_BURST = 3;
+    // Counter used to generate unique file names for dumped debug images.
+    private int mDumpFrameCount = 0;
+
+    // The image reader for the first regular capture
+    private ImageReader mFirstImageReader;
+    // The image reader for the reprocess capture
+    private ImageReader mSecondImageReader;
+    // A flag indicating whether the regular capture and the reprocess capture share the same image
+    // reader. If it's true, mFirstImageReader should be used for regular and reprocess outputs.
+    private boolean mShareOneImageReader;
+    private SimpleImageReaderListener mFirstImageReaderListener;
+    private SimpleImageReaderListener mSecondImageReaderListener;
+    // Input surface of the reprocessable session; images queued here become reprocess inputs.
+    private Surface mInputSurface;
+    private ImageWriter mImageWriter;
+    private SimpleImageWriterListener mImageWriterListener;
+
+    // The capture scenarios exercised by the individual test methods.
+    private enum CaptureTestCase {
+        SINGLE_SHOT,
+        BURST,
+        MIXED_BURST,
+        ABORT_CAPTURE,
+        TIMESTAMPS,
+        JPEG_EXIF,
+        REQUEST_KEYS,
+    }
+
+    /**
+     * Test YUV_420_888 -> JPEG with maximal supported sizes
+     */
+    public void testBasicYuvToJpegReprocessing() throws Exception {
+        for (String id : mCameraIds) {
+            // Skip cameras that cannot reprocess YUV input.
+            if (!isYuvReprocessSupported(id)) {
+                continue;
+            }
+
+            // Test iteration starts...
+            for (int repeat = 0; repeat < getRepeatCount(); ++repeat) {
+                Log.v(TAG, String.format("Reprocessing YUV to JPEG: %d/%d", repeat + 1,
+                        getRepeatCount()));
+                // YUV_420_888 -> JPEG must be supported.
+                testBasicReprocessing(id, ImageFormat.YUV_420_888, ImageFormat.JPEG);
+                getResultPrinter().printStatus(getRepeatCount(), repeat + 1, id);
+                Thread.sleep(getTestWaitIntervalMs());
+            }
+        }
+    }
+
+    /**
+     * Test OPAQUE -> JPEG with maximal supported sizes
+     */
+    public void testBasicOpaqueToJpegReprocessing() throws Exception {
+        for (String id : mCameraIds) {
+            // Skip cameras that cannot reprocess opaque (PRIVATE) input.
+            if (!isOpaqueReprocessSupported(id)) {
+                continue;
+            }
+
+            // Test iteration starts...
+            for (int repeat = 0; repeat < getRepeatCount(); ++repeat) {
+                Log.v(TAG, String.format("Reprocessing OPAQUE to JPEG: %d/%d", repeat + 1,
+                        getRepeatCount()));
+                // OPAQUE -> JPEG must be supported.
+                testBasicReprocessing(id, ImageFormat.PRIVATE, ImageFormat.JPEG);
+                getResultPrinter().printStatus(getRepeatCount(), repeat + 1, id);
+                Thread.sleep(getTestWaitIntervalMs());
+            }
+
+        }
+    }
+
+    /**
+     * Test all supported size and format combinations with preview.
+     */
+    public void testReprocessingSizeFormatWithPreview() throws Exception {
+        for (String id : mCameraIds) {
+            // Skip cameras that support neither YUV nor opaque reprocessing.
+            if (!isYuvReprocessSupported(id) && !isOpaqueReprocessSupported(id)) {
+                continue;
+            }
+
+            try {
+                // open Camera device
+                openDevice(id);
+
+                // Test iteration starts...
+                for (int repeat = 0; repeat < getRepeatCount(); ++repeat) {
+                    Log.v(TAG, String.format("Reprocessing size format with preview: %d/%d",
+                            repeat + 1, getRepeatCount()));
+                    // Use the largest available preview size for the preview stream.
+                    testReprocessingAllCombinations(id, mOrderedPreviewSizes.get(0),
+                            CaptureTestCase.SINGLE_SHOT);
+                    getResultPrinter().printStatus(getRepeatCount(), repeat + 1, id);
+                    Thread.sleep(getTestWaitIntervalMs());
+                }
+            } finally {
+                // Always release the camera, even if an iteration failed.
+                closeDevice();
+            }
+        }
+    }
+
+    /**
+     * Test burst captures mixed with regular and reprocess captures with and without preview.
+     */
+    public void testMixedBurstReprocessing() throws Exception {
+        for (String id : mCameraIds) {
+            // Skip cameras that support neither YUV nor opaque reprocessing.
+            if (!isYuvReprocessSupported(id) && !isOpaqueReprocessSupported(id)) {
+                continue;
+            }
+
+            try {
+                // open Camera device
+                openDevice(id);
+
+                // Test iteration starts...
+                for (int repeat = 0; repeat < getRepeatCount(); ++repeat) {
+                    Log.v(TAG, String.format("Reprocessing mixed burst with or without preview: "
+                            + "%d/%d", repeat + 1, getRepeatCount()));
+                    // no preview
+                    testReprocessingAllCombinations(id, /*previewSize*/null,
+                            CaptureTestCase.MIXED_BURST);
+                    // with preview
+                    testReprocessingAllCombinations(id, mOrderedPreviewSizes.get(0),
+                            CaptureTestCase.MIXED_BURST);
+                    getResultPrinter().printStatus(getRepeatCount(), repeat + 1, id);
+                    Thread.sleep(getTestWaitIntervalMs());
+                }
+            } finally {
+                // Always release the camera, even if an iteration failed.
+                closeDevice();
+            }
+        }
+    }
+
+    /**
+     * Test the input format and output format with the largest input and output sizes.
+     *
+     * @param cameraId camera to open for the duration of this test
+     * @param inputFormat format of the reprocess input stream
+     * @param reprocessOutputFormat format of the reprocess output stream
+     */
+    private void testBasicReprocessing(String cameraId, int inputFormat,
+            int reprocessOutputFormat) throws Exception {
+        try {
+            openDevice(cameraId);
+
+            // Single-shot case without a preview surface.
+            testReprocessingMaxSizes(cameraId, inputFormat, reprocessOutputFormat,
+                    /* previewSize */null, CaptureTestCase.SINGLE_SHOT);
+        } finally {
+            closeDevice();
+        }
+    }
+
+    /**
+     * Test the input format and output format with the largest input and output sizes for a
+     * certain test case.
+     *
+     * <p>Only the non-burst test cases are dispatched here; BURST and MIXED_BURST go through
+     * {@link #testReprocessingAllCombinations} and fall into the default (throwing) branch.
+     */
+    private void testReprocessingMaxSizes(String cameraId, int inputFormat,
+            int reprocessOutputFormat, Size previewSize, CaptureTestCase captureTestCase)
+            throws Exception {
+        Size maxInputSize = getMaxSize(inputFormat, StaticMetadata.StreamDirection.Input);
+        Size maxReprocessOutputSize =
+                getMaxSize(reprocessOutputFormat, StaticMetadata.StreamDirection.Output);
+
+        switch (captureTestCase) {
+            case SINGLE_SHOT:
+                testReprocess(cameraId, maxInputSize, inputFormat, maxReprocessOutputSize,
+                        reprocessOutputFormat, previewSize, NUM_REPROCESS_CAPTURES);
+                break;
+            case ABORT_CAPTURE:
+                testReprocessAbort(cameraId, maxInputSize, inputFormat, maxReprocessOutputSize,
+                        reprocessOutputFormat);
+                break;
+            case TIMESTAMPS:
+                testReprocessTimestamps(cameraId, maxInputSize, inputFormat, maxReprocessOutputSize,
+                        reprocessOutputFormat);
+                break;
+            case JPEG_EXIF:
+                // EXIF testing always reprocesses into JPEG, so no output format argument.
+                testReprocessJpegExif(cameraId, maxInputSize, inputFormat, maxReprocessOutputSize);
+                break;
+            case REQUEST_KEYS:
+                testReprocessRequestKeys(cameraId, maxInputSize, inputFormat,
+                        maxReprocessOutputSize, reprocessOutputFormat);
+                break;
+            default:
+                throw new IllegalArgumentException("Invalid test case");
+        }
+    }
+
+    /**
+     * Test all input format, input size, output format, and output size combinations.
+     *
+     * <p>Iterates the full cross product of supported input formats/sizes and the reprocess
+     * output formats/sizes that are valid for each input format, running the given test case
+     * on every combination.
+     */
+    private void testReprocessingAllCombinations(String cameraId, Size previewSize,
+            CaptureTestCase captureTestCase) throws Exception {
+
+        int[] supportedInputFormats =
+                mStaticInfo.getAvailableFormats(StaticMetadata.StreamDirection.Input);
+        for (int inputFormat : supportedInputFormats) {
+            Size[] supportedInputSizes =
+                    mStaticInfo.getAvailableSizesForFormatChecked(inputFormat,
+                    StaticMetadata.StreamDirection.Input);
+
+            for (Size inputSize : supportedInputSizes) {
+                // Output formats are constrained by the input format being reprocessed.
+                int[] supportedReprocessOutputFormats =
+                        mStaticInfo.getValidOutputFormatsForInput(inputFormat);
+
+                for (int reprocessOutputFormat : supportedReprocessOutputFormats) {
+                    Size[] supportedReprocessOutputSizes =
+                            mStaticInfo.getAvailableSizesForFormatChecked(reprocessOutputFormat,
+                            StaticMetadata.StreamDirection.Output);
+
+                    for (Size reprocessOutputSize : supportedReprocessOutputSizes) {
+                        switch (captureTestCase) {
+                            case SINGLE_SHOT:
+                                testReprocess(cameraId, inputSize, inputFormat,
+                                        reprocessOutputSize, reprocessOutputFormat, previewSize,
+                                        NUM_REPROCESS_CAPTURES);
+                                break;
+                            case BURST:
+                                testReprocessBurst(cameraId, inputSize, inputFormat,
+                                        reprocessOutputSize, reprocessOutputFormat, previewSize,
+                                        NUM_REPROCESS_BURST);
+                                break;
+                            case MIXED_BURST:
+                                testReprocessMixedBurst(cameraId, inputSize, inputFormat,
+                                        reprocessOutputSize, reprocessOutputFormat, previewSize,
+                                        NUM_REPROCESS_BURST);
+                                break;
+                            default:
+                                throw new IllegalArgumentException("Invalid test case");
+                        }
+                    }
+                }
+            }
+        }
+    }
+
+    /**
+     * Test burst that is mixed with regular and reprocess capture requests.
+     */
+    private void testReprocessMixedBurst(String cameraId, Size inputSize, int inputFormat,
+            Size reprocessOutputSize, int reprocessOutputFormat, Size previewSize,
+            int numBurst) throws Exception {
+        if (VERBOSE) {
+            Log.v(TAG, "testReprocessMixedBurst: cameraId: " + cameraId + " inputSize: " +
+                    inputSize + " inputFormat: " + inputFormat + " reprocessOutputSize: " +
+                    reprocessOutputSize + " reprocessOutputFormat: " + reprocessOutputFormat +
+                    " previewSize: " + previewSize + " numBurst: " + numBurst);
+        }
+
+        boolean enablePreview = (previewSize != null);
+        // Start empty so the finally block can iterate safely even if capture never ran.
+        ImageResultHolder[] imageResultHolders = new ImageResultHolder[0];
+
+        try {
+            // totalNumBurst = number of regular burst + number of reprocess burst.
+            int totalNumBurst = numBurst * 2;
+
+            if (enablePreview) {
+                updatePreviewSurface(previewSize);
+            } else {
+                mPreviewSurface = null;
+            }
+
+            setupImageReaders(inputSize, inputFormat, reprocessOutputSize, reprocessOutputFormat,
+                    totalNumBurst);
+            setupReprocessableSession(mPreviewSurface, /*numImageWriterImages*/numBurst);
+
+            if (enablePreview) {
+                startPreview(mPreviewSurface);
+            }
+
+            // Prepare an array of booleans indicating each capture's type (regular or reprocess)
+            // Even indices are reprocess captures; odd indices are regular captures.
+            boolean[] isReprocessCaptures = new boolean[totalNumBurst];
+            for (int i = 0; i < totalNumBurst; i++) {
+                if ((i & 1) == 0) {
+                    isReprocessCaptures[i] = true;
+                } else {
+                    isReprocessCaptures[i] = false;
+                }
+            }
+
+            imageResultHolders = doMixedReprocessBurstCapture(isReprocessCaptures);
+            for (ImageResultHolder holder : imageResultHolders) {
+                Image reprocessedImage = holder.getImage();
+                TotalCaptureResult result = holder.getTotalCaptureResult();
+
+                // Reprocessed output must match the requested format/size and carry the
+                // sensor timestamp from its capture result.
+                mCollector.expectImageProperties("testReprocessMixedBurst", reprocessedImage,
+                        reprocessOutputFormat, reprocessOutputSize,
+                        result.get(CaptureResult.SENSOR_TIMESTAMP));
+
+                if (DEBUG) {
+                    Log.d(TAG, String.format("camera %s in %dx%d %d out %dx%d %d",
+                            cameraId, inputSize.getWidth(), inputSize.getHeight(), inputFormat,
+                            reprocessOutputSize.getWidth(), reprocessOutputSize.getHeight(),
+                            reprocessOutputFormat));
+                    dumpImage(reprocessedImage,
+                            "/testReprocessMixedBurst_camera" + cameraId + "_" + mDumpFrameCount);
+                    mDumpFrameCount++;
+                }
+            }
+        } finally {
+            // Close every returned image before tearing down the session and readers.
+            for (ImageResultHolder holder : imageResultHolders) {
+                holder.getImage().close();
+            }
+            closeReprossibleSession();
+            closeImageReaders();
+        }
+    }
+
+    /**
+     * Test burst of reprocess capture requests.
+     */
+    private void testReprocessBurst(String cameraId, Size inputSize, int inputFormat,
+            Size reprocessOutputSize, int reprocessOutputFormat, Size previewSize,
+            int numBurst) throws Exception {
+        if (VERBOSE) {
+            Log.v(TAG, "testReprocessBurst: cameraId: " + cameraId + " inputSize: " +
+                    inputSize + " inputFormat: " + inputFormat + " reprocessOutputSize: " +
+                    reprocessOutputSize + " reprocessOutputFormat: " + reprocessOutputFormat +
+                    " previewSize: " + previewSize + " numBurst: " + numBurst);
+        }
+
+        boolean enablePreview = (previewSize != null);
+        // Start empty so the finally block can iterate safely even if capture never ran.
+        ImageResultHolder[] imageResultHolders = new ImageResultHolder[0];
+
+        try {
+            if (enablePreview) {
+                updatePreviewSurface(previewSize);
+            } else {
+                mPreviewSurface = null;
+            }
+
+            setupImageReaders(inputSize, inputFormat, reprocessOutputSize, reprocessOutputFormat,
+                    numBurst);
+            setupReprocessableSession(mPreviewSurface, numBurst);
+
+            if (enablePreview) {
+                startPreview(mPreviewSurface);
+            }
+
+            imageResultHolders = doReprocessBurstCapture(numBurst);
+            for (ImageResultHolder holder : imageResultHolders) {
+                Image reprocessedImage = holder.getImage();
+                TotalCaptureResult result = holder.getTotalCaptureResult();
+
+                // Reprocessed output must match the requested format/size and carry the
+                // sensor timestamp from its capture result.
+                mCollector.expectImageProperties("testReprocessBurst", reprocessedImage,
+                        reprocessOutputFormat, reprocessOutputSize,
+                        result.get(CaptureResult.SENSOR_TIMESTAMP));
+
+                if (DEBUG) {
+                    Log.d(TAG, String.format("camera %s in %dx%d %d out %dx%d %d",
+                            cameraId, inputSize.getWidth(), inputSize.getHeight(), inputFormat,
+                            reprocessOutputSize.getWidth(), reprocessOutputSize.getHeight(),
+                            reprocessOutputFormat));
+                    dumpImage(reprocessedImage,
+                            "/testReprocessBurst_camera" + cameraId + "_" + mDumpFrameCount);
+                    mDumpFrameCount++;
+                }
+            }
+        } finally {
+            // Close every returned image before tearing down the session and readers.
+            for (ImageResultHolder holder : imageResultHolders) {
+                holder.getImage().close();
+            }
+            closeReprossibleSession();
+            closeImageReaders();
+        }
+    }
+
+    /**
+     * Test a sequences of reprocess capture requests.
+     */
+    private void testReprocess(String cameraId, Size inputSize, int inputFormat,
+            Size reprocessOutputSize, int reprocessOutputFormat, Size previewSize,
+            int numReprocessCaptures) throws Exception {
+        if (VERBOSE) {
+            Log.v(TAG, "testReprocess: cameraId: " + cameraId + " inputSize: " +
+                    inputSize + " inputFormat: " + inputFormat + " reprocessOutputSize: " +
+                    reprocessOutputSize + " reprocessOutputFormat: " + reprocessOutputFormat +
+                    " previewSize: " + previewSize);
+        }
+
+        boolean enablePreview = (previewSize != null);
+
+        try {
+            if (enablePreview) {
+                updatePreviewSurface(previewSize);
+            } else {
+                mPreviewSurface = null;
+            }
+
+            // Single-image capacity: each iteration does one capture and closes its image.
+            setupImageReaders(inputSize, inputFormat, reprocessOutputSize, reprocessOutputFormat,
+                    /*maxImages*/1);
+            setupReprocessableSession(mPreviewSurface, /*numImageWriterImages*/1);
+
+            if (enablePreview) {
+                startPreview(mPreviewSurface);
+            }
+
+            for (int i = 0; i < numReprocessCaptures; i++) {
+                ImageResultHolder imageResultHolder = null;
+
+                try {
+                    imageResultHolder = doReprocessCapture();
+                    Image reprocessedImage = imageResultHolder.getImage();
+                    TotalCaptureResult result = imageResultHolder.getTotalCaptureResult();
+
+                    // Reprocessed output must match the requested format/size and carry the
+                    // sensor timestamp from its capture result.
+                    mCollector.expectImageProperties("testReprocess", reprocessedImage,
+                            reprocessOutputFormat, reprocessOutputSize,
+                            result.get(CaptureResult.SENSOR_TIMESTAMP));
+
+                    if (DEBUG) {
+                        Log.d(TAG, String.format("camera %s in %dx%d %d out %dx%d %d",
+                                cameraId, inputSize.getWidth(), inputSize.getHeight(), inputFormat,
+                                reprocessOutputSize.getWidth(), reprocessOutputSize.getHeight(),
+                                reprocessOutputFormat));
+
+                        dumpImage(reprocessedImage,
+                                "/testReprocess_camera" + cameraId + "_" + mDumpFrameCount);
+                        mDumpFrameCount++;
+                    }
+                } finally {
+                    // Close the image per-iteration so the single-slot reader never stalls.
+                    if (imageResultHolder != null) {
+                        imageResultHolder.getImage().close();
+                    }
+                }
+            }
+        } finally {
+            closeReprossibleSession();
+            closeImageReaders();
+        }
+    }
+
+    /**
+     * Test aborting a burst reprocess capture and multiple single reprocess captures.
+     */
+    private void testReprocessAbort(String cameraId, Size inputSize, int inputFormat,
+            Size reprocessOutputSize, int reprocessOutputFormat) throws Exception {
+        if (VERBOSE) {
+            Log.v(TAG, "testReprocessAbort: cameraId: " + cameraId + " inputSize: " +
+                    inputSize + " inputFormat: " + inputFormat + " reprocessOutputSize: " +
+                    reprocessOutputSize + " reprocessOutputFormat: " + reprocessOutputFormat);
+        }
+
+        try {
+            setupImageReaders(inputSize, inputFormat, reprocessOutputSize, reprocessOutputFormat,
+                    NUM_REPROCESS_CAPTURES);
+            setupReprocessableSession(/*previewSurface*/null, NUM_REPROCESS_CAPTURES);
+
+            // Test two cases: submitting reprocess requests one by one and in a burst.
+            boolean submitInBursts[] = {false, true};
+            for (boolean submitInBurst : submitInBursts) {
+                // Prepare reprocess capture requests.
+                ArrayList<CaptureRequest> reprocessRequests =
+                        new ArrayList<>(NUM_REPROCESS_CAPTURES);
+
+                for (int i = 0; i < NUM_REPROCESS_CAPTURES; i++) {
+                    // Run a regular capture to produce the reprocess input, then queue the
+                    // captured image into the ImageWriter as the input buffer.
+                    TotalCaptureResult result = submitCaptureRequest(mFirstImageReader.getSurface(),
+                            /*inputResult*/null);
+
+                    mImageWriter.queueInputImage(
+                            mFirstImageReaderListener.getImage(CAPTURE_TIMEOUT_MS));
+                    CaptureRequest.Builder builder = mCamera.createReprocessCaptureRequest(result);
+                    builder.addTarget(getReprocessOutputImageReader().getSurface());
+                    reprocessRequests.add(builder.build());
+                }
+
+                SimpleCaptureCallback captureCallback = new SimpleCaptureCallback();
+
+                // Submit reprocess capture requests.
+                if (submitInBurst) {
+                    mSession.captureBurst(reprocessRequests, captureCallback, mHandler);
+                } else {
+                    for (CaptureRequest request : reprocessRequests) {
+                        mSession.capture(request, captureCallback, mHandler);
+                    }
+                }
+
+                // Abort after getting the first result
+                TotalCaptureResult reprocessResult =
+                        captureCallback.getTotalCaptureResultForRequest(reprocessRequests.get(0),
+                        CAPTURE_TIMEOUT_FRAMES);
+                mSession.abortCaptures();
+
+                // Wait until the session is ready again.
+                mSessionListener.getStateWaiter().waitForState(
+                        BlockingSessionCallback.SESSION_READY, SESSION_CLOSE_TIMEOUT_MS);
+
+                // Gather all failed requests.
+                // At most NUM_REPROCESS_CAPTURES - 1 can fail: the first already succeeded.
+                ArrayList<CaptureFailure> failures =
+                        captureCallback.getCaptureFailures(NUM_REPROCESS_CAPTURES - 1);
+                ArrayList<CaptureRequest> failedRequests = new ArrayList<>();
+                for (CaptureFailure failure : failures) {
+                    failedRequests.add(failure.getRequest());
+                }
+
+                // Each request that didn't fail must have a valid result.
+                for (int i = 1; i < reprocessRequests.size(); i++) {
+                    CaptureRequest request = reprocessRequests.get(i);
+                    if (!failedRequests.contains(request)) {
+                        captureCallback.getTotalCaptureResultForRequest(request,
+                                CAPTURE_TIMEOUT_FRAMES);
+                    }
+                }
+
+                // Drain the image reader listeners.
+                mFirstImageReaderListener.drain();
+                if (!mShareOneImageReader) {
+                    mSecondImageReaderListener.drain();
+                }
+
+                // Make sure all input surfaces are released.
+                for (int i = 0; i < NUM_REPROCESS_CAPTURES; i++) {
+                    mImageWriterListener.waitForImageReleased(CAPTURE_TIMEOUT_MS);
+                }
+            }
+        } finally {
+            closeReprossibleSession();
+            closeImageReaders();
+        }
+    }
+
+    /**
+     * Test timestamps for reprocess requests. Reprocess request's shutter timestamp, result's
+     * sensor timestamp, and output image's timestamp should match the reprocess input's timestamp.
+     */
+    private void testReprocessTimestamps(String cameraId, Size inputSize, int inputFormat,
+            Size reprocessOutputSize, int reprocessOutputFormat) throws Exception {
+        if (VERBOSE) {
+            Log.v(TAG, "testReprocessTimestamps: cameraId: " + cameraId + " inputSize: " +
+                    inputSize + " inputFormat: " + inputFormat + " reprocessOutputSize: " +
+                    reprocessOutputSize + " reprocessOutputFormat: " + reprocessOutputFormat);
+        }
+
+        try {
+            setupImageReaders(inputSize, inputFormat, reprocessOutputSize, reprocessOutputFormat,
+                    NUM_REPROCESS_CAPTURES);
+            setupReprocessableSession(/*previewSurface*/null, NUM_REPROCESS_CAPTURES);
+
+            // Prepare reprocess capture requests.
+            ArrayList<CaptureRequest> reprocessRequests = new ArrayList<>(NUM_REPROCESS_CAPTURES);
+            ArrayList<Long> expectedTimestamps = new ArrayList<>(NUM_REPROCESS_CAPTURES);
+
+            for (int i = 0; i < NUM_REPROCESS_CAPTURES; i++) {
+                // Regular capture produces the input image; queue it for reprocessing.
+                TotalCaptureResult result = submitCaptureRequest(mFirstImageReader.getSurface(),
+                        /*inputResult*/null);
+
+                mImageWriter.queueInputImage(
+                        mFirstImageReaderListener.getImage(CAPTURE_TIMEOUT_MS));
+                CaptureRequest.Builder builder = mCamera.createReprocessCaptureRequest(result);
+                builder.addTarget(getReprocessOutputImageReader().getSurface());
+                reprocessRequests.add(builder.build());
+                // Reprocess result's timestamp should match input image's timestamp.
+                expectedTimestamps.add(result.get(CaptureResult.SENSOR_TIMESTAMP));
+            }
+
+            // Submit reprocess requests.
+            SimpleCaptureCallback captureCallback = new SimpleCaptureCallback();
+            mSession.captureBurst(reprocessRequests, captureCallback, mHandler);
+
+            // Verify we get the expected timestamps.
+            // First check the shutter (capture start) timestamps...
+            for (int i = 0; i < reprocessRequests.size(); i++) {
+                captureCallback.waitForCaptureStart(reprocessRequests.get(i),
+                        expectedTimestamps.get(i), CAPTURE_TIMEOUT_FRAMES);
+            }
+
+            TotalCaptureResult[] reprocessResults =
+                    captureCallback.getTotalCaptureResultsForRequests(reprocessRequests,
+                    CAPTURE_TIMEOUT_FRAMES);
+
+            // ...then the result and output-image timestamps.
+            for (int i = 0; i < expectedTimestamps.size(); i++) {
+                // Verify the result timestamps match the input image's timestamps.
+                long expected = expectedTimestamps.get(i);
+                long timestamp = reprocessResults[i].get(CaptureResult.SENSOR_TIMESTAMP);
+                assertEquals("Reprocess result timestamp (" + timestamp + ") doesn't match input " +
+                        "image's timestamp (" + expected + ")", expected, timestamp);
+
+                // Verify the reprocess output image timestamps match the input image's timestamps.
+                Image image = getReprocessOutputImageReaderListener().getImage(CAPTURE_TIMEOUT_MS);
+                timestamp = image.getTimestamp();
+                image.close();
+
+                assertEquals("Reprocess output timestamp (" + timestamp + ") doesn't match input " +
+                        "image's timestamp (" + expected + ")", expected, timestamp);
+            }
+
+            // Make sure all input surfaces are released.
+            for (int i = 0; i < NUM_REPROCESS_CAPTURES; i++) {
+                mImageWriterListener.waitForImageReleased(CAPTURE_TIMEOUT_MS);
+            }
+        } finally {
+            closeReprossibleSession();
+            closeImageReaders();
+        }
+    }
+
+    /**
+     * Test JPEG tags for reprocess requests. Reprocess result's JPEG tags and JPEG image's tags
+     * match reprocess request's JPEG tags.
+     */
+    private void testReprocessJpegExif(String cameraId, Size inputSize, int inputFormat,
+            Size reprocessOutputSize) throws Exception {
+        if (VERBOSE) {
+            Log.v(TAG, "testReprocessJpegExif: cameraId: " + cameraId + " inputSize: " +
+                    inputSize + " inputFormat: " + inputFormat + " reprocessOutputSize: " +
+                    reprocessOutputSize);
+        }
+
+        // Use the largest supported thumbnail size for every request...
+        Size[] thumbnailSizes = mStaticInfo.getAvailableThumbnailSizesChecked();
+        Size[] testThumbnailSizes = new Size[EXIF_TEST_DATA.length];
+        Arrays.fill(testThumbnailSizes, thumbnailSizes[thumbnailSizes.length - 1]);
+        // Make sure thumbnail size (0, 0) is covered.
+        testThumbnailSizes[0] = new Size(0, 0);
+
+        try {
+            setupImageReaders(inputSize, inputFormat, reprocessOutputSize, ImageFormat.JPEG,
+                    EXIF_TEST_DATA.length);
+            setupReprocessableSession(/*previewSurface*/null, EXIF_TEST_DATA.length);
+
+            // Prepare reprocess capture requests.
+            ArrayList<CaptureRequest> reprocessRequests = new ArrayList<>(EXIF_TEST_DATA.length);
+
+            for (int i = 0; i < EXIF_TEST_DATA.length; i++) {
+                // Regular capture produces the input image; queue it for reprocessing.
+                TotalCaptureResult result = submitCaptureRequest(mFirstImageReader.getSurface(),
+                        /*inputResult*/null);
+                mImageWriter.queueInputImage(
+                        mFirstImageReaderListener.getImage(CAPTURE_TIMEOUT_MS));
+
+                CaptureRequest.Builder builder = mCamera.createReprocessCaptureRequest(result);
+                builder.addTarget(getReprocessOutputImageReader().getSurface());
+
+                // set jpeg keys
+                setJpegKeys(builder, EXIF_TEST_DATA[i], testThumbnailSizes[i], mCollector);
+                reprocessRequests.add(builder.build());
+            }
+
+            // Submit reprocess requests.
+            SimpleCaptureCallback captureCallback = new SimpleCaptureCallback();
+            mSession.captureBurst(reprocessRequests, captureCallback, mHandler);
+
+            TotalCaptureResult[] reprocessResults =
+                    captureCallback.getTotalCaptureResultsForRequests(reprocessRequests,
+                    CAPTURE_TIMEOUT_FRAMES);
+
+            for (int i = 0; i < EXIF_TEST_DATA.length; i++) {
+                // Verify output image's and result's JPEG EXIF data.
+                Image image = getReprocessOutputImageReaderListener().getImage(CAPTURE_TIMEOUT_MS);
+                verifyJpegKeys(image, reprocessResults[i], reprocessOutputSize,
+                        testThumbnailSizes[i], EXIF_TEST_DATA[i], mStaticInfo, mCollector);
+                image.close();
+
+            }
+        } finally {
+            closeReprossibleSession();
+            closeImageReaders();
+        }
+    }
+
+
+
+    /**
+     * Test the following keys in reprocess results match the keys in reprocess requests:
+     * 1. EDGE_MODE
+     * 2. NOISE_REDUCTION_MODE
+     * 3. REPROCESS_EFFECTIVE_EXPOSURE_FACTOR (only for YUV reprocess)
+     */
+    private void testReprocessRequestKeys(String cameraId, Size inputSize, int inputFormat,
+            Size reprocessOutputSize, int reprocessOutputFormat) throws Exception {
+        if (VERBOSE) {
+            Log.v(TAG, "testReprocessRequestKeys: cameraId: " + cameraId + " inputSize: " +
+                    inputSize + " inputFormat: " + inputFormat + " reprocessOutputSize: " +
+                    reprocessOutputSize + " reprocessOutputFormat: " + reprocessOutputFormat);
+        }
+
+        final Integer[] EDGE_MODES = {CaptureRequest.EDGE_MODE_FAST,
+                CaptureRequest.EDGE_MODE_HIGH_QUALITY, CaptureRequest.EDGE_MODE_OFF,
+                CaptureRequest.EDGE_MODE_ZERO_SHUTTER_LAG};
+        final Integer[] NR_MODES = {CaptureRequest.NOISE_REDUCTION_MODE_HIGH_QUALITY,
+                CaptureRequest.NOISE_REDUCTION_MODE_OFF,
+                CaptureRequest.NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG,
+                CaptureRequest.NOISE_REDUCTION_MODE_FAST};
+        // NOTE(review): the first entry is null — presumably meaning "leave the exposure
+        // factor unset in the request"; confirm against the CaptureRequest key semantics.
+        final Float[] EFFECTIVE_EXP_FACTORS = {null, 1.0f, 2.5f, 4.0f};
+        // One frame per edge mode; NR_MODES/EFFECTIVE_EXP_FACTORS are indexed in lockstep.
+        int numFrames = EDGE_MODES.length;
+
+        try {
+            setupImageReaders(inputSize, inputFormat, reprocessOutputSize, reprocessOutputFormat,
+                    numFrames);
+            setupReprocessableSession(/*previewSurface*/null, numFrames);
+
+            // Prepare reprocess capture requests.
+            ArrayList<CaptureRequest> reprocessRequests = new ArrayList<>(numFrames);
+
+            for (int i = 0; i < numFrames; i++) {
+                TotalCaptureResult result = submitCaptureRequest(mFirstImageReader.getSurface(),
+                        /*inputResult*/null);
+                mImageWriter.queueInputImage(
+                        mFirstImageReaderListener.getImage(CAPTURE_TIMEOUT_MS));
+
+                CaptureRequest.Builder builder = mCamera.createReprocessCaptureRequest(result);
+                builder.addTarget(getReprocessOutputImageReader().getSurface());
+
+                // Set reprocess request keys
+                builder.set(CaptureRequest.EDGE_MODE, EDGE_MODES[i]);
+                builder.set(CaptureRequest.NOISE_REDUCTION_MODE, NR_MODES[i]);
+                if (inputFormat == ImageFormat.YUV_420_888) {
+                    builder.set(CaptureRequest.REPROCESS_EFFECTIVE_EXPOSURE_FACTOR,
+                            EFFECTIVE_EXP_FACTORS[i]);
+                }
+                reprocessRequests.add(builder.build());
+            }
+
+            // Submit reprocess requests.
+            SimpleCaptureCallback captureCallback = new SimpleCaptureCallback();
+            mSession.captureBurst(reprocessRequests, captureCallback, mHandler);
+
+            TotalCaptureResult[] reprocessResults =
+                    captureCallback.getTotalCaptureResultsForRequests(reprocessRequests,
+                    CAPTURE_TIMEOUT_FRAMES);
+
+            for (int i = 0; i < numFrames; i++) {
+                // Verify result's keys
+                Integer resultEdgeMode = reprocessResults[i].get(CaptureResult.EDGE_MODE);
+                Integer resultNoiseReductionMode =
+                        reprocessResults[i].get(CaptureResult.NOISE_REDUCTION_MODE);
+
+                assertEquals("Reprocess result edge mode (" + resultEdgeMode +
+                        ") doesn't match requested edge mode (" + EDGE_MODES[i] + ")",
+                        resultEdgeMode, EDGE_MODES[i]);
+                assertEquals("Reprocess result noise reduction mode (" + resultNoiseReductionMode +
+                        ") doesn't match requested noise reduction mode (" +
+                        NR_MODES[i] + ")", resultNoiseReductionMode,
+                        NR_MODES[i]);
+
+                if (inputFormat == ImageFormat.YUV_420_888) {
+                    Float resultEffectiveExposureFactor = reprocessResults[i].get(
+                            CaptureResult.REPROCESS_EFFECTIVE_EXPOSURE_FACTOR);
+                    assertEquals("Reprocess effective exposure factor (" +
+                            resultEffectiveExposureFactor + ") doesn't match requested " +
+                            "effective exposure factor (" + EFFECTIVE_EXP_FACTORS[i] + ")",
+                            resultEffectiveExposureFactor, EFFECTIVE_EXP_FACTORS[i]);
+                }
+            }
+        } finally {
+            closeReprossibleSession();
+            closeImageReaders();
+        }
+    }
+
+    /**
+     * Set up two image readers: one for regular capture (used for reprocess input) and one for
+     * reprocess capture.
+     *
+     * @param maxImages capacity per stream; doubled when one reader serves both streams
+     */
+    private void setupImageReaders(Size inputSize, int inputFormat, Size reprocessOutputSize,
+            int reprocessOutputFormat, int maxImages) {
+
+        mShareOneImageReader = false;
+        // If the regular output and reprocess output have the same size and format,
+        // they can share one image reader.
+        if (inputFormat == reprocessOutputFormat &&
+                inputSize.equals(reprocessOutputSize)) {
+            // Double the capacity because one reader now holds both streams' images.
+            maxImages *= 2;
+            mShareOneImageReader = true;
+        }
+        // create an ImageReader for the regular capture
+        mFirstImageReaderListener = new SimpleImageReaderListener();
+        mFirstImageReader = makeImageReader(inputSize, inputFormat, maxImages,
+                mFirstImageReaderListener, mHandler);
+
+        if (!mShareOneImageReader) {
+            // create an ImageReader for the reprocess capture
+            mSecondImageReaderListener = new SimpleImageReaderListener();
+            mSecondImageReader = makeImageReader(reprocessOutputSize, reprocessOutputFormat,
+                    maxImages, mSecondImageReaderListener, mHandler);
+        }
+    }
+
+    /**
+     * Close two image readers.
+     *
+     * <p>mSecondImageReader may be null when the readers are shared; presumably
+     * CameraTestUtils.closeImageReader tolerates null — verify in the helper.
+     */
+    private void closeImageReaders() {
+        CameraTestUtils.closeImageReader(mFirstImageReader);
+        mFirstImageReader = null;
+        CameraTestUtils.closeImageReader(mSecondImageReader);
+        mSecondImageReader = null;
+    }
+
+    /**
+     * Get the ImageReader for reprocess output.
+     */
+    private ImageReader getReprocessOutputImageReader() {
+        if (mShareOneImageReader) {
+            return mFirstImageReader;
+        } else {
+            return mSecondImageReader;
+        }
+    }
+
+    /**
+     * Get the listener attached to the ImageReader used for reprocess output.
+     */
+    private SimpleImageReaderListener getReprocessOutputImageReaderListener() {
+        if (mShareOneImageReader) {
+            return mFirstImageReaderListener;
+        } else {
+            return mSecondImageReaderListener;
+        }
+    }
+
+    /**
+     * Set up a reprocessable session and create an ImageWriter with the session's input surface.
+     *
+     * @param previewSurface optional preview output; skipped when null
+     * @param numImageWriterImages max number of input images the ImageWriter can hold
+     */
+    private void setupReprocessableSession(Surface previewSurface, int numImageWriterImages)
+            throws Exception {
+        // create a reprocessable capture session
+        List<Surface> outSurfaces = new ArrayList<Surface>();
+        outSurfaces.add(mFirstImageReader.getSurface());
+        if (!mShareOneImageReader) {
+            outSurfaces.add(mSecondImageReader.getSurface());
+        }
+        if (previewSurface != null) {
+            outSurfaces.add(previewSurface);
+        }
+
+        // Input stream mirrors the first reader, whose surface receives the regular captures.
+        InputConfiguration inputConfig = new InputConfiguration(mFirstImageReader.getWidth(),
+                mFirstImageReader.getHeight(), mFirstImageReader.getImageFormat());
+        String inputConfigString = inputConfig.toString();
+        if (VERBOSE) {
+            Log.v(TAG, "InputConfiguration: " + inputConfigString);
+        }
+        // Sanity-check that InputConfiguration echoes back the values it was built from.
+        assertTrue(String.format("inputConfig is wrong: %dx%d format %d. Expect %dx%d format %d",
+                inputConfig.getWidth(), inputConfig.getHeight(), inputConfig.getFormat(),
+                mFirstImageReader.getWidth(), mFirstImageReader.getHeight(),
+                mFirstImageReader.getImageFormat()),
+                inputConfig.getWidth() == mFirstImageReader.getWidth() &&
+                inputConfig.getHeight() == mFirstImageReader.getHeight() &&
+                inputConfig.getFormat() == mFirstImageReader.getImageFormat());
+
+        mSessionListener = new BlockingSessionCallback();
+        mSession = configureReprocessableCameraSession(mCamera, inputConfig, outSurfaces,
+                mSessionListener, mHandler);
+
+        // create an ImageWriter
+        mInputSurface = mSession.getInputSurface();
+        mImageWriter = ImageWriter.newInstance(mInputSurface,
+                numImageWriterImages);
+
+        mImageWriterListener = new SimpleImageWriterListener(mImageWriter);
+        mImageWriter.setOnImageReleasedListener(mImageWriterListener, mHandler);
+    }
+
    /**
     * Close the reprocessable session and the ImageWriter created for its input surface.
     * NOTE(review): method name has a typo ("Reprossible"); kept as-is to avoid breaking
     * callers elsewhere in this file.
     */
    private void closeReprossibleSession() {
        // The input surface is owned by the session; just drop the reference.
        mInputSurface = null;

        if (mSession != null) {
            mSession.close();
            mSession = null;
        }

        if (mImageWriter != null) {
            mImageWriter.close();
            mImageWriter = null;
        }
    }
+
    /**
     * Do one reprocess capture and return its image/result pair.
     */
    private ImageResultHolder doReprocessCapture() throws Exception {
        return doReprocessBurstCapture(/*numBurst*/1)[0];
    }
+
+ /**
+ * Do a burst of reprocess captures.
+ */
+ private ImageResultHolder[] doReprocessBurstCapture(int numBurst) throws Exception {
+ boolean[] isReprocessCaptures = new boolean[numBurst];
+ for (int i = 0; i < numBurst; i++) {
+ isReprocessCaptures[i] = true;
+ }
+
+ return doMixedReprocessBurstCapture(isReprocessCaptures);
+ }
+
    /**
     * Do a burst of captures that are mixed with regular and reprocess captures.
     *
     * @param isReprocessCaptures An array whose elements indicate whether it's a reprocess capture
     *                            request. If the element is true, it represents a reprocess capture
     *                            request. If the element is false, it represents a regular capture
     *                            request. The size of the array is the number of capture requests
     *                            in the burst.
     */
    private ImageResultHolder[] doMixedReprocessBurstCapture(boolean[] isReprocessCaptures)
            throws Exception {
        if (isReprocessCaptures == null || isReprocessCaptures.length <= 0) {
            throw new IllegalArgumentException("isReprocessCaptures must have at least 1 capture.");
        }

        boolean hasReprocessRequest = false;
        boolean hasRegularRequest = false;

        TotalCaptureResult[] results = new TotalCaptureResult[isReprocessCaptures.length];
        for (int i = 0; i < isReprocessCaptures.length; i++) {
            // submit a capture and get the result if this entry is a reprocess capture.
            if (isReprocessCaptures[i]) {
                // A reprocess request needs an input: capture one regular frame first and
                // queue its image into the ImageWriter as the reprocess input.
                results[i] = submitCaptureRequest(mFirstImageReader.getSurface(),
                        /*inputResult*/null);
                mImageWriter.queueInputImage(
                        mFirstImageReaderListener.getImage(CAPTURE_TIMEOUT_MS));
                hasReprocessRequest = true;
            } else {
                hasRegularRequest = true;
            }
        }

        Surface[] outputSurfaces = new Surface[isReprocessCaptures.length];
        for (int i = 0; i < isReprocessCaptures.length; i++) {
            outputSurfaces[i] = getReprocessOutputImageReader().getSurface();
        }

        TotalCaptureResult[] finalResults = submitMixedCaptureBurstRequest(outputSurfaces, results);

        ImageResultHolder[] holders = new ImageResultHolder[isReprocessCaptures.length];
        for (int i = 0; i < isReprocessCaptures.length; i++) {
            Image image = getReprocessOutputImageReaderListener().getImage(CAPTURE_TIMEOUT_MS);
            if (hasReprocessRequest && hasRegularRequest) {
                // If there are mixed requests, images and results may not be in the same order.
                // Match each output image to its result by the sensor timestamp, consuming
                // each result at most once (nulling it after a match).
                for (int j = 0; j < finalResults.length; j++) {
                    if (finalResults[j] != null &&
                            finalResults[j].get(CaptureResult.SENSOR_TIMESTAMP) ==
                            image.getTimestamp()) {
                        holders[i] = new ImageResultHolder(image, finalResults[j]);
                        finalResults[j] = null;
                        break;
                    }
                }

                assertNotNull("Cannot find a result matching output image's timestamp: " +
                        image.getTimestamp(), holders[i]);
            } else {
                // If no mixed requests, images and results should be in the same order.
                holders[i] = new ImageResultHolder(image, finalResults[i]);
            }
        }

        return holders;
    }
+
+ /**
+ * Start preview without a listener.
+ */
+ private void startPreview(Surface previewSurface) throws Exception {
+ CaptureRequest.Builder builder = mCamera.createCaptureRequest(ZSL_TEMPLATE);
+ builder.addTarget(previewSurface);
+ mSession.setRepeatingRequest(builder.build(), null, mHandler);
+ }
+
+ /**
+ * Issue a capture request and return the result. If inputResult is null, it's a regular
+ * request. Otherwise, it's a reprocess request.
+ */
+ private TotalCaptureResult submitCaptureRequest(Surface output,
+ TotalCaptureResult inputResult) throws Exception {
+ Surface[] outputs = new Surface[1];
+ outputs[0] = output;
+ TotalCaptureResult[] inputResults = new TotalCaptureResult[1];
+ inputResults[0] = inputResult;
+
+ return submitMixedCaptureBurstRequest(outputs, inputResults)[0];
+ }
+
    /**
     * Submit a burst request mixed with regular and reprocess requests.
     *
     * @param outputs An array of output surfaces. One output surface will be used in one request
     *                so the length of the array is the number of requests in a burst request.
     * @param inputResults An array of input results. If it's null, all requests are regular
     *                     requests. If an element is null, that element represents a regular
     *                     request. If an element is not null, that element represents a reprocess
     *                     request.
     */
    private TotalCaptureResult[] submitMixedCaptureBurstRequest(Surface[] outputs,
            TotalCaptureResult[] inputResults) throws Exception {
        if (outputs == null || outputs.length <= 0) {
            throw new IllegalArgumentException("outputs must have at least 1 surface");
        } else if (inputResults != null && inputResults.length != outputs.length) {
            throw new IllegalArgumentException("The lengths of outputs and inputResults " +
                    "don't match");
        }

        int numReprocessCaptures = 0;
        SimpleCaptureCallback captureCallback = new SimpleCaptureCallback();
        ArrayList<CaptureRequest> captureRequests = new ArrayList<>(outputs.length);

        // Prepare a list of capture requests. Whether it's a regular or reprocess capture request
        // is based on inputResults array.
        for (int i = 0; i < outputs.length; i++) {
            CaptureRequest.Builder builder;
            boolean isReprocess = (inputResults != null && inputResults[i] != null);
            if (isReprocess) {
                builder = mCamera.createReprocessCaptureRequest(inputResults[i]);
                numReprocessCaptures++;
            } else {
                builder = mCamera.createCaptureRequest(CAPTURE_TEMPLATE);
            }
            builder.addTarget(outputs[i]);
            CaptureRequest request = builder.build();
            // Sanity-check that the built request matches the intended kind.
            assertTrue("Capture request reprocess type " + request.isReprocess() + " is wrong.",
                    request.isReprocess() == isReprocess);

            captureRequests.add(request);
        }

        // Submit as a single capture or a burst depending on the request count.
        if (captureRequests.size() == 1) {
            mSession.capture(captureRequests.get(0), captureCallback, mHandler);
        } else {
            mSession.captureBurst(captureRequests, captureCallback, mHandler);
        }

        TotalCaptureResult[] results;
        if (numReprocessCaptures == 0 || numReprocessCaptures == outputs.length) {
            results = new TotalCaptureResult[outputs.length];
            // If the requests are not mixed, they should come in order.
            for (int i = 0; i < results.length; i++) {
                results[i] = captureCallback.getTotalCaptureResultForRequest(
                        captureRequests.get(i), CAPTURE_TIMEOUT_FRAMES);
            }
        } else {
            // If the requests are mixed, they may not come in order.
            results = captureCallback.getTotalCaptureResultsForRequests(
                    captureRequests, CAPTURE_TIMEOUT_FRAMES * captureRequests.size());
        }

        // make sure all input surfaces are released.
        for (int i = 0; i < numReprocessCaptures; i++) {
            mImageWriterListener.waitForImageReleased(CAPTURE_TIMEOUT_MS);
        }

        return results;
    }
+
    /**
     * Get the largest available size for the given format and stream direction.
     */
    private Size getMaxSize(int format, StaticMetadata.StreamDirection direction) {
        Size[] sizes = mStaticInfo.getAvailableSizesForFormatChecked(format, direction);
        // Descending sort, so index 0 is the maximum size.
        return getAscendingOrderSizes(Arrays.asList(sizes), /*ascending*/false).get(0);
    }
+
    /**
     * Return whether YUV_420_888 reprocess is supported for the given camera.
     */
    private boolean isYuvReprocessSupported(String cameraId) throws Exception {
        return isReprocessSupported(cameraId, ImageFormat.YUV_420_888);
    }
+
    /**
     * Return whether PRIVATE (opaque) reprocess is supported for the given camera.
     */
    private boolean isOpaqueReprocessSupported(String cameraId) throws Exception {
        return isReprocessSupported(cameraId, ImageFormat.PRIVATE);
    }
+
+ private void dumpImage(Image image, String name) {
+ String filename = DEBUG_FILE_NAME_BASE + name;
+ switch(image.getFormat()) {
+ case ImageFormat.JPEG:
+ filename += ".jpg";
+ break;
+ case ImageFormat.NV16:
+ case ImageFormat.NV21:
+ case ImageFormat.YUV_420_888:
+ filename += ".yuv";
+ break;
+ default:
+ filename += "." + image.getFormat();
+ break;
+ }
+
+ Log.d(TAG, "dumping an image to " + filename);
+ dumpFile(filename , getDataFromImage(image));
+ }
+
    /**
     * A class that holds an Image and a TotalCaptureResult.
     * NOTE(review): the consumer appears responsible for closing the held Image;
     * this class never closes it -- confirm against callers.
     */
    private static class ImageResultHolder {
        private final Image mImage;
        private final TotalCaptureResult mResult;

        public ImageResultHolder(Image image, TotalCaptureResult result) {
            mImage = image;
            mResult = result;
        }

        /** Return the held image. */
        public Image getImage() {
            return mImage;
        }

        /** Return the held capture result. */
        public TotalCaptureResult getTotalCaptureResult() {
            return mResult;
        }
    }
+}
diff --git a/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/stress/Camera2StillCaptureTest.java b/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/stress/Camera2StillCaptureTest.java
new file mode 100644
index 0000000..16dfb2b
--- /dev/null
+++ b/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/stress/Camera2StillCaptureTest.java
@@ -0,0 +1,628 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.mediaframeworktest.stress;
+
+import com.android.ex.camera2.blocking.BlockingSessionCallback;
+import com.android.ex.camera2.exceptions.TimeoutRuntimeException;
+import com.android.mediaframeworktest.Camera2SurfaceViewTestCase;
+import com.android.mediaframeworktest.helpers.Camera2Focuser;
+import com.android.mediaframeworktest.helpers.CameraTestUtils;
+import com.android.mediaframeworktest.helpers.CameraTestUtils.SimpleCaptureCallback;
+
+import android.graphics.ImageFormat;
+import android.graphics.Point;
+import android.hardware.camera2.CameraCharacteristics;
+import android.hardware.camera2.CameraDevice;
+import android.hardware.camera2.CaptureRequest;
+import android.hardware.camera2.CaptureResult;
+import android.hardware.camera2.DngCreator;
+import android.hardware.camera2.params.MeteringRectangle;
+import android.media.Image;
+import android.media.ImageReader;
+import android.os.ConditionVariable;
+import android.util.Log;
+import android.util.Pair;
+import android.util.Rational;
+import android.util.Size;
+import android.view.Surface;
+
+import java.io.ByteArrayOutputStream;
+import java.util.ArrayList;
+import java.util.List;
+
+import static com.android.mediaframeworktest.helpers.CameraTestUtils.CAPTURE_IMAGE_TIMEOUT_MS;
+import static com.android.mediaframeworktest.helpers.CameraTestUtils.MAX_READER_IMAGES;
+import static com.android.mediaframeworktest.helpers.CameraTestUtils.SimpleImageReaderListener;
+import static com.android.mediaframeworktest.helpers.CameraTestUtils.basicValidateJpegImage;
+import static com.android.mediaframeworktest.helpers.CameraTestUtils.configureCameraSession;
+import static com.android.mediaframeworktest.helpers.CameraTestUtils.dumpFile;
+import static com.android.mediaframeworktest.helpers.CameraTestUtils.getDataFromImage;
+import static com.android.mediaframeworktest.helpers.CameraTestUtils.getValueNotNull;
+import static com.android.mediaframeworktest.helpers.CameraTestUtils.makeImageReader;
+
+/**
+ * <p>Tests for still capture API.</p>
+ *
+ * adb shell am instrument \
+ * -e class com.android.mediaframeworktest.stress.Camera2StillCaptureTest#testTakePicture \
+ * -e repeat 200 \
+ * -e waitIntervalMs 1000 \
+ * -e resultToFile false \
+ * -r -w com.android.mediaframeworktest/.Camera2InstrumentationTestRunner
+ */
+public class Camera2StillCaptureTest extends Camera2SurfaceViewTestCase {
+ private static final String TAG = "StillCaptureTest";
+ private static final boolean VERBOSE = Log.isLoggable(TAG, Log.VERBOSE);
+ private static final boolean DEBUG = Log.isLoggable(TAG, Log.DEBUG);
+ // 60 second to accommodate the possible long exposure time.
+ private static final int MAX_REGIONS_AE_INDEX = 0;
+ private static final int MAX_REGIONS_AWB_INDEX = 1;
+ private static final int MAX_REGIONS_AF_INDEX = 2;
+ private static final int WAIT_FOR_FOCUS_DONE_TIMEOUT_MS = 6000;
+ private static final double AE_COMPENSATION_ERROR_TOLERANCE = 0.2;
+ // 5 percent error margin for resulting metering regions
+ private static final float METERING_REGION_ERROR_PERCENT_DELTA = 0.05f;
+
    @Override
    protected void setUp() throws Exception {
        // No extra fixture beyond the base surface-view test case.
        super.setUp();
    }
+
    @Override
    protected void tearDown() throws Exception {
        // No extra cleanup beyond the base surface-view test case.
        super.tearDown();
    }
+
+ /**
+ * Test normal still capture sequence.
+ * <p>
+ * Preview and and jpeg output streams are configured. Max still capture
+ * size is used for jpeg capture. The sequence of still capture being test
+ * is: start preview, auto focus, precapture metering (if AE is not
+ * converged), then capture jpeg. The AWB and AE are in auto modes. AF mode
+ * is CONTINUOUS_PICTURE.
+ * </p>
+ */
+ public void testTakePicture() throws Exception{
+ for (String id : mCameraIds) {
+ try {
+ Log.i(TAG, "Testing basic take picture for Camera " + id);
+ openDevice(id);
+ if (!mStaticInfo.isColorOutputSupported()) {
+ Log.i(TAG, "Camera " + id + " does not support color outputs, skipping");
+ continue;
+ }
+
+ // Test iteration starts...
+ for (int repeat = 0; repeat < getRepeatCount(); ++repeat) {
+ Log.v(TAG, String.format("Taking pictures: %d/%d", repeat + 1,
+ getRepeatCount()));
+ takePictureTestByCamera(/*aeRegions*/null, /*awbRegions*/null,
+ /*afRegions*/null);
+ getResultPrinter().printStatus(getRepeatCount(), repeat + 1, id);
+ Thread.sleep(getTestWaitIntervalMs());
+ }
+ } finally {
+ closeDevice();
+ closeImageReader();
+ }
+ }
+ }
+
+ /**
+ * Test the full raw capture use case.
+ *
+ * This includes:
+ * - Configuring the camera with a preview, jpeg, and raw output stream.
+ * - Running preview until AE/AF can settle.
+ * - Capturing with a request targeting all three output streams.
+ */
+ public void testFullRawCapture() throws Exception {
+ for (int i = 0; i < mCameraIds.length; i++) {
+ try {
+ Log.i(TAG, "Testing raw capture for Camera " + mCameraIds[i]);
+ openDevice(mCameraIds[i]);
+ if (!mStaticInfo.isCapabilitySupported(
+ CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_RAW)) {
+ Log.i(TAG, "RAW capability is not supported in camera " + mCameraIds[i] +
+ ". Skip the test.");
+ continue;
+ }
+
+ // Test iteration starts...
+ for (int repeat = 0; repeat < getRepeatCount(); ++repeat) {
+ Log.v(TAG, String.format("Taking full RAW pictures: %d/%d", repeat + 1,
+ getRepeatCount()));
+ fullRawCaptureTestByCamera();
+ getResultPrinter().printStatus(getRepeatCount(), repeat + 1, mCameraIds[i]);
+ Thread.sleep(getTestWaitIntervalMs());
+ }
+ } finally {
+ closeDevice();
+ closeImageReader();
+ }
+ }
+ }
+
    /**
     * Take a picture for a given set of 3A regions for a particular camera.
     * <p>
     * Before taking a still capture, it triggers an auto focus and locks it first,
     * then waits for AWB to converge and locks it, then triggers a precapture
     * metering sequence and waits for AE to converge. After the capture is received,
     * the capture result and image are validated.
     * </p>
     *
     * @param aeRegions AE regions for this capture
     * @param awbRegions AWB regions for this capture
     * @param afRegions AF regions for this capture
     */
    private void takePictureTestByCamera(
            MeteringRectangle[] aeRegions, MeteringRectangle[] awbRegions,
            MeteringRectangle[] afRegions) throws Exception {
        // Delegate to the full overload without an AE precapture trigger cancel.
        takePictureTestByCamera(aeRegions, awbRegions, afRegions,
                /*addAeTriggerCancel*/false);
    }
+
    /**
     * Take a picture for a given set of 3A regions for a particular camera.
     * <p>
     * Before taking a still capture, it triggers an auto focus and locks it first,
     * then waits for AWB to converge and locks it, then triggers a precapture
     * metering sequence and waits for AE to converge. After the capture is received,
     * the capture result and image are validated. If {@code addAeTriggerCancel} is true,
     * a precapture trigger cancel will be inserted between two adjacent triggers, which
     * should effectively cancel the first trigger.
     * </p>
     *
     * @param aeRegions AE regions for this capture
     * @param awbRegions AWB regions for this capture
     * @param afRegions AF regions for this capture
     * @param addAeTriggerCancel If an AE precapture trigger cancel is sent after the trigger.
     */
    private void takePictureTestByCamera(
            MeteringRectangle[] aeRegions, MeteringRectangle[] awbRegions,
            MeteringRectangle[] afRegions, boolean addAeTriggerCancel) throws Exception {

        boolean hasFocuser = mStaticInfo.hasFocuser();

        Size maxStillSz = mOrderedStillSizes.get(0);
        Size maxPreviewSz = mOrderedPreviewSizes.get(0);
        CaptureResult result;
        SimpleCaptureCallback resultListener = new SimpleCaptureCallback();
        SimpleImageReaderListener imageListener = new SimpleImageReaderListener();
        CaptureRequest.Builder previewRequest =
                mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
        CaptureRequest.Builder stillRequest =
                mCamera.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE);
        prepareStillCaptureAndStartPreview(previewRequest, stillRequest, maxPreviewSz,
                maxStillSz, resultListener, imageListener);

        // Set AE mode to ON_AUTO_FLASH if flash is available.
        if (mStaticInfo.hasFlash()) {
            previewRequest.set(CaptureRequest.CONTROL_AE_MODE,
                    CaptureRequest.CONTROL_AE_MODE_ON_AUTO_FLASH);
            stillRequest.set(CaptureRequest.CONTROL_AE_MODE,
                    CaptureRequest.CONTROL_AE_MODE_ON_AUTO_FLASH);
        }

        Camera2Focuser focuser = null;
        /**
         * Step 1: trigger an auto focus run, and wait for AF locked.
         */
        boolean canSetAfRegion = hasFocuser && (afRegions != null) &&
                isRegionsSupportedFor3A(MAX_REGIONS_AF_INDEX);
        if (hasFocuser) {
            SimpleAutoFocusListener afListener = new SimpleAutoFocusListener();
            focuser = new Camera2Focuser(mCamera, mSession, mPreviewSurface, afListener,
                    mStaticInfo.getCharacteristics(), mHandler);
            if (canSetAfRegion) {
                stillRequest.set(CaptureRequest.CONTROL_AF_REGIONS, afRegions);
            }
            focuser.startAutoFocus(afRegions);
            afListener.waitForAutoFocusDone(WAIT_FOR_FOCUS_DONE_TIMEOUT_MS);
        }

        /**
         * Have to get the current AF mode to be used for other 3A repeating
         * request, otherwise, the new AF mode in AE/AWB request could be
         * different with existing repeating requests being sent by focuser,
         * then it could make AF unlocked too early. Besides that, for still
         * capture, AF mode must not be different with the one in current
         * repeating request, otherwise, the still capture itself would trigger
         * an AF mode change, and the AF lock would be lost for this capture.
         */
        int currentAfMode = CaptureRequest.CONTROL_AF_MODE_OFF;
        if (hasFocuser) {
            currentAfMode = focuser.getCurrentAfMode();
        }
        previewRequest.set(CaptureRequest.CONTROL_AF_MODE, currentAfMode);
        stillRequest.set(CaptureRequest.CONTROL_AF_MODE, currentAfMode);

        /**
         * Step 2: AF is already locked, wait for AWB converged, then lock it.
         */
        resultListener = new SimpleCaptureCallback();
        boolean canSetAwbRegion =
                (awbRegions != null) && isRegionsSupportedFor3A(MAX_REGIONS_AWB_INDEX);
        if (canSetAwbRegion) {
            previewRequest.set(CaptureRequest.CONTROL_AWB_REGIONS, awbRegions);
            stillRequest.set(CaptureRequest.CONTROL_AWB_REGIONS, awbRegions);
        }
        mSession.setRepeatingRequest(previewRequest.build(), resultListener, mHandler);
        if (mStaticInfo.isHardwareLevelLimitedOrBetter()) {
            waitForResultValue(resultListener, CaptureResult.CONTROL_AWB_STATE,
                    CaptureResult.CONTROL_AWB_STATE_CONVERGED, NUM_RESULTS_WAIT_TIMEOUT);
        } else {
            // LEGACY Devices don't have the AWB_STATE reported in results, so just wait
            waitForSettingsApplied(resultListener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY);
        }
        boolean canSetAwbLock = mStaticInfo.isAwbLockSupported();
        if (canSetAwbLock) {
            previewRequest.set(CaptureRequest.CONTROL_AWB_LOCK, true);
        }
        mSession.setRepeatingRequest(previewRequest.build(), resultListener, mHandler);
        // Validate the next result immediately for region and mode.
        result = resultListener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS);
        mCollector.expectEquals("AWB mode in result and request should be same",
                previewRequest.get(CaptureRequest.CONTROL_AWB_MODE),
                result.get(CaptureResult.CONTROL_AWB_MODE));
        if (canSetAwbRegion) {
            MeteringRectangle[] resultAwbRegions =
                    getValueNotNull(result, CaptureResult.CONTROL_AWB_REGIONS);
            mCollector.expectEquals("AWB regions in result and request should be same",
                    awbRegions, resultAwbRegions);
        }

        /**
         * Step 3: trigger an AE precapture metering sequence and wait for AE converged.
         */
        resultListener = new SimpleCaptureCallback();
        boolean canSetAeRegion =
                (aeRegions != null) && isRegionsSupportedFor3A(MAX_REGIONS_AE_INDEX);
        if (canSetAeRegion) {
            previewRequest.set(CaptureRequest.CONTROL_AE_REGIONS, aeRegions);
            stillRequest.set(CaptureRequest.CONTROL_AE_REGIONS, aeRegions);
        }
        mSession.setRepeatingRequest(previewRequest.build(), resultListener, mHandler);
        previewRequest.set(CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER,
                CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER_START);
        mSession.capture(previewRequest.build(), resultListener, mHandler);
        if (addAeTriggerCancel) {
            // Cancel the current precapture trigger, then send another trigger.
            // The camera device should behave as if the first trigger is not sent.
            // Wait one request to make the trigger start doing something before cancel.
            waitForNumResults(resultListener, /*numResultsWait*/ 1);
            previewRequest.set(CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER,
                    CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER_CANCEL);
            mSession.capture(previewRequest.build(), resultListener, mHandler);
            waitForResultValue(resultListener, CaptureResult.CONTROL_AE_PRECAPTURE_TRIGGER,
                    CaptureResult.CONTROL_AE_PRECAPTURE_TRIGGER_CANCEL,
                    NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY);
            // Issue another trigger
            previewRequest.set(CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER,
                    CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER_START);
            mSession.capture(previewRequest.build(), resultListener, mHandler);
        }
        waitForAeStable(resultListener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY);

        // Validate the next result immediately for region and mode.
        result = resultListener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS);
        mCollector.expectEquals("AE mode in result and request should be same",
                previewRequest.get(CaptureRequest.CONTROL_AE_MODE),
                result.get(CaptureResult.CONTROL_AE_MODE));
        if (canSetAeRegion) {
            MeteringRectangle[] resultAeRegions =
                    getValueNotNull(result, CaptureResult.CONTROL_AE_REGIONS);

            mCollector.expectMeteringRegionsAreSimilar(
                    "AE regions in result and request should be similar",
                    aeRegions,
                    resultAeRegions,
                    METERING_REGION_ERROR_PERCENT_DELTA);
        }

        /**
         * Step 4: take a picture when all 3A are in good state.
         */
        resultListener = new SimpleCaptureCallback();
        CaptureRequest request = stillRequest.build();
        mSession.capture(request, resultListener, mHandler);
        // Validate the next result immediately for region and mode.
        result = resultListener.getCaptureResultForRequest(request, WAIT_FOR_RESULT_TIMEOUT_MS);
        mCollector.expectEquals("AF mode in result and request should be same",
                stillRequest.get(CaptureRequest.CONTROL_AF_MODE),
                result.get(CaptureResult.CONTROL_AF_MODE));
        if (canSetAfRegion) {
            MeteringRectangle[] resultAfRegions =
                    getValueNotNull(result, CaptureResult.CONTROL_AF_REGIONS);
            mCollector.expectMeteringRegionsAreSimilar(
                    "AF regions in result and request should be similar",
                    afRegions,
                    resultAfRegions,
                    METERING_REGION_ERROR_PERCENT_DELTA);
        }

        if (hasFocuser) {
            // Unlock auto focus.
            focuser.cancelAutoFocus();
        }

        // validate image
        Image image = imageListener.getImage(CAPTURE_IMAGE_TIMEOUT_MS);
        validateJpegCapture(image, maxStillSz);

        // Free image resources
        image.close();

        stopPreview();
    }
+
+ private void fullRawCaptureTestByCamera() throws Exception {
+ Size maxPreviewSz = mOrderedPreviewSizes.get(0);
+ Size maxStillSz = mOrderedStillSizes.get(0);
+
+ SimpleCaptureCallback resultListener = new SimpleCaptureCallback();
+ SimpleImageReaderListener jpegListener = new SimpleImageReaderListener();
+ SimpleImageReaderListener rawListener = new SimpleImageReaderListener();
+
+ Size size = mStaticInfo.getRawDimensChecked();
+
+ if (VERBOSE) {
+ Log.v(TAG, "Testing multi capture with size " + size.toString()
+ + ", preview size " + maxPreviewSz);
+ }
+
+ // Prepare raw capture and start preview.
+ CaptureRequest.Builder previewBuilder =
+ mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
+ CaptureRequest.Builder multiBuilder =
+ mCamera.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE);
+
+ ImageReader rawReader = null;
+ ImageReader jpegReader = null;
+
+ try {
+ // Create ImageReaders.
+ rawReader = makeImageReader(size,
+ ImageFormat.RAW_SENSOR, MAX_READER_IMAGES, rawListener, mHandler);
+ jpegReader = makeImageReader(maxStillSz,
+ ImageFormat.JPEG, MAX_READER_IMAGES, jpegListener, mHandler);
+ updatePreviewSurface(maxPreviewSz);
+
+ // Configure output streams with preview and jpeg streams.
+ List<Surface> outputSurfaces = new ArrayList<Surface>();
+ outputSurfaces.add(rawReader.getSurface());
+ outputSurfaces.add(jpegReader.getSurface());
+ outputSurfaces.add(mPreviewSurface);
+ mSessionListener = new BlockingSessionCallback();
+ mSession = configureCameraSession(mCamera, outputSurfaces,
+ mSessionListener, mHandler);
+
+ // Configure the requests.
+ previewBuilder.addTarget(mPreviewSurface);
+ multiBuilder.addTarget(mPreviewSurface);
+ multiBuilder.addTarget(rawReader.getSurface());
+ multiBuilder.addTarget(jpegReader.getSurface());
+
+ // Start preview.
+ mSession.setRepeatingRequest(previewBuilder.build(), null, mHandler);
+
+ // Poor man's 3A, wait 2 seconds for AE/AF (if any) to settle.
+ // TODO: Do proper 3A trigger and lock (see testTakePictureTest).
+ Thread.sleep(3000);
+
+ multiBuilder.set(CaptureRequest.STATISTICS_LENS_SHADING_MAP_MODE,
+ CaptureRequest.STATISTICS_LENS_SHADING_MAP_MODE_ON);
+ CaptureRequest multiRequest = multiBuilder.build();
+
+ mSession.capture(multiRequest, resultListener, mHandler);
+
+ CaptureResult result = resultListener.getCaptureResultForRequest(multiRequest,
+ NUM_RESULTS_WAIT_TIMEOUT);
+ Image jpegImage = jpegListener.getImage(CAPTURE_IMAGE_TIMEOUT_MS);
+ basicValidateJpegImage(jpegImage, maxStillSz);
+ Image rawImage = rawListener.getImage(CAPTURE_IMAGE_TIMEOUT_MS);
+ validateRaw16Image(rawImage, size);
+ verifyRawCaptureResult(multiRequest, result);
+
+
+ ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
+ try (DngCreator dngCreator = new DngCreator(mStaticInfo.getCharacteristics(), result)) {
+ dngCreator.writeImage(outputStream, rawImage);
+ }
+
+ if (DEBUG) {
+ byte[] rawBuffer = outputStream.toByteArray();
+ String rawFileName = DEBUG_FILE_NAME_BASE + "/raw16_" + TAG + size.toString() +
+ "_cam_" + mCamera.getId() + ".dng";
+ Log.d(TAG, "Dump raw file into " + rawFileName);
+ dumpFile(rawFileName, rawBuffer);
+
+ byte[] jpegBuffer = getDataFromImage(jpegImage);
+ String jpegFileName = DEBUG_FILE_NAME_BASE + "/jpeg_" + TAG + size.toString() +
+ "_cam_" + mCamera.getId() + ".jpg";
+ Log.d(TAG, "Dump jpeg file into " + rawFileName);
+ dumpFile(jpegFileName, jpegBuffer);
+ }
+
+ stopPreview();
+ } finally {
+ CameraTestUtils.closeImageReader(rawReader);
+ CameraTestUtils.closeImageReader(jpegReader);
+ rawReader = null;
+ jpegReader = null;
+ }
+ }
+
    /**
     * Validate the RAW-related fields of a {@link CaptureResult}.
     *
     * @param rawRequest a {@link CaptureRequest} used to capture a RAW16 image.
     * @param rawResult the {@link CaptureResult} corresponding to the given request.
     */
    private void verifyRawCaptureResult(CaptureRequest rawRequest, CaptureResult rawResult) {
        assertNotNull(rawRequest);
        assertNotNull(rawResult);

        // An all-zero neutral color point is physically meaningless, so reject it.
        Rational[] empty = new Rational[] { Rational.ZERO, Rational.ZERO, Rational.ZERO};
        Rational[] neutralColorPoint = mCollector.expectKeyValueNotNull("NeutralColorPoint",
                rawResult, CaptureResult.SENSOR_NEUTRAL_COLOR_POINT);
        if (neutralColorPoint != null) {
            mCollector.expectEquals("NeutralColorPoint length", empty.length,
                    neutralColorPoint.length);
            mCollector.expectNotEquals("NeutralColorPoint cannot be all zeroes, ", empty,
                    neutralColorPoint);
            mCollector.expectValuesGreaterOrEqual("NeutralColorPoint", neutralColorPoint,
                    Rational.ZERO);
        }

        mCollector.expectKeyValueGreaterOrEqual(rawResult, CaptureResult.SENSOR_GREEN_SPLIT, 0.0f);

        Pair<Double, Double>[] noiseProfile = mCollector.expectKeyValueNotNull("NoiseProfile",
                rawResult, CaptureResult.SENSOR_NOISE_PROFILE);
        if (noiseProfile != null) {
            mCollector.expectEquals("NoiseProfile length", noiseProfile.length,
                    /*Num CFA channels*/4);
            // Each pair is (S, O): scale must be positive, offset non-negative.
            for (Pair<Double, Double> p : noiseProfile) {
                mCollector.expectTrue("NoiseProfile coefficients " + p +
                        " must have: S > 0, O >= 0", p.first > 0 && p.second >= 0);
            }
        }

        Integer hotPixelMode = mCollector.expectKeyValueNotNull("HotPixelMode", rawResult,
                CaptureResult.HOT_PIXEL_MODE);
        Boolean hotPixelMapMode = mCollector.expectKeyValueNotNull("HotPixelMapMode", rawResult,
                CaptureResult.STATISTICS_HOT_PIXEL_MAP_MODE);
        Point[] hotPixelMap = rawResult.get(CaptureResult.STATISTICS_HOT_PIXEL_MAP);

        Size pixelArraySize = mStaticInfo.getPixelArraySizeChecked();
        boolean[] availableHotPixelMapModes = mStaticInfo.getValueFromKeyNonNull(
                CameraCharacteristics.STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES);

        // The hot pixel mode in the result must echo the mode in the request, if any.
        if (hotPixelMode != null) {
            Integer requestMode = mCollector.expectKeyValueNotNull(rawRequest,
                    CaptureRequest.HOT_PIXEL_MODE);
            if (requestMode != null) {
                mCollector.expectKeyValueEquals(rawResult, CaptureResult.HOT_PIXEL_MODE,
                        requestMode);
            }
        }

        if (hotPixelMapMode != null) {
            Boolean requestMapMode = mCollector.expectKeyValueNotNull(rawRequest,
                    CaptureRequest.STATISTICS_HOT_PIXEL_MAP_MODE);
            if (requestMapMode != null) {
                mCollector.expectKeyValueEquals(rawResult,
                        CaptureResult.STATISTICS_HOT_PIXEL_MAP_MODE, requestMapMode);
            }

            if (!hotPixelMapMode) {
                mCollector.expectTrue("HotPixelMap must be empty", hotPixelMap == null ||
                        hotPixelMap.length == 0);
            } else {
                mCollector.expectTrue("HotPixelMap must not be empty", hotPixelMap != null);
                mCollector.expectNotNull("AvailableHotPixelMapModes must not be null",
                        availableHotPixelMapModes);
                if (availableHotPixelMapModes != null) {
                    mCollector.expectContains("HotPixelMapMode", availableHotPixelMapModes, true);
                }

                // Every reported hot pixel must lie inside the sensor's pixel array.
                int height = pixelArraySize.getHeight();
                int width = pixelArraySize.getWidth();
                for (Point p : hotPixelMap) {
                    mCollector.expectTrue("Hotpixel " + p + " must be in pixelArray " +
                            pixelArraySize, p.x >= 0 && p.x < width && p.y >= 0 && p.y < height);
                }
            }
        }
        // TODO: profileHueSatMap, and profileToneCurve aren't supported yet.

    }
+
+ //----------------------------------------------------------------
+ //---------Below are common functions for all tests.--------------
+ //----------------------------------------------------------------
    /**
     * Validate a standard raw (RAW16) capture image against the expected size and format.
     *
     * @param image The raw16 format image captured
     * @param rawSize The expected raw size
     */
    private static void validateRaw16Image(Image image, Size rawSize) {
        CameraTestUtils.validateImage(image, rawSize.getWidth(), rawSize.getHeight(),
                ImageFormat.RAW_SENSOR, /*filePath*/null);
    }
+
    /**
     * Validate a captured JPEG image.
     * <p>
     * In addition to basic image object checks, this also performs the decoding
     * test, which is slower.
     * </p>
     *
     * @param image The JPEG image to be verified.
     * @param jpegSize The JPEG capture size to be verified against.
     */
    private static void validateJpegCapture(Image image, Size jpegSize) {
        CameraTestUtils.validateImage(image, jpegSize.getWidth(), jpegSize.getHeight(),
                ImageFormat.JPEG, /*filePath*/null);
    }
+
    /**
     * An AutoFocusListener that opens a ConditionVariable when AF locks, letting a
     * test block until focusing completes.
     */
    private static class SimpleAutoFocusListener implements Camera2Focuser.AutoFocusListener {
        final ConditionVariable focusDone = new ConditionVariable();
        @Override
        public void onAutoFocusLocked(boolean success) {
            // Wake any waiter regardless of AF success or failure.
            focusDone.open();
        }

        /**
         * Block until AF completes, or throw if it does not within timeoutMs.
         */
        public void waitForAutoFocusDone(long timeoutMs) {
            if (focusDone.block(timeoutMs)) {
                // Re-arm the condition so this listener can be reused.
                focusDone.close();
            } else {
                throw new TimeoutRuntimeException("Wait for auto focus done timed out after "
                        + timeoutMs + "ms");
            }
        }
    }
+
+ private boolean isRegionsSupportedFor3A(int index) {
+ int maxRegions = 0;
+ switch (index) {
+ case MAX_REGIONS_AE_INDEX:
+ maxRegions = mStaticInfo.getAeMaxRegionsChecked();
+ break;
+ case MAX_REGIONS_AWB_INDEX:
+ maxRegions = mStaticInfo.getAwbMaxRegionsChecked();
+ break;
+ case MAX_REGIONS_AF_INDEX:
+ maxRegions = mStaticInfo.getAfMaxRegionsChecked();
+ break;
+ default:
+ throw new IllegalArgumentException("Unknown algorithm index");
+ }
+ boolean isRegionsSupported = maxRegions > 0;
+ if (index == MAX_REGIONS_AF_INDEX && isRegionsSupported) {
+ mCollector.expectTrue(
+ "Device reports non-zero max AF region count for a camera without focuser!",
+ mStaticInfo.hasFocuser());
+ isRegionsSupported = isRegionsSupported && mStaticInfo.hasFocuser();
+ }
+
+ return isRegionsSupported;
+ }
+}
diff --git a/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/stress/CameraStressTest.java b/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/stress/CameraStressTest.java
index a112c73..d1193de 100644
--- a/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/stress/CameraStressTest.java
+++ b/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/stress/CameraStressTest.java
@@ -17,18 +17,16 @@
package com.android.mediaframeworktest.stress;
import com.android.mediaframeworktest.MediaFrameworkTest;
-import com.android.mediaframeworktest.CameraTestHelper;
+import com.android.mediaframeworktest.helpers.CameraTestHelper;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileWriter;
-import java.io.IOException;
import java.io.Writer;
import java.util.concurrent.Semaphore;
import java.util.concurrent.TimeUnit;
import java.util.List;
-import android.hardware.Camera;
import android.hardware.Camera.Parameters;
import android.os.Environment;
import android.os.Handler;
@@ -37,9 +35,6 @@
import android.test.suitebuilder.annotation.LargeTest;
import android.util.Log;
import android.view.SurfaceHolder;
-import com.android.mediaframeworktest.CameraStressTestRunner;
-
-import junit.framework.Assert;
/**
* Junit / Instrumentation test case for the following camera APIs:
diff --git a/packages/SystemUI/res/drawable/recents_button_bg.xml b/packages/SystemUI/res/drawable/recents_button_bg.xml
new file mode 100644
index 0000000..7456365
--- /dev/null
+++ b/packages/SystemUI/res/drawable/recents_button_bg.xml
@@ -0,0 +1,19 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright (C) 2014 The Android Open Source Project
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+
+<ripple xmlns:android="http://schemas.android.com/apk/res/android"
+ android:color="#40ffffff">
+</ripple>
diff --git a/packages/SystemUI/res/drawable/recents_task_view_header_bg.xml b/packages/SystemUI/res/drawable/recents_task_view_header_bg.xml
new file mode 100644
index 0000000..745af33
--- /dev/null
+++ b/packages/SystemUI/res/drawable/recents_task_view_header_bg.xml
@@ -0,0 +1,20 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright (C) 2014 The Android Open Source Project
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+
+<ripple xmlns:android="http://schemas.android.com/apk/res/android"
+ android:color="?android:attr/colorControlHighlight">
+ <item android:drawable="@android:color/transparent" />
+</ripple>
\ No newline at end of file
diff --git a/packages/SystemUI/res/drawable/recents_task_view_header_bg_color.xml b/packages/SystemUI/res/drawable/recents_task_view_header_bg_color.xml
new file mode 100644
index 0000000..5f9341c
--- /dev/null
+++ b/packages/SystemUI/res/drawable/recents_task_view_header_bg_color.xml
@@ -0,0 +1,22 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright (C) 2014 The Android Open Source Project
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+
+<shape xmlns:android="http://schemas.android.com/apk/res/android"
+ android:shape="rectangle">
+ <corners android:topLeftRadius="@dimen/recents_task_view_rounded_corners_radius"
+ android:topRightRadius="@dimen/recents_task_view_rounded_corners_radius"/>
+ <solid android:color="#00000000" />
+</shape>
\ No newline at end of file
diff --git a/packages/SystemUI/res/layout/recents_task_view_header.xml b/packages/SystemUI/res/layout/recents_task_view_header.xml
index deb8e91..5c67f80 100644
--- a/packages/SystemUI/res/layout/recents_task_view_header.xml
+++ b/packages/SystemUI/res/layout/recents_task_view_header.xml
@@ -49,9 +49,8 @@
android:layout_marginEnd="@dimen/recents_task_view_header_button_width"
android:layout_gravity="center_vertical|end"
android:padding="15dp"
+ android:background="@drawable/recents_button_bg"
android:src="@drawable/star"
- android:background="?android:selectableItemBackground"
- android:alpha="0"
android:visibility="gone" />
<com.android.systemui.recents.views.FixedSizeImageView
android:id="@+id/dismiss_task"
@@ -59,10 +58,9 @@
android:layout_height="@dimen/recents_task_view_header_button_height"
android:layout_gravity="center_vertical|end"
android:padding="15dp"
- android:src="@drawable/recents_dismiss_light"
- android:background="?android:selectableItemBackground"
- android:alpha="0"
- android:visibility="gone" />
+ android:background="@drawable/recents_button_bg"
+ android:visibility="invisible"
+ android:src="@drawable/recents_dismiss_light" />
<!-- The progress indicator shows if auto-paging is enabled -->
<ViewStub android:id="@+id/focus_timer_indicator_stub"
diff --git a/packages/SystemUI/res/layout/recents_task_view_header_overlay.xml b/packages/SystemUI/res/layout/recents_task_view_header_overlay.xml
index 10659a3..dabfc80 100644
--- a/packages/SystemUI/res/layout/recents_task_view_header_overlay.xml
+++ b/packages/SystemUI/res/layout/recents_task_view_header_overlay.xml
@@ -45,6 +45,6 @@
android:layout_height="@dimen/recents_task_bar_height"
android:layout_gravity="center_vertical|end"
android:padding="15dp"
- android:background="?android:selectableItemBackground"
+ android:background="@drawable/recents_button_bg"
android:src="@drawable/recents_info_light" />
</FrameLayout>
\ No newline at end of file
diff --git a/packages/SystemUI/src/com/android/systemui/recents/RecentsImpl.java b/packages/SystemUI/src/com/android/systemui/recents/RecentsImpl.java
index 5a60a19..35e97e5 100644
--- a/packages/SystemUI/src/com/android/systemui/recents/RecentsImpl.java
+++ b/packages/SystemUI/src/com/android/systemui/recents/RecentsImpl.java
@@ -806,7 +806,7 @@
} else {
Canvas c = new Canvas(thumbnail);
c.scale(toTransform.scale, toTransform.scale);
- mHeaderBar.rebindToTask(toTask, false /* touchExplorationEnabled */);
+ mHeaderBar.rebindToTask(toTask);
mHeaderBar.draw(c);
c.setBitmap(null);
}
diff --git a/packages/SystemUI/src/com/android/systemui/recents/misc/SystemServicesProxy.java b/packages/SystemUI/src/com/android/systemui/recents/misc/SystemServicesProxy.java
index d22cd53..ecf8ecb 100644
--- a/packages/SystemUI/src/com/android/systemui/recents/misc/SystemServicesProxy.java
+++ b/packages/SystemUI/src/com/android/systemui/recents/misc/SystemServicesProxy.java
@@ -256,6 +256,8 @@
* Returns whether this device has freeform workspaces.
*/
public boolean hasFreeformWorkspaceSupport() {
+ if (mPm == null) return false;
+
return mHasFreeformWorkspaceSupport;
}
diff --git a/packages/SystemUI/src/com/android/systemui/recents/views/FixedSizeImageView.java b/packages/SystemUI/src/com/android/systemui/recents/views/FixedSizeImageView.java
index d3b5e47..f5ab01f 100644
--- a/packages/SystemUI/src/com/android/systemui/recents/views/FixedSizeImageView.java
+++ b/packages/SystemUI/src/com/android/systemui/recents/views/FixedSizeImageView.java
@@ -20,14 +20,13 @@
import android.graphics.drawable.BitmapDrawable;
import android.graphics.drawable.Drawable;
import android.util.AttributeSet;
-
-import com.android.systemui.statusbar.AlphaOptimizedImageView;
+import android.widget.ImageView;
/**
* This is an optimized ImageView that does not trigger a <code>requestLayout()</code> or
* <code>invalidate()</code> when setting the image to <code>null</code>.
*/
-public class FixedSizeImageView extends AlphaOptimizedImageView {
+public class FixedSizeImageView extends ImageView {
private boolean mAllowRelayout = true;
private boolean mAllowInvalidate = true;
@@ -74,4 +73,9 @@
mAllowRelayout = true;
mAllowInvalidate = true;
}
+
+ @Override
+ public boolean hasOverlappingRendering() {
+ return false;
+ }
}
diff --git a/packages/SystemUI/src/com/android/systemui/recents/views/TaskStackView.java b/packages/SystemUI/src/com/android/systemui/recents/views/TaskStackView.java
index 232b416..7079ff44 100644
--- a/packages/SystemUI/src/com/android/systemui/recents/views/TaskStackView.java
+++ b/packages/SystemUI/src/com/android/systemui/recents/views/TaskStackView.java
@@ -233,8 +233,8 @@
@Override
protected void onAttachedToWindow() {
EventBus.getDefault().register(this, RecentsActivity.EVENT_BUS_PRIORITY + 1);
- super.onAttachedToWindow();
readSystemFlags();
+ super.onAttachedToWindow();
}
@Override
@@ -1418,9 +1418,7 @@
Recents.getTaskLoader().loadTaskData(task, true /* fetchAndInvalidateThumbnails */);
// If the doze trigger has already fired, then update the state for this task view
- if (mUIDozeTrigger.hasTriggered()) {
- tv.setNoUserInteractionState();
- }
+ tv.setNoUserInteractionState();
// Set the new state for this view, including the callbacks and view clipping
tv.setCallbacks(this);
diff --git a/packages/SystemUI/src/com/android/systemui/recents/views/TaskView.java b/packages/SystemUI/src/com/android/systemui/recents/views/TaskView.java
index 853f868..2e8e665 100644
--- a/packages/SystemUI/src/com/android/systemui/recents/views/TaskView.java
+++ b/packages/SystemUI/src/com/android/systemui/recents/views/TaskView.java
@@ -29,7 +29,6 @@
import android.graphics.PorterDuff;
import android.graphics.PorterDuffColorFilter;
import android.graphics.Rect;
-import android.provider.Settings;
import android.util.AttributeSet;
import android.util.FloatProperty;
import android.util.IntProperty;
@@ -115,7 +114,6 @@
Task mTask;
boolean mTaskDataLoaded;
boolean mClipViewInStack = true;
- boolean mTouchExplorationEnabled;
AnimateableViewBounds mViewBounds;
private AnimatorSet mTransformAnimation;
@@ -164,7 +162,6 @@
void reset() {
resetViewProperties();
resetNoUserInteractionState();
- readSystemFlags();
setClipViewInStack(false);
setCallbacks(null);
}
@@ -180,12 +177,6 @@
}
@Override
- protected void onAttachedToWindow() {
- super.onAttachedToWindow();
- readSystemFlags();
- }
-
- @Override
protected void onFinishInflate() {
// Bind the views
mContent = findViewById(R.id.task_view_content);
@@ -554,7 +545,7 @@
public void onTaskDataLoaded(Task task) {
// Bind each of the views to the new task data
mThumbnailView.rebindToTask(mTask);
- mHeaderView.rebindToTask(mTask, mTouchExplorationEnabled);
+ mHeaderView.rebindToTask(mTask);
mTaskDataLoaded = true;
}
@@ -563,13 +554,13 @@
// Unbind each of the views from the task data and remove the task callback
mTask.removeCallback(this);
mThumbnailView.unbindFromTask();
- mHeaderView.unbindFromTask(mTouchExplorationEnabled);
+ mHeaderView.unbindFromTask();
mTaskDataLoaded = false;
}
@Override
public void onTaskStackIdChanged() {
- mHeaderView.rebindToTask(mTask, mTouchExplorationEnabled);
+ mHeaderView.rebindToTask(mTask);
}
/**** View.OnClickListener Implementation ****/
@@ -624,12 +615,4 @@
}
EventBus.getDefault().unregister(this);
}
-
- /**
- * Reads current system flags related to accessibility and screen pinning.
- */
- private void readSystemFlags() {
- SystemServicesProxy ssp = Recents.getSystemServices();
- mTouchExplorationEnabled = ssp.isTouchExplorationEnabled();
- }
}
diff --git a/packages/SystemUI/src/com/android/systemui/recents/views/TaskViewHeader.java b/packages/SystemUI/src/com/android/systemui/recents/views/TaskViewHeader.java
index 5e17b90..408ffb4 100644
--- a/packages/SystemUI/src/com/android/systemui/recents/views/TaskViewHeader.java
+++ b/packages/SystemUI/src/com/android/systemui/recents/views/TaskViewHeader.java
@@ -214,18 +214,14 @@
@Override
protected void onFinishInflate() {
- SystemServicesProxy ssp = Recents.getSystemServices();
-
// Initialize the icon and description views
mIconView = (ImageView) findViewById(R.id.icon);
mIconView.setClickable(false);
mIconView.setOnLongClickListener(this);
mTitleView = (TextView) findViewById(R.id.title);
mDismissButton = (ImageView) findViewById(R.id.dismiss_task);
- if (ssp.hasFreeformWorkspaceSupport()) {
- mMoveTaskButton = (ImageView) findViewById(R.id.move_task);
- mMoveTaskButton.setVisibility(View.VISIBLE);
- }
+ mDismissButton.setOnClickListener(this);
+ mMoveTaskButton = (ImageView) findViewById(R.id.move_task);
mFocusTimerIndicatorStub = (ViewStub) findViewById(R.id.focus_timer_indicator_stub);
mAppOverlayViewStub = (ViewStub) findViewById(R.id.app_overlay_stub);
}
@@ -343,6 +339,7 @@
void setDimAlpha(float dimAlpha) {
mDimAlpha = dimAlpha;
updateBackgroundColor(dimAlpha);
+ invalidate();
}
/**
@@ -356,12 +353,11 @@
mTmpHSL[2] = Math.min(1f, mTmpHSL[2] + OVERLAY_LIGHTNESS_INCREMENT * (1.0f - dimAlpha));
mOverlayBackground.setColorAndDim(ColorUtils.HSLToColor(mTmpHSL), dimAlpha);
mDimLayerPaint.setAlpha((int) (dimAlpha * 255));
- invalidate();
}
}
/** Binds the bar view to the task */
- public void rebindToTask(Task t, boolean touchExplorationEnabled) {
+ public void rebindToTask(Task t) {
SystemServicesProxy ssp = Recents.getSystemServices();
mTask = t;
@@ -395,6 +391,10 @@
? mLightFreeformIcon
: mDarkFreeformIcon);
}
+ if (mMoveTaskButton.getVisibility() != View.VISIBLE) {
+ mMoveTaskButton.setVisibility(View.VISIBLE);
+ }
+ mMoveTaskButton.setOnClickListener(this);
}
if (Recents.getDebugFlags().isFastToggleRecentsEnabled()) {
@@ -408,37 +408,32 @@
}
// In accessibility, a single click on the focused app info button will show it
- if (touchExplorationEnabled) {
+ if (ssp.isTouchExplorationEnabled()) {
mIconView.setOnClickListener(this);
}
}
/** Unbinds the bar view from the task */
- void unbindFromTask(boolean touchExplorationEnabled) {
+ void unbindFromTask() {
mTask = null;
mIconView.setImageDrawable(null);
- if (touchExplorationEnabled) {
- mIconView.setOnClickListener(null);
- }
+ mIconView.setOnClickListener(null);
+ mMoveTaskButton.setOnClickListener(null);
}
/** Animates this task bar if the user does not interact with the stack after a certain time. */
void startNoUserInteractionAnimation() {
- int duration = getResources().getInteger(R.integer.recents_task_enter_from_app_duration);
- mDismissButton.setOnClickListener(this);
- mDismissButton.setVisibility(View.VISIBLE);
- mDismissButton.animate()
- .alpha(1f)
- .setInterpolator(Interpolators.FAST_OUT_LINEAR_IN)
- .setDuration(duration)
- .start();
- mMoveTaskButton.setOnClickListener(this);
- mMoveTaskButton.setVisibility(View.VISIBLE);
- mMoveTaskButton.animate()
- .alpha(1f)
- .setInterpolator(Interpolators.FAST_OUT_LINEAR_IN)
- .setDuration(duration)
- .start();
+ if (mDismissButton.getVisibility() != View.VISIBLE) {
+ mDismissButton.setVisibility(View.VISIBLE);
+ mDismissButton.setAlpha(0f);
+ mDismissButton.animate()
+ .alpha(1f)
+ .setStartDelay(0)
+ .setInterpolator(Interpolators.FAST_OUT_LINEAR_IN)
+ .setDuration(getResources().getInteger(
+ R.integer.recents_task_enter_from_app_duration))
+ .start();
+ }
}
/**
@@ -446,14 +441,11 @@
* time.
*/
void setNoUserInteractionState() {
- mDismissButton.setVisibility(View.VISIBLE);
- mDismissButton.animate().cancel();
- mDismissButton.setAlpha(1f);
- mDismissButton.setOnClickListener(this);
- mMoveTaskButton.setVisibility(View.VISIBLE);
- mMoveTaskButton.animate().cancel();
- mMoveTaskButton.setAlpha(1f);
- mMoveTaskButton.setOnClickListener(this);
+ if (mDismissButton.getVisibility() != View.VISIBLE) {
+ mDismissButton.animate().cancel();
+ mDismissButton.setVisibility(View.VISIBLE);
+ mDismissButton.setAlpha(1f);
+ }
}
/**
@@ -462,11 +454,6 @@
*/
void resetNoUserInteractionState() {
mDismissButton.setVisibility(View.INVISIBLE);
- mDismissButton.setAlpha(0f);
- mDismissButton.setOnClickListener(null);
- mMoveTaskButton.setVisibility(View.INVISIBLE);
- mMoveTaskButton.setAlpha(0f);
- mMoveTaskButton.setOnClickListener(null);
}
@Override
@@ -480,8 +467,11 @@
@Override
public void onClick(View v) {
if (v == mIconView) {
- // In accessibility, a single click on the focused app info button will show it
- EventBus.getDefault().send(new ShowApplicationInfoEvent(mTask));
+ SystemServicesProxy ssp = Recents.getSystemServices();
+ if (ssp.isTouchExplorationEnabled()) {
+ // In accessibility, a single click on the focused app info button will show it
+ EventBus.getDefault().send(new ShowApplicationInfoEvent(mTask));
+ }
} else if (v == mDismissButton) {
TaskView tv = Utilities.findParent(this, TaskView.class);
tv.dismissTask();
diff --git a/packages/SystemUI/src/com/android/systemui/recents/views/TaskViewThumbnail.java b/packages/SystemUI/src/com/android/systemui/recents/views/TaskViewThumbnail.java
index f90951e..ea40644 100644
--- a/packages/SystemUI/src/com/android/systemui/recents/views/TaskViewThumbnail.java
+++ b/packages/SystemUI/src/com/android/systemui/recents/views/TaskViewThumbnail.java
@@ -93,6 +93,7 @@
mTaskViewRect.set(0, 0, width, height);
updateThumbnailScale();
+ invalidate();
}
@Override
@@ -119,6 +120,7 @@
mDrawPaint.setShader(null);
mThumbnailRect.setEmpty();
}
+ invalidate();
}
/** Updates the paint to draw the thumbnail. */
@@ -136,9 +138,7 @@
mDrawPaint.setColorFilter(null);
mDrawPaint.setColor(Color.argb(255, grey, grey, grey));
}
- if (!mInvisible) {
- invalidate();
- }
+ invalidate();
}
/**
diff --git a/packages/SystemUI/src/com/android/systemui/statusbar/CommandQueue.java b/packages/SystemUI/src/com/android/systemui/statusbar/CommandQueue.java
index 60fc0fa..de6e7fe 100644
--- a/packages/SystemUI/src/com/android/systemui/statusbar/CommandQueue.java
+++ b/packages/SystemUI/src/com/android/systemui/statusbar/CommandQueue.java
@@ -40,31 +40,32 @@
private static final int OP_SET_ICON = 1;
private static final int OP_REMOVE_ICON = 2;
- private static final int MSG_ICON = 1 << MSG_SHIFT;
- private static final int MSG_DISABLE = 2 << MSG_SHIFT;
- private static final int MSG_EXPAND_NOTIFICATIONS = 3 << MSG_SHIFT;
- private static final int MSG_COLLAPSE_PANELS = 4 << MSG_SHIFT;
- private static final int MSG_EXPAND_SETTINGS = 5 << MSG_SHIFT;
- private static final int MSG_SET_SYSTEMUI_VISIBILITY = 6 << MSG_SHIFT;
- private static final int MSG_TOP_APP_WINDOW_CHANGED = 7 << MSG_SHIFT;
- private static final int MSG_SHOW_IME_BUTTON = 8 << MSG_SHIFT;
- private static final int MSG_TOGGLE_RECENT_APPS = 9 << MSG_SHIFT;
- private static final int MSG_PRELOAD_RECENT_APPS = 10 << MSG_SHIFT;
- private static final int MSG_CANCEL_PRELOAD_RECENT_APPS = 11 << MSG_SHIFT;
- private static final int MSG_SET_WINDOW_STATE = 12 << MSG_SHIFT;
- private static final int MSG_SHOW_RECENT_APPS = 13 << MSG_SHIFT;
- private static final int MSG_HIDE_RECENT_APPS = 14 << MSG_SHIFT;
- private static final int MSG_BUZZ_BEEP_BLINKED = 15 << MSG_SHIFT;
- private static final int MSG_NOTIFICATION_LIGHT_OFF = 16 << MSG_SHIFT;
- private static final int MSG_NOTIFICATION_LIGHT_PULSE = 17 << MSG_SHIFT;
- private static final int MSG_SHOW_SCREEN_PIN_REQUEST = 18 << MSG_SHIFT;
- private static final int MSG_APP_TRANSITION_PENDING = 19 << MSG_SHIFT;
- private static final int MSG_APP_TRANSITION_CANCELLED = 20 << MSG_SHIFT;
- private static final int MSG_APP_TRANSITION_STARTING = 21 << MSG_SHIFT;
- private static final int MSG_ASSIST_DISCLOSURE = 22 << MSG_SHIFT;
- private static final int MSG_START_ASSIST = 23 << MSG_SHIFT;
- private static final int MSG_CAMERA_LAUNCH_GESTURE = 24 << MSG_SHIFT;
- private static final int MSG_TOGGLE_KEYBOARD_SHORTCUTS = 25 << MSG_SHIFT;
+ private static final int MSG_ICON = 1 << MSG_SHIFT;
+ private static final int MSG_DISABLE = 2 << MSG_SHIFT;
+ private static final int MSG_EXPAND_NOTIFICATIONS = 3 << MSG_SHIFT;
+ private static final int MSG_COLLAPSE_PANELS = 4 << MSG_SHIFT;
+ private static final int MSG_EXPAND_SETTINGS = 5 << MSG_SHIFT;
+ private static final int MSG_SET_SYSTEMUI_VISIBILITY = 6 << MSG_SHIFT;
+ private static final int MSG_TOP_APP_WINDOW_CHANGED = 7 << MSG_SHIFT;
+ private static final int MSG_SHOW_IME_BUTTON = 8 << MSG_SHIFT;
+ private static final int MSG_TOGGLE_RECENT_APPS = 9 << MSG_SHIFT;
+ private static final int MSG_PRELOAD_RECENT_APPS = 10 << MSG_SHIFT;
+ private static final int MSG_CANCEL_PRELOAD_RECENT_APPS = 11 << MSG_SHIFT;
+ private static final int MSG_SET_WINDOW_STATE = 12 << MSG_SHIFT;
+ private static final int MSG_SHOW_RECENT_APPS = 13 << MSG_SHIFT;
+ private static final int MSG_HIDE_RECENT_APPS = 14 << MSG_SHIFT;
+ private static final int MSG_BUZZ_BEEP_BLINKED = 15 << MSG_SHIFT;
+ private static final int MSG_NOTIFICATION_LIGHT_OFF = 16 << MSG_SHIFT;
+ private static final int MSG_NOTIFICATION_LIGHT_PULSE = 17 << MSG_SHIFT;
+ private static final int MSG_SHOW_SCREEN_PIN_REQUEST = 18 << MSG_SHIFT;
+ private static final int MSG_APP_TRANSITION_PENDING = 19 << MSG_SHIFT;
+ private static final int MSG_APP_TRANSITION_CANCELLED = 20 << MSG_SHIFT;
+ private static final int MSG_APP_TRANSITION_STARTING = 21 << MSG_SHIFT;
+ private static final int MSG_ASSIST_DISCLOSURE = 22 << MSG_SHIFT;
+ private static final int MSG_START_ASSIST = 23 << MSG_SHIFT;
+ private static final int MSG_CAMERA_LAUNCH_GESTURE = 24 << MSG_SHIFT;
+ private static final int MSG_TOGGLE_KEYBOARD_SHORTCUTS = 25 << MSG_SHIFT;
+ private static final int MSG_REQUEST_TV_PICTURE_IN_PICTURE = 26 << MSG_SHIFT;
public static final int FLAG_EXCLUDE_NONE = 0;
public static final int FLAG_EXCLUDE_SEARCH_PANEL = 1 << 0;
@@ -110,6 +111,7 @@
public void showAssistDisclosure();
public void startAssist(Bundle args);
public void onCameraLaunchGestureDetected(int source);
+ public void requestTvPictureInPicture();
}
public CommandQueue(Callbacks callbacks) {
@@ -231,6 +233,14 @@
}
}
+ @Override
+ public void requestTvPictureInPicture() {
+ synchronized (mLock) {
+ mHandler.removeMessages(MSG_REQUEST_TV_PICTURE_IN_PICTURE);
+ mHandler.obtainMessage(MSG_REQUEST_TV_PICTURE_IN_PICTURE).sendToTarget();
+ }
+ }
+
public void setWindowState(int window, int state) {
synchronized (mLock) {
// don't coalesce these
@@ -399,6 +409,9 @@
case MSG_CAMERA_LAUNCH_GESTURE:
mCallbacks.onCameraLaunchGestureDetected(msg.arg1);
break;
+ case MSG_REQUEST_TV_PICTURE_IN_PICTURE:
+ mCallbacks.requestTvPictureInPicture();
+ break;
}
}
}
diff --git a/packages/SystemUI/src/com/android/systemui/statusbar/phone/PhoneStatusBar.java b/packages/SystemUI/src/com/android/systemui/statusbar/phone/PhoneStatusBar.java
index 2b961fd..7e7a948 100644
--- a/packages/SystemUI/src/com/android/systemui/statusbar/phone/PhoneStatusBar.java
+++ b/packages/SystemUI/src/com/android/systemui/statusbar/phone/PhoneStatusBar.java
@@ -4264,6 +4264,11 @@
}
}
+ @Override
+ public void requestTvPictureInPicture() {
+ // no-op.
+ }
+
public void notifyFpAuthModeChanged() {
updateDozing();
}
diff --git a/packages/SystemUI/src/com/android/systemui/statusbar/tv/TvStatusBar.java b/packages/SystemUI/src/com/android/systemui/statusbar/tv/TvStatusBar.java
index 0406ae3..f322348 100644
--- a/packages/SystemUI/src/com/android/systemui/statusbar/tv/TvStatusBar.java
+++ b/packages/SystemUI/src/com/android/systemui/statusbar/tv/TvStatusBar.java
@@ -25,6 +25,7 @@
import com.android.systemui.statusbar.ActivatableNotificationView;
import com.android.systemui.statusbar.BaseStatusBar;
import com.android.systemui.statusbar.NotificationData;
+import com.android.systemui.tv.pip.PipManager;
/*
* Status bar implementation for "large screen" products that mostly present no on-screen nav
@@ -170,6 +171,11 @@
}
@Override
+ public void requestTvPictureInPicture() {
+ PipManager.getInstance().requestTvPictureInPicture();
+ }
+
+ @Override
protected void updateHeadsUp(String key, NotificationData.Entry entry, boolean shouldPeek,
boolean alertAgain) {
}
diff --git a/packages/SystemUI/src/com/android/systemui/tv/pip/PipManager.java b/packages/SystemUI/src/com/android/systemui/tv/pip/PipManager.java
index f4a8873..6febe5f 100644
--- a/packages/SystemUI/src/com/android/systemui/tv/pip/PipManager.java
+++ b/packages/SystemUI/src/com/android/systemui/tv/pip/PipManager.java
@@ -104,14 +104,7 @@
@Override
public void onReceive(Context context, Intent intent) {
String action = intent.getAction();
- if (Intent.ACTION_PICTURE_IN_PICTURE_BUTTON.equals(action)) {
- if (DEBUG) Log.d(TAG, "PIP button pressed");
- if (!hasPipTasks()) {
- startPip();
- } else if (mState == STATE_PIP_OVERLAY) {
- showPipMenu();
- }
- } else if (Intent.ACTION_MEDIA_RESOURCE_GRANTED.equals(action)) {
+ if (Intent.ACTION_MEDIA_RESOURCE_GRANTED.equals(action)) {
String[] packageNames = intent.getStringArrayExtra(Intent.EXTRA_PACKAGES);
int resourceType = intent.getIntExtra(Intent.EXTRA_MEDIA_RESOURCE_TYPE,
INVALID_RESOURCE_TYPE);
@@ -150,18 +143,29 @@
Log.e(TAG, "registerTaskStackListener failed", e);
}
IntentFilter intentFilter = new IntentFilter();
- intentFilter.addAction(Intent.ACTION_PICTURE_IN_PICTURE_BUTTON);
intentFilter.addAction(Intent.ACTION_MEDIA_RESOURCE_GRANTED);
mContext.registerReceiver(mBroadcastReceiver, intentFilter);
}
+ /**
+     * Request picture-in-picture (PIP).
+     * Starts PIP if there are no existing PIP tasks; otherwise, if the PIP
+     * overlay is currently showing, brings up the PIP menu instead.
+ */
+ public void requestTvPictureInPicture() {
+ if (DEBUG) Log.d(TAG, "requestTvPictureInPicture()");
+ if (!hasPipTasks()) {
+ startPip();
+ } else if (mState == STATE_PIP_OVERLAY) {
+ showPipMenu();
+ }
+ }
+
private void startPip() {
try {
mActivityManager.moveTopActivityToPinnedStack(FULLSCREEN_WORKSPACE_STACK_ID, mPipBound);
} catch (RemoteException|IllegalArgumentException e) {
Log.e(TAG, "moveTopActivityToPinnedStack failed", e);
}
-
}
/**
diff --git a/services/core/java/com/android/server/policy/PhoneWindowManager.java b/services/core/java/com/android/server/policy/PhoneWindowManager.java
index 0b1354a..43b82e9 100644
--- a/services/core/java/com/android/server/policy/PhoneWindowManager.java
+++ b/services/core/java/com/android/server/policy/PhoneWindowManager.java
@@ -658,6 +658,7 @@
private static final int MSG_POWER_LONG_PRESS = 14;
private static final int MSG_UPDATE_DREAMING_SLEEP_TOKEN = 15;
private static final int MSG_REQUEST_TRANSIENT_BARS = 16;
+ private static final int MSG_REQUEST_TV_PICTURE_IN_PICTURE = 17;
private static final int MSG_REQUEST_TRANSIENT_BARS_ARG_STATUS = 0;
private static final int MSG_REQUEST_TRANSIENT_BARS_ARG_NAVIGATION = 1;
@@ -719,6 +720,9 @@
requestTransientBars(targetBar);
}
break;
+ case MSG_REQUEST_TV_PICTURE_IN_PICTURE:
+ requestTvPictureInPictureInternal();
+ break;
}
}
}
@@ -1337,7 +1341,7 @@
launchAssistAction(null, deviceId);
break;
case LONG_PRESS_HOME_PICTURE_IN_PICTURE:
- handlePipKey(event);
+ requestTvPictureInPicture(event);
break;
default:
Log.w(TAG, "Not defined home long press behavior: " + mLongPressOnHomeBehavior);
@@ -1352,11 +1356,25 @@
}
}
- private void handlePipKey(KeyEvent event) {
- if (DEBUG_INPUT) Log.d(TAG, "handlePipKey event=" + event);
- Intent intent = new Intent(Intent.ACTION_PICTURE_IN_PICTURE_BUTTON);
- intent.putExtra(Intent.EXTRA_KEY_EVENT, event);
- mContext.sendBroadcastAsUser(intent, UserHandle.ALL);
+ private void requestTvPictureInPicture(KeyEvent event) {
+ if (DEBUG_INPUT) Log.d(TAG, "requestTvPictureInPicture event=" + event);
+ mHandler.removeMessages(MSG_REQUEST_TV_PICTURE_IN_PICTURE);
+ Message msg = mHandler.obtainMessage(MSG_REQUEST_TV_PICTURE_IN_PICTURE);
+ msg.setAsynchronous(true);
+ msg.sendToTarget();
+ }
+
+ private void requestTvPictureInPictureInternal() {
+ try {
+ IStatusBarService statusbar = getStatusBarService();
+ if (statusbar != null) {
+ statusbar.requestTvPictureInPicture();
+ }
+ } catch (RemoteException|IllegalArgumentException e) {
+ Slog.e(TAG, "Cannot handle picture-in-picture key", e);
+ // re-acquire status bar service next time it is needed.
+ mStatusBarService = null;
+ }
}
private final Runnable mHomeDoubleTapTimeoutRunnable = new Runnable() {
@@ -1648,14 +1666,16 @@
* eg. Disable long press on home goes to recents on sw600dp.
*/
private void readConfigurationDependentBehaviors() {
- mLongPressOnHomeBehavior = mContext.getResources().getInteger(
+ final Resources res = mContext.getResources();
+
+ mLongPressOnHomeBehavior = res.getInteger(
com.android.internal.R.integer.config_longPressOnHomeBehavior);
if (mLongPressOnHomeBehavior < LONG_PRESS_HOME_NOTHING ||
mLongPressOnHomeBehavior > LAST_LONG_PRESS_HOME_BEHAVIOR) {
mLongPressOnHomeBehavior = LONG_PRESS_HOME_NOTHING;
}
- mDoubleTapOnHomeBehavior = mContext.getResources().getInteger(
+ mDoubleTapOnHomeBehavior = res.getInteger(
com.android.internal.R.integer.config_doubleTapOnHomeBehavior);
if (mDoubleTapOnHomeBehavior < DOUBLE_TAP_HOME_NOTHING ||
mDoubleTapOnHomeBehavior > DOUBLE_TAP_HOME_RECENT_SYSTEM_UI) {
@@ -5337,7 +5357,7 @@
case KeyEvent.KEYCODE_WINDOW: {
if (mShortPressWindowBehavior == SHORT_PRESS_WINDOW_PICTURE_IN_PICTURE) {
if (!down) {
- handlePipKey(event);
+ requestTvPictureInPicture(event);
}
result &= ~ACTION_PASS_TO_USER;
}
diff --git a/services/core/java/com/android/server/statusbar/StatusBarManagerService.java b/services/core/java/com/android/server/statusbar/StatusBarManagerService.java
index 2a1f46e..9e2ba95 100644
--- a/services/core/java/com/android/server/statusbar/StatusBarManagerService.java
+++ b/services/core/java/com/android/server/statusbar/StatusBarManagerService.java
@@ -491,6 +491,15 @@
}
@Override
+ public void requestTvPictureInPicture() {
+ if (mBar != null) {
+ try {
+ mBar.requestTvPictureInPicture();
+ } catch (RemoteException ex) {}
+ }
+ }
+
+ @Override
public void setCurrentUser(int newUserId) {
if (SPEW) Slog.d(TAG, "Setting current user to user " + newUserId);
mCurrentUserId = newUserId;