Merge "CTS: add tests for more complicated transformation"
diff --git a/tests/tests/accessibilityservice/AndroidManifest.xml b/tests/tests/accessibilityservice/AndroidManifest.xml
index 7c22b96..4039193 100644
--- a/tests/tests/accessibilityservice/AndroidManifest.xml
+++ b/tests/tests/accessibilityservice/AndroidManifest.xml
@@ -22,8 +22,6 @@
<uses-sdk android:minSdkVersion="18"
android:targetSdkVersion="18" />
- <uses-permission android:name="android.permission.DISABLE_KEYGUARD" />
-
<application android:theme="@android:style/Theme.Holo.NoActionBar" >
<uses-library android:name="android.test.runner"/>
diff --git a/tests/tests/accessibilityservice/res/layout/end_to_end_test.xml b/tests/tests/accessibilityservice/res/layout/end_to_end_test.xml
index 351ea24..79f87dc 100644
--- a/tests/tests/accessibilityservice/res/layout/end_to_end_test.xml
+++ b/tests/tests/accessibilityservice/res/layout/end_to_end_test.xml
@@ -18,7 +18,8 @@
<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
android:layout_width="fill_parent"
android:layout_height="fill_parent"
- android:gravity="center" android:orientation="vertical">
+ android:gravity="center"
+ android:orientation="vertical">
<ListView android:id="@+id/listview"
android:layout_width="fill_parent"
diff --git a/tests/tests/accessibilityservice/res/layout/query_window_test.xml b/tests/tests/accessibilityservice/res/layout/query_window_test.xml
index 88d4cc9..001b024 100644
--- a/tests/tests/accessibilityservice/res/layout/query_window_test.xml
+++ b/tests/tests/accessibilityservice/res/layout/query_window_test.xml
@@ -17,6 +17,7 @@
*/
-->
<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
+ android:id="@+id/added_content"
android:orientation="vertical"
android:layout_width="fill_parent"
android:layout_height="wrap_content"
diff --git a/tests/tests/accessibilityservice/src/android/accessibilityservice/cts/AccessibilityActivityTestCase.java b/tests/tests/accessibilityservice/src/android/accessibilityservice/cts/AccessibilityActivityTestCase.java
index c7e4f4c..c121071 100644
--- a/tests/tests/accessibilityservice/src/android/accessibilityservice/cts/AccessibilityActivityTestCase.java
+++ b/tests/tests/accessibilityservice/src/android/accessibilityservice/cts/AccessibilityActivityTestCase.java
@@ -22,12 +22,13 @@
import android.test.ActivityInstrumentationTestCase2;
import android.view.accessibility.AccessibilityEvent;
+import java.util.concurrent.TimeoutException;
+
/**
* Base text case for testing accessibility APIs by instrumenting an Activity.
*/
public abstract class AccessibilityActivityTestCase<T extends Activity>
extends ActivityInstrumentationTestCase2<T> {
-
/**
* Timeout required for pending Binder calls or event processing to
* complete.
@@ -37,7 +38,7 @@
/**
* The timeout since the last accessibility event to consider the device idle.
*/
- public static final long TIMEOUT_ACCESSIBILITY_STATE_IDLE = 200;
+ public static final long TIMEOUT_ACCESSIBILITY_STATE_IDLE = 500;
/**
* @param activityClass
@@ -55,11 +56,20 @@
info.flags &= ~AccessibilityServiceInfo.FLAG_INCLUDE_NOT_IMPORTANT_VIEWS;
getInstrumentation().getUiAutomation().setServiceInfo(info);
+ startActivityAndWaitForFirstEvent();
+
+ waitForIdle();
+ }
+
+ /**
+ * Waits for the UI do be idle.
+ *
+ * @throws TimeoutException if idle cannot be detected.
+ */
+ public void waitForIdle() throws TimeoutException {
getInstrumentation().getUiAutomation().waitForIdle(
TIMEOUT_ACCESSIBILITY_STATE_IDLE,
TIMEOUT_ASYNC_PROCESSING);
-
- startActivityAndWaitForFirstEvent();
}
/**
diff --git a/tests/tests/accessibilityservice/src/android/accessibilityservice/cts/AccessibilityEndToEndActivity.java b/tests/tests/accessibilityservice/src/android/accessibilityservice/cts/AccessibilityEndToEndActivity.java
index 12bcd96..157a3dc 100644
--- a/tests/tests/accessibilityservice/src/android/accessibilityservice/cts/AccessibilityEndToEndActivity.java
+++ b/tests/tests/accessibilityservice/src/android/accessibilityservice/cts/AccessibilityEndToEndActivity.java
@@ -18,7 +18,6 @@
import com.android.cts.accessibilityservice.R;
-import android.app.Activity;
import android.os.Bundle;
import android.view.View;
import android.view.ViewGroup;
@@ -28,11 +27,11 @@
import android.widget.TextView;
/**
- * This class is an {@link Activity} used to perform end-to-end
+ * This class is an {@link android.app.Activity} used to perform end-to-end
* testing of the accessibility feature by interaction with the
* UI widgets.
*/
-public class AccessibilityEndToEndActivity extends Activity {
+public class AccessibilityEndToEndActivity extends AccessibilityTestActivity {
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
diff --git a/tests/tests/accessibilityservice/src/android/accessibilityservice/cts/AccessibilityEndToEndTest.java b/tests/tests/accessibilityservice/src/android/accessibilityservice/cts/AccessibilityEndToEndTest.java
index 81db5be..39b116a 100644
--- a/tests/tests/accessibilityservice/src/android/accessibilityservice/cts/AccessibilityEndToEndTest.java
+++ b/tests/tests/accessibilityservice/src/android/accessibilityservice/cts/AccessibilityEndToEndTest.java
@@ -366,7 +366,6 @@
&& first.getCurrentItemIndex() == second.getCurrentItemIndex()
&& first.isEnabled() == second.isEnabled()
&& first.getFromIndex() == second.getFromIndex()
- && first.isFullScreen() == second.isFullScreen()
&& first.getItemCount() == second.getItemCount()
&& first.isPassword() == second.isPassword()
&& first.getRemovedCount() == second.getRemovedCount()
diff --git a/tests/tests/accessibilityservice/src/android/accessibilityservice/cts/AccessibilityFocusAndInputFocusSyncActivity.java b/tests/tests/accessibilityservice/src/android/accessibilityservice/cts/AccessibilityFocusAndInputFocusSyncActivity.java
index cdd2e42..46f04aa 100644
--- a/tests/tests/accessibilityservice/src/android/accessibilityservice/cts/AccessibilityFocusAndInputFocusSyncActivity.java
+++ b/tests/tests/accessibilityservice/src/android/accessibilityservice/cts/AccessibilityFocusAndInputFocusSyncActivity.java
@@ -14,7 +14,6 @@
package android.accessibilityservice.cts;
-import android.app.Activity;
import android.os.Bundle;
import com.android.cts.accessibilityservice.R;
@@ -26,7 +25,7 @@
* this activity is for verifying the the sync between accessibility
* and input focus.
*/
-public class AccessibilityFocusAndInputFocusSyncActivity extends Activity {
+public class AccessibilityFocusAndInputFocusSyncActivity extends AccessibilityTestActivity {
@Override
public void onCreate(Bundle savedInstanceState) {
diff --git a/tests/tests/accessibilityservice/src/android/accessibilityservice/cts/AccessibilityTestActivity.java b/tests/tests/accessibilityservice/src/android/accessibilityservice/cts/AccessibilityTestActivity.java
new file mode 100644
index 0000000..4d9b97d
--- /dev/null
+++ b/tests/tests/accessibilityservice/src/android/accessibilityservice/cts/AccessibilityTestActivity.java
@@ -0,0 +1,30 @@
+/**
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
+ * in compliance with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the
+ * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
+ * express or implied. See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.accessibilityservice.cts;
+
+import android.app.Activity;
+import android.os.Bundle;
+import android.view.WindowManager;
+
+public abstract class AccessibilityTestActivity extends Activity {
+
+ @Override
+ protected void onCreate(Bundle savedInstanceState) {
+ super.onCreate(savedInstanceState);
+
+ getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON
+ | WindowManager.LayoutParams.FLAG_TURN_SCREEN_ON);
+ }
+}
diff --git a/tests/tests/accessibilityservice/src/android/accessibilityservice/cts/AccessibilityTextTraversalActivity.java b/tests/tests/accessibilityservice/src/android/accessibilityservice/cts/AccessibilityTextTraversalActivity.java
index 7c9b45a..4a4a4ba 100644
--- a/tests/tests/accessibilityservice/src/android/accessibilityservice/cts/AccessibilityTextTraversalActivity.java
+++ b/tests/tests/accessibilityservice/src/android/accessibilityservice/cts/AccessibilityTextTraversalActivity.java
@@ -14,7 +14,6 @@
package android.accessibilityservice.cts;
-import android.app.Activity;
import android.os.Bundle;
import com.android.cts.accessibilityservice.R;
@@ -23,7 +22,7 @@
* Activity for testing the accessibility APIs for traversing the
* text content of a View at several granularities.
*/
-public class AccessibilityTextTraversalActivity extends Activity {
+public class AccessibilityTextTraversalActivity extends AccessibilityTestActivity {
@Override
public void onCreate(Bundle savedInstanceState) {
diff --git a/tests/tests/accessibilityservice/src/android/accessibilityservice/cts/AccessibilityTextTraversalTest.java b/tests/tests/accessibilityservice/src/android/accessibilityservice/cts/AccessibilityTextTraversalTest.java
index a53fdea..bdc0cd5 100644
--- a/tests/tests/accessibilityservice/src/android/accessibilityservice/cts/AccessibilityTextTraversalTest.java
+++ b/tests/tests/accessibilityservice/src/android/accessibilityservice/cts/AccessibilityTextTraversalTest.java
@@ -15,6 +15,7 @@
package android.accessibilityservice.cts;
import android.app.UiAutomation;
+import android.content.pm.PackageManager;
import android.os.Bundle;
import android.test.suitebuilder.annotation.MediumTest;
import android.text.Selection;
@@ -4486,6 +4487,11 @@
@MediumTest
public void testTextEditingActions() throws Exception {
+ if (!getActivity().getPackageManager().hasSystemFeature(
+ PackageManager.FEATURE_INPUT_METHODS)) {
+ return;
+ }
+
final EditText editText = (EditText) getActivity().findViewById(R.id.edit);
final String textContent = getString(R.string.foo_bar_baz);
@@ -4513,7 +4519,7 @@
AccessibilityNodeInfo.ACTION_SET_SELECTION, arguments));
// Copy the selected text.
- text.performAction( AccessibilityNodeInfo.ACTION_COPY);
+ text.performAction(AccessibilityNodeInfo.ACTION_COPY);
// Set selection at the end.
final int textLength = editText.getText().length();
diff --git a/tests/tests/accessibilityservice/src/android/accessibilityservice/cts/AccessibilityViewTreeReportingActivity.java b/tests/tests/accessibilityservice/src/android/accessibilityservice/cts/AccessibilityViewTreeReportingActivity.java
index 5aaa067..6aa4f44 100644
--- a/tests/tests/accessibilityservice/src/android/accessibilityservice/cts/AccessibilityViewTreeReportingActivity.java
+++ b/tests/tests/accessibilityservice/src/android/accessibilityservice/cts/AccessibilityViewTreeReportingActivity.java
@@ -14,7 +14,6 @@
package android.accessibilityservice.cts;
-import android.app.Activity;
import android.os.Bundle;
import com.android.cts.accessibilityservice.R;
@@ -26,7 +25,7 @@
* this activity is for verifying the hierarchical movement of the
* accessibility focus.
*/
-public class AccessibilityViewTreeReportingActivity extends Activity {
+public class AccessibilityViewTreeReportingActivity extends AccessibilityTestActivity {
@Override
public void onCreate(Bundle savedInstanceState) {
diff --git a/tests/tests/accessibilityservice/src/android/accessibilityservice/cts/AccessibilityWindowQueryActivity.java b/tests/tests/accessibilityservice/src/android/accessibilityservice/cts/AccessibilityWindowQueryActivity.java
index 698989c..aa66a45 100644
--- a/tests/tests/accessibilityservice/src/android/accessibilityservice/cts/AccessibilityWindowQueryActivity.java
+++ b/tests/tests/accessibilityservice/src/android/accessibilityservice/cts/AccessibilityWindowQueryActivity.java
@@ -14,7 +14,6 @@
package android.accessibilityservice.cts;
-import android.app.Activity;
import android.os.Bundle;
import android.view.View;
@@ -28,7 +27,7 @@
* requesting an action to be performed on a given view from an
* AccessibilityService.
*/
-public class AccessibilityWindowQueryActivity extends Activity {
+public class AccessibilityWindowQueryActivity extends AccessibilityTestActivity {
@Override
public void onCreate(Bundle savedInstanceState) {
diff --git a/tests/tests/accessibilityservice/src/android/accessibilityservice/cts/AccessibilityWindowQueryTest.java b/tests/tests/accessibilityservice/src/android/accessibilityservice/cts/AccessibilityWindowQueryTest.java
index b8d543d..ded27ff 100644
--- a/tests/tests/accessibilityservice/src/android/accessibilityservice/cts/AccessibilityWindowQueryTest.java
+++ b/tests/tests/accessibilityservice/src/android/accessibilityservice/cts/AccessibilityWindowQueryTest.java
@@ -27,7 +27,6 @@
import android.accessibilityservice.AccessibilityServiceInfo;
import android.app.UiAutomation;
import android.graphics.Rect;
-import android.os.SystemClock;
import android.test.suitebuilder.annotation.MediumTest;
import android.view.accessibility.AccessibilityEvent;
import android.view.accessibility.AccessibilityNodeInfo;
@@ -624,7 +623,7 @@
AccessibilityService.GLOBAL_ACTION_BACK));
// Sleep a bit so the UI is settles.
- SystemClock.sleep(3000);
+ waitForIdle();
}
@MediumTest
@@ -633,7 +632,7 @@
AccessibilityService.GLOBAL_ACTION_HOME));
// Sleep a bit so the UI is settles.
- SystemClock.sleep(3000);
+ waitForIdle();
}
@MediumTest
@@ -643,14 +642,14 @@
AccessibilityService.GLOBAL_ACTION_RECENTS));
// Sleep a bit so the UI is settles.
- SystemClock.sleep(3000);
+ waitForIdle();
// Clean up.
getInstrumentation().getUiAutomation().performGlobalAction(
AccessibilityService.GLOBAL_ACTION_BACK);
// Sleep a bit so the UI is settles.
- SystemClock.sleep(3000);
+ waitForIdle();
}
@MediumTest
@@ -660,14 +659,14 @@
AccessibilityService.GLOBAL_ACTION_NOTIFICATIONS));
// Sleep a bit so the UI is settles.
- SystemClock.sleep(3000);
+ waitForIdle();
// Clean up.
assertTrue(getInstrumentation().getUiAutomation().performGlobalAction(
AccessibilityService.GLOBAL_ACTION_BACK));
// Sleep a bit so the UI is settles.
- SystemClock.sleep(3000);
+ waitForIdle();
}
@MediumTest
@@ -677,14 +676,14 @@
AccessibilityService.GLOBAL_ACTION_QUICK_SETTINGS));
// Sleep a bit so the UI is settles.
- SystemClock.sleep(3000);
+ waitForIdle();
// Clean up.
getInstrumentation().getUiAutomation().performGlobalAction(
AccessibilityService.GLOBAL_ACTION_BACK);
// Sleep a bit so the UI is settles.
- SystemClock.sleep(3000);
+ waitForIdle();
}
@MediumTest
@@ -741,9 +740,6 @@
// make list of expected nodes
List<String> classNameAndTextList = new ArrayList<String>();
- classNameAndTextList.add("android.widget.FrameLayout");
- classNameAndTextList.add("android.widget.LinearLayout");
- classNameAndTextList.add("android.widget.FrameLayout");
classNameAndTextList.add("android.widget.LinearLayout");
classNameAndTextList.add("android.widget.LinearLayout");
classNameAndTextList.add("android.widget.LinearLayout");
@@ -758,6 +754,9 @@
classNameAndTextList.add("android.widget.ButtonB8");
classNameAndTextList.add("android.widget.ButtonB9");
+ String contentViewIdResName = "com.android.cts.accessibilityservice:id/added_content";
+ boolean verifyContent = false;
+
Queue<AccessibilityNodeInfo> fringe = new LinkedList<AccessibilityNodeInfo>();
fringe.add(root);
@@ -765,13 +764,20 @@
while (!fringe.isEmpty()) {
AccessibilityNodeInfo current = fringe.poll();
- CharSequence text = current.getText();
- String receivedClassNameAndText = current.getClassName().toString()
- + ((text != null) ? text.toString() : "");
- String expectedClassNameAndText = classNameAndTextList.remove(0);
+ if (!verifyContent
+ && contentViewIdResName.equals(current.getViewIdResourceName())) {
+ verifyContent = true;
+ }
- assertEquals("Did not get the expected node info",
- expectedClassNameAndText, receivedClassNameAndText);
+ if (verifyContent) {
+ CharSequence text = current.getText();
+ String receivedClassNameAndText = current.getClassName().toString()
+ + ((text != null) ? text.toString() : "");
+ String expectedClassNameAndText = classNameAndTextList.remove(0);
+
+ assertEquals("Did not get the expected node info",
+ expectedClassNameAndText, receivedClassNameAndText);
+ }
final int childCount = current.getChildCount();
for (int i = 0; i < childCount; i++) {
diff --git a/tests/tests/display/src/android/display/cts/VirtualDisplayTest.java b/tests/tests/display/src/android/display/cts/VirtualDisplayTest.java
index f452f50..f2f859a 100644
--- a/tests/tests/display/src/android/display/cts/VirtualDisplayTest.java
+++ b/tests/tests/display/src/android/display/cts/VirtualDisplayTest.java
@@ -29,6 +29,7 @@
import android.os.Bundle;
import android.os.Handler;
import android.os.Looper;
+import android.os.HandlerThread;
import android.os.SystemClock;
import android.test.AndroidTestCase;
import android.util.DisplayMetrics;
@@ -72,6 +73,8 @@
private ImageReader mImageReader;
private Surface mSurface;
private ImageListener mImageListener;
+ private HandlerThread mCheckThread;
+ private Handler mCheckHandler;
@Override
protected void setUp() throws Exception {
@@ -80,11 +83,15 @@
mDisplayManager = (DisplayManager)mContext.getSystemService(Context.DISPLAY_SERVICE);
mHandler = new Handler(Looper.getMainLooper());
mImageListener = new ImageListener();
+ // thread for image checking
+ mCheckThread = new HandlerThread("TestHandler");
+ mCheckThread.start();
+ mCheckHandler = new Handler(mCheckThread.getLooper());
mImageReaderLock.lock();
try {
mImageReader = ImageReader.newInstance(WIDTH, HEIGHT, PixelFormat.RGBA_8888, 2);
- mImageReader.setOnImageAvailableListener(mImageListener, mHandler);
+ mImageReader.setOnImageAvailableListener(mImageListener, mCheckHandler);
mSurface = mImageReader.getSurface();
} finally {
mImageReaderLock.unlock();
@@ -94,7 +101,6 @@
@Override
protected void tearDown() throws Exception {
super.tearDown();
-
mImageReaderLock.lock();
try {
mImageReader.close();
@@ -103,6 +109,7 @@
} finally {
mImageReaderLock.unlock();
}
+ mCheckThread.quit();
}
/**
diff --git a/tests/tests/graphics/src/android/graphics/cts/PictureTest.java b/tests/tests/graphics/src/android/graphics/cts/PictureTest.java
index 81f053d..17e9b63 100644
--- a/tests/tests/graphics/src/android/graphics/cts/PictureTest.java
+++ b/tests/tests/graphics/src/android/graphics/cts/PictureTest.java
@@ -24,18 +24,88 @@
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.Color;
+import android.graphics.Matrix;
import android.graphics.Paint;
import android.graphics.Picture;
+import android.graphics.Rect;
import android.graphics.Paint.Style;
-
+import android.graphics.Region.Op;
public class PictureTest extends TestCase {
private static final int TEST_WIDTH = 4; // must be >= 2
private static final int TEST_HEIGHT = 3; // must >= 2
- public void testPicture() throws Exception {
+ private final Rect mClipRect = new Rect(0, 0, 2, 2);
+ // This method tests out some edge cases w.r.t. Picture creation.
+ // In particular, this test verifies that, in the following situations,
+ // the created picture (effectively) has balanced saves and restores:
+ // - copy constructed picture from actively recording picture
+ // - writeToStream/createFromStream created picture from actively recording picture
+ // - actively recording picture after draw call
+ public void testSaveRestoreBalance() throws Exception {
+ Picture original = new Picture();
+ Canvas canvas = original.beginRecording(TEST_WIDTH, TEST_HEIGHT);
+ assertNotNull(canvas);
+ createImbalance(canvas);
+
+ int expectedSaveCount = canvas.getSaveCount();
+
+ Picture copy = new Picture(original);
+ checkBalance(copy);
+
+ assertEquals(expectedSaveCount, canvas.getSaveCount());
+
+ ByteArrayOutputStream bout = new ByteArrayOutputStream();
+ original.writeToStream(bout);
+
+ assertEquals(expectedSaveCount, canvas.getSaveCount());
+
+ Picture serialized = Picture.createFromStream(new ByteArrayInputStream(bout.toByteArray()));
+ // The serialization/deserialization process will balance the saves and restores
+ checkBalance(serialized);
+
+ assertEquals(expectedSaveCount, canvas.getSaveCount());
+
+ Bitmap bitmap = Bitmap.createBitmap(TEST_WIDTH, TEST_HEIGHT, Bitmap.Config.ARGB_8888);
+ Canvas drawDest = new Canvas(bitmap);
+ original.draw(drawDest);
+ checkBalance(original);
+ }
+
+ // Add an extra save with a transform and clip
+ private void createImbalance(Canvas canvas) {
+ canvas.save();
+ canvas.clipRect(mClipRect, Op.REPLACE);
+ canvas.translate(1.0f, 1.0f);
+ Paint paint = new Paint();
+ paint.setColor(Color.GREEN);
+ canvas.drawRect(0, 0, 10, 10, paint);
+ }
+
+ private void checkBalance(Picture picture) {
+ Bitmap bitmap = Bitmap.createBitmap(TEST_WIDTH, TEST_HEIGHT, Bitmap.Config.ARGB_8888);
+ Canvas canvas = new Canvas(bitmap);
+
+ int beforeSaveCount = canvas.getSaveCount();
+
+ final Matrix beforeMatrix = canvas.getMatrix();
+
+ canvas.drawPicture(picture);
+
+ assertEquals(beforeSaveCount, canvas.getSaveCount());
+
+ assertTrue(beforeMatrix.equals(canvas.getMatrix()));
+
+ Rect afterClip = new Rect();
+
+ assertTrue(canvas.getClipBounds(afterClip));
+ assertEquals(TEST_WIDTH, afterClip.width());
+ assertEquals(TEST_HEIGHT, afterClip.height());
+ }
+
+ public void testPicture() throws Exception {
Picture picture = new Picture();
ByteArrayOutputStream bout = new ByteArrayOutputStream();
diff --git a/tests/tests/hardware/src/android/hardware/camera2/cts/AllocationTest.java b/tests/tests/hardware/src/android/hardware/camera2/cts/AllocationTest.java
index 442a7a5..8baecac 100644
--- a/tests/tests/hardware/src/android/hardware/camera2/cts/AllocationTest.java
+++ b/tests/tests/hardware/src/android/hardware/camera2/cts/AllocationTest.java
@@ -32,6 +32,9 @@
import android.hardware.camera2.CameraMetadata;
import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.CaptureResult;
+import android.hardware.camera2.TotalCaptureResult;
+import android.hardware.camera2.params.ColorSpaceTransform;
+import android.hardware.camera2.params.RggbChannelVector;
import android.util.Size;
import android.hardware.camera2.cts.helpers.MaybeNull;
import android.hardware.camera2.cts.helpers.StaticMetadata;
@@ -148,14 +151,15 @@
// Identity transform
request.set(CaptureRequest.COLOR_CORRECTION_TRANSFORM,
- new Rational[] {
+ new ColorSpaceTransform(new Rational[] {
ONE, ZERO, ZERO,
ZERO, ONE, ZERO,
ZERO, ZERO, ONE
- });
+ }));
// Identity gains
- request.set(CaptureRequest.COLOR_CORRECTION_GAINS, new float[] { 1.0f, 1.0f, 1.0f, 1.0f });
+ request.set(CaptureRequest.COLOR_CORRECTION_GAINS,
+ new RggbChannelVector(1.0f, 1.0f, 1.0f, 1.0f ));
request.set(CaptureRequest.TONEMAP_MODE, CaptureRequest.TONEMAP_MODE_FAST);
}
@@ -378,7 +382,7 @@
mCamera.capture(request, new CameraDevice.CaptureListener() {
@Override
public void onCaptureCompleted(CameraDevice camera, CaptureRequest request,
- CaptureResult result) {
+ TotalCaptureResult result) {
if (VERBOSE) Log.v(TAG, "Capture completed");
}
}, mHandler);
diff --git a/tests/tests/hardware/src/android/hardware/camera2/cts/CameraCharacteristicsTest.java b/tests/tests/hardware/src/android/hardware/camera2/cts/CameraCharacteristicsTest.java
index ac5c889..effb6ba 100644
--- a/tests/tests/hardware/src/android/hardware/camera2/cts/CameraCharacteristicsTest.java
+++ b/tests/tests/hardware/src/android/hardware/camera2/cts/CameraCharacteristicsTest.java
@@ -287,7 +287,7 @@
}
}
- public void testCameraCharacteristicsAndroidControlMaxRegions() throws Exception {
+ public void testCameraCharacteristicsAndroidControlMaxRegionsAe() throws Exception {
String[] ids = mCameraManager.getCameraIdList();
for (int i = 0; i < ids.length; i++) {
CameraCharacteristics props = mCameraManager.getCameraCharacteristics(ids[i]);
@@ -296,14 +296,60 @@
{
- assertNotNull("Invalid property: android.control.maxRegions",
- props.get(CameraCharacteristics.CONTROL_MAX_REGIONS));
+ assertNotNull("Invalid property: android.control.maxRegionsAe",
+ props.get(CameraCharacteristics.CONTROL_MAX_REGIONS_AE));
List<Key<?>> allKeys = props.getKeys();
assertNotNull(String.format("Can't get camera characteristics keys from: ID %s",
ids[i], props));
- assertTrue("Key not in keys list: android.control.maxRegions", allKeys.contains(
- CameraCharacteristics.CONTROL_MAX_REGIONS));
+ assertTrue("Key not in keys list: android.control.maxRegionsAe", allKeys.contains(
+ CameraCharacteristics.CONTROL_MAX_REGIONS_AE));
+
+ }
+
+ }
+ }
+
+ public void testCameraCharacteristicsAndroidControlMaxRegionsAwb() throws Exception {
+ String[] ids = mCameraManager.getCameraIdList();
+ for (int i = 0; i < ids.length; i++) {
+ CameraCharacteristics props = mCameraManager.getCameraCharacteristics(ids[i]);
+ assertNotNull(String.format("Can't get camera characteristics from: ID %s", ids[i]),
+ props);
+
+ {
+
+ assertNotNull("Invalid property: android.control.maxRegionsAwb",
+ props.get(CameraCharacteristics.CONTROL_MAX_REGIONS_AWB));
+
+ List<Key<?>> allKeys = props.getKeys();
+ assertNotNull(String.format("Can't get camera characteristics keys from: ID %s",
+ ids[i], props));
+ assertTrue("Key not in keys list: android.control.maxRegionsAwb", allKeys.contains(
+ CameraCharacteristics.CONTROL_MAX_REGIONS_AWB));
+
+ }
+
+ }
+ }
+
+ public void testCameraCharacteristicsAndroidControlMaxRegionsAf() throws Exception {
+ String[] ids = mCameraManager.getCameraIdList();
+ for (int i = 0; i < ids.length; i++) {
+ CameraCharacteristics props = mCameraManager.getCameraCharacteristics(ids[i]);
+ assertNotNull(String.format("Can't get camera characteristics from: ID %s", ids[i]),
+ props);
+
+ {
+
+ assertNotNull("Invalid property: android.control.maxRegionsAf",
+ props.get(CameraCharacteristics.CONTROL_MAX_REGIONS_AF));
+
+ List<Key<?>> allKeys = props.getKeys();
+ assertNotNull(String.format("Can't get camera characteristics keys from: ID %s",
+ ids[i], props));
+ assertTrue("Key not in keys list: android.control.maxRegionsAf", allKeys.contains(
+ CameraCharacteristics.CONTROL_MAX_REGIONS_AF));
}
@@ -567,29 +613,6 @@
}
}
- public void testCameraCharacteristicsAndroidLensInfoShadingMapSize() throws Exception {
- String[] ids = mCameraManager.getCameraIdList();
- for (int i = 0; i < ids.length; i++) {
- CameraCharacteristics props = mCameraManager.getCameraCharacteristics(ids[i]);
- assertNotNull(String.format("Can't get camera characteristics from: ID %s", ids[i]),
- props);
-
- {
-
- assertNotNull("Invalid property: android.lens.info.shadingMapSize",
- props.get(CameraCharacteristics.LENS_INFO_SHADING_MAP_SIZE));
-
- List<Key<?>> allKeys = props.getKeys();
- assertNotNull(String.format("Can't get camera characteristics keys from: ID %s",
- ids[i], props));
- assertTrue("Key not in keys list: android.lens.info.shadingMapSize", allKeys.contains(
- CameraCharacteristics.LENS_INFO_SHADING_MAP_SIZE));
-
- }
-
- }
- }
-
public void testCameraCharacteristicsAndroidLensInfoFocusDistanceCalibration() throws Exception {
String[] ids = mCameraManager.getCameraIdList();
for (int i = 0; i < ids.length; i++) {
@@ -636,7 +659,7 @@
}
}
- public void testCameraCharacteristicsAndroidRequestMaxNumOutputStreams() throws Exception {
+ public void testCameraCharacteristicsAndroidRequestMaxNumOutputRaw() throws Exception {
String[] ids = mCameraManager.getCameraIdList();
for (int i = 0; i < ids.length; i++) {
CameraCharacteristics props = mCameraManager.getCameraCharacteristics(ids[i]);
@@ -645,14 +668,60 @@
{
- assertNotNull("Invalid property: android.request.maxNumOutputStreams",
- props.get(CameraCharacteristics.REQUEST_MAX_NUM_OUTPUT_STREAMS));
+ assertNotNull("Invalid property: android.request.maxNumOutputRaw",
+ props.get(CameraCharacteristics.REQUEST_MAX_NUM_OUTPUT_RAW));
List<Key<?>> allKeys = props.getKeys();
assertNotNull(String.format("Can't get camera characteristics keys from: ID %s",
ids[i], props));
- assertTrue("Key not in keys list: android.request.maxNumOutputStreams", allKeys.contains(
- CameraCharacteristics.REQUEST_MAX_NUM_OUTPUT_STREAMS));
+ assertTrue("Key not in keys list: android.request.maxNumOutputRaw", allKeys.contains(
+ CameraCharacteristics.REQUEST_MAX_NUM_OUTPUT_RAW));
+
+ }
+
+ }
+ }
+
+ public void testCameraCharacteristicsAndroidRequestMaxNumOutputProc() throws Exception {
+ String[] ids = mCameraManager.getCameraIdList();
+ for (int i = 0; i < ids.length; i++) {
+ CameraCharacteristics props = mCameraManager.getCameraCharacteristics(ids[i]);
+ assertNotNull(String.format("Can't get camera characteristics from: ID %s", ids[i]),
+ props);
+
+ {
+
+ assertNotNull("Invalid property: android.request.maxNumOutputProc",
+ props.get(CameraCharacteristics.REQUEST_MAX_NUM_OUTPUT_PROC));
+
+ List<Key<?>> allKeys = props.getKeys();
+ assertNotNull(String.format("Can't get camera characteristics keys from: ID %s",
+ ids[i], props));
+ assertTrue("Key not in keys list: android.request.maxNumOutputProc", allKeys.contains(
+ CameraCharacteristics.REQUEST_MAX_NUM_OUTPUT_PROC));
+
+ }
+
+ }
+ }
+
+ public void testCameraCharacteristicsAndroidRequestMaxNumOutputProcStalling() throws Exception {
+ String[] ids = mCameraManager.getCameraIdList();
+ for (int i = 0; i < ids.length; i++) {
+ CameraCharacteristics props = mCameraManager.getCameraCharacteristics(ids[i]);
+ assertNotNull(String.format("Can't get camera characteristics from: ID %s", ids[i]),
+ props);
+
+ {
+
+ assertNotNull("Invalid property: android.request.maxNumOutputProcStalling",
+ props.get(CameraCharacteristics.REQUEST_MAX_NUM_OUTPUT_PROC_STALLING));
+
+ List<Key<?>> allKeys = props.getKeys();
+ assertNotNull(String.format("Can't get camera characteristics keys from: ID %s",
+ ids[i], props));
+ assertTrue("Key not in keys list: android.request.maxNumOutputProcStalling", allKeys.contains(
+ CameraCharacteristics.REQUEST_MAX_NUM_OUTPUT_PROC_STALLING));
}
diff --git a/tests/tests/hardware/src/android/hardware/camera2/cts/CameraDeviceTest.java b/tests/tests/hardware/src/android/hardware/camera2/cts/CameraDeviceTest.java
index 6c75790..d61f667 100644
--- a/tests/tests/hardware/src/android/hardware/camera2/cts/CameraDeviceTest.java
+++ b/tests/tests/hardware/src/android/hardware/camera2/cts/CameraDeviceTest.java
@@ -28,9 +28,11 @@
import android.hardware.camera2.CaptureFailure;
import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.CaptureResult;
+import android.hardware.camera2.TotalCaptureResult;
import android.hardware.camera2.cts.testcases.Camera2AndroidTestCase;
import android.os.SystemClock;
import android.util.Log;
+import android.util.Range;
import android.view.Surface;
import com.android.ex.camera2.blocking.BlockingStateListener;
@@ -438,14 +440,14 @@
}
private class IsCaptureResultNotEmpty
- extends ArgumentMatcher<CaptureResult> {
+ extends ArgumentMatcher<TotalCaptureResult> {
@Override
public boolean matches(Object obj) {
/**
* Do the simple verification here. Only verify the timestamp for now.
* TODO: verify more required capture result metadata fields.
*/
- CaptureResult result = (CaptureResult) obj;
+ TotalCaptureResult result = (TotalCaptureResult) obj;
Long timeStamp = result.get(CaptureResult.SENSOR_TIMESTAMP);
if (timeStamp != null && timeStamp.longValue() > 0L) {
return true;
@@ -638,30 +640,20 @@
private void checkFpsRange(CaptureRequest.Builder request, int template,
CameraCharacteristics props) {
- CaptureRequest.Key<int[]> fpsRangeKey = CONTROL_AE_TARGET_FPS_RANGE;
- int[] fpsRange;
+ CaptureRequest.Key<Range<Integer>> fpsRangeKey = CONTROL_AE_TARGET_FPS_RANGE;
+ Range<Integer> fpsRange;
if ((fpsRange = mCollector.expectKeyValueNotNull(request, fpsRangeKey)) == null) {
return;
}
- // TODO: Use generated array dimensions
- final int CONTROL_AE_TARGET_FPS_RANGE_SIZE = 2;
- final int CONTROL_AE_TARGET_FPS_RANGE_MIN = 0;
- final int CONTROL_AE_TARGET_FPS_RANGE_MAX = 1;
-
- String cause = "Failed with fps range size check";
- if (!mCollector.expectEquals(cause, CONTROL_AE_TARGET_FPS_RANGE_SIZE, fpsRange.length)) {
- return;
- }
-
- int minFps = fpsRange[CONTROL_AE_TARGET_FPS_RANGE_MIN];
- int maxFps = fpsRange[CONTROL_AE_TARGET_FPS_RANGE_MAX];
- int[] availableFpsRange = props
+ int minFps = fpsRange.getLower();
+ int maxFps = fpsRange.getUpper();
+ Range<Integer>[] availableFpsRange = props
.get(CameraCharacteristics.CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES);
boolean foundRange = false;
- for (int i = 0; i < availableFpsRange.length; i += CONTROL_AE_TARGET_FPS_RANGE_SIZE) {
- if (minFps == availableFpsRange[i + CONTROL_AE_TARGET_FPS_RANGE_MIN]
- && maxFps == availableFpsRange[i + CONTROL_AE_TARGET_FPS_RANGE_MAX]) {
+ for (int i = 0; i < availableFpsRange.length; i += 1) {
+ if (minFps == availableFpsRange[i].getLower()
+ && maxFps == availableFpsRange[i].getUpper()) {
foundRange = true;
break;
}
@@ -699,7 +691,7 @@
}
int targetAfMode = CaptureRequest.CONTROL_AF_MODE_AUTO;
- byte[] availableAfMode = props.get(CameraCharacteristics.CONTROL_AF_AVAILABLE_MODES);
+ int[] availableAfMode = props.get(CameraCharacteristics.CONTROL_AF_AVAILABLE_MODES);
if (template == CameraDevice.TEMPLATE_PREVIEW ||
template == CameraDevice.TEMPLATE_STILL_CAPTURE ||
template == CameraDevice.TEMPLATE_ZERO_SHUTTER_LAG) {
@@ -807,7 +799,7 @@
mCollector.expectKeyValueNotNull(request, LENS_FOCAL_LENGTH);
}
- byte[] availableOIS =
+ int[] availableOIS =
props.get(CameraCharacteristics.LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION);
if (availableOIS.length > 1) {
mCollector.expectKeyValueNotNull(request, LENS_OPTICAL_STABILIZATION_MODE);
@@ -834,26 +826,26 @@
request, COLOR_CORRECTION_MODE,
CaptureRequest.COLOR_CORRECTION_MODE_TRANSFORM_MATRIX);
- List<Byte> availableEdgeModes =
+ List<Integer> availableEdgeModes =
Arrays.asList(toObject(mStaticInfo.getAvailableEdgeModesChecked()));
- if (availableEdgeModes.contains((byte) CaptureRequest.EDGE_MODE_HIGH_QUALITY)) {
+ if (availableEdgeModes.contains(CaptureRequest.EDGE_MODE_HIGH_QUALITY)) {
mCollector.expectKeyValueEquals(request, EDGE_MODE,
CaptureRequest.EDGE_MODE_HIGH_QUALITY);
- } else if (availableEdgeModes.contains((byte) CaptureRequest.EDGE_MODE_FAST)) {
+ } else if (availableEdgeModes.contains(CaptureRequest.EDGE_MODE_FAST)) {
mCollector.expectKeyValueEquals(request, EDGE_MODE, CaptureRequest.EDGE_MODE_FAST);
} else {
mCollector.expectKeyValueEquals(request, EDGE_MODE, CaptureRequest.EDGE_MODE_OFF);
}
- List<Byte> availableNoiseReductionModes =
+ List<Integer> availableNoiseReductionModes =
Arrays.asList(toObject(mStaticInfo.getAvailableNoiseReductionModesChecked()));
if (availableNoiseReductionModes
- .contains((byte) CaptureRequest.NOISE_REDUCTION_MODE_HIGH_QUALITY)) {
+ .contains(CaptureRequest.NOISE_REDUCTION_MODE_HIGH_QUALITY)) {
mCollector.expectKeyValueEquals(
request, NOISE_REDUCTION_MODE,
CaptureRequest.NOISE_REDUCTION_MODE_HIGH_QUALITY);
} else if (availableNoiseReductionModes
- .contains((byte) CaptureRequest.NOISE_REDUCTION_MODE_FAST)) {
+ .contains(CaptureRequest.NOISE_REDUCTION_MODE_FAST)) {
mCollector.expectKeyValueEquals(
request, NOISE_REDUCTION_MODE, CaptureRequest.NOISE_REDUCTION_MODE_FAST);
} else {
@@ -861,9 +853,9 @@
request, NOISE_REDUCTION_MODE, CaptureRequest.NOISE_REDUCTION_MODE_OFF);
}
- List<Byte> availableToneMapModes =
+ List<Integer> availableToneMapModes =
Arrays.asList(toObject(mStaticInfo.getAvailableToneMapModesChecked()));
- if (availableToneMapModes.contains((byte) CaptureRequest.TONEMAP_MODE_HIGH_QUALITY)) {
+ if (availableToneMapModes.contains(CaptureRequest.TONEMAP_MODE_HIGH_QUALITY)) {
mCollector.expectKeyValueEquals(request, TONEMAP_MODE,
CaptureRequest.TONEMAP_MODE_HIGH_QUALITY);
} else {
diff --git a/tests/tests/hardware/src/android/hardware/camera2/cts/CameraTestUtils.java b/tests/tests/hardware/src/android/hardware/camera2/cts/CameraTestUtils.java
index 850456d..52a486f 100644
--- a/tests/tests/hardware/src/android/hardware/camera2/cts/CameraTestUtils.java
+++ b/tests/tests/hardware/src/android/hardware/camera2/cts/CameraTestUtils.java
@@ -29,7 +29,9 @@
import android.hardware.camera2.CaptureFailure;
import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.CaptureResult;
+import android.hardware.camera2.TotalCaptureResult;
import android.util.Size;
+import android.hardware.camera2.params.MeteringRectangle;
import android.hardware.camera2.params.StreamConfigurationMap;
import android.media.Image;
import android.media.ImageReader;
@@ -139,7 +141,7 @@
@Override
public void onCaptureCompleted(CameraDevice camera, CaptureRequest request,
- CaptureResult result) {
+ TotalCaptureResult result) {
try {
mQueue.put(result);
} catch (InterruptedException e) {
@@ -155,7 +157,7 @@
@Override
public void onCaptureSequenceCompleted(CameraDevice camera, int sequenceId,
- int frameNumber) {
+ long frameNumber) {
}
public CaptureResult getCaptureResult(long timeout) {
@@ -880,21 +882,22 @@
/**
* Calculate output 3A region from the intersection of input 3A region and cropped region.
*
- * @param requestRegion The input 3A region [xmin, ymin, xmax, ymax, weight]
+ * @param requestRegions The input 3A regions
* @param cropRect The cropped region
- * @return expected 3A region output in capture result
+ * @return expected 3A regions output in capture result
*/
- public static int[] getExpectedOutputRegion(int[] requestRegion, Rect cropRect){
- Rect requestRect = new Rect(requestRegion[0], requestRegion[1],
- requestRegion[2], requestRegion[3]);
- Rect resultRect = new Rect();
- assertTrue("Input 3A region must intersect cropped region",
- resultRect.setIntersect(requestRect, cropRect));
- return new int[] {
- resultRect.left,
- resultRect.top,
- resultRect.right,
- resultRect.bottom,
- requestRegion[4]};
+ public static MeteringRectangle[] getExpectedOutputRegion(
+ MeteringRectangle[] requestRegions, Rect cropRect){
+ MeteringRectangle[] resultRegions = new MeteringRectangle[requestRegions.length];
+ for (int i = 0; i < requestRegions.length; i++) {
+ Rect requestRect = requestRegions[i].getRect();
+ Rect resultRect = new Rect();
+ assertTrue("Input 3A region must intersect cropped region",
+ resultRect.setIntersect(requestRect, cropRect));
+ resultRegions[i] = new MeteringRectangle(
+ resultRect,
+ requestRegions[i].getMeteringWeight());
+ }
+ return resultRegions;
}
}
diff --git a/tests/tests/hardware/src/android/hardware/camera2/cts/CaptureRequestTest.java b/tests/tests/hardware/src/android/hardware/camera2/cts/CaptureRequestTest.java
index add91a4..aa548ed 100644
--- a/tests/tests/hardware/src/android/hardware/camera2/cts/CaptureRequestTest.java
+++ b/tests/tests/hardware/src/android/hardware/camera2/cts/CaptureRequestTest.java
@@ -29,8 +29,15 @@
import android.hardware.camera2.CaptureResult;
import android.hardware.camera2.cts.CameraTestUtils.SimpleCaptureListener;
import android.hardware.camera2.cts.testcases.Camera2SurfaceViewTestCase;
+import android.hardware.camera2.params.ColorSpaceTransform;
import android.hardware.camera2.params.Face;
+import android.hardware.camera2.params.LensShadingMap;
+import android.hardware.camera2.params.MeteringRectangle;
+import android.hardware.camera2.params.RggbChannelVector;
+import android.hardware.camera2.params.TonemapCurve;
+
import android.util.Log;
+import android.util.Range;
import android.util.Rational;
import android.util.Size;
@@ -93,7 +100,6 @@
private final int INDEX_ALGORITHM_AE = 0;
private final int INDEX_ALGORITHM_AWB = 1;
private final int INDEX_ALGORITHM_AF = 2;
- private final int LENGTH_ALGORITHM_REGION = 5;
@Override
protected void setUp() throws Exception {
@@ -186,14 +192,13 @@
requestBuilder.set(CaptureRequest.STATISTICS_LENS_SHADING_MAP_MODE,
STATISTICS_LENS_SHADING_MAP_MODE_ON);
- Size mapSz = mStaticInfo.getCharacteristics().get(LENS_INFO_SHADING_MAP_SIZE);
Size previewSz =
getMaxPreviewSize(mCamera.getId(), mCameraManager, PREVIEW_SIZE_BOUND);
listener = new SimpleCaptureListener();
startPreview(requestBuilder, previewSz, listener);
- verifyShadingMap(listener, NUM_FRAMES_VERIFIED, mapSz, SHADING_MODE_OFF);
+ verifyShadingMap(listener, NUM_FRAMES_VERIFIED, SHADING_MODE_OFF);
// Shading map mode FAST, lensShadingMapMode ON, camera device
// should output valid maps.
@@ -203,7 +208,7 @@
startPreview(requestBuilder, previewSz, listener);
// Allow at most one lock OFF state as the exposure is changed once.
- verifyShadingMap(listener, NUM_FRAMES_VERIFIED, mapSz, SHADING_MODE_FAST);
+ verifyShadingMap(listener, NUM_FRAMES_VERIFIED, SHADING_MODE_FAST);
// Shading map mode HIGH_QUALITY, lensShadingMapMode ON, camera device
// should output valid maps.
@@ -212,7 +217,7 @@
listener = new SimpleCaptureListener();
startPreview(requestBuilder, previewSz, listener);
- verifyShadingMap(listener, NUM_FRAMES_VERIFIED, mapSz, SHADING_MODE_HIGH_QUALITY);
+ verifyShadingMap(listener, NUM_FRAMES_VERIFIED, SHADING_MODE_HIGH_QUALITY);
stopPreview();
} finally {
@@ -237,12 +242,12 @@
continue;
}
- byte[] modes = mStaticInfo.getAeAvailableAntiBandingModesChecked();
+ int[] modes = mStaticInfo.getAeAvailableAntiBandingModesChecked();
Size previewSz =
getMaxPreviewSize(mCamera.getId(), mCameraManager, PREVIEW_SIZE_BOUND);
- for (byte mode : modes) {
+ for (int mode : modes) {
antiBandingTestByMode(previewSz, mode);
}
} finally {
@@ -278,8 +283,8 @@
updatePreviewSurface(maxPreviewSz);
// Test aeMode and lock
- byte[] aeModes = mStaticInfo.getAeAvailableModesChecked();
- for (byte mode : aeModes) {
+ int[] aeModes = mStaticInfo.getAeAvailableModesChecked();
+ for (int mode : aeModes) {
aeModeAndLockTestByMode(mode);
}
} finally {
@@ -542,12 +547,12 @@
Size maxPrevSize = mOrderedPreviewSizes.get(0);
CaptureRequest.Builder requestBuilder =
mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
- byte[] availableModes = mStaticInfo.getAvailableNoiseReductionModesChecked();
+ int[] availableModes = mStaticInfo.getAvailableNoiseReductionModesChecked();
SimpleCaptureListener resultListener = new SimpleCaptureListener();
startPreview(requestBuilder, maxPrevSize, resultListener);
- for (byte mode : availableModes) {
- requestBuilder.set(CaptureRequest.NOISE_REDUCTION_MODE, (int)mode);
+ for (int mode : availableModes) {
+ requestBuilder.set(CaptureRequest.NOISE_REDUCTION_MODE, mode);
resultListener = new SimpleCaptureListener();
mCamera.setRepeatingRequest(requestBuilder.build(), resultListener, mHandler);
@@ -659,14 +664,14 @@
*/
private void edgeModesTestByCamera() throws Exception {
Size maxPrevSize = mOrderedPreviewSizes.get(0);
- byte[] edgeModes = mStaticInfo.getAvailableEdgeModesChecked();
+ int[] edgeModes = mStaticInfo.getAvailableEdgeModesChecked();
CaptureRequest.Builder requestBuilder =
mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
SimpleCaptureListener resultListener = new SimpleCaptureListener();
startPreview(requestBuilder, maxPrevSize, resultListener);
- for (byte mode : edgeModes) {
- requestBuilder.set(CaptureRequest.EDGE_MODE, (int)mode);
+ for (int mode : edgeModes) {
+ requestBuilder.set(CaptureRequest.EDGE_MODE, mode);
resultListener = new SimpleCaptureListener();
mCamera.setRepeatingRequest(requestBuilder.build(), resultListener, mHandler);
@@ -706,12 +711,15 @@
// TRANSFORM_MATRIX mode
// Only test unit gain and identity transform
- float[] UNIT_GAIN = {1.0f, 1.0f, 1.0f, 1.0f};
- Rational[] IDENTITY_TRANSFORM = {
+ RggbChannelVector UNIT_GAIN = new RggbChannelVector(1.0f, 1.0f, 1.0f, 1.0f);
+
+ ColorSpaceTransform IDENTITY_TRANSFORM = new ColorSpaceTransform(
+ new Rational[] {
ONE_R, ZERO_R, ZERO_R,
ZERO_R, ONE_R, ZERO_R,
ZERO_R, ZERO_R, ONE_R
- };
+ });
+
manualRequestBuilder.set(CaptureRequest.CONTROL_MODE, CaptureRequest.CONTROL_MODE_OFF);
manualRequestBuilder.set(CaptureRequest.COLOR_CORRECTION_MODE,
CaptureRequest.COLOR_CORRECTION_MODE_TRANSFORM_MATRIX);
@@ -720,11 +728,11 @@
request = manualRequestBuilder.build();
mCamera.capture(request, listener, mHandler);
result = listener.getCaptureResultForRequest(request, NUM_RESULTS_WAIT_TIMEOUT);
- float[] gains = result.get(CaptureResult.COLOR_CORRECTION_GAINS);
- Rational[] transform = result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM);
+ RggbChannelVector gains = result.get(CaptureResult.COLOR_CORRECTION_GAINS);
+ ColorSpaceTransform transform = result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM);
validateColorCorrectionResult(result);
mCollector.expectEquals("Color correction gain result/request mismatch",
- CameraTestUtils.toObject(UNIT_GAIN), CameraTestUtils.toObject(gains));
+ UNIT_GAIN, gains);
mCollector.expectEquals("Color correction gain result/request mismatch",
IDENTITY_TRANSFORM, transform);
@@ -748,27 +756,24 @@
}
private void validateColorCorrectionResult(CaptureResult result) {
- float[] ZERO_GAINS = {0, 0, 0, 0};
+ final RggbChannelVector ZERO_GAINS = new RggbChannelVector(0, 0, 0, 0);
final int TRANSFORM_SIZE = 9;
Rational[] zeroTransform = new Rational[TRANSFORM_SIZE];
Arrays.fill(zeroTransform, ZERO_R);
+ final ColorSpaceTransform ZERO_TRANSFORM = new ColorSpaceTransform(zeroTransform);
- float[] resultGain;
+ RggbChannelVector resultGain;
if ((resultGain = mCollector.expectKeyValueNotNull(result,
CaptureResult.COLOR_CORRECTION_GAINS)) != null) {
- mCollector.expectEquals("Color correction gain size in incorrect",
- ZERO_GAINS.length, resultGain.length);
mCollector.expectKeyValueNotEquals(result,
CaptureResult.COLOR_CORRECTION_GAINS, ZERO_GAINS);
}
- Rational[] resultTransform;
+ ColorSpaceTransform resultTransform;
if ((resultTransform = mCollector.expectKeyValueNotNull(result,
CaptureResult.COLOR_CORRECTION_TRANSFORM)) != null) {
- mCollector.expectEquals("Color correction transform size is incorrect",
- zeroTransform.length, resultTransform.length);
mCollector.expectKeyValueNotEquals(result,
- CaptureResult.COLOR_CORRECTION_TRANSFORM, zeroTransform);
+ CaptureResult.COLOR_CORRECTION_TRANSFORM, ZERO_TRANSFORM);
}
}
@@ -1101,19 +1106,18 @@
* Verify shading map for different shading modes.
*/
private void verifyShadingMap(SimpleCaptureListener listener, int numFramesVerified,
- Size mapSize, int shadingMode) throws Exception {
- int numElementsInMap = mapSize.getWidth() * mapSize.getHeight() * RGGB_COLOR_CHANNEL_COUNT;
- float[] unityMap = new float[numElementsInMap];
- Arrays.fill(unityMap, 1.0f);
+ int shadingMode) throws Exception {
for (int i = 0; i < numFramesVerified; i++) {
CaptureResult result = listener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS);
mCollector.expectEquals("Shading mode result doesn't match request",
shadingMode, result.get(CaptureResult.SHADING_MODE));
- float[] map = result.get(CaptureResult.STATISTICS_LENS_SHADING_MAP);
+ LensShadingMap mapObj = result.get(CaptureResult.STATISTICS_LENS_SHADING_CORRECTION_MAP);
+ assertNotNull("Map object must not be null", mapObj);
+ int numElementsInMap = mapObj.getGainFactorCount();
+ float[] map = new float[numElementsInMap];
+ mapObj.copyGainFactors(map, /*offset*/0);
assertNotNull("Map must not be null", map);
- assertTrue("Map size " + map.length + " must be " + numElementsInMap,
- map.length == numElementsInMap);
assertFalse(String.format(
"Map size %d should be less than %d", numElementsInMap, MAX_SHADING_MAP_SIZE),
numElementsInMap >= MAX_SHADING_MAP_SIZE);
@@ -1135,6 +1139,8 @@
assertEquals("Number of value in the map is " + badValueCnt + " out of "
+ numElementsInMap, /*expected*/0, /*actual*/badValueCnt);
} else if (shadingMode == CaptureRequest.SHADING_MODE_OFF) {
+ float[] unityMap = new float[numElementsInMap];
+ Arrays.fill(unityMap, 1.0f);
// shading mode is OFF, expect to receive a unity map.
assertTrue("Result map " + Arrays.toString(map) + " must be an unity map",
Arrays.equals(unityMap, map));
@@ -1151,15 +1157,15 @@
if (!mStaticInfo.isHardwareLevelFull()) {
return;
}
- byte[] faceDetectModes = mStaticInfo.getAvailableFaceDetectModesChecked();
+ int[] faceDetectModes = mStaticInfo.getAvailableFaceDetectModesChecked();
SimpleCaptureListener listener;
CaptureRequest.Builder requestBuilder =
mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
Size maxPreviewSz = mOrderedPreviewSizes.get(0); // Max preview size.
- for (byte mode : faceDetectModes) {
- requestBuilder.set(CaptureRequest.STATISTICS_FACE_DETECT_MODE, (int)mode);
+ for (int mode : faceDetectModes) {
+ requestBuilder.set(CaptureRequest.STATISTICS_FACE_DETECT_MODE, mode);
if (VERBOSE) {
Log.v(TAG, "Start testing face detection mode " + mode);
}
@@ -1272,17 +1278,17 @@
Size maxPreviewSz = mOrderedPreviewSizes.get(0); // Max preview size.
- byte[] toneMapModes = mStaticInfo.getAvailableToneMapModesChecked();
- for (byte mode : toneMapModes) {
- requestBuilder.set(CaptureRequest.TONEMAP_MODE, (int)mode);
+ int[] toneMapModes = mStaticInfo.getAvailableToneMapModesChecked();
+ for (int mode : toneMapModes) {
+ requestBuilder.set(CaptureRequest.TONEMAP_MODE, mode);
if (VERBOSE) {
Log.v(TAG, "Testing tonemap mode " + mode);
}
if (mode == CaptureRequest.TONEMAP_MODE_CONTRAST_CURVE) {
- requestBuilder.set(CaptureRequest.TONEMAP_CURVE_RED, TONEMAP_CURVE_LINEAR);
- requestBuilder.set(CaptureRequest.TONEMAP_CURVE_GREEN, TONEMAP_CURVE_LINEAR);
- requestBuilder.set(CaptureRequest.TONEMAP_CURVE_BLUE, TONEMAP_CURVE_LINEAR);
+ TonemapCurve tcLinear = new TonemapCurve(
+ TONEMAP_CURVE_LINEAR, TONEMAP_CURVE_LINEAR, TONEMAP_CURVE_LINEAR);
+ requestBuilder.set(CaptureRequest.TONEMAP_CURVE, tcLinear);
// Create a new listener for each run to avoid the results from one run spill
// into another run.
listener = new SimpleCaptureListener();
@@ -1290,9 +1296,9 @@
verifyToneMapModeResults(listener, NUM_FRAMES_VERIFIED, mode,
TONEMAP_CURVE_LINEAR);
- requestBuilder.set(CaptureRequest.TONEMAP_CURVE_RED, TONEMAP_CURVE_SRGB);
- requestBuilder.set(CaptureRequest.TONEMAP_CURVE_GREEN, TONEMAP_CURVE_SRGB);
- requestBuilder.set(CaptureRequest.TONEMAP_CURVE_BLUE, TONEMAP_CURVE_SRGB);
+ TonemapCurve tcSrgb = new TonemapCurve(
+ TONEMAP_CURVE_SRGB, TONEMAP_CURVE_SRGB, TONEMAP_CURVE_SRGB);
+ requestBuilder.set(CaptureRequest.TONEMAP_CURVE, tcSrgb);
// Create a new listener for each run to avoid the results from one run spill
// into another run.
listener = new SimpleCaptureListener();
@@ -1335,18 +1341,16 @@
CaptureResult result = listener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS);
mCollector.expectEquals("Capture result tonemap mode should match request", tonemapMode,
result.get(CaptureResult.TONEMAP_MODE));
- float[] mapRed = result.get(CaptureResult.TONEMAP_CURVE_RED);
- float[] mapGreen = result.get(CaptureResult.TONEMAP_CURVE_GREEN);
- float[] mapBlue = result.get(CaptureResult.TONEMAP_CURVE_BLUE);
- boolean redAvailable =
- mCollector.expectTrue("Tonemap curve red shouldn't be null for mode "
- + tonemapMode, mapRed != null);
- boolean greenAvailable =
- mCollector.expectTrue("Tonemap curve red shouldn't be null for mode "
- + tonemapMode, mapGreen != null);
- boolean blueAvailable =
- mCollector.expectTrue("Tonemap curve red shouldn't be null for mode "
- + tonemapMode, mapBlue != null);
+ TonemapCurve tc = getValueNotNull(result, CaptureResult.TONEMAP_CURVE);
+ int pointCount = tc.getPointCount(TonemapCurve.CHANNEL_RED);
+ float[] mapRed = new float[pointCount * TonemapCurve.POINT_SIZE];
+ pointCount = tc.getPointCount(TonemapCurve.CHANNEL_GREEN);
+ float[] mapGreen = new float[pointCount * TonemapCurve.POINT_SIZE];
+ pointCount = tc.getPointCount(TonemapCurve.CHANNEL_BLUE);
+ float[] mapBlue = new float[pointCount * TonemapCurve.POINT_SIZE];
+ tc.copyColorCurve(TonemapCurve.CHANNEL_RED, mapRed, 0);
+ tc.copyColorCurve(TonemapCurve.CHANNEL_GREEN, mapGreen, 0);
+ tc.copyColorCurve(TonemapCurve.CHANNEL_BLUE, mapBlue, 0);
if (tonemapMode == CaptureResult.TONEMAP_MODE_CONTRAST_CURVE) {
/**
* TODO: need figure out a good way to measure the difference
@@ -1356,24 +1360,18 @@
}
// Tonemap curve result availability and basic sanity check for all modes.
- if (redAvailable) {
- mCollector.expectValuesInRange("Tonemap curve red values are out of range",
- CameraTestUtils.toObject(mapRed), /*min*/ZERO, /*max*/ONE);
- mCollector.expectInRange("Tonemap curve red length is out of range",
- mapRed.length, MIN_TONEMAP_CURVE_POINTS, maxCurvePoints * 2);
- }
- if (greenAvailable) {
- mCollector.expectValuesInRange("Tonemap curve green values are out of range",
- CameraTestUtils.toObject(mapGreen), /*min*/ZERO, /*max*/ONE);
- mCollector.expectInRange("Tonemap curve green length is out of range",
- mapGreen.length, MIN_TONEMAP_CURVE_POINTS, maxCurvePoints * 2);
- }
- if (blueAvailable) {
- mCollector.expectValuesInRange("Tonemap curve blue values are out of range",
- CameraTestUtils.toObject(mapBlue), /*min*/ZERO, /*max*/ONE);
- mCollector.expectInRange("Tonemap curve blue length is out of range",
- mapBlue.length, MIN_TONEMAP_CURVE_POINTS, maxCurvePoints * 2);
- }
+ mCollector.expectValuesInRange("Tonemap curve red values are out of range",
+ CameraTestUtils.toObject(mapRed), /*min*/ZERO, /*max*/ONE);
+ mCollector.expectInRange("Tonemap curve red length is out of range",
+ mapRed.length, MIN_TONEMAP_CURVE_POINTS, maxCurvePoints * 2);
+ mCollector.expectValuesInRange("Tonemap curve green values are out of range",
+ CameraTestUtils.toObject(mapGreen), /*min*/ZERO, /*max*/ONE);
+ mCollector.expectInRange("Tonemap curve green length is out of range",
+ mapGreen.length, MIN_TONEMAP_CURVE_POINTS, maxCurvePoints * 2);
+ mCollector.expectValuesInRange("Tonemap curve blue values are out of range",
+ CameraTestUtils.toObject(mapBlue), /*min*/ZERO, /*max*/ONE);
+ mCollector.expectInRange("Tonemap curve blue length is out of range",
+ mapBlue.length, MIN_TONEMAP_CURVE_POINTS, maxCurvePoints * 2);
}
}
@@ -1386,20 +1384,20 @@
* </p>
*/
private void awbModeAndLockTestByCamera() throws Exception {
- byte[] awbModes = mStaticInfo.getAwbAvailableModesChecked();
+ int[] awbModes = mStaticInfo.getAwbAvailableModesChecked();
Size maxPreviewSize = mOrderedPreviewSizes.get(0);
CaptureRequest.Builder requestBuilder =
mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
startPreview(requestBuilder, maxPreviewSize, /*listener*/null);
- for (byte mode : awbModes) {
+ for (int mode : awbModes) {
SimpleCaptureListener listener;
- requestBuilder.set(CaptureRequest.CONTROL_AWB_MODE, (int)mode);
+ requestBuilder.set(CaptureRequest.CONTROL_AWB_MODE, mode);
listener = new SimpleCaptureListener();
mCamera.setRepeatingRequest(requestBuilder.build(), listener, mHandler);
// Verify AWB mode in capture result.
- verifyCaptureResultForKey(CaptureResult.CONTROL_AWB_MODE, (int)mode, listener,
+ verifyCaptureResultForKey(CaptureResult.CONTROL_AWB_MODE, mode, listener,
NUM_FRAMES_VERIFIED);
// Verify color correction transform and gains stay unchanged after a lock.
@@ -1415,19 +1413,20 @@
private void verifyAwbCaptureResultUnchanged(SimpleCaptureListener listener,
int numFramesVerified) {
CaptureResult result = listener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS);
- float[] lockedGains = getValueNotNull(result, CaptureResult.COLOR_CORRECTION_GAINS);
- Rational[] lockedTransform =
+ RggbChannelVector lockedGains =
+ getValueNotNull(result, CaptureResult.COLOR_CORRECTION_GAINS);
+ ColorSpaceTransform lockedTransform =
getValueNotNull(result, CaptureResult.COLOR_CORRECTION_TRANSFORM);
for (int i = 0; i < numFramesVerified; i++) {
result = listener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS);
validateColorCorrectionResult(result);
- float[] gains = getValueNotNull(result, CaptureResult.COLOR_CORRECTION_GAINS);
- Rational[] transform =
+ RggbChannelVector gains = getValueNotNull(result, CaptureResult.COLOR_CORRECTION_GAINS);
+ ColorSpaceTransform transform =
getValueNotNull(result, CaptureResult.COLOR_CORRECTION_TRANSFORM);
mCollector.expectEquals("Color correction gains should remain unchanged after awb lock",
- toObject(lockedGains), toObject(gains));
+ lockedGains, gains);
mCollector.expectEquals("Color correction transform should remain unchanged after"
+ " awb lock", lockedTransform, transform);
}
@@ -1443,20 +1442,20 @@
* </p>
*/
private void afModeTestByCamera() throws Exception {
- byte[] afModes = mStaticInfo.getAfAvailableModesChecked();
+ int[] afModes = mStaticInfo.getAfAvailableModesChecked();
Size maxPreviewSize = mOrderedPreviewSizes.get(0);
CaptureRequest.Builder requestBuilder =
mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
startPreview(requestBuilder, maxPreviewSize, /*listener*/null);
- for (byte mode : afModes) {
+ for (int mode : afModes) {
SimpleCaptureListener listener;
- requestBuilder.set(CaptureRequest.CONTROL_AF_MODE, (int)mode);
+ requestBuilder.set(CaptureRequest.CONTROL_AF_MODE, mode);
listener = new SimpleCaptureListener();
mCamera.setRepeatingRequest(requestBuilder.build(), listener, mHandler);
// Verify AF mode in capture result.
- verifyCaptureResultForKey(CaptureResult.CONTROL_AF_MODE, (int)mode, listener,
+ verifyCaptureResultForKey(CaptureResult.CONTROL_AF_MODE, mode, listener,
NUM_FRAMES_VERIFIED);
// Verify AF can finish a scan for CONTROL_AF_MODE_CONTINUOUS_* modes
@@ -1476,17 +1475,17 @@
*/
private void stabilizationTestByCamera() throws Exception {
// video stabilization test.
- byte[] videoStabModes = mStaticInfo.getAvailableVideoStabilizationModesChecked();
- byte[] opticalStabModes = mStaticInfo.getAvailableOpticalStabilizationChecked();
+ int[] videoStabModes = mStaticInfo.getAvailableVideoStabilizationModesChecked();
+ int[] opticalStabModes = mStaticInfo.getAvailableOpticalStabilizationChecked();
Size maxPreviewSize = mOrderedPreviewSizes.get(0);
CaptureRequest.Builder requestBuilder =
mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
SimpleCaptureListener listener = new SimpleCaptureListener();
startPreview(requestBuilder, maxPreviewSize, listener);
- for ( byte mode : videoStabModes) {
+ for (int mode : videoStabModes) {
listener = new SimpleCaptureListener();
- requestBuilder.set(CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE, (int) mode);
+ requestBuilder.set(CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE, mode);
mCamera.setRepeatingRequest(requestBuilder.build(), listener, mHandler);
// TODO: enable below code when b/14059883 is fixed.
/*
@@ -1518,7 +1517,7 @@
final float maxZoom = mStaticInfo.getAvailableMaxDigitalZoomChecked();
final Rect activeArraySize = mStaticInfo.getActiveArraySizeChecked();
Rect[] cropRegions = new Rect[ZOOM_STEPS];
- int [][] expectRegions = new int[ZOOM_STEPS][];
+ MeteringRectangle[][] expectRegions = new MeteringRectangle[ZOOM_STEPS][];
Size maxPreviewSize = mOrderedPreviewSizes.get(0);
CaptureRequest.Builder requestBuilder =
mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
@@ -1528,16 +1527,12 @@
// Set algorithm regions to full active region
// TODO: test more different 3A regions
- final int[] algoDefaultRegion = new int[] {
- 0, // xmin
- 0, // ymin
- activeArraySize.width() - 1, // xmax
- activeArraySize.height() - 1, // ymax
- 1, // weight
- };
+ final MeteringRectangle[] defaultMeteringRect = new MeteringRectangle[] {
+ new MeteringRectangle (
+ 0, 0, activeArraySize.width(), activeArraySize.height(), 1)};
for (int algo = 0; algo < NUM_ALGORITHMS; algo++) {
- update3aRegion(requestBuilder, algo, algoDefaultRegion);
+ update3aRegion(requestBuilder, algo, defaultMeteringRect);
}
for (PointF center : TEST_ZOOM_CENTERS) {
@@ -1551,7 +1546,7 @@
requestBuilder.set(CaptureRequest.SCALER_CROP_REGION, cropRegions[i]);
requests[i] = requestBuilder.build();
expectRegions[i] = getExpectedOutputRegion(
- /*requestRegion*/algoDefaultRegion,
+ /*requestRegion*/defaultMeteringRect,
/*cropRect*/ cropRegions[i]);
mCamera.capture(requests[i], listener, mHandler);
}
@@ -1564,7 +1559,7 @@
Rect cropRegion = getValueNotNull(result, CaptureResult.SCALER_CROP_REGION);
// Verify Output 3A region is intersection of input 3A region and crop region
for (int algo = 0; algo < NUM_ALGORITHMS; algo++) {
- validate3aRegion(result,algo, expectRegions[i]);
+ validate3aRegion(result, algo, expectRegions[i]);
}
mCollector.expectEquals(" Request and result crop region should match",
cropRegions[i], cropRegion);
@@ -1575,7 +1570,7 @@
}
private void sceneModeTestByCamera() throws Exception {
- byte[] sceneModes = mStaticInfo.getAvailableSceneModesChecked();
+ int[] sceneModes = mStaticInfo.getAvailableSceneModesChecked();
Size maxPreviewSize = mOrderedPreviewSizes.get(0);
CaptureRequest.Builder requestBuilder =
mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
@@ -1583,8 +1578,8 @@
requestBuilder.set(CaptureRequest.CONTROL_MODE, CaptureRequest.CONTROL_MODE_USE_SCENE_MODE);
startPreview(requestBuilder, maxPreviewSize, listener);
- for(byte mode : sceneModes) {
- requestBuilder.set(CaptureRequest.CONTROL_SCENE_MODE, (int)mode);
+ for(int mode : sceneModes) {
+ requestBuilder.set(CaptureRequest.CONTROL_SCENE_MODE, mode);
listener = new SimpleCaptureListener();
mCamera.setRepeatingRequest(requestBuilder.build(), listener, mHandler);
// Enable below check when b/14059883 is fixed.
@@ -1599,7 +1594,7 @@
}
private void effectModeTestByCamera() throws Exception {
- byte[] effectModes = mStaticInfo.getAvailableEffectModesChecked();
+ int[] effectModes = mStaticInfo.getAvailableEffectModesChecked();
Size maxPreviewSize = mOrderedPreviewSizes.get(0);
CaptureRequest.Builder requestBuilder =
mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
@@ -1607,8 +1602,8 @@
SimpleCaptureListener listener = new SimpleCaptureListener();
startPreview(requestBuilder, maxPreviewSize, listener);
- for(byte mode : effectModes) {
- requestBuilder.set(CaptureRequest.CONTROL_EFFECT_MODE, (int)mode);
+ for(int mode : effectModes) {
+ requestBuilder.set(CaptureRequest.CONTROL_EFFECT_MODE, mode);
listener = new SimpleCaptureListener();
mCamera.setRepeatingRequest(requestBuilder.build(), listener, mHandler);
// Enable below check when b/14059883 is fixed.
@@ -1820,26 +1815,24 @@
*/
private void verifyFpsNotSlowDown(CaptureRequest.Builder requestBuilder,
int numFramesVerified) throws Exception {
- int[] fpsRanges = mStaticInfo.getAeAvailableTargetFpsRangesChecked();
+ Range<Integer>[] fpsRanges = mStaticInfo.getAeAvailableTargetFpsRangesChecked();
boolean antiBandingOffIsSupported = mStaticInfo.isAntiBandingOffModeSupported();
- final int FPS_RANGE_SIZE = 2;
- int[] fpsRange = new int[FPS_RANGE_SIZE];
+ Range<Integer> fpsRange;
SimpleCaptureListener resultListener;
- for (int i = 0; i < fpsRanges.length; i += FPS_RANGE_SIZE) {
- fpsRange[0] = fpsRanges[i];
- fpsRange[1] = fpsRanges[i + 1];
+ for (int i = 0; i < fpsRanges.length; i += 1) {
+ fpsRange = fpsRanges[i];
Size previewSz = getMaxPreviewSizeForFpsRange(fpsRange);
// If unable to find a preview size, then log the failure, and skip this run.
if (!mCollector.expectTrue(String.format(
- "Unable to find a preview size supporting given fps range [%d, %d]",
- fpsRange[0], fpsRange[1]), previewSz != null)) {
+ "Unable to find a preview size supporting given fps range %s",
+ fpsRange), previewSz != null)) {
continue;
}
if (VERBOSE) {
- Log.v(TAG, String.format("Test fps range [%d, %d] for preview size %s",
- fpsRange[0], fpsRange[1], previewSz.toString()));
+ Log.v(TAG, String.format("Test fps range %s for preview size %s",
+ fpsRange, previewSz.toString()));
}
requestBuilder.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, fpsRange);
// Turn off auto antibanding to avoid exposure time and frame duration interference
@@ -1858,7 +1851,7 @@
resultListener = new SimpleCaptureListener();
startPreview(requestBuilder, previewSz, resultListener);
long[] frameDurationRange =
- new long[]{(long) (1e9 / fpsRange[1]), (long) (1e9 / fpsRange[0])};
+ new long[]{(long) (1e9 / fpsRange.getUpper()), (long) (1e9 / fpsRange.getLower())};
for (int j = 0; j < numFramesVerified; j++) {
CaptureResult result =
resultListener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS);
@@ -1929,28 +1922,28 @@
* if the specified 3A region is not supported by camera device.
* @param requestBuilder The request to be updated
* @param algoIdx The index to the algorithm. (AE: 0, AWB: 1, AF: 2)
- * @param region The 3A region to be set
+ * @param regions The 3A regions to be set
*/
- private void update3aRegion(CaptureRequest.Builder requestBuilder, int algoIdx, int[] region)
+ private void update3aRegion(
+ CaptureRequest.Builder requestBuilder, int algoIdx, MeteringRectangle[] regions)
{
int[] maxRegions = mStaticInfo.get3aMaxRegionsChecked();
- if (region.length == 0 ||
- region.length % LENGTH_ALGORITHM_REGION != 0) {
+ if (regions == null || regions.length == 0) {
throw new IllegalArgumentException("Invalid input 3A region!");
}
- if (maxRegions[algoIdx] * LENGTH_ALGORITHM_REGION >= region.length)
+ if (maxRegions[algoIdx] >= regions.length)
{
switch (algoIdx) {
case INDEX_ALGORITHM_AE:
- requestBuilder.set(CaptureRequest.CONTROL_AE_REGIONS, region);
+ requestBuilder.set(CaptureRequest.CONTROL_AE_REGIONS, regions);
break;
case INDEX_ALGORITHM_AWB:
- requestBuilder.set(CaptureRequest.CONTROL_AWB_REGIONS, region);
+ requestBuilder.set(CaptureRequest.CONTROL_AWB_REGIONS, regions);
break;
case INDEX_ALGORITHM_AF:
- requestBuilder.set(CaptureRequest.CONTROL_AF_REGIONS, region);
+ requestBuilder.set(CaptureRequest.CONTROL_AF_REGIONS, regions);
break;
default:
throw new IllegalArgumentException("Unknown 3A Algorithm!");
@@ -1963,17 +1956,13 @@
* supported. Do nothing if the specified 3A region is not supported by camera device.
* @param result The capture result to be validated
* @param algoIdx The index to the algorithm. (AE: 0, AWB: 1, AF: 2)
- * @param expectRegion The 3A region expected
+ * @param expectRegion The 3A regions expected in capture result
*/
- private void validate3aRegion(CaptureResult result, int algoIdx, int[] expectRegion)
+ private void validate3aRegion(
+ CaptureResult result, int algoIdx, MeteringRectangle[] expectRegion)
{
int[] maxRegions = mStaticInfo.get3aMaxRegionsChecked();
- int[] actualRegion;
-
- if (expectRegion.length == 0 ||
- expectRegion.length % LENGTH_ALGORITHM_REGION != 0) {
- throw new IllegalArgumentException("Invalid expected 3A region!");
- }
+ MeteringRectangle[] actualRegion;
if (maxRegions[algoIdx] > 0)
{
@@ -1993,8 +1982,7 @@
mCollector.expectEquals(
"Expected 3A region: " + Arrays.toString(expectRegion) +
" does not match actual one: " + Arrays.toString(actualRegion),
- toObject(expectRegion),
- toObject(actualRegion));
+ expectRegion, actualRegion);
}
}
}
diff --git a/tests/tests/hardware/src/android/hardware/camera2/cts/CaptureResultTest.java b/tests/tests/hardware/src/android/hardware/camera2/cts/CaptureResultTest.java
index 412a919..dd7439f 100644
--- a/tests/tests/hardware/src/android/hardware/camera2/cts/CaptureResultTest.java
+++ b/tests/tests/hardware/src/android/hardware/camera2/cts/CaptureResultTest.java
@@ -111,7 +111,7 @@
requestBuilder.addTarget(mReaderSurface);
// Enable face detection if supported
- byte[] faceModes = mStaticInfo.getAvailableFaceDetectModesChecked();
+ int[] faceModes = mStaticInfo.getAvailableFaceDetectModesChecked();
for (int i = 0; i < faceModes.length; i++) {
if (faceModes[i] == CameraMetadata.STATISTICS_FACE_DETECT_MODE_FULL) {
if (VERBOSE) {
@@ -151,13 +151,6 @@
String failMsg = "Failed capture result " + i + " test ";
result = captureListener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS);
- if (VERBOSE) {
- Log.v(TAG, "Dump result for frame " + i);
- Log.v(TAG, "------------------------------ (START)");
- result.dumpToLog();
- Log.v(TAG, "------------------------------ (END)");
- }
-
for (CaptureResult.Key<?> key : mAllKeys) {
if (!skippedKeys.contains(key)) {
/**
@@ -248,9 +241,7 @@
resultKeys.add(CaptureResult.FLASH_MODE);
resultKeys.add(CaptureResult.FLASH_STATE);
resultKeys.add(CaptureResult.HOT_PIXEL_MODE);
- resultKeys.add(CaptureResult.JPEG_GPS_COORDINATES);
- resultKeys.add(CaptureResult.JPEG_GPS_PROCESSING_METHOD);
- resultKeys.add(CaptureResult.JPEG_GPS_TIMESTAMP);
+ resultKeys.add(CaptureResult.JPEG_GPS_LOCATION);
resultKeys.add(CaptureResult.JPEG_ORIENTATION);
resultKeys.add(CaptureResult.JPEG_QUALITY);
resultKeys.add(CaptureResult.JPEG_THUMBNAIL_QUALITY);
@@ -278,13 +269,11 @@
resultKeys.add(CaptureResult.STATISTICS_FACE_DETECT_MODE);
resultKeys.add(CaptureResult.STATISTICS_HOT_PIXEL_MAP_MODE);
resultKeys.add(CaptureResult.STATISTICS_FACES);
- resultKeys.add(CaptureResult.STATISTICS_LENS_SHADING_MAP);
+ resultKeys.add(CaptureResult.STATISTICS_LENS_SHADING_CORRECTION_MAP);
resultKeys.add(CaptureResult.STATISTICS_SCENE_FLICKER);
resultKeys.add(CaptureResult.STATISTICS_HOT_PIXEL_MAP);
resultKeys.add(CaptureResult.STATISTICS_LENS_SHADING_MAP_MODE);
- resultKeys.add(CaptureResult.TONEMAP_CURVE_BLUE);
- resultKeys.add(CaptureResult.TONEMAP_CURVE_GREEN);
- resultKeys.add(CaptureResult.TONEMAP_CURVE_RED);
+ resultKeys.add(CaptureResult.TONEMAP_CURVE);
resultKeys.add(CaptureResult.TONEMAP_MODE);
resultKeys.add(CaptureResult.BLACK_LEVEL_LOCK);
diff --git a/tests/tests/hardware/src/android/hardware/camera2/cts/RecordingTest.java b/tests/tests/hardware/src/android/hardware/camera2/cts/RecordingTest.java
index ab9cddc..43031da 100644
--- a/tests/tests/hardware/src/android/hardware/camera2/cts/RecordingTest.java
+++ b/tests/tests/hardware/src/android/hardware/camera2/cts/RecordingTest.java
@@ -32,6 +32,7 @@
import android.os.SystemClock;
import android.test.suitebuilder.annotation.LargeTest;
import android.util.Log;
+import android.util.Range;
import android.view.SurfaceHolder;
import android.view.Surface;
@@ -327,7 +328,7 @@
CaptureRequest.Builder recordingRequestBuilder =
mCamera.createCaptureRequest(CameraDevice.TEMPLATE_RECORD);
// Make sure camera output frame rate is set to correct value.
- int[] fpsRange = {VIDEO_FRAME_RATE, VIDEO_FRAME_RATE};
+ Range<Integer> fpsRange = Range.create(VIDEO_FRAME_RATE, VIDEO_FRAME_RATE);
recordingRequestBuilder.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, fpsRange);
recordingRequestBuilder.addTarget(mRecordingSurface);
recordingRequestBuilder.addTarget(mPreviewSurface);
diff --git a/tests/tests/hardware/src/android/hardware/camera2/cts/StillCaptureTest.java b/tests/tests/hardware/src/android/hardware/camera2/cts/StillCaptureTest.java
index 9f0af9b..7daf859 100644
--- a/tests/tests/hardware/src/android/hardware/camera2/cts/StillCaptureTest.java
+++ b/tests/tests/hardware/src/android/hardware/camera2/cts/StillCaptureTest.java
@@ -23,22 +23,27 @@
import android.hardware.camera2.CameraDevice;
import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.CaptureResult;
+import android.location.Location;
+import android.location.LocationManager;
import android.util.Size;
import android.hardware.camera2.cts.CameraTestUtils.SimpleCaptureListener;
import android.hardware.camera2.cts.CameraTestUtils.SimpleImageReaderListener;
import android.hardware.camera2.cts.helpers.Camera2Focuser;
import android.hardware.camera2.cts.testcases.Camera2SurfaceViewTestCase;
+import android.hardware.camera2.params.MeteringRectangle;
import android.media.ExifInterface;
import android.media.Image;
import android.os.Build;
import android.os.ConditionVariable;
import android.util.Log;
+import android.util.Range;
import android.util.Rational;
import com.android.ex.camera2.exceptions.TimeoutRuntimeException;
import java.text.ParseException;
import java.text.SimpleDateFormat;
+import java.util.ArrayList;
import java.util.Arrays;
import java.util.Date;
import java.util.List;
@@ -46,7 +51,7 @@
public class StillCaptureTest extends Camera2SurfaceViewTestCase {
private static final String TAG = "StillCaptureTest";
private static final boolean VERBOSE = Log.isLoggable(TAG, Log.VERBOSE);
- private static final boolean DEBUG = Log.isLoggable(TAG, Log.DEBUG);;
+ private static final boolean DEBUG = Log.isLoggable(TAG, Log.DEBUG);
private static final String JPEG_FILE_NAME = DEBUG_FILE_NAME_BASE + "/test.jpeg";
// 60 second to accommodate the possible long exposure time.
private static final int EXIF_DATETIME_ERROR_MARGIN_SEC = 60;
@@ -54,32 +59,39 @@
// TODO: exposure time error margin need to be scaled with exposure time.
private static final float EXIF_EXPOSURE_TIME_ERROR_MARGIN_SEC = 0.002f;
private static final float EXIF_APERTURE_ERROR_MARGIN = 0.001f;
+ private static final Location sTestLocation0 = new Location(LocationManager.GPS_PROVIDER);
+ private static final Location sTestLocation1 = new Location(LocationManager.GPS_PROVIDER);
+ private static final Location sTestLocation2 = new Location(LocationManager.NETWORK_PROVIDER);
+ static {
+ sTestLocation0.setTime(1199145600L);
+ sTestLocation0.setLatitude(37.736071);
+ sTestLocation0.setLongitude(-122.441983);
+ sTestLocation0.setAltitude(21.0);
+
+ sTestLocation1.setTime(1199145601L);
+ sTestLocation1.setLatitude(0.736071);
+ sTestLocation1.setLongitude(0.441983);
+ sTestLocation1.setAltitude(1.0);
+
+ sTestLocation2.setTime(1199145602L);
+ sTestLocation2.setLatitude(-89.736071);
+ sTestLocation2.setLongitude(-179.441983);
+ sTestLocation2.setAltitude(100000.0);
+ }
// Exif test data vectors.
private static final ExifTestData[] EXIF_TEST_DATA = {
new ExifTestData(
- /* coords */new double[] {
- 37.736071, -122.441983, 21.0
- },
- /* procMethod */"GPS NETWORK HYBRID ARE ALL FINE.",
- /* timestamp */1199145600L,
+ /*gpsLocation*/ sTestLocation0,
/* orientation */90,
/* jpgQuality */(byte) 80,
/* thumbQuality */(byte) 75),
new ExifTestData(
- /* coords */new double[] {
- 0.736071, 0.441983, 1.0
- },
- /* procMethod */"GPS",
- /* timestamp */1199145601L,
+ /*gpsLocation*/ sTestLocation1,
/* orientation */180,
/* jpgQuality */(byte) 90,
/* thumbQuality */(byte) 85),
new ExifTestData(
- /* coords */new double[] {
- -89.736071, -179.441983, 100000.0
- },
- /* procMethod */"NETWORK",
- /* timestamp */1199145602L,
+ /*gpsLocation*/ sTestLocation2,
/* orientation */270,
/* jpgQuality */(byte) 100,
/* thumbQuality */(byte) 100)
@@ -254,9 +266,9 @@
continue;
}
- int[][] aeRegions = get3ATestRegionsForCamera();
- for (int i = 0; i < aeRegions.length; i++) {
- takePictureTestByCamera(aeRegions[i], /*awbRegions*/null, /*afRegions*/null);
+ ArrayList<MeteringRectangle[]> aeRegionTestCases = get3ARegionTestCasesForCamera();
+ for (MeteringRectangle[] aeRegions : aeRegionTestCases) {
+ takePictureTestByCamera(aeRegions, /*awbRegions*/null, /*afRegions*/null);
}
} finally {
closeDevice();
@@ -279,9 +291,9 @@
continue;
}
- int[][] awbRegions = get3ATestRegionsForCamera();
- for (int i = 0; i < awbRegions.length; i++) {
- takePictureTestByCamera(/*aeRegions*/null, awbRegions[i], /*afRegions*/null);
+ ArrayList<MeteringRectangle[]> awbRegionTestCases = get3ARegionTestCasesForCamera();
+ for (MeteringRectangle[] awbRegions : awbRegionTestCases) {
+ takePictureTestByCamera(/*aeRegions*/null, awbRegions, /*afRegions*/null);
}
} finally {
closeDevice();
@@ -304,9 +316,9 @@
continue;
}
- int[][] afRegions = get3ATestRegionsForCamera();
- for (int i = 0; i < afRegions.length; i++) {
- takePictureTestByCamera(/*aeRegions*/null, /*awbRegions*/null, afRegions[i]);
+ ArrayList<MeteringRectangle[]> afRegionTestCases = get3ARegionTestCasesForCamera();
+ for (MeteringRectangle[] afRegions : afRegionTestCases) {
+ takePictureTestByCamera(/*aeRegions*/null, /*awbRegions*/null, afRegions);
}
} finally {
closeDevice();
@@ -381,8 +393,10 @@
* @param awbRegions AWB regions for this capture
* @param afRegions AF regions for this capture
*/
- private void takePictureTestByCamera(int[] aeRegions, int[] awbRegions, int[] afRegions)
- throws Exception {
+ private void takePictureTestByCamera(
+ MeteringRectangle[] aeRegions, MeteringRectangle[] awbRegions,
+ MeteringRectangle[] afRegions) throws Exception {
+
boolean hasFocuser = mStaticInfo.hasFocuser();
Size maxStillSz = mOrderedStillSizes.get(0);
@@ -459,10 +473,10 @@
previewRequest.get(CaptureRequest.CONTROL_AWB_MODE),
result.get(CaptureResult.CONTROL_AWB_MODE));
if (canSetAwbRegion) {
- int[] resultAwbRegions = getValueNotNull(result, CaptureResult.CONTROL_AWB_REGIONS);
+ MeteringRectangle[] resultAwbRegions =
+ getValueNotNull(result, CaptureResult.CONTROL_AWB_REGIONS);
mCollector.expectEquals("AWB regions in result and request should be same",
- toObject(awbRegions),
- toObject(resultAwbRegions));
+ awbRegions, resultAwbRegions);
}
/**
@@ -486,10 +500,10 @@
previewRequest.get(CaptureRequest.CONTROL_AE_MODE),
result.get(CaptureResult.CONTROL_AE_MODE));
if (canSetAeRegion) {
- int[] resultAeRegions = getValueNotNull(result, CaptureResult.CONTROL_AE_REGIONS);
+ MeteringRectangle[] resultAeRegions =
+ getValueNotNull(result, CaptureResult.CONTROL_AE_REGIONS);
mCollector.expectEquals("AE regions in result and request should be same",
- toObject(aeRegions),
- toObject(resultAeRegions));
+ aeRegions, resultAeRegions);
}
/**
@@ -504,10 +518,10 @@
stillRequest.get(CaptureRequest.CONTROL_AF_MODE),
result.get(CaptureResult.CONTROL_AF_MODE));
if (canSetAfRegion) {
- int[] resultAfRegions = getValueNotNull(result, CaptureResult.CONTROL_AF_REGIONS);
+ MeteringRectangle[] resultAfRegions =
+ getValueNotNull(result, CaptureResult.CONTROL_AF_REGIONS);
mCollector.expectEquals("AF regions in result and request should be same",
- toObject(afRegions),
- toObject(resultAfRegions));
+ afRegions, resultAfRegions);
}
if (hasFocuser) {
@@ -535,10 +549,10 @@
SimpleAutoFocusListener afListener = new SimpleAutoFocusListener();
Camera2Focuser focuser = new Camera2Focuser(mCamera, mPreviewSurface, afListener,
mStaticInfo.getCharacteristics(), mHandler);
- int[][] testAfRegions = get3ATestRegionsForCamera();
+ ArrayList<MeteringRectangle[]> testAfRegions = get3ARegionTestCasesForCamera();
- for (int i = 0; i < testAfRegions.length; i++) {
- focuser.touchForAutoFocus(testAfRegions[i]);
+ for (MeteringRectangle[] afRegions : testAfRegions) {
+ focuser.touchForAutoFocus(afRegions);
afListener.waitForAutoFocusDone(WAIT_FOR_FOCUS_DONE_TIMEOUT_MS);
focuser.cancelAutoFocus();
}
@@ -616,6 +630,15 @@
// TODO: validate DNG metadata tags.
}
+ private static boolean areGpsFieldsEqual(Location a, Location b) {
+ if (a == null || b == null) {
+ return false;
+ }
+
+ return a.getTime() == b.getTime() && a.getLatitude() == b.getLatitude() &&
+ a.getLongitude() == b.getLongitude() && a.getAltitude() == b.getAltitude() &&
+ a.getProvider() == b.getProvider();
+ }
/**
* Issue a Jpeg capture and validate the exif information.
* <p>
@@ -661,10 +684,7 @@
* present and semantically correct.
*/
stillBuilder.set(CaptureRequest.JPEG_THUMBNAIL_SIZE, testThumbnailSizes[i]);
- stillBuilder.set(CaptureRequest.JPEG_GPS_COORDINATES, EXIF_TEST_DATA[i].gpsCoordinates);
- stillBuilder.set(CaptureRequest.JPEG_GPS_PROCESSING_METHOD,
- EXIF_TEST_DATA[i].gpsProcessingMethod);
- stillBuilder.set(CaptureRequest.JPEG_GPS_TIMESTAMP, EXIF_TEST_DATA[i].gpsTimeStamp);
+ stillBuilder.set(CaptureRequest.JPEG_GPS_LOCATION, EXIF_TEST_DATA[i].gpsLocation);
stillBuilder.set(CaptureRequest.JPEG_ORIENTATION, EXIF_TEST_DATA[i].jpegOrientation);
stillBuilder.set(CaptureRequest.JPEG_QUALITY, EXIF_TEST_DATA[i].jpegQuality);
stillBuilder.set(CaptureRequest.JPEG_THUMBNAIL_QUALITY,
@@ -674,15 +694,9 @@
mCollector.expectEquals("JPEG thumbnail size request set and get should match",
testThumbnailSizes[i],
stillBuilder.get(CaptureRequest.JPEG_THUMBNAIL_SIZE));
- mCollector.expectEquals("GPS coordinates request set and get should match.",
- toObject(EXIF_TEST_DATA[i].gpsCoordinates),
- toObject(stillBuilder.get(CaptureRequest.JPEG_GPS_COORDINATES)));
- mCollector.expectEquals("GPS processing method request set and get should match",
- EXIF_TEST_DATA[i].gpsProcessingMethod,
- stillBuilder.get(CaptureRequest.JPEG_GPS_PROCESSING_METHOD));
- mCollector.expectEquals("GPS time stamp request set and get should match",
- EXIF_TEST_DATA[i].gpsTimeStamp,
- stillBuilder.get(CaptureRequest.JPEG_GPS_TIMESTAMP));
+ mCollector.expectTrue("GPS locations request set and get should match.",
+ areGpsFieldsEqual(EXIF_TEST_DATA[i].gpsLocation,
+ stillBuilder.get(CaptureRequest.JPEG_GPS_LOCATION)));
mCollector.expectEquals("JPEG orientation request set and get should match",
EXIF_TEST_DATA[i].jpegOrientation,
stillBuilder.get(CaptureRequest.JPEG_ORIENTATION));
@@ -719,18 +733,12 @@
mCollector.expectEquals("JPEG thumbnail size result and request should match",
testThumbnailSizes[i],
stillResult.get(CaptureResult.JPEG_THUMBNAIL_SIZE));
- CaptureResult.Key<double[]> gpsCoordsKey = CaptureResult.JPEG_GPS_COORDINATES;
- if (mCollector.expectKeyValueNotNull(stillResult, gpsCoordsKey) != null) {
- mCollector.expectEquals("GPS coordinates result and request should match.",
- toObject(EXIF_TEST_DATA[i].gpsCoordinates),
- toObject(stillResult.get(gpsCoordsKey)));
+ if (mCollector.expectKeyValueNotNull(stillResult, CaptureResult.JPEG_GPS_LOCATION) !=
+ null) {
+ mCollector.expectTrue("GPS location result and request should match.",
+ areGpsFieldsEqual(EXIF_TEST_DATA[i].gpsLocation,
+ stillResult.get(CaptureResult.JPEG_GPS_LOCATION)));
}
- mCollector.expectEquals("GPS processing method result and request should match",
- EXIF_TEST_DATA[i].gpsProcessingMethod,
- stillResult.get(CaptureResult.JPEG_GPS_PROCESSING_METHOD));
- mCollector.expectEquals("GPS time stamp result and request should match",
- EXIF_TEST_DATA[i].gpsTimeStamp,
- stillResult.get(CaptureResult.JPEG_GPS_TIMESTAMP));
mCollector.expectEquals("JPEG orientation result and request should match",
EXIF_TEST_DATA[i].jpegOrientation,
stillResult.get(CaptureResult.JPEG_ORIENTATION));
@@ -942,18 +950,14 @@
* Immutable class wrapping the exif test data.
*/
private static class ExifTestData {
- public final double[] gpsCoordinates;
- public final String gpsProcessingMethod;
- public final long gpsTimeStamp;
+ public final Location gpsLocation;
public final int jpegOrientation;
public final byte jpegQuality;
public final byte thumbnailQuality;
- public ExifTestData(double[] coords, String procMethod, long timeStamp, int orientation,
+ public ExifTestData(Location location, int orientation,
byte jpgQuality, byte thumbQuality) {
- gpsCoordinates = coords;
- gpsProcessingMethod = procMethod;
- gpsTimeStamp = timeStamp;
+ gpsLocation = location;
jpegOrientation = orientation;
jpegQuality = jpgQuality;
thumbnailQuality = thumbQuality;
@@ -961,10 +965,10 @@
}
private void aeCompensationTestByCamera() throws Exception {
- int[] compensationRange = mStaticInfo.getAeCompensationRangeChecked();
+ Range<Integer> compensationRange = mStaticInfo.getAeCompensationRangeChecked();
Rational step = mStaticInfo.getAeCompensationStepChecked();
int stepsPerEv = (int) Math.round(1.0 / step.toFloat());
- int numSteps = (compensationRange[1] - compensationRange[0]) / stepsPerEv;
+ int numSteps = (compensationRange.getUpper() - compensationRange.getLower()) / stepsPerEv;
Size maxStillSz = mOrderedStillSizes.get(0);
Size maxPreviewSz = mOrderedPreviewSizes.get(0);
@@ -993,7 +997,7 @@
maxStillSz, resultListener, numSteps, imageListener);
for (int i = 0; i <= numSteps; i++) {
- int exposureCompensation = i * stepsPerEv + compensationRange[0];
+ int exposureCompensation = i * stepsPerEv + compensationRange.getLower();
// Wait for AE to be stabilized before capture: CONVERGED or FLASH_REQUIRED.
waitForAeStable(resultListener);
@@ -1072,10 +1076,8 @@
private long getMaxExposureValue(CaptureRequest.Builder request, long maxExposureTimeUs,
long maxSensitivity) throws Exception {
- int[] fpsRange = request.get(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE);
- mCollector.expectEquals("Length of CaptureResult FPS range must be 2",
- 2, fpsRange.length);
- long maxFrameDurationUs = Math.round(1000000.0 / fpsRange[0]);
+ Range<Integer> fpsRange = request.get(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE);
+ long maxFrameDurationUs = Math.round(1000000.0 / fpsRange.getLower());
long currentMaxExposureTimeUs = Math.min(maxFrameDurationUs, maxExposureTimeUs);
return currentMaxExposureTimeUs * maxSensitivity;
}
@@ -1206,71 +1208,89 @@
}
/**
- * Get 5 3A test square regions, one is at center, the other four are at corners of
+ * Get 5 3A region test cases, each with one square region in it.
+ * The first one is at center, the other four are at corners of
* active array rectangle.
*
* @return array of test 3A regions
*/
- private int[][] get3ATestRegionsForCamera() {
+ private ArrayList<MeteringRectangle[]> get3ARegionTestCasesForCamera() {
final int TEST_3A_REGION_NUM = 5;
- final int NUM_ELEMENT_IN_REGION = 5;
final int DEFAULT_REGION_WEIGHT = 30;
final int DEFAULT_REGION_SCALE_RATIO = 8;
- int[][] regions = new int[TEST_3A_REGION_NUM][NUM_ELEMENT_IN_REGION];
+ ArrayList<MeteringRectangle[]> testCases =
+ new ArrayList<MeteringRectangle[]>(TEST_3A_REGION_NUM);
final Rect activeArraySize = mStaticInfo.getActiveArraySizeChecked();
- int regionWidth = activeArraySize.width() / DEFAULT_REGION_SCALE_RATIO;
- int regionHeight = activeArraySize.height() / DEFAULT_REGION_SCALE_RATIO;
+ int regionWidth = activeArraySize.width() / DEFAULT_REGION_SCALE_RATIO - 1;
+ int regionHeight = activeArraySize.height() / DEFAULT_REGION_SCALE_RATIO - 1;
int centerX = activeArraySize.width() / 2;
int centerY = activeArraySize.height() / 2;
int bottomRightX = activeArraySize.width() - 1;
int bottomRightY = activeArraySize.height() - 1;
// Center region
- int i = 0;
- regions[i][0] = centerX - regionWidth / 2; // xmin
- regions[i][1] = centerY - regionHeight / 2; // ymin
- regions[i][2] = centerX + regionWidth / 2 - 1; // xmax
- regions[i][3] = centerY + regionHeight / 2 - 1; // ymax
- regions[i][4] = DEFAULT_REGION_WEIGHT;
- i++;
+ testCases.add(
+ new MeteringRectangle[] {
+ new MeteringRectangle(
+ centerX - regionWidth / 2, // x
+ centerY - regionHeight / 2, // y
+ regionWidth, // width
+ regionHeight, // height
+ DEFAULT_REGION_WEIGHT)});
// Upper left corner
- regions[i][0] = 0; // xmin
- regions[i][1] = 0; // ymin
- regions[i][2] = regionWidth - 1; // xmax
- regions[i][3] = regionHeight - 1; // ymax
- regions[i][4] = DEFAULT_REGION_WEIGHT;
- i++;
+ testCases.add(
+ new MeteringRectangle[] {
+ new MeteringRectangle(
+ 0, // x
+ 0, // y
+ regionWidth, // width
+ regionHeight, // height
+ DEFAULT_REGION_WEIGHT)});
// Upper right corner
- regions[i][0] = activeArraySize.width() - regionWidth; // xmin
- regions[i][1] = 0; // ymin
- regions[i][2] = bottomRightX; // xmax
- regions[i][3] = regionHeight - 1; // ymax
- regions[i][4] = DEFAULT_REGION_WEIGHT;
- i++;
+ testCases.add(
+ new MeteringRectangle[] {
+ new MeteringRectangle(
+ bottomRightX - regionWidth, // x
+ 0, // y
+ regionWidth, // width
+ regionHeight, // height
+ DEFAULT_REGION_WEIGHT)});
// Bootom left corner
- regions[i][0] = 0; // xmin
- regions[i][1] = activeArraySize.height() - regionHeight; // ymin
- regions[i][2] = regionWidth - 1; // xmax
- regions[i][3] = bottomRightY; // ymax
- regions[i][4] = DEFAULT_REGION_WEIGHT;
- i++;
+ testCases.add(
+ new MeteringRectangle[] {
+ new MeteringRectangle(
+ 0, // x
+ bottomRightY - regionHeight, // y
+ regionWidth, // width
+ regionHeight, // height
+ DEFAULT_REGION_WEIGHT)});
// Bootom right corner
- regions[i][0] = activeArraySize.width() - regionWidth; // xmin
- regions[i][1] = activeArraySize.height() - regionHeight; // ymin
- regions[i][2] = bottomRightX; // xmax
- regions[i][3] = bottomRightY; // ymax
- regions[i][4] = DEFAULT_REGION_WEIGHT;
- i++;
+ testCases.add(
+ new MeteringRectangle[] {
+ new MeteringRectangle(
+ bottomRightX - regionWidth, // x
+ bottomRightY - regionHeight, // y
+ regionWidth, // width
+ regionHeight, // height
+ DEFAULT_REGION_WEIGHT)});
if (VERBOSE) {
- Log.v(TAG, "Generated test regions are: " + Arrays.deepToString(regions));
+ StringBuilder sb = new StringBuilder();
+ for (MeteringRectangle[] mr : testCases) {
+ sb.append("{");
+ sb.append(Arrays.toString(mr));
+ sb.append("}, ");
+ }
+ if (sb.length() > 1)
+ sb.setLength(sb.length() - 2); // Remove the redundant comma and space at the end
+ Log.v(TAG, "Generated test regions are: " + sb.toString());
}
- return regions;
+ return testCases;
}
private boolean isRegionsSupportedFor3A(int index) {
diff --git a/tests/tests/hardware/src/android/hardware/camera2/cts/SurfaceViewPreviewTest.java b/tests/tests/hardware/src/android/hardware/camera2/cts/SurfaceViewPreviewTest.java
index 9d59c9a..37b6357 100644
--- a/tests/tests/hardware/src/android/hardware/camera2/cts/SurfaceViewPreviewTest.java
+++ b/tests/tests/hardware/src/android/hardware/camera2/cts/SurfaceViewPreviewTest.java
@@ -23,10 +23,12 @@
import android.hardware.camera2.CaptureFailure;
import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.CaptureResult;
+import android.hardware.camera2.TotalCaptureResult;
import android.util.Size;
import android.hardware.camera2.cts.CameraTestUtils.SimpleCaptureListener;
import android.hardware.camera2.cts.testcases.Camera2SurfaceViewTestCase;
import android.util.Log;
+import android.util.Range;
import org.mockito.ArgumentCaptor;
import org.mockito.ArgumentMatcher;
@@ -121,17 +123,16 @@
private void previewFpsRangeTestByCamera() throws Exception {
final int FPS_RANGE_SIZE = 2;
Size maxPreviewSz = mOrderedPreviewSizes.get(0);
- int[] fpsRanges = mStaticInfo.getAeAvailableTargetFpsRangesChecked();
+ Range<Integer>[] fpsRanges = mStaticInfo.getAeAvailableTargetFpsRangesChecked();
boolean antiBandingOffIsSupported = mStaticInfo.isAntiBandingOffModeSupported();
- int[] fpsRange = new int[FPS_RANGE_SIZE];
+ Range<Integer> fpsRange;
CaptureRequest.Builder requestBuilder =
mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
SimpleCaptureListener resultListener = new SimpleCaptureListener();
startPreview(requestBuilder, maxPreviewSz, resultListener);
- for (int i = 0; i < fpsRanges.length; i += FPS_RANGE_SIZE) {
- fpsRange[0] = fpsRanges[i];
- fpsRange[1] = fpsRanges[i + 1];
+ for (int i = 0; i < fpsRanges.length; i += 1) {
+ fpsRange = fpsRanges[i];
requestBuilder.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, fpsRange);
// Turn off auto antibanding to avoid exposure time and frame duration interference
@@ -158,11 +159,11 @@
}
private void verifyPreviewTargetFpsRange(SimpleCaptureListener resultListener,
- int numFramesVerified, int[] fpsRange, Size previewSz) {
+ int numFramesVerified, Range<Integer> fpsRange, Size previewSz) {
CaptureResult result = resultListener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS);
long frameDuration = getValueNotNull(result, CaptureResult.SENSOR_FRAME_DURATION);
long[] frameDurationRange =
- new long[]{(long) (1e9 / fpsRange[1]), (long) (1e9 / fpsRange[0])};
+ new long[]{(long) (1e9 / fpsRange.getUpper()), (long) (1e9 / fpsRange.getLower())};
mCollector.expectInRange(
"Frame duration must be in the range of " + Arrays.toString(frameDurationRange),
frameDuration, (long) (frameDurationRange[0] * (1 - FRAME_DURATION_ERROR_MARGIN)),
@@ -233,10 +234,10 @@
stopPreview();
}
- private class IsCaptureResultValid extends ArgumentMatcher<CaptureResult> {
+ private class IsCaptureResultValid extends ArgumentMatcher<TotalCaptureResult> {
@Override
public boolean matches(Object obj) {
- CaptureResult result = (CaptureResult)obj;
+ TotalCaptureResult result = (TotalCaptureResult)obj;
Long timeStamp = result.get(CaptureResult.SENSOR_TIMESTAMP);
if (timeStamp != null && timeStamp.longValue() > 0L) {
return true;
diff --git a/tests/tests/hardware/src/android/hardware/camera2/cts/helpers/Camera2Focuser.java b/tests/tests/hardware/src/android/hardware/camera2/cts/helpers/Camera2Focuser.java
index 0ed1eed..ff4af2d 100644
--- a/tests/tests/hardware/src/android/hardware/camera2/cts/helpers/Camera2Focuser.java
+++ b/tests/tests/hardware/src/android/hardware/camera2/cts/helpers/Camera2Focuser.java
@@ -21,8 +21,10 @@
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CameraDevice;
import android.hardware.camera2.CameraDevice.CaptureListener;
+import android.hardware.camera2.params.MeteringRectangle;
import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.CaptureResult;
+import android.hardware.camera2.TotalCaptureResult;
import android.os.Handler;
import android.util.Log;
import android.view.Surface;
@@ -49,11 +51,10 @@
private final AutoFocusListener mAutoFocusListener;
private final CameraDevice mCamera;
private final Surface mRequestSurface;
- private final int AF_REGION_NUM_ELEMENTS = 5;
private final CameraCharacteristics mStaticInfo;
private int mAfRun = 0;
- private int[] mAfRegions; // int x AF_REGION_NUM_ELEMENTS array.
+ private MeteringRectangle[] mAfRegions;
private boolean mLocked = false;
private boolean mSuccess = false;
private CaptureRequest.Builder mRepeatingBuilder;
@@ -164,7 +165,8 @@
* array size is used if afRegions is null.
* @throws CameraAccessException
*/
- public synchronized void touchForAutoFocus(int[] afRegions) throws CameraAccessException {
+ public synchronized void touchForAutoFocus(MeteringRectangle[] afRegions)
+ throws CameraAccessException {
startAutoFocusLocked(/*active*/true, afRegions);
}
@@ -181,7 +183,8 @@
* array size is used if afRegions is null.
* @throws CameraAccessException
*/
- public synchronized void startAutoFocus(int[] afRegions) throws CameraAccessException {
+ public synchronized void startAutoFocus(MeteringRectangle[] afRegions)
+ throws CameraAccessException {
startAutoFocusLocked(/*forceActive*/false, afRegions);
}
@@ -221,7 +224,9 @@
return mRepeatingBuilder.get(CaptureRequest.CONTROL_AF_MODE);
}
- private void startAutoFocusLocked(boolean forceActive, int[] afRegions) throws CameraAccessException {
+ private void startAutoFocusLocked(
+ boolean forceActive, MeteringRectangle[] afRegions) throws CameraAccessException {
+
setAfRegions(afRegions);
mAfRun++;
@@ -281,9 +286,6 @@
* @throws CameraAccessException
*/
private CaptureRequest.Builder createRequestBuilder() throws CameraAccessException {
- if (mAfRegions == null) {
- throw new IllegalStateException("AF regions are not initialized yet");
- }
CaptureRequest.Builder requestBuilder =
mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
@@ -297,17 +299,16 @@
* Set AF regions, fall back to default region if afRegions is null.
*
* @param afRegions The AF regions to set
- * @throws IllegalArgumentException if the region is malformed (length is 0
- * or not multiple times of {@value #AF_REGION_NUM_ELEMENTS}).
+ * @throws IllegalArgumentException if the region is malformed (length is 0).
*/
- private void setAfRegions(int[] afRegions) {
+ private void setAfRegions(MeteringRectangle[] afRegions) {
if (afRegions == null) {
setDefaultAfRegions();
return;
}
// Throw IAE if AF regions are malformed.
- if (afRegions.length % AF_REGION_NUM_ELEMENTS != 0 || afRegions.length == 0) {
- throw new IllegalArgumentException("afRegions is malformed, length: " + afRegions.length);
+ if (afRegions.length == 0) {
+ throw new IllegalArgumentException("afRegions is malformed, length: 0");
}
mAfRegions = afRegions;
@@ -324,7 +325,8 @@
// Initialize AF regions with all zeros, meaning that it is up to camera device to device
// the regions used by AF.
- mAfRegions = new int[]{0, 0, 0, 0, 0};
+ mAfRegions = new MeteringRectangle[] {
+ new MeteringRectangle(0, 0, 0, 0, 0)};
}
private CaptureListener createCaptureListener() {
@@ -355,7 +357,7 @@
@Override
public void onCaptureCompleted(CameraDevice camera, CaptureRequest request,
- CaptureResult result) {
+ TotalCaptureResult result) {
dispatchToFocuser(result);
}
diff --git a/tests/tests/hardware/src/android/hardware/camera2/cts/helpers/StaticMetadata.java b/tests/tests/hardware/src/android/hardware/camera2/cts/helpers/StaticMetadata.java
index e99d64a..76f79f7 100644
--- a/tests/tests/hardware/src/android/hardware/camera2/cts/helpers/StaticMetadata.java
+++ b/tests/tests/hardware/src/android/hardware/camera2/cts/helpers/StaticMetadata.java
@@ -21,10 +21,12 @@
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CameraCharacteristics.Key;
import android.hardware.camera2.CameraMetadata;
+import android.util.Range;
import android.util.Size;
import android.hardware.camera2.cts.CameraTestUtils;
import android.hardware.camera2.params.StreamConfigurationMap;
import android.util.Log;
+import android.util.Range;
import android.util.Rational;
import junit.framework.Assert;
@@ -309,12 +311,12 @@
*
* @return The array contains available anti-banding modes.
*/
- public byte[] getAeAvailableAntiBandingModesChecked() {
- Key<byte[]> key = CameraCharacteristics.CONTROL_AE_AVAILABLE_ANTIBANDING_MODES;
- byte[] modes = getValueFromKeyNonNull(key);
+ public int[] getAeAvailableAntiBandingModesChecked() {
+ Key<int[]> key = CameraCharacteristics.CONTROL_AE_AVAILABLE_ANTIBANDING_MODES;
+ int[] modes = getValueFromKeyNonNull(key);
boolean foundAuto = false;
- for (byte mode : modes) {
+ for (int mode : modes) {
checkTrueForKey(key, "mode value " + mode + " is out if range",
mode >= CameraMetadata.CONTROL_AE_ANTIBANDING_MODE_OFF ||
mode <= CameraMetadata.CONTROL_AE_ANTIBANDING_MODE_AUTO);
@@ -335,10 +337,10 @@
* @return true if antibanding OFF mode is supported, false otherwise.
*/
public boolean isAntiBandingOffModeSupported() {
- List<Byte> antiBandingModes =
+ List<Integer> antiBandingModes =
Arrays.asList(CameraTestUtils.toObject(getAeAvailableAntiBandingModesChecked()));
- return antiBandingModes.contains((byte)CameraMetadata.CONTROL_AE_ANTIBANDING_MODE_OFF);
+ return antiBandingModes.contains(CameraMetadata.CONTROL_AE_ANTIBANDING_MODE_OFF);
}
public Boolean getFlashInfoChecked() {
@@ -445,20 +447,20 @@
*
* @return The non-null array of available face detection modes
*/
- public byte[] getAvailableFaceDetectModesChecked() {
- Key<byte[]> key = CameraCharacteristics.STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES;
- byte[] modes = getValueFromKeyNonNull(key);
+ public int[] getAvailableFaceDetectModesChecked() {
+ Key<int[]> key = CameraCharacteristics.STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES;
+ int[] modes = getValueFromKeyNonNull(key);
if (modes == null) {
- return new byte[0];
+ return new int[0];
}
- List<Byte> modeList = Arrays.asList(CameraTestUtils.toObject(modes));
+ List<Integer> modeList = Arrays.asList(CameraTestUtils.toObject(modes));
checkTrueForKey(key, "Array should contain OFF mode",
- modeList.contains((byte)CameraMetadata.STATISTICS_FACE_DETECT_MODE_OFF));
+ modeList.contains(CameraMetadata.STATISTICS_FACE_DETECT_MODE_OFF));
checkElementDistinct(key, modeList);
- checkArrayValuesInRange(key, modes, (byte)CameraMetadata.STATISTICS_FACE_DETECT_MODE_OFF,
- (byte)CameraMetadata.STATISTICS_FACE_DETECT_MODE_FULL);
+ checkArrayValuesInRange(key, modes, CameraMetadata.STATISTICS_FACE_DETECT_MODE_OFF,
+ CameraMetadata.STATISTICS_FACE_DETECT_MODE_FULL);
return modes;
}
@@ -476,9 +478,9 @@
return 0;
}
- List<Byte> faceDetectModes =
+ List<Integer> faceDetectModes =
Arrays.asList(CameraTestUtils.toObject(getAvailableFaceDetectModesChecked()));
- if (faceDetectModes.contains((byte)CameraMetadata.STATISTICS_FACE_DETECT_MODE_OFF) &&
+ if (faceDetectModes.contains(CameraMetadata.STATISTICS_FACE_DETECT_MODE_OFF) &&
faceDetectModes.size() == 1) {
checkTrueForKey(key, " value must be 0 if only OFF mode is supported in "
+ "availableFaceDetectionModes", count == 0);
@@ -497,26 +499,26 @@
*
* @return the availalbe tone map modes
*/
- public byte[] getAvailableToneMapModesChecked() {
- Key<byte[]> key = CameraCharacteristics.TONEMAP_AVAILABLE_TONE_MAP_MODES;
- byte[] modes = getValueFromKeyNonNull(key);
+ public int[] getAvailableToneMapModesChecked() {
+ Key<int[]> key = CameraCharacteristics.TONEMAP_AVAILABLE_TONE_MAP_MODES;
+ int[] modes = getValueFromKeyNonNull(key);
if (modes == null) {
- return new byte[0];
+ return new int[0];
}
- List<Byte> modeList = Arrays.asList(CameraTestUtils.toObject(modes));
+ List<Integer> modeList = Arrays.asList(CameraTestUtils.toObject(modes));
checkTrueForKey(key, " Camera devices must always support FAST mode",
- modeList.contains((byte)CameraMetadata.TONEMAP_MODE_FAST));
+ modeList.contains(CameraMetadata.TONEMAP_MODE_FAST));
if (isHardwareLevelFull()) {
checkTrueForKey(key, "Full-capability camera devices must support"
+ "CONTRAST_CURVE mode",
- modeList.contains((byte)CameraMetadata.TONEMAP_MODE_CONTRAST_CURVE) &&
- modeList.contains((byte)CameraMetadata.TONEMAP_MODE_FAST));
+ modeList.contains(CameraMetadata.TONEMAP_MODE_CONTRAST_CURVE) &&
+ modeList.contains(CameraMetadata.TONEMAP_MODE_FAST));
}
checkElementDistinct(key, modeList);
- checkArrayValuesInRange(key, modes, (byte)CameraMetadata.TONEMAP_MODE_CONTRAST_CURVE,
- (byte)CameraMetadata.TONEMAP_MODE_HIGH_QUALITY);
+ checkArrayValuesInRange(key, modes, CameraMetadata.TONEMAP_MODE_CONTRAST_CURVE,
+ CameraMetadata.TONEMAP_MODE_HIGH_QUALITY);
return modes;
}
@@ -534,9 +536,9 @@
return 0;
}
- List<Byte> modeList =
+ List<Integer> modeList =
Arrays.asList(CameraTestUtils.toObject(getAvailableToneMapModesChecked()));
- if (modeList.contains((byte)CameraMetadata.TONEMAP_MODE_CONTRAST_CURVE)) {
+ if (modeList.contains(CameraMetadata.TONEMAP_MODE_CONTRAST_CURVE)) {
checkTrueForKey(key, "Full-capability camera device must support maxCurvePoints "
+ ">= " + TONEMAP_MAX_CURVE_POINTS_AT_LEAST,
count >= TONEMAP_MAX_CURVE_POINTS_AT_LEAST);
@@ -687,12 +689,14 @@
* @return The value reported by the camera device or the defaultValue otherwise.
*/
public int getSensitivityMinimumOrDefault(int defaultValue) {
- return getArrayElementOrDefault(
- CameraCharacteristics.SENSOR_INFO_SENSITIVITY_RANGE,
- defaultValue,
- "minimum",
- SENSOR_INFO_SENSITIVITY_RANGE_MIN,
- SENSOR_INFO_SENSITIVITY_RANGE_SIZE);
+ Range<Integer> range = getValueFromKeyNonNull(
+ CameraCharacteristics.SENSOR_INFO_SENSITIVITY_RANGE);
+ if (range == null) {
+ failKeyCheck(CameraCharacteristics.SENSOR_INFO_SENSITIVITY_RANGE,
+ "had no valid minimum value; using default of " + defaultValue);
+ return defaultValue;
+ }
+ return range.getLower();
}
/**
@@ -718,12 +722,14 @@
* @return The value reported by the camera device or the defaultValue otherwise.
*/
public int getSensitivityMaximumOrDefault(int defaultValue) {
- return getArrayElementOrDefault(
- CameraCharacteristics.SENSOR_INFO_SENSITIVITY_RANGE,
- defaultValue,
- "maximum",
- SENSOR_INFO_SENSITIVITY_RANGE_MAX,
- SENSOR_INFO_SENSITIVITY_RANGE_SIZE);
+ Range<Integer> range = getValueFromKeyNonNull(
+ CameraCharacteristics.SENSOR_INFO_SENSITIVITY_RANGE);
+ if (range == null) {
+ failKeyCheck(CameraCharacteristics.SENSOR_INFO_SENSITIVITY_RANGE,
+ "had no valid maximum value; using default of " + defaultValue);
+ return defaultValue;
+ }
+ return range.getUpper();
}
/**
@@ -736,12 +742,14 @@
* @return The value reported by the camera device or the defaultValue otherwise.
*/
public long getExposureMinimumOrDefault(long defaultValue) {
- return getArrayElementOrDefault(
- CameraCharacteristics.SENSOR_INFO_EXPOSURE_TIME_RANGE,
- defaultValue,
- "minimum",
- SENSOR_INFO_EXPOSURE_TIME_RANGE_MIN,
- SENSOR_INFO_EXPOSURE_TIME_RANGE_SIZE);
+ Range<Long> range = getValueFromKeyNonNull(
+ CameraCharacteristics.SENSOR_INFO_EXPOSURE_TIME_RANGE);
+ if (range == null) {
+ failKeyCheck(CameraCharacteristics.SENSOR_INFO_EXPOSURE_TIME_RANGE,
+ "had no valid minimum value; using default of " + defaultValue);
+ return defaultValue;
+ }
+ return range.getLower();
}
/**
@@ -767,12 +775,14 @@
* @return The value reported by the camera device or the defaultValue otherwise.
*/
public long getExposureMaximumOrDefault(long defaultValue) {
- return getArrayElementOrDefault(
- CameraCharacteristics.SENSOR_INFO_EXPOSURE_TIME_RANGE,
- defaultValue,
- "maximum",
- SENSOR_INFO_EXPOSURE_TIME_RANGE_MAX,
- SENSOR_INFO_EXPOSURE_TIME_RANGE_SIZE);
+ Range<Long> range = getValueFromKeyNonNull(
+ CameraCharacteristics.SENSOR_INFO_EXPOSURE_TIME_RANGE);
+ if (range == null) {
+ failKeyCheck(CameraCharacteristics.SENSOR_INFO_EXPOSURE_TIME_RANGE,
+ "had no valid maximum value; using default of " + defaultValue);
+ return defaultValue;
+ }
+ return range.getUpper();
}
/**
@@ -796,14 +806,14 @@
* have to abort the execution even the aeMode list is invalid.</p>
* @return AE available modes
*/
- public byte[] getAeAvailableModesChecked() {
- Key<byte[]> modesKey = CameraCharacteristics.CONTROL_AE_AVAILABLE_MODES;
- byte[] modes = getValueFromKeyNonNull(modesKey);
+ public int[] getAeAvailableModesChecked() {
+ Key<int[]> modesKey = CameraCharacteristics.CONTROL_AE_AVAILABLE_MODES;
+ int[] modes = getValueFromKeyNonNull(modesKey);
if (modes == null) {
- modes = new byte[0];
+ modes = new int[0];
}
List<Integer> modeList = new ArrayList<Integer>();
- for (byte mode : modes) {
+ for (int mode : modes) {
modeList.add((int)(mode));
}
checkTrueForKey(modesKey, "value is empty", !modeList.isEmpty());
@@ -842,7 +852,7 @@
checkTrueForKey(modesKey, "Full capability device must have OFF mode", condition);
// Boundary check.
- for (byte mode : modes) {
+ for (int mode : modes) {
checkTrueForKey(modesKey, "Value " + mode + " is out of bound",
mode >= CameraMetadata.CONTROL_AE_MODE_OFF
&& mode <= CameraMetadata.CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE);
@@ -857,21 +867,21 @@
* @return array that contains available AWB modes, empty array if awbAvailableModes is
* unavailable.
*/
- public byte[] getAwbAvailableModesChecked() {
- Key<byte[]> key =
+ public int[] getAwbAvailableModesChecked() {
+ Key<int[]> key =
CameraCharacteristics.CONTROL_AWB_AVAILABLE_MODES;
- byte[] awbModes = getValueFromKeyNonNull(key);
+ int[] awbModes = getValueFromKeyNonNull(key);
if (awbModes == null) {
- return new byte[0];
+ return new int[0];
}
- List<Byte> modesList = Arrays.asList(CameraTestUtils.toObject(awbModes));
+ List<Integer> modesList = Arrays.asList(CameraTestUtils.toObject(awbModes));
checkTrueForKey(key, " All camera devices must support AUTO mode",
- modesList.contains((byte)CameraMetadata.CONTROL_AWB_MODE_AUTO));
+ modesList.contains(CameraMetadata.CONTROL_AWB_MODE_AUTO));
if (isHardwareLevelFull()) {
checkTrueForKey(key, " Full capability camera devices must support OFF mode",
- modesList.contains((byte)CameraMetadata.CONTROL_AWB_MODE_OFF));
+ modesList.contains(CameraMetadata.CONTROL_AWB_MODE_OFF));
}
return awbModes;
@@ -883,21 +893,21 @@
* @return array that contains available AF modes, empty array if afAvailableModes is
* unavailable.
*/
- public byte[] getAfAvailableModesChecked() {
- Key<byte[]> key =
+ public int[] getAfAvailableModesChecked() {
+ Key<int[]> key =
CameraCharacteristics.CONTROL_AF_AVAILABLE_MODES;
- byte[] afModes = getValueFromKeyNonNull(key);
+ int[] afModes = getValueFromKeyNonNull(key);
if (afModes == null) {
- return new byte[0];
+ return new int[0];
}
- List<Byte> modesList = Arrays.asList(CameraTestUtils.toObject(afModes));
+ List<Integer> modesList = Arrays.asList(CameraTestUtils.toObject(afModes));
checkTrueForKey(key, " All camera devices must support OFF mode",
- modesList.contains((byte)CameraMetadata.CONTROL_AF_MODE_OFF));
+ modesList.contains(CameraMetadata.CONTROL_AF_MODE_OFF));
if (hasFocuser()) {
checkTrueForKey(key, " Camera devices that have focuser units must support AUTO mode",
- modesList.contains((byte)CameraMetadata.CONTROL_AF_MODE_AUTO));
+ modesList.contains(CameraMetadata.CONTROL_AF_MODE_AUTO));
}
return afModes;
@@ -984,27 +994,24 @@
*
* @return Empty int array if aeAvailableTargetFpsRanges is invalid.
*/
- public int[] getAeAvailableTargetFpsRangesChecked() {
- final int NUM_ELEMENTS_IN_FPS_RANGE = 2;
- Key<int[]> key =
+ @SuppressWarnings("raw")
+ public Range<Integer>[] getAeAvailableTargetFpsRangesChecked() {
+ Key<Range<Integer>[]> key =
CameraCharacteristics.CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES;
- int[] fpsRanges = getValueFromKeyNonNull(key);
+ Range<Integer>[] fpsRanges = getValueFromKeyNonNull(key);
if (fpsRanges == null) {
- return new int[0];
+ return new Range[0];
}
- checkTrueForKey(key, "array length is invalid", fpsRanges.length
- % NUM_ELEMENTS_IN_FPS_RANGE == 0);
// Round down to 2 boundary if it is not integer times of 2, to avoid array out of bound
// in case the above check fails.
- int fpsRangeLength = (fpsRanges.length / NUM_ELEMENTS_IN_FPS_RANGE)
- * NUM_ELEMENTS_IN_FPS_RANGE;
+ int fpsRangeLength = fpsRanges.length;
int minFps, maxFps;
long maxFrameDuration = getMaxFrameDurationChecked();
- for (int i = 0; i < fpsRangeLength; i += NUM_ELEMENTS_IN_FPS_RANGE) {
- minFps = fpsRanges[i];
- maxFps = fpsRanges[i + 1];
+ for (int i = 0; i < fpsRangeLength; i += 1) {
+ minFps = fpsRanges[i].getLower();
+ maxFps = fpsRanges[i].getUpper();
checkTrueForKey(key, " min fps must be no larger than max fps!",
minFps > 0 && maxFps >= minFps);
long maxDuration = (long) (1e9 / minFps);
@@ -1063,40 +1070,40 @@
return minDurationMap;
}
- public byte[] getAvailableEdgeModesChecked() {
- Key<byte[]> key = CameraCharacteristics.EDGE_AVAILABLE_EDGE_MODES;
- byte[] edgeModes = getValueFromKeyNonNull(key);
+ public int[] getAvailableEdgeModesChecked() {
+ Key<int[]> key = CameraCharacteristics.EDGE_AVAILABLE_EDGE_MODES;
+ int[] edgeModes = getValueFromKeyNonNull(key);
if (edgeModes == null) {
- return new byte[0];
+ return new int[0];
}
// Full device should always include OFF and FAST
if (isHardwareLevelFull()) {
- List<Byte> modeList = Arrays.asList(CameraTestUtils.toObject(edgeModes));
+ List<Integer> modeList = Arrays.asList(CameraTestUtils.toObject(edgeModes));
checkTrueForKey(key, "Full device must contain OFF and FAST edge modes",
- modeList.contains((byte)CameraMetadata.EDGE_MODE_OFF) &&
- modeList.contains((byte)CameraMetadata.EDGE_MODE_FAST));
+ modeList.contains(CameraMetadata.EDGE_MODE_OFF) &&
+ modeList.contains(CameraMetadata.EDGE_MODE_FAST));
}
return edgeModes;
}
- public byte[] getAvailableNoiseReductionModesChecked() {
- Key<byte[]> key =
+ public int[] getAvailableNoiseReductionModesChecked() {
+ Key<int[]> key =
CameraCharacteristics.NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES;
- byte[] noiseReductionModes = getValueFromKeyNonNull(key);
+ int[] noiseReductionModes = getValueFromKeyNonNull(key);
if (noiseReductionModes == null) {
- return new byte[0];
+ return new int[0];
}
// Full device should always include OFF and FAST
if (isHardwareLevelFull()) {
- List<Byte> modeList = Arrays.asList(CameraTestUtils.toObject(noiseReductionModes));
+ List<Integer> modeList = Arrays.asList(CameraTestUtils.toObject(noiseReductionModes));
checkTrueForKey(key, "Full device must contain OFF and FAST noise reduction modes",
- modeList.contains((byte)CameraMetadata.NOISE_REDUCTION_MODE_OFF) &&
- modeList.contains((byte)CameraMetadata.NOISE_REDUCTION_MODE_FAST));
+ modeList.contains(CameraMetadata.NOISE_REDUCTION_MODE_OFF) &&
+ modeList.contains(CameraMetadata.NOISE_REDUCTION_MODE_FAST));
}
return noiseReductionModes;
@@ -1126,26 +1133,21 @@
*
* @return default value if the value is null or malformed.
*/
- public int[] getAeCompensationRangeChecked() {
- Key<int[]> key =
+ public Range<Integer> getAeCompensationRangeChecked() {
+ Key<Range<Integer>> key =
CameraCharacteristics.CONTROL_AE_COMPENSATION_RANGE;
- int[] compensationRange = getValueFromKeyNonNull(key);
+ Range<Integer> compensationRange = getValueFromKeyNonNull(key);
float compensationStep = getAeCompensationStepChecked().toFloat();
- final int[] DEFAULT_RANGE = new int[] {
+ final Range<Integer> DEFAULT_RANGE = Range.create(
(int)(CONTROL_AE_COMPENSATION_RANGE_DEFAULT_MIN / compensationStep),
- (int)(CONTROL_AE_COMPENSATION_RANGE_DEFAULT_MAX / compensationStep)};
+ (int)(CONTROL_AE_COMPENSATION_RANGE_DEFAULT_MAX / compensationStep));
if (compensationRange == null) {
return DEFAULT_RANGE;
}
- checkTrueForKey(key, " value must have 2 elements", compensationRange.length == 2);
- if (compensationRange.length != 2) {
- return DEFAULT_RANGE;
- }
-
- checkTrueForKey(key, " range value must be at least " + Arrays.toString(DEFAULT_RANGE),
- compensationRange[0] <= DEFAULT_RANGE[0] &&
- compensationRange[1] >= DEFAULT_RANGE[1]);
+ checkTrueForKey(key, " range value must be at least " + DEFAULT_RANGE,
+ compensationRange.getLower() <= DEFAULT_RANGE.getLower() &&
+ compensationRange.getUpper() >= DEFAULT_RANGE.getUpper());
return compensationRange;
}
@@ -1155,21 +1157,21 @@
*
* @return available video stabilization modes, empty array if it is unavailable.
*/
- public byte[] getAvailableVideoStabilizationModesChecked() {
- Key<byte[]> key =
+ public int[] getAvailableVideoStabilizationModesChecked() {
+ Key<int[]> key =
CameraCharacteristics.CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES;
- byte[] modes = getValueFromKeyNonNull(key);
+ int[] modes = getValueFromKeyNonNull(key);
if (modes == null) {
- return new byte[0];
+ return new int[0];
}
- List<Byte> modeList = Arrays.asList(CameraTestUtils.toObject(modes));
+ List<Integer> modeList = Arrays.asList(CameraTestUtils.toObject(modes));
checkTrueForKey(key, " All device should support OFF mode",
- modeList.contains((byte)CameraMetadata.CONTROL_VIDEO_STABILIZATION_MODE_OFF));
+ modeList.contains(CameraMetadata.CONTROL_VIDEO_STABILIZATION_MODE_OFF));
checkArrayValuesInRange(key, modes,
- (byte)CameraMetadata.CONTROL_VIDEO_STABILIZATION_MODE_OFF,
- (byte)CameraMetadata.CONTROL_VIDEO_STABILIZATION_MODE_ON);
+ CameraMetadata.CONTROL_VIDEO_STABILIZATION_MODE_OFF,
+ CameraMetadata.CONTROL_VIDEO_STABILIZATION_MODE_ON);
return modes;
}
@@ -1179,18 +1181,18 @@
*
* @return available optical stabilization modes, empty array if it is unavailable.
*/
- public byte[] getAvailableOpticalStabilizationChecked() {
- Key<byte[]> key =
+ public int[] getAvailableOpticalStabilizationChecked() {
+ Key<int[]> key =
CameraCharacteristics.LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION;
- byte[] modes = getValueFromKeyNonNull(key);
+ int[] modes = getValueFromKeyNonNull(key);
if (modes == null) {
- return new byte[0];
+ return new int[0];
}
checkArrayValuesInRange(key, modes,
- (byte)CameraMetadata.LENS_OPTICAL_STABILIZATION_MODE_OFF,
- (byte)CameraMetadata.LENS_OPTICAL_STABILIZATION_MODE_ON);
+ CameraMetadata.LENS_OPTICAL_STABILIZATION_MODE_OFF,
+ CameraMetadata.LENS_OPTICAL_STABILIZATION_MODE_ON);
return modes;
}
@@ -1214,38 +1216,38 @@
return maxZoom;
}
- public byte[] getAvailableSceneModesChecked() {
- Key<byte[]> key =
+ public int[] getAvailableSceneModesChecked() {
+ Key<int[]> key =
CameraCharacteristics.CONTROL_AVAILABLE_SCENE_MODES;
- byte[] modes = getValueFromKeyNonNull(key);
+ int[] modes = getValueFromKeyNonNull(key);
if (modes == null) {
- return new byte[0];
+ return new int[0];
}
- List<Byte> modeList = Arrays.asList(CameraTestUtils.toObject(modes));
+ List<Integer> modeList = Arrays.asList(CameraTestUtils.toObject(modes));
// FACE_PRIORITY must be included if face detection is supported.
if (getMaxFaceCountChecked() > 0) {
checkTrueForKey(key, " FACE_PRIORITY must be included if face detection is supported",
- modeList.contains((byte)CameraMetadata.CONTROL_SCENE_MODE_FACE_PRIORITY));
+ modeList.contains(CameraMetadata.CONTROL_SCENE_MODE_FACE_PRIORITY));
}
return modes;
}
- public byte[] getAvailableEffectModesChecked() {
- Key<byte[]> key =
+ public int[] getAvailableEffectModesChecked() {
+ Key<int[]> key =
CameraCharacteristics.CONTROL_AVAILABLE_EFFECTS;
- byte[] modes = getValueFromKeyNonNull(key);
+ int[] modes = getValueFromKeyNonNull(key);
if (modes == null) {
- return new byte[0];
+ return new int[0];
}
- List<Byte> modeList = Arrays.asList(CameraTestUtils.toObject(modes));
+ List<Integer> modeList = Arrays.asList(CameraTestUtils.toObject(modes));
// OFF must be included.
checkTrueForKey(key, " OFF must be included",
- modeList.contains((byte)CameraMetadata.CONTROL_EFFECT_MODE_OFF));
+ modeList.contains(CameraMetadata.CONTROL_EFFECT_MODE_OFF));
return modes;
}
diff --git a/tests/tests/hardware/src/android/hardware/camera2/cts/testcases/Camera2SurfaceViewTestCase.java b/tests/tests/hardware/src/android/hardware/camera2/cts/testcases/Camera2SurfaceViewTestCase.java
index 7470f24..a169f7e 100644
--- a/tests/tests/hardware/src/android/hardware/camera2/cts/testcases/Camera2SurfaceViewTestCase.java
+++ b/tests/tests/hardware/src/android/hardware/camera2/cts/testcases/Camera2SurfaceViewTestCase.java
@@ -37,6 +37,7 @@
import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.CaptureResult;
import android.util.Size;
+import android.util.Range;
import android.hardware.camera2.CameraDevice.CaptureListener;
import android.hardware.camera2.cts.Camera2SurfaceViewStubActivity;
import android.hardware.camera2.cts.CameraTestUtils;
@@ -523,8 +524,8 @@
* @param fpsRange The fps range the returned size must support.
* @return max size that support the given fps range.
*/
- protected Size getMaxPreviewSizeForFpsRange(int[] fpsRange) {
- if (fpsRange == null || fpsRange[0] <= 0 || fpsRange[1] <= 0) {
+ protected Size getMaxPreviewSizeForFpsRange(Range<Integer> fpsRange) {
+ if (fpsRange == null || fpsRange.getLower() <= 0 || fpsRange.getUpper() <= 0) {
throw new IllegalArgumentException("Invalid fps range argument");
}
if (mOrderedPreviewSizes == null || mMinPreviewFrameDurationMap == null) {
@@ -533,7 +534,7 @@
}
long[] frameDurationRange =
- new long[]{(long) (1e9 / fpsRange[1]), (long) (1e9 / fpsRange[0])};
+ new long[]{(long) (1e9 / fpsRange.getUpper()), (long) (1e9 / fpsRange.getLower())};
for (Size size : mOrderedPreviewSizes) {
long minDuration = mMinPreviewFrameDurationMap.get(size);
if (minDuration <= frameDurationRange[0]) {
diff --git a/tests/tests/media/src/android/media/cts/Vp8CodecTestBase.java b/tests/tests/media/src/android/media/cts/Vp8CodecTestBase.java
index 45e4009..58a61ab 100644
--- a/tests/tests/media/src/android/media/cts/Vp8CodecTestBase.java
+++ b/tests/tests/media/src/android/media/cts/Vp8CodecTestBase.java
@@ -277,6 +277,7 @@
} else {
params.timeoutDequeue = 0;
params.runInLooperThread = true;
+ continue; // FIXME add support for async
}
outputParameters.add(params);
}
@@ -680,7 +681,7 @@
* is configured to run in async mode the function will run in a looper thread.
* Encoded frame can be retrieved by calling getOutput() function.
*/
- protected class MediaEncoderAsync extends Thread implements MediaCodec.NotificationCallback {
+ protected class MediaEncoderAsync extends Thread /* FIXME implements MediaCodec.NotificationCallback */ {
private int mId;
private MediaCodec mCodec;
private MediaFormat mFormat;
@@ -708,7 +709,7 @@
private Handler mHandler;
private boolean mCallbackReceived;
- @Override
+ /* FIXME @Override */
public void onCodecNotify(MediaCodec codec) {
synchronized (mCallbackEvent) {
Log.v(TAG, "MediaEncoder " + mId + " Event Callback");
@@ -795,7 +796,7 @@
mCodec.configure(mFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
mCodec.start();
if (mAsync) {
- mCodec.setNotificationCallback(this);
+ /* FIXME mCodec.setNotificationCallback(this); */
}
mInputBuffers = mCodec.getInputBuffers();
mOutputBuffers = mCodec.getOutputBuffers();
diff --git a/tests/tests/media/src/android/media/cts/Vp8EncoderTest.java b/tests/tests/media/src/android/media/cts/Vp8EncoderTest.java
index 19b4caa..7f51a64 100644
--- a/tests/tests/media/src/android/media/cts/Vp8EncoderTest.java
+++ b/tests/tests/media/src/android/media/cts/Vp8EncoderTest.java
@@ -111,7 +111,7 @@
* Checks the PSNR difference between the encoded and decoded output and reference yuv input
* does not change much for two different ways of the encoder call.
*/
- public void testAsyncEncoding() throws Exception {
+ public void FIXME_testAsyncEncoding() throws Exception {
int encodeSeconds = 9;
// First test the encoder running in a looper thread with buffer callbacks enabled.