Merge "Camera: fix CaptureResultTest codegen" into mnc-dev
diff --git a/CtsTestCaseList.mk b/CtsTestCaseList.mk
index 61704a3..ae6c16c 100644
--- a/CtsTestCaseList.mk
+++ b/CtsTestCaseList.mk
@@ -132,6 +132,7 @@
     CtsJobSchedulerDeviceTestCases \
     CtsJniTestCases \
     CtsKeystoreTestCases \
+    CtsLibcoreLegacy22TestCases \
     CtsLocationTestCases \
     CtsLocation2TestCases \
     CtsMediaStressTestCases \
diff --git a/apps/CameraITS/pymodules/its/caps.py b/apps/CameraITS/pymodules/its/caps.py
index b97091b..e57ff88 100644
--- a/apps/CameraITS/pymodules/its/caps.py
+++ b/apps/CameraITS/pymodules/its/caps.py
@@ -133,6 +133,17 @@
     """
     return len(its.objects.get_available_output_sizes("raw10", props)) > 0
 
+def raw12(props):
+    """Returns whether a device supports RAW12 output.
+
+    Args:
+        props: Camera properties object.
+
+    Returns:
+        Boolean.
+    """
+    return len(its.objects.get_available_output_sizes("raw12", props)) > 0
+
 def sensor_fusion(props):
     """Returns whether the camera and motion sensor timestamps for the device
     are in the same time domain and can be compared directly.
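A minimal sketch of how the new raw12() capability check is intended to gate a test, following the existing skip_unless pattern used by the ITS scripts (the session setup mirrors tests/scene1/test_yuv_plus_raw12.py further down):

    import its.caps
    import its.device

    with its.device.ItsSession() as cam:
        props = cam.get_camera_properties()
        # Skip unless the device advertises RAW12 output streams.
        its.caps.skip_unless(its.caps.raw12(props))
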
diff --git a/apps/CameraITS/pymodules/its/device.py b/apps/CameraITS/pymodules/its/device.py
index 035e70b..e396483 100644
--- a/apps/CameraITS/pymodules/its/device.py
+++ b/apps/CameraITS/pymodules/its/device.py
@@ -368,7 +368,7 @@
 
         The out_surfaces field can specify the width(s), height(s), and
         format(s) of the captured image. The formats may be "yuv", "jpeg",
-        "dng", "raw", or "raw10". The default is a YUV420 frame ("yuv")
+        "dng", "raw", "raw10", or "raw12". The default is a YUV420 frame ("yuv")
         corresponding to a full sensor frame.
 
         Note that one or more surfaces can be specified, allowing a capture to
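A minimal sketch of a two-surface capture using the newly documented "raw12" format key; it mirrors the call made in tests/scene1/test_yuv_plus_raw12.py further down, and auto_capture_request() is assumed to be the stock ITS helper for a default request:

    import its.device
    import its.objects

    with its.device.ItsSession() as cam:
        req = its.objects.auto_capture_request()  # assumed stock ITS helper
        # Returned captures come back in the same order as the requested surfaces.
        cap_raw12, cap_yuv = cam.do_capture(
                req, [{"format": "raw12"}, {"format": "yuv"}])
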
diff --git a/apps/CameraITS/pymodules/its/image.py b/apps/CameraITS/pymodules/its/image.py
index b3bdb65..03f8ff9 100644
--- a/apps/CameraITS/pymodules/its/image.py
+++ b/apps/CameraITS/pymodules/its/image.py
@@ -64,6 +64,9 @@
     if cap["format"] == "raw10":
         assert(props is not None)
         cap = unpack_raw10_capture(cap, props)
+    if cap["format"] == "raw12":
+        assert(props is not None)
+        cap = unpack_raw12_capture(cap, props)
     if cap["format"] == "yuv":
         y = cap["data"][0:w*h]
         u = cap["data"][w*h:w*h*5/4]
@@ -114,12 +117,12 @@
         raise its.error.Error('Invalid raw-10 buffer width')
     w = img.shape[1]*4/5
     h = img.shape[0]
-    # Cut out the 4x8b MSBs and shift to bits [10:2] in 16b words.
+    # Cut out the 4x8b MSBs and shift to bits [9:2] in 16b words.
     msbs = numpy.delete(img, numpy.s_[4::5], 1)
     msbs = msbs.astype(numpy.uint16)
     msbs = numpy.left_shift(msbs, 2)
     msbs = msbs.reshape(h,w)
-    # Cut out the 4x2b LSBs and put each in bits [2:0] of their own 8b words.
+    # Cut out the 4x2b LSBs and put each in bits [1:0] of their own 8b words.
     lsbs = img[::, 4::5].reshape(h,w/4)
     lsbs = numpy.right_shift(
             numpy.packbits(numpy.unpackbits(lsbs).reshape(h,w/4,4,2),3), 6)
@@ -128,6 +131,56 @@
     img16 = numpy.bitwise_or(msbs, lsbs).reshape(h,w)
     return img16
 
+def unpack_raw12_capture(cap, props):
+    """Unpack a raw-12 capture to a raw-16 capture.
+
+    Args:
+        cap: A raw-12 capture object.
+        props: Camera properties object.
+
+    Returns:
+        New capture object with raw-16 data.
+    """
+    # Data is packed as 2x12b pixels in 3 bytes, with the first 2 bytes holding
+    # the MSBs of the pixels, and the 3rd byte holding 2x4b LSBs.
+    w,h = cap["width"], cap["height"]
+    if w % 2 != 0:
+        raise its.error.Error('Invalid raw-12 buffer width')
+    cap = copy.deepcopy(cap)
+    cap["data"] = unpack_raw12_image(cap["data"].reshape(h,w*3/2))
+    cap["format"] = "raw"
+    return cap
+
+def unpack_raw12_image(img):
+    """Unpack a raw-12 image to a raw-16 image.
+
+    Output image will have the 12 LSBs filled in each 16b word, and the 4 MSBs
+    will be set to zero.
+
+    Args:
+        img: A raw-12 image, as a uint8 numpy array.
+
+    Returns:
+        Image as a uint16 numpy array, with all row padding stripped.
+    """
+    if img.shape[1] % 3 != 0:
+        raise its.error.Error('Invalid raw-12 buffer width')
+    w = img.shape[1]*2/3
+    h = img.shape[0]
+    # Cut out the 2x8b MSBs and shift to bits [11:4] in 16b words.
+    msbs = numpy.delete(img, numpy.s_[2::3], 1)
+    msbs = msbs.astype(numpy.uint16)
+    msbs = numpy.left_shift(msbs, 4)
+    msbs = msbs.reshape(h,w)
+    # Cut out the 2x4b LSBs and put each in bits [3:0] of their own 8b words.
+    lsbs = img[::, 2::3].reshape(h,w/2)
+    lsbs = numpy.right_shift(
+            numpy.packbits(numpy.unpackbits(lsbs).reshape(h,w/2,2,4),3), 4)
+    lsbs = lsbs.reshape(h,w)
+    # Fuse the MSBs and LSBs back together
+    img16 = numpy.bitwise_or(msbs, lsbs).reshape(h,w)
+    return img16
+
 def convert_capture_to_planes(cap, props=None):
     """Convert a captured image object to separate image planes.
 
diff --git a/apps/CameraITS/pymodules/its/objects.py b/apps/CameraITS/pymodules/its/objects.py
index 22540b8..f6d2e2d 100644
--- a/apps/CameraITS/pymodules/its/objects.py
+++ b/apps/CameraITS/pymodules/its/objects.py
@@ -142,13 +142,15 @@
     """Return a sorted list of available output sizes for a given format.
 
     Args:
-        fmt: the output format, as a string in ["jpg", "yuv", "raw"].
+        fmt: the output format, as a string in
+            ["jpg", "yuv", "raw", "raw10", "raw12"].
         props: the object returned from its.device.get_camera_properties().
 
     Returns:
         A sorted list of (w,h) tuples (sorted large-to-small).
     """
-    fmt_codes = {"raw":0x20, "raw10":0x25, "yuv":0x23, "jpg":0x100, "jpeg":0x100}
+    fmt_codes = {"raw":0x20, "raw10":0x25, "raw12":0x26, "yuv":0x23,
+                 "jpg":0x100, "jpeg":0x100}
     configs = props['android.scaler.streamConfigurationMap']\
                    ['availableStreamConfigurations']
     fmt_configs = [cfg for cfg in configs if cfg['format'] == fmt_codes[fmt]]
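A minimal sketch of querying the newly added "raw12" format code through this helper; props is assumed to be the object returned from its.device.get_camera_properties():

    import its.objects

    raw12_sizes = its.objects.get_available_output_sizes("raw12", props)
    if raw12_sizes:
        # List is sorted large-to-small, so the first entry is the largest RAW12 size.
        width, height = raw12_sizes[0]
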
diff --git a/apps/CameraITS/tests/scene1/test_yuv_plus_raw12.py b/apps/CameraITS/tests/scene1/test_yuv_plus_raw12.py
new file mode 100644
index 0000000..bbd9144
--- /dev/null
+++ b/apps/CameraITS/tests/scene1/test_yuv_plus_raw12.py
@@ -0,0 +1,63 @@
+# Copyright 2015 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import its.image
+import its.caps
+import its.device
+import its.objects
+import its.target
+import os.path
+import math
+
+def main():
+    """Test capturing a single frame as both RAW12 and YUV outputs.
+    """
+    NAME = os.path.basename(__file__).split(".")[0]
+
+    THRESHOLD_MAX_RMS_DIFF = 0.035
+
+    with its.device.ItsSession() as cam:
+        props = cam.get_camera_properties()
+        its.caps.skip_unless(its.caps.compute_target_exposure(props) and
+                             its.caps.raw12(props) and
+                             its.caps.per_frame_control(props))
+
+        # Use a manual request with a linear tonemap so that the YUV and RAW
+        # should look the same (once converted by the its.image module).
+        e, s = its.target.get_target_exposure_combos(cam)["midExposureTime"]
+        req = its.objects.manual_capture_request(s, e, True)
+
+        cap_raw, cap_yuv = cam.do_capture(req,
+                [{"format":"raw12"}, {"format":"yuv"}])
+
+        img = its.image.convert_capture_to_rgb_image(cap_yuv)
+        its.image.write_image(img, "%s_yuv.jpg" % (NAME), True)
+        tile = its.image.get_image_patch(img, 0.45, 0.45, 0.1, 0.1)
+        rgb0 = its.image.compute_image_means(tile)
+
+        # Raw shots are 1/2 x 1/2 the size of the YUV shot after conversion to
+        # RGB, so scale the tile appropriately.
+        img = its.image.convert_capture_to_rgb_image(cap_raw, props=props)
+        its.image.write_image(img, "%s_raw.jpg" % (NAME), True)
+        tile = its.image.get_image_patch(img, 0.475, 0.475, 0.05, 0.05)
+        rgb1 = its.image.compute_image_means(tile)
+
+        rms_diff = math.sqrt(
+                sum([pow(rgb0[i] - rgb1[i], 2.0) for i in range(3)]) / 3.0)
+        print "RMS difference:", rms_diff
+        assert(rms_diff < THRESHOLD_MAX_RMS_DIFF)
+
+if __name__ == '__main__':
+    main()
+
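For reference, a worked instance of the pass criterion used above with hypothetical tile means: if the YUV patch averages to (0.40, 0.41, 0.39) and the RAW patch to (0.42, 0.40, 0.41), then rms_diff = sqrt(((0.02)^2 + (0.01)^2 + (0.02)^2) / 3), roughly 0.0173, which is comfortably under the 0.035 threshold:

    import math

    rgb0 = [0.40, 0.41, 0.39]  # hypothetical YUV tile means
    rgb1 = [0.42, 0.40, 0.41]  # hypothetical RAW tile means
    rms_diff = math.sqrt(sum(pow(rgb0[i] - rgb1[i], 2.0) for i in range(3)) / 3.0)
    assert rms_diff < 0.035  # ~0.0173
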
diff --git a/apps/CtsVerifier/Android.mk b/apps/CtsVerifier/Android.mk
index 227c6cb..37f1f90 100644
--- a/apps/CtsVerifier/Android.mk
+++ b/apps/CtsVerifier/Android.mk
@@ -31,6 +31,7 @@
                                ctstestrunner \
                                apache-commons-math \
                                androidplot \
+                               ctsverifier-opencv \
 
 LOCAL_PACKAGE_NAME := CtsVerifier
 
@@ -44,6 +45,16 @@
 
 include $(BUILD_PACKAGE)
 
+
+# opencv library
+include $(CLEAR_VARS)
+
+LOCAL_PREBUILT_STATIC_JAVA_LIBRARIES := \
+        ctsverifier-opencv:libs/opencv-android.jar
+
+include $(BUILD_MULTI_PREBUILT)
+
+
 notification-bot := $(call intermediates-dir-for,APPS,NotificationBot)/package.apk
 
 # Builds and launches CTS Verifier on a device.
diff --git a/apps/CtsVerifier/AndroidManifest.xml b/apps/CtsVerifier/AndroidManifest.xml
index 96b5676..5380601 100644
--- a/apps/CtsVerifier/AndroidManifest.xml
+++ b/apps/CtsVerifier/AndroidManifest.xml
@@ -720,6 +720,33 @@
                        android:value="android.hardware.sensor.compass" />
         </activity>
 
+        <activity
+            android:name=".sensors.RVCVXCheckTestActivity"
+            android:keepScreenOn="true"
+            android:label="@string/snsr_rvcvxchk_test"
+            android:screenOrientation="locked" >
+            <intent-filter>
+                <action android:name="android.intent.action.MAIN" />
+                <category android:name="android.cts.intent.category.MANUAL_TEST"/>
+            </intent-filter>
+
+            <meta-data
+                android:name="test_category"
+                android:value="@string/test_category_sensors" />
+            <meta-data
+                android:name="test_required_features"
+                android:value="android.hardware.sensor.accelerometer:android.hardware.sensor.gyroscope:android.hardware.sensor.compass:android.hardware.camera.any" />
+            <meta-data android:name="test_excluded_features"
+                    android:value="android.hardware.type.television" />
+        </activity>
+        <activity
+            android:name=".sensors.RVCVRecordActivity"
+            android:keepScreenOn="true"
+            android:label="@string/snsr_rvcvxchk_test_rec"
+            android:screenOrientation="locked" >
+        </activity>
+
+
         <!-- TODO: enable when a full set of verifications can be implemented -->
         <!--activity android:name=".sensors.RotationVectorTestActivity"
                   android:label="@string/snsr_rot_vec_test"
@@ -1425,6 +1452,17 @@
             <meta-data android:name="test_required_features" android:value="android.hardware.microphone" />
         </activity>
 
+        <activity android:name=".audio.HifiUltrasoundSpeakerTestActivity"
+                android:label="@string/hifi_ultrasound_speaker_test"
+                android:screenOrientation="locked">
+            <intent-filter>
+                <action android:name="android.intent.action.MAIN" />
+                <category android:name="android.cts.intent.category.MANUAL_TEST" />
+            </intent-filter>
+            <meta-data android:name="test_category" android:value="@string/test_category_audio" />
+            <meta-data android:name="test_required_features" android:value="android.hardware.microphone" />
+        </activity>
+
         <service android:name=".tv.MockTvInputService"
             android:permission="android.permission.BIND_TV_INPUT">
             <intent-filter>
diff --git a/apps/CtsVerifier/libs/opencv-android.jar b/apps/CtsVerifier/libs/opencv-android.jar
new file mode 100644
index 0000000..1c13eee
--- /dev/null
+++ b/apps/CtsVerifier/libs/opencv-android.jar
Binary files differ
diff --git a/apps/CtsVerifier/libs/opencv-android_LICENSE b/apps/CtsVerifier/libs/opencv-android_LICENSE
new file mode 100644
index 0000000..5e32d88
--- /dev/null
+++ b/apps/CtsVerifier/libs/opencv-android_LICENSE
@@ -0,0 +1,33 @@
+By downloading, copying, installing or using the software you agree to this license.
+If you do not agree to this license, do not download, install,
+copy or use the software.
+
+
+                          License Agreement
+               For Open Source Computer Vision Library
+                       (3-clause BSD License)
+
+Redistribution and use in source and binary forms, with or without modification,
+are permitted provided that the following conditions are met:
+
+  * Redistributions of source code must retain the above copyright notice,
+    this list of conditions and the following disclaimer.
+
+  * Redistributions in binary form must reproduce the above copyright notice,
+    this list of conditions and the following disclaimer in the documentation
+    and/or other materials provided with the distribution.
+
+  * Neither the names of the copyright holders nor the names of the contributors
+    may be used to endorse or promote products derived from this software
+    without specific prior written permission.
+
+This software is provided by the copyright holders and contributors "as is" and
+any express or implied warranties, including, but not limited to, the implied
+warranties of merchantability and fitness for a particular purpose are disclaimed.
+In no event shall copyright holders or contributors be liable for any direct,
+indirect, incidental, special, exemplary, or consequential damages
+(including, but not limited to, procurement of substitute goods or services;
+loss of use, data, or profits; or business interruption) however caused
+and on any theory of liability, whether in contract, strict liability,
+or tort (including negligence or otherwise) arising in any way out of
+the use of this software, even if advised of the possibility of such damage.
diff --git a/apps/CtsVerifier/res/drawable/prompt_x.png b/apps/CtsVerifier/res/drawable/prompt_x.png
new file mode 100644
index 0000000..64302dc
--- /dev/null
+++ b/apps/CtsVerifier/res/drawable/prompt_x.png
Binary files differ
diff --git a/apps/CtsVerifier/res/drawable/prompt_y.png b/apps/CtsVerifier/res/drawable/prompt_y.png
new file mode 100644
index 0000000..01926b5
--- /dev/null
+++ b/apps/CtsVerifier/res/drawable/prompt_y.png
Binary files differ
diff --git a/apps/CtsVerifier/res/drawable/prompt_z.png b/apps/CtsVerifier/res/drawable/prompt_z.png
new file mode 100644
index 0000000..f4d86d6
--- /dev/null
+++ b/apps/CtsVerifier/res/drawable/prompt_z.png
Binary files differ
diff --git a/apps/CtsVerifier/res/layout/cam_preview_overlay.xml b/apps/CtsVerifier/res/layout/cam_preview_overlay.xml
new file mode 100644
index 0000000..41bbeb1
--- /dev/null
+++ b/apps/CtsVerifier/res/layout/cam_preview_overlay.xml
@@ -0,0 +1,39 @@
+<?xml version="1.0" encoding="utf-8"?>
+<RelativeLayout xmlns:android="http://schemas.android.com/apk/res/android"
+    android:orientation="vertical" android:layout_width="match_parent"
+    android:layout_height="match_parent"
+    android:keepScreenOn="true">
+    <view
+        android:layout_width="match_parent"
+        android:layout_height="match_parent"
+        class="com.android.cts.verifier.sensors.RVCVCameraPreview"
+        android:id="@+id/cam_preview"
+        android:layout_centerVertical="true"
+        android:layout_centerHorizontal="true" />
+
+    <!--
+    <ImageView
+        android:layout_width="match_parent"
+        android:layout_height="match_parent"
+        android:layout_centerVertical="true"
+        android:id="@+id/cam_overlay"
+        android:src="@drawable/icon"
+        android:scaleType="fitStart"
+        />
+    -->
+    <view
+        android:layout_width="match_parent"
+        android:layout_height="match_parent"
+        class="com.android.cts.verifier.sensors.MotionIndicatorView"
+        android:id="@+id/cam_indicator"
+        android:layout_centerVertical="true"
+        android:layout_centerHorizontal="true" />
+
+    <ImageView
+        android:layout_width="wrap_content"
+        android:layout_height="wrap_content"
+        android:layout_centerInParent="true"
+        android:id="@+id/cam_overlay"
+        android:scaleType="fitStart"
+        />
+</RelativeLayout>
\ No newline at end of file
diff --git a/apps/CtsVerifier/res/layout/test_list_footer.xml b/apps/CtsVerifier/res/layout/test_list_footer.xml
index fdb8e43..cb73ed1 100644
--- a/apps/CtsVerifier/res/layout/test_list_footer.xml
+++ b/apps/CtsVerifier/res/layout/test_list_footer.xml
@@ -17,22 +17,26 @@
   -->
 <GridLayout xmlns:android="http://schemas.android.com/apk/res/android"
     android:orientation="horizontal"
+    android:columnCount="@integer/test_list_footer_button_count"
     android:layout_width="match_parent"
     android:layout_height="wrap_content">
 
     <Button
         android:id="@+id/clear"
         android:text="@string/clear"
+        android:layout_gravity="center"
         android:layout_width="wrap_content"
         android:layout_height="wrap_content" />
     <Button
         android:id="@+id/view"
         android:text="@string/view"
+        android:layout_gravity="center"
         android:layout_width="wrap_content"
         android:layout_height="wrap_content" />
     <Button
         android:id="@+id/export"
         android:text="@string/export"
+        android:layout_gravity="center"
         android:layout_width="wrap_content"
         android:layout_height="wrap_content" />
 </GridLayout>
diff --git a/apps/CtsVerifier/res/raw/next_axis.mp3 b/apps/CtsVerifier/res/raw/next_axis.mp3
new file mode 100644
index 0000000..0a3174d
--- /dev/null
+++ b/apps/CtsVerifier/res/raw/next_axis.mp3
Binary files differ
diff --git a/apps/CtsVerifier/res/values-small/integers.xml b/apps/CtsVerifier/res/values-small/integers.xml
new file mode 100644
index 0000000..274db44
--- /dev/null
+++ b/apps/CtsVerifier/res/values-small/integers.xml
@@ -0,0 +1,18 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright (C) 2015 The Android Open Source Project
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+          http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+-->
+<resources>
+    <integer name="test_list_footer_button_count">1</integer>
+</resources>
diff --git a/apps/CtsVerifier/res/values/integers.xml b/apps/CtsVerifier/res/values/integers.xml
new file mode 100644
index 0000000..2ced54b
--- /dev/null
+++ b/apps/CtsVerifier/res/values/integers.xml
@@ -0,0 +1,18 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright (C) 2015 The Android Open Source Project
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+          http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+-->
+<resources>
+    <integer name="test_list_footer_button_count">3</integer>
+</resources>
diff --git a/apps/CtsVerifier/res/values/strings.xml b/apps/CtsVerifier/res/values/strings.xml
index e70fa6d..1b0ca68 100644
--- a/apps/CtsVerifier/res/values/strings.xml
+++ b/apps/CtsVerifier/res/values/strings.xml
@@ -287,20 +287,46 @@
     <string name="empty"></string>
 
     <!-- Strings for HifiUltrasoundTestActivity -->
-    <string name="hifi_ultrasound_test">Hifi Ultrasound Test</string>
-    <string name="hifi_ultrasound_test_info">This is a test for near-ultrasound response.\n
-        This test requires two devices, one as recording device, one as playback device.\n</string>
-    <string name="hifi_ultrasound_test_play">GENERATE</string>
+    <string name="hifi_ultrasound_test">Hifi Ultrasound Test (microphone)</string>
+    <string name="hifi_ultrasound_test_info">
+        This is a test for microphone near-ultrasound (18500 Hz - 20000 Hz) response.\n
+        This test requires two devices.\n</string>
+    <string name="hifi_ultrasound_test_play">PLAY</string>
     <string name="hifi_ultrasound_test_record">RECORD</string>
-    <string name="hifi_ultrasound_test_playback">PLAY</string>
     <string name="hifi_ultrasound_test_plot">PLOT</string>
     <string name="hifi_ultrasound_test_dismiss">DISMISS</string>
     <string name="hifi_ultrasound_test_instruction1">
-        Set the volume of the playback device at 70% and hold it with one hand.\n
-        Hold the recording device with the other hand\n
-        Press the RECORD button on the recording device, the GENERATE button on the playback device simultaneously.\n</string>
+        Set the volume of the reference device to 70% and hold it with one hand.\n
+        Hold the testing device with the other hand.\n
+        Press the RECORD button on the testing device, then the PLAY button on the reference device within one second.\n
+        After the test, report the result on the testing device.\n</string>
     <string name="hifi_ultrasound_test_pass">PASS</string>
     <string name="hifi_ultrasound_test_fail">FAIL</string>
+    <string name="hifi_ultrasound_test_default_false_string">false</string>
+    <string name="hifi_ultrasound_test_mic_prop">persist.audio.mic.ultrasound</string>
+    <string name="hifi_ultrasound_test_spkr_prop">persist.audio.spkr.ultrasound</string>
+    <string name="hifi_ultrasound_test_mic_no_support">
+        Device does not support near-ultrasound recording.\n
+        Please click pass if this is the testing device.\n</string>
+    <string name="hifi_ultrasound_test_spkr_no_support">
+        Device does not support near-ultrasound playback.\n
+        If this is your reference device, please use a different reference device.\n</string>
+
+    <string name="hifi_ultrasound_speaker_test">Hifi Ultrasound Test (speaker)</string>
+    <string name="hifi_ultrasound_speaker_test_info">
+        This is a test for speaker near-ultrasound (18500 Hz - 20000 Hz) response.\n
+        This test requires two devices.\n</string>
+    <string name="hifi_ultrasound_speaker_test_instruction1">
+        Set the volume of the testing device to 70% and hold it with one hand.\n
+        Hold the reference device with the other hand.\n
+        Press the RECORD button on the reference device, then the PLAY button on the testing device within one second.\n
+        After the test, report the result on the testing device.\n</string>
+    <string name="hifi_ultrasound_speaker_test_mic_no_support">
+        Device does not support near-ultrasound recording.\n
+        If this is your reference device, please use a different reference device.\n</string>
+    <string name="hifi_ultrasound_speaker_test_spkr_no_support">
+        Device does not support near-ultrasound playback.\n
+        Please click pass if this is the testing device.\n</string>
 
     <!-- Strings for Location tests -->
     <string name="location_gps_test">GPS Test</string>
@@ -500,6 +526,8 @@
     <string name="snsr_test_skipped">SKIPPED</string>
     <string name="snsr_test_fail">FAIL</string>
     <string name="snsr_execution_time">Test execution time %1$s sec</string>
+    <string name="snsr_rvcvxchk_test">Rotation Vector CV XCheck</string>
+    <string name="snsr_rvcvxchk_test_rec">Rotation Vector CV XCheck Recording</string>
 
     <!-- Strings to interact with users in Sensor Tests -->
     <string name="snsr_test_play_sound">A sound will be played once the verification is complete...</string>
diff --git a/apps/CtsVerifier/src/com/android/cts/verifier/audio/AudioRecordHelper.java b/apps/CtsVerifier/src/com/android/cts/verifier/audio/AudioRecordHelper.java
index dafa117..80dd250 100644
--- a/apps/CtsVerifier/src/com/android/cts/verifier/audio/AudioRecordHelper.java
+++ b/apps/CtsVerifier/src/com/android/cts/verifier/audio/AudioRecordHelper.java
@@ -1,14 +1,13 @@
 package com.android.cts.verifier.audio;
 
 import android.media.AudioFormat;
+import android.media.AudioManager;
 import android.media.AudioRecord;
+import android.media.AudioTrack;
 import android.media.MediaRecorder;
 import android.util.Log;
 
 import java.io.ByteArrayOutputStream;
-import java.io.File;
-import java.io.FileNotFoundException;
-import java.io.FileOutputStream;
 import java.io.IOException;
 
 /**
@@ -16,15 +15,10 @@
  */
 public class AudioRecordHelper {
 
-  // order of preference
-  // MIC 48000Hz
-  // MIC 44100Hz
-  // VOICE_RECOGNITION 48000Hz
-  // VOICE_RECOGNITION 44100Hz
-  // if all these 4 settings failed, it logs an error
   private static final int[] SOURCE = {
       MediaRecorder.AudioSource.MIC, MediaRecorder.AudioSource.VOICE_RECOGNITION};
-  private static final int[] SAMPLE_RATES_HZ = {48000, 44100};
+  private static final int[] SAMPLE_RATES_HZ = {
+    AudioTrack.getNativeOutputSampleRate(AudioManager.STREAM_MUSIC), 48000, 44100};
 
   private static final int CHANNEL = AudioFormat.CHANNEL_CONFIGURATION_MONO;
   private static final int ENCODING = AudioFormat.ENCODING_PCM_16BIT;
@@ -147,18 +141,4 @@
   public byte[] getByte() {
     return os.toByteArray();
   }
-
-  /**
-   * Writes data to file
-   */
-  public void writeToFile() {
-    try {
-      FileOutputStream fos = new FileOutputStream(new File(Common.PCM_FILE));
-      fos.write(os.toByteArray());
-    } catch (FileNotFoundException e) {
-      e.printStackTrace();
-    } catch (IOException e) {
-      e.printStackTrace();
-    }
-  }
 }
diff --git a/apps/CtsVerifier/src/com/android/cts/verifier/audio/Common.java b/apps/CtsVerifier/src/com/android/cts/verifier/audio/Common.java
index f30b990..df7460a 100644
--- a/apps/CtsVerifier/src/com/android/cts/verifier/audio/Common.java
+++ b/apps/CtsVerifier/src/com/android/cts/verifier/audio/Common.java
@@ -1,5 +1,8 @@
 package com.android.cts.verifier.audio;
 
+import android.media.AudioManager;
+import android.media.AudioTrack;
+
 import java.util.ArrayList;
 import java.util.Random;
 
@@ -8,51 +11,39 @@
  */
 public class Common {
 
+  public static final int RECORDING_SAMPLE_RATE_HZ
+      = AudioRecordHelper.getInstance().getSampleRate();
+  public static final int PLAYING_SAMPLE_RATE_HZ
+      = AudioTrack.getNativeOutputSampleRate(AudioManager.STREAM_MUSIC);
+
   // Default constants.
+  public static final double PASSING_THRESHOLD_DB = -40.0;
   public static final double PIP_DURATION_S = 0.004;
   public static final double PAUSE_DURATION_S = 0.016;
   public static final int PREFIX_NUM_CHIPS = 1023;
   public static final int PREFIX_SAMPLES_PER_CHIP = 4;
-  public static final int PREFIX_LENGTH =
-      PREFIX_NUM_CHIPS * PREFIX_SAMPLES_PER_CHIP;
-  public static final double PAUSE_BEFORE_PREFIX_DURATION_S = 1.0;
-  public static final double PAUSE_AFTER_PREFIX_DURATION_S = 0.5;
+  public static final double PREFIX_LENGTH_S = 0.1;
+  public static final double PAUSE_BEFORE_PREFIX_DURATION_S = 0.5;
+  public static final double PAUSE_AFTER_PREFIX_DURATION_S = 0.4;
   public static final double MIN_FREQUENCY_HZ = 500;
   public static final double MAX_FREQUENCY_HZ = 21000;
   public static final double FREQUENCY_STEP_HZ = 100;
-  public static final int AUDIBLE_SIGNAL_MIN_STRENGTH_DB = 10;
-  public static final int ULTRASOUND_SIGNAL_MIN_STRENGTH_RATIO = 2;
-  // Variables defined for convenience.
+  public static final int SIGNAL_MIN_STRENGTH_DB_ABOVE_NOISE = 10;
   public static final int REPETITIONS = 5;
-  public static final double[] PREFIX = prefix();
+  public static final int NOISE_SAMPLES = 3;
+
   public static final double[] FREQUENCIES_ORIGINAL = originalFrequencies();
   public static final int PIP_NUM = FREQUENCIES_ORIGINAL.length;
   public static final int[] ORDER = order();
   public static final double[] FREQUENCIES = frequencies();
-  // A PCM file is just raw monaural sample data. Samples are serialized as little endian signed
-  // 16-bit integers. The sample rate is determined by AudioRecordHelper.getSampleRate().
-  public static final String PCM_FILE = "/sdcard/sound_self_tester.pcm";
-  private static int recordingSampleRateHz = -1;
-  private static double[] window;
-  private static double[] generateWindow;
 
-  public static int getSampleRate() {
-    return recordingSampleRateHz;
-  }
+  public static final double[] WINDOW_FOR_RECORDER =
+      hann(Util.toLength(PIP_DURATION_S, RECORDING_SAMPLE_RATE_HZ));
+  public static final double[] WINDOW_FOR_PLAYER =
+      hann(Util.toLength(PIP_DURATION_S, PLAYING_SAMPLE_RATE_HZ));
 
-  public static void setSampleRate(int sampleRate) {
-    recordingSampleRateHz = sampleRate;
-    window = hann(Util.toLength(PIP_DURATION_S, recordingSampleRateHz));
-    generateWindow = hann(Util.toLength(PIP_DURATION_S, recordingSampleRateHz));
-  }
-
-  public static double[] window() {
-    return window;
-  }
-
-  public static double[] generateWindow() {
-    return generateWindow;
-  }
+  public static final double[] PREFIX_FOR_RECORDER = prefix(RECORDING_SAMPLE_RATE_HZ);
+  public static final double[] PREFIX_FOR_PLAYER = prefix(PLAYING_SAMPLE_RATE_HZ);
 
   /**
    * Get a Hann window.
@@ -69,7 +60,7 @@
   /**
    * Get a maximum length sequence, used as prefix to indicate start of signal.
    */
-  private static double[] prefix() {
+  private static double[] prefix(int rate) {
     double[] codeSequence = new double[PREFIX_NUM_CHIPS];
     for (int i = 0; i < PREFIX_NUM_CHIPS; i++) {
       if (i < 10) {
@@ -79,7 +70,7 @@
             * codeSequence[i - 9] * codeSequence[i - 10];
       }
     }
-    double[] prefixArray = new double[PREFIX_LENGTH];
+    double[] prefixArray = new double[PREFIX_NUM_CHIPS * PREFIX_SAMPLES_PER_CHIP];
     int offset = 0;
     for (int i = 0; i < PREFIX_NUM_CHIPS; i++) {
       double value = codeSequence[i];
@@ -88,7 +79,14 @@
       }
       offset += PREFIX_SAMPLES_PER_CHIP;
     }
-    return prefixArray;
+    int prefixLength = (int) Math.round(PREFIX_LENGTH_S * rate);
+    double[] samplePrefixArray = new double[prefixLength];
+    for (int i = 0; i < prefixLength; i++) {
+      double index = (double) i / prefixLength * (prefixArray.length - 1);
+      samplePrefixArray[i] = (1 - index + Math.floor(index)) * prefixArray[(int) Math.floor(index)]
+          + (1 + index - Math.ceil(index)) * prefixArray[(int) Math.ceil(index)];
+    }
+    return samplePrefixArray;
   }
 
   /**
diff --git a/apps/CtsVerifier/src/com/android/cts/verifier/audio/HifiUltrasoundSpeakerTestActivity.java b/apps/CtsVerifier/src/com/android/cts/verifier/audio/HifiUltrasoundSpeakerTestActivity.java
new file mode 100644
index 0000000..fa5ad81
--- /dev/null
+++ b/apps/CtsVerifier/src/com/android/cts/verifier/audio/HifiUltrasoundSpeakerTestActivity.java
@@ -0,0 +1,372 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.cts.verifier.audio;
+
+import com.android.cts.verifier.PassFailButtons;
+import com.android.cts.verifier.R;
+
+import android.content.Context;
+import android.media.AudioFormat;
+import android.media.AudioManager;
+import android.media.AudioTrack;
+import android.os.AsyncTask;
+import android.os.Bundle;
+import android.text.method.ScrollingMovementMethod;
+import android.util.Log;
+import android.view.Gravity;
+import android.view.LayoutInflater;
+import android.view.View;
+import android.widget.Button;
+import android.widget.LinearLayout;
+import android.widget.LinearLayout.LayoutParams;
+import android.widget.PopupWindow;
+import android.widget.TextView;
+import java.util.Arrays;
+
+import com.androidplot.xy.SimpleXYSeries;
+import com.androidplot.xy.XYSeries;
+import com.androidplot.xy.*;
+
+public class HifiUltrasoundSpeakerTestActivity extends PassFailButtons.Activity {
+
+    public enum Status {
+        START, RECORDING, DONE, PLAYER
+    }
+
+    private static final String TAG = "HifiUltrasoundTestActivity";
+
+    private Status status = Status.START;
+    private boolean onPlotScreen = false;
+    private TextView info;
+    private Button playerButton;
+    private Button recorderButton;
+    private AudioTrack audioTrack;
+    private LayoutInflater layoutInflater;
+    private View popupView;
+    private PopupWindow popupWindow;
+    private boolean micSupport = true;
+    private boolean spkrSupport = true;
+
+    @Override
+    public void onBackPressed () {
+        if (onPlotScreen) {
+            popupWindow.dismiss();
+            onPlotScreen = false;
+            recorderButton.setEnabled(true);
+        } else {
+            super.onBackPressed();
+        }
+    }
+
+    @Override
+    protected void onCreate(Bundle savedInstanceState) {
+        super.onCreate(savedInstanceState);
+        setContentView(R.layout.hifi_ultrasound);
+        setInfoResources(R.string.hifi_ultrasound_speaker_test,
+            R.string.hifi_ultrasound_speaker_test_info, -1);
+        setPassFailButtonClickListeners();
+        getPassButton().setEnabled(false);
+
+        info = (TextView) findViewById(R.id.info_text);
+        info.setMovementMethod(new ScrollingMovementMethod());
+        info.setText(R.string.hifi_ultrasound_speaker_test_instruction1);
+
+        AudioManager audioManager = (AudioManager) getSystemService(Context.AUDIO_SERVICE);
+        String micSupportString = audioManager.getProperty(
+            getResources().getString(R.string.hifi_ultrasound_test_mic_prop));
+        String spkrSupportString = audioManager.getProperty(
+            getResources().getString(R.string.hifi_ultrasound_test_spkr_prop));
+
+        if (micSupportString == null) {
+          micSupportString = "null";
+        }
+        if (spkrSupportString == null) {
+          spkrSupportString = "null";
+        }
+        if (micSupportString.equalsIgnoreCase(getResources().getString(
+            R.string.hifi_ultrasound_test_default_false_string))) {
+          micSupport = false;
+          getPassButton().setEnabled(true);
+          info.append(getResources().getString(R.string.hifi_ultrasound_speaker_test_mic_no_support));
+        }
+        if (spkrSupportString.equalsIgnoreCase(getResources().getString(
+            R.string.hifi_ultrasound_test_default_false_string))) {
+          spkrSupport = false;
+          info.append(getResources().getString(R.string.hifi_ultrasound_speaker_test_spkr_no_support));
+        }
+
+        layoutInflater = (LayoutInflater) getBaseContext().getSystemService(
+            LAYOUT_INFLATER_SERVICE);
+        popupView = layoutInflater.inflate(R.layout.hifi_ultrasound_popup, null);
+        popupWindow = new PopupWindow(
+            popupView, LayoutParams.WRAP_CONTENT, LayoutParams.WRAP_CONTENT);
+
+        final AudioRecordHelper audioRecorder = AudioRecordHelper.getInstance();
+        final int recordRate = audioRecorder.getSampleRate();
+
+        recorderButton = (Button) findViewById(R.id.recorder_button);
+        recorderButton.setEnabled(micSupport);
+        recorderButton.setOnClickListener(new View.OnClickListener() {
+          private WavAnalyzerTask wavAnalyzerTask = null;
+          private void stopRecording() {
+            audioRecorder.stop();
+            wavAnalyzerTask = new WavAnalyzerTask(audioRecorder.getByte());
+            wavAnalyzerTask.execute();
+            status = Status.DONE;
+          }
+          @Override
+          public void onClick(View v) {
+            switch (status) {
+              case START:
+                info.append("Recording at " + recordRate + "Hz using ");
+                final int source = audioRecorder.getAudioSource();
+                switch (source) {
+                  case 1:
+                    info.append("MIC");
+                    break;
+                  case 6:
+                    info.append("VOICE_RECOGNITION");
+                    break;
+                  default:
+                    info.append("UNEXPECTED " + source);
+                    break;
+                }
+                info.append("\n");
+                status = Status.RECORDING;
+                playerButton.setEnabled(false);
+                recorderButton.setEnabled(false);
+                audioRecorder.start();
+
+                final View finalV = v;
+                new Thread() {
+                  @Override
+                  public void run() {
+                    Double recordingDuration_millis = new Double(1000 * (2.5
+                          + Common.PREFIX_LENGTH_S
+                          + Common.PAUSE_BEFORE_PREFIX_DURATION_S
+                          + Common.PAUSE_AFTER_PREFIX_DURATION_S
+                          + Common.PIP_NUM * (Common.PIP_DURATION_S + Common.PAUSE_DURATION_S)
+                          * Common.REPETITIONS));
+                    Log.d(TAG, "Recording for " + recordingDuration_millis + "ms");
+                    try {
+                      Thread.sleep(recordingDuration_millis.intValue());
+                    } catch (InterruptedException e) {
+                      throw new RuntimeException(e);
+                    }
+                    runOnUiThread(new Runnable() {
+                      @Override
+                      public void run() {
+                        stopRecording();
+                      }
+                    });
+                  }
+                }.start();
+
+                break;
+
+              case DONE:
+                plotResponse(wavAnalyzerTask);
+                break;
+
+              default: break;
+            }
+          }
+        });
+
+        playerButton = (Button) findViewById(R.id.player_button);
+        playerButton.setEnabled(spkrSupport);
+        playerButton.setOnClickListener(new View.OnClickListener() {
+          @Override
+          public void onClick(View v) {
+              recorderButton.setEnabled(false);
+              status = Status.PLAYER;
+              play();
+              getPassButton().setEnabled(true);
+          }
+        });
+    }
+
+
+    private void plotResponse(WavAnalyzerTask wavAnalyzerTask) {
+      Button dismissButton = (Button)popupView.findViewById(R.id.dismiss);
+      dismissButton.setOnClickListener(new Button.OnClickListener(){
+        @Override
+        public void onClick(View v) {
+          popupWindow.dismiss();
+          onPlotScreen = false;
+          recorderButton.setEnabled(true);
+        }});
+      popupWindow.showAtLocation(info, Gravity.CENTER, 0, 0);
+      onPlotScreen = true;
+
+      recorderButton.setEnabled(false);
+
+      XYPlot plot = (XYPlot) popupView.findViewById(R.id.responseChart);
+      plot.setDomainStep(XYStepMode.INCREMENT_BY_VAL, 2000);
+
+      Double[] frequencies = new Double[Common.PIP_NUM];
+      for (int i = 0; i < Common.PIP_NUM; i++) {
+        frequencies[i] = new Double(Common.FREQUENCIES_ORIGINAL[i]);
+      }
+
+      if (wavAnalyzerTask != null) {
+
+        double[][] power = wavAnalyzerTask.getPower();
+        for(int i = 0; i < Common.REPETITIONS; i++) {
+          Double[] powerWrap = new Double[Common.PIP_NUM];
+          for (int j = 0; j < Common.PIP_NUM; j++) {
+            powerWrap[j] = new Double(10 * Math.log10(power[j][i]));
+          }
+          XYSeries series = new SimpleXYSeries(
+              Arrays.asList(frequencies),
+              Arrays.asList(powerWrap),
+              "");
+          LineAndPointFormatter seriesFormat = new LineAndPointFormatter();
+          seriesFormat.configure(getApplicationContext(),
+              R.xml.ultrasound_line_formatter_trials);
+          seriesFormat.setPointLabelFormatter(null);
+          plot.addSeries(series, seriesFormat);
+        }
+
+        double[] noiseDB = wavAnalyzerTask.getNoiseDB();
+        Double[] noiseDBWrap = new Double[Common.PIP_NUM];
+        for (int i = 0; i < Common.PIP_NUM; i++) {
+          noiseDBWrap[i] = new Double(noiseDB[i]);
+        }
+
+        XYSeries noiseSeries = new SimpleXYSeries(
+            Arrays.asList(frequencies),
+            Arrays.asList(noiseDBWrap),
+            "background noise");
+        LineAndPointFormatter noiseSeriesFormat = new LineAndPointFormatter();
+        noiseSeriesFormat.configure(getApplicationContext(),
+            R.xml.ultrasound_line_formatter_noise);
+        noiseSeriesFormat.setPointLabelFormatter(null);
+        plot.addSeries(noiseSeries, noiseSeriesFormat);
+
+        double[] dB = wavAnalyzerTask.getDB();
+        Double[] dBWrap = new Double[Common.PIP_NUM];
+        for (int i = 0; i < Common.PIP_NUM; i++) {
+          dBWrap[i] = new Double(dB[i]);
+        }
+
+        XYSeries series = new SimpleXYSeries(
+            Arrays.asList(frequencies),
+            Arrays.asList(dBWrap),
+            "median");
+        LineAndPointFormatter seriesFormat = new LineAndPointFormatter();
+        seriesFormat.configure(getApplicationContext(),
+            R.xml.ultrasound_line_formatter_median);
+        seriesFormat.setPointLabelFormatter(null);
+        plot.addSeries(series, seriesFormat);
+
+        Double[] passX = new Double[] {Common.MIN_FREQUENCY_HZ, Common.MAX_FREQUENCY_HZ};
+        Double[] passY = new Double[] {wavAnalyzerTask.getThreshold(), wavAnalyzerTask.getThreshold()};
+        XYSeries passSeries = new SimpleXYSeries(
+            Arrays.asList(passX), Arrays.asList(passY), "passing");
+        LineAndPointFormatter passSeriesFormat = new LineAndPointFormatter();
+        passSeriesFormat.configure(getApplicationContext(),
+            R.xml.ultrasound_line_formatter_pass);
+        passSeriesFormat.setPointLabelFormatter(null);
+        plot.addSeries(passSeries, passSeriesFormat);
+      }
+    }
+
+    /**
+     * Plays the generated pips.
+     */
+    private void play() {
+      play(SoundGenerator.getInstance().getByte(), Common.PLAYING_SAMPLE_RATE_HZ);
+    }
+
+    /**
+     * Plays the sound data.
+     */
+    private void play(byte[] data, int sampleRate) {
+      if (audioTrack != null) {
+        audioTrack.stop();
+        audioTrack.release();
+      }
+      audioTrack = new AudioTrack(AudioManager.STREAM_MUSIC,
+          sampleRate, AudioFormat.CHANNEL_OUT_MONO,
+          AudioFormat.ENCODING_PCM_16BIT, Math.max(data.length, AudioTrack.getMinBufferSize(
+          sampleRate, AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT)),
+          AudioTrack.MODE_STATIC);
+      audioTrack.write(data, 0, data.length);
+      audioTrack.play();
+    }
+
+  /**
+   * AsyncTask class for the analyzing.
+   */
+  private class WavAnalyzerTask extends AsyncTask<Void, String, String>
+      implements WavAnalyzer.Listener {
+
+    private static final String TAG = "WavAnalyzerTask";
+    WavAnalyzer wavAnalyzer;
+
+    public WavAnalyzerTask(byte[] recording) {
+      wavAnalyzer = new WavAnalyzer(recording, Common.RECORDING_SAMPLE_RATE_HZ,
+          WavAnalyzerTask.this);
+    }
+
+    double[] getDB() {
+      return wavAnalyzer.getDB();
+    }
+
+    double[][] getPower() {
+      return wavAnalyzer.getPower();
+    }
+
+    double[] getNoiseDB() {
+      return wavAnalyzer.getNoiseDB();
+    }
+
+    double getThreshold() {
+      return wavAnalyzer.getThreshold();
+    }
+
+    @Override
+    protected String doInBackground(Void... params) {
+      boolean result = wavAnalyzer.doWork();
+      if (result) {
+        return getString(R.string.hifi_ultrasound_test_pass);
+      }
+      return getString(R.string.hifi_ultrasound_test_fail);
+    }
+
+    @Override
+    protected void onPostExecute(String result) {
+      info.append(result);
+      recorderButton.setEnabled(true);
+      recorderButton.setText(R.string.hifi_ultrasound_test_plot);
+    }
+
+    @Override
+    protected void onProgressUpdate(String... values) {
+      for (String message : values) {
+        info.append(message);
+        Log.d(TAG, message);
+      }
+    }
+
+    @Override
+    public void sendMessage(String message) {
+      publishProgress(message);
+    }
+  }
+}
diff --git a/apps/CtsVerifier/src/com/android/cts/verifier/audio/HifiUltrasoundTestActivity.java b/apps/CtsVerifier/src/com/android/cts/verifier/audio/HifiUltrasoundTestActivity.java
index 01eb4b0..690e109 100644
--- a/apps/CtsVerifier/src/com/android/cts/verifier/audio/HifiUltrasoundTestActivity.java
+++ b/apps/CtsVerifier/src/com/android/cts/verifier/audio/HifiUltrasoundTestActivity.java
@@ -50,10 +50,27 @@
     private static final String TAG = "HifiUltrasoundTestActivity";
 
     private Status status = Status.START;
+    private boolean onPlotScreen = false;
     private TextView info;
     private Button playerButton;
     private Button recorderButton;
     private AudioTrack audioTrack;
+    private LayoutInflater layoutInflater;
+    private View popupView;
+    private PopupWindow popupWindow;
+    private boolean micSupport = true;
+    private boolean spkrSupport = true;
+
+    @Override
+    public void onBackPressed () {
+        if (onPlotScreen) {
+            popupWindow.dismiss();
+            onPlotScreen = false;
+            recorderButton.setEnabled(true);
+        } else {
+            super.onBackPressed();
+        }
+    }
 
     @Override
     protected void onCreate(Bundle savedInstanceState) {
@@ -63,22 +80,49 @@
         setPassFailButtonClickListeners();
         getPassButton().setEnabled(false);
 
-        Common.setSampleRate(AudioTrack.getNativeOutputSampleRate(AudioManager.STREAM_MUSIC));
-
         info = (TextView) findViewById(R.id.info_text);
         info.setMovementMethod(new ScrollingMovementMethod());
         info.setText(R.string.hifi_ultrasound_test_instruction1);
 
+        AudioManager audioManager = (AudioManager) getSystemService(Context.AUDIO_SERVICE);
+        String micSupportString = audioManager.getProperty(
+            getResources().getString(R.string.hifi_ultrasound_test_mic_prop));
+        String spkrSupportString = audioManager.getProperty(
+            getResources().getString(R.string.hifi_ultrasound_test_spkr_prop));
+
+        if (micSupportString == null) {
+          micSupportString = "null";
+        }
+        if (spkrSupportString == null) {
+          spkrSupportString = "null";
+        }
+        if (micSupportString.equalsIgnoreCase(getResources().getString(
+            R.string.hifi_ultrasound_test_default_false_string))) {
+          micSupport = false;
+          getPassButton().setEnabled(true);
+          info.append(getResources().getString(R.string.hifi_ultrasound_test_mic_no_support));
+        }
+        if (spkrSupportString.equalsIgnoreCase(getResources().getString(
+            R.string.hifi_ultrasound_test_default_false_string))) {
+          spkrSupport = false;
+          info.append(getResources().getString(R.string.hifi_ultrasound_test_spkr_no_support));
+        }
+
+        layoutInflater = (LayoutInflater) getBaseContext().getSystemService(
+            LAYOUT_INFLATER_SERVICE);
+        popupView = layoutInflater.inflate(R.layout.hifi_ultrasound_popup, null);
+        popupWindow = new PopupWindow(
+            popupView, LayoutParams.WRAP_CONTENT, LayoutParams.WRAP_CONTENT);
+
         final AudioRecordHelper audioRecorder = AudioRecordHelper.getInstance();
         final int recordRate = audioRecorder.getSampleRate();
 
         recorderButton = (Button) findViewById(R.id.recorder_button);
+        recorderButton.setEnabled(micSupport);
         recorderButton.setOnClickListener(new View.OnClickListener() {
           private WavAnalyzerTask wavAnalyzerTask = null;
           private void stopRecording() {
             audioRecorder.stop();
-            playerButton.setText(R.string.hifi_ultrasound_test_playback);
-            playerButton.setEnabled(true);
             wavAnalyzerTask = new WavAnalyzerTask(audioRecorder.getByte());
             wavAnalyzerTask.execute();
             status = Status.DONE;
@@ -86,7 +130,6 @@
           @Override
           public void onClick(View v) {
             switch (status) {
-
               case START:
                 info.append("Recording at " + recordRate + "Hz using ");
                 final int source = audioRecorder.getAudioSource();
@@ -111,12 +154,12 @@
                 new Thread() {
                   @Override
                   public void run() {
-                    Double recordingDuration_millis = new Double(1000 * (2.5 +
-                        Common.PREFIX_LENGTH / Common.getSampleRate() +
-                        Common.PAUSE_BEFORE_PREFIX_DURATION_S +
-                        Common.PAUSE_AFTER_PREFIX_DURATION_S +
-                        Common.PIP_NUM * (Common.PIP_DURATION_S + Common.PAUSE_DURATION_S)
-                        * Common.REPETITIONS));
+                    Double recordingDuration_millis = new Double(1000 * (2.5
+                          + Common.PREFIX_LENGTH_S
+                          + Common.PAUSE_BEFORE_PREFIX_DURATION_S
+                          + Common.PAUSE_AFTER_PREFIX_DURATION_S
+                          + Common.PIP_NUM * (Common.PIP_DURATION_S + Common.PAUSE_DURATION_S)
+                          * Common.REPETITIONS));
                     Log.d(TAG, "Recording for " + recordingDuration_millis + "ms");
                     try {
                       Thread.sleep(recordingDuration_millis.intValue());
@@ -144,40 +187,29 @@
         });
 
         playerButton = (Button) findViewById(R.id.player_button);
+        playerButton.setEnabled(spkrSupport);
         playerButton.setOnClickListener(new View.OnClickListener() {
           @Override
           public void onClick(View v) {
-            switch (status) {
-              case START:
-                playerButton.setEnabled(false);
-                recorderButton.setEnabled(false);
-                status = Status.PLAYER;
-                play();
-                break;
-              default:
-                play(audioRecorder.getByte(), recordRate);
-                break;
-            }
+              recorderButton.setEnabled(false);
+              status = Status.PLAYER;
+              play();
           }
         });
     }
 
 
     private void plotResponse(WavAnalyzerTask wavAnalyzerTask) {
-      LayoutInflater layoutInflater
-          = (LayoutInflater) getBaseContext().getSystemService(LAYOUT_INFLATER_SERVICE);
-      View popupView = layoutInflater.inflate(R.layout.hifi_ultrasound_popup, null);
-      final PopupWindow popupWindow = new PopupWindow(
-          popupView, LayoutParams.WRAP_CONTENT, LayoutParams.WRAP_CONTENT);
-
       Button dismissButton = (Button)popupView.findViewById(R.id.dismiss);
       dismissButton.setOnClickListener(new Button.OnClickListener(){
         @Override
         public void onClick(View v) {
           popupWindow.dismiss();
+          onPlotScreen = false;
           recorderButton.setEnabled(true);
         }});
       popupWindow.showAtLocation(info, Gravity.CENTER, 0, 0);
+      onPlotScreen = true;
 
       recorderButton.setEnabled(false);
 
@@ -256,7 +288,7 @@
      * Plays the generated pips.
      */
     private void play() {
-      play(SoundGenerator.getInstance().getByte(), Common.getSampleRate());
+      play(SoundGenerator.getInstance().getByte(), Common.PLAYING_SAMPLE_RATE_HZ);
     }
 
     /**
@@ -286,7 +318,7 @@
     WavAnalyzer wavAnalyzer;
 
     public WavAnalyzerTask(byte[] recording) {
-      wavAnalyzer = new WavAnalyzer(recording, AudioRecordHelper.getInstance().getSampleRate(),
+      wavAnalyzer = new WavAnalyzer(recording, Common.RECORDING_SAMPLE_RATE_HZ,
           WavAnalyzerTask.this);
     }
 
diff --git a/apps/CtsVerifier/src/com/android/cts/verifier/audio/SoundGenerator.java b/apps/CtsVerifier/src/com/android/cts/verifier/audio/SoundGenerator.java
index f7318ed..0ad9371 100644
--- a/apps/CtsVerifier/src/com/android/cts/verifier/audio/SoundGenerator.java
+++ b/apps/CtsVerifier/src/com/android/cts/verifier/audio/SoundGenerator.java
@@ -13,25 +13,25 @@
   private SoundGenerator() {
     // Initialize sample.
     int pipNum = Common.PIP_NUM;
-    int prefixTotalLength = Common.PREFIX.length
-        + Util.toLength(Common.PAUSE_BEFORE_PREFIX_DURATION_S, Common.getSampleRate())
-        + Util.toLength(Common.PAUSE_AFTER_PREFIX_DURATION_S, Common.getSampleRate());
+    int prefixTotalLength = Util.toLength(Common.PREFIX_LENGTH_S, Common.PLAYING_SAMPLE_RATE_HZ)
+        + Util.toLength(Common.PAUSE_BEFORE_PREFIX_DURATION_S, Common.PLAYING_SAMPLE_RATE_HZ)
+        + Util.toLength(Common.PAUSE_AFTER_PREFIX_DURATION_S, Common.PLAYING_SAMPLE_RATE_HZ);
     int repetitionLength = pipNum * Util.toLength(
-        Common.PIP_DURATION_S + Common.PAUSE_DURATION_S, Common.getSampleRate());
+        Common.PIP_DURATION_S + Common.PAUSE_DURATION_S, Common.PLAYING_SAMPLE_RATE_HZ);
     int sampleLength = prefixTotalLength + Common.REPETITIONS * repetitionLength;
     sample = new double[sampleLength];
 
     // Fill sample with prefix.
-    System.arraycopy(Common.PREFIX, 0, sample,
-        Util.toLength(Common.PAUSE_BEFORE_PREFIX_DURATION_S, Common.getSampleRate()),
-        Common.PREFIX.length);
+    System.arraycopy(Common.PREFIX_FOR_PLAYER, 0, sample,
+        Util.toLength(Common.PAUSE_BEFORE_PREFIX_DURATION_S, Common.PLAYING_SAMPLE_RATE_HZ),
+        Common.PREFIX_FOR_PLAYER.length);
 
     // Fill the sample.
     for (int i = 0; i < pipNum * Common.REPETITIONS; i++) {
-      double[] pip = getPip(Common.generateWindow(), Common.FREQUENCIES[i]);
+      double[] pip = getPip(Common.WINDOW_FOR_PLAYER, Common.FREQUENCIES[i]);
       System.arraycopy(pip, 0, sample,
           prefixTotalLength + i * Util.toLength(
-              Common.PIP_DURATION_S + Common.PAUSE_DURATION_S, Common.getSampleRate()),
+              Common.PIP_DURATION_S + Common.PAUSE_DURATION_S, Common.PLAYING_SAMPLE_RATE_HZ),
           pip.length);
     }
 
@@ -58,7 +58,7 @@
   private static double[] getPip(double[] window, double frequency) {
     int pipArrayLength = window.length;
     double[] pipArray = new double[pipArrayLength];
-    double radPerSample = 2 * Math.PI / (Common.getSampleRate() / frequency);
+    double radPerSample = 2 * Math.PI / (Common.PLAYING_SAMPLE_RATE_HZ / frequency);
     for (int i = 0; i < pipArrayLength; i++) {
       pipArray[i] = window[i] * Math.sin(i * radPerSample);
     }
diff --git a/apps/CtsVerifier/src/com/android/cts/verifier/audio/WavAnalyzer.java b/apps/CtsVerifier/src/com/android/cts/verifier/audio/WavAnalyzer.java
index 84d59d6..b75c40b 100644
--- a/apps/CtsVerifier/src/com/android/cts/verifier/audio/WavAnalyzer.java
+++ b/apps/CtsVerifier/src/com/android/cts/verifier/audio/WavAnalyzer.java
@@ -15,7 +15,8 @@
   private double[] dB;  // Average response
   private double[][] power;  // power of each trial
   private double[] noiseDB;  // background noise
-  private double threshold;  // threshold of passing
+  private double[][] noisePower;
+  private double threshold;  // threshold of passing: allowed drop-off relative to the response at 2000 Hz
   private boolean result = false;  // result of the test
 
   /**
@@ -41,12 +42,12 @@
       return false;
     }
     // Calculating the pip strength.
-    listener.sendMessage("Calculating...\n");
+    listener.sendMessage("Calculating... Please wait...\n");
     try {
       dB = measurePipStrength();
     } catch (IndexOutOfBoundsException e) {
       listener.sendMessage("WARNING: May have missed the prefix."
-          + " Turn up the volume or move to a quieter location.\n");
+          + " Turn up the volume of the playback device or move to a quieter location.\n");
       return false;
     }
     if (!isConsistent()) {
@@ -63,7 +64,7 @@
     for (int i = 1; i < data.length; i++) {
       if ((Math.abs(data[i]) >= Short.MAX_VALUE) && (Math.abs(data[i - 1]) >= Short.MAX_VALUE)) {
         listener.sendMessage("WARNING: Data is clipped."
-            + " Turn the volume down and redo the procedure.\n");
+            + " Turn down the volume of the playback device and redo the procedure.\n");
         return true;
       }
     }
@@ -84,7 +85,7 @@
     }
     if (Util.mean(coeffOfVar) > 1.0) {
       listener.sendMessage("WARNING: Inconsistent result across trials."
-          + " Turn up the volume or move to a quieter location.\n");
+          + " Turn up the volume of the playback device or move to a quieter location.\n");
       return false;
     }
     return true;
@@ -103,80 +104,80 @@
       }
     }
 
-    int indexOf4kHz = Util.findClosest(Common.FREQUENCIES_ORIGINAL, 4000.0);
-    double[] responseBelow4kHz = new double[indexOf4kHz];
-    System.arraycopy(dB, 0, responseBelow4kHz, 0, indexOf4kHz);
-    double medianResponseBelow4kHz = Util.median(responseBelow4kHz);
-    double[] noiseBelow4kHz = new double[indexOf4kHz];
-    System.arraycopy(noiseDB, 0, noiseBelow4kHz, 0, indexOf4kHz);
-    double medianNoiseBelow4kHz = Util.median(noiseBelow4kHz);
-    if ((medianResponseBelow4kHz - medianNoiseBelow4kHz) < Common.AUDIBLE_SIGNAL_MIN_STRENGTH_DB) {
+    if (Util.mean(dB) - Util.mean(noiseDB) < Common.SIGNAL_MIN_STRENGTH_DB_ABOVE_NOISE) {
       listener.sendMessage("WARNING: Signal is too weak or background noise is too strong."
-          + " Turn up the volume or move to a quieter location.\n");
+          + " Turn up the volume of the playback device or move to a quieter location.\n");
       return false;
     }
 
+    int indexOf2000Hz = Util.findClosest(Common.FREQUENCIES_ORIGINAL, 2000.0);
+    threshold = dB[indexOf2000Hz] + Common.PASSING_THRESHOLD_DB;
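+    // Pass criterion: the mean response between 18.5 kHz and 20 kHz must stay at or above the
+    // 2000 Hz reference response plus PASSING_THRESHOLD_DB (the allowed drop-off).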
     int indexOf18500Hz = Util.findClosest(Common.FREQUENCIES_ORIGINAL, 18500.0);
     int indexOf20000Hz = Util.findClosest(Common.FREQUENCIES_ORIGINAL, 20000.0);
     double[] responseInRange = new double[indexOf20000Hz - indexOf18500Hz];
     System.arraycopy(dB, indexOf18500Hz, responseInRange, 0, responseInRange.length);
-    if (Util.mean(responseInRange) > threshold) {
-      return true;
+    if (Util.mean(responseInRange) < threshold) {
+      listener.sendMessage(
+          "WARNING: Failed. Retry with different orientations or report as failed.\n");
+      return false;
     }
-    return false;
+    return true;
   }
 
   /**
    * Calculate the Fourier Coefficient at the pip frequency to calculate the frequency response.
-   * dB relative to background noise.
    * Package visible for unit testing.
    */
   double[] measurePipStrength() {
-    listener.sendMessage("Aligning data\n");
+    listener.sendMessage("Aligning data... Please wait...\n");
     final int dataStartI = alignData();
-    final int prefixTotalLength = dataStartI + Common.PREFIX.length
-        + Util.toLength(Common.PAUSE_AFTER_PREFIX_DURATION_S, sampleRate);
+    final int prefixTotalLength = dataStartI
+        + Util.toLength(Common.PREFIX_LENGTH_S + Common.PAUSE_AFTER_PREFIX_DURATION_S, sampleRate);
     listener.sendMessage("Done.\n");
     listener.sendMessage("Prefix starts at " + (double) dataStartI / sampleRate + " s \n");
-    if (dataStartI > Math.round(sampleRate
-          * (Common.PAUSE_BEFORE_PREFIX_DURATION_S + Common.PAUSE_AFTER_PREFIX_DURATION_S))
-        + Common.PREFIX_LENGTH) {
-      listener.sendMessage("WARNING: Unexpected prefix start time. May have missed the prefix."
-          + " Turn up the volume or move to a quieter location.\n");
+    if (dataStartI > Math.round(sampleRate * (Common.PREFIX_LENGTH_S
+            + Common.PAUSE_BEFORE_PREFIX_DURATION_S + Common.PAUSE_AFTER_PREFIX_DURATION_S))) {
+      listener.sendMessage("WARNING: Unexpected prefix start time. May have missed the prefix.\n"
+          + "PLAY button should be pressed on the playback device within one second"
+          + " after RECORD is pressed on the recording device.\n"
+          + "If this happens repeatedly,"
+          + " turn up the volume of the playback device or move to a quieter location.\n");
     }
 
-    double[] noisePoints = new double[Common.window().length];
-    System.arraycopy(data, dataStartI - noisePoints.length - 1, noisePoints, 0, noisePoints.length);
-    for (int j = 0; j < noisePoints.length; j++) {
-      noisePoints[j] = noisePoints[j] * Common.window()[j];
-    }
-
+    listener.sendMessage("Analyzing noise strength... Please wait...\n");
+    noisePower = new double[Common.PIP_NUM][Common.NOISE_SAMPLES];
     noiseDB = new double[Common.PIP_NUM];
-    listener.sendMessage("Analyzing noise strength...\n");
-    for (int i = 0; i < Common.PIP_NUM; i++) {
-      double freq = Common.FREQUENCIES_ORIGINAL[i];
-      Complex fourierCoeff = new Complex(0, 0);
-      final Complex rotator = new Complex(0,
-          -2.0 * Math.PI * freq / sampleRate).exp();
-      Complex phasor = new Complex(1, 0);
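+    // Estimate the background noise: for each of NOISE_SAMPLES windows taken just before the
+    // detected prefix, measure the power at every pip frequency with a single-bin DFT
+    // (complex phasor rotation), then average per frequency below.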
+    for (int s = 0; s < Common.NOISE_SAMPLES; s++) {
+      double[] noisePoints = new double[Common.WINDOW_FOR_RECORDER.length];
+      System.arraycopy(data, dataStartI - (s + 1) * noisePoints.length - 1,
+          noisePoints, 0, noisePoints.length);
       for (int j = 0; j < noisePoints.length; j++) {
-        fourierCoeff = fourierCoeff.add(phasor.multiply(noisePoints[j]));
-        phasor = phasor.multiply(rotator);
+        noisePoints[j] = noisePoints[j] * Common.WINDOW_FOR_RECORDER[j];
       }
-      fourierCoeff = fourierCoeff.multiply(1.0 / noisePoints.length);
-      double noisePower = fourierCoeff.multiply(fourierCoeff.conjugate()).abs();
-      noiseDB[i] = 10 * Math.log10(noisePower);
+      for (int i = 0; i < Common.PIP_NUM; i++) {
+        double freq = Common.FREQUENCIES_ORIGINAL[i];
+        Complex fourierCoeff = new Complex(0, 0);
+        final Complex rotator = new Complex(0,
+            -2.0 * Math.PI * freq / sampleRate).exp();
+        Complex phasor = new Complex(1, 0);
+        for (int j = 0; j < noisePoints.length; j++) {
+          fourierCoeff = fourierCoeff.add(phasor.multiply(noisePoints[j]));
+          phasor = phasor.multiply(rotator);
+        }
+        fourierCoeff = fourierCoeff.multiply(1.0 / noisePoints.length);
+        noisePower[i][s] = fourierCoeff.multiply(fourierCoeff.conjugate()).abs();
+      }
+    }
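+    // Average the noise power across the sampled windows and convert to dB per pip frequency.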
+    for (int i = 0; i < Common.PIP_NUM; i++) {
+      double meanNoisePower = 0;
+      for (int j = 0; j < Common.NOISE_SAMPLES; j++) {
+        meanNoisePower += noisePower[i][j];
+      }
+      meanNoisePower /= Common.NOISE_SAMPLES;
+      noiseDB[i] = 10 * Math.log10(meanNoisePower);
     }
 
-    int indexOf18500Hz = Util.findClosest(Common.FREQUENCIES_ORIGINAL, 18500.0);
-    int indexOf20000Hz = Util.findClosest(Common.FREQUENCIES_ORIGINAL, 20000.0);
-    double[] noiseInRange = new double[indexOf20000Hz - indexOf18500Hz + 1];
-    System.arraycopy(noiseDB, indexOf18500Hz, noiseInRange, 0, indexOf20000Hz - indexOf18500Hz + 1);
-    double medianNoiseInRange = Util.median(noiseInRange);
-    double stdNoiseInRange = Util.std(noiseInRange);
-    threshold = medianNoiseInRange + Common.ULTRASOUND_SIGNAL_MIN_STRENGTH_RATIO * stdNoiseInRange;
-
-    listener.sendMessage("Analyzing pips...\n");
+    listener.sendMessage("Analyzing pips... Please wait...\n");
     power = new double[Common.PIP_NUM][Common.REPETITIONS];
     for (int i = 0; i < Common.PIP_NUM * Common.REPETITIONS; i++) {
       if (i % Common.PIP_NUM == 0) {
@@ -187,10 +188,10 @@
       pipExpectedStartI = prefixTotalLength
           + Util.toLength(i * (Common.PIP_DURATION_S + Common.PAUSE_DURATION_S), sampleRate);
       // Cut out the data points for the current pip.
-      double[] pipPoints = new double[Common.window().length];
+      double[] pipPoints = new double[Common.WINDOW_FOR_RECORDER.length];
       System.arraycopy(data, pipExpectedStartI, pipPoints, 0, pipPoints.length);
-      for (int j = 0; j < Common.window().length; j++) {
-        pipPoints[j] = pipPoints[j] * Common.window()[j];
+      for (int j = 0; j < Common.WINDOW_FOR_RECORDER.length; j++) {
+        pipPoints[j] = pipPoints[j] * Common.WINDOW_FOR_RECORDER[j];
       }
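+      // Single-bin DFT at the pip frequency, the same phasor-rotation approach as the noise pass.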
       Complex fourierCoeff = new Complex(0, 0);
       final Complex rotator = new Complex(0,
@@ -219,18 +220,16 @@
    */
   int alignData() {
     // Zeropadding samples to add in the correlation to avoid FFT wraparound.
-    final int zeroPad = Common.PREFIX_LENGTH - 1;
-    int fftSize = Util.nextPowerOfTwo(
-        (int) Math.round(sampleRate
-          * (Common.PAUSE_BEFORE_PREFIX_DURATION_S + Common.PAUSE_AFTER_PREFIX_DURATION_S))
-        + Common.PREFIX_LENGTH
+    final int zeroPad = Util.toLength(Common.PREFIX_LENGTH_S, Common.RECORDING_SAMPLE_RATE_HZ) - 1;
+    int fftSize = Util.nextPowerOfTwo((int) Math.round(sampleRate * (Common.PREFIX_LENGTH_S
+            + Common.PAUSE_BEFORE_PREFIX_DURATION_S + Common.PAUSE_AFTER_PREFIX_DURATION_S + 0.5))
         + zeroPad);
 
     double[] dataCut = new double[fftSize - zeroPad];
     System.arraycopy(data, 0, dataCut, 0, fftSize - zeroPad);
     double[] xCorrDataPrefix = Util.computeCrossCorrelation(
         Util.padZeros(Util.toComplex(dataCut), fftSize),
-        Util.padZeros(Util.toComplex(Common.PREFIX), fftSize));
+        Util.padZeros(Util.toComplex(Common.PREFIX_FOR_RECORDER), fftSize));
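+    // The peak of the cross-correlation marks the sample offset where the recording best
+    // matches the known prefix, i.e. where the prefix starts.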
     return Util.findMaxIndex(xCorrDataPrefix);
   }
 
diff --git a/apps/CtsVerifier/src/com/android/cts/verifier/camera/its/ItsService.java b/apps/CtsVerifier/src/com/android/cts/verifier/camera/its/ItsService.java
index e3d0b6d..58b51a5 100644
--- a/apps/CtsVerifier/src/com/android/cts/verifier/camera/its/ItsService.java
+++ b/apps/CtsVerifier/src/com/android/cts/verifier/camera/its/ItsService.java
@@ -146,6 +146,7 @@
     private AtomicInteger mCountCallbacksRemaining = new AtomicInteger();
     private AtomicInteger mCountRawOrDng = new AtomicInteger();
     private AtomicInteger mCountRaw10 = new AtomicInteger();
+    private AtomicInteger mCountRaw12 = new AtomicInteger();
     private AtomicInteger mCountJpg = new AtomicInteger();
     private AtomicInteger mCountYuv = new AtomicInteger();
     private AtomicInteger mCountCapRes = new AtomicInteger();
@@ -658,6 +659,8 @@
                         jsonSurface.put("format", "raw");
                     } else if (format == ImageFormat.RAW10) {
                         jsonSurface.put("format", "raw10");
+                    } else if (format == ImageFormat.RAW12) {
+                        jsonSurface.put("format", "raw12");
                     } else if (format == ImageFormat.JPEG) {
                         jsonSurface.put("format", "jpeg");
                     } else if (format == ImageFormat.YUV_420_888) {
@@ -1004,6 +1007,7 @@
                 mCountJpg.set(0);
                 mCountYuv.set(0);
                 mCountRaw10.set(0);
+                mCountRaw12.set(0);
                 mCountCapRes.set(0);
                 mCaptureRawIsDng = false;
                 mCaptureResults = new CaptureResult[requests.size()];
@@ -1028,13 +1032,16 @@
                             sizes = ItsUtils.getJpegOutputSizes(mCameraCharacteristics);
                         } else if ("raw".equals(sformat)) {
                             formats[i] = ImageFormat.RAW_SENSOR;
-                            sizes = ItsUtils.getRawOutputSizes(mCameraCharacteristics);
+                            sizes = ItsUtils.getRaw16OutputSizes(mCameraCharacteristics);
                         } else if ("raw10".equals(sformat)) {
                             formats[i] = ImageFormat.RAW10;
-                            sizes = ItsUtils.getRawOutputSizes(mCameraCharacteristics);
+                            sizes = ItsUtils.getRaw10OutputSizes(mCameraCharacteristics);
+                        } else if ("raw12".equals(sformat)) {
+                            formats[i] = ImageFormat.RAW12;
+                            sizes = ItsUtils.getRaw12OutputSizes(mCameraCharacteristics);
                         } else if ("dng".equals(sformat)) {
                             formats[i] = ImageFormat.RAW_SENSOR;
-                            sizes = ItsUtils.getRawOutputSizes(mCameraCharacteristics);
+                            sizes = ItsUtils.getRaw16OutputSizes(mCameraCharacteristics);
                             mCaptureRawIsDng = true;
                         } else {
                             throw new ItsException("Unsupported format: " + sformat);
@@ -1170,6 +1177,12 @@
                     ByteBuffer buf = ByteBuffer.wrap(img);
                     int count = mCountRaw10.getAndIncrement();
                     mSocketRunnableObj.sendResponseCaptureBuffer("raw10Image", buf);
+                } else if (format == ImageFormat.RAW12) {
+                    Logt.i(TAG, "Received RAW12 capture");
+                    byte[] img = ItsUtils.getDataFromImage(capture);
+                    ByteBuffer buf = ByteBuffer.wrap(img);
+                    int count = mCountRaw12.getAndIncrement();
+                    mSocketRunnableObj.sendResponseCaptureBuffer("raw12Image", buf);
                 } else if (format == ImageFormat.RAW_SENSOR) {
                     Logt.i(TAG, "Received RAW16 capture");
                     int count = mCountRawOrDng.getAndIncrement();
diff --git a/apps/CtsVerifier/src/com/android/cts/verifier/camera/its/ItsUtils.java b/apps/CtsVerifier/src/com/android/cts/verifier/camera/its/ItsUtils.java
index 2011314..b09b90c 100644
--- a/apps/CtsVerifier/src/com/android/cts/verifier/camera/its/ItsUtils.java
+++ b/apps/CtsVerifier/src/com/android/cts/verifier/camera/its/ItsUtils.java
@@ -91,11 +91,21 @@
         }
     }
 
-    public static Size[] getRawOutputSizes(CameraCharacteristics ccs)
+    public static Size[] getRaw16OutputSizes(CameraCharacteristics ccs)
             throws ItsException {
         return getOutputSizes(ccs, ImageFormat.RAW_SENSOR);
     }
 
+    public static Size[] getRaw10OutputSizes(CameraCharacteristics ccs)
+            throws ItsException {
+        return getOutputSizes(ccs, ImageFormat.RAW10);
+    }
+
+    public static Size[] getRaw12OutputSizes(CameraCharacteristics ccs)
+            throws ItsException {
+        return getOutputSizes(ccs, ImageFormat.RAW12);
+    }
+
     public static Size[] getJpegOutputSizes(CameraCharacteristics ccs)
             throws ItsException {
         return getOutputSizes(ccs, ImageFormat.JPEG);
@@ -139,7 +149,7 @@
             buffer.get(data);
             return data;
         } else if (format == ImageFormat.YUV_420_888 || format == ImageFormat.RAW_SENSOR
-                || format == ImageFormat.RAW10) {
+                || format == ImageFormat.RAW10 || format == ImageFormat.RAW12) {
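+            // The output buffer is sized from getBitsPerPixel(): RAW10 packs 10 bits/pixel and
+            // RAW12 packs 12 bits/pixel, so the existing sizing logic covers the new packed format.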
             int offset = 0;
             data = new byte[width * height * ImageFormat.getBitsPerPixel(format) / 8];
             int maxRowSize = planes[0].getRowStride();
@@ -213,6 +223,7 @@
                 return 3 == planes.length;
             case ImageFormat.RAW_SENSOR:
             case ImageFormat.RAW10:
+            case ImageFormat.RAW12:
             case ImageFormat.JPEG:
                 return 1 == planes.length;
             default:
diff --git a/apps/CtsVerifier/src/com/android/cts/verifier/sensors/CtsMediaOutputSurface.java b/apps/CtsVerifier/src/com/android/cts/verifier/sensors/CtsMediaOutputSurface.java
new file mode 100644
index 0000000..b28e06b
--- /dev/null
+++ b/apps/CtsVerifier/src/com/android/cts/verifier/sensors/CtsMediaOutputSurface.java
@@ -0,0 +1,322 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.cts.verifier.sensors;
+
+import android.graphics.SurfaceTexture;
+import android.opengl.EGL14;
+import android.opengl.EGLConfig;
+import android.opengl.EGLContext;
+import android.opengl.EGLDisplay;
+import android.opengl.EGLSurface;
+import android.util.Log;
+import android.view.Surface;
+
+
+//
+// This file is copied from android.hardware.cts.media
+//
+
+/**
+ * Holds state associated with a Surface used for MediaCodec decoder output.
+ * <p>
+ * The (width,height) constructor for this class will prepare GL, create a SurfaceTexture,
+ * and then create a Surface for that SurfaceTexture.  The Surface can be passed to
+ * MediaCodec.configure() to receive decoder output.  When a frame arrives, we latch the
+ * texture with updateTexImage, then render the texture with GL to a pbuffer.
+ * <p>
+ * The no-arg constructor skips the GL preparation step and doesn't allocate a pbuffer.
+ * Instead, it just creates the Surface and SurfaceTexture, and when a frame arrives
+ * we just draw it on whatever surface is current.
+ * <p>
+ * By default, the Surface will be using a BufferQueue in asynchronous mode, so we
+ * can potentially drop frames.
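+ * <p>
+ * A minimal usage sketch (the MediaCodec decoder shown here is an assumption, not part of
+ * this class): create the surface, pass it to MediaCodec.configure(), then latch and draw
+ * each frame after it has been rendered to the surface.
+ * <pre>
+ *   CtsMediaOutputSurface outputSurface = new CtsMediaOutputSurface(640, 480);
+ *   decoder.configure(mediaFormat, outputSurface.getSurface(), null, 0);
+ *   // ... release an output buffer with render == true, then ...
+ *   outputSurface.awaitNewImage();
+ *   outputSurface.drawImage();
+ * </pre>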
+ */
+class CtsMediaOutputSurface implements SurfaceTexture.OnFrameAvailableListener {
+    private static final String TAG = "OutputSurface";
+    private static final boolean VERBOSE = false;
+
+    private EGLDisplay mEGLDisplay = EGL14.EGL_NO_DISPLAY;
+    private EGLContext mEGLContext = EGL14.EGL_NO_CONTEXT;
+    private EGLSurface mEGLSurface = EGL14.EGL_NO_SURFACE;
+
+    private SurfaceTexture mSurfaceTexture;
+    private Surface mSurface;
+
+    private Object mFrameSyncObject = new Object();     // guards mFrameAvailable
+    private boolean mFrameAvailable;
+
+    private CtsMediaTextureRender mTextureRender;
+
+    /**
+     * Creates an OutputSurface backed by a pbuffer with the specified dimensions.  The new
+     * EGL context and surface will be made current.  Creates a Surface that can be passed
+     * to MediaCodec.configure().
+     */
+    public CtsMediaOutputSurface(int width, int height) {
+        if (width <= 0 || height <= 0) {
+            throw new IllegalArgumentException();
+        }
+
+        eglSetup(width, height);
+        makeCurrent();
+
+        setup(this);
+    }
+
+    /**
+     * Creates an OutputSurface using the current EGL context (rather than establishing a
+     * new one).  Creates a Surface that can be passed to MediaCodec.configure().
+     */
+    public CtsMediaOutputSurface() {
+        setup(this);
+    }
+
+    public CtsMediaOutputSurface(final SurfaceTexture.OnFrameAvailableListener listener) {
+        setup(listener);
+    }
+
+    /**
+     * Creates instances of TextureRender and SurfaceTexture, and a Surface associated
+     * with the SurfaceTexture.
+     */
+    private void setup(SurfaceTexture.OnFrameAvailableListener listener) {
+        mTextureRender = new CtsMediaTextureRender();
+        mTextureRender.surfaceCreated();
+
+        // Even if we don't access the SurfaceTexture after the constructor returns, we
+        // still need to keep a reference to it.  The Surface doesn't retain a reference
+        // at the Java level, so if we don't either then the object can get GCed, which
+        // causes the native finalizer to run.
+        if (VERBOSE) Log.d(TAG, "textureID=" + mTextureRender.getTextureId());
+        mSurfaceTexture = new SurfaceTexture(mTextureRender.getTextureId());
+
+        // This doesn't work if OutputSurface is created on the thread that CTS started for
+        // these test cases.
+        //
+        // The CTS-created thread has a Looper, and the SurfaceTexture constructor will
+        // create a Handler that uses it.  The "frame available" message is delivered
+        // there, but since we're not a Looper-based thread we'll never see it.  For
+        // this to do anything useful, OutputSurface must be created on a thread without
+        // a Looper, so that SurfaceTexture uses the main application Looper instead.
+        //
+        // Java language note: passing "this" out of a constructor is generally unwise,
+        // but we should be able to get away with it here.
+        mSurfaceTexture.setOnFrameAvailableListener(listener);
+
+        mSurface = new Surface(mSurfaceTexture);
+    }
+
+    /**
+     * Prepares EGL.  We want a GLES 2.0 context and a surface that supports pbuffer.
+     */
+    private void eglSetup(int width, int height) {
+        mEGLDisplay = EGL14.eglGetDisplay(EGL14.EGL_DEFAULT_DISPLAY);
+        if (mEGLDisplay == EGL14.EGL_NO_DISPLAY) {
+            throw new RuntimeException("unable to get EGL14 display");
+        }
+        int[] version = new int[2];
+        if (!EGL14.eglInitialize(mEGLDisplay, version, 0, version, 1)) {
+            mEGLDisplay = null;
+            throw new RuntimeException("unable to initialize EGL14");
+        }
+
+        // Configure EGL for pbuffer and OpenGL ES 2.0.  We want enough RGB bits
+        // to be able to tell if the frame is reasonable.
+        int[] attribList = {
+                EGL14.EGL_RED_SIZE, 8,
+                EGL14.EGL_GREEN_SIZE, 8,
+                EGL14.EGL_BLUE_SIZE, 8,
+                EGL14.EGL_RENDERABLE_TYPE, EGL14.EGL_OPENGL_ES2_BIT,
+                EGL14.EGL_SURFACE_TYPE, EGL14.EGL_PBUFFER_BIT,
+                EGL14.EGL_NONE
+        };
+        EGLConfig[] configs = new EGLConfig[1];
+        int[] numConfigs = new int[1];
+        if (!EGL14.eglChooseConfig(mEGLDisplay, attribList, 0, configs, 0, configs.length,
+                numConfigs, 0)) {
+            throw new RuntimeException("unable to find RGB888+recordable ES2 EGL config");
+        }
+
+        // Configure context for OpenGL ES 2.0.
+        int[] attrib_list = {
+                EGL14.EGL_CONTEXT_CLIENT_VERSION, 2,
+                EGL14.EGL_NONE
+        };
+        mEGLContext = EGL14.eglCreateContext(mEGLDisplay, configs[0], EGL14.EGL_NO_CONTEXT,
+                attrib_list, 0);
+        checkEglError("eglCreateContext");
+        if (mEGLContext == null) {
+            throw new RuntimeException("null context");
+        }
+
+        // Create a pbuffer surface.  By using this for output, we can use glReadPixels
+        // to test values in the output.
+        int[] surfaceAttribs = {
+                EGL14.EGL_WIDTH, width,
+                EGL14.EGL_HEIGHT, height,
+                EGL14.EGL_NONE
+        };
+        mEGLSurface = EGL14.eglCreatePbufferSurface(mEGLDisplay, configs[0], surfaceAttribs, 0);
+        checkEglError("eglCreatePbufferSurface");
+        if (mEGLSurface == null) {
+            throw new RuntimeException("surface was null");
+        }
+    }
+
+    /**
+     * Discard all resources held by this class, notably the EGL context.
+     */
+    public void release() {
+        if (mEGLDisplay != EGL14.EGL_NO_DISPLAY) {
+            EGL14.eglDestroySurface(mEGLDisplay, mEGLSurface);
+            EGL14.eglDestroyContext(mEGLDisplay, mEGLContext);
+            EGL14.eglReleaseThread();
+            EGL14.eglTerminate(mEGLDisplay);
+        }
+
+        mSurface.release();
+
+        // this causes a bunch of warnings that appear harmless but might confuse someone:
+        //  W BufferQueue: [unnamed-3997-2] cancelBuffer: BufferQueue has been abandoned!
+        //mSurfaceTexture.release();
+
+        mEGLDisplay = EGL14.EGL_NO_DISPLAY;
+        mEGLContext = EGL14.EGL_NO_CONTEXT;
+        mEGLSurface = EGL14.EGL_NO_SURFACE;
+
+        mTextureRender = null;
+        mSurface = null;
+        mSurfaceTexture = null;
+    }
+
+    /**
+     * Makes our EGL context and surface current.
+     */
+    public void makeCurrent() {
+        if (!EGL14.eglMakeCurrent(mEGLDisplay, mEGLSurface, mEGLSurface, mEGLContext)) {
+            throw new RuntimeException("eglMakeCurrent failed");
+        }
+    }
+
+    /**
+     * Returns the Surface that we draw onto.
+     */
+    public Surface getSurface() {
+        return mSurface;
+    }
+
+    /**
+     * Replaces the fragment shader.
+     */
+    public void changeFragmentShader(String fragmentShader) {
+        mTextureRender.changeFragmentShader(fragmentShader);
+    }
+
+    /**
+     * Latches the next buffer into the texture.  Must be called from the thread that created
+     * the OutputSurface object, after the onFrameAvailable callback has signaled that new
+     * data is available.
+     */
+    public void awaitNewImage() {
+        final int TIMEOUT_MS = 500;
+
+        synchronized (mFrameSyncObject) {
+            while (!mFrameAvailable) {
+                try {
+                    // Wait for onFrameAvailable() to signal us.  Use a timeout to avoid
+                    // stalling the test if it doesn't arrive.
+                    mFrameSyncObject.wait(TIMEOUT_MS);
+                    if (!mFrameAvailable) {
+                        // TODO: if "spurious wakeup", continue while loop
+                        throw new RuntimeException("Surface frame wait timed out");
+                    }
+                } catch (InterruptedException ie) {
+                    // shouldn't happen
+                    throw new RuntimeException(ie);
+                }
+            }
+            mFrameAvailable = false;
+        }
+
+        // Latch the data.
+        mTextureRender.checkGlError("before updateTexImage");
+        mSurfaceTexture.updateTexImage();
+    }
+
+    /**
+     * Wait up to the given timeout until a new image becomes available.
+     * @param timeoutMs how long to wait for a new frame, in milliseconds
+     * @return true if a new image is available; false if no new image arrived before the timeout.
+     */
+    public boolean checkForNewImage(int timeoutMs) {
+        synchronized (mFrameSyncObject) {
+            while (!mFrameAvailable) {
+                try {
+                    // Wait for onFrameAvailable() to signal us.  Use a timeout to avoid
+                    // stalling the test if it doesn't arrive.
+                    mFrameSyncObject.wait(timeoutMs);
+                    if (!mFrameAvailable) {
+                        return false;
+                    }
+                } catch (InterruptedException ie) {
+                    // shouldn't happen
+                    throw new RuntimeException(ie);
+                }
+            }
+            mFrameAvailable = false;
+        }
+
+        // Latch the data.
+        mTextureRender.checkGlError("before updateTexImage");
+        mSurfaceTexture.updateTexImage();
+        return true;
+    }
+
+    /**
+     * Draws the data from SurfaceTexture onto the current EGL surface.
+     */
+    public void drawImage() {
+        mTextureRender.drawFrame(mSurfaceTexture);
+    }
+
+    public void latchImage() {
+        mTextureRender.checkGlError("before updateTexImage");
+        mSurfaceTexture.updateTexImage();
+    }
+
+    @Override
+    public void onFrameAvailable(SurfaceTexture st) {
+        if (VERBOSE) Log.d(TAG, "new frame available");
+        synchronized (mFrameSyncObject) {
+            if (mFrameAvailable) {
+                throw new RuntimeException("mFrameAvailable already set, frame could be dropped");
+            }
+            mFrameAvailable = true;
+            mFrameSyncObject.notifyAll();
+        }
+    }
+
+    /**
+     * Checks for EGL errors.
+     */
+    private void checkEglError(String msg) {
+        int error;
+        if ((error = EGL14.eglGetError()) != EGL14.EGL_SUCCESS) {
+            throw new RuntimeException(msg + ": EGL error: 0x" + Integer.toHexString(error));
+        }
+    }
+}
diff --git a/apps/CtsVerifier/src/com/android/cts/verifier/sensors/CtsMediaTextureRender.java b/apps/CtsVerifier/src/com/android/cts/verifier/sensors/CtsMediaTextureRender.java
new file mode 100644
index 0000000..a96033d
--- /dev/null
+++ b/apps/CtsVerifier/src/com/android/cts/verifier/sensors/CtsMediaTextureRender.java
@@ -0,0 +1,306 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.cts.verifier.sensors;
+
+import java.io.File;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.nio.ByteOrder;
+import java.nio.FloatBuffer;
+
+import android.graphics.Bitmap;
+import android.graphics.SurfaceTexture;
+import android.opengl.GLES11Ext;
+import android.opengl.GLES20;
+import android.opengl.Matrix;
+import android.util.Log;
+
+
+//
+// This file is copied from android.hardware.cts.media
+//
+
+/**
+ * Code for rendering a texture onto a surface using OpenGL ES 2.0.
+ */
+class CtsMediaTextureRender {
+    private static final String TAG = "TextureRender";
+
+    private static final int FLOAT_SIZE_BYTES = 4;
+    private static final int TRIANGLE_VERTICES_DATA_STRIDE_BYTES = 5 * FLOAT_SIZE_BYTES;
+    private static final int TRIANGLE_VERTICES_DATA_POS_OFFSET = 0;
+    private static final int TRIANGLE_VERTICES_DATA_UV_OFFSET = 3;
+    private final float[] mTriangleVerticesData = {
+        // X, Y, Z, U, V
+        -1.0f, -1.0f, 0, 0.f, 0.f,
+         1.0f, -1.0f, 0, 1.f, 0.f,
+        -1.0f,  1.0f, 0, 0.f, 1.f,
+         1.0f,  1.0f, 0, 1.f, 1.f,
+    };
+
+    private FloatBuffer mTriangleVertices;
+
+    private static final String VERTEX_SHADER =
+            "uniform mat4 uMVPMatrix;\n" +
+            "uniform mat4 uSTMatrix;\n" +
+            "attribute vec4 aPosition;\n" +
+            "attribute vec4 aTextureCoord;\n" +
+            "varying vec2 vTextureCoord;\n" +
+            "void main() {\n" +
+            "  gl_Position = uMVPMatrix * aPosition;\n" +
+            "  vTextureCoord = (uSTMatrix * aTextureCoord).xy;\n" +
+            "}\n";
+
+    private static final String FRAGMENT_SHADER =
+            "#extension GL_OES_EGL_image_external : require\n" +
+            "precision mediump float;\n" +      // highp here doesn't seem to matter
+            "varying vec2 vTextureCoord;\n" +
+            "uniform samplerExternalOES sTexture;\n" +
+            "void main() {\n" +
+            "  gl_FragColor = texture2D(sTexture, vTextureCoord);\n" +
+            "}\n";
+
+    private float[] mMVPMatrix = new float[16];
+    private float[] mSTMatrix = new float[16];
+
+    private int mProgram;
+    private int mTextureID = -12345;
+    private int muMVPMatrixHandle;
+    private int muSTMatrixHandle;
+    private int maPositionHandle;
+    private int maTextureHandle;
+
+    public CtsMediaTextureRender() {
+        mTriangleVertices = ByteBuffer.allocateDirect(
+            mTriangleVerticesData.length * FLOAT_SIZE_BYTES)
+                .order(ByteOrder.nativeOrder()).asFloatBuffer();
+        mTriangleVertices.put(mTriangleVerticesData).position(0);
+
+        Matrix.setIdentityM(mSTMatrix, 0);
+    }
+
+    public int getTextureId() {
+        return mTextureID;
+    }
+
+    public void drawFrame(SurfaceTexture st) {
+        checkGlError("onDrawFrame start");
+        st.getTransformMatrix(mSTMatrix);
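+        // The SurfaceTexture supplies a texture-coordinate transform that the vertex shader
+        // applies (uSTMatrix) so the external OES texture is sampled correctly.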
+
+        GLES20.glClearColor(0.0f, 1.0f, 0.0f, 1.0f);
+        GLES20.glClear(GLES20.GL_DEPTH_BUFFER_BIT | GLES20.GL_COLOR_BUFFER_BIT);
+
+        GLES20.glUseProgram(mProgram);
+        checkGlError("glUseProgram");
+
+        GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
+        GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, mTextureID);
+
+        mTriangleVertices.position(TRIANGLE_VERTICES_DATA_POS_OFFSET);
+        GLES20.glVertexAttribPointer(maPositionHandle, 3, GLES20.GL_FLOAT, false,
+            TRIANGLE_VERTICES_DATA_STRIDE_BYTES, mTriangleVertices);
+        checkGlError("glVertexAttribPointer maPosition");
+        GLES20.glEnableVertexAttribArray(maPositionHandle);
+        checkGlError("glEnableVertexAttribArray maPositionHandle");
+
+        mTriangleVertices.position(TRIANGLE_VERTICES_DATA_UV_OFFSET);
+        GLES20.glVertexAttribPointer(maTextureHandle, 2, GLES20.GL_FLOAT, false,
+            TRIANGLE_VERTICES_DATA_STRIDE_BYTES, mTriangleVertices);
+        checkGlError("glVertexAttribPointer maTextureHandle");
+        GLES20.glEnableVertexAttribArray(maTextureHandle);
+        checkGlError("glEnableVertexAttribArray maTextureHandle");
+
+        Matrix.setIdentityM(mMVPMatrix, 0);
+        GLES20.glUniformMatrix4fv(muMVPMatrixHandle, 1, false, mMVPMatrix, 0);
+        GLES20.glUniformMatrix4fv(muSTMatrixHandle, 1, false, mSTMatrix, 0);
+
+        GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
+        checkGlError("glDrawArrays");
+        GLES20.glFinish();
+    }
+
+    /**
+     * Initializes GL state.  Call this after the EGL surface has been created and made current.
+     */
+    public void surfaceCreated() {
+        mProgram = createProgram(VERTEX_SHADER, FRAGMENT_SHADER);
+        if (mProgram == 0) {
+            throw new RuntimeException("failed creating program");
+        }
+        maPositionHandle = GLES20.glGetAttribLocation(mProgram, "aPosition");
+        checkGlError("glGetAttribLocation aPosition");
+        if (maPositionHandle == -1) {
+            throw new RuntimeException("Could not get attrib location for aPosition");
+        }
+        maTextureHandle = GLES20.glGetAttribLocation(mProgram, "aTextureCoord");
+        checkGlError("glGetAttribLocation aTextureCoord");
+        if (maTextureHandle == -1) {
+            throw new RuntimeException("Could not get attrib location for aTextureCoord");
+        }
+
+        muMVPMatrixHandle = GLES20.glGetUniformLocation(mProgram, "uMVPMatrix");
+        checkGlError("glGetUniformLocation uMVPMatrix");
+        if (muMVPMatrixHandle == -1) {
+            throw new RuntimeException("Could not get attrib location for uMVPMatrix");
+        }
+
+        muSTMatrixHandle = GLES20.glGetUniformLocation(mProgram, "uSTMatrix");
+        checkGlError("glGetUniformLocation uSTMatrix");
+        if (muSTMatrixHandle == -1) {
+            throw new RuntimeException("Could not get attrib location for uSTMatrix");
+        }
+
+
+        int[] textures = new int[1];
+        GLES20.glGenTextures(1, textures, 0);
+
+        mTextureID = textures[0];
+        GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, mTextureID);
+        checkGlError("glBindTexture mTextureID");
+
+        GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MIN_FILTER,
+                GLES20.GL_NEAREST);
+        GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MAG_FILTER,
+                GLES20.GL_LINEAR);
+        GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_S,
+                GLES20.GL_CLAMP_TO_EDGE);
+        GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_T,
+                GLES20.GL_CLAMP_TO_EDGE);
+        checkGlError("glTexParameter");
+    }
+
+    /**
+     * Replaces the fragment shader.
+     */
+    public void changeFragmentShader(String fragmentShader) {
+        GLES20.glDeleteProgram(mProgram);
+        mProgram = createProgram(VERTEX_SHADER, fragmentShader);
+        if (mProgram == 0) {
+            throw new RuntimeException("failed creating program");
+        }
+    }
+
+    private int loadShader(int shaderType, String source) {
+        int shader = GLES20.glCreateShader(shaderType);
+        checkGlError("glCreateShader type=" + shaderType);
+        GLES20.glShaderSource(shader, source);
+        GLES20.glCompileShader(shader);
+        int[] compiled = new int[1];
+        GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compiled, 0);
+        if (compiled[0] == 0) {
+            Log.e(TAG, "Could not compile shader " + shaderType + ":");
+            Log.e(TAG, " " + GLES20.glGetShaderInfoLog(shader));
+            GLES20.glDeleteShader(shader);
+            shader = 0;
+        }
+        return shader;
+    }
+
+    private int createProgram(String vertexSource, String fragmentSource) {
+        int vertexShader = loadShader(GLES20.GL_VERTEX_SHADER, vertexSource);
+        if (vertexShader == 0) {
+            return 0;
+        }
+        int pixelShader = loadShader(GLES20.GL_FRAGMENT_SHADER, fragmentSource);
+        if (pixelShader == 0) {
+            return 0;
+        }
+
+        int program = GLES20.glCreateProgram();
+        checkGlError("glCreateProgram");
+        if (program == 0) {
+            Log.e(TAG, "Could not create program");
+        }
+        GLES20.glAttachShader(program, vertexShader);
+        checkGlError("glAttachShader");
+        GLES20.glAttachShader(program, pixelShader);
+        checkGlError("glAttachShader");
+        GLES20.glLinkProgram(program);
+        int[] linkStatus = new int[1];
+        GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, linkStatus, 0);
+        if (linkStatus[0] != GLES20.GL_TRUE) {
+            Log.e(TAG, "Could not link program: ");
+            Log.e(TAG, GLES20.glGetProgramInfoLog(program));
+            GLES20.glDeleteProgram(program);
+            program = 0;
+        }
+        return program;
+    }
+
+    public void checkGlError(String op) {
+        int error;
+        while ((error = GLES20.glGetError()) != GLES20.GL_NO_ERROR) {
+            Log.e(TAG, op + ": glError " + error);
+            throw new RuntimeException(op + ": glError " + error);
+        }
+    }
+
+    /**
+     * Saves the current frame to disk as a PNG image.  Frame starts from (0,0).
+     * <p>
+     * Useful for debugging.
+     */
+    public static void saveFrame(String filename, int width, int height) {
+        // glReadPixels gives us a ByteBuffer filled with what is essentially big-endian RGBA
+        // data (i.e. a byte of red, followed by a byte of green...).  We need an int[] filled
+        // with native-order ARGB data to feed to Bitmap.
+        //
+        // If we implement this as a series of buf.get() calls, we can spend 2.5 seconds just
+        // copying data around for a 720p frame.  It's better to do a bulk get() and then
+        // rearrange the data in memory.  (For comparison, the PNG compress takes about 500ms
+        // for a trivial frame.)
+        //
+        // So... we set the ByteBuffer to little-endian, which should turn the bulk IntBuffer
+        // get() into a straight memcpy on most Android devices.  Our ints will hold ABGR data.
+        // Swapping B and R gives us ARGB.  We need about 30ms for the bulk get(), and another
+        // 270ms for the color swap.
+        //
+        // Making this even more interesting is the upside-down nature of GL, which means we
+        // may want to flip the image vertically here.
+
+        ByteBuffer buf = ByteBuffer.allocateDirect(width * height * 4);
+        buf.order(ByteOrder.LITTLE_ENDIAN);
+        GLES20.glReadPixels(0, 0, width, height, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, buf);
+        buf.rewind();
+
+        int pixelCount = width * height;
+        int[] colors = new int[pixelCount];
+        buf.asIntBuffer().get(colors);
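+        // Swap R and B in each pixel: ABGR (from little-endian RGBA) becomes the ARGB Bitmap expects.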
+        for (int i = 0; i < pixelCount; i++) {
+            int c = colors[i];
+            colors[i] = (c & 0xff00ff00) | ((c & 0x00ff0000) >> 16) | ((c & 0x000000ff) << 16);
+        }
+
+        FileOutputStream fos = null;
+        try {
+            fos = new FileOutputStream(filename);
+            Bitmap bmp = Bitmap.createBitmap(colors, width, height, Bitmap.Config.ARGB_8888);
+            bmp.compress(Bitmap.CompressFormat.PNG, 90, fos);
+            bmp.recycle();
+        } catch (IOException ioe) {
+            throw new RuntimeException("Failed to write file " + filename, ioe);
+        } finally {
+            try {
+                if (fos != null) fos.close();
+            } catch (IOException ioe2) {
+                throw new RuntimeException("Failed to close file " + filename, ioe2);
+            }
+        }
+        Log.d(TAG, "Saved " + width + "x" + height + " frame as '" + filename + "'");
+    }
+}
diff --git a/apps/CtsVerifier/src/com/android/cts/verifier/sensors/MotionIndicatorView.java b/apps/CtsVerifier/src/com/android/cts/verifier/sensors/MotionIndicatorView.java
new file mode 100644
index 0000000..12d4582
--- /dev/null
+++ b/apps/CtsVerifier/src/com/android/cts/verifier/sensors/MotionIndicatorView.java
@@ -0,0 +1,409 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.cts.verifier.sensors;
+
+import android.content.Context;
+import android.graphics.Canvas;
+import android.graphics.Color;
+import android.graphics.Paint;
+import android.graphics.PorterDuff;
+import android.graphics.PorterDuffXfermode;
+import android.graphics.RectF;
+import android.hardware.SensorManager;
+import android.util.AttributeSet;
+import android.util.Log;
+import android.view.View;
+
+/**
+ * A view class that draws the user prompt.
+ *
+ * The following piece of code shows how to use this view:
+ *
+ *  public void testUI()  {
+ *     final int MAX_TILT_ANGLE = 70; // +/- 70
+ *
+ *     final int TILT_ANGLE_STEP = 5; // 5 degree(s) per step
+ *     final int YAW_ANGLE_STEP = 10; // 10 degree(s) per step
+ *
+ *     RangeCoveredRegister xCovered, yCovered, zCovered;
+ *     xCovered = new RangeCoveredRegister(-MAX_TILT_ANGLE, +MAX_TILT_ANGLE, TILT_ANGLE_STEP);
+ *
+ *     yCovered = new RangeCoveredRegister(-MAX_TILT_ANGLE, +MAX_TILT_ANGLE, TILT_ANGLE_STEP);
+ *     zCovered = new RangeCoveredRegister(YAW_ANGLE_STEP);
+ *
+ *     xCovered.update(40);
+ *     xCovered.update(-40);
+ *     xCovered.update(12);
+ *
+ *     yCovered.update(50);
+ *     yCovered.update(-51);
+ *
+ *     zCovered.update(150);
+ *     zCovered.update(42);
+ *
+ *     setDataProvider(xCovered, yCovered, zCovered);
+ *     enableAxis(RVCVRecordActivity.AXIS_ALL); //debug mode, show all three axis
+ * }
+ */
+public class MotionIndicatorView extends View {
+    private final String TAG = "MotionIndicatorView";
+    private final boolean LOCAL_LOGV = false;
+
+    private Paint mCursorPaint;
+    private Paint mLimitPaint;
+    private Paint mCoveredPaint;
+    private Paint mRangePaint;
+    private Paint mEraserPaint;
+
+    // UI settings
+    private final int XBAR_WIDTH = 50;
+    private final int XBAR_MARGIN = 50;
+    private final int XBAR_CURSOR_ADD = 20;
+
+    private final int YBAR_WIDTH = 50;
+    private final int YBAR_MARGIN = 50;
+    private final int YBAR_CURSOR_ADD = 20;
+
+    private final int ZRING_WIDTH = 50;
+    private final int ZRING_CURSOR_ADD = 30;
+
+
+    private int mXSize, mYSize;
+    private RectF mZBoundOut, mZBoundOut2, mZBoundIn, mZBoundIn2;
+
+    private RangeCoveredRegister mXCovered, mYCovered, mZCovered;
+
+    private boolean mXEnabled, mYEnabled, mZEnabled;
+
+    /**
+     * Constructor
+     * @param context
+     */
+    public MotionIndicatorView(Context context) {
+        super(context);
+        init();
+    }
+
+    /**
+     * Constructor
+     * @param context Application context
+     * @param attrs
+     */
+    public MotionIndicatorView(Context context, AttributeSet attrs) {
+        super(context, attrs);
+        init();
+    }
+
+    /**
+     * Initialize the Paint objects
+     */
+    private void init() {
+
+        mCursorPaint = new Paint();
+        mCursorPaint.setColor(Color.BLUE);
+
+        mLimitPaint = new Paint();
+        mLimitPaint.setColor(Color.YELLOW);
+
+        mCoveredPaint = new Paint();
+        mCoveredPaint.setColor(Color.CYAN);
+
+        mRangePaint = new Paint();
+        mRangePaint.setColor(Color.DKGRAY);
+
+        mEraserPaint = new Paint();
+        mEraserPaint.setColor(Color.TRANSPARENT);
+        // ensure the erasing effect
+        mEraserPaint.setXfermode(new PorterDuffXfermode(PorterDuff.Mode.SRC));
+    }
+
+    /**
+     * Connect the view to certain data provider objects
+     * @param x Data provider for x direction tilt angle
+     * @param y Data provider for y direction tilt angle
+     * @param z Data provider for z rotation
+     */
+    public void setDataProvider(RangeCoveredRegister x,
+                                RangeCoveredRegister y,
+                                RangeCoveredRegister z)    {
+        mXCovered = x;
+        mYCovered = y;
+        mZCovered = z;
+    }
+
+    /**
+     * Set the active axis for display
+     *
+     * @param axis AXIS_X, AXIS_Y, AXIS_Z for x, y, z axis indicators, or AXIS_ALL for all three.
+     */
+    public void enableAxis(int axis)  {
+        mXEnabled = mYEnabled = mZEnabled = false;
+
+        switch(axis)
+        {
+            case SensorManager.AXIS_X:
+                mXEnabled = true;
+                break;
+            case SensorManager.AXIS_Y:
+                mYEnabled = true;
+                break;
+            case SensorManager.AXIS_Z:
+                mZEnabled = true;
+                break;
+            case RVCVRecordActivity.AXIS_ALL:
+                mXEnabled = mYEnabled = mZEnabled = true;
+        }
+    }
+
+    /**
+     * Pre-calculate values that only change when the view dimensions change.
+     * @param w
+     * @param h
+     * @param oldw
+     * @param oldh
+     */
+    @Override
+    protected void onSizeChanged (int w, int h, int oldw, int oldh) {
+        mXSize = w;
+        mYSize = h;
+
+        mZBoundOut = new RectF(w/2-w/2.5f, h/2-w/2.5f, w/2+w/2.5f, h/2+w/2.5f);
+        mZBoundOut2 = new RectF(
+                w/2-w/2.5f-ZRING_CURSOR_ADD, h/2-w/2.5f-ZRING_CURSOR_ADD,
+                w/2+w/2.5f+ZRING_CURSOR_ADD, h/2+w/2.5f+ZRING_CURSOR_ADD);
+        mZBoundIn = new RectF(
+                w/2-w/2.5f+ZRING_WIDTH, h/2-w/2.5f+ZRING_WIDTH,
+                w/2+w/2.5f-ZRING_WIDTH, h/2+w/2.5f-ZRING_WIDTH);
+        mZBoundIn2 = new RectF(
+                w/2-w/2.5f+ZRING_WIDTH+ZRING_CURSOR_ADD, h/2-w/2.5f+ZRING_WIDTH+ZRING_CURSOR_ADD,
+                w/2+w/2.5f-ZRING_WIDTH-ZRING_CURSOR_ADD, h/2+w/2.5f-ZRING_WIDTH-ZRING_CURSOR_ADD);
+
+        if (LOCAL_LOGV) Log.v(TAG, "New view size = ("+w+", "+h+")");
+    }
+
+    /**
+     * Draw the UI depending on the selected axis and the registered values.
+     *
+     * @param canvas the canvas to draw on
+     */
+    @Override
+    protected void onDraw(Canvas canvas) {
+        super.onDraw(canvas);
+        int i,t;
+
+        Paint p = new Paint();
+        p.setColor(Color.YELLOW);
+        canvas.drawRect(10,10, 50, 50, p);
+
+        if (mXEnabled && mXCovered != null) {
+            int xNStep = mXCovered.getNSteps() + 4; // two on each side as a buffer
+            int xStepSize = mXSize * 3/4 / xNStep;
+            int xLeft = mXSize * 1/8 + (mXSize * 3/4 % xNStep)/2;
+
+            // base bar
+            canvas.drawRect(xLeft, XBAR_MARGIN,
+                    xLeft+xStepSize*xNStep-1, XBAR_WIDTH+XBAR_MARGIN, mRangePaint);
+
+            // covered range
+            for (i=0; i<mXCovered.getNSteps(); ++i) {
+                if (mXCovered.isCovered(i)) {
+                    canvas.drawRect(
+                            xLeft+xStepSize*(i+2), XBAR_MARGIN,
+                            xLeft+xStepSize*(i+3)-1, XBAR_WIDTH + XBAR_MARGIN,
+                            mCoveredPaint);
+                }
+            }
+
+            // limit
+            canvas.drawRect(xLeft+xStepSize*2-4, XBAR_MARGIN,
+                    xLeft+xStepSize*2+3, XBAR_WIDTH+XBAR_MARGIN, mLimitPaint);
+            canvas.drawRect(xLeft+xStepSize*(xNStep-2)-4, XBAR_MARGIN,
+                    xLeft+xStepSize*(xNStep-2)+3, XBAR_WIDTH+XBAR_MARGIN, mLimitPaint);
+
+            // cursor
+            t = (int)(xLeft+xStepSize*(mXCovered.getLastValue()+2));
+            canvas.drawRect(t-4, XBAR_MARGIN-XBAR_CURSOR_ADD, t+3,
+                    XBAR_WIDTH+XBAR_MARGIN+XBAR_CURSOR_ADD, mCursorPaint);
+        }
+        if (mYEnabled && mYCovered != null) {
+            int yNStep = mYCovered.getNSteps() + 4; // two on each side as a buffer
+            int yStepSize = mYSize * 3/4 / yNStep;
+            int yLeft = mYSize * 1/8 + (mYSize * 3/4 % yNStep)/2;
+
+            // base bar
+            canvas.drawRect(YBAR_MARGIN, yLeft,
+                    YBAR_WIDTH+YBAR_MARGIN, yLeft+yStepSize*yNStep-1, mRangePaint);
+
+            // covered range
+            for (i=0; i<mYCovered.getNSteps(); ++i) {
+                if (mYCovered.isCovered(i)) {
+                    canvas.drawRect(
+                            YBAR_MARGIN, yLeft+yStepSize*(i+2),
+                            YBAR_WIDTH + YBAR_MARGIN, yLeft+yStepSize*(i+3)-1,
+                            mCoveredPaint);
+                }
+            }
+
+            // limit
+            canvas.drawRect(YBAR_MARGIN, yLeft + yStepSize * 2 - 4,
+                    YBAR_WIDTH + YBAR_MARGIN, yLeft + yStepSize * 2 + 3, mLimitPaint);
+            canvas.drawRect(YBAR_MARGIN, yLeft + yStepSize * (yNStep - 2) - 4,
+                    YBAR_WIDTH + YBAR_MARGIN, yLeft + yStepSize * (yNStep - 2) + 3, mLimitPaint);
+
+            // cursor
+            t = (int)(yLeft+yStepSize*(mYCovered.getLastValue()+2));
+            canvas.drawRect( YBAR_MARGIN-YBAR_CURSOR_ADD, t-4,
+                    YBAR_WIDTH+YBAR_MARGIN+YBAR_CURSOR_ADD, t+3, mCursorPaint);
+        }
+
+        if (mZEnabled && mZCovered != null) {
+            float stepSize  = 360.0f/mZCovered.getNSteps();
+
+            // base bar
+            canvas.drawArc(mZBoundOut,0, 360, true, mRangePaint);
+
+            // covered range
+            for (i=0; i<mZCovered.getNSteps(); ++i) {
+                if (mZCovered.isCovered(i)) {
+                    canvas.drawArc(mZBoundOut,i*stepSize-0.2f, stepSize+0.4f,
+                            true, mCoveredPaint);
+                }
+            }
+            // clear center
+            canvas.drawArc(mZBoundIn, 0, 360, true, mEraserPaint);
+            // cursor
+            canvas.drawArc(mZBoundOut2, mZCovered.getLastValue()*stepSize- 1, 2,
+                    true, mCursorPaint);
+            canvas.drawArc(mZBoundIn2, mZCovered.getLastValue()*stepSize-1.5f, 3,
+                    true, mEraserPaint);
+        }
+    }
+}
+
+/**
+ *  A range register class for the RVCVRecord Activity
+ */
+class RangeCoveredRegister {
+    enum MODE {
+        LINEAR,
+        ROTATE2D
+    }
+
+    private boolean[] mCovered;
+    private MODE mMode;
+    private int mStep;
+    private int mLow, mHigh;
+    private int mLastData;
+
+    // high is not inclusive
+    RangeCoveredRegister(int low, int high, int step) {
+        mMode = MODE.LINEAR;
+        mStep = step;
+        mLow = low;
+        mHigh = high;
+        init();
+    }
+
+    RangeCoveredRegister(int step) {
+        mMode = MODE.ROTATE2D;
+        mStep = step;
+        mLow = 0;
+        mHigh = 360;
+        init();
+    }
+
+    private void init() {
+        if (mMode == MODE.LINEAR) {
+            mCovered = new boolean[(mHigh-mLow)/mStep];
+        } else {
+            mCovered = new boolean[360/mStep];
+        }
+    }
+
+    /**
+     * Test if the range defined is fully covered.
+     *
+     * @return if the range is fully covered, return true; otherwise false.
+     */
+    public boolean isFullyCovered() {
+        for (boolean i:mCovered) {
+            if (!i) return false;
+        }
+        return true;
+    }
+
+    /**
+     * Test if a specific step is covered.
+     *
+     * @param i the step number
+     * @return if the step specified is covered, return true; otherwise false.
+     */
+    public boolean isCovered(int i) {
+        return mCovered[i];
+    }
+
+    /**
+     * Update the register with a new reading.
+     *
+     * @param data the latest value
+     * @return true if this update covers a previously uncovered step; false otherwise.
+     */
+    public boolean update(int data) {
+        mLastData = data;
+
+        if (mMode == MODE.ROTATE2D) {
+            data %= 360;
+        }
+
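+        // Map the value to its step bucket; values outside [mLow, mHigh) are ignored.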
+        int iStep = (data - mLow)/mStep;
+
+        if (iStep>=0 && iStep<getNSteps()) {
+            // only record valid data
+            mLastData = data;
+
+            if (mCovered[iStep]) {
+                return false;
+            } else {
+                mCovered[iStep] = true;
+                return true;
+            }
+        }
+        return false;
+    }
+
+    /**
+     * Get the number of steps in this register
+     *
+     * @return The number of steps in this register
+     */
+    public int getNSteps() {
+        //if (mCovered == null) {
+        //return 0;
+        //}
+        return mCovered.length;
+    }
+
+    /**
+     * Get the last value updated
+     *
+     * @return The last value updated
+     */
+    public float getLastValue() {
+        // ensure float division
+        return ((float)(mLastData - mLow))/mStep;
+    }
+}
diff --git a/apps/CtsVerifier/src/com/android/cts/verifier/sensors/RVCVCameraPreview.java b/apps/CtsVerifier/src/com/android/cts/verifier/sensors/RVCVCameraPreview.java
new file mode 100644
index 0000000..a5b58f6
--- /dev/null
+++ b/apps/CtsVerifier/src/com/android/cts/verifier/sensors/RVCVCameraPreview.java
@@ -0,0 +1,120 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.android.cts.verifier.sensors;
+
+// ----------------------------------------------------------------------
+
+import android.content.Context;
+import android.hardware.Camera;
+import android.util.AttributeSet;
+import android.util.Log;
+import android.view.SurfaceHolder;
+import android.view.SurfaceView;
+
+import java.io.IOException;
+import java.util.List;
+
+/** Camera preview class */
+public class RVCVCameraPreview extends SurfaceView implements SurfaceHolder.Callback {
+    private static final String TAG = "RVCVCameraPreview";
+    private static final boolean LOCAL_LOGD = true;
+
+    private SurfaceHolder mHolder;
+    private Camera mCamera;
+
+    /**
+     * Constructor
+     * @param context Activity context
+     * @param camera Camera object to be previewed
+     */
+    public RVCVCameraPreview(Context context, Camera camera) {
+        super(context);
+        mCamera = camera;
+        initSurface();
+    }
+
+    /**
+     * Constructor
+     * @param context Activity context
+     * @param attrs
+     */
+    public RVCVCameraPreview(Context context, AttributeSet attrs) {
+        super(context, attrs);
+    }
+
+    public void init(Camera camera) {
+        this.mCamera = camera;
+        initSurface();
+    }
+
+    private void initSurface() {
+        // Install a SurfaceHolder.Callback so we get notified when the
+        // underlying surface is created and destroyed.
+        mHolder = getHolder();
+        mHolder.addCallback(this);
+
+        // deprecated
+        // TODO: update this code to match new API level.
+        mHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
+    }
+
+    /**
+     *  SurfaceHolder.Callback
+     *  Surface is created, it is OK to start the camera preview now.
+     */
+    public void surfaceCreated(SurfaceHolder holder) {
+        // The Surface has been created, now tell the camera where to draw the preview.
+
+        if (mCamera == null) {
+            // preview camera does not exist
+            return;
+        }
+
+        try {
+            mCamera.setPreviewDisplay(holder);
+            mCamera.startPreview();
+        } catch (IOException e) {
+            if (LOCAL_LOGD) Log.d(TAG, "Error when starting camera preview: " + e.getMessage());
+        }
+    }
+    /**
+     *  SurfaceHolder.Callback
+     */
+    public void surfaceDestroyed(SurfaceHolder holder) {
+        // empty. Take care of releasing the Camera preview in your activity.
+    }
+
+    /**
+     *  SurfaceHolder.Callback
+     *  Restart camera preview if surface changed
+     */
+    public void surfaceChanged(SurfaceHolder holder, int format, int w, int h) {
+
+        if (mHolder.getSurface() == null || mCamera == null){
+            // preview surface or camera does not exist
+            return;
+        }
+
+        // stop preview before making changes
+        mCamera.stopPreview();
+
+        // the activity using this view is locked to a fixed orientation, so hard-coding is fine
+        mCamera.setDisplayOrientation(90);
+
+        // restart the preview as if the surface had just been created
+        surfaceCreated(holder);
+    }
+}
diff --git a/apps/CtsVerifier/src/com/android/cts/verifier/sensors/RVCVRecordActivity.java b/apps/CtsVerifier/src/com/android/cts/verifier/sensors/RVCVRecordActivity.java
new file mode 100644
index 0000000..9011619
--- /dev/null
+++ b/apps/CtsVerifier/src/com/android/cts/verifier/sensors/RVCVRecordActivity.java
@@ -0,0 +1,903 @@
+/*
+ * Copyright (C) 2007 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.cts.verifier.sensors;
+
+import android.app.Activity;
+import android.content.Context;
+import android.content.Intent;
+import android.hardware.Camera;
+import android.hardware.Sensor;
+import android.hardware.SensorEvent;
+import android.hardware.SensorEventListener;
+import android.hardware.SensorManager;
+import android.media.AudioManager;
+import android.media.CamcorderProfile;
+import android.media.MediaRecorder;
+import android.media.SoundPool;
+import android.net.Uri;
+import android.os.Bundle;
+import android.os.Environment;
+import android.util.JsonWriter;
+import android.util.Log;
+import android.view.Window;
+import android.widget.ImageView;
+import android.widget.Toast;
+
+import com.android.cts.verifier.R;
+
+import java.io.File;
+import java.io.FileNotFoundException;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.OutputStreamWriter;
+import java.text.SimpleDateFormat;
+import java.util.Date;
+
+
+// ----------------------------------------------------------------------
+
+/**
+ *  An activity that does recording of the camera video and rotation vector data at the same time.
+ */
+public class RVCVRecordActivity extends Activity {
+    private static final String TAG = "RVCVRecordActivity";
+    //private static final boolean LOCAL_LOGD = true;
+    private static final boolean LOCAL_LOGV = false;
+
+    private MotionIndicatorView mIndicatorView;
+
+    private SoundPool mSoundPool;
+    private int [] mSoundPoolLookup;
+
+    private File mRecordDir;
+    private RecordProcedureController mController;
+    private VideoRecorder           mVideoRecorder;
+    private RVSensorLogger          mRVSensorLogger;
+    private CoverageManager         mCoverManager;
+    private CameraPreviewer         mPreviewer;
+
+    public static final int AXIS_NONE = 0;
+    public static final int AXIS_ALL = SensorManager.AXIS_X +
+                                       SensorManager.AXIS_Y +
+                                       SensorManager.AXIS_Z;
+
+    // For Rotation Vector algorithm research use
+    private final static boolean     LOG_RAW_SENSORS = false;
+    private RawSensorLogger          mRawSensorLogger;
+
+    @Override
+    public void onCreate(Bundle savedInstanceState) {
+        super.onCreate(savedInstanceState);
+
+        // Hide the window title.
+        requestWindowFeature(Window.FEATURE_NO_TITLE);
+
+        // inflate xml
+        setContentView(R.layout.cam_preview_overlay);
+
+        // locate views
+        mIndicatorView = (MotionIndicatorView) findViewById(R.id.cam_indicator);
+
+        initStoragePath();
+    }
+
+    @Override
+    protected void onPause() {
+        super.onPause();
+        mController.quit();
+
+        mPreviewer.end();
+        endSoundPool();
+    }
+
+    @Override
+    protected void onResume() {
+        super.onResume();
+        // delay the initialization as much as possible
+        init();
+    }
+
+    /** Display a toast message
+     *
+     * @param msg Message content
+     */
+    private void message(String msg) {
+
+        Context context = getApplicationContext();
+        int duration = Toast.LENGTH_SHORT;
+
+        Toast toast = Toast.makeText(context, msg, duration);
+        toast.show();
+    }
+
+    /**
+     *  Initialize components
+     *
+     */
+    private void init() {
+        mPreviewer = new CameraPreviewer();
+        mPreviewer.init();
+
+        mCoverManager = new CoverageManager();
+        mIndicatorView.setDataProvider(
+                mCoverManager.getAxis(SensorManager.AXIS_X),
+                mCoverManager.getAxis(SensorManager.AXIS_Y),
+                mCoverManager.getAxis(SensorManager.AXIS_Z)  );
+
+        initSoundPool();
+        mRVSensorLogger = new RVSensorLogger(this);
+
+        mVideoRecorder = new VideoRecorder(mPreviewer.getCamera());
+
+        if (LOG_RAW_SENSORS) {
+            mRawSensorLogger = new RawSensorLogger(mRecordDir);
+        }
+
+        mController = new RecordProcedureController(this);
+    }
+
+    /**
+     * Notify that recording is complete. This is the successful exit path.
+     */
+    public void notifyComplete() {
+        message("Capture completed!");
+
+        Uri resultUri = Uri.fromFile(mRecordDir);
+        Intent result = new Intent();
+        result.setData(resultUri);
+        setResult(Activity.RESULT_OK, result);
+
+        finish();
+    }
+
+    /**
+     * Notify the user what to do next in text
+     *
+     * @param axis SensorManager.AXIS_X or SensorManager.AXIS_Y or SensorManager.AXIS_Z
+     */
+    private void notifyPrompt(int axis) {
+        // The order is not XYZ because an earlier design used a different definition of
+        // the X and Y axes
+        final String axisName = "YXZ";
+
+        message("Manipulate the device in " + axisName.charAt(axis-1) + " axis (as illustrated) about the pattern.");
+    }
+
+    /**
+     *  Ask indicator view to redraw
+     */
+    private void redrawIndicator() {
+        mIndicatorView.invalidate();
+    }
+
+    /**
+     * Switch to a different axis for display and logging
+     * @param axis SensorManager.AXIS_X, SensorManager.AXIS_Y or SensorManager.AXIS_Z,
+     *             or AXIS_NONE to clear the indicator
+     */
+    private void switchAxis(int axis) {
+        ImageView imageView = (ImageView) findViewById(R.id.cam_overlay);
+
+        final int [] prompts = {R.drawable.prompt_x, R.drawable.prompt_y, R.drawable.prompt_z};
+
+        if (axis >=SensorManager.AXIS_X && axis <=SensorManager.AXIS_Z) {
+            imageView.setImageResource(prompts[axis-1]);
+            mIndicatorView.enableAxis(axis);
+            mRVSensorLogger.updateRegister(mCoverManager.getAxis(axis), axis);
+            notifyPrompt(axis);
+        } else {
+            imageView.setImageDrawable(null);
+            mIndicatorView.enableAxis(AXIS_NONE);
+        }
+        redrawIndicator();
+    }
+
+    /**
+     * Asynchronous way to call switchAxis. Use this if the caller is not on the UI thread.
+     * @param axis SensorManager.AXIS_X, SensorManager.AXIS_Y or SensorManager.AXIS_Z
+     */
+    public void switchAxisAsync(int axis) {
+        // intended to be called from a non-UI thread
+        final int fAxis = axis;
+        runOnUiThread(new Runnable() {
+            public void run() {
+                // UI code goes here
+                switchAxis(fAxis);
+            }
+        });
+    }
+
+    /**
+     * Initialize sound pool for user notification
+     */
+    private void initSoundPool() {
+        final int MAX_STREAM = 10;
+        int i=0;
+        mSoundPool = new SoundPool(MAX_STREAM, AudioManager.STREAM_MUSIC, 0);
+        mSoundPoolLookup = new int[MAX_STREAM];
+
+        // TODO: add different sound into this
+        mSoundPoolLookup[i++] = mSoundPool.load(this, R.raw.next_axis, 1);
+        mSoundPoolLookup[i++] = mSoundPool.load(this, R.raw.next_axis, 1);
+        mSoundPoolLookup[i++] = mSoundPool.load(this, R.raw.next_axis, 1);
+
+    }
+    private void endSoundPool() {
+        mSoundPool.release();
+    }
+
+    /**
+     * Play notify sound to user
+     * @param id ID of the sound to be played
+     */
+    public void playNotifySound(int id) {
+        mSoundPool.play(mSoundPoolLookup[id], 1, 1, 0, 0, 1);
+    }
+
+    /**
+     * Start the sensor recording
+     */
+    public void startRecordSensor() {
+        mRVSensorLogger.init();
+        if (LOG_RAW_SENSORS) {
+            mRawSensorLogger.init();
+        }
+    }
+
+    /**
+     * Stop the sensor recording
+     */
+    public void stopRecordSensor() {
+        mRVSensorLogger.end();
+        if (LOG_RAW_SENSORS) {
+            mRawSensorLogger.end();
+        }
+    }
+
+    /**
+     * Start video recording
+     */
+    public void startRecordVideo() {
+        mVideoRecorder.init();
+    }
+
+    /**
+     * Stop video recording
+     */
+    public void stopRecordVideo() {
+        mVideoRecorder.end();
+    }
+
+    /**
+     * Block until the sensor recording for a certain axis fully covers the required range
+     * @param axis SensorManager.AXIS_X, SensorManager.AXIS_Y or SensorManager.AXIS_Z
+     */
+    public void waitUntilCovered(int axis) {
+        mCoverManager.waitUntilCovered(axis);
+    }
+
+
+    /**
+     * Initialize the external storage directories used to save recorded data
+     */
+    private void initStoragePath() {
+        File rxcvRecDataDir = new File(Environment.getExternalStorageDirectory(),"RVCVRecData");
+
+        // Create the storage directory if it does not exist
+        if (! rxcvRecDataDir.exists()) {
+            if (! rxcvRecDataDir.mkdirs()) {
+                Log.e(TAG, "failed to create main data directory");
+            }
+        }
+
+        mRecordDir = new File(rxcvRecDataDir, new SimpleDateFormat("yyMMdd-hhmmss").format(new Date()));
+
+        if (! mRecordDir.mkdirs()) {
+            Log.e(TAG, "failed to create rec data directory");
+        }
+    }
+
+    /**
+     * Get the sensor log file path
+     * @return Path of the sensor log file
+     */
+    public String getSensorLogFilePath() {
+        return new File(mRecordDir, "sensor.log").getPath();
+    }
+
+    /**
+     * Get the video recording file path
+     * @return Path of the video recording file
+     */
+    public String getVideoRecFilePath() {
+        return new File(mRecordDir, "video.mp4").getPath();
+    }
+
+    /**
+     * Write out important camera/video information to a JSON file
+     * @param width         width of frame
+     * @param height        height of frame
+     * @param frameRate     frame rate in fps
+     * @param fovW          field of view in width direction
+     * @param fovH          field of view in height direction
+     */
+    public void writeVideoMetaInfo(int width, int height, float frameRate, float fovW, float fovH) {
+        try {
+            JsonWriter writer =
+                    new JsonWriter(
+                        new OutputStreamWriter(
+                                new FileOutputStream(
+                                        new File(mRecordDir, "videometa.json").getPath()
+                                )
+                        )
+                    );
+            writer.beginObject();
+            writer.name("fovW").value(fovW);
+            writer.name("fovH").value(fovH);
+            writer.name("width").value(width);
+            writer.name("height").value(height);
+            writer.name("frameRate").value(frameRate);
+            writer.endObject();
+
+            writer.close();
+        } catch (FileNotFoundException e) {
+            // Not very likely to happen
+            e.printStackTrace();
+        } catch (IOException e) {
+            e.printStackTrace();
+            Log.e(TAG, "Writing video meta data failed.");
+        }
+    }
+
+    /**
+     * Camera preview control class
+     */
+    class CameraPreviewer {
+        private Camera mCamera;
+
+        CameraPreviewer() {
+            try {
+                mCamera = Camera.open(); // attempt to get a default Camera instance
+            }
+            catch (Exception e) {
+                // Camera is not available (in use or does not exist)
+                Log.e(TAG, "Cannot obtain Camera!");
+            }
+        }
+
+        /**
+         * Get the camera to be previewed
+         * @return Reference to Camera used
+         */
+        public Camera getCamera() {
+            return mCamera;
+        }
+
+        /**
+         * Setup the camera
+         */
+        public void init() {
+            if (mCamera != null) {
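+                // Estimate the focal length in pixels with a pinhole model,
+                // fx = (width / 2) / tan(horizontalFov / 2), assuming a 1920-pixel-wide frame.
+                // The value is only logged here for reference.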
+                double alpha = mCamera.getParameters().getHorizontalViewAngle()*Math.PI/180.0;
+                int width = 1920;
+                double fx = width/2/Math.tan(alpha/2.0);
+
+                if (LOCAL_LOGV) Log.v(TAG, "View angle="
+                        + mCamera.getParameters().getHorizontalViewAngle() +"  Estimated fx = "+fx);
+
+                RVCVCameraPreview cameraPreview =
+                        (RVCVCameraPreview) findViewById(R.id.cam_preview);
+                cameraPreview.init(mCamera);
+            } else {
+                message("Cannot open camera!");
+                finish();
+            }
+        }
+
+        /**
+         * End the camera preview
+         */
+        public void end() {
+            if (mCamera != null) {
+                mCamera.release();        // release the camera for other applications
+                mCamera = null;
+            }
+        }
+    }
+
+    /**
+     * Manage a set of RangeCoveredRegister objects
+     */
+    class CoverageManager {
+        // settings
+        private final int MAX_TILT_ANGLE = 60; // +/- 60
+        //private final int REQUIRED_TILT_ANGLE = 50; // +/- 50
+        private final int TILT_ANGLE_STEP = 5; // 5 degree(s) per step
+        private final int YAW_ANGLE_STEP = 10; // 10 degree(s) per step
+
+        RangeCoveredRegister[] mAxisCovered;
+
+        CoverageManager() {
+            mAxisCovered = new RangeCoveredRegister[3];
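+            // X and Y axes track tilt over [-MAX_TILT_ANGLE, +MAX_TILT_ANGLE] in
+            // TILT_ANGLE_STEP increments; the Z axis register is constructed with only a step
+            // size and tracks yaw rotation.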
+            // X AXIS
+            mAxisCovered[0] = new RangeCoveredRegister(-MAX_TILT_ANGLE, +MAX_TILT_ANGLE, TILT_ANGLE_STEP);
+            // Y AXIS
+            mAxisCovered[1] = new RangeCoveredRegister(-MAX_TILT_ANGLE, +MAX_TILT_ANGLE, TILT_ANGLE_STEP);
+            // Z AXIS
+            mAxisCovered[2] = new RangeCoveredRegister(YAW_ANGLE_STEP);
+        }
+
+        public RangeCoveredRegister getAxis(int axis) {
+            // SensorManager.AXIS_X = 1, need offset -1 for mAxisCovered array
+            return mAxisCovered[axis-1];
+        }
+
+        public void waitUntilCovered(int axis) {
+            // SensorManager.AXIS_X = 1, need offset -1 for mAxisCovered array
+            while(!mAxisCovered[axis-1].isFullyCovered()) {
+                try {
+                    Thread.sleep(500);
+                } catch (InterruptedException e) {
+                    if (LOCAL_LOGV) {
+                        Log.v(TAG, "waitUntilCovered axis = "+ axis + " is interrupted");
+                    }
+                }
+            }
+        }
+    }
+    ////////////////////////////////////////////////////////////////////////////////////////////////
+
+    /**
+     * A class that controls the video recording
+     */
+    class VideoRecorder
+    {
+        private MediaRecorder mRecorder;
+        private Camera mCamera;
+        private boolean mRunning = false;
+
+        private int [] mPreferredProfiles = {   CamcorderProfile.QUALITY_480P,  // smaller -> faster
+                                        CamcorderProfile.QUALITY_720P,
+                                        CamcorderProfile.QUALITY_1080P,
+                                        CamcorderProfile.QUALITY_HIGH // existence guaranteed
+                                    };
+
+
+        VideoRecorder(Camera camera) {
+            mCamera = camera;
+        }
+
+        /**
+         * Initialize and start recording
+         */
+        public void init() {
+            float fovW =  mCamera.getParameters().getHorizontalViewAngle();
+            float fovH =  mCamera.getParameters().getVerticalViewAngle();
+
+            mRecorder = new MediaRecorder();
+
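+            // The camera must be unlocked before handing it to MediaRecorder; it is re-locked
+            // after recording stops or fails.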
+            mCamera.unlock();
+
+            mRecorder.setCamera(mCamera);
+
+            mRecorder.setVideoSource(MediaRecorder.VideoSource.CAMERA);
+            mRecorder.setAudioSource(MediaRecorder.AudioSource.DEFAULT);
+
+            CamcorderProfile profile = null;
+            for (int i: mPreferredProfiles) {
+                if (CamcorderProfile.hasProfile(i)) {
+                    profile = CamcorderProfile.get(i);
+                    mRecorder.setProfile(profile);
+                    break;
+                }
+            }
+
+            writeVideoMetaInfo(profile.videoFrameWidth, profile.videoFrameHeight,
+                    profile.videoFrameRate, fovW, fovH);
+
+            try {
+                mRecorder.setOutputFile(getVideoRecFilePath());
+                mRecorder.prepare();
+            } catch (IOException e) {
+                Log.e(TAG, "Preparation for recording failed.");
+            }
+
+            try {
+                mRecorder.start();
+            } catch (RuntimeException e) {
+                Log.e(TAG, "Starting recording failed.");
+                mRecorder.reset();
+                mRecorder.release();
+                mCamera.lock();
+            }
+            mRunning = true;
+        }
+
+        /**
+         * Stop recording
+         */
+        public void end() {
+            if (mRunning) {
+                try {
+                    mRecorder.stop();
+                    mRecorder.reset();
+                    mRecorder.release();
+                    mCamera.lock();
+                } catch (RuntimeException e) {
+                    e.printStackTrace();
+                    Log.e(TAG, "Runtime error in stopping recording.");
+                }
+            }
+            mRecorder = null;
+        }
+
+    }
+
+    ////////////////////////////////////////////////////////////////////////////////////////////////
+
+    /**
+     *  Log all raw sensor readings, for Rotation Vector sensor algorithms research
+     */
+    class RawSensorLogger implements SensorEventListener {
+        private final String TAG = "RawSensorLogger";
+
+        private final static int SENSOR_RATE = SensorManager.SENSOR_DELAY_FASTEST;
+        private File mRecPath;
+
+        SensorManager mSensorManager;
+        Sensor mAccSensor, mGyroSensor, mMagSensor;
+        OutputStreamWriter mAccLogWriter, mGyroLogWriter, mMagLogWriter;
+
+        private float[] mRTemp = new float[16];
+
+        RawSensorLogger(File recPath) {
+            mRecPath = recPath;
+        }
+
+        /**
+         * Initialize and start recording
+         */
+        public void init() {
+            mSensorManager = (SensorManager)getSystemService(SENSOR_SERVICE);
+
+            mAccSensor = mSensorManager.getDefaultSensor(Sensor.TYPE_ACCELEROMETER);
+            mGyroSensor = mSensorManager.getDefaultSensor(Sensor.TYPE_GYROSCOPE_UNCALIBRATED);
+            mMagSensor = mSensorManager.getDefaultSensor(Sensor.TYPE_MAGNETIC_FIELD_UNCALIBRATED);
+
+            mSensorManager.registerListener(this, mAccSensor, SENSOR_RATE);
+            mSensorManager.registerListener(this, mGyroSensor, SENSOR_RATE);
+            mSensorManager.registerListener(this, mMagSensor, SENSOR_RATE);
+
+            try {
+                mAccLogWriter= new OutputStreamWriter(
+                        new FileOutputStream(new File(mRecPath, "raw_acc.log")));
+                mGyroLogWriter= new OutputStreamWriter(
+                        new FileOutputStream(new File(mRecPath, "raw_uncal_gyro.log")));
+                mMagLogWriter= new OutputStreamWriter(
+                        new FileOutputStream(new File(mRecPath, "raw_uncal_mag.log")));
+
+            } catch (FileNotFoundException e) {
+                Log.e(TAG, "Sensor log file open failed: " + e.toString());
+            }
+        }
+
+        /**
+         * Stop recording and clean up
+         */
+        public void end() {
+            mSensorManager.flush(this);
+            mSensorManager.unregisterListener(this);
+
+            try {
+                if (mAccLogWriter != null) {
+                    OutputStreamWriter writer = mAccLogWriter;
+                    mAccLogWriter = null;
+                    writer.close();
+                }
+                if (mGyroLogWriter != null) {
+                    OutputStreamWriter writer = mGyroLogWriter;
+                    mGyroLogWriter = null;
+                    writer.close();
+                }
+                if (mMagLogWriter != null) {
+                    OutputStreamWriter writer = mMagLogWriter;
+                    mMagLogWriter = null;
+                    writer.close();
+                }
+
+            } catch (IOException e) {
+                Log.e(TAG, "Sensor log file close failed: " + e.toString());
+            }
+        }
+
+        @Override
+        public void onAccuracyChanged(Sensor sensor, int i) {
+            // do not care
+        }
+
+        @Override
+        public void onSensorChanged(SensorEvent event) {
+            OutputStreamWriter writer=null;
+            switch(event.sensor.getType()) {
+                case Sensor.TYPE_ACCELEROMETER:
+                    writer = mAccLogWriter;
+                    break;
+                case Sensor.TYPE_GYROSCOPE_UNCALIBRATED:
+                    writer = mGyroLogWriter;
+                    break;
+                case Sensor.TYPE_MAGNETIC_FIELD_UNCALIBRATED:
+                    writer = mMagLogWriter;
+                    break;
+
+            }
+            if (writer!=null)  {
+                float[] data = event.values;
+                try {
+                    if (event.sensor.getType() == Sensor.TYPE_ACCELEROMETER) {
+                        writer.write(String.format("%d %f %f %f\r\n",
+                                event.timestamp, data[0], data[1], data[2]));
+                    } else { // TYPE_GYROSCOPE_UNCALIBRATED and TYPE_MAGNETIC_FIELD_UNCALIBRATED
+                        writer.write(String.format("%d %f %f %f %f %f %f\r\n", event.timestamp,
+                                data[0], data[1], data[2], data[3], data[4], data[5]));
+                    }
+                } catch (IOException e) {
+                    Log.e(TAG, "Write to raw sensor log file failed.");
+                }
+
+            }
+        }
+    }
+
+    /**
+     *  Rotation sensor logger class
+     */
+    class RVSensorLogger implements SensorEventListener {
+        private final String TAG = "RVSensorLogger";
+
+        private final static int SENSOR_RATE = 100;
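+        // Passed as the rate argument to registerListener(); values other than the
+        // SENSOR_DELAY_* constants are treated as the desired event delay in microseconds,
+        // so 100 requests a very fast sampling rate.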
+        RangeCoveredRegister mRegister;
+        int mAxis;
+        RVCVRecordActivity mActivity;
+
+        SensorManager mSensorManager;
+        Sensor mRVSensor;
+        OutputStreamWriter mLogWriter;
+
+        private float[] mRTemp = new float[16];
+
+        RVSensorLogger(RVCVRecordActivity activity) {
+            mActivity = activity;
+        }
+
+        /**
+         * Initialize and start recording
+         */
+        public void init() {
+            mSensorManager = (SensorManager)getSystemService(SENSOR_SERVICE);
+            mRVSensor = mSensorManager.getDefaultSensor(Sensor.TYPE_ROTATION_VECTOR);
+            mSensorManager.registerListener(this, mRVSensor, SENSOR_RATE);
+
+            try {
+                mLogWriter= new OutputStreamWriter(
+                        new FileOutputStream(mActivity.getSensorLogFilePath()));
+            } catch (FileNotFoundException e) {
+                Log.e(TAG, "Sensor log file open failed: " + e.toString());
+            }
+        }
+
+        /**
+         * Stop recording and clean up
+         */
+        public void end() {
+            mSensorManager.flush(this);
+            mSensorManager.unregisterListener(this);
+
+            try {
+                if (mLogWriter != null) {
+                    OutputStreamWriter writer = mLogWriter;
+                    mLogWriter = null;
+                    writer.close();
+                }
+            } catch (IOException e) {
+                Log.e(TAG, "Sensor log file close failed: " + e.toString());
+            }
+
+            updateRegister(null, AXIS_NONE);
+        }
+
+        private void onNewData(float[] data, long timestamp) {
+            // LOG
+            try {
+                if (mLogWriter != null) {
+                    mLogWriter.write(String.format("%d %f %f %f %f\r\n", timestamp,
+                            data[3], data[0], data[1], data[2]));
+                }
+            } catch (IOException e) {
+                Log.e(TAG, "Sensor log file write failed: " + e.toString());
+            }
+
+            // Update UI
+            if (mRegister != null) {
+                int d = 0;
+                int dx, dy, dz;
+                boolean valid = false;
+                SensorManager.getRotationMatrixFromVector(mRTemp, data);
+
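+                // mRTemp holds a 4x4 rotation matrix in row-major order as documented by
+                // getRotationMatrixFromVector(): element 8 is R[2][0], 9 is R[2][1],
+                // 4 is R[1][0] and 0 is R[0][0]. The expressions below extract approximate
+                // tilt angles in degrees for coverage tracking; dz (yaw) is offset by 180
+                // degrees so it falls in the 0..360 range.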
+                dx = (int)(Math.asin(mRTemp[8])*(180.0/Math.PI));
+                dy = (int)(Math.asin(mRTemp[9])*(180.0/Math.PI));
+                dz = (int)((Math.atan2(mRTemp[4], mRTemp[0])+Math.PI)*(180.0/Math.PI));
+
+                switch(mAxis) {
+                    case SensorManager.AXIS_X:
+                        d = dx;
+                        valid = (Math.abs(dy) < 30);
+                        break;
+                    case SensorManager.AXIS_Y:
+                        d = dy;
+                        valid = (Math.abs(dx) < 30);
+                        break;
+                    case SensorManager.AXIS_Z:
+                        d = dz;
+                        valid = (Math.abs(dx) < 20 && Math.abs(dy) < 20);
+                        break;
+                }
+
+                if (valid) {
+                    mRegister.update(d);
+                    mActivity.redrawIndicator();
+                }
+            }
+
+        }
+
+        public void updateRegister(RangeCoveredRegister reg, int axis) {
+            mRegister = reg;
+            mAxis = axis;
+        }
+
+
+        @Override
+        public void onAccuracyChanged(Sensor sensor, int i) {
+            // do not care
+        }
+
+        @Override
+        public void onSensorChanged(SensorEvent event) {
+            if (event.sensor.getType() == Sensor.TYPE_ROTATION_VECTOR) {
+                onNewData(event.values, event.timestamp);
+            }
+        }
+    }
+
+
+    ////////////////////////////////////////////////////////////////////////////////////////////////
+
+    /**
+     * Controls the overall logic of the recording procedure: the three axes are recorded one
+     * after another, presented to the user as X first, then Y, then Z.
+     */
+    class RecordProcedureController implements Runnable {
+        private static final boolean LOCAL_LOGV = false;
+
+        private final RVCVRecordActivity mActivity;
+        private Thread mThread = null;
+
+        RecordProcedureController(RVCVRecordActivity activity) {
+            mActivity = activity;
+            mThread = new Thread(this);
+            mThread.start();
+        }
+
+        /**
+         * Run the record procedure
+         */
+        public void run() {
+            if (LOCAL_LOGV) Log.v(TAG, "Controller Thread Started.");
+            //start recording & logging
+            delay(2000);
+
+            init();
+            if (LOCAL_LOGV) Log.v(TAG, "Controller Thread init() finished.");
+
+            // test 3 axes
+            // The order is YXZ because the UI element design uses the opposite definition of the
+            // X and Y axes. Flipping the order here ensures the user sees X, Y, then Z.
+            recordAxis(SensorManager.AXIS_Y);
+            if (LOCAL_LOGV) Log.v(TAG, "Controller Thread axis 0 finished.");
+
+            recordAxis(SensorManager.AXIS_X);
+            if (LOCAL_LOGV) Log.v(TAG, "Controller Thread axis 1 finished.");
+
+            recordAxis(SensorManager.AXIS_Z);
+            if (LOCAL_LOGV) Log.v(TAG, "Controller Thread axis 2 finished.");
+
+            delay(1000);
+            end();
+            if (LOCAL_LOGV) Log.v(TAG, "Controller Thread End.");
+        }
+
+        private void delay(int milli) {
+            try{
+                Thread.sleep(milli);
+            } catch(InterruptedException e) {
+                if (LOCAL_LOGV) Log.v(TAG, "Controller Thread Interrupted.");
+            }
+        }
+        private void init() {
+            // start video recording
+            mActivity.startRecordVideo();
+
+            // start sensor logging & listening
+            mActivity.startRecordSensor();
+        }
+
+        private void end() {
+            // stop video recording
+            mActivity.stopRecordVideo();
+
+            // stop sensor logging
+            mActivity.stopRecordSensor();
+
+            // notify ui complete
+            runOnUiThread(new Runnable(){
+                public void run() {
+                    mActivity.notifyComplete();
+                }
+            });
+        }
+
+        private void recordAxis(int axis) {
+            // brief delay before switching axis
+            delay(1000);
+
+            // change ui
+            mActivity.switchAxisAsync(axis);
+
+            // play start sound
+            mActivity.playNotifySound(0);
+
+            // wait until axis covered
+            mActivity.waitUntilCovered(axis);
+
+            // play stop sound
+            mActivity.playNotifySound(1);
+        }
+
+        /**
+         * Force quit
+         */
+        public void quit() {
+            mThread.interrupt();
+            try {
+                if (LOCAL_LOGV) Log.v(TAG, "Wait for controller to end");
+
+                // stop video recording
+                mActivity.stopRecordVideo();
+
+                // stop sensor logging
+                mActivity.stopRecordSensor();
+
+            } catch (Exception e)
+            {
+                e.printStackTrace();
+            }
+        }
+    }
+
+}
diff --git a/apps/CtsVerifier/src/com/android/cts/verifier/sensors/RVCVXCheckAnalyzer.java b/apps/CtsVerifier/src/com/android/cts/verifier/sensors/RVCVXCheckAnalyzer.java
new file mode 100644
index 0000000..128aaa3
--- /dev/null
+++ b/apps/CtsVerifier/src/com/android/cts/verifier/sensors/RVCVXCheckAnalyzer.java
@@ -0,0 +1,1290 @@
+package com.android.cts.verifier.sensors;
+
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import android.media.MediaCodec;
+import android.media.MediaExtractor;
+import android.media.MediaFormat;
+import android.os.Debug;
+import android.os.Environment;
+import android.util.JsonWriter;
+import android.util.Log;
+
+import org.opencv.core.Mat;
+import org.opencv.core.CvType;
+import org.opencv.core.MatOfDouble;
+import org.opencv.core.MatOfFloat;
+import org.opencv.core.MatOfPoint2f;
+import org.opencv.core.MatOfPoint3f;
+import org.opencv.core.Size;
+import org.opencv.highgui.Highgui;
+import org.opencv.imgproc.Imgproc;
+import org.opencv.calib3d.Calib3d;
+import org.opencv.core.Core;
+
+import org.json.JSONObject;
+import org.json.JSONException;
+
+import java.io.BufferedReader;
+import java.io.ByteArrayOutputStream;
+import java.io.File;
+import java.io.FileNotFoundException;
+import java.io.FileOutputStream;
+import java.io.FileReader;
+import java.io.IOException;
+import java.io.OutputStream;
+import java.io.OutputStreamWriter;
+import java.nio.ByteBuffer;
+import java.util.ArrayList;
+
+import android.opengl.GLES20;
+import javax.microedition.khronos.opengles.GL10;
+
+/**
+ *  This class does analysis on the recorded RVCVCXCheck data sets.
+ */
+public class RVCVXCheckAnalyzer {
+    private static final String TAG = "RVCXAnalysis";
+    private static final boolean LOCAL_LOGV = false;
+    private static final boolean LOCAL_LOGD = true;
+    private final String mPath;
+
+    private static final boolean OUTPUT_DEBUG_IMAGE = false;
+    private static final double VALID_FRAME_THRESHOLD = 0.8;
+    private static final double REPROJECTION_THREASHOLD = 4.0;
+    private static final boolean FORCE_CV_ANALYSIS  = false;
+    private static final boolean TRACE_VIDEO_ANALYSIS = false;
+    private static final double DECIMATION_FPS_TARGET = 15.0;
+
+    RVCVXCheckAnalyzer(String path)
+    {
+        mPath = path;
+    }
+
+    /**
+     * A class that contains the analysis results
+     */
+    class AnalyzeReport {
+        public boolean error=true;
+        public String reason = "incomplete";
+
+        // roll pitch yaw RMS error ( \sqrt{\frac{1}{n} \sum e_i^2 })
+        // unit in rad
+        public double roll_rms_error;
+        public double pitch_rms_error;
+        public double yaw_rms_error;
+
+        // roll pitch yaw max error
+        // unit in rad
+        public double roll_max_error;
+        public double pitch_max_error;
+        public double yaw_max_error;
+
+        // optimal t delta between sensor and camera data set to make best match
+        public double optimal_delta_t;
+        // the associate yaw offset based on initial values
+        public double yaw_offset;
+
+        public int n_of_frame;
+        public int n_of_valid_frame;
+
+        // both data below are in [sec]
+        public double sensor_period_avg;
+        public double sensor_period_stdev;
+
+        /**
+         * Write JSON-format serialization to a file in case future processing needs the data
+         */
+        public void writeToFile(File file) {
+            try {
+                writeJSONToStream(new FileOutputStream(file));
+            } catch (FileNotFoundException e) {
+                e.printStackTrace();
+                Log.e(TAG, "Cannot create analyze report file.");
+            }
+        }
+
+        /**
+         * Get the JSON format serialization
+         * @return JSON-format serialization as a String
+         */
+        @Override
+        public String toString() {
+            ByteArrayOutputStream s = new ByteArrayOutputStream();
+            writeJSONToStream(s);
+            return new String(s.toByteArray(),  java.nio.charset.StandardCharsets.UTF_8);
+        }
+
+        private void writeJSONToStream(OutputStream s) {
+            try{
+                JsonWriter writer =
+                        new JsonWriter(
+                                new OutputStreamWriter( s )
+                        );
+                writer.beginObject();
+                writer.setLenient(true);
+
+                writer.name("roll_rms_error").value(roll_rms_error);
+                writer.name("pitch_rms_error").value(pitch_rms_error);
+                writer.name("yaw_rms_error").value(yaw_rms_error);
+                writer.name("roll_max_error").value(roll_max_error);
+                writer.name("pitch_max_error").value(pitch_max_error);
+                writer.name("yaw_max_error").value(yaw_max_error);
+                writer.name("optimal_delta_t").value(optimal_delta_t);
+                writer.name("yaw_offset").value(yaw_offset);
+                writer.name("n_of_frame").value(n_of_frame);
+                writer.name("n_of_valid_frame").value(n_of_valid_frame);
+                writer.name("sensor_period_avg").value(sensor_period_avg);
+                writer.name("sensor_period_stdev").value(sensor_period_stdev);
+
+                writer.endObject();
+
+                writer.close();
+            } catch (IOException e) {
+                Log.e(TAG, "Error serializing analyze report to JSON");
+            } catch (IllegalArgumentException e) {
+                e.printStackTrace();
+                Log.e(TAG, "Invalid parameter to write into JSON format");
+            }
+        }
+    }
+
+    /**
+     *  Process data set stored in the path specified in constructor
+     *  and return an analyze report to caller
+     *
+     *  @return An AnalyzeReport that contains detailed information about analysis
+     */
+    public AnalyzeReport processDataSet() {
+        int nframe; // number of frames in the video
+        int nslog;  // number of sensor log entries
+        int nvlog;  // number of video-generated log entries
+
+
+        AnalyzeReport report = new AnalyzeReport();
+
+        ArrayList<AttitudeRec> srecs = new ArrayList<>();
+        ArrayList<AttitudeRec> vrecs = new ArrayList<>();
+        ArrayList<AttitudeRec> srecs2 = new ArrayList<>();
+
+
+        final boolean use_solved = new File(mPath, "vision_rpy.log").exists() && !FORCE_CV_ANALYSIS;
+
+        if (use_solved) {
+            nframe = nvlog = loadAttitudeRecs(new File(mPath, "vision_rpy.log"), vrecs);
+            nslog = loadAttitudeRecs(new File(mPath, "sensor_rpy.log"),srecs);
+        }else {
+            nframe = analyzeVideo(vrecs);
+            nvlog = vrecs.size();
+
+            if (LOCAL_LOGV) {
+                Log.v(TAG, "Post video analysis nvlog = " + nvlog + " nframe=" + nframe);
+            }
+            if (nvlog <= 0 || nframe <= 0) {
+                // invalid results
+                report.reason = "Unable to to load recorded video.";
+                return report;
+            }
+            if ((double) nvlog / nframe < VALID_FRAME_THRESHOLD) {
+                // too many invalid frames
+                report.reason = "Too many invalid frames.";
+                return report;
+            }
+
+            fixFlippedAxis(vrecs);
+
+            nslog = loadSensorLog(srecs);
+        }
+
+        // Gradient descent would be faster than this simple search, but overall run time is
+        // dominated by the vision processing, so the simple search is good enough.
+        double delta_t;
+        double min_rms = Double.MAX_VALUE;
+        double min_delta_t =0.;
+        double min_yaw_offset =0.;
+
+        // pre-allocation
+        for (AttitudeRec i: vrecs) {
+            srecs2.add(new AttitudeRec(0,0,0,0));
+        }
+
+        // find optimal offset
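+        // Brute-force search over time shifts in [-2.0, 2.0) s with 10 ms steps: resample the
+        // sensor log onto the vision timestamps at each shift and keep the shift that minimizes
+        // the combined roll + pitch RMS error; the yaw offset is taken from the first sample pair.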
+        for (delta_t = -2.0; delta_t<2.0; delta_t +=0.01) {
+            double rms;
+            resampleSensorLog(srecs, vrecs, delta_t, 0.0, srecs2);
+            rms = Math.sqrt(calcSqrErr(vrecs, srecs2, 0)+ calcSqrErr(vrecs, srecs2, 1));
+            if (rms < min_rms) {
+                min_rms = rms;
+                min_delta_t = delta_t;
+                min_yaw_offset = vrecs.get(0).yaw - srecs2.get(0).yaw;
+            }
+        }
+        // sample at optimal offset
+        resampleSensorLog(srecs, vrecs, min_delta_t, min_yaw_offset, srecs2);
+
+        if (!use_solved) {
+            dumpAttitudeRecs(new File(mPath, "vision_rpy.log"), vrecs);
+            dumpAttitudeRecs(new File(mPath, "sensor_rpy.log"), srecs);
+        }
+        dumpAttitudeRecs(new File(mPath, "sensor_rpy_resampled.log"), srecs2);
+        dumpAttitudeError(new File(mPath, "attitude_error.log"), vrecs, srecs2);
+
+        // fill report fields
+        report.roll_rms_error = Math.sqrt(calcSqrErr(vrecs, srecs2, 0));
+        report.pitch_rms_error = Math.sqrt(calcSqrErr(vrecs, srecs2, 1));
+        report.yaw_rms_error = Math.sqrt(calcSqrErr(vrecs, srecs2, 2));
+
+        report.roll_max_error = calcMaxErr(vrecs, srecs2, 0);
+        report.pitch_max_error = calcMaxErr(vrecs, srecs2, 1);
+        report.yaw_max_error = calcMaxErr(vrecs, srecs2, 2);
+
+        report.optimal_delta_t = min_delta_t;
+        report.yaw_offset = (min_yaw_offset);
+
+        report.n_of_frame = nframe;
+        report.n_of_valid_frame = nvlog;
+
+        double [] sensor_period_stat = calcSensorPeriodStat(srecs);
+        report.sensor_period_avg = sensor_period_stat[0];
+        report.sensor_period_stdev = sensor_period_stat[1];
+
+        // output report to file and log in JSON format as well
+        report.writeToFile(new File(mPath, "report.json"));
+        if (LOCAL_LOGV)    Log.v(TAG, "Report in JSON:" + report.toString());
+
+        report.reason = "Completed";
+        report.error = false;
+        return report;
+    }
+
+    /**
+     * Generate pattern geometry like this one
+     * http://docs.opencv.org/trunk/_downloads/acircles_pattern.png
+     *
+     * @return Array of 3D points
+     */
+    private MatOfPoint3f asymmetricalCircleGrid(Size size) {
+        final int cn = 3;
+
+        int n = (int)(size.width * size.height);
+        float positions[] = new float[n * cn];
+        float unit=0.02f;
+        MatOfPoint3f grid = new MatOfPoint3f();
+
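+        // Asymmetric circle grid: circles in odd rows are offset by one unit in x, so the
+        // x coordinate is (2 * column + row % 2) * unit and the y coordinate is row * unit,
+        // with z = 0 (planar target). Points are packed as interleaved (x, y, z) floats.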
+        for (int i = 0; i < size.height; i++) {
+            for (int j = 0; j < size.width * cn; j += cn) {
+                positions[(int) (i * size.width * cn + j + 0)] =
+                        (2 * (j / cn) + i % 2) * (float) unit;
+                positions[(int) (i * size.width * cn + j + 1)] =
+                        i * unit;
+                positions[(int) (i * size.width * cn + j + 2)] = 0;
+            }
+        }
+        grid.create(n, 1, CvType.CV_32FC3);
+        grid.put(0, 0, positions);
+        return grid;
+    }
+
+    /**
+     *  Create a camera intrinsic matrix using input parameters
+     *
+     *  The camera intrinsic matrix will be like:
+     *
+     *       +-                       -+
+     *       |  f   0    center.width  |
+     *   A = |  0   f    center.height |
+     *       |  0   0         1        |
+     *       +-                       -+
+     *
+     *  @return An approximated (not actually calibrated) camera matrix
+     */
+    private static Mat cameraMatrix(float f, Size center) {
+        final double [] data = {f, 0, center.width, 0, f, center.height, 0, 0, 1f};
+        Mat m = new Mat(3,3, CvType.CV_64F);
+        m.put(0, 0, data);
+        return m;
+    }
+
+    /**
+     *  Attitude record in time roll pitch yaw format.
+     *
+     */
+    private class AttitudeRec {
+        public double time;
+        public double roll;
+        public double pitch;
+        public double yaw;
+
+        // ctor
+        AttitudeRec(double atime, double aroll, double apitch, double ayaw) {
+            time = atime;
+            roll = aroll;
+            pitch = apitch;
+            yaw = ayaw;
+        }
+
+        // ctor
+        AttitudeRec(double atime, double [] rpy) {
+            time = atime;
+            roll = rpy[0];
+            pitch = rpy[1];
+            yaw = rpy[2];
+        }
+
+        // copy the value of another record into this one
+        void assign(AttitudeRec rec) {
+            time = rec.time;
+            roll = rec.roll;
+            pitch = rec.pitch;
+            yaw = rec.yaw;
+        }
+
+        // copy the roll-pitch-yaw values but use the time specified by atime
+        void assign(AttitudeRec rec, double atime) {
+            time = atime;
+            roll = rec.roll;
+            pitch = rec.pitch;
+            yaw = rec.yaw;
+        }
+
+        // set each field separately
+        void set(double atime, double aroll, double apitch, double ayaw) {
+            time = atime;
+            roll = aroll;
+            pitch = apitch;
+            yaw = ayaw;
+        }
+    }
+
+
+    /**
+     *  Load the sensor log in (time Roll-pitch-yaw) format to a ArrayList<AttitudeRec>
+     *
+     *  @return the number of sensor log items
+     */
+    private int loadSensorLog(ArrayList<AttitudeRec> recs) {
+        File csvFile = new File(mPath, "sensor.log");
+        BufferedReader br=null;
+        String line;
+
+        // preallocate and reuse
+        double [] quat = new double[4];
+        double [] rpy = new double[3];
+
+        double t0 = -1;
+
+        try {
+            br = new BufferedReader(new FileReader(csvFile));
+            while ((line = br.readLine()) != null) {
+                //space separator
+                String[] items = line.split(" ");
+
+                if (items.length != 5) {
+                    recs.clear();
+                    return -1;
+                }
+
+                quat[0] = Double.parseDouble(items[1]);
+                quat[1] = Double.parseDouble(items[2]);
+                quat[2] = Double.parseDouble(items[3]);
+                quat[3] = Double.parseDouble(items[4]);
+
+                // items[1..4] hold the quaternion as logged (w, x, y, z); convert to roll-pitch-yaw
+                quat2rpy(quat, rpy);
+
+                if (t0 < 0) {
+                    t0 = Long.parseLong(items[0])/1e9;
+                }
+                recs.add(new AttitudeRec(Long.parseLong(items[0])/1e9-t0, rpy));
+            }
+
+        } catch (FileNotFoundException e) {
+            e.printStackTrace();
+            Log.e(TAG, "Cannot find sensor logging data");
+        } catch (IOException e) {
+            e.printStackTrace();
+            Log.e(TAG, "Cannot read sensor logging data");
+        } finally {
+            if (br != null) {
+                try {
+                    br.close();
+                } catch (IOException e) {
+                    e.printStackTrace();
+                }
+            }
+        }
+
+        return recs.size();
+    }
+
+    /**
+     * Read video meta info
+     */
+    private class VideoMetaInfo {
+        public double fps;
+        public int frameWidth;
+        public int frameHeight;
+        public double fovWidth;
+        public double fovHeight;
+        public boolean valid = false;
+
+        VideoMetaInfo(File file) {
+
+            BufferedReader br=null;
+            String line;
+            String content="";
+            try {
+                br = new BufferedReader(new FileReader(file));
+                while ((line = br.readLine()) != null) {
+                    content = content +line;
+                }
+
+            } catch (FileNotFoundException e) {
+                e.printStackTrace();
+                Log.e(TAG, "Cannot find video meta info file");
+            } catch (IOException e) {
+                e.printStackTrace();
+                Log.e(TAG, "Cannot read video meta info file");
+            } finally {
+                if (br != null) {
+                    try {
+                        br.close();
+                    } catch (IOException e) {
+                        e.printStackTrace();
+                    }
+                }
+            }
+
+            if (content.isEmpty()) {
+                return;
+            }
+
+            try {
+                JSONObject json = new JSONObject(content);
+                frameWidth = json.getInt("width");
+                frameHeight = json.getInt("height");
+                fps = json.getDouble("frameRate");
+                fovWidth = json.getDouble("fovW")*Math.PI/180.0;
+                fovHeight = json.getDouble("fovH")*Math.PI/180.0;
+            } catch (JSONException e) {
+                return;
+            }
+
+            valid = true;
+
+        }
+    }
+
+
+
+    /**
+     * Debugging helper function, load ArrayList<AttitudeRec> from a file dumped out by
+     * dumpAttitudeRecs
+     */
+    private int loadAttitudeRecs(File file, ArrayList<AttitudeRec> recs) {
+        BufferedReader br=null;
+        String line;
+        double time;
+        double [] rpy = new double[3];
+
+        try {
+            br = new BufferedReader(new FileReader(file));
+            while ((line = br.readLine()) != null) {
+                //space separator
+                String[] items = line.split(" ");
+
+                if (items.length != 4) {
+                    recs.clear();
+                    return -1;
+                }
+
+                time = Double.parseDouble(items[0]);
+                rpy[0] = Double.parseDouble(items[1]);
+                rpy[1] = Double.parseDouble(items[2]);
+                rpy[2] = Double.parseDouble(items[3]);
+
+                recs.add(new AttitudeRec(time, rpy));
+            }
+
+        } catch (FileNotFoundException e) {
+            e.printStackTrace();
+            Log.e(TAG, "Cannot find AttitudeRecs file specified.");
+        } catch (IOException e) {
+            e.printStackTrace();
+            Log.e(TAG, "Read AttitudeRecs file failure");
+        } finally {
+            if (br != null) {
+                try {
+                    br.close();
+                } catch (IOException e) {
+                    e.printStackTrace();
+                }
+            }
+        }
+
+        return recs.size();
+    }
+    /**
+     * Debugging helper function, Dump an ArrayList<AttitudeRec> to a file
+     */
+    private void dumpAttitudeRecs(File file, ArrayList<AttitudeRec> recs) {
+        OutputStreamWriter w=null;
+        try {
+            w = new OutputStreamWriter(new FileOutputStream(file));
+
+            for (AttitudeRec r : recs) {
+                w.write(String.format("%f %f %f %f\r\n", r.time, r.roll, r.pitch, r.yaw));
+            }
+            w.close();
+        } catch(FileNotFoundException e) {
+            e.printStackTrace();
+            Log.e(TAG, "Cannot create AttitudeRecs file.");
+        } catch (IOException e) {
+            Log.e(TAG, "Write AttitudeRecs file failure");
+        } finally {
+            if (w!=null) {
+                try {
+                    w.close();
+                } catch (IOException e) {
+                    e.printStackTrace();
+                }
+            }
+        }
+    }
+
+    /**
+     *  Read the sensor log in ArrayList<AttitudeRec> format and find out the sensor sample time
+     *  statistics: mean and standard deviation.
+     *
+     *  @return A double array with exactly two items: [0] is the mean and [1] is the standard
+     *  deviation of the sample period.
+     *
+     */
+    private double [] calcSensorPeriodStat(ArrayList<AttitudeRec> srec)   {
+        double tp = srec.get(0).time;
+        int i;
+        double sum = 0.0;
+        double sumsq = 0.0;
+        for(i=1; i<srec.size(); ++i) {
+            double dt;
+            dt = srec.get(i).time - tp;
+            sum += dt;
+            sumsq += dt*dt;
+            tp += dt;
+        }
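+        // Period statistics over the N = srec.size() records: mean as sum / N and standard
+        // deviation as sqrt(sumsq / N - mean^2).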
+        double [] ret = new double[2];
+        ret[0] = sum/srec.size();
+        ret[1] = Math.sqrt(sumsq/srec.size() - ret[0]*ret[0]);
+        return ret;
+    }
+
+    /**
+     * Flip the yaw axis, as the images are flipped upside down in OpenGL frames
+     */
+    private void fixFlippedAxis(ArrayList<AttitudeRec> vrecs)   {
+        for (AttitudeRec i: vrecs) {
+            i.yaw = -i.yaw;
+        }
+    }
+
+    /**
+     *  Calculate the maximum error on the specified axis between two time aligned (resampled)
+     *  ArrayList<AttitudeRec>. The yaw axis needs special treatment, since errors of 0 and 2*pi
+     *  are equivalent.
+     *
+     * @param ra  one ArrayList of AttitudeRec
+     * @param rb  the other ArrayList of AttitudeRec
+     * @param axis axis id for the comparison (0 = roll, 1 = pitch, 2 = yaw)
+     * @return Maximum error
+     */
+    private double calcMaxErr(ArrayList<AttitudeRec> ra, ArrayList<AttitudeRec> rb, int axis)  {
+        // check if they are valid and comparable data
+        if (ra.size() != rb.size()) {
+            throw new ArrayIndexOutOfBoundsException("Two array has to be the same");
+        }
+        // check input parameter validity
+        if (axis<0 || axis > 2) {
+            throw new IllegalArgumentException("Invalid data axis.");
+        }
+
+        int i;
+        double max = 0.0;
+        double diff = 0.0;
+        for(i=0; i<ra.size(); ++i) {
+            // make sure they are aligned data
+            if (ra.get(i).time != rb.get(i).time) {
+                throw new IllegalArgumentException("Element "+i+
+                        " of two inputs has different time.");
+            }
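+            // For yaw (case 2), the expression wraps the angular difference so that its
+            // magnitude lies in [0, pi]; e.g. 359 degrees vs. 0 degrees counts as 1 degree of error.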
+            switch(axis) {
+                case 0:
+                    diff = ra.get(i).roll - rb.get(i).roll; // these are always opposites of each other
+                    break;
+                case 1:
+                    diff = ra.get(i).pitch - rb.get(i).pitch;
+                    break;
+                case 2:
+                    diff = Math.abs(((4*Math.PI + ra.get(i).yaw - rb.get(i).yaw)%(2*Math.PI))
+                            -Math.PI)-Math.PI;
+                    break;
+            }
+            diff = Math.abs(diff);
+            if (diff>max) {
+                max = diff;
+            }
+        }
+        return max;
+    }
+
+    /**
+     *  Calculate the RMS error on the specified axis between two time aligned (resampled)
+     *  ArrayList<AttitudeRec>. The yaw axis needs special treatment, since errors of 0 and 2*pi
+     *  are equivalent.
+     *
+     * @param ra  one ArrayList of AttitudeRec
+     * @param rb  the other ArrayList of AttitudeRec
+     * @param axis axis id for the comparison (0 = roll, 1 = pitch, 2 = yaw)
+     * @return Mean square error
+     */
+    private double calcSqrErr(ArrayList<AttitudeRec> ra, ArrayList<AttitudeRec> rb, int axis) {
+        // check if they are valid and comparable data
+        if (ra.size() != rb.size()) {
+            throw new ArrayIndexOutOfBoundsException("Two array has to be the same");
+        }
+        // check input parameter validity
+        if (axis<0 || axis > 2) {
+            throw new IllegalArgumentException("Invalid data axis.");
+        }
+
+        int i;
+        double sum = 0.0;
+        double diff = 0.0;
+        for(i=0; i<ra.size(); ++i) {
+            // check input data validity
+            if (ra.get(i).time != rb.get(i).time) {
+                throw new IllegalArgumentException("Element "+i+
+                        " of two inputs has different time.");
+            }
+
+            switch(axis) {
+                case 0:
+                    diff = ra.get(i).roll - rb.get(i).roll;
+                    break;
+                case 1:
+                    diff = ra.get(i).pitch - rb.get(i).pitch;
+                    break;
+                case 2:
+                    diff = Math.abs(((4*Math.PI + ra.get(i).yaw - rb.get(i).yaw)%(2*Math.PI))-
+                            Math.PI)-Math.PI;
+                    break;
+            }
+
+            sum += diff*diff;
+        }
+        return sum/ra.size();
+    }
+
+    /**
+     * Debugging helper function. Dump the error between two time aligned ArrayList<AttitudeRec>'s
+     *
+     * @param file File to write to
+     * @param ra  one ArrayList of AttitudeRec
+     * @param rb  the other ArrayList of AttitudeRec
+     */
+    private void dumpAttitudeError(File file, ArrayList<AttitudeRec> ra, ArrayList<AttitudeRec> rb){
+        if (ra.size() != rb.size()) {
+            throw new ArrayIndexOutOfBoundsException("Two array has to be the same");
+        }
+
+        int i;
+
+        ArrayList<AttitudeRec> rerr = new ArrayList<>();
+        for(i=0; i<ra.size(); ++i) {
+            if (ra.get(i).time != rb.get(i).time) {
+                throw new IllegalArgumentException("Element " + i
+                        + " of the two inputs has a different timestamp.");
+            }
+
+            rerr.add(new AttitudeRec(ra.get(i).time, ra.get(i).roll - rb.get(i).roll,
+                    ra.get(i).pitch - rb.get(i).pitch,
+                    (Math.abs(((4*Math.PI + ra.get(i).yaw - rb.get(i).yaw)%(2*Math.PI))
+                            -Math.PI)-Math.PI)));
+
+        }
+        dumpAttitudeRecs(file, rerr);
+    }
+
+    /**
+     * Resample one ArrayList<AttitudeRec> with respect to another ArrayList<AttitudeRec>
+     *
+     * @param rec           the ArrayList of AttitudeRec to be resampled
+     * @param timebase      the other ArrayList of AttitudeRec that serves as the time base
+     * @param delta_t       time offset applied before resampling
+     * @param yaw_offset    offset applied to the yaw axis
+     * @param resampled     output ArrayList of AttitudeRec
+     */
+    private void resampleSensorLog(ArrayList<AttitudeRec> rec, ArrayList<AttitudeRec> timebase,
+            double delta_t, double yaw_offset, ArrayList<AttitudeRec> resampled)    {
+        int i;
+        int j = -1;
+        for(i=0; i<timebase.size(); i++) {
+            double time = timebase.get(i).time + delta_t;
+
+            while(j<rec.size()-1 && rec.get(j+1).time < time) j++;
+
+            if (j == -1) {
+                //use first
+                resampled.get(i).assign(rec.get(0), timebase.get(i).time);
+            } else if (j == rec.size()-1) {
+                // use last
+                resampled.get(i).assign(rec.get(j), timebase.get(i).time);
+            } else {
+                // do linear resample
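+                // alpha is the fractional position of the requested time between the two
+                // surrounding samples rec[j] and rec[j+1]; each attitude component is then
+                // linearly interpolated between those two samples.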
+                double alpha = (time - rec.get(j).time)/((rec.get(j+1).time - rec.get(j).time));
+                double roll = (1-alpha) * rec.get(j).roll + alpha * rec.get(j+1).roll;
+                double pitch = (1-alpha) * rec.get(j).pitch + alpha * rec.get(j+1).pitch;
+                double yaw = (1-alpha) * rec.get(j).yaw + alpha * rec.get(j+1).yaw + yaw_offset;
+                resampled.get(i).set(timebase.get(i).time, roll, pitch, yaw);
+            }
+        }
+    }
+
+    /**
+     * Analyze video frames using a computer vision approach and generate an ArrayList<AttitudeRec>
+     *
+     * @param recs  output ArrayList of AttitudeRec
+     * @return total number of frames in the video
+     */
+    private int analyzeVideo(ArrayList<AttitudeRec> recs) {
+        VideoMetaInfo meta = new VideoMetaInfo(new File(mPath, "videometa.json"));
+
+        int decimation = 1;
+
+        if (meta.fps > DECIMATION_FPS_TARGET) {
+            decimation = (int)(meta.fps / DECIMATION_FPS_TARGET);
+            meta.fps /=decimation;
+        }
+
+        VideoDecoderForOpenCV videoDecoder = new VideoDecoderForOpenCV(
+                new File(mPath, "video.mp4"), decimation); // process 1 out of every "decimation" frames
+
+
+        Mat frame;
+        Mat gray = new Mat();
+        int i = -1;
+
+        Size frameSize = videoDecoder.getSize();
+
+        if (frameSize.width != meta.frameWidth || frameSize.height != meta.frameHeight) {
+            // this is very unlikely
+            return -1;
+        }
+
+        if (TRACE_VIDEO_ANALYSIS) {
+            Debug.startMethodTracing("cvprocess");
+        }
+
+        Size patternSize = new Size(4,11);
+
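+        // Pinhole model: the focal length in pixels follows from the horizontal field of view,
+        // fc = (imageWidth / 2) / tan(fovWidth / 2). cameraMatrix() (defined elsewhere in this
+        // file) presumably builds the 3x3 intrinsic matrix from fc and the principal point.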
+        float fc = (float)(meta.frameWidth/2.0/Math.tan(meta.fovWidth/2.0));
+        Mat camMat = cameraMatrix(fc, new Size(frameSize.width/2, frameSize.height/2));
+        MatOfDouble coeff = new MatOfDouble(); // dummy
+
+        MatOfPoint2f centers = new MatOfPoint2f();
+        MatOfPoint3f grid = asymmetricalCircleGrid(patternSize);
+        Mat rvec = new MatOfFloat();
+        Mat tvec = new MatOfFloat();
+
+        MatOfPoint2f reprojCenters = new MatOfPoint2f();
+
+        if (LOCAL_LOGV) {
+            Log.v(TAG, "Camera Mat = \n" + camMat.dump());
+        }
+
+        long startTime = System.nanoTime();
+
+        while ((frame = videoDecoder.getFrame()) !=null) {
+            if (LOCAL_LOGV) {
+                Log.v(TAG, "got a frame " + i);
+            }
+
+            // has to be incremented up front, as some iterations skip
+            // the rest of this loop body via "continue"
+            i++;
+
+            // convert to gray manually as by default findCirclesGridDefault uses COLOR_BGR2GRAY
+            Imgproc.cvtColor(frame, gray, Imgproc.COLOR_RGB2GRAY);
+
+            boolean foundPattern = Calib3d.findCirclesGridDefault(
+                    gray,  patternSize, centers, Calib3d.CALIB_CB_ASYMMETRIC_GRID);
+
+            if (!foundPattern) {
+                // skip to next frame
+                continue;
+            }
+
+            if (OUTPUT_DEBUG_IMAGE) {
+                Calib3d.drawChessboardCorners(frame, patternSize, centers, true);
+            }
+
+            // Figure out the extrinsic parameters using the ground-truth 3D points and the pixel
+            // positions of the blobs found by findCirclesGrid; an estimated camera matrix and
+            // zero distortion are assumed.
+            boolean foundSolution =
+                    Calib3d.solvePnP(grid, centers, camMat, coeff, rvec, tvec,
+                            false, Calib3d.CV_ITERATIVE);
+
+            if (!foundSolution) {
+                // skip to next frame
+                continue;
+            }
+
+            // reproject the points to evaluate the accuracy of the solvePnP result
+            Calib3d.projectPoints(grid, rvec, tvec, camMat, coeff, reprojCenters);
+
+            // the error is evaluated with the L2 norm, which is the real pixel-distance error
+            // divided by sqrt(2)
+            double error = Core.norm(centers, reprojCenters, Core.NORM_L2);
+
+            if (LOCAL_LOGV) {
+                Log.v(TAG, "Found attitude, re-projection error = " + error);
+            }
+
+            // if error is reasonable, add it into the results
+            if (error < REPROJECTION_THREASHOLD) {
+                double [] rv = new double[3];
+                rvec.get(0,0, rv);
+                recs.add(new AttitudeRec((double) i / meta.fps, rodr2rpy(rv)));
+            }
+
+            if (OUTPUT_DEBUG_IMAGE) {
+                Calib3d.drawChessboardCorners(frame, patternSize, reprojCenters, true);
+                Highgui.imwrite(Environment.getExternalStorageDirectory().getPath()
+                        + "/RVCVRecData/DebugCV/img" + i + ".png", frame);
+            }
+        }
+
+        if (LOCAL_LOGV) {
+            Log.v(TAG, "Finished decoding");
+        }
+
+        if (TRACE_VIDEO_ANALYSIS) {
+            Debug.stopMethodTracing();
+        }
+
+        if (LOCAL_LOGV) {
+            // time analysis
+            double totalTime = (System.nanoTime()-startTime)/1e9;
+            Log.i(TAG, "Total time: "+totalTime +"s, Per frame time: "+totalTime/i );
+        }
+        return i;
+    }
+
+    /**
+     * OpenCV for Android does not yet support VideoCapture from a file, so this is a makeshift
+     * solution until it is supported: the video is decoded with MediaCodec onto an offscreen
+     * surface, read back with glReadPixels, and handed to the analysis thread as OpenCV Mats.
+     * One known issue is that glReadPixels is quite slow: around 6.5 ms for a 720p frame.
+     */
+    private class VideoDecoderForOpenCV implements Runnable {
+        private MediaExtractor extractor=null;
+        private MediaCodec decoder=null;
+        private CtsMediaOutputSurface surface=null;
+
+        private MatBuffer mMatBuffer;
+
+        private final File mVideoFile;
+
+        private boolean valid;
+        private Object setupSignal;
+
+        private Thread mThread;
+        private int mDecimation;
+
+        /**
+         * Constructor
+         * @param file video file
+         * @param decimation process one frame out of every "decimation" frames
+         */
+        VideoDecoderForOpenCV(File file, int decimation) {
+            mVideoFile = file;
+            mDecimation = decimation;
+            valid = false;
+
+            start();
+        }
+
+        /**
+         * Constructor
+         * @param file video file
+         */
+        VideoDecoderForOpenCV(File file)   {
+            this(file, 1);
+        }
+
+        /**
+         * Test if the video decoder is in a valid state, ready to output video.
+         * @return true if valid, false otherwise.
+         */
+        public boolean isValid() {
+            return valid;
+        }
+
+        private void start() {
+            setupSignal = new Object();
+            mThread = new Thread(this);
+            mThread.start();
+
+            synchronized (setupSignal) {
+                try {
+                    setupSignal.wait();
+                } catch (InterruptedException e) {
+                    Log.e(TAG, "Interrupted when waiting for video decoder setup ready");
+                }
+            }
+        }
+        private void stop() {
+            if (mThread != null) {
+                mThread.interrupt();
+                try {
+                    mThread.join();
+                } catch (InterruptedException e) {
+                    Log.e(TAG, "Interrupted when waiting for video decoder thread to stop");
+                }
+                try {
+                    decoder.stop();
+                } catch (IllegalStateException e) {
+                    Log.e(TAG, "Video decoder is not in a state that can be stopped");
+                }
+            }
+            mThread = null;
+        }
+
+        void teardown() {
+            if (decoder!=null) {
+                decoder.release();
+                decoder = null;
+            }
+            if (surface!=null) {
+                surface.release();
+                surface = null;
+            }
+            if (extractor!=null) {
+                extractor.release();
+                extractor = null;
+            }
+        }
+
+        void setup() {
+            int width=0, height=0;
+
+            extractor = new MediaExtractor();
+
+            try {
+                extractor.setDataSource(mVideoFile.getPath());
+            } catch (IOException e) {
+                return;
+            }
+
+            for (int i = 0; i < extractor.getTrackCount(); i++) {
+                MediaFormat format = extractor.getTrackFormat(i);
+                String mime = format.getString(MediaFormat.KEY_MIME);
+                width = format.getInteger(MediaFormat.KEY_WIDTH);
+                height = format.getInteger(MediaFormat.KEY_HEIGHT);
+
+                if (mime.startsWith("video/")) {
+                    extractor.selectTrack(i);
+                    try {
+                        decoder = MediaCodec.createDecoderByType(mime);
+                    } catch (IOException e) {
+                        continue;
+                    }
+                    // Decode to surface
+                    //decoder.configure(format, surface, null, 0);
+
+                    // Decode to offscreen surface
+                    surface = new CtsMediaOutputSurface(width, height);
+                    mMatBuffer = new MatBuffer(width, height);
+
+                    decoder.configure(format, surface.getSurface(), null, 0);
+                    break;
+                }
+            }
+
+            if (decoder == null) {
+                Log.e("VideoDecoderForOpenCV", "Can't find video info!");
+                return;
+            }
+            valid = true;
+        }
+
+        @Override
+        public void run() {
+            setup();
+
+            synchronized (setupSignal) {
+                setupSignal.notify();
+            }
+
+            if (!valid) {
+                return;
+            }
+
+            decoder.start();
+
+            ByteBuffer[] inputBuffers = decoder.getInputBuffers();
+            ByteBuffer[] outputBuffers = decoder.getOutputBuffers();
+            MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
+
+            boolean isEOS = false;
+            long startMs = System.currentTimeMillis();
+            long timeoutUs = 10000;
+
+            int iframe = 0;
+
+            while (!Thread.interrupted()) {
+                if (!isEOS) {
+                    int inIndex = decoder.dequeueInputBuffer(10000);
+                    if (inIndex >= 0) {
+                        ByteBuffer buffer = inputBuffers[inIndex];
+                        int sampleSize = extractor.readSampleData(buffer, 0);
+                        if (sampleSize < 0) {
+                            if (LOCAL_LOGD) {
+                                Log.d("VideoDecoderForOpenCV",
+                                        "InputBuffer BUFFER_FLAG_END_OF_STREAM");
+                            }
+                            decoder.queueInputBuffer(inIndex, 0, 0, 0,
+                                    MediaCodec.BUFFER_FLAG_END_OF_STREAM);
+                            isEOS = true;
+                        } else {
+                            decoder.queueInputBuffer(inIndex, 0, sampleSize,
+                                    extractor.getSampleTime(), 0);
+                            extractor.advance();
+                        }
+                    }
+                }
+
+                int outIndex = decoder.dequeueOutputBuffer(info, 10000);
+                MediaFormat outFormat;
+                switch (outIndex) {
+                    case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED:
+                        if (LOCAL_LOGD) {
+                            Log.d("VideoDecoderForOpenCV", "INFO_OUTPUT_BUFFERS_CHANGED");
+                        }
+                        outputBuffers = decoder.getOutputBuffers();
+                        break;
+                    case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:
+                        outFormat = decoder.getOutputFormat();
+                        if (LOCAL_LOGD) {
+                            Log.d("VideoDecoderForOpenCV", "New format " + outFormat);
+                        }
+                        break;
+                    case MediaCodec.INFO_TRY_AGAIN_LATER:
+                        if (LOCAL_LOGD) {
+                            Log.d("VideoDecoderForOpenCV", "dequeueOutputBuffer timed out!");
+                        }
+                        break;
+                    default:
+
+                        ByteBuffer buffer = outputBuffers[outIndex];
+                        boolean doRender = (info.size != 0);
+
+                        // As soon as we call releaseOutputBuffer, the buffer will be forwarded
+                        // to SurfaceTexture to convert to a texture.  The API doesn't
+                        // guarantee that the texture will be available before the call
+                        // returns, so we need to wait for the onFrameAvailable callback to
+                        // fire.  If we don't wait, we risk rendering from the previous frame.
+                        decoder.releaseOutputBuffer(outIndex, doRender);
+
+                        if (doRender) {
+                            surface.awaitNewImage();
+                            surface.drawImage();
+                            if (LOCAL_LOGD) {
+                                Log.d("VideoDecoderForOpenCV", "Finish drawing a frame!");
+                            }
+                            if ((iframe++ % mDecimation) == 0) {
+                                //Send the frame for processing
+                                mMatBuffer.put();
+                            }
+                        }
+                        break;
+                }
+
+                if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
+                    if (LOCAL_LOGD) {
+                        Log.d("VideoDecoderForOpenCV", "OutputBuffer BUFFER_FLAG_END_OF_STREAM");
+                    }
+                    break;
+                }
+            }
+            mMatBuffer.invalidate();
+
+            decoder.stop();
+
+            teardown();
+            mThread = null;
+        }
+
+
+        /**
+         * Get next valid frame
+         * @return Frame in OpenCV mat
+         */
+        public Mat getFrame() {
+            return mMatBuffer.get();
+        }
+
+        /**
+         * Get the size of the frame
+         * @return size of the frame
+         */
+        Size getSize() {
+            return mMatBuffer.getSize();
+        }
+
+        /**
+         * A synchronized single-slot frame buffer. The decoder thread calls put() after drawing
+         * a frame to the offscreen surface, and the analysis thread calls get() to retrieve it
+         * as an OpenCV Mat; wait()/notifyAll() hand frames between the two threads.
+         */
+        class MatBuffer {
+            private Mat mat;
+            private byte[] bytes;
+            private ByteBuffer buf;
+            private boolean full;
+
+            private int mWidth, mHeight;
+            private boolean mValid = false;
+
+            MatBuffer(int width, int height) {
+                mWidth = width;
+                mHeight = height;
+
+                mat = new Mat(height, width, CvType.CV_8UC4); //RGBA
+                buf = ByteBuffer.allocateDirect(width*height*4);
+                bytes = new byte[width*height*4];
+
+                mValid = true;
+                full = false;
+            }
+
+            public synchronized void invalidate() {
+                mValid = false;
+                notifyAll();
+            }
+
+            public synchronized Mat get() {
+
+                if (!mValid) return null;
+                while (!full) {
+                    try {
+                        wait();
+                        if (!mValid) return null;
+                    } catch (InterruptedException e) {
+                        return null;
+                    }
+                }
+                mat.put(0,0, bytes);
+                full = false;
+                notifyAll();
+                return mat;
+            }
+            public synchronized void put() {
+                while (full) {
+                    try {
+                        wait();
+                    } catch (InterruptedException e) {
+                        Log.e(TAG, "Interrupted when waiting for space in buffer");
+                    }
+                }
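+                // Read the pixels of the frame just drawn onto the offscreen GL surface into
+                // the direct buffer; get() copies them into the OpenCV Mat on the consumer thread.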
+                GLES20.glReadPixels(0, 0, mWidth, mHeight, GL10.GL_RGBA,
+                        GL10.GL_UNSIGNED_BYTE, buf);
+                buf.get(bytes);
+                buf.rewind();
+
+                full = true;
+                notifyAll();
+            }
+
+            public Size getSize() {
+                if (valid) {
+                    return mat.size();
+                }
+                return new Size();
+            }
+        }
+    }
+
+
+    /* a small set of math functions */
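+    // Quaternions are ordered (w, x, y, z), i.e. q[0] is the scalar part. The standard
+    // quaternion-to-Euler conversion used below is:
+    //   roll  = atan2(2*(w*x + y*z), 1 - 2*(x*x + y*y))
+    //   pitch = asin(2*(w*y - z*x))
+    //   yaw   = atan2(2*(w*z + x*y), 1 - 2*(y*y + z*z))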
+    private static double [] quat2rpy( double [] q) {
+        double [] rpy = {Math.atan2(2*(q[0]*q[1]+q[2]*q[3]), 1-2*(q[1]*q[1]+q[2]*q[2])),
+                Math.asin(2*(q[0]*q[2] - q[3]*q[1])),
+                Math.atan2(2*(q[0]*q[3]+q[1]*q[2]), 1-2*(q[2]*q[2]+q[3]*q[3]))};
+        return rpy;
+    }
+
+    private static void quat2rpy( double [] q, double[] rpy) {
+        rpy[0] = Math.atan2(2*(q[0]*q[1]+q[2]*q[3]), 1-2*(q[1]*q[1]+q[2]*q[2]));
+        rpy[1] = Math.asin(2*(q[0]*q[2] - q[3]*q[1]));
+        rpy[2] = Math.atan2(2*(q[0]*q[3]+q[1]*q[2]), 1-2*(q[2]*q[2]+q[3]*q[3]));
+    }
+
+    private static Mat quat2rpy(Mat quat) {
+        double [] q = new double[4];
+        quat.get(0,0,q);
+
+        double [] rpy = {Math.atan2(2*(q[0]*q[1]+q[2]*q[3]), 1-2*(q[1]*q[1]+q[2]*q[2])),
+                Math.asin(2*(q[0]*q[2] - q[3]*q[1])),
+                Math.atan2(2*(q[0]*q[3]+q[1]*q[2]), 1-2*(q[2]*q[2]+q[3]*q[3]))};
+
+        Mat rpym = new Mat(3,1, CvType.CV_64F);
+        rpym.put(0,0, rpy);
+        return rpym;
+    }
+
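+    // A Rodrigues vector r encodes a rotation of angle t = |r| about the axis r / |r|;
+    // the corresponding quaternion is (cos(t/2), sin(t/2) * r / t). Note that t == 0
+    // (no rotation) would produce NaNs here, so callers are expected to pass a non-zero
+    // rotation.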
+    private static double [] rodr2quat( double [] r) {
+        double t = Math.sqrt(r[0]*r[0]+r[1]*r[1]+r[2]*r[2]);
+        double [] quat = {Math.cos(t/2), Math.sin(t/2)*r[0]/t,Math.sin(t/2)*r[1]/t,
+                Math.sin(t/2)*r[2]/t};
+        return quat;
+    }
+
+    private static void rodr2quat( double [] r, double [] quat) {
+        double t = Math.sqrt(r[0]*r[0]+r[1]*r[1]+r[2]*r[2]);
+        quat[0] = Math.cos(t/2);
+        quat[1] = Math.sin(t/2)*r[0]/t;
+        quat[2] = Math.sin(t/2)*r[1]/t;
+        quat[3] = Math.sin(t/2)*r[2]/t;
+    }
+
+    private static Mat rodr2quat(Mat rodr) {
+        double t = Core.norm(rodr);
+        double [] r = new double[3];
+        rodr.get(0,0,r);
+
+        double [] quat = {Math.cos(t/2), Math.sin(t/2)*r[0]/t,Math.sin(t/2)*r[1]/t,
+                Math.sin(t/2)*r[2]/t};
+        Mat quatm = new Mat(4,1, CvType.CV_64F);
+        quatm.put(0, 0, quat);
+        return quatm;
+    }
+
+    private static double [] rodr2rpy( double [] r) {
+        return quat2rpy(rodr2quat(r));
+    }
+    //////////////////
+
+}
diff --git a/apps/CtsVerifier/src/com/android/cts/verifier/sensors/RVCVXCheckTestActivity.java b/apps/CtsVerifier/src/com/android/cts/verifier/sensors/RVCVXCheckTestActivity.java
new file mode 100644
index 0000000..ffb0d85
--- /dev/null
+++ b/apps/CtsVerifier/src/com/android/cts/verifier/sensors/RVCVXCheckTestActivity.java
@@ -0,0 +1,318 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.cts.verifier.sensors;
+
+
+import android.hardware.cts.helpers.SensorTestStateNotSupportedException;
+import android.os.Bundle;
+
+import com.android.cts.verifier.sensors.base.SensorCtsVerifierTestActivity;
+import com.android.cts.verifier.sensors.helpers.OpenCVLibrary;
+
+import junit.framework.Assert;
+
+import android.content.Intent;
+
+import java.util.concurrent.CountDownLatch;
+
+/**
+ * This test (Rotation Vector - Computer Vision Cross Check, or RVCVXCheck for short) verifies
+ * that a mobile device can detect its own orientation in a relatively accurate manner.
+ *
+ * Currently only the ROTATION_VECTOR sensor is used.
+ *
+ */
+public class RVCVXCheckTestActivity
+        extends SensorCtsVerifierTestActivity {
+    public RVCVXCheckTestActivity() {
+        super(RVCVXCheckTestActivity.class);
+    }
+
+    CountDownLatch mRecordActivityFinishedSignal = null;
+
+    private static final int REQ_CODE_TXCVRECORD = 0x012345678;
+    private static final boolean TEST_USING_DEBUGGING_DATA = false;
+    private static final String PATH_DEBUGGING_DATA = "/sdcard/RXCVRecData/150313-014443/";
+
+    private String mRecPath;
+
+    RVCVXCheckAnalyzer.AnalyzeReport mReport = null;
+
+    private boolean mRecordSuccessful = false;
+    private boolean mOpenCVLoadSuccessful = false;
+
+
+    /**
+     * The activity setup collects all the required data for the test cases.
+     * This approach allows testing all sensors at once.
+     */
+    @Override
+    protected void activitySetUp() throws InterruptedException {
+
+        mRecPath = "";
+
+        showUserMessage("Loading OpenCV Library...");
+        int retry = 10;
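+        // Poll for up to ~1 second (10 x 100 ms) for the asynchronous OpenCV loader that was
+        // started in onCreate().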
+
+        while (retry-- > 0) {
+            try {
+                Thread.sleep(100);
+            } catch (InterruptedException e) {
+                //
+            }
+            if (OpenCVLibrary.isLoaded()) {
+                break;
+            }
+        }
+        if (!OpenCVLibrary.isLoaded()) {
+            // failed requirement test
+            clearText();
+            return;
+        }
+        showUserMessage("OpenCV Library Successfully Loaded");
+
+        mOpenCVLoadSuccessful = true;
+
+        if (TEST_USING_DEBUGGING_DATA) {
+            mRecPath = PATH_DEBUGGING_DATA;
+
+            // assume the data is there already
+            mRecordSuccessful = true;
+        } else {
+            showUserMessage("Take the test as instructed below:\n" +
+                "1. Print out the test pattern and place it on a " +
+                   "horizontal surface.\n" +
+                "2. Start the test and roughly align the yellow square on the screen " +
+                   "with the yellow square on the printed pattern.\n" +
+                "3. Follow the prompt to rotate the phone while keeping the " +
+                   "entire test pattern inside the camera's view. This requires " +
+                   "orbiting the phone around the pattern and aiming the " +
+                   "camera at it at the same time.\n" +
+                "4. Wait patiently for the analysis to finish.\n");
+
+            waitForUserToContinue();
+
+            // prepare sync signal
+            mRecordActivityFinishedSignal = new CountDownLatch(1);
+
+            // record both sensor and camera
+            Intent intent = new Intent(this, RVCVRecordActivity.class);
+            startActivityForResult(intent, REQ_CODE_TXCVRECORD);
+
+            // wait for record finish
+            mRecordActivityFinishedSignal.await();
+
+            if ("".equals(mRecPath)) {
+                showUserMessage("Recording failed or exited prematurely.");
+                waitForUserToContinue();
+            } else {
+                showUserMessage("Recording is done!");
+                showUserMessage("Results are in path: " + mRecPath);
+                mRecordSuccessful = true;
+            }
+        }
+
+
+        if (mRecordSuccessful) {
+            showUserMessage("Please wait for the analysis ... \n" +
+                            "It may take a few minutes; you will be notified by " +
+                            "sound and vibration when it is finished.");
+
+            // Analyze the recorded video and sensor data using RVCVXCheckAnalyzer
+            RVCVXCheckAnalyzer analyzer = new RVCVXCheckAnalyzer(mRecPath);
+            mReport = analyzer.processDataSet();
+
+            playSound();
+            vibrate(500);
+
+            if (mReport == null) {
+                showUserMessage("Analysis failed for an unknown reason!");
+            } else {
+                if (mReport.error) {
+                    showUserMessage("Analysis failed: " + mReport.reason);
+                } else {
+                    showUserMessage(String.format("Analysis finished!\n" +
+                                    "Roll error (RMS, max) = %4.3f, %4.3f rad\n" +
+                                    "Pitch error (RMS, max) = %4.3f, %4.3f rad\n" +
+                                    "Yaw error (RMS, max) = %4.3f, %4.3f rad\n" +
+                                    "N of frames (valid, total) = %d, %d\n" +
+                                    "Sensor period (mean, stdev) = %4.3f, %4.3f ms\n" +
+                                    "Time offset: %4.3f s \n" +
+                                    "Yaw offset: %4.3f rad \n\n",
+                            mReport.roll_rms_error, mReport.roll_max_error,
+                            mReport.pitch_rms_error, mReport.pitch_max_error,
+                            mReport.yaw_rms_error, mReport.yaw_max_error,
+                            mReport.n_of_valid_frame, mReport.n_of_frame,
+                            mReport.sensor_period_avg * 1000.0, mReport.sensor_period_stdev*1000.0,
+                            mReport.optimal_delta_t, mReport.yaw_offset));
+                    showUserMessage("Please click Next after reviewing the details.");
+                    waitForUserToContinue();
+                }
+            }
+        }
+        clearText();
+    }
+
+    /**
+    Receives the result from RVCVRecordActivity, which is the path where the recorded
+    video and sensor data are stored.
+    */
+    @Override
+    protected void onActivityResult(int requestCode, int resultCode, Intent data) {
+        // Check which request we're responding to
+        if (requestCode == REQ_CODE_TXCVRECORD) {
+            // Make sure the request was successful
+
+            if (resultCode == RESULT_OK) {
+                mRecPath = data.getData().getPath();
+            }
+
+            // notify it is finished
+            mRecordActivityFinishedSignal.countDown();
+        }
+        super.onActivityResult(requestCode, resultCode, data);
+    }
+
+    /**
+     * Test cases.
+     */
+
+    public String test00OpenCV() throws Throwable {
+
+        String message = "OpenCV is loaded";
+        Assert.assertTrue("OpenCV library cannot be loaded.", mOpenCVLoadSuccessful);
+        return message;
+    }
+
+
+    public String test01Recording() throws Throwable {
+
+        loadOpenCVSuccessfulOrSkip();
+
+        String message = "Record is successful.";
+        Assert.assertTrue("Record is not successful.", mRecordSuccessful);
+        return message;
+    }
+
+    public String test02Analysis() throws Throwable {
+
+        loadOpenCVSuccessfulOrSkip();
+        recordSuccessfulOrSkip();
+
+        String message = "Analysis result: "
+                + (mReport == null ? "no report" : mReport.reason);
+        Assert.assertTrue(message, (mReport != null && !mReport.error));
+        return message;
+    }
+
+    public String test1RollAxis() throws Throwable {
+
+        loadOpenCVSuccessfulOrSkip();
+        recordSuccessfulOrSkip();
+        analyzeSuccessfulOrSkip();
+
+        String message = "Test Roll Axis Accuracy";
+
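+        // assertEquals(expected, actual, delta) is used as a threshold check here: the roll
+        // error must stay within 0.15 rad RMS and 0.35 rad peak.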
+        Assert.assertEquals("Roll RMS error", 0.0, mReport.roll_rms_error, 0.15);
+        Assert.assertEquals("Roll max error", 0.0, mReport.roll_max_error, 0.35);
+        return message;
+    }
+
+    public String test2PitchAxis() throws Throwable {
+
+        loadOpenCVSuccessfulOrSkip();
+        recordSuccessfulOrSkip();
+        analyzeSuccessfulOrSkip();
+
+        String message = "Test Pitch Axis Accuracy";
+
+        Assert.assertEquals("Pitch RMS error", 0.0, mReport.pitch_rms_error, 0.15);
+        Assert.assertEquals("Pitch max error", 0.0, mReport.pitch_max_error, 0.35);
+        return message;
+    }
+
+    public String test3YawAxis() throws Throwable {
+
+        loadOpenCVSuccessfulOrSkip();
+        recordSuccessfulOrSkip();
+        analyzeSuccessfulOrSkip();
+
+        String message = "Test Yaw Axis Accuracy";
+
+        Assert.assertEquals("Yaw RMS error", 0.0, mReport.yaw_rms_error, 0.2);
+        Assert.assertEquals("Yaw max error", 0.0, mReport.yaw_max_error, 0.4);
+        return message;
+    }
+
+    public String test4SensorPeriod() throws Throwable {
+
+        loadOpenCVSuccessfulOrSkip();
+        recordSuccessfulOrSkip();
+        analyzeSuccessfulOrSkip();
+
+        String message = "Test Sensor Period";
+
+        Assert.assertEquals("Sensor Period Mean", 5e-3, mReport.sensor_period_avg, 0.2e-3);
+        Assert.assertEquals("Sensor Period Stdev", 0.0, mReport.sensor_period_stdev, 0.5e-3);
+        return message;
+    }
+
+    private void loadOpenCVSuccessfulOrSkip() throws SensorTestStateNotSupportedException {
+        if (!mOpenCVLoadSuccessful)
+            throw new SensorTestStateNotSupportedException("Skipped because OpenCV could not be loaded.");
+    }
+
+    private void recordSuccessfulOrSkip() throws SensorTestStateNotSupportedException {
+        if (!mRecordSuccessful)
+            throw new SensorTestStateNotSupportedException("Skipped due to record failure.");
+    }
+
+    private void analyzeSuccessfulOrSkip() throws SensorTestStateNotSupportedException {
+        if (mReport == null || mReport.error)
+            throw new SensorTestStateNotSupportedException("Skipped due to CV Analysis failure.");
+    }
+
+    /*
+     *  This function serves as a proxy because appendText is marked as deprecated.
+     *  When appendText is removed, this function will get a different implementation.
+     *
+     */
+    void showUserMessage(String s) {
+        appendText(s);
+    }
+
+    @Override
+    protected void onCreate(Bundle savedInstanceState) {
+
+        super.onCreate(savedInstanceState);
+
+        // GlSurfaceView is not necessary for this test
+        closeGlSurfaceView();
+
+        OpenCVLibrary.loadAsync(this);
+    }
+
+    @Override
+    protected void onPause() {
+        super.onPause();
+    }
+
+    @Override
+    protected void onResume() {
+        super.onResume();
+
+    }
+}
diff --git a/apps/CtsVerifier/src/com/android/cts/verifier/sensors/helpers/OpenCVLibrary.java b/apps/CtsVerifier/src/com/android/cts/verifier/sensors/helpers/OpenCVLibrary.java
new file mode 100644
index 0000000..2f5c873
--- /dev/null
+++ b/apps/CtsVerifier/src/com/android/cts/verifier/sensors/helpers/OpenCVLibrary.java
@@ -0,0 +1,71 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.android.cts.verifier.sensors.helpers;
+
+import android.content.Context;
+import android.os.Looper;
+import android.util.Log;
+
+import org.opencv.android.BaseLoaderCallback;
+import org.opencv.android.LoaderCallbackInterface;
+import org.opencv.android.OpenCVLoader;
+
+import java.util.concurrent.CountDownLatch;
+
+/**
+ * OpenCV library loader class
+ */
+public class OpenCVLibrary {
+
+    private static String TAG = "OpenCVLibraryProbe";
+    private static boolean mLoaded = false;
+
+    /**
+     * Load OpenCV Library in async mode
+     * @param context Activity context
+     */
+    public static void loadAsync(Context context) {
+        // only need to load once
+        if (isLoaded())  return;
+
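+        // initAsync returns immediately; mLoaded is only flipped to true in the callback below,
+        // so callers are expected to poll isLoaded() (see RVCVXCheckTestActivity.activitySetUp()).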
+        // Load the library through loader
+        OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_2_4_9, context,
+                new BaseLoaderCallback(context) {
+                    @Override
+                    public void onManagerConnected(int status) {
+                        Log.v(TAG, "New Loading status: "+status);
+                        switch (status) {
+                            case LoaderCallbackInterface.SUCCESS: {
+                                mLoaded = true;
+                            }
+                            break;
+                            default: {
+                                super.onManagerConnected(status);
+                            }
+                            break;
+                        }
+                    }
+                });
+    }
+
+    /**
+     * Test if the library is loaded
+     * @return a boolean indicating whether the OpenCV library is loaded.
+     */
+    public static boolean isLoaded() {
+        return mLoaded;
+    }
+}
diff --git a/build/module_test_config.mk b/build/module_test_config.mk
index 1a397ac..6584ef2 100644
--- a/build/module_test_config.mk
+++ b/build/module_test_config.mk
@@ -14,7 +14,7 @@
 
 cts_module_test_config := $(if $(wildcard \
 	$(LOCAL_PATH)/$(CTS_MODULE_TEST_CONFIG)), \
-	$(CTS_TESTCASES_OUT)/$(LOCAL_PACKAGE_NAME).config)
+	$(CTS_TESTCASES_OUT)/$(LOCAL_MODULE).config)
 ifneq ($(cts_module_test_config),)
 $(cts_module_test_config): $(LOCAL_PATH)/$(CTS_MODULE_TEST_CONFIG) | $(ACP)
 	$(call copy-file-to-target)
diff --git a/build/test_target_java_library.mk b/build/test_target_java_library.mk
index 2d3abfb..fe1000a 100644
--- a/build/test_target_java_library.mk
+++ b/build/test_target_java_library.mk
@@ -19,6 +19,7 @@
 # Disable by default so "m cts" will work in emulator builds
 LOCAL_DEX_PREOPT := false
 include $(BUILD_JAVA_LIBRARY)
+include $(BUILD_CTS_MODULE_TEST_CONFIG)
 
 cts_library_jar := $(CTS_TESTCASES_OUT)/$(LOCAL_MODULE).jar
 $(cts_library_jar): $(LOCAL_BUILT_MODULE)
@@ -32,6 +33,7 @@
 $(cts_library_xml): PRIVATE_JAR_PATH := $(LOCAL_MODULE).jar
 $(cts_library_xml): PRIVATE_RUNTIME_ARGS := $(LOCAL_CTS_TARGET_RUNTIME_ARGS)
 $(cts_library_xml): $(cts_library_jar)
+$(cts_library_xml): $(cts_module_test_config)
 $(cts_library_xml): $(CTS_EXPECTATIONS) $(CTS_UNSUPPORTED_ABIS) $(CTS_JAVA_TEST_SCANNER_DOCLET) $(CTS_JAVA_TEST_SCANNER) $(CTS_XML_GENERATOR)
 	$(hide) echo Generating test description for target library $(PRIVATE_LIBRARY)
 	$(hide) mkdir -p $(CTS_TESTCASES_OUT)
@@ -48,4 +50,4 @@
 						-o $@
 
 # Have the module name depend on the cts files; so the cts files get generated when you run mm/mmm/mma/mmma.
-$(my_register_name) : $(cts_library_jar) $(cts_library_xml)
+$(my_register_name) : $(cts_library_jar) $(cts_library_xml) $(cts_module_test_config)
diff --git a/libs/deviceutillegacy/src/android/webkit/cts/WebViewOnUiThread.java b/libs/deviceutillegacy/src/android/webkit/cts/WebViewOnUiThread.java
index 5cd6f30..adcc06f 100644
--- a/libs/deviceutillegacy/src/android/webkit/cts/WebViewOnUiThread.java
+++ b/libs/deviceutillegacy/src/android/webkit/cts/WebViewOnUiThread.java
@@ -21,6 +21,7 @@
 import android.graphics.Bitmap;
 import android.graphics.Picture;
 import android.graphics.Rect;
+import android.net.Uri;
 import android.os.Bundle;
 import android.os.Looper;
 import android.os.Message;
@@ -36,6 +37,8 @@
 import android.webkit.ValueCallback;
 import android.webkit.WebBackForwardList;
 import android.webkit.WebChromeClient;
+import android.webkit.WebMessage;
+import android.webkit.WebMessagePort;
 import android.webkit.WebSettings;
 import android.webkit.WebView.HitTestResult;
 import android.webkit.WebView.PictureListener;
@@ -307,6 +310,24 @@
         });
     }
 
+    public WebMessagePort[] createWebMessageChannel() {
+        return getValue(new ValueGetter<WebMessagePort[]>() {
+            @Override
+            public WebMessagePort[] capture() {
+                return mWebView.createWebMessageChannel();
+            }
+        });
+    }
+
+    public void postMessageToMainFrame(final WebMessage message, final Uri targetOrigin) {
+        runOnUiThread(new Runnable() {
+            @Override
+            public void run() {
+                mWebView.postMessageToMainFrame(message, targetOrigin);
+            }
+        });
+    }
+
     public void addJavascriptInterface(final Object object, final String name) {
         runOnUiThread(new Runnable() {
             @Override
@@ -629,11 +650,11 @@
         });
     }
 
-    public void insertVisualStateCallback(final long requestId, final VisualStateCallback callback) {
+    public void postVisualStateCallback(final long requestId, final VisualStateCallback callback) {
         runOnUiThread(new Runnable() {
             @Override
             public void run() {
-                mWebView.insertVisualStateCallback(requestId, callback);
+                mWebView.postVisualStateCallback(requestId, callback);
             }
         });
     }
diff --git a/suite/cts/deviceTests/tvproviderperf/src/com/android/cts/tvproviderperf/TvProviderPerfTest.java b/suite/cts/deviceTests/tvproviderperf/src/com/android/cts/tvproviderperf/TvProviderPerfTest.java
index df89cae..286d4fd 100644
--- a/suite/cts/deviceTests/tvproviderperf/src/com/android/cts/tvproviderperf/TvProviderPerfTest.java
+++ b/suite/cts/deviceTests/tvproviderperf/src/com/android/cts/tvproviderperf/TvProviderPerfTest.java
@@ -50,6 +50,7 @@
  */
 public class TvProviderPerfTest extends CtsAndroidTestCase {
     private static final int TRANSACTION_RUNS = 100;
+    private static final int QUERY_RUNS = 10;
 
     private ContentResolver mContentResolver;
     private String mInputId;
@@ -76,7 +77,7 @@
         }
     }
 
-    @TimeoutReq(minutes = 10)
+    @TimeoutReq(minutes = 8)
     public void testChannels() throws Exception {
         if (!mHasTvInputFramework) return;
         double[] averages = new double[4];
@@ -138,11 +139,11 @@
         averages[1] = Stat.getAverage(applyBatchTimes);
 
         // Query
-        applyBatchTimes = MeasureTime.measure(TRANSACTION_RUNS, new MeasureRun() {
+        applyBatchTimes = MeasureTime.measure(QUERY_RUNS, new MeasureRun() {
             @Override
             public void run(int i) {
                 int j = 0;
-                try (final Cursor cursor = mContentResolver.query(Channels.CONTENT_URI, null, null,
+                try (Cursor cursor = mContentResolver.query(Channels.CONTENT_URI, null, null,
                         null, null)) {
                     while (cursor.moveToNext()) {
                         ++j;
@@ -169,7 +170,7 @@
                 averages, ResultType.LOWER_BETTER, ResultUnit.MS);
     }
 
-    @TimeoutReq(minutes = 15)
+    @TimeoutReq(minutes = 12)
     public void testPrograms() throws Exception {
         if (!mHasTvInputFramework) return;
         double[] averages = new double[6];
@@ -234,7 +235,7 @@
             public void run(int i) {
                 Uri channelUri = channelUris.get(i);
                 operations.clear();
-                try (final Cursor cursor = mContentResolver.query(
+                try (Cursor cursor = mContentResolver.query(
                         TvContract.buildProgramsUriForChannel(channelUri),
                         projection, null, null, null)) {
                     long startTimeMs = 0;
@@ -262,11 +263,11 @@
         averages[1] = Stat.getAverage(applyBatchTimes);
 
         // Query
-        applyBatchTimes = MeasureTime.measure(TRANSACTION_RUNS, new MeasureRun() {
+        applyBatchTimes = MeasureTime.measure(QUERY_RUNS, new MeasureRun() {
             @Override
             public void run(int i) {
                 int j = 0;
-                try (final Cursor cursor = mContentResolver.query(Programs.CONTENT_URI, null, null,
+                try (Cursor cursor = mContentResolver.query(Programs.CONTENT_URI, null, null,
                         null, null)) {
                     while (cursor.moveToNext()) {
                         ++j;
@@ -279,12 +280,12 @@
         averages[2] = Stat.getAverage(applyBatchTimes);
 
         // Query programs with selection
-        applyBatchTimes = MeasureTime.measure(NUM_CHANNELS, new MeasureRun() {
+        applyBatchTimes = MeasureTime.measure(QUERY_RUNS, new MeasureRun() {
             @Override
             public void run(int i) {
                 Uri channelUri = channelUris.get(i);
                 int j = 0;
-                try (final Cursor cursor = mContentResolver.query(
+                try (Cursor cursor = mContentResolver.query(
                         TvContract.buildProgramsUriForChannel(
                                 channelUri, 0,
                                 PROGRAM_DURATION_MS * TRANSACTION_SIZE / 2),
diff --git a/suite/cts/deviceTests/videoperf/src/com/android/cts/videoperf/CodecInfo.java b/suite/cts/deviceTests/videoperf/src/com/android/cts/videoperf/CodecInfo.java
index 9ea8482..88b005a 100644
--- a/suite/cts/deviceTests/videoperf/src/com/android/cts/videoperf/CodecInfo.java
+++ b/suite/cts/deviceTests/videoperf/src/com/android/cts/videoperf/CodecInfo.java
@@ -24,6 +24,7 @@
 import android.media.MediaCodecList;
 import android.media.MediaFormat;
 import android.util.Log;
+import android.util.Range;
 
 import java.io.IOException;
 
@@ -63,6 +64,7 @@
         CodecCapabilities cap = codec.getCodecInfo().getCapabilitiesForType(mimeType);
         if (cap.colorFormats.length == 0) {
             Log.w(TAG, "no supported color format");
+            codec.release();
             return null;
         }
 
@@ -82,6 +84,7 @@
             info.mFps = vidCap.getSupportedFrameRatesFor(w, h).getUpper().intValue();
         } catch (IllegalArgumentException e) {
             Log.w(TAG, "unsupported size");
+            codec.release();
             return null;
         }
         info.mBitRate = vidCap.getBitrateRange().getUpper();
@@ -90,6 +93,22 @@
         return info;
     }
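+    // Query the achievable frame-rate range that the codec itself reports for the given
+    // resolution via VideoCapabilities#getAchievableFrameRatesFor; returns null if the codec
+    // cannot be created.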
 
+    public static Range<Double> getAchievableFrameRatesFor(
+            String codecName, String mimeType, int width, int height) {
+        MediaCodec codec;
+        try {
+            codec = MediaCodec.createByCodecName(codecName);
+        } catch (IOException e) {
+            return null;
+        }
+
+        VideoCapabilities cap =
+            codec.getCodecInfo().getCapabilitiesForType(mimeType).getVideoCapabilities();
+        Range<Double> results = cap.getAchievableFrameRatesFor(width, height);
+        codec.release();
+        return results;
+    }
+
     // for debugging
     private static void printIntArray(String msg, int[] data) {
         StringBuilder builder = new StringBuilder();
diff --git a/suite/cts/deviceTests/videoperf/src/com/android/cts/videoperf/VideoEncoderDecoderTest.java b/suite/cts/deviceTests/videoperf/src/com/android/cts/videoperf/VideoEncoderDecoderTest.java
index 6bd631b..28b4feb 100644
--- a/suite/cts/deviceTests/videoperf/src/com/android/cts/videoperf/VideoEncoderDecoderTest.java
+++ b/suite/cts/deviceTests/videoperf/src/com/android/cts/videoperf/VideoEncoderDecoderTest.java
@@ -28,6 +28,7 @@
 import android.media.MediaCodecList;
 import android.media.MediaFormat;
 import android.util.Log;
+import android.util.Range;
 import android.util.Size;
 
 import android.cts.util.CtsAndroidTestCase;
@@ -38,6 +39,7 @@
 import java.io.IOException;
 import java.nio.ByteBuffer;
 import java.lang.System;
+import java.util.ArrayList;
 import java.util.Random;
 import java.util.Vector;
 
@@ -89,12 +91,20 @@
     private double mRmsErrorMargain = PIXEL_RMS_ERROR_MARGAIN;
     private Random mRandom;
 
+    private class TestConfig {
+        public boolean mTestPixels = true;
+        public boolean mTestResult = true;
+    }
+
+    private TestConfig mTestConfig;
+
     @Override
     protected void setUp() throws Exception {
         mEncodedOutputBuffer = new Vector<ByteBuffer>(TOTAL_FRAMES * 2);
         // Use time as a seed, hoping to prevent checking pixels in the same pattern
         long now = System.currentTimeMillis();
         mRandom = new Random(now);
+        mTestConfig = new TestConfig();
         super.setUp();
     }
 
@@ -107,22 +117,22 @@
         mYDirectBuffer = null;
         mUVDirectBuffer = null;
         mRandom = null;
+        mTestConfig = null;
         super.tearDown();
     }
 
-    private String getEncoderName(String mime, boolean isGoog) {
-        return getCodecName(mime, isGoog, true /* isEncoder */);
+    private String getEncoderName(String mime) {
+        return getCodecName(mime, true /* isEncoder */);
     }
 
-    private String getDecoderName(String mime, boolean isGoog) {
-        return getCodecName(mime, isGoog, false /* isEncoder */);
+    private String getDecoderName(String mime) {
+        return getCodecName(mime, false /* isEncoder */);
     }
 
-    private String getCodecName(String mime, boolean isGoog, boolean isEncoder) {
+    private String getCodecName(String mime, boolean isEncoder) {
         MediaCodecList mcl = new MediaCodecList(MediaCodecList.REGULAR_CODECS);
         for (MediaCodecInfo info : mcl.getCodecInfos()) {
-            if (info.isEncoder() != isEncoder
-                    || info.getName().toLowerCase().startsWith("omx.google.") != isGoog) {
+            if (info.isEncoder() != isEncoder) {
                 continue;
             }
             CodecCapabilities caps = null;
@@ -136,15 +146,59 @@
         return null;
     }
 
+    private String[] getEncoderName(String mime, boolean isGoog) {
+        return getCodecName(mime, isGoog, true /* isEncoder */);
+    }
+
+    private String[] getDecoderName(String mime, boolean isGoog) {
+        return getCodecName(mime, isGoog, false /* isEncoder */);
+    }
+
+    private String[] getCodecName(String mime, boolean isGoog, boolean isEncoder) {
+        MediaCodecList mcl = new MediaCodecList(MediaCodecList.REGULAR_CODECS);
+        ArrayList<String> result = new ArrayList<String>();
+        for (MediaCodecInfo info : mcl.getCodecInfos()) {
+            if (info.isEncoder() != isEncoder
+                    || info.getName().toLowerCase().startsWith("omx.google.") != isGoog) {
+                continue;
+            }
+            CodecCapabilities caps = null;
+            try {
+                caps = info.getCapabilitiesForType(mime);
+            } catch (IllegalArgumentException e) {  // mime is not supported
+                continue;
+            }
+            result.add(info.getName());
+        }
+        return result.toArray(new String[result.size()]);
+    }
+
+    public void testAvc0176x0144() throws Exception {
+        doTestDefault(VIDEO_AVC, 176, 144);
+    }
+
+    public void testAvc0352x0288() throws Exception {
+        doTestDefault(VIDEO_AVC, 352, 288);
+    }
+
+    public void testAvc0720x0480() throws Exception {
+        doTestDefault(VIDEO_AVC, 720, 480);
+    }
+
+    public void testAvc1280x0720() throws Exception {
+        doTestDefault(VIDEO_AVC, 1280, 720);
+    }
+
+    /**
+     * The resolution is intentionally set to 1072, not 1080, as 1080 is not a multiple of 16
+     * and would require additional settings such as stride, which are not specified in the
+     * API documentation.
+     */
+    public void testAvc1920x1072() throws Exception {
+        doTestDefault(VIDEO_AVC, 1920, 1072);
+    }
+
     // Avc tests
-    public void testAvc0176x0144Other() throws Exception {
-        doTestOther(VIDEO_AVC, 176, 144);
-    }
-
-    public void testAvc0176x0144Goog() throws Exception {
-        doTestGoog(VIDEO_AVC, 176, 144);
-    }
-
     public void testAvc0320x0240Other() throws Exception {
         doTestOther(VIDEO_AVC, 320, 240);
     }
@@ -153,14 +207,6 @@
         doTestGoog(VIDEO_AVC, 320, 240);
     }
 
-    public void testAvc0352x0288Other() throws Exception {
-        doTestOther(VIDEO_AVC, 352, 288);
-    }
-
-    public void testAvc0352x0288Goog() throws Exception {
-        doTestGoog(VIDEO_AVC, 352, 288);
-    }
-
     public void testAvc0720x0480Other() throws Exception {
         doTestOther(VIDEO_AVC, 720, 480);
     }
@@ -177,17 +223,12 @@
         doTestGoog(VIDEO_AVC, 1280, 720);
     }
 
-    /**
-     * resolution intentionally set to 1072 not 1080
-     * as 1080 is not multiple of 16, and it requires additional setting like stride
-     * which is not specified in API documentation.
-     */
-    public void testAvc1920x1072Other() throws Exception {
-        doTestOther(VIDEO_AVC, 1920, 1072);
+    public void testAvc1920x1080Other() throws Exception {
+        doTestOther(VIDEO_AVC, 1920, 1080);
     }
 
-    public void testAvc1920x1072Goog() throws Exception {
-        doTestGoog(VIDEO_AVC, 1920, 1072);
+    public void testAvc1920x1080Goog() throws Exception {
+        doTestGoog(VIDEO_AVC, 1920, 1080);
     }
 
     // Vp8 tests
@@ -301,13 +342,34 @@
     }
 
     private void doTestGoog(String mimeType, int w, int h) throws Exception {
+        mTestConfig.mTestPixels = false;
+        mTestConfig.mTestResult = false;
         doTest(true /* isGoog */, mimeType, w, h, NUMBER_OF_REPEAT);
     }
 
     private void doTestOther(String mimeType, int w, int h) throws Exception {
+        mTestConfig.mTestPixels = false;
         doTest(false /* isGoog */, mimeType, w, h, NUMBER_OF_REPEAT);
     }
 
+    private void doTestDefault(String mimeType, int w, int h) throws Exception {
+        mTestConfig.mTestResult = false;
+
+        String encoderName = getEncoderName(mimeType);
+        if (encoderName == null) {
+            Log.i(TAG, "Encoder for " + mimeType + " not found");
+            return;
+        }
+
+        String decoderName = getDecoderName(mimeType);
+        if (decoderName == null) {
+            Log.i(TAG, "Decoder for " + mimeType + " not found");
+            return;
+        }
+
+        doTestByName(encoderName, decoderName, mimeType, w, h, NUMBER_OF_REPEAT);
+    }
+
     /**
      * Run encoding / decoding test for given mimeType of codec
      * @param isGoog test google or non-google codec.
@@ -318,20 +380,30 @@
      */
     private void doTest(boolean isGoog, String mimeType, int w, int h, int numberRepeat)
             throws Exception {
-        String encoderName = getEncoderName(mimeType, isGoog);
-        if (encoderName == null) {
+        String[] encoderNames = getEncoderName(mimeType, isGoog);
+        if (encoderNames.length == 0) {
             Log.i(TAG, isGoog ? "Google " : "Non-google "
                     + "encoder for " + mimeType + " not found");
             return;
         }
 
-        String decoderName = getDecoderName(mimeType, isGoog);
-        if (decoderName == null) {
+        String[] decoderNames = getDecoderName(mimeType, isGoog);
+        if (decoderNames.length == 0) {
             Log.i(TAG, isGoog ? "Google " : "Non-google "
                     + "decoder for " + mimeType + " not found");
             return;
         }
 
+        for (String encoderName: encoderNames) {
+            for (String decoderName: decoderNames) {
+                doTestByName(encoderName, decoderName, mimeType, w, h, numberRepeat);
+            }
+        }
+    }
+
+    private void doTestByName(
+            String encoderName, String decoderName, String mimeType, int w, int h, int numberRepeat)
+            throws Exception {
         CodecInfo infoEnc = CodecInfo.getSupportedFormatInfo(encoderName, mimeType, w, h);
         if (infoEnc == null) {
             Log.i(TAG, "Encoder " + mimeType + " with " + w + "," + h + " not supported");
@@ -409,6 +481,27 @@
                         + decoderRmsErrorResults[i] + " vs " + mRmsErrorMargain);
             }
         }
+
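+        // When enabled, cross-check the measured encoder/decoder frame rates against the
+        // achievable frame-rate range the codec advertises (see
+        // CodecInfo.getAchievableFrameRatesFor above).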
+        if (mTestConfig.mTestResult) {
+            Range<Double> reportedEncoderResults =
+                    CodecInfo.getAchievableFrameRatesFor(encoderName, mimeType, w, h);
+            Range<Double> reportedDecoderResults =
+                    CodecInfo.getAchievableFrameRatesFor(decoderName, mimeType, w, h);
+            if (reportedEncoderResults == null) {
+                fail("Failed to getAchievableFrameRatesFor "
+                        + encoderName + " " + mimeType + " " + w + "x" + h);
+            }
+            if (reportedDecoderResults == null) {
+                fail("Failed to getAchievableFrameRatesFor "
+                        + decoderName + " " + mimeType + " " + w + "x" + h);
+            }
+            if (!reportedEncoderResults.contains(Stat.getAverage(encoderFpsResults))) {
+                fail("Expecting achievable frame rate in the range of " + reportedEncoderResults);
+            }
+            if (!reportedDecoderResults.contains(Stat.getAverage(decoderFpsResults))) {
+                fail("Expecting achievable frame rate in the range of " + reportedDecoderResults);
+            }
+        }
     }
 
     /**
@@ -770,29 +863,48 @@
 
                 // only do YUV compare on EOS frame if the buffer size is none-zero
                 if (info.size > 0) {
-                    Point origin = getOrigin(outFrameCount);
-                    int i;
+                    if (mTestConfig.mTestPixels) {
+                        Point origin = getOrigin(outFrameCount);
+                        int i;
 
-                    // if decoder supports planar or semiplanar, check output with
-                    // ByteBuffer & Image each on half of the points
-                    int pixelCheckPerFrame = PIXEL_CHECK_PER_FRAME;
-                    if (!isDstFlexYUV()) {
-                        pixelCheckPerFrame /= 2;
-                        ByteBuffer buf = codec.getOutputBuffer(outputBufIndex);
-                        if (VERBOSE && (outFrameCount == 0)) {
-                            printByteBuffer("Y ", buf, 0, 20);
-                            printByteBuffer("UV ", buf, mVideoWidth * mVideoHeight, 20);
-                            printByteBuffer("UV ", buf,
-                                    mVideoWidth * mVideoHeight + mVideoWidth * 60, 20);
+                        // if decoder supports planar or semiplanar, check output with
+                        // ByteBuffer & Image each on half of the points
+                        int pixelCheckPerFrame = PIXEL_CHECK_PER_FRAME;
+                        if (!isDstFlexYUV()) {
+                            pixelCheckPerFrame /= 2;
+                            ByteBuffer buf = codec.getOutputBuffer(outputBufIndex);
+                            if (VERBOSE && (outFrameCount == 0)) {
+                                printByteBuffer("Y ", buf, 0, 20);
+                                printByteBuffer("UV ", buf, mVideoWidth * mVideoHeight, 20);
+                                printByteBuffer("UV ", buf,
+                                        mVideoWidth * mVideoHeight + mVideoWidth * 60, 20);
+                            }
+                            for (i = 0; i < pixelCheckPerFrame; i++) {
+                                int w = mRandom.nextInt(mVideoWidth);
+                                int h = mRandom.nextInt(mVideoHeight);
+                                getPixelValuesFromYUVBuffers(origin.x, origin.y, w, h, expected);
+                                getPixelValuesFromOutputBuffer(buf, w, h, decoded);
+                                if (VERBOSE) {
+                                    Log.i(TAG, outFrameCount + "-" + i + "- th round: ByteBuffer:"
+                                            + " expected "
+                                            + expected.mY + "," + expected.mU + "," + expected.mV
+                                            + " decoded "
+                                            + decoded.mY + "," + decoded.mU + "," + decoded.mV);
+                                }
+                                totalErrorSquared += expected.calcErrorSquared(decoded);
+                            }
                         }
+
+                        Image image = codec.getOutputImage(outputBufIndex);
+                        assertTrue(image != null);
                         for (i = 0; i < pixelCheckPerFrame; i++) {
                             int w = mRandom.nextInt(mVideoWidth);
                             int h = mRandom.nextInt(mVideoHeight);
                             getPixelValuesFromYUVBuffers(origin.x, origin.y, w, h, expected);
-                            getPixelValuesFromOutputBuffer(buf, w, h, decoded);
+                            getPixelValuesFromImage(image, w, h, decoded);
                             if (VERBOSE) {
-                                Log.i(TAG, outFrameCount + "-" + i + "- th round: ByteBuffer:"
-                                        + " expected "
+                                Log.i(TAG, outFrameCount + "-" + i + "- th round: FlexYUV:"
+                                        + " expected "
                                         + expected.mY + "," + expected.mU + "," + expected.mV
                                         + " decoded "
                                         + decoded.mY + "," + decoded.mU + "," + decoded.mV);
@@ -800,23 +912,6 @@
                             totalErrorSquared += expected.calcErrorSquared(decoded);
                         }
                     }
-
-                    Image image = codec.getOutputImage(outputBufIndex);
-                    assertTrue(image != null);
-                    for (i = 0; i < pixelCheckPerFrame; i++) {
-                        int w = mRandom.nextInt(mVideoWidth);
-                        int h = mRandom.nextInt(mVideoHeight);
-                        getPixelValuesFromYUVBuffers(origin.x, origin.y, w, h, expected);
-                        getPixelValuesFromImage(image, w, h, decoded);
-                        if (VERBOSE) {
-                            Log.i(TAG, outFrameCount + "-" + i + "- th round: FlexYUV:"
-                                    + " expcted "
-                                    + expected.mY + "," + expected.mU + "," + expected.mV
-                                    + " decoded "
-                                    + decoded.mY + "," + decoded.mU + "," + decoded.mV);
-                        }
-                        totalErrorSquared += expected.calcErrorSquared(decoded);
-                    }
                     outFrameCount++;
                 }
                 codec.releaseOutputBuffer(outputBufIndex, false /* render */);
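
The mTestConfig.mTestResult block added earlier in this file cross-checks the measured encoder/decoder FPS against the performance points the codec advertises. CodecInfo.getAchievableFrameRatesFor is a CTS helper that is not part of this diff; the sketch below is only an illustration of what such a lookup might do with the public MediaCodecList / MediaCodecInfo.VideoCapabilities API (the helper name and structure are assumptions, not the actual implementation):

    import android.media.MediaCodecInfo;
    import android.media.MediaCodecList;
    import android.util.Range;

    final class AchievableFpsSketch {
        // Illustrative only: look up the achievable frame-rate range a named codec
        // reports for a given video size. Returns null when the codec, mime type,
        // or performance data is unavailable.
        static Range<Double> achievableFps(String codecName, String mime, int width, int height) {
            for (MediaCodecInfo info : new MediaCodecList(MediaCodecList.ALL_CODECS).getCodecInfos()) {
                if (!info.getName().equalsIgnoreCase(codecName)) {
                    continue;
                }
                try {
                    MediaCodecInfo.VideoCapabilities caps =
                            info.getCapabilitiesForType(mime).getVideoCapabilities();
                    return (caps == null) ? null : caps.getAchievableFrameRatesFor(width, height);
                } catch (IllegalArgumentException e) {
                    return null; // this codec does not support the mime type
                }
            }
            return null;
        }
    }
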
diff --git a/tests/JobScheduler/src/android/jobscheduler/cts/ConnectivityConstraintTest.java b/tests/JobScheduler/src/android/jobscheduler/cts/ConnectivityConstraintTest.java
index a83f7a9..547b205 100644
--- a/tests/JobScheduler/src/android/jobscheduler/cts/ConnectivityConstraintTest.java
+++ b/tests/JobScheduler/src/android/jobscheduler/cts/ConnectivityConstraintTest.java
@@ -51,6 +51,8 @@
     private boolean mHasWifi;
     /** Whether the device running these tests supports telephony. */
     private boolean mHasTelephony;
+    /** Track whether WiFi was enabled in case we turn it off. */
+    private boolean mInitialWiFiState;
 
     private JobInfo.Builder mBuilder;
 
@@ -67,6 +69,14 @@
         mHasTelephony = packageManager.hasSystemFeature(PackageManager.FEATURE_TELEPHONY);
         mBuilder =
                 new JobInfo.Builder(CONNECTIVITY_JOB_ID, kJobServiceComponent);
+
+        mInitialWiFiState = mWifiManager.isWifiEnabled();
+    }
+
+    @Override
+    public void tearDown() throws Exception {
+        // Ensure that we leave WiFi in its previous state.
+        mWifiManager.setWifiEnabled(mInitialWiFiState);
+        super.tearDown();
+    }
 
     // --------------------------------------------------------------------------------------------
@@ -202,6 +212,14 @@
         }
     }
 
+    /**
+     * Disconnect from WiFi in an attempt to connect to cellular data. Note that this is
+     * best effort: there are no public APIs to force a connection to cellular data. We simply
+     * disable WiFi and wait for a broadcast telling us that we are connected to cell.
+     * This method is never called if the device does not support telephony.
+     * @see #mHasTelephony
+     * @see #checkDeviceSupportsMobileData()
+     */
     private void disconnectWifiToConnectToMobile() throws InterruptedException {
         if (mHasWifi && mWifiManager.isWifiEnabled()) {
             ConnectivityActionReceiver connectMobileReceiver =
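
The javadoc above describes the best-effort pattern: disable WiFi, then block on a connectivity broadcast until mobile data is up. The ConnectivityActionReceiver used by the test is not shown in this diff; the sketch below is an illustrative stand-in for that wait step (class name, timeout handling, and structure are assumptions, not the test's actual implementation):

    import android.content.BroadcastReceiver;
    import android.content.Context;
    import android.content.Intent;
    import android.content.IntentFilter;
    import android.net.ConnectivityManager;
    import android.net.NetworkInfo;

    import java.util.concurrent.CountDownLatch;
    import java.util.concurrent.TimeUnit;

    final class MobileDataWaitSketch {
        // Illustrative: after WiFi has been turned off, wait (up to a timeout) for a
        // CONNECTIVITY_ACTION broadcast reporting an active mobile-data connection.
        static boolean waitForMobileData(Context context, long timeoutSeconds)
                throws InterruptedException {
            final CountDownLatch latch = new CountDownLatch(1);
            BroadcastReceiver receiver = new BroadcastReceiver() {
                @Override
                public void onReceive(Context c, Intent intent) {
                    ConnectivityManager cm =
                            (ConnectivityManager) c.getSystemService(Context.CONNECTIVITY_SERVICE);
                    NetworkInfo active = cm.getActiveNetworkInfo();
                    if (active != null && active.getType() == ConnectivityManager.TYPE_MOBILE
                            && active.isConnected()) {
                        latch.countDown();
                    }
                }
            };
            context.registerReceiver(receiver,
                    new IntentFilter(ConnectivityManager.CONNECTIVITY_ACTION));
            try {
                return latch.await(timeoutSeconds, TimeUnit.SECONDS);
            } finally {
                context.unregisterReceiver(receiver);
            }
        }
    }
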
diff --git a/tests/expectations/knownfailures.txt b/tests/expectations/knownfailures.txt
index 6e571a7..1e29e10 100644
--- a/tests/expectations/knownfailures.txt
+++ b/tests/expectations/knownfailures.txt
@@ -169,179 +169,6 @@
   bug: 17605875
 },
 {
-  description: "Failures on these tests are known on several devices.",
-  names: [
-    "android.hardware.cts.SensorBatchingTests#testAccelerometer_fastest_batching",
-    "android.hardware.cts.SensorBatchingTests#testAccelerometer_50hz_batching",
-    "android.hardware.cts.SensorBatchingTests#testAccelerometer_fastest_flush",
-    "android.hardware.cts.SensorBatchingTests#testAccelerometer_50hz_flush",
-    "android.hardware.cts.SensorBatchingTests#testMagneticField_fastest_batching",
-    "android.hardware.cts.SensorBatchingTests#testMagneticField_50hz_batching",
-    "android.hardware.cts.SensorBatchingTests#testMagneticField_fastest_flush",
-    "android.hardware.cts.SensorBatchingTests#testMagneticField_50hz_flush",
-    "android.hardware.cts.SensorBatchingTests#testOrientation_fastest_batching",
-    "android.hardware.cts.SensorBatchingTests#testOrientation_50hz_batching",
-    "android.hardware.cts.SensorBatchingTests#testOrientation_fastest_flush",
-    "android.hardware.cts.SensorBatchingTests#testOrientation_50hz_flush",
-    "android.hardware.cts.SensorBatchingTests#testGyroscope_fastest_batching",
-    "android.hardware.cts.SensorBatchingTests#testGyroscope_50hz_batching",
-    "android.hardware.cts.SensorBatchingTests#testGyroscope_fastest_flush",
-    "android.hardware.cts.SensorBatchingTests#testGyroscope_50hz_flush",
-    "android.hardware.cts.SensorBatchingTests#testPressure_fastest_batching",
-    "android.hardware.cts.SensorBatchingTests#testPressure_50hz_batching",
-    "android.hardware.cts.SensorBatchingTests#testPressure_fastest_flush",
-    "android.hardware.cts.SensorBatchingTests#testPressure_50hz_flush",
-    "android.hardware.cts.SensorBatchingTests#testGravity_fastest_batching",
-    "android.hardware.cts.SensorBatchingTests#testGravity_50hz_batching",
-    "android.hardware.cts.SensorBatchingTests#testGravity_fastest_flush",
-    "android.hardware.cts.SensorBatchingTests#testGravity_50hz_flush",
-    "android.hardware.cts.SensorBatchingTests#testRotationVector_fastest_batching",
-    "android.hardware.cts.SensorBatchingTests#testRotationVector_50hz_batching",
-    "android.hardware.cts.SensorBatchingTests#testRotationVector_fastest_flush",
-    "android.hardware.cts.SensorBatchingTests#testRotationVector_50hz_flush",
-    "android.hardware.cts.SensorBatchingTests#testMagneticFieldUncalibrated_fastest_batching",
-    "android.hardware.cts.SensorBatchingTests#testMagneticFieldUncalibrated_50hz_batching",
-    "android.hardware.cts.SensorBatchingTests#testMagneticFieldUncalibrated_fastest_flush",
-    "android.hardware.cts.SensorBatchingTests#testMagneticFieldUncalibrated_50hz_flush",
-    "android.hardware.cts.SensorBatchingTests#testGameRotationVector_fastest_batching",
-    "android.hardware.cts.SensorBatchingTests#testGameRotationVector_50hz_batching",
-    "android.hardware.cts.SensorBatchingTests#testGameRotationVector_fastest_flush",
-    "android.hardware.cts.SensorBatchingTests#testGameRotationVector_50hz_flush",
-    "android.hardware.cts.SensorBatchingTests#testGyroscopeUncalibrated_fastest_batching",
-    "android.hardware.cts.SensorBatchingTests#testGyroscopeUncalibrated_50hz_batching",
-    "android.hardware.cts.SensorBatchingTests#testGyroscopeUncalibrated_fastest_flush",
-    "android.hardware.cts.SensorBatchingTests#testGyroscopeUncalibrated_50hz_flush",
-    "android.hardware.cts.SensorBatchingTests#testLinearAcceleration_fastest_batching",
-    "android.hardware.cts.SensorBatchingTests#testLinearAcceleration_50hz_batching",
-    "android.hardware.cts.SensorBatchingTests#testLinearAcceleration_fastest_flush",
-    "android.hardware.cts.SensorBatchingTests#testLinearAcceleration_50hz_flush",
-    "android.hardware.cts.SensorBatchingTests#testGeomagneticRotationVector_fastest_batching",
-    "android.hardware.cts.SensorBatchingTests#testGeomagneticRotationVector_50hz_batching",
-    "android.hardware.cts.SensorBatchingTests#testGeomagneticRotationVector_fastest_flush",
-    "android.hardware.cts.SensorBatchingTests#testGeomagneticRotationVector_50hz_flush",
-    "android.hardware.cts.SensorIntegrationTests#testSensorsWithSeveralClients",
-    "android.hardware.cts.SensorIntegrationTests#testSensorsMovingRates",
-    "android.hardware.cts.SensorIntegrationTests#testAccelerometerAccelerometerStopping",
-    "android.hardware.cts.SensorIntegrationTests#testAccelerometerGyroscopeStopping",
-    "android.hardware.cts.SensorIntegrationTests#testAccelerometerMagneticFieldStopping",
-    "android.hardware.cts.SensorIntegrationTests#testGyroscopeAccelerometerStopping",
-    "android.hardware.cts.SensorIntegrationTests#testGyroscopeGyroscopeStopping",
-    "android.hardware.cts.SensorIntegrationTests#testGyroscopeMagneticFieldStopping",
-    "android.hardware.cts.SensorIntegrationTests#testMagneticFieldAccelerometerStopping",
-    "android.hardware.cts.SensorIntegrationTests#testMagneticFieldGyroscopeStopping",
-    "android.hardware.cts.SensorIntegrationTests#testMagneticFieldMagneticFieldStopping",
-    "android.hardware.cts.SingleSensorTests#testSensorProperties",
-    "android.hardware.cts.SingleSensorTests#testAccelerometer_fastest",
-    "android.hardware.cts.SingleSensorTests#testAccelerometer_100hz",
-    "android.hardware.cts.SingleSensorTests#testAccelerometer_200hz",
-    "android.hardware.cts.SingleSensorTests#testAccelerometer_50hz",
-    "android.hardware.cts.SingleSensorTests#testAccelerometer_25hz",
-    "android.hardware.cts.SingleSensorTests#testAccelerometer_15hz",
-    "android.hardware.cts.SingleSensorTests#testAccelerometer_10hz",
-    "android.hardware.cts.SingleSensorTests#testAccelerometer_5hz",
-    "android.hardware.cts.SingleSensorTests#testAccelerometer_1hz",
-    "android.hardware.cts.SingleSensorTests#testMagneticField_fastest",
-    "android.hardware.cts.SingleSensorTests#testMagneticField_200hz",
-    "android.hardware.cts.SingleSensorTests#testMagneticField_100hz",
-    "android.hardware.cts.SingleSensorTests#testMagneticField_50hz",
-    "android.hardware.cts.SingleSensorTests#testMagneticField_25hz",
-    "android.hardware.cts.SingleSensorTests#testMagneticField_15hz",
-    "android.hardware.cts.SingleSensorTests#testMagneticField_10hz",
-    "android.hardware.cts.SingleSensorTests#testMagneticField_5hz",
-    "android.hardware.cts.SingleSensorTests#testMagneticField_1hz",
-    "android.hardware.cts.SingleSensorTests#testOrientation_fastest",
-    "android.hardware.cts.SingleSensorTests#testOrientation_200hz",
-    "android.hardware.cts.SingleSensorTests#testOrientation_100hz",
-    "android.hardware.cts.SingleSensorTests#testOrientation_50hz",
-    "android.hardware.cts.SingleSensorTests#testOrientation_25hz",
-    "android.hardware.cts.SingleSensorTests#testOrientation_15hz",
-    "android.hardware.cts.SingleSensorTests#testOrientation_10hz",
-    "android.hardware.cts.SingleSensorTests#testOrientation_5hz",
-    "android.hardware.cts.SingleSensorTests#testOrientation_1hz",
-    "android.hardware.cts.SingleSensorTests#testGyroscope_fastest",
-    "android.hardware.cts.SingleSensorTests#testGyroscope_200hz",
-    "android.hardware.cts.SingleSensorTests#testGyroscope_100hz",
-    "android.hardware.cts.SingleSensorTests#testGyroscope_50hz",
-    "android.hardware.cts.SingleSensorTests#testGyroscope_25hz",
-    "android.hardware.cts.SingleSensorTests#testGyroscope_15hz",
-    "android.hardware.cts.SingleSensorTests#testGyroscope_10hz",
-    "android.hardware.cts.SingleSensorTests#testGyroscope_5hz",
-    "android.hardware.cts.SingleSensorTests#testGyroscope_1hz",
-    "android.hardware.cts.SingleSensorTests#testPressure_fastest",
-    "android.hardware.cts.SingleSensorTests#testPressure_200hz",
-    "android.hardware.cts.SingleSensorTests#testPressure_100hz",
-    "android.hardware.cts.SingleSensorTests#testPressure_50hz",
-    "android.hardware.cts.SingleSensorTests#testPressure_25hz",
-    "android.hardware.cts.SingleSensorTests#testPressure_15hz",
-    "android.hardware.cts.SingleSensorTests#testPressure_10hz",
-    "android.hardware.cts.SingleSensorTests#testPressure_5hz",
-    "android.hardware.cts.SingleSensorTests#testPressure_1hz",
-    "android.hardware.cts.SingleSensorTests#testGravity_fastest",
-    "android.hardware.cts.SingleSensorTests#testGravity_200hz",
-    "android.hardware.cts.SingleSensorTests#testGravity_100hz",
-    "android.hardware.cts.SingleSensorTests#testGravity_50hz",
-    "android.hardware.cts.SingleSensorTests#testGravity_25hz",
-    "android.hardware.cts.SingleSensorTests#testGravity_15hz",
-    "android.hardware.cts.SingleSensorTests#testGravity_10hz",
-    "android.hardware.cts.SingleSensorTests#testGravity_5hz",
-    "android.hardware.cts.SingleSensorTests#testGravity_1hz",
-    "android.hardware.cts.SingleSensorTests#testRotationVector_fastest",
-    "android.hardware.cts.SingleSensorTests#testRotationVector_200hz",
-    "android.hardware.cts.SingleSensorTests#testRotationVector_100hz",
-    "android.hardware.cts.SingleSensorTests#testRotationVector_50hz",
-    "android.hardware.cts.SingleSensorTests#testRotationVector_25hz",
-    "android.hardware.cts.SingleSensorTests#testRotationVector_15hz",
-    "android.hardware.cts.SingleSensorTests#testRotationVector_10hz",
-    "android.hardware.cts.SingleSensorTests#testRotationVector_5hz",
-    "android.hardware.cts.SingleSensorTests#testRotationVector_1hz",
-    "android.hardware.cts.SingleSensorTests#testMagneticFieldUncalibrated_fastest",
-    "android.hardware.cts.SingleSensorTests#testMagneticFieldUncalibrated_200hz",
-    "android.hardware.cts.SingleSensorTests#testMagneticFieldUncalibrated_100hz",
-    "android.hardware.cts.SingleSensorTests#testMagneticFieldUncalibrated_50hz",
-    "android.hardware.cts.SingleSensorTests#testMagneticFieldUncalibrated_25hz",
-    "android.hardware.cts.SingleSensorTests#testMagneticFieldUncalibrated_15hz",
-    "android.hardware.cts.SingleSensorTests#testMagneticFieldUncalibrated_10hz",
-    "android.hardware.cts.SingleSensorTests#testMagneticFieldUncalibrated_5hz",
-    "android.hardware.cts.SingleSensorTests#testMagneticFieldUncalibrated_1hz",
-    "android.hardware.cts.SingleSensorTests#testGameRotationVector_fastest",
-    "android.hardware.cts.SingleSensorTests#testGameRotationVector_200hz",
-    "android.hardware.cts.SingleSensorTests#testGameRotationVector_100hz",
-    "android.hardware.cts.SingleSensorTests#testGameRotationVector_50hz",
-    "android.hardware.cts.SingleSensorTests#testGameRotationVector_25hz",
-    "android.hardware.cts.SingleSensorTests#testGameRotationVector_15hz",
-    "android.hardware.cts.SingleSensorTests#testGameRotationVector_10hz",
-    "android.hardware.cts.SingleSensorTests#testGameRotationVector_5hz",
-    "android.hardware.cts.SingleSensorTests#testGameRotationVector_1hz",
-    "android.hardware.cts.SingleSensorTests#testGyroscopeUncalibrated_fastest",
-    "android.hardware.cts.SingleSensorTests#testGyroscopeUncalibrated_200hz",
-    "android.hardware.cts.SingleSensorTests#testGyroscopeUncalibrated_100hz",
-    "android.hardware.cts.SingleSensorTests#testGyroscopeUncalibrated_50hz",
-    "android.hardware.cts.SingleSensorTests#testGyroscopeUncalibrated_25hz",
-    "android.hardware.cts.SingleSensorTests#testGyroscopeUncalibrated_15hz",
-    "android.hardware.cts.SingleSensorTests#testGyroscopeUncalibrated_10hz",
-    "android.hardware.cts.SingleSensorTests#testGyroscopeUncalibrated_5hz",
-    "android.hardware.cts.SingleSensorTests#testGyroscopeUncalibrated_1hz",
-    "android.hardware.cts.SingleSensorTests#testGeomagneticRotationVector_fastest",
-    "android.hardware.cts.SingleSensorTests#testLinearAcceleration_200hz",
-    "android.hardware.cts.SingleSensorTests#testLinearAcceleration_100hz",
-    "android.hardware.cts.SingleSensorTests#testLinearAcceleration_50hz",
-    "android.hardware.cts.SingleSensorTests#testLinearAcceleration_25hz",
-    "android.hardware.cts.SingleSensorTests#testLinearAcceleration_15hz",
-    "android.hardware.cts.SingleSensorTests#testLinearAcceleration_10hz",
-    "android.hardware.cts.SingleSensorTests#testLinearAcceleration_5hz",
-    "android.hardware.cts.SingleSensorTests#testLinearAcceleration_1hz",
-    "android.hardware.cts.SensorTest#testSensorTimeStamps"
-  ],
-  bug: 17675466
-},
-{
-  description: "tests will soon become mandatory",
-  names: [
-    "android.hardware.cts.SensorTest#testBatchAndFlush"
-  ],
-  bug: 18958411
-},
-{
   description: "This test failed on hw decoder that doesn't output frame with the configured format.",
   names: [
     "android.media.cts.ImageReaderDecoderTest#testHwAVCDecode360pForFlexibleYuv"
diff --git a/tests/tests/bluetooth/src/android/bluetooth/cts/AdvertiseCallbackTest.java b/tests/tests/bluetooth/src/android/bluetooth/cts/AdvertiseCallbackTest.java
new file mode 100644
index 0000000..1c68022
--- /dev/null
+++ b/tests/tests/bluetooth/src/android/bluetooth/cts/AdvertiseCallbackTest.java
@@ -0,0 +1,87 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.bluetooth.cts;
+
+import android.bluetooth.le.AdvertiseCallback;
+import android.bluetooth.le.AdvertiseSettings;
+import android.test.AndroidTestCase;
+import android.test.suitebuilder.annotation.SmallTest;
+
+import java.util.HashSet;
+import java.util.Set;
+
+/**
+ * Test of {@link AdvertiseCallback}.
+ */
+public class AdvertiseCallbackTest extends AndroidTestCase {
+
+    private final static int ADVERTISE_TYPE_SUCCESS = 0;
+    private final static int ADVERTISE_TYPE_FAIL = 1;
+
+    private final MockAdvertiser mMockAdvertiser = new MockAdvertiser();
+    private final BleAdvertiseCallback mAdvertiseCallback = new BleAdvertiseCallback();
+
+    @SmallTest
+    public void testAdvertiseSuccess() {
+        mAdvertiseCallback.mAdvertiseType = ADVERTISE_TYPE_SUCCESS;
+        mMockAdvertiser.startAdvertise(mAdvertiseCallback);
+    }
+
+    @SmallTest
+    public void testAdvertiseFailure() {
+        mAdvertiseCallback.mAdvertiseType = ADVERTISE_TYPE_SUCCESS;
+        mMockAdvertiser.startAdvertise(mAdvertiseCallback);
+
+        // Second advertise with the same callback should fail.
+        mAdvertiseCallback.mAdvertiseType = ADVERTISE_TYPE_FAIL;
+        mMockAdvertiser.startAdvertise(mAdvertiseCallback);
+    }
+
+    // A mock advertiser that emulates BluetoothLeAdvertiser behavior.
+    private static class MockAdvertiser {
+        private Set<AdvertiseCallback> mCallbacks = new HashSet<>();
+
+        void startAdvertise(AdvertiseCallback callback) {
+            synchronized (mCallbacks) {
+                if (mCallbacks.contains(callback)) {
+                    callback.onStartFailure(AdvertiseCallback.ADVERTISE_FAILED_ALREADY_STARTED);
+                } else {
+                    callback.onStartSuccess(null);
+                    mCallbacks.add(callback);
+                }
+            }
+        }
+    }
+
+    private static class BleAdvertiseCallback extends AdvertiseCallback {
+        int mAdvertiseType = ADVERTISE_TYPE_SUCCESS;
+
+        @Override
+        public void onStartSuccess(AdvertiseSettings settings) {
+            if (mAdvertiseType == ADVERTISE_TYPE_FAIL) {
+                fail("advertise should fail");
+            }
+        }
+
+        @Override
+        public void onStartFailure(int error) {
+            if (mAdvertiseType == ADVERTISE_TYPE_SUCCESS) {
+                assertEquals(AdvertiseCallback.ADVERTISE_FAILED_ALREADY_STARTED, error);
+            }
+        }
+    }
+}
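
MockAdvertiser above only reproduces the ADVERTISE_FAILED_ALREADY_STARTED contract; on real hardware the same BleAdvertiseCallback would be driven by BluetoothLeAdvertiser. A minimal sketch of that usage (adapter state and permission checks are assumed and omitted; this is not part of the test):

    import android.bluetooth.BluetoothAdapter;
    import android.bluetooth.le.AdvertiseCallback;
    import android.bluetooth.le.AdvertiseData;
    import android.bluetooth.le.AdvertiseSettings;
    import android.bluetooth.le.BluetoothLeAdvertiser;

    final class AdvertiserSketch {
        // Illustrative: start LE advertising with the platform advertiser. Requires the
        // BLUETOOTH_ADMIN permission and an adapter that supports advertising.
        static void startAdvertising(AdvertiseCallback callback) {
            BluetoothLeAdvertiser advertiser =
                    BluetoothAdapter.getDefaultAdapter().getBluetoothLeAdvertiser();
            if (advertiser == null) {
                return; // LE advertising not supported, or Bluetooth is off
            }
            AdvertiseSettings settings = new AdvertiseSettings.Builder()
                    .setAdvertiseMode(AdvertiseSettings.ADVERTISE_MODE_LOW_LATENCY)
                    .setConnectable(false)
                    .build();
            AdvertiseData data = new AdvertiseData.Builder()
                    .setIncludeDeviceName(true)
                    .build();
            advertiser.startAdvertising(settings, data, callback);
            // Calling startAdvertising again with the same callback is what produces
            // onStartFailure(ADVERTISE_FAILED_ALREADY_STARTED), as emulated above.
        }
    }
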
diff --git a/tests/tests/bluetooth/src/android/bluetooth/cts/AdvertiseDataTest.java b/tests/tests/bluetooth/src/android/bluetooth/cts/AdvertiseDataTest.java
new file mode 100644
index 0000000..3f2bf52
--- /dev/null
+++ b/tests/tests/bluetooth/src/android/bluetooth/cts/AdvertiseDataTest.java
@@ -0,0 +1,175 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.bluetooth.cts;
+
+import android.bluetooth.le.AdvertiseData;
+import android.os.Parcel;
+import android.os.ParcelUuid;
+import android.test.AndroidTestCase;
+import android.test.suitebuilder.annotation.SmallTest;
+
+/**
+ * Unit test cases for {@link AdvertiseData}.
+ * <p>
+ * To run the test, use adb shell am instrument -e class 'android.bluetooth.le.AdvertiseDataTest' -w
+ * 'com.android.bluetooth.tests/android.bluetooth.BluetoothTestRunner'
+ */
+public class AdvertiseDataTest extends AndroidTestCase {
+
+    private AdvertiseData.Builder mAdvertiseDataBuilder;
+
+    @Override
+    protected void setUp() {
+        mAdvertiseDataBuilder = new AdvertiseData.Builder();
+    }
+
+    @SmallTest
+    public void testEmptyData() {
+        Parcel parcel = Parcel.obtain();
+        AdvertiseData data = mAdvertiseDataBuilder.build();
+        data.writeToParcel(parcel, 0);
+        parcel.setDataPosition(0);
+        AdvertiseData dataFromParcel =
+                AdvertiseData.CREATOR.createFromParcel(parcel);
+        assertEquals(data, dataFromParcel);
+        assertFalse(dataFromParcel.getIncludeDeviceName());
+        assertFalse(dataFromParcel.getIncludeTxPowerLevel());
+        assertEquals(0, dataFromParcel.getManufacturerSpecificData().size());
+        assertTrue(dataFromParcel.getServiceData().isEmpty());
+        assertTrue(dataFromParcel.getServiceUuids().isEmpty());
+    }
+
+    @SmallTest
+    public void testEmptyServiceUuid() {
+        Parcel parcel = Parcel.obtain();
+        AdvertiseData data = mAdvertiseDataBuilder.setIncludeDeviceName(true).build();
+        data.writeToParcel(parcel, 0);
+        parcel.setDataPosition(0);
+        AdvertiseData dataFromParcel =
+                AdvertiseData.CREATOR.createFromParcel(parcel);
+        assertEquals(data, dataFromParcel);
+        assertTrue(dataFromParcel.getIncludeDeviceName());
+        assertTrue(dataFromParcel.getServiceUuids().isEmpty());
+    }
+
+    @SmallTest
+    public void testEmptyManufacturerData() {
+        Parcel parcel = Parcel.obtain();
+        int manufacturerId = 50;
+        byte[] manufacturerData = new byte[0];
+        AdvertiseData data =
+                mAdvertiseDataBuilder.setIncludeDeviceName(true)
+                        .addManufacturerData(manufacturerId, manufacturerData).build();
+        data.writeToParcel(parcel, 0);
+        parcel.setDataPosition(0);
+        AdvertiseData dataFromParcel =
+                AdvertiseData.CREATOR.createFromParcel(parcel);
+        assertEquals(data, dataFromParcel);
+        TestUtils.assertArrayEquals(new byte[0],
+                dataFromParcel.getManufacturerSpecificData().get(manufacturerId));
+    }
+
+    @SmallTest
+    public void testEmptyServiceData() {
+        Parcel parcel = Parcel.obtain();
+        ParcelUuid uuid = ParcelUuid.fromString("0000110A-0000-1000-8000-00805F9B34FB");
+        byte[] serviceData = new byte[0];
+        AdvertiseData data =
+                mAdvertiseDataBuilder.setIncludeDeviceName(true)
+                        .addServiceData(uuid, serviceData).build();
+        data.writeToParcel(parcel, 0);
+        parcel.setDataPosition(0);
+        AdvertiseData dataFromParcel =
+                AdvertiseData.CREATOR.createFromParcel(parcel);
+        assertEquals(data, dataFromParcel);
+        TestUtils.assertArrayEquals(new byte[0], dataFromParcel.getServiceData().get(uuid));
+    }
+
+    @SmallTest
+    public void testServiceUuid() {
+        Parcel parcel = Parcel.obtain();
+        ParcelUuid uuid = ParcelUuid.fromString("0000110A-0000-1000-8000-00805F9B34FB");
+        ParcelUuid uuid2 = ParcelUuid.fromString("0000110B-0000-1000-8000-00805F9B34FB");
+
+        AdvertiseData data =
+                mAdvertiseDataBuilder.setIncludeDeviceName(true)
+                        .addServiceUuid(uuid).addServiceUuid(uuid2).build();
+        data.writeToParcel(parcel, 0);
+        parcel.setDataPosition(0);
+        AdvertiseData dataFromParcel =
+                AdvertiseData.CREATOR.createFromParcel(parcel);
+        assertEquals(data, dataFromParcel);
+        assertTrue(dataFromParcel.getServiceUuids().contains(uuid));
+        assertTrue(dataFromParcel.getServiceUuids().contains(uuid2));
+    }
+
+    @SmallTest
+    public void testManufacturerData() {
+        Parcel parcel = Parcel.obtain();
+        ParcelUuid uuid = ParcelUuid.fromString("0000110A-0000-1000-8000-00805F9B34FB");
+        ParcelUuid uuid2 = ParcelUuid.fromString("0000110B-0000-1000-8000-00805F9B34FB");
+
+        int manufacturerId = 50;
+        byte[] manufacturerData = new byte[] {
+                (byte) 0xF0, 0x00, 0x02, 0x15 };
+        AdvertiseData data =
+                mAdvertiseDataBuilder.setIncludeDeviceName(true)
+                        .addServiceUuid(uuid).addServiceUuid(uuid2)
+                        .addManufacturerData(manufacturerId, manufacturerData).build();
+
+        data.writeToParcel(parcel, 0);
+        parcel.setDataPosition(0);
+        AdvertiseData dataFromParcel =
+                AdvertiseData.CREATOR.createFromParcel(parcel);
+        assertEquals(data, dataFromParcel);
+        TestUtils.assertArrayEquals(manufacturerData,
+                dataFromParcel.getManufacturerSpecificData().get(manufacturerId));
+    }
+
+    @SmallTest
+    public void testServiceData() {
+        Parcel parcel = Parcel.obtain();
+        ParcelUuid uuid = ParcelUuid.fromString("0000110A-0000-1000-8000-00805F9B34FB");
+        byte[] serviceData = new byte[] {
+                (byte) 0xF0, 0x00, 0x02, 0x15 };
+        AdvertiseData data =
+                mAdvertiseDataBuilder.setIncludeDeviceName(true)
+                        .addServiceData(uuid, serviceData).build();
+        data.writeToParcel(parcel, 0);
+        parcel.setDataPosition(0);
+        AdvertiseData dataFromParcel =
+                AdvertiseData.CREATOR.createFromParcel(parcel);
+        assertEquals(data, dataFromParcel);
+        TestUtils.assertArrayEquals(serviceData, dataFromParcel.getServiceData().get(uuid));
+    }
+
+    @SmallTest
+    public void testIncludeTxPower() {
+        Parcel parcel = Parcel.obtain();
+        AdvertiseData data = mAdvertiseDataBuilder.setIncludeTxPowerLevel(true).build();
+        data.writeToParcel(parcel, 0);
+        parcel.setDataPosition(0);
+        AdvertiseData dataFromParcel =
+                AdvertiseData.CREATOR.createFromParcel(parcel);
+        assertTrue(dataFromParcel.getIncludeTxPowerLevel());
+    }
+
+    @SmallTest
+    public void testDescribeContents() {
+        AdvertiseData data = new AdvertiseData.Builder().build();
+        assertEquals(0, data.describeContents());
+    }
+}
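
Each test above repeats the same writeToParcel / setDataPosition(0) / CREATOR.createFromParcel sequence. A small generic round-trip helper along these lines would keep the parcel handling in one place; it is not part of this change, and the name and placement are illustrative. Usage would look like: AdvertiseData copy = parcelRoundTrip(data, AdvertiseData.CREATOR).

    import android.os.Parcel;
    import android.os.Parcelable;

    final class ParcelRoundTripSketch {
        // Illustrative: serialize a Parcelable and read it back, returning the copy.
        static <T extends Parcelable> T parcelRoundTrip(T original, Parcelable.Creator<T> creator) {
            Parcel parcel = Parcel.obtain();
            try {
                original.writeToParcel(parcel, 0);
                // Reset the read position before unparceling.
                parcel.setDataPosition(0);
                return creator.createFromParcel(parcel);
            } finally {
                parcel.recycle();
            }
        }
    }
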
diff --git a/tests/tests/bluetooth/src/android/bluetooth/cts/AdvertiseSettingsTest.java b/tests/tests/bluetooth/src/android/bluetooth/cts/AdvertiseSettingsTest.java
new file mode 100644
index 0000000..19b7c29
--- /dev/null
+++ b/tests/tests/bluetooth/src/android/bluetooth/cts/AdvertiseSettingsTest.java
@@ -0,0 +1,85 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.bluetooth.cts;
+
+import android.bluetooth.le.AdvertiseSettings;
+import android.os.Parcel;
+import android.test.AndroidTestCase;
+import android.test.suitebuilder.annotation.SmallTest;
+
+/**
+ * Test for {@link AdvertiseSettings}.
+ */
+public class AdvertiseSettingsTest extends AndroidTestCase {
+
+    @SmallTest
+    public void testDefaultSettings() {
+        AdvertiseSettings settings = new AdvertiseSettings.Builder().build();
+        assertEquals(AdvertiseSettings.ADVERTISE_MODE_LOW_POWER, settings.getMode());
+        assertEquals(AdvertiseSettings.ADVERTISE_TX_POWER_MEDIUM, settings.getTxPowerLevel());
+        assertEquals(0, settings.getTimeout());
+        assertTrue(settings.isConnectable());
+    }
+
+    @SmallTest
+    public void testDescribeContents() {
+        AdvertiseSettings settings = new AdvertiseSettings.Builder().build();
+        assertEquals(0, settings.describeContents());
+    }
+
+    @SmallTest
+    public void testReadWriteParcel() {
+        final int timeoutMillis = 60 * 1000;
+        Parcel parcel = Parcel.obtain();
+        AdvertiseSettings settings = new AdvertiseSettings.Builder()
+                .setAdvertiseMode(AdvertiseSettings.ADVERTISE_MODE_LOW_LATENCY)
+                .setConnectable(false)
+                .setTimeout(timeoutMillis)
+                .setTxPowerLevel(AdvertiseSettings.ADVERTISE_TX_POWER_MEDIUM)
+                .build();
+        settings.writeToParcel(parcel, 0);
+        parcel.setDataPosition(0);
+        AdvertiseSettings settingsFromParcel = AdvertiseSettings.CREATOR.createFromParcel(parcel);
+        assertEquals(AdvertiseSettings.ADVERTISE_MODE_LOW_LATENCY, settingsFromParcel.getMode());
+        assertEquals(AdvertiseSettings.ADVERTISE_TX_POWER_MEDIUM,
+                settingsFromParcel.getTxPowerLevel());
+        assertEquals(timeoutMillis, settingsFromParcel.getTimeout());
+        assertFalse(settingsFromParcel.isConnectable());
+    }
+
+    @SmallTest
+    public void testIllegalTimeout() {
+        AdvertiseSettings.Builder builder = new AdvertiseSettings.Builder();
+        builder.setTimeout(0).build();
+        builder.setTimeout(180 * 1000).build();
+        // Maximum timeout is 3 minutes.
+        try {
+            builder.setTimeout(180 * 1000 + 1).build();
+            fail("should not allow setting timeout to more than 3 minutes");
+        } catch (IllegalArgumentException e) {
+            // nothing to do.
+        }
+        // A negative timeout is not allowed.
+        try {
+            builder.setTimeout(-1).build();
+            fail("should not allow a negative timeout");
+        } catch (IllegalArgumentException e) {
+            // nothing to do.
+        }
+
+    }
+}
diff --git a/tests/tests/bluetooth/src/android/bluetooth/cts/ScanCallbackTest.java b/tests/tests/bluetooth/src/android/bluetooth/cts/ScanCallbackTest.java
new file mode 100644
index 0000000..f447f10
--- /dev/null
+++ b/tests/tests/bluetooth/src/android/bluetooth/cts/ScanCallbackTest.java
@@ -0,0 +1,111 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.bluetooth.cts;
+
+import android.bluetooth.le.ScanCallback;
+import android.bluetooth.le.ScanResult;
+import android.bluetooth.le.ScanSettings;
+import android.test.AndroidTestCase;
+import android.test.suitebuilder.annotation.SmallTest;
+
+import java.util.HashSet;
+import java.util.List;
+import java.util.Set;
+
+/**
+ * Test cases for {@link ScanCallback}.
+ */
+public class ScanCallbackTest extends AndroidTestCase {
+
+    // Scan types are used to determine which callback method is expected.
+    private final static int SCAN_TYPE_SUCCESS = 0;
+    private final static int SCAN_TYPE_FAIL = 1;
+    private final static int SCAN_TYPE_BATCH = 2;
+
+    private MockScanner mMockScanner = new MockScanner();
+    private BleScanCallback mMockScanCallback = new BleScanCallback();
+
+    @SmallTest
+    public void testScanSuccess() {
+        mMockScanCallback.mScanType = SCAN_TYPE_SUCCESS;
+        mMockScanner.startScan(new ScanSettings.Builder().build(), mMockScanCallback);
+    }
+
+    @SmallTest
+    public void testBatchScans() {
+        ScanSettings settings = new ScanSettings.Builder().setReportDelay(1000).build();
+        mMockScanCallback.mScanType = SCAN_TYPE_BATCH;
+        mMockScanner.startScan(settings, mMockScanCallback);
+    }
+
+    @SmallTest
+    public void testScanFail() {
+        ScanSettings settings = new ScanSettings.Builder().build();
+        // The first scan succeeds.
+        mMockScanCallback.mScanType = SCAN_TYPE_SUCCESS;
+        mMockScanner.startScan(settings, mMockScanCallback);
+        // A second scan with the same callback should fail.
+        mMockScanCallback.mScanType = SCAN_TYPE_FAIL;
+        mMockScanner.startScan(settings, mMockScanCallback);
+    }
+
+    // A mock scanner that emulates BLE scanner behavior.
+    private static class MockScanner {
+        private Set<ScanCallback> mCallbacks = new HashSet<>();
+
+        void startScan(ScanSettings settings, ScanCallback callback) {
+            synchronized (mCallbacks) {
+                if (mCallbacks.contains(callback)) {
+                    callback.onScanFailed(ScanCallback.SCAN_FAILED_ALREADY_STARTED);
+                    return;
+                }
+                mCallbacks.add(callback);
+                if (settings.getReportDelayMillis() == 0) {
+                    callback.onScanResult(0, null);
+                } else {
+                    callback.onBatchScanResults(null);
+                }
+            }
+        }
+    }
+
+    private static class BleScanCallback extends ScanCallback {
+        int mScanType = SCAN_TYPE_SUCCESS;
+
+        @Override
+        public void onScanResult(int callbackType, ScanResult result) {
+            if (mScanType != SCAN_TYPE_SUCCESS) {
+                fail("scan should fail");
+            }
+        }
+
+        @Override
+        public void onBatchScanResults(List<ScanResult> results) {
+            if (mScanType != SCAN_TYPE_BATCH) {
+                fail("not a batch scan");
+            }
+        }
+
+        @Override
+        public void onScanFailed(int errorCode) {
+            if (mScanType != SCAN_TYPE_FAIL) {
+                fail("scan should not fail");
+            }
+        }
+
+    }
+}
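
As with the advertiser test, MockScanner only reproduces SCAN_FAILED_ALREADY_STARTED and the batch-versus-single dispatch implied by the report delay; on hardware the same BleScanCallback would be driven by BluetoothLeScanner. A minimal sketch of that usage (permission checks and scan duration handling are assumed and omitted):

    import android.bluetooth.BluetoothAdapter;
    import android.bluetooth.le.BluetoothLeScanner;
    import android.bluetooth.le.ScanCallback;
    import android.bluetooth.le.ScanSettings;

    final class ScannerSketch {
        // Illustrative: start and later stop a real LE scan. A non-zero report delay
        // routes results to onBatchScanResults instead of onScanResult.
        static void startScan(ScanCallback callback, long reportDelayMillis) {
            BluetoothLeScanner scanner =
                    BluetoothAdapter.getDefaultAdapter().getBluetoothLeScanner();
            if (scanner == null) {
                return; // Bluetooth is off or LE scanning is unsupported
            }
            ScanSettings settings = new ScanSettings.Builder()
                    .setScanMode(ScanSettings.SCAN_MODE_LOW_LATENCY)
                    .setReportDelay(reportDelayMillis)
                    .build();
            scanner.startScan(null /* no filters */, settings, callback);
        }

        static void stopScan(ScanCallback callback) {
            BluetoothLeScanner scanner =
                    BluetoothAdapter.getDefaultAdapter().getBluetoothLeScanner();
            if (scanner != null) {
                scanner.stopScan(callback);
            }
        }
    }
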
diff --git a/tests/tests/bluetooth/src/android/bluetooth/cts/ScanFilterTest.java b/tests/tests/bluetooth/src/android/bluetooth/cts/ScanFilterTest.java
new file mode 100644
index 0000000..16e1413
--- /dev/null
+++ b/tests/tests/bluetooth/src/android/bluetooth/cts/ScanFilterTest.java
@@ -0,0 +1,238 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.bluetooth.cts;
+
+import android.bluetooth.BluetoothAdapter;
+import android.bluetooth.BluetoothDevice;
+import android.bluetooth.le.ScanFilter;
+import android.bluetooth.le.ScanResult;
+import android.os.Parcel;
+import android.os.ParcelUuid;
+import android.test.AndroidTestCase;
+import android.test.suitebuilder.annotation.SmallTest;
+
+/**
+ * Unit test cases for Bluetooth LE scan filters.
+ * <p>
+ * To run this test, use adb shell am instrument -e class 'android.bluetooth.ScanFilterTest' -w
+ * 'com.android.bluetooth.tests/android.bluetooth.BluetoothTestRunner'
+ */
+public class ScanFilterTest extends AndroidTestCase {
+
+    private static final String LOCAL_NAME = "Ped";
+    private static final String DEVICE_MAC = "01:02:03:04:05:AB";
+    private static final String UUID1 = "0000110a-0000-1000-8000-00805f9b34fb";
+    private static final String UUID2 = "0000110b-0000-1000-8000-00805f9b34fb";
+    private static final String UUID3 = "0000110c-0000-1000-8000-00805f9b34fb";
+
+    private ScanResult mScanResult;
+    private ScanFilter.Builder mFilterBuilder;
+
+    @Override
+    protected void setUp() {
+        byte[] scanRecord = new byte[] {
+                0x02, 0x01, 0x1a, // advertising flags
+                0x05, 0x02, 0x0b, 0x11, 0x0a, 0x11, // 16 bit service uuids
+                0x04, 0x09, 0x50, 0x65, 0x64, // name
+                0x02, 0x0A, (byte) 0xec, // tx power level
+                0x05, 0x16, 0x0b, 0x11, 0x50, 0x64, // service data
+                0x05, (byte) 0xff, (byte) 0xe0, 0x00, 0x02, 0x15, // manufacturer specific data
+                0x03, 0x50, 0x01, 0x02, // an unknown data type won't cause trouble
+        };
+
+        BluetoothAdapter adapter = BluetoothAdapter.getDefaultAdapter();
+        BluetoothDevice device = adapter.getRemoteDevice(DEVICE_MAC);
+        mScanResult = new ScanResult(device, TestUtils.parseScanRecord(scanRecord),
+                -10, 1397545200000000L);
+        mFilterBuilder = new ScanFilter.Builder();
+    }
+
+    @SmallTest
+    public void testSetNameFilter() {
+        ScanFilter filter = mFilterBuilder.setDeviceName(LOCAL_NAME).build();
+        assertEquals(LOCAL_NAME, filter.getDeviceName());
+        assertTrue("device name filter fails", filter.matches(mScanResult));
+
+        filter = mFilterBuilder.setDeviceName("Pem").build();
+        assertFalse("device name filter fails", filter.matches(mScanResult));
+    }
+
+    @SmallTest
+    public void testDeviceAddressFilter() {
+        ScanFilter filter = mFilterBuilder.setDeviceAddress(DEVICE_MAC).build();
+        assertEquals(DEVICE_MAC, filter.getDeviceAddress());
+        assertTrue("device filter fails", filter.matches(mScanResult));
+
+        filter = mFilterBuilder.setDeviceAddress("11:22:33:44:55:66").build();
+        assertFalse("device filter fails", filter.matches(mScanResult));
+    }
+
+    @SmallTest
+    public void testSetServiceUuidFilter() {
+        ScanFilter filter = mFilterBuilder.setServiceUuid(
+                ParcelUuid.fromString(UUID1)).build();
+        assertEquals(UUID1, filter.getServiceUuid().toString());
+        assertTrue("uuid filter fails", filter.matches(mScanResult));
+
+        filter = mFilterBuilder.setServiceUuid(
+                ParcelUuid.fromString(UUID3)).build();
+        assertEquals(UUID3, filter.getServiceUuid().toString());
+        assertFalse("uuid filter fails", filter.matches(mScanResult));
+
+        ParcelUuid mask = ParcelUuid.fromString("FFFFFFF0-FFFF-FFFF-FFFF-FFFFFFFFFFFF");
+        filter = mFilterBuilder
+                .setServiceUuid(ParcelUuid.fromString(UUID3),
+                        mask)
+                .build();
+        assertEquals(mask.toString(), filter.getServiceUuidMask().toString());
+        assertTrue("uuid filter fails", filter.matches(mScanResult));
+    }
+
+    @SmallTest
+    public void testSetServiceDataFilter() {
+        byte[] serviceData = new byte[] {
+                0x50, 0x64 };
+        ParcelUuid serviceDataUuid = ParcelUuid.fromString(UUID2);
+        ScanFilter filter = mFilterBuilder.setServiceData(serviceDataUuid, serviceData).build();
+        assertEquals(serviceDataUuid, filter.getServiceDataUuid());
+        assertTrue("service data filter fails", filter.matches(mScanResult));
+
+        byte[] emptyData = new byte[0];
+        filter = mFilterBuilder.setServiceData(serviceDataUuid, emptyData).build();
+        assertTrue("service data filter fails", filter.matches(mScanResult));
+
+        byte[] prefixData = new byte[] {
+                0x50 };
+        filter = mFilterBuilder.setServiceData(serviceDataUuid, prefixData).build();
+        assertTrue("service data filter fails", filter.matches(mScanResult));
+
+        byte[] nonMatchData = new byte[] {
+                0x51, 0x64 };
+        byte[] mask = new byte[] {
+                (byte) 0x00, (byte) 0xFF };
+        filter = mFilterBuilder.setServiceData(serviceDataUuid, nonMatchData, mask).build();
+        assertEquals(nonMatchData, filter.getServiceData());
+        assertEquals(mask, filter.getServiceDataMask());
+        assertTrue("partial service data filter fails", filter.matches(mScanResult));
+
+        filter = mFilterBuilder.setServiceData(serviceDataUuid, nonMatchData).build();
+        assertFalse("service data filter fails", filter.matches(mScanResult));
+    }
+
+    @SmallTest
+    public void testManufacturerSpecificData() {
+        byte[] manufacturerData = new byte[] {
+                0x02, 0x15 };
+        int manufacturerId = 0xE0;
+        ScanFilter filter =
+                mFilterBuilder.setManufacturerData(manufacturerId, manufacturerData).build();
+        assertEquals(manufacturerId, filter.getManufacturerId());
+        assertEquals(manufacturerData, filter.getManufacturerData());
+        assertTrue("manufacturer data filter fails", filter.matches(mScanResult));
+
+        byte[] emptyData = new byte[0];
+        filter = mFilterBuilder.setManufacturerData(manufacturerId, emptyData).build();
+        assertTrue("manufacturer data filter fails", filter.matches(mScanResult));
+
+        byte[] prefixData = new byte[] {
+                0x02 };
+        filter = mFilterBuilder.setManufacturerData(manufacturerId, prefixData).build();
+        assertTrue("manufacturer data filter fails", filter.matches(mScanResult));
+
+        // Test data mask
+        byte[] nonMatchData = new byte[] {
+                0x02, 0x14 };
+        filter = mFilterBuilder.setManufacturerData(manufacturerId, nonMatchData).build();
+        assertFalse("manufacturer data filter fails", filter.matches(mScanResult));
+        byte[] mask = new byte[] {
+                (byte) 0xFF, (byte) 0x00
+        };
+        filter = mFilterBuilder.setManufacturerData(manufacturerId, nonMatchData, mask).build();
+        assertEquals(manufacturerId, filter.getManufacturerId());
+        assertEquals(nonMatchData, filter.getManufacturerData());
+        assertEquals(mask, filter.getManufacturerDataMask());
+        assertTrue("partial setManufacturerData filter fails", filter.matches(mScanResult));
+    }
+
+    @SmallTest
+    public void testReadWriteParcel() {
+        ScanFilter filter = mFilterBuilder.build();
+        testReadWriteParcelForFilter(filter);
+
+        filter = mFilterBuilder.setDeviceName(LOCAL_NAME).build();
+        testReadWriteParcelForFilter(filter);
+
+        filter = mFilterBuilder.setDeviceAddress("11:22:33:44:55:66").build();
+        testReadWriteParcelForFilter(filter);
+
+        filter = mFilterBuilder.setServiceUuid(
+                ParcelUuid.fromString(UUID3)).build();
+        testReadWriteParcelForFilter(filter);
+
+        filter = mFilterBuilder.setServiceUuid(
+                ParcelUuid.fromString(UUID3),
+                ParcelUuid.fromString("FFFFFFF0-FFFF-FFFF-FFFF-FFFFFFFFFFFF")).build();
+        testReadWriteParcelForFilter(filter);
+
+        byte[] serviceData = new byte[] {
+                0x50, 0x64 };
+
+        ParcelUuid serviceDataUuid = ParcelUuid.fromString(UUID2);
+        filter = mFilterBuilder.setServiceData(serviceDataUuid, serviceData).build();
+        testReadWriteParcelForFilter(filter);
+
+        filter = mFilterBuilder.setServiceData(serviceDataUuid, new byte[0]).build();
+        testReadWriteParcelForFilter(filter);
+
+        byte[] serviceDataMask = new byte[] {
+                (byte) 0xFF, (byte) 0xFF };
+        filter = mFilterBuilder.setServiceData(serviceDataUuid, serviceData, serviceDataMask)
+                .build();
+        testReadWriteParcelForFilter(filter);
+
+        byte[] manufacturerData = new byte[] {
+                0x02, 0x15 };
+        int manufacturerId = 0xE0;
+        filter = mFilterBuilder.setManufacturerData(manufacturerId, manufacturerData).build();
+        testReadWriteParcelForFilter(filter);
+
+        filter = mFilterBuilder.setServiceData(serviceDataUuid, new byte[0]).build();
+        testReadWriteParcelForFilter(filter);
+
+        byte[] manufacturerDataMask = new byte[] {
+                (byte) 0xFF, (byte) 0xFF
+        };
+        filter = mFilterBuilder.setManufacturerData(manufacturerId, manufacturerData,
+                manufacturerDataMask).build();
+        testReadWriteParcelForFilter(filter);
+    }
+
+    @SmallTest
+    public void testDescribeContents() {
+        final int expected = 0;
+        assertEquals(expected, new ScanFilter.Builder().build().describeContents());
+    }
+
+    private void testReadWriteParcelForFilter(ScanFilter filter) {
+        Parcel parcel = Parcel.obtain();
+        filter.writeToParcel(parcel, 0);
+        parcel.setDataPosition(0);
+        ScanFilter filterFromParcel =
+                ScanFilter.CREATOR.createFromParcel(parcel);
+        assertEquals(filter, filterFromParcel);
+    }
+}
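
The partial-match assertions above depend on how service-data and manufacturer-data masks are applied: a data byte only takes part in the comparison where the corresponding mask byte has bits set, so a 0x00 mask byte makes that position a don't-care. A minimal sketch of that rule (an illustration of the behavior the tests expect, not the platform's implementation):

    final class MaskMatchSketch {
        // Illustrative: masked prefix comparison as exercised by the partial-match tests.
        static boolean maskedMatch(byte[] filterData, byte[] mask, byte[] scanData) {
            if (scanData == null || scanData.length < filterData.length) {
                return false;
            }
            for (int i = 0; i < filterData.length; i++) {
                if ((filterData[i] & mask[i]) != (scanData[i] & mask[i])) {
                    return false; // mismatch in a byte the mask cares about
                }
            }
            return true;
        }
    }
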
diff --git a/tests/tests/bluetooth/src/android/bluetooth/cts/ScanRecordTest.java b/tests/tests/bluetooth/src/android/bluetooth/cts/ScanRecordTest.java
new file mode 100644
index 0000000..30ad06f
--- /dev/null
+++ b/tests/tests/bluetooth/src/android/bluetooth/cts/ScanRecordTest.java
@@ -0,0 +1,67 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.bluetooth.cts;
+
+import android.bluetooth.le.ScanRecord;
+import android.os.ParcelUuid;
+import android.test.AndroidTestCase;
+import android.test.suitebuilder.annotation.SmallTest;
+
+/**
+ * Unit test cases for {@link ScanRecord}.
+ * <p>
+ * To run this test, use adb shell am instrument -e class 'android.bluetooth.ScanRecordTest' -w
+ * 'com.android.bluetooth.tests/android.bluetooth.BluetoothTestRunner'
+ */
+public class ScanRecordTest extends AndroidTestCase {
+
+    @SmallTest
+    public void testParser() {
+        byte[] scanRecord = new byte[] {
+                0x02, 0x01, 0x1a, // advertising flags
+                0x05, 0x02, 0x0b, 0x11, 0x0a, 0x11, // 16 bit service uuids
+                0x04, 0x09, 0x50, 0x65, 0x64, // name
+                0x02, 0x0A, (byte) 0xec, // tx power level
+                0x05, 0x16, 0x0b, 0x11, 0x50, 0x64, // service data
+                0x05, (byte) 0xff, (byte) 0xe0, 0x00, 0x02, 0x15, // manufacturer specific data
+                0x03, 0x50, 0x01, 0x02, // an unknown data type won't cause trouble
+        };
+        ScanRecord data = TestUtils.parseScanRecord(scanRecord);
+        assertEquals(0x1a, data.getAdvertiseFlags());
+        ParcelUuid uuid1 = ParcelUuid.fromString("0000110A-0000-1000-8000-00805F9B34FB");
+        ParcelUuid uuid2 = ParcelUuid.fromString("0000110B-0000-1000-8000-00805F9B34FB");
+        assertTrue(data.getServiceUuids().contains(uuid1));
+        assertTrue(data.getServiceUuids().contains(uuid2));
+
+        assertEquals("Ped", data.getDeviceName());
+        assertEquals(-20, data.getTxPowerLevel());
+
+        assertTrue(data.getManufacturerSpecificData().get(0x00E0) != null);
+
+        final byte[] manufacturerData = new byte[] {
+                0x02, 0x15 };
+        TestUtils.assertArrayEquals(manufacturerData,
+                data.getManufacturerSpecificData().get(0x00E0));
+        TestUtils.assertArrayEquals(manufacturerData, data.getManufacturerSpecificData(0x00E0));
+
+        assertTrue(data.getServiceData().containsKey(uuid2));
+        final byte[] serviceData = new byte[] {
+                0x50, 0x64 };
+        TestUtils.assertArrayEquals(serviceData, data.getServiceData().get(uuid2));
+        TestUtils.assertArrayEquals(serviceData, data.getServiceData(uuid2));
+    }
+}
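
The hand-built scanRecord above follows the standard BLE advertising-data layout that the inline comments spell out: a sequence of [length, AD type, payload] structures, where the length byte covers the type byte plus the payload. The real parsing is done by the hidden ScanRecord.parseFromBytes (reached via reflection in TestUtils); the sketch below just walks that layout for illustration:

    final class AdStructureSketch {
        // Illustrative: iterate the AD structures in a raw advertising record and
        // report each type and payload size.
        static void dumpAdStructures(byte[] scanRecord) {
            int i = 0;
            while (i < scanRecord.length) {
                int length = scanRecord[i] & 0xFF;
                if (length == 0 || i + length >= scanRecord.length) {
                    break; // zero length or truncated record ends the significant part
                }
                int type = scanRecord[i + 1] & 0xFF;
                int payloadLength = length - 1; // the length byte counts the type byte too
                System.out.printf("AD type=0x%02X, payload=%d bytes%n", type, payloadLength);
                i += 1 + length; // skip the length byte plus the structure it describes
            }
        }
    }
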
diff --git a/tests/tests/bluetooth/src/android/bluetooth/cts/ScanResultTest.java b/tests/tests/bluetooth/src/android/bluetooth/cts/ScanResultTest.java
new file mode 100644
index 0000000..91da8c3
--- /dev/null
+++ b/tests/tests/bluetooth/src/android/bluetooth/cts/ScanResultTest.java
@@ -0,0 +1,68 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.bluetooth.cts;
+
+import android.bluetooth.BluetoothAdapter;
+import android.bluetooth.BluetoothDevice;
+import android.bluetooth.le.ScanResult;
+import android.os.Parcel;
+import android.test.AndroidTestCase;
+import android.test.suitebuilder.annotation.SmallTest;
+
+/**
+ * Unit test cases for Bluetooth LE scans.
+ * <p>
+ * To run this test, use adb shell am instrument -e class 'android.bluetooth.ScanResultTest' -w
+ * 'com.android.bluetooth.tests/android.bluetooth.BluetoothTestRunner'
+ */
+public class ScanResultTest extends AndroidTestCase {
+    private static final String DEVICE_ADDRESS = "01:02:03:04:05:06";
+    private static final byte[] SCAN_RECORD = new byte[] {
+            1, 2, 3 };
+    private static final int RSSI = -10;
+    private static final long TIMESTAMP_NANOS = 10000L;
+
+    /**
+     * Test read and write parcel of ScanResult
+     */
+    @SmallTest
+    public void testScanResultParceling() {
+        BluetoothDevice device =
+                BluetoothAdapter.getDefaultAdapter().getRemoteDevice(DEVICE_ADDRESS);
+        ScanResult result = new ScanResult(device, TestUtils.parseScanRecord(SCAN_RECORD), RSSI,
+                TIMESTAMP_NANOS);
+        Parcel parcel = Parcel.obtain();
+        result.writeToParcel(parcel, 0);
+        // Need to reset parcel data position to the beginning.
+        parcel.setDataPosition(0);
+        ScanResult resultFromParcel = ScanResult.CREATOR.createFromParcel(parcel);
+
+        assertEquals(RSSI, resultFromParcel.getRssi());
+        assertEquals(TIMESTAMP_NANOS, resultFromParcel.getTimestampNanos());
+        assertEquals(device, resultFromParcel.getDevice());
+        TestUtils.assertArrayEquals(SCAN_RECORD, resultFromParcel.getScanRecord().getBytes());
+    }
+
+    @SmallTest
+    public void testDescribeContents() {
+        BluetoothDevice device =
+                BluetoothAdapter.getDefaultAdapter().getRemoteDevice(DEVICE_ADDRESS);
+        ScanResult result = new ScanResult(device, TestUtils.parseScanRecord(SCAN_RECORD), RSSI,
+                TIMESTAMP_NANOS);
+        assertEquals(0, result.describeContents());
+    }
+}
diff --git a/tests/tests/bluetooth/src/android/bluetooth/cts/ScanSettingsTest.java b/tests/tests/bluetooth/src/android/bluetooth/cts/ScanSettingsTest.java
new file mode 100644
index 0000000..7033c3c
--- /dev/null
+++ b/tests/tests/bluetooth/src/android/bluetooth/cts/ScanSettingsTest.java
@@ -0,0 +1,60 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.bluetooth.cts;
+
+import android.bluetooth.le.ScanSettings;
+import android.os.Parcel;
+import android.test.AndroidTestCase;
+import android.test.suitebuilder.annotation.SmallTest;
+
+/**
+ * Test for Bluetooth LE {@link ScanSettings}.
+ */
+public class ScanSettingsTest extends AndroidTestCase {
+
+    @SmallTest
+    public void testDefaultSettings() {
+        ScanSettings settings = new ScanSettings.Builder().build();
+        assertEquals(ScanSettings.CALLBACK_TYPE_ALL_MATCHES, settings.getCallbackType());
+        assertEquals(ScanSettings.SCAN_MODE_LOW_POWER, settings.getScanMode());
+        assertEquals(0, settings.getScanResultType());
+        assertEquals(0, settings.getReportDelayMillis());
+    }
+
+    @SmallTest
+    public void testDescribeContents() {
+        ScanSettings settings = new ScanSettings.Builder().build();
+        assertEquals(0, settings.describeContents());
+    }
+
+    @SmallTest
+    public void testReadWriteParcel() {
+        final long reportDelayMillis = 60 * 1000;
+        Parcel parcel = Parcel.obtain();
+        ScanSettings settings = new ScanSettings.Builder()
+                .setReportDelay(reportDelayMillis)
+                .setScanMode(ScanSettings.SCAN_MODE_LOW_LATENCY)
+                .setMatchMode(ScanSettings.MATCH_MODE_AGGRESSIVE)
+                .setNumOfMatches(ScanSettings.MATCH_NUM_MAX_ADVERTISEMENT)
+                .build();
+        settings.writeToParcel(parcel, 0);
+        parcel.setDataPosition(0);
+        ScanSettings settingsFromParcel = ScanSettings.CREATOR.createFromParcel(parcel);
+        assertEquals(reportDelayMillis, settingsFromParcel.getReportDelayMillis());
+        assertEquals(ScanSettings.SCAN_MODE_LOW_LATENCY, settings.getScanMode());
+    }
+}
diff --git a/tests/tests/bluetooth/src/android/bluetooth/cts/TestUtils.java b/tests/tests/bluetooth/src/android/bluetooth/cts/TestUtils.java
new file mode 100644
index 0000000..7c5db9e
--- /dev/null
+++ b/tests/tests/bluetooth/src/android/bluetooth/cts/TestUtils.java
@@ -0,0 +1,53 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.bluetooth.cts;
+
+import android.bluetooth.le.ScanRecord;
+
+import junit.framework.Assert;
+
+import java.lang.reflect.InvocationTargetException;
+import java.lang.reflect.Method;
+import java.util.Arrays;
+
+/**
+ * Utility class for Bluetooth CTS test.
+ */
+class TestUtils {
+
+    /**
+     * Utility method to call hidden ScanRecord.parseFromBytes method.
+     */
+    static ScanRecord parseScanRecord(byte[] bytes) {
+        Class<?> scanRecordClass = ScanRecord.class;
+        try {
+            Method method = scanRecordClass.getDeclaredMethod("parseFromBytes", byte[].class);
+            return (ScanRecord)method.invoke(null, bytes);
+        } catch (NoSuchMethodException | IllegalAccessException | IllegalArgumentException
+                | InvocationTargetException e) {
+            return null;
+        }
+    }
+
+    /** Assert that two byte arrays are equal. */
+    static void assertArrayEquals(byte[] expected, byte[] actual) {
+        if (!Arrays.equals(expected, actual)) {
+            Assert.fail("expected:<" + Arrays.toString(expected) +
+                    "> but was:<" + Arrays.toString(actual) + ">");
+        }
+    }
+}
diff --git a/tests/tests/content/res/values-ldltr/styles.xml b/tests/tests/content/res/values-ldltr/styles.xml
index e17290e..063fc4f 100644
--- a/tests/tests/content/res/values-ldltr/styles.xml
+++ b/tests/tests/content/res/values-ldltr/styles.xml
@@ -18,4 +18,7 @@
     <style name="Theme_LayoutDirectionDependent">
         <item name="themeInteger">111</item>
     </style>
+    <style name="Theme_LayoutIsRTL">
+        <item name="themeBoolean">false</item>
+    </style>
 </resources>
diff --git a/tests/tests/content/res/values-ldrtl/styles.xml b/tests/tests/content/res/values-ldrtl/styles.xml
new file mode 100644
index 0000000..c586192
--- /dev/null
+++ b/tests/tests/content/res/values-ldrtl/styles.xml
@@ -0,0 +1,21 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright (C) 2015 The Android Open Source Project
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+          http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+-->
+
+<resources>
+    <style name="Theme_LayoutIsRTL">
+        <item name="themeBoolean">true</item>
+    </style>
+</resources>
diff --git a/tests/tests/content/src/android/content/res/cts/Resources_ThemeTest.java b/tests/tests/content/src/android/content/res/cts/Resources_ThemeTest.java
index ba98dc4..6d1c2e4 100644
--- a/tests/tests/content/src/android/content/res/cts/Resources_ThemeTest.java
+++ b/tests/tests/content/src/android/content/res/cts/Resources_ThemeTest.java
@@ -19,6 +19,7 @@
 import org.xmlpull.v1.XmlPullParser;
 
 import android.content.pm.ActivityInfo;
+import android.content.res.Configuration;
 import android.content.res.Resources;
 import android.content.res.TypedArray;
 import android.content.res.Resources.Theme;
@@ -26,9 +27,12 @@
 import android.util.AttributeSet;
 import android.util.TypedValue;
 import android.util.Xml;
+import android.view.View;
 
 import com.android.cts.content.R;
 
+import java.util.Locale;
+
 
 public class Resources_ThemeTest extends AndroidTestCase {
 
@@ -100,4 +104,30 @@
                 theme.getChangingConfigurations());
     }
 
+    public void testRebase() {
+        Resources res = getContext().getResources();
+        Configuration config = res.getConfiguration();
+        config.setLocale(Locale.ENGLISH);
+        assertEquals("Theme will be created in LTR config",
+                View.LAYOUT_DIRECTION_LTR, config.getLayoutDirection());
+
+        Resources.Theme theme = res.newTheme();
+        theme.applyStyle(R.style.Theme_LayoutIsRTL, true);
+
+        TypedArray t = theme.obtainStyledAttributes(new int[] { R.attr.themeBoolean });
+        assertEquals("Theme was created in LTR config", false, t.getBoolean(0, true));
+        t.recycle();
+
+        config.setLocale(new Locale("iw"));
+        res.updateConfiguration(config, null);
+
+        assertEquals("Theme will be rebased in RTL config",
+                View.LAYOUT_DIRECTION_RTL, config.getLayoutDirection());
+
+        theme.rebase();
+
+        t = theme.obtainStyledAttributes(new int[] { R.attr.themeBoolean });
+        assertEquals("Theme was rebased in RTL config", true, t.getBoolean(0, false));
+        t.recycle();
+    }
 }
diff --git a/tests/tests/deqp/egl-temporary-failures.txt b/tests/tests/deqp/egl-temporary-failures.txt
new file mode 100644
index 0000000..1a3bb94
--- /dev/null
+++ b/tests/tests/deqp/egl-temporary-failures.txt
@@ -0,0 +1,780 @@
+dEQP-EGL.functional.sharing.gles2.multithread.simple.textures#gen_delete
+dEQP-EGL.functional.sharing.gles2.multithread.simple.textures#teximage2d
+dEQP-EGL.functional.sharing.gles2.multithread.simple.textures#teximage2d_texsubimage2d
+dEQP-EGL.functional.sharing.gles2.multithread.simple.textures#teximage2d_render
+dEQP-EGL.functional.sharing.gles2.multithread.simple.textures#teximage2d_texsubimage2d_render
+dEQP-EGL.functional.sharing.gles2.multithread.simple.images.texture_source#create_destroy
+dEQP-EGL.functional.sharing.gles2.multithread.simple.images.texture_source#create_texture
+dEQP-EGL.functional.sharing.gles2.multithread.simple.images.texture_source#texsubimage2d
+dEQP-EGL.functional.sharing.gles2.multithread.simple.images.texture_source#teximage2d
+dEQP-EGL.functional.sharing.gles2.multithread.simple.images.texture_source#create_texture_render
+dEQP-EGL.functional.sharing.gles2.multithread.simple.images.texture_source#texsubimage2d_render
+dEQP-EGL.functional.sharing.gles2.multithread.simple.images.texture_source#teximage2d_render
+dEQP-EGL.functional.sharing.gles2.multithread.random.textures.gen_delete#0
+dEQP-EGL.functional.sharing.gles2.multithread.random.textures.gen_delete#1
+dEQP-EGL.functional.sharing.gles2.multithread.random.textures.gen_delete#2
+dEQP-EGL.functional.sharing.gles2.multithread.random.textures.gen_delete#3
+dEQP-EGL.functional.sharing.gles2.multithread.random.textures.gen_delete#5
+dEQP-EGL.functional.sharing.gles2.multithread.random.textures.gen_delete#6
+dEQP-EGL.functional.sharing.gles2.multithread.random.textures.gen_delete#7
+dEQP-EGL.functional.sharing.gles2.multithread.random.textures.gen_delete#9
+dEQP-EGL.functional.sharing.gles2.multithread.random.textures.gen_delete#10
+dEQP-EGL.functional.sharing.gles2.multithread.random.textures.gen_delete#11
+dEQP-EGL.functional.sharing.gles2.multithread.random.textures.gen_delete#12
+dEQP-EGL.functional.sharing.gles2.multithread.random.textures.gen_delete#13
+dEQP-EGL.functional.sharing.gles2.multithread.random.textures.gen_delete#14
+dEQP-EGL.functional.sharing.gles2.multithread.random.textures.gen_delete#15
+dEQP-EGL.functional.sharing.gles2.multithread.random.textures.gen_delete#16
+dEQP-EGL.functional.sharing.gles2.multithread.random.textures.gen_delete#17
+dEQP-EGL.functional.sharing.gles2.multithread.random.textures.gen_delete#18
+dEQP-EGL.functional.sharing.gles2.multithread.random.textures.teximage2d#0
+dEQP-EGL.functional.sharing.gles2.multithread.random.textures.teximage2d#1
+dEQP-EGL.functional.sharing.gles2.multithread.random.textures.teximage2d#3
+dEQP-EGL.functional.sharing.gles2.multithread.random.textures.teximage2d#4
+dEQP-EGL.functional.sharing.gles2.multithread.random.textures.teximage2d#5
+dEQP-EGL.functional.sharing.gles2.multithread.random.textures.teximage2d#6
+dEQP-EGL.functional.sharing.gles2.multithread.random.textures.teximage2d#7
+dEQP-EGL.functional.sharing.gles2.multithread.random.textures.teximage2d#8
+dEQP-EGL.functional.sharing.gles2.multithread.random.textures.teximage2d#9
+dEQP-EGL.functional.sharing.gles2.multithread.random.textures.teximage2d#10
+dEQP-EGL.functional.sharing.gles2.multithread.random.textures.teximage2d#11
+dEQP-EGL.functional.sharing.gles2.multithread.random.textures.teximage2d#13
+dEQP-EGL.functional.sharing.gles2.multithread.random.textures.teximage2d#14
+dEQP-EGL.functional.sharing.gles2.multithread.random.textures.teximage2d#15
+dEQP-EGL.functional.sharing.gles2.multithread.random.textures.teximage2d#16
+dEQP-EGL.functional.sharing.gles2.multithread.random.textures.teximage2d#17
+dEQP-EGL.functional.sharing.gles2.multithread.random.textures.teximage2d#18
+dEQP-EGL.functional.sharing.gles2.multithread.random.textures.teximage2d#19
+dEQP-EGL.functional.sharing.gles2.multithread.random.textures.texsubimage2d#0
+dEQP-EGL.functional.sharing.gles2.multithread.random.textures.texsubimage2d#1
+dEQP-EGL.functional.sharing.gles2.multithread.random.textures.texsubimage2d#2
+dEQP-EGL.functional.sharing.gles2.multithread.random.textures.texsubimage2d#3
+dEQP-EGL.functional.sharing.gles2.multithread.random.textures.texsubimage2d#4
+dEQP-EGL.functional.sharing.gles2.multithread.random.textures.texsubimage2d#5
+dEQP-EGL.functional.sharing.gles2.multithread.random.textures.texsubimage2d#6
+dEQP-EGL.functional.sharing.gles2.multithread.random.textures.texsubimage2d#8
+dEQP-EGL.functional.sharing.gles2.multithread.random.textures.texsubimage2d#9
+dEQP-EGL.functional.sharing.gles2.multithread.random.textures.texsubimage2d#10
+dEQP-EGL.functional.sharing.gles2.multithread.random.textures.texsubimage2d#11
+dEQP-EGL.functional.sharing.gles2.multithread.random.textures.texsubimage2d#12
+dEQP-EGL.functional.sharing.gles2.multithread.random.textures.texsubimage2d#13
+dEQP-EGL.functional.sharing.gles2.multithread.random.textures.texsubimage2d#14
+dEQP-EGL.functional.sharing.gles2.multithread.random.textures.texsubimage2d#15
+dEQP-EGL.functional.sharing.gles2.multithread.random.textures.texsubimage2d#16
+dEQP-EGL.functional.sharing.gles2.multithread.random.textures.texsubimage2d#17
+dEQP-EGL.functional.sharing.gles2.multithread.random.textures.texsubimage2d#18
+dEQP-EGL.functional.sharing.gles2.multithread.random.textures.texsubimage2d#19
+dEQP-EGL.functional.sharing.gles2.multithread.random.buffers.gen_delete#1
+dEQP-EGL.functional.sharing.gles2.multithread.random.buffers.gen_delete#2
+dEQP-EGL.functional.sharing.gles2.multithread.random.buffers.gen_delete#3
+dEQP-EGL.functional.sharing.gles2.multithread.random.buffers.gen_delete#4
+dEQP-EGL.functional.sharing.gles2.multithread.random.buffers.gen_delete#5
+dEQP-EGL.functional.sharing.gles2.multithread.random.buffers.gen_delete#6
+dEQP-EGL.functional.sharing.gles2.multithread.random.buffers.gen_delete#7
+dEQP-EGL.functional.sharing.gles2.multithread.random.buffers.gen_delete#8
+dEQP-EGL.functional.sharing.gles2.multithread.random.buffers.gen_delete#9
+dEQP-EGL.functional.sharing.gles2.multithread.random.buffers.gen_delete#10
+dEQP-EGL.functional.sharing.gles2.multithread.random.buffers.gen_delete#11
+dEQP-EGL.functional.sharing.gles2.multithread.random.buffers.gen_delete#12
+dEQP-EGL.functional.sharing.gles2.multithread.random.buffers.gen_delete#13
+dEQP-EGL.functional.sharing.gles2.multithread.random.buffers.gen_delete#16
+dEQP-EGL.functional.sharing.gles2.multithread.random.buffers.gen_delete#17
+dEQP-EGL.functional.sharing.gles2.multithread.random.buffers.gen_delete#18
+dEQP-EGL.functional.sharing.gles2.multithread.random.buffers.gen_delete#19
+dEQP-EGL.functional.sharing.gles2.multithread.random.buffers.bufferdata#0
+dEQP-EGL.functional.sharing.gles2.multithread.random.buffers.bufferdata#1
+dEQP-EGL.functional.sharing.gles2.multithread.random.buffers.bufferdata#2
+dEQP-EGL.functional.sharing.gles2.multithread.random.buffers.bufferdata#3
+dEQP-EGL.functional.sharing.gles2.multithread.random.buffers.bufferdata#4
+dEQP-EGL.functional.sharing.gles2.multithread.random.buffers.bufferdata#5
+dEQP-EGL.functional.sharing.gles2.multithread.random.buffers.bufferdata#6
+dEQP-EGL.functional.sharing.gles2.multithread.random.buffers.bufferdata#7
+dEQP-EGL.functional.sharing.gles2.multithread.random.buffers.bufferdata#8
+dEQP-EGL.functional.sharing.gles2.multithread.random.buffers.bufferdata#9
+dEQP-EGL.functional.sharing.gles2.multithread.random.buffers.bufferdata#10
+dEQP-EGL.functional.sharing.gles2.multithread.random.buffers.bufferdata#11
+dEQP-EGL.functional.sharing.gles2.multithread.random.buffers.bufferdata#13
+dEQP-EGL.functional.sharing.gles2.multithread.random.buffers.bufferdata#14
+dEQP-EGL.functional.sharing.gles2.multithread.random.buffers.bufferdata#15
+dEQP-EGL.functional.sharing.gles2.multithread.random.buffers.bufferdata#16
+dEQP-EGL.functional.sharing.gles2.multithread.random.buffers.bufferdata#17
+dEQP-EGL.functional.sharing.gles2.multithread.random.buffers.bufferdata#18
+dEQP-EGL.functional.sharing.gles2.multithread.random.buffers.bufferdata#19
+dEQP-EGL.functional.sharing.gles2.multithread.random.buffers.buffersubdata#0
+dEQP-EGL.functional.sharing.gles2.multithread.random.buffers.buffersubdata#1
+dEQP-EGL.functional.sharing.gles2.multithread.random.buffers.buffersubdata#2
+dEQP-EGL.functional.sharing.gles2.multithread.random.buffers.buffersubdata#3
+dEQP-EGL.functional.sharing.gles2.multithread.random.buffers.buffersubdata#4
+dEQP-EGL.functional.sharing.gles2.multithread.random.buffers.buffersubdata#5
+dEQP-EGL.functional.sharing.gles2.multithread.random.buffers.buffersubdata#6
+dEQP-EGL.functional.sharing.gles2.multithread.random.buffers.buffersubdata#7
+dEQP-EGL.functional.sharing.gles2.multithread.random.buffers.buffersubdata#8
+dEQP-EGL.functional.sharing.gles2.multithread.random.buffers.buffersubdata#9
+dEQP-EGL.functional.sharing.gles2.multithread.random.buffers.buffersubdata#10
+dEQP-EGL.functional.sharing.gles2.multithread.random.buffers.buffersubdata#11
+dEQP-EGL.functional.sharing.gles2.multithread.random.buffers.buffersubdata#12
+dEQP-EGL.functional.sharing.gles2.multithread.random.buffers.buffersubdata#13
+dEQP-EGL.functional.sharing.gles2.multithread.random.buffers.buffersubdata#14
+dEQP-EGL.functional.sharing.gles2.multithread.random.buffers.buffersubdata#15
+dEQP-EGL.functional.sharing.gles2.multithread.random.buffers.buffersubdata#16
+dEQP-EGL.functional.sharing.gles2.multithread.random.buffers.buffersubdata#17
+dEQP-EGL.functional.sharing.gles2.multithread.random.buffers.buffersubdata#18
+dEQP-EGL.functional.sharing.gles2.multithread.random.buffers.buffersubdata#19
+dEQP-EGL.functional.sharing.gles2.multithread.random.shaders.create_destroy#0
+dEQP-EGL.functional.sharing.gles2.multithread.random.shaders.create_destroy#5
+dEQP-EGL.functional.sharing.gles2.multithread.random.shaders.create_destroy#6
+dEQP-EGL.functional.sharing.gles2.multithread.random.shaders.create_destroy#7
+dEQP-EGL.functional.sharing.gles2.multithread.random.shaders.create_destroy#8
+dEQP-EGL.functional.sharing.gles2.multithread.random.shaders.create_destroy#10
+dEQP-EGL.functional.sharing.gles2.multithread.random.shaders.create_destroy#11
+dEQP-EGL.functional.sharing.gles2.multithread.random.shaders.create_destroy#13
+dEQP-EGL.functional.sharing.gles2.multithread.random.shaders.create_destroy#14
+dEQP-EGL.functional.sharing.gles2.multithread.random.shaders.create_destroy#16
+dEQP-EGL.functional.sharing.gles2.multithread.random.shaders.create_destroy#18
+dEQP-EGL.functional.sharing.gles2.multithread.random.shaders.create_destroy#19
+dEQP-EGL.functional.sharing.gles2.multithread.random.shaders.source#0
+dEQP-EGL.functional.sharing.gles2.multithread.random.shaders.source#1
+dEQP-EGL.functional.sharing.gles2.multithread.random.shaders.source#2
+dEQP-EGL.functional.sharing.gles2.multithread.random.shaders.source#4
+dEQP-EGL.functional.sharing.gles2.multithread.random.shaders.source#6
+dEQP-EGL.functional.sharing.gles2.multithread.random.shaders.source#7
+dEQP-EGL.functional.sharing.gles2.multithread.random.shaders.source#8
+dEQP-EGL.functional.sharing.gles2.multithread.random.shaders.source#9
+dEQP-EGL.functional.sharing.gles2.multithread.random.shaders.source#11
+dEQP-EGL.functional.sharing.gles2.multithread.random.shaders.source#12
+dEQP-EGL.functional.sharing.gles2.multithread.random.shaders.source#14
+dEQP-EGL.functional.sharing.gles2.multithread.random.shaders.source#15
+dEQP-EGL.functional.sharing.gles2.multithread.random.shaders.source#17
+dEQP-EGL.functional.sharing.gles2.multithread.random.shaders.source#18
+dEQP-EGL.functional.sharing.gles2.multithread.random.shaders.source#19
+dEQP-EGL.functional.sharing.gles2.multithread.random.shaders.compile#0
+dEQP-EGL.functional.sharing.gles2.multithread.random.shaders.compile#1
+dEQP-EGL.functional.sharing.gles2.multithread.random.shaders.compile#2
+dEQP-EGL.functional.sharing.gles2.multithread.random.shaders.compile#3
+dEQP-EGL.functional.sharing.gles2.multithread.random.shaders.compile#4
+dEQP-EGL.functional.sharing.gles2.multithread.random.shaders.compile#5
+dEQP-EGL.functional.sharing.gles2.multithread.random.shaders.compile#6
+dEQP-EGL.functional.sharing.gles2.multithread.random.shaders.compile#7
+dEQP-EGL.functional.sharing.gles2.multithread.random.shaders.compile#8
+dEQP-EGL.functional.sharing.gles2.multithread.random.shaders.compile#9
+dEQP-EGL.functional.sharing.gles2.multithread.random.shaders.compile#10
+dEQP-EGL.functional.sharing.gles2.multithread.random.shaders.compile#11
+dEQP-EGL.functional.sharing.gles2.multithread.random.shaders.compile#13
+dEQP-EGL.functional.sharing.gles2.multithread.random.shaders.compile#14
+dEQP-EGL.functional.sharing.gles2.multithread.random.shaders.compile#15
+dEQP-EGL.functional.sharing.gles2.multithread.random.shaders.compile#16
+dEQP-EGL.functional.sharing.gles2.multithread.random.shaders.compile#18
+dEQP-EGL.functional.sharing.gles2.multithread.random.shaders.compile#19
+dEQP-EGL.functional.sharing.gles2.multithread.random.programs.create_destroy#0
+dEQP-EGL.functional.sharing.gles2.multithread.random.programs.create_destroy#1
+dEQP-EGL.functional.sharing.gles2.multithread.random.programs.create_destroy#2
+dEQP-EGL.functional.sharing.gles2.multithread.random.programs.create_destroy#3
+dEQP-EGL.functional.sharing.gles2.multithread.random.programs.create_destroy#4
+dEQP-EGL.functional.sharing.gles2.multithread.random.programs.create_destroy#5
+dEQP-EGL.functional.sharing.gles2.multithread.random.programs.create_destroy#6
+dEQP-EGL.functional.sharing.gles2.multithread.random.programs.create_destroy#7
+dEQP-EGL.functional.sharing.gles2.multithread.random.programs.create_destroy#8
+dEQP-EGL.functional.sharing.gles2.multithread.random.programs.create_destroy#9
+dEQP-EGL.functional.sharing.gles2.multithread.random.programs.create_destroy#10
+dEQP-EGL.functional.sharing.gles2.multithread.random.programs.create_destroy#11
+dEQP-EGL.functional.sharing.gles2.multithread.random.programs.create_destroy#13
+dEQP-EGL.functional.sharing.gles2.multithread.random.programs.create_destroy#14
+dEQP-EGL.functional.sharing.gles2.multithread.random.programs.create_destroy#15
+dEQP-EGL.functional.sharing.gles2.multithread.random.programs.create_destroy#17
+dEQP-EGL.functional.sharing.gles2.multithread.random.programs.create_destroy#18
+dEQP-EGL.functional.sharing.gles2.multithread.random.programs.attach_detach#0
+dEQP-EGL.functional.sharing.gles2.multithread.random.programs.attach_detach#1
+dEQP-EGL.functional.sharing.gles2.multithread.random.programs.attach_detach#3
+dEQP-EGL.functional.sharing.gles2.multithread.random.programs.attach_detach#4
+dEQP-EGL.functional.sharing.gles2.multithread.random.programs.attach_detach#6
+dEQP-EGL.functional.sharing.gles2.multithread.random.programs.attach_detach#7
+dEQP-EGL.functional.sharing.gles2.multithread.random.programs.attach_detach#8
+dEQP-EGL.functional.sharing.gles2.multithread.random.programs.attach_detach#9
+dEQP-EGL.functional.sharing.gles2.multithread.random.programs.attach_detach#10
+dEQP-EGL.functional.sharing.gles2.multithread.random.programs.attach_detach#12
+dEQP-EGL.functional.sharing.gles2.multithread.random.programs.attach_detach#13
+dEQP-EGL.functional.sharing.gles2.multithread.random.programs.attach_detach#15
+dEQP-EGL.functional.sharing.gles2.multithread.random.programs.attach_detach#16
+dEQP-EGL.functional.sharing.gles2.multithread.random.programs.attach_detach#17
+dEQP-EGL.functional.sharing.gles2.multithread.random.programs.attach_detach#19
+dEQP-EGL.functional.sharing.gles2.multithread.random.programs.link#1
+dEQP-EGL.functional.sharing.gles2.multithread.random.programs.link#3
+dEQP-EGL.functional.sharing.gles2.multithread.random.programs.link#4
+dEQP-EGL.functional.sharing.gles2.multithread.random.programs.link#5
+dEQP-EGL.functional.sharing.gles2.multithread.random.programs.link#7
+dEQP-EGL.functional.sharing.gles2.multithread.random.programs.link#9
+dEQP-EGL.functional.sharing.gles2.multithread.random.programs.link#12
+dEQP-EGL.functional.sharing.gles2.multithread.random.programs.link#13
+dEQP-EGL.functional.sharing.gles2.multithread.random.programs.link#15
+dEQP-EGL.functional.sharing.gles2.multithread.random.programs.link#17
+dEQP-EGL.functional.sharing.gles2.multithread.random.programs.link#18
+dEQP-EGL.functional.sharing.gles2.multithread.random.images.create_destroy#0
+dEQP-EGL.functional.sharing.gles2.multithread.random.images.create_destroy#2
+dEQP-EGL.functional.sharing.gles2.multithread.random.images.create_destroy#3
+dEQP-EGL.functional.sharing.gles2.multithread.random.images.create_destroy#5
+dEQP-EGL.functional.sharing.gles2.multithread.random.images.create_destroy#7
+dEQP-EGL.functional.sharing.gles2.multithread.random.images.create_destroy#8
+dEQP-EGL.functional.sharing.gles2.multithread.random.images.create_destroy#10
+dEQP-EGL.functional.sharing.gles2.multithread.random.images.create_destroy#12
+dEQP-EGL.functional.sharing.gles2.multithread.random.images.create_destroy#14
+dEQP-EGL.functional.sharing.gles2.multithread.random.images.create_destroy#16
+dEQP-EGL.functional.sharing.gles2.multithread.random.images.create_destroy#18
+dEQP-EGL.functional.sharing.gles2.multithread.random.images.create_destroy#19
+dEQP-EGL.functional.sharing.gles2.multithread.random.images.teximage2d#0
+dEQP-EGL.functional.sharing.gles2.multithread.random.images.teximage2d#1
+dEQP-EGL.functional.sharing.gles2.multithread.random.images.teximage2d#2
+dEQP-EGL.functional.sharing.gles2.multithread.random.images.teximage2d#4
+dEQP-EGL.functional.sharing.gles2.multithread.random.images.teximage2d#6
+dEQP-EGL.functional.sharing.gles2.multithread.random.images.teximage2d#7
+dEQP-EGL.functional.sharing.gles2.multithread.random.images.teximage2d#9
+dEQP-EGL.functional.sharing.gles2.multithread.random.images.teximage2d#11
+dEQP-EGL.functional.sharing.gles2.multithread.random.images.teximage2d#12
+dEQP-EGL.functional.sharing.gles2.multithread.random.images.teximage2d#13
+dEQP-EGL.functional.sharing.gles2.multithread.random.images.teximage2d#14
+dEQP-EGL.functional.sharing.gles2.multithread.random.images.teximage2d#16
+dEQP-EGL.functional.sharing.gles2.multithread.random.images.teximage2d#18
+dEQP-EGL.functional.sharing.gles2.multithread.random.images.texsubimage2d#2
+dEQP-EGL.functional.sharing.gles2.multithread.random.images.texsubimage2d#3
+dEQP-EGL.functional.sharing.gles2.multithread.random.images.texsubimage2d#7
+dEQP-EGL.functional.sharing.gles2.multithread.random.images.texsubimage2d#11
+dEQP-EGL.functional.sharing.gles2.multithread.random.images.texsubimage2d#12
+dEQP-EGL.functional.sharing.gles2.multithread.random.images.texsubimage2d#13
+dEQP-EGL.functional.sharing.gles2.multithread.simple_egl_sync.buffers#gen_delete
+dEQP-EGL.functional.sharing.gles2.multithread.simple_egl_sync.buffers#bufferdata
+dEQP-EGL.functional.sharing.gles2.multithread.simple_egl_sync.buffers#buffersubdata
+dEQP-EGL.functional.sharing.gles2.multithread.simple_egl_sync.buffers#bufferdata_render
+dEQP-EGL.functional.sharing.gles2.multithread.simple_egl_sync.textures#gen_delete
+dEQP-EGL.functional.sharing.gles2.multithread.simple_egl_sync.textures#teximage2d
+dEQP-EGL.functional.sharing.gles2.multithread.simple_egl_sync.textures#teximage2d_texsubimage2d
+dEQP-EGL.functional.sharing.gles2.multithread.simple_egl_sync.textures#teximage2d_texsubimage2d_render
+dEQP-EGL.functional.sharing.gles2.multithread.simple_egl_sync.images.texture_source#texsubimage2d
+dEQP-EGL.functional.sharing.gles2.multithread.simple_egl_sync.images.texture_source#teximage2d
+dEQP-EGL.functional.sharing.gles2.multithread.simple_egl_sync.images.texture_source#create_texture_render
+dEQP-EGL.functional.sharing.gles2.multithread.simple_egl_sync.images.texture_source#teximage2d_render
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.textures.gen_delete#0
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.textures.gen_delete#1
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.textures.gen_delete#2
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.textures.gen_delete#3
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.textures.gen_delete#4
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.textures.gen_delete#5
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.textures.gen_delete#6
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.textures.gen_delete#7
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.textures.gen_delete#8
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.textures.gen_delete#9
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.textures.gen_delete#10
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.textures.gen_delete#11
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.textures.gen_delete#12
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.textures.gen_delete#13
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.textures.gen_delete#14
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.textures.gen_delete#15
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.textures.gen_delete#16
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.textures.gen_delete#17
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.textures.gen_delete#18
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.textures.gen_delete#19
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.textures.teximage2d#0
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.textures.teximage2d#1
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.textures.teximage2d#2
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.textures.teximage2d#3
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.textures.teximage2d#4
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.textures.teximage2d#5
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.textures.teximage2d#6
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.textures.teximage2d#7
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.textures.teximage2d#8
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.textures.teximage2d#9
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.textures.teximage2d#10
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.textures.teximage2d#11
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.textures.teximage2d#12
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.textures.teximage2d#13
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.textures.teximage2d#14
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.textures.teximage2d#15
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.textures.teximage2d#16
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.textures.teximage2d#17
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.textures.teximage2d#18
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.textures.teximage2d#19
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.textures.texsubimage2d#0
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.textures.texsubimage2d#1
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.textures.texsubimage2d#2
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.textures.texsubimage2d#3
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.textures.texsubimage2d#5
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.textures.texsubimage2d#7
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.textures.texsubimage2d#8
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.textures.texsubimage2d#9
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.textures.texsubimage2d#10
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.textures.texsubimage2d#11
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.textures.texsubimage2d#12
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.textures.texsubimage2d#13
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.textures.texsubimage2d#14
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.textures.texsubimage2d#15
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.textures.texsubimage2d#16
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.textures.texsubimage2d#17
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.textures.texsubimage2d#18
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.textures.texsubimage2d#19
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.buffers.gen_delete#0
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.buffers.gen_delete#1
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.buffers.gen_delete#2
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.buffers.gen_delete#3
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.buffers.gen_delete#4
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.buffers.gen_delete#5
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.buffers.gen_delete#6
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.buffers.gen_delete#7
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.buffers.gen_delete#8
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.buffers.gen_delete#10
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.buffers.gen_delete#11
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.buffers.gen_delete#12
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.buffers.gen_delete#13
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.buffers.gen_delete#14
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.buffers.gen_delete#15
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.buffers.gen_delete#16
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.buffers.gen_delete#17
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.buffers.gen_delete#18
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.buffers.gen_delete#19
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.buffers.bufferdata#0
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.buffers.bufferdata#1
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.buffers.bufferdata#2
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.buffers.bufferdata#3
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.buffers.bufferdata#4
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.buffers.bufferdata#5
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.buffers.bufferdata#6
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.buffers.bufferdata#7
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.buffers.bufferdata#8
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.buffers.bufferdata#9
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.buffers.bufferdata#10
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.buffers.bufferdata#11
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.buffers.bufferdata#12
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.buffers.bufferdata#13
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.buffers.bufferdata#14
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.buffers.bufferdata#15
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.buffers.bufferdata#16
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.buffers.bufferdata#17
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.buffers.bufferdata#18
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.buffers.bufferdata#19
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.buffers.buffersubdata#0
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.buffers.buffersubdata#1
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.buffers.buffersubdata#2
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.buffers.buffersubdata#3
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.buffers.buffersubdata#4
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.buffers.buffersubdata#5
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.buffers.buffersubdata#6
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.buffers.buffersubdata#7
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.buffers.buffersubdata#8
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.buffers.buffersubdata#10
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.buffers.buffersubdata#11
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.buffers.buffersubdata#12
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.buffers.buffersubdata#13
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.buffers.buffersubdata#14
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.buffers.buffersubdata#15
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.buffers.buffersubdata#16
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.buffers.buffersubdata#17
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.buffers.buffersubdata#18
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.buffers.buffersubdata#19
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.shaders.create_destroy#0
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.shaders.create_destroy#1
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.shaders.create_destroy#2
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.shaders.create_destroy#3
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.shaders.create_destroy#4
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.shaders.create_destroy#5
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.shaders.create_destroy#6
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.shaders.create_destroy#7
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.shaders.create_destroy#8
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.shaders.create_destroy#9
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.shaders.create_destroy#10
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.shaders.create_destroy#11
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.shaders.create_destroy#13
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.shaders.create_destroy#14
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.shaders.create_destroy#15
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.shaders.create_destroy#16
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.shaders.create_destroy#17
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.shaders.create_destroy#18
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.shaders.create_destroy#19
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.shaders.source#0
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.shaders.source#1
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.shaders.source#2
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.shaders.source#3
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.shaders.source#4
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.shaders.source#5
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.shaders.source#6
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.shaders.source#7
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.shaders.source#8
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.shaders.source#9
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.shaders.source#10
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.shaders.source#11
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.shaders.source#12
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.shaders.source#13
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.shaders.source#14
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.shaders.source#15
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.shaders.source#16
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.shaders.source#17
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.shaders.source#18
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.shaders.source#19
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.shaders.compile#0
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.shaders.compile#1
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.shaders.compile#2
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.shaders.compile#3
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.shaders.compile#4
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.shaders.compile#5
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.shaders.compile#6
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.shaders.compile#7
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.shaders.compile#8
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.shaders.compile#9
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.shaders.compile#10
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.shaders.compile#11
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.shaders.compile#12
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.shaders.compile#13
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.shaders.compile#14
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.shaders.compile#15
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.shaders.compile#16
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.shaders.compile#17
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.shaders.compile#18
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.shaders.compile#19
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.programs.create_destroy#0
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.programs.create_destroy#2
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.programs.create_destroy#3
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.programs.create_destroy#4
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.programs.create_destroy#5
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.programs.create_destroy#6
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.programs.create_destroy#7
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.programs.create_destroy#8
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.programs.create_destroy#9
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.programs.create_destroy#10
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.programs.create_destroy#11
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.programs.create_destroy#12
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.programs.create_destroy#13
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.programs.create_destroy#15
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.programs.create_destroy#16
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.programs.create_destroy#18
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.programs.create_destroy#19
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.programs.attach_detach#0
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.programs.attach_detach#1
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.programs.attach_detach#2
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.programs.attach_detach#5
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.programs.attach_detach#6
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.programs.attach_detach#7
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.programs.attach_detach#8
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.programs.attach_detach#9
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.programs.attach_detach#10
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.programs.attach_detach#11
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.programs.attach_detach#12
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.programs.attach_detach#13
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.programs.attach_detach#14
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.programs.attach_detach#15
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.programs.attach_detach#16
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.programs.attach_detach#17
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.programs.link#0
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.programs.link#2
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.programs.link#3
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.programs.link#4
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.programs.link#5
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.programs.link#7
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.programs.link#8
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.programs.link#9
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.programs.link#11
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.programs.link#12
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.programs.link#13
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.programs.link#15
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.programs.link#18
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.images.create_destroy#5
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.images.create_destroy#8
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.images.create_destroy#10
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.images.create_destroy#12
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.images.teximage2d#0
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.images.teximage2d#5
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.images.teximage2d#15
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.images.teximage2d#18
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.images.texsubimage2d#11
+dEQP-EGL.functional.sharing.gles2.multithread.simple_egl_server_sync.buffers#gen_delete
+dEQP-EGL.functional.sharing.gles2.multithread.simple_egl_server_sync.buffers#bufferdata
+dEQP-EGL.functional.sharing.gles2.multithread.simple_egl_server_sync.buffers#buffersubdata
+dEQP-EGL.functional.sharing.gles2.multithread.simple_egl_server_sync.textures#gen_delete
+dEQP-EGL.functional.sharing.gles2.multithread.simple_egl_server_sync.textures#teximage2d
+dEQP-EGL.functional.sharing.gles2.multithread.simple_egl_server_sync.textures#teximage2d_texsubimage2d
+dEQP-EGL.functional.sharing.gles2.multithread.simple_egl_server_sync.shaders#vtx_create_destroy
+dEQP-EGL.functional.sharing.gles2.multithread.simple_egl_server_sync.shaders#vtx_shadersource
+dEQP-EGL.functional.sharing.gles2.multithread.simple_egl_server_sync.shaders#vtx_compile
+dEQP-EGL.functional.sharing.gles2.multithread.simple_egl_server_sync.shaders#frag_create_destroy
+dEQP-EGL.functional.sharing.gles2.multithread.simple_egl_server_sync.shaders#frag_shadersource
+dEQP-EGL.functional.sharing.gles2.multithread.simple_egl_server_sync.shaders#frag_compile
+dEQP-EGL.functional.sharing.gles2.multithread.simple_egl_server_sync.images.texture_source#create_destroy
+dEQP-EGL.functional.sharing.gles2.multithread.simple_egl_server_sync.images.texture_source#create_texture
+dEQP-EGL.functional.sharing.gles2.multithread.simple_egl_server_sync.images.texture_source#texsubimage2d
+dEQP-EGL.functional.sharing.gles2.multithread.simple_egl_server_sync.images.texture_source#teximage2d
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.textures.gen_delete#0
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.textures.gen_delete#1
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.textures.gen_delete#2
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.textures.gen_delete#3
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.textures.gen_delete#4
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.textures.gen_delete#5
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.textures.gen_delete#6
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.textures.gen_delete#7
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.textures.gen_delete#8
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.textures.gen_delete#9
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.textures.gen_delete#10
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.textures.gen_delete#11
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.textures.gen_delete#12
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.textures.gen_delete#13
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.textures.gen_delete#14
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.textures.gen_delete#15
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.textures.gen_delete#16
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.textures.gen_delete#17
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.textures.gen_delete#18
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.textures.gen_delete#19
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.textures.teximage2d#0
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.textures.teximage2d#1
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.textures.teximage2d#2
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.textures.teximage2d#4
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.textures.teximage2d#5
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.textures.teximage2d#6
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.textures.teximage2d#7
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.textures.teximage2d#8
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.textures.teximage2d#9
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.textures.teximage2d#10
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.textures.teximage2d#11
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.textures.teximage2d#12
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.textures.teximage2d#13
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.textures.teximage2d#14
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.textures.teximage2d#15
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.textures.teximage2d#16
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.textures.teximage2d#17
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.textures.teximage2d#18
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.textures.teximage2d#19
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.textures.texsubimage2d#0
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.textures.texsubimage2d#1
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.textures.texsubimage2d#2
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.textures.texsubimage2d#3
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.textures.texsubimage2d#4
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.textures.texsubimage2d#5
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.textures.texsubimage2d#6
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.textures.texsubimage2d#7
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.textures.texsubimage2d#8
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.textures.texsubimage2d#9
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.textures.texsubimage2d#10
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.textures.texsubimage2d#11
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.textures.texsubimage2d#12
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.textures.texsubimage2d#13
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.textures.texsubimage2d#14
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.textures.texsubimage2d#15
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.textures.texsubimage2d#16
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.textures.texsubimage2d#17
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.textures.texsubimage2d#18
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.textures.texsubimage2d#19
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.buffers.gen_delete#0
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.buffers.gen_delete#1
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.buffers.gen_delete#2
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.buffers.gen_delete#3
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.buffers.gen_delete#4
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.buffers.gen_delete#5
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.buffers.gen_delete#6
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.buffers.gen_delete#7
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.buffers.gen_delete#8
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.buffers.gen_delete#9
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.buffers.gen_delete#10
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.buffers.gen_delete#11
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.buffers.gen_delete#12
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.buffers.gen_delete#13
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.buffers.gen_delete#14
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.buffers.gen_delete#15
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.buffers.gen_delete#16
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.buffers.gen_delete#17
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.buffers.gen_delete#18
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.buffers.gen_delete#19
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.buffers.bufferdata#0
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.buffers.bufferdata#1
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.buffers.bufferdata#2
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.buffers.bufferdata#3
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.buffers.bufferdata#4
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.buffers.bufferdata#5
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.buffers.bufferdata#6
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.buffers.bufferdata#7
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.buffers.bufferdata#8
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.buffers.bufferdata#9
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.buffers.bufferdata#10
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.buffers.bufferdata#11
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.buffers.bufferdata#13
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.buffers.bufferdata#14
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.buffers.bufferdata#15
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.buffers.bufferdata#16
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.buffers.bufferdata#17
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.buffers.bufferdata#18
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.buffers.bufferdata#19
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.buffers.buffersubdata#0
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.buffers.buffersubdata#1
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.buffers.buffersubdata#2
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.buffers.buffersubdata#3
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.buffers.buffersubdata#4
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.buffers.buffersubdata#5
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.buffers.buffersubdata#6
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.buffers.buffersubdata#7
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.buffers.buffersubdata#8
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.buffers.buffersubdata#9
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.buffers.buffersubdata#10
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.buffers.buffersubdata#11
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.buffers.buffersubdata#12
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.buffers.buffersubdata#13
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.buffers.buffersubdata#14
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.buffers.buffersubdata#15
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.buffers.buffersubdata#16
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.buffers.buffersubdata#17
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.buffers.buffersubdata#18
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.buffers.buffersubdata#19
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.shaders.create_destroy#0
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.shaders.create_destroy#1
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.shaders.create_destroy#2
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.shaders.create_destroy#3
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.shaders.create_destroy#4
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.shaders.create_destroy#5
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.shaders.create_destroy#6
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.shaders.create_destroy#7
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.shaders.create_destroy#8
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.shaders.create_destroy#9
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.shaders.create_destroy#10
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.shaders.create_destroy#11
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.shaders.create_destroy#12
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.shaders.create_destroy#13
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.shaders.create_destroy#14
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.shaders.create_destroy#15
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.shaders.create_destroy#16
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.shaders.create_destroy#17
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.shaders.create_destroy#18
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.shaders.create_destroy#19
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.shaders.source#0
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.shaders.source#1
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.shaders.source#2
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.shaders.source#3
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.shaders.source#4
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.shaders.source#5
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.shaders.source#6
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.shaders.source#7
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.shaders.source#8
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.shaders.source#9
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.shaders.source#10
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.shaders.source#11
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.shaders.source#12
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.shaders.source#13
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.shaders.source#14
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.shaders.source#15
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.shaders.source#16
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.shaders.source#17
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.shaders.source#18
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.shaders.source#19
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.shaders.compile#0
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.shaders.compile#1
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.shaders.compile#2
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.shaders.compile#3
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.shaders.compile#4
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.shaders.compile#5
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.shaders.compile#6
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.shaders.compile#7
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.shaders.compile#8
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.shaders.compile#9
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.shaders.compile#10
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.shaders.compile#11
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.shaders.compile#12
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.shaders.compile#13
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.shaders.compile#14
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.shaders.compile#15
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.shaders.compile#16
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.shaders.compile#17
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.shaders.compile#18
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.shaders.compile#19
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.programs.create_destroy#0
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.programs.create_destroy#1
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.programs.create_destroy#2
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.programs.create_destroy#3
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.programs.create_destroy#4
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.programs.create_destroy#5
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.programs.create_destroy#6
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.programs.create_destroy#7
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.programs.create_destroy#8
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.programs.create_destroy#9
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.programs.create_destroy#10
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.programs.create_destroy#11
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.programs.create_destroy#12
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.programs.create_destroy#13
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.programs.create_destroy#14
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.programs.create_destroy#15
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.programs.create_destroy#16
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.programs.create_destroy#18
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.programs.create_destroy#19
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.programs.attach_detach#0
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.programs.attach_detach#1
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.programs.attach_detach#2
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.programs.attach_detach#3
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.programs.attach_detach#4
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.programs.attach_detach#5
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.programs.attach_detach#6
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.programs.attach_detach#7
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.programs.attach_detach#8
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.programs.attach_detach#9
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.programs.attach_detach#10
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.programs.attach_detach#11
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.programs.attach_detach#12
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.programs.attach_detach#13
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.programs.attach_detach#14
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.programs.attach_detach#15
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.programs.attach_detach#16
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.programs.attach_detach#17
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.programs.attach_detach#18
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.programs.attach_detach#19
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.programs.link#0
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.programs.link#2
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.programs.link#4
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.programs.link#5
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.programs.link#6
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.programs.link#7
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.programs.link#8
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.programs.link#9
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.programs.link#11
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.programs.link#12
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.programs.link#13
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.programs.link#14
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.programs.link#15
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.programs.link#17
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.programs.link#18
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.images.create_destroy#1
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.images.create_destroy#2
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.images.create_destroy#3
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.images.create_destroy#5
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.images.create_destroy#7
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.images.create_destroy#9
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.images.create_destroy#10
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.images.create_destroy#11
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.images.create_destroy#12
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.images.create_destroy#14
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.images.create_destroy#17
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.images.create_destroy#19
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.images.teximage2d#0
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.images.teximage2d#1
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.images.teximage2d#3
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.images.teximage2d#4
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.images.teximage2d#5
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.images.teximage2d#6
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.images.teximage2d#7
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.images.teximage2d#8
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.images.teximage2d#9
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.images.teximage2d#10
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.images.teximage2d#14
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.images.teximage2d#15
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.images.teximage2d#17
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.images.teximage2d#19
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.images.texsubimage2d#0
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.images.texsubimage2d#2
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.images.texsubimage2d#3
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.images.texsubimage2d#5
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.images.texsubimage2d#7
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.images.texsubimage2d#8
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.images.texsubimage2d#10
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.images.texsubimage2d#12
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.images.texsubimage2d#13
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.images.texsubimage2d#14
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.images.texsubimage2d#16
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.images.texsubimage2d#19
+dEQP-EGL.functional.multithread#config
+dEQP-EGL.functional.multithread#pbuffer
+dEQP-EGL.functional.multithread#pixmap
+dEQP-EGL.functional.multithread#single_window
+dEQP-EGL.functional.multithread#context
+dEQP-EGL.functional.multithread#pbuffer_pixmap
+dEQP-EGL.functional.multithread#pixmap_window
+dEQP-EGL.functional.multithread#pixmap_single_window
+dEQP-EGL.functional.multithread#pixmap_context
+dEQP-EGL.functional.multithread#pbuffer_pixmap_context
+dEQP-EGL.functional.multithread#pixmap_single_window_context
diff --git a/tests/tests/deqp/gles3-temporary-failures.txt b/tests/tests/deqp/gles3-temporary-failures.txt
index d8b3208..c2127a6 100644
--- a/tests/tests/deqp/gles3-temporary-failures.txt
+++ b/tests/tests/deqp/gles3-temporary-failures.txt
@@ -624,6 +624,14 @@
 dEQP-GLES3.functional.shaders.builtin_functions.precision.acosh.highp_vertex#vec2
 dEQP-GLES3.functional.shaders.builtin_functions.precision.acosh.highp_vertex#vec3
 dEQP-GLES3.functional.shaders.builtin_functions.precision.acosh.highp_vertex#vec4
+dEQP-GLES3.functional.shaders.builtin_functions.precision.atanh.highp_vertex#scalar
+dEQP-GLES3.functional.shaders.builtin_functions.precision.atanh.highp_vertex#vec2
+dEQP-GLES3.functional.shaders.builtin_functions.precision.atanh.highp_vertex#vec3
+dEQP-GLES3.functional.shaders.builtin_functions.precision.atanh.highp_vertex#vec4
+dEQP-GLES3.functional.shaders.builtin_functions.precision.atanh.highp_fragment#scalar
+dEQP-GLES3.functional.shaders.builtin_functions.precision.atanh.highp_fragment#vec2
+dEQP-GLES3.functional.shaders.builtin_functions.precision.atanh.highp_fragment#vec3
+dEQP-GLES3.functional.shaders.builtin_functions.precision.atanh.highp_fragment#vec4
 dEQP-GLES3.functional.shaders.builtin_functions.precision.asin.highp_fragment#scalar
 dEQP-GLES3.functional.shaders.builtin_functions.precision.asin.highp_fragment#vec2
 dEQP-GLES3.functional.shaders.builtin_functions.precision.asin.highp_fragment#vec3
diff --git a/tests/tests/deqp/gles31-temporary-failures.txt b/tests/tests/deqp/gles31-temporary-failures.txt
index b46ab1e..d921091 100644
--- a/tests/tests/deqp/gles31-temporary-failures.txt
+++ b/tests/tests/deqp/gles31-temporary-failures.txt
@@ -8418,6 +8418,10 @@
 dEQP-GLES31.functional.shaders.builtin_functions.precision.acosh.highp_compute#vec2
 dEQP-GLES31.functional.shaders.builtin_functions.precision.acosh.highp_compute#vec3
 dEQP-GLES31.functional.shaders.builtin_functions.precision.acosh.highp_compute#vec4
+dEQP-GLES31.functional.shaders.builtin_functions.precision.atanh.highp_compute#scalar
+dEQP-GLES31.functional.shaders.builtin_functions.precision.atanh.highp_compute#vec2
+dEQP-GLES31.functional.shaders.builtin_functions.precision.atanh.highp_compute#vec3
+dEQP-GLES31.functional.shaders.builtin_functions.precision.atanh.highp_compute#vec4
 dEQP-GLES31.functional.shaders.builtin_functions.precision.asin.highp_compute#scalar
 dEQP-GLES31.functional.shaders.builtin_functions.precision.asin.highp_compute#vec2
 dEQP-GLES31.functional.shaders.builtin_functions.precision.asin.highp_compute#vec3
diff --git a/tests/tests/display/AndroidManifest.xml b/tests/tests/display/AndroidManifest.xml
index 0b24754..bf84219 100644
--- a/tests/tests/display/AndroidManifest.xml
+++ b/tests/tests/display/AndroidManifest.xml
@@ -19,6 +19,8 @@
     package="com.android.cts.display">
 
     <uses-permission android:name="android.permission.DISABLE_KEYGUARD" />
+    <!-- For special presentation windows when testing mode switches. -->
+    <uses-permission android:name="android.permission.SYSTEM_ALERT_WINDOW" />
 
     <application>
         <uses-library android:name="android.test.runner" />
diff --git a/tests/tests/display/AndroidTest.xml b/tests/tests/display/AndroidTest.xml
index 80d33b0..dd42984 100644
--- a/tests/tests/display/AndroidTest.xml
+++ b/tests/tests/display/AndroidTest.xml
@@ -16,6 +16,6 @@
 <configuration description="Base config for CTS package preparer">
     <include name="common-config" />
     <!-- Use a non-standard pattern, must match values in tests/tests/display/.../DisplayTest.java -->
-    <option name="run-command:run-command" value="settings put global overlay_display_devices 181x161/214" />
+    <option name="run-command:run-command" value="settings put global overlay_display_devices '181x161/214|181x161/214'" />
     <option name="run-command:teardown-command" value="settings put global overlay_display_devices &quot;&quot;" />
 </configuration>
diff --git a/tests/tests/display/src/android/display/cts/DisplayTest.java b/tests/tests/display/src/android/display/cts/DisplayTest.java
index bea99ed..112710e 100644
--- a/tests/tests/display/src/android/display/cts/DisplayTest.java
+++ b/tests/tests/display/src/android/display/cts/DisplayTest.java
@@ -16,18 +16,29 @@
 
 package android.display.cts;
 
+import android.app.Presentation;
 import android.content.Context;
+import android.graphics.Color;
 import android.graphics.PixelFormat;
 import android.graphics.Point;
 import android.hardware.display.DisplayManager;
+import android.hardware.display.DisplayManager.DisplayListener;
+import android.os.Bundle;
+import android.os.Handler;
+import android.os.Looper;
 import android.test.AndroidTestCase;
 import android.util.DisplayMetrics;
 import android.view.Display;
+import android.view.View;
+import android.view.ViewGroup;
 import android.view.WindowManager;
 
+import java.util.concurrent.CountDownLatch;
+import java.util.concurrent.TimeUnit;
+
 public class DisplayTest extends AndroidTestCase {
-    // This test is called from DisplayTestRunner which brings up an overlay display on the target
-    // device. The overlay display parameters must match the ones defined there which are
+    // The CTS package brings up an overlay display on the target device (see AndroidTest.xml).
+    // The overlay display parameters must match the ones defined there, which are
     // 181x161/214 (wxh/dpi).  It only matters that these values are different from any real
     // display.
 
@@ -44,6 +55,9 @@
     private DisplayManager mDisplayManager;
     private WindowManager mWindowManager;
 
+    // To test display mode switches.
+    private TestPresentation mPresentation;
+
     @Override
     protected void setUp() throws Exception {
         super.setUp();
@@ -177,4 +191,105 @@
 
         assertEquals(Display.FLAG_PRESENTATION, display.getFlags());
     }
+
+    /**
+     * Tests that the mode-related attributes and methods work as expected.
+     */
+    public void testMode() {
+        Display display = getSecondaryDisplay(mDisplayManager.getDisplays());
+        assertEquals(2, display.getSupportedModes().length);
+        Display.Mode mode = display.getMode();
+        assertEquals(display.getSupportedModes()[0], mode);
+        assertEquals(SECONDARY_DISPLAY_WIDTH, mode.getPhysicalWidth());
+        assertEquals(SECONDARY_DISPLAY_HEIGHT, mode.getPhysicalHeight());
+        assertEquals(display.getRefreshRate(), mode.getRefreshRate());
+    }
+
+    /**
+     * Tests that mode switch requests are correctly executed.
+     */
+    public void testModeSwitch() throws Exception {
+        final Display display = getSecondaryDisplay(mDisplayManager.getDisplays());
+        Display.Mode[] modes = display.getSupportedModes();
+        assertEquals(2, modes.length);
+        Display.Mode mode = display.getMode();
+        assertEquals(modes[0], mode);
+        final Display.Mode newMode = modes[1];
+
+        Handler handler = new Handler(Looper.getMainLooper());
+
+        // Register for display events.
+        final CountDownLatch changeSignal = new CountDownLatch(1);
+        mDisplayManager.registerDisplayListener(new DisplayListener() {
+            @Override
+            public void onDisplayAdded(int displayId) {}
+
+            @Override
+            public void onDisplayChanged(int displayId) {
+                if (displayId == display.getDisplayId()) {
+                    changeSignal.countDown();
+                }
+            }
+
+            @Override
+            public void onDisplayRemoved(int displayId) {}
+        }, handler);
+
+        // Show the presentation.
+        final CountDownLatch presentationSignal = new CountDownLatch(1);
+        handler.post(new Runnable() {
+            @Override
+            public void run() {
+                mPresentation = new TestPresentation(
+                        getContext(), display, newMode.getModeId());
+                mPresentation.show();
+                presentationSignal.countDown();
+            }
+        });
+        assertTrue(presentationSignal.await(5, TimeUnit.SECONDS));
+
+        // Wait until the display change is effective.
+        assertTrue(changeSignal.await(5, TimeUnit.SECONDS));
+
+        assertEquals(newMode, display.getMode());
+        handler.post(new Runnable() {
+            @Override
+            public void run() {
+                mPresentation.dismiss();
+            }
+        });
+    }
+
+    /**
+     * Used to force mode changes on a display.
+     * <p>
+     * Note that due to limitations of the Presentation class, the modes must have the same size;
+     * otherwise the presentation will be automatically dismissed.
+     */
+    private static final class TestPresentation extends Presentation {
+
+        private final int mModeId;
+
+        public TestPresentation(Context context, Display display, int modeId) {
+            super(context, display);
+            mModeId = modeId;
+        }
+
+        @Override
+        protected void onCreate(Bundle savedInstanceState) {
+            super.onCreate(savedInstanceState);
+
+            View content = new View(getContext());
+            content.setLayoutParams(new ViewGroup.LayoutParams(
+                    ViewGroup.LayoutParams.MATCH_PARENT, ViewGroup.LayoutParams.MATCH_PARENT));
+            content.setBackgroundColor(Color.RED);
+            setContentView(content);
+
+            WindowManager.LayoutParams params = getWindow().getAttributes();
+            params.preferredDisplayModeId = mModeId;
+            params.type = WindowManager.LayoutParams.TYPE_SYSTEM_ALERT;
+            params.setTitle("CtsTestPresentation");
+            getWindow().setAttributes(params);
+        }
+    }
 }
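
For context, the preferredDisplayModeId mechanism exercised by TestPresentation above is not
specific to Presentation windows; any window can request a mode on its own display. A minimal
sketch under that assumption (the class name and the choice of modes[1] are illustrative only):

    import android.app.Activity;
    import android.os.Bundle;
    import android.view.Display;
    import android.view.WindowManager;

    public class PreferredModeActivity extends Activity {
        @Override
        protected void onCreate(Bundle savedInstanceState) {
            super.onCreate(savedInstanceState);
            Display display = getWindowManager().getDefaultDisplay();
            Display.Mode[] modes = display.getSupportedModes();
            if (modes.length > 1) {
                // Request the second reported mode; the switch is asynchronous and is
                // reported through DisplayManager.DisplayListener.onDisplayChanged().
                WindowManager.LayoutParams params = getWindow().getAttributes();
                params.preferredDisplayModeId = modes[1].getModeId();
                getWindow().setAttributes(params);
            }
        }
    }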
diff --git a/tests/tests/hardware/Android.mk b/tests/tests/hardware/Android.mk
index 153445d..1c144ff 100644
--- a/tests/tests/hardware/Android.mk
+++ b/tests/tests/hardware/Android.mk
@@ -56,4 +56,4 @@
 
 LOCAL_JAVA_LIBRARIES := android.test.runner
 
-include $(BUILD_CTS_PACKAGE)
+include $(BUILD_CTS_PACKAGE)
\ No newline at end of file
diff --git a/tests/tests/hardware/AndroidManifest.xml b/tests/tests/hardware/AndroidManifest.xml
index ab81162..7b15b61 100644
--- a/tests/tests/hardware/AndroidManifest.xml
+++ b/tests/tests/hardware/AndroidManifest.xml
@@ -25,6 +25,7 @@
     <uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE" />
     <uses-permission android:name="android.permission.BODY_SENSORS" />
     <uses-permission android:name="android.permission.TRANSMIT_IR" />
+    <uses-permission android:name="android.permission.REORDER_TASKS" />
 
     <application>
         <uses-library android:name="android.test.runner" />
@@ -50,6 +51,26 @@
         <activity android:name="android.hardware.cts.GLSurfaceViewCtsActivity"
             android:label="GLSurfaceViewCtsActivity"/>
 
+        <service android:name="android.hardware.multiprocess.ErrorLoggingService"
+            android:label="ErrorLoggingService"
+            android:process=":errorLoggingServiceProcess"
+            android:exported="false">
+        </service>
+
+        <activity android:name="android.hardware.multiprocess.camera.cts.Camera1Activity"
+            android:label="RemoteCamera1Activity"
+            android:screenOrientation="landscape"
+            android:configChanges="keyboardHidden|orientation|screenSize"
+            android:process=":camera1ActivityProcess">
+        </activity>
+
+        <activity android:name="android.hardware.multiprocess.camera.cts.Camera2Activity"
+            android:label="RemoteCamera2Activity"
+            android:screenOrientation="landscape"
+            android:configChanges="keyboardHidden|orientation|screenSize"
+            android:process=":camera2ActivityProcess">
+        </activity>
+
     </application>
 
     <instrumentation android:name="android.support.test.runner.AndroidJUnitRunner"
diff --git a/tests/tests/hardware/AndroidTest.xml b/tests/tests/hardware/AndroidTest.xml
new file mode 100644
index 0000000..783eafe
--- /dev/null
+++ b/tests/tests/hardware/AndroidTest.xml
@@ -0,0 +1,22 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright (C) 2015 The Android Open Source Project
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+          http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+-->
+<configuration description="Base config for Sensor CTS tests. Put SensorService in restricted mode">
+    <include name="common-config" />
+    <!-- Put SensorService in restricted mode so that only CTS tests will be able to get access to
+    sensors -->
+    <option name="run-command:run-command" value="dumpsys sensorservice restrict" />
+    <option name="run-command:teardown-command" value="dumpsys sensorservice enable" />
+</configuration>
diff --git a/tests/tests/hardware/src/android/hardware/camera2/cts/BurstCaptureTest.java b/tests/tests/hardware/src/android/hardware/camera2/cts/BurstCaptureTest.java
index da8ea65..2da8cdb 100644
--- a/tests/tests/hardware/src/android/hardware/camera2/cts/BurstCaptureTest.java
+++ b/tests/tests/hardware/src/android/hardware/camera2/cts/BurstCaptureTest.java
@@ -89,24 +89,13 @@
                 config.getOutputMinFrameDuration(ImageFormat.YUV_420_888, stillSize);
 
         // Find suitable target FPS range - as high as possible
-        Range<Integer>[] fpsRanges = mStaticInfo.getAeAvailableTargetFpsRangesChecked();
+        Range<Integer> targetRange = mStaticInfo.getAeMaxTargetFpsRange();
         int minBurstFps = (int) Math.floor(1e9 / minStillFrameDuration);
-        Range<Integer> targetRange = null;
-        for (Range<Integer> candidateRange : fpsRanges) {
-            if (candidateRange.getLower() >= minBurstFps) {
-                if (targetRange == null) {
-                    targetRange = candidateRange;
-                } else if (candidateRange.getLower() > targetRange.getLower()) {
-                    targetRange = candidateRange;
-                } else if (candidateRange.getUpper() > targetRange.getUpper()) {
-                    targetRange = candidateRange;
-                }
-            }
-        }
+
         assertTrue(String.format("Cam %s: No target FPS range found with minimum FPS above " +
                         " 1/minFrameDuration (%d fps, duration %d ns) for full-resolution YUV",
-                cameraId, minBurstFps, minStillFrameDuration),
-            targetRange != null);
+                        cameraId, minBurstFps, minStillFrameDuration),
+                targetRange.getLower() >= minBurstFps);
 
         Log.i(TAG, String.format("Selected frame rate range %d - %d for YUV burst",
                         targetRange.getLower(), targetRange.getUpper()));
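
The deleted loop above is the selection the test used to perform inline; getAeMaxTargetFpsRange()
presumably encapsulates an equivalent "highest advertised AE FPS range" choice. A rough sketch of
that kind of selection, purely illustrative and not the actual StaticMetadata implementation:

    import android.util.Range;

    // Illustrative helper: pick the AE target FPS range with the highest bounds.
    static Range<Integer> pickMaxAeTargetFpsRange(Range<Integer>[] fpsRanges) {
        Range<Integer> best = null;
        for (Range<Integer> candidate : fpsRanges) {
            if (best == null
                    || candidate.getLower() > best.getLower()
                    || (candidate.getLower().equals(best.getLower())
                            && candidate.getUpper() > best.getUpper())) {
                best = candidate;
            }
        }
        return best;
    }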
diff --git a/tests/tests/hardware/src/android/hardware/camera2/cts/CameraDeviceTest.java b/tests/tests/hardware/src/android/hardware/camera2/cts/CameraDeviceTest.java
index 53ca31f..9bbcc43 100644
--- a/tests/tests/hardware/src/android/hardware/camera2/cts/CameraDeviceTest.java
+++ b/tests/tests/hardware/src/android/hardware/camera2/cts/CameraDeviceTest.java
@@ -23,6 +23,7 @@
 import static android.hardware.camera2.CaptureRequest.*;
 
 import android.content.Context;
+import android.graphics.SurfaceTexture;
 import android.graphics.ImageFormat;
 import android.hardware.camera2.CameraAccessException;
 import android.hardware.camera2.CameraCaptureSession;
@@ -380,11 +381,7 @@
                 closeSession();
             }
             finally {
-                try {
-
-                } finally {
-                    closeDevice(mCameraIds[i], mCameraMockListener);
-                }
+                closeDevice(mCameraIds[i], mCameraMockListener);
             }
         }
     }
@@ -581,6 +578,146 @@
         }
     }
 
+    /**
+     * Verify basic semantics and error conditions of the prepare call.
+     *
+     */
+    public void testPrepare() throws Exception {
+        for (int i = 0; i < mCameraIds.length; i++) {
+            try {
+                openDevice(mCameraIds[i], mCameraMockListener);
+                waitForDeviceState(STATE_OPENED, CAMERA_OPEN_TIMEOUT_MS);
+
+                prepareTestByCamera();
+            }
+            finally {
+                closeDevice(mCameraIds[i], mCameraMockListener);
+            }
+        }
+    }
+
+    private void prepareTestByCamera() throws Exception {
+        final int PREPARE_TIMEOUT_MS = 10000;
+
+        mSessionMockListener = spy(new BlockingSessionCallback());
+
+        SurfaceTexture output1 = new SurfaceTexture(1);
+        Surface output1Surface = new Surface(output1);
+        SurfaceTexture output2 = new SurfaceTexture(2);
+        Surface output2Surface = new Surface(output2);
+
+        List<Surface> outputSurfaces = new ArrayList<>(
+            Arrays.asList(output1Surface, output2Surface));
+        mCamera.createCaptureSession(outputSurfaces, mSessionMockListener, mHandler);
+
+        mSession = mSessionMockListener.waitAndGetSession(SESSION_CONFIGURE_TIMEOUT_MS);
+
+        // Try basic prepare
+
+        mSession.prepare(output1Surface);
+
+        verify(mSessionMockListener, timeout(PREPARE_TIMEOUT_MS).times(1))
+                .onSurfacePrepared(eq(mSession), eq(output1Surface));
+
+        // Should not complain if preparing already prepared stream
+
+        mSession.prepare(output1Surface);
+
+        verify(mSessionMockListener, timeout(PREPARE_TIMEOUT_MS).times(2))
+                .onSurfacePrepared(eq(mSession), eq(output1Surface));
+
+        // Check surface not included in session
+
+        SurfaceTexture output3 = new SurfaceTexture(3);
+        Surface output3Surface = new Surface(output3);
+        try {
+            mSession.prepare(output3Surface);
+            fail("Preparing surface not part of session must throw IllegalArgumentException");
+        } catch (IllegalArgumentException e) {
+            // expected
+        }
+
+        // Ensure second prepare also works
+
+        mSession.prepare(output2Surface);
+
+        verify(mSessionMockListener, timeout(PREPARE_TIMEOUT_MS).times(1))
+                .onSurfacePrepared(eq(mSession), eq(output2Surface));
+
+        // Use output1
+
+        CaptureRequest.Builder r = mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
+        r.addTarget(output1Surface);
+
+        mSession.capture(r.build(), null, null);
+
+        try {
+            mSession.prepare(output1Surface);
+            fail("Preparing already-used surface must throw IllegalArgumentException");
+        } catch (IllegalArgumentException e) {
+            // expected
+        }
+
+        // Create new session with outputs 1 and 3, ensure output1Surface still can't be prepared
+        // again
+
+        mSessionMockListener = spy(new BlockingSessionCallback());
+
+        outputSurfaces = new ArrayList<>(
+            Arrays.asList(output1Surface, output3Surface));
+        mCamera.createCaptureSession(outputSurfaces, mSessionMockListener, mHandler);
+
+        mSession = mSessionMockListener.waitAndGetSession(SESSION_CONFIGURE_TIMEOUT_MS);
+
+        try {
+            mSession.prepare(output1Surface);
+            fail("Preparing surface used in previous session must throw IllegalArgumentException");
+        } catch (IllegalArgumentException e) {
+            // expected
+        }
+
+        // Use output3, wait for result, then make sure prepare still doesn't work
+
+        r = mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
+        r.addTarget(output3Surface);
+
+        SimpleCaptureCallback resultListener = new SimpleCaptureCallback();
+        mSession.capture(r.build(), resultListener, mHandler);
+
+        resultListener.getCaptureResult(CAPTURE_RESULT_TIMEOUT_MS);
+
+        try {
+            mSession.prepare(output3Surface);
+            fail("Preparing already-used surface must throw IllegalArgumentException");
+        } catch (IllegalArgumentException e) {
+            // expected
+        }
+
+        // Create new session with outputs 1 and 2, ensure output2Surface can be prepared again
+
+        mSessionMockListener = spy(new BlockingSessionCallback());
+
+        outputSurfaces = new ArrayList<>(
+            Arrays.asList(output1Surface, output2Surface));
+        mCamera.createCaptureSession(outputSurfaces, mSessionMockListener, mHandler);
+
+        mSession = mSessionMockListener.waitAndGetSession(SESSION_CONFIGURE_TIMEOUT_MS);
+
+        mSession.prepare(output2Surface);
+
+        verify(mSessionMockListener, timeout(PREPARE_TIMEOUT_MS).times(1))
+                .onSurfacePrepared(eq(mSession), eq(output2Surface));
+
+        try {
+            mSession.prepare(output1Surface);
+            fail("Preparing surface used in previous session must throw IllegalArgumentException");
+        } catch (IllegalArgumentException e) {
+            // expected
+        }
+
+    }
+
+
     private void invalidRequestCaptureTestByCamera() throws Exception {
         if (VERBOSE) Log.v(TAG, "invalidRequestCaptureTestByCamera");
 
@@ -888,7 +1025,7 @@
         mSession = mSessionMockListener.waitAndGetSession(SESSION_CONFIGURE_TIMEOUT_MS);
         waitForSessionState(SESSION_CONFIGURED, SESSION_CONFIGURE_TIMEOUT_MS);
         waitForSessionState(SESSION_READY, SESSION_READY_TIMEOUT_MS);
-}
+    }
 
     private void waitForDeviceState(int state, long timeoutMs) {
         mCameraMockListener.waitForState(state, timeoutMs);
@@ -1333,7 +1470,7 @@
                     // OK
                 } else if (template == CameraDevice.TEMPLATE_ZERO_SHUTTER_LAG &&
                         !mStaticInfo.isCapabilitySupported(CameraCharacteristics.
-                                REQUEST_AVAILABLE_CAPABILITIES_OPAQUE_REPROCESSING)) {
+                                REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING)) {
                     // OK.
                 } else if (sLegacySkipTemplates.contains(template) &&
                         mStaticInfo.isHardwareLevelLegacy()) {
diff --git a/tests/tests/hardware/src/android/hardware/camera2/cts/CameraTestUtils.java b/tests/tests/hardware/src/android/hardware/camera2/cts/CameraTestUtils.java
index 9da74d1..f4eeba9 100644
--- a/tests/tests/hardware/src/android/hardware/camera2/cts/CameraTestUtils.java
+++ b/tests/tests/hardware/src/android/hardware/camera2/cts/CameraTestUtils.java
@@ -37,6 +37,7 @@
 import android.hardware.camera2.params.StreamConfigurationMap;
 import android.media.Image;
 import android.media.ImageReader;
+import android.media.ImageWriter;
 import android.media.Image.Plane;
 import android.os.Handler;
 import android.util.Log;
@@ -60,10 +61,12 @@
 import java.util.Arrays;
 import java.util.Collections;
 import java.util.Comparator;
+import java.util.HashMap;
 import java.util.List;
-import java.util.concurrent.LinkedBlockingQueue;
-import java.util.concurrent.TimeUnit;
 import java.util.concurrent.atomic.AtomicLong;
+import java.util.concurrent.LinkedBlockingQueue;
+import java.util.concurrent.Semaphore;
+import java.util.concurrent.TimeUnit;
 
 /**
  * A package private utility class for wrapping up the camera2 cts test common utility functions
@@ -106,19 +109,31 @@
     public static ImageReader makeImageReader(Size size, int format, int maxNumImages,
             ImageReader.OnImageAvailableListener listener, Handler handler) {
         ImageReader reader;
-        if (format == ImageFormat.PRIVATE) {
-            reader = ImageReader.newOpaqueInstance(size.getWidth(), size.getHeight(),
-                    maxNumImages);
-        } else {
-            reader = ImageReader.newInstance(size.getWidth(), size.getHeight(), format,
-                    maxNumImages);
-        }
+        reader = ImageReader.newInstance(size.getWidth(), size.getHeight(), format,
+                maxNumImages);
         reader.setOnImageAvailableListener(listener, handler);
         if (VERBOSE) Log.v(TAG, "Created ImageReader size " + size);
         return reader;
     }
 
     /**
+     * Create an ImageWriter and hook up the ImageListener.
+     *
+     * @param inputSurface The input surface of the ImageWriter.
+     * @param maxImages The max number of Images that can be dequeued simultaneously.
+     * @param listener The listener used by this ImageWriter to notify callbacks
+     * @param handler The handler to post listener callbacks.
+     * @return ImageWriter object created.
+     */
+    public static ImageWriter makeImageWriter(
+            Surface inputSurface, int maxImages,
+            ImageWriter.OnImageReleasedListener listener, Handler handler) {
+        ImageWriter writer = ImageWriter.newInstance(inputSurface, maxImages);
+        writer.setOnImageReleasedListener(listener, handler);
+        return writer;
+    }
+
+    /**
      * Close pending images and clean up an {@link android.media.ImageReader} object.
      * @param reader an {@link android.media.ImageReader} to close.
      */
@@ -129,6 +144,16 @@
     }
 
     /**
+     * Close pending images and clean up an {@link android.media.ImageWriter} object.
+     * @param writer an {@link android.media.ImageWriter} to close.
+     */
+    public static void closeImageWriter(ImageWriter writer) {
+        if (writer != null) {
+            writer.close();
+        }
+    }
+
+    /**
      * Dummy listener that release the image immediately once it is available.
      *
      * <p>
@@ -179,11 +204,52 @@
             implements ImageReader.OnImageAvailableListener {
         private final LinkedBlockingQueue<Image> mQueue =
                 new LinkedBlockingQueue<Image>();
+        // Indicates whether this listener will drop images when the number of
+        // queued images reaches the reader's maxImages.
+        private final boolean mAsyncMode;
+        // maxImages held by the queue in async mode.
+        private final int mMaxImages;
+
+        /**
+         * Create a synchronous SimpleImageReaderListener that queues the images
+         * automatically when they are available, no image will be dropped. If
+         * the caller doesn't call getImage(), the producer will eventually run
+         * into buffer starvation.
+         */
+        public SimpleImageReaderListener() {
+            mAsyncMode = false;
+            mMaxImages = 0;
+        }
+
+        /**
+         * Create a synchronous or asynchronous SimpleImageReaderListener that
+         * queues the images automatically when they are available. For an
+         * asynchronous listener, images will be dropped once the number of
+         * queued images reaches maxImages, so the producer will not be blocked
+         * even if the caller doesn't call getImage(). For a synchronous
+         * listener, no images will be dropped; if the caller doesn't call
+         * getImage(), the producer will eventually run into buffer starvation.
+         *
+         * @param asyncMode Whether the listener operates in asynchronous mode.
+         * @param maxImages The max number of images held by this listener.
+         */
+        public SimpleImageReaderListener(boolean asyncMode, int maxImages) {
+            mAsyncMode = asyncMode;
+            mMaxImages = maxImages;
+        }
 
         @Override
         public void onImageAvailable(ImageReader reader) {
             try {
                 mQueue.put(reader.acquireNextImage());
+                if (mAsyncMode && mQueue.size() >= mMaxImages) {
+                    Image img = mQueue.poll();
+                    img.close();
+                }
             } catch (InterruptedException e) {
                 throw new UnsupportedOperationException(
                         "Can't handle InterruptedException in onImageAvailable");
@@ -201,6 +267,47 @@
             assertNotNull("Wait for an image timed out in " + timeout + "ms", image);
             return image;
         }
+
+        /**
+         * Drain and close any images currently pending in this listener's queue.
+         */
+        public void drain() {
+            // Snapshot the size first; polling shrinks the queue, so using
+            // mQueue.size() as the loop bound would only drain half the images.
+            int pending = mQueue.size();
+            for (int i = 0; i < pending; i++) {
+                Image image = mQueue.poll();
+                assertNotNull("Unable to get an image", image);
+                image.close();
+            }
+        }
+    }
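
A short sketch of how a test might use the asynchronous mode added above, so that a slow test
thread does not stall the camera; previewSize, handler, MAX_IMAGES, and IMAGE_TIMEOUT_MS are
placeholders:

    // Drop-oldest behavior: at most MAX_IMAGES images stay queued in the listener.
    SimpleImageReaderListener readerListener =
            new SimpleImageReaderListener(/*asyncMode*/ true, /*maxImages*/ MAX_IMAGES);
    ImageReader reader = makeImageReader(previewSize, ImageFormat.YUV_420_888,
            MAX_IMAGES, readerListener, handler);

    // ... issue repeating requests targeting reader.getSurface() ...

    Image latest = readerListener.getImage(IMAGE_TIMEOUT_MS);
    latest.close();
    readerListener.drain();
    reader.close();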
+
+    public static class SimpleImageWriterListener implements ImageWriter.OnImageReleasedListener {
+        private final Semaphore mImageReleasedSema = new Semaphore(0);
+        private final ImageWriter mWriter;
+        @Override
+        public void onImageReleased(ImageWriter writer) {
+            if (writer != mWriter) {
+                return;
+            }
+
+            if (VERBOSE) {
+                Log.v(TAG, "Input image is released");
+            }
+            mImageReleasedSema.release();
+        }
+
+        public SimpleImageWriterListener(ImageWriter writer) {
+            if (writer == null) {
+                throw new IllegalArgumentException("writer cannot be null");
+            }
+            mWriter = writer;
+        }
+
+        public void waitForImageReleased(long timeoutMs) throws InterruptedException {
+            if (!mImageReleasedSema.tryAcquire(timeoutMs, TimeUnit.MILLISECONDS)) {
+                fail("Wait for image to be released timed out after " + timeoutMs + "ms");
+            }
+        }
     }
 
     public static class SimpleCaptureCallback extends CameraCaptureSession.CaptureCallback {
@@ -244,6 +351,27 @@
             return getTotalCaptureResult(timeout);
         }
 
+        public TotalCaptureResult getCaptureResult(long timeout, long timestamp) {
+            try {
+                long currentTs = -1L;
+                TotalCaptureResult result;
+                while (true) {
+                    result = mQueue.poll(timeout, TimeUnit.MILLISECONDS);
+                    if (result == null) {
+                        throw new RuntimeException(
+                                "Wait for a capture result timed out in " + timeout + "ms");
+                    }
+                    currentTs = result.get(CaptureResult.SENSOR_TIMESTAMP);
+                    if (currentTs == timestamp) {
+                        return result;
+                    }
+                }
+
+            } catch (InterruptedException e) {
+                throw new UnsupportedOperationException("Unhandled interrupted exception", e);
+            }
+        }
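
A typical pairing for the timestamp-matching overload above: read the sensor timestamp off a
received Image and use it to pull the matching result, even if other results arrive in between
(readerListener, resultListener, and the timeout constants are placeholders):

    Image image = readerListener.getImage(IMAGE_TIMEOUT_MS);
    long sensorTimestamp = image.getTimestamp();
    TotalCaptureResult result =
            resultListener.getCaptureResult(RESULT_TIMEOUT_MS, sensorTimestamp);
    image.close();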
+
         public TotalCaptureResult getTotalCaptureResult(long timeout) {
             try {
                 TotalCaptureResult result = mQueue.poll(timeout, TimeUnit.MILLISECONDS);
@@ -289,16 +417,63 @@
          */
         public TotalCaptureResult getTotalCaptureResultForRequest(CaptureRequest myRequest,
                 int numResultsWait) {
+            ArrayList<CaptureRequest> captureRequests = new ArrayList<>(1);
+            captureRequests.add(myRequest);
+            return getTotalCaptureResultsForRequests(captureRequests, numResultsWait)[0];
+        }
+
+        /**
+         * Get an array of {@link #TotalCaptureResult total capture results} for a given list of
+         * {@link #CaptureRequest capture requests}. This can be used when the order of results
+         * may not be the same as the order of requests.
+         *
+         * @param captureRequests The list of {@link #CaptureRequest capture requests} whose
+         *            corresponding {@link #TotalCaptureResult capture results} are
+         *            being waited for.
+         * @param numResultsWait Number of frames to wait for the capture results
+         *            before timeout.
+         * @throws TimeoutRuntimeException If more than numResultsWait results are
+         *            seen before all the results matching captureRequests arrive.
+         */
+        public TotalCaptureResult[] getTotalCaptureResultsForRequests(
+                List<CaptureRequest> captureRequests, int numResultsWait) {
             if (numResultsWait < 0) {
                 throw new IllegalArgumentException("numResultsWait must be no less than 0");
             }
+            if (captureRequests == null || captureRequests.size() == 0) {
+                throw new IllegalArgumentException("captureRequests must have at least 1 request.");
+            }
 
-            TotalCaptureResult result;
+            // Create a request -> a list of result indices map that it will wait for.
+            HashMap<CaptureRequest, ArrayList<Integer>> remainingResultIndicesMap = new HashMap<>();
+            for (int i = 0; i < captureRequests.size(); i++) {
+                CaptureRequest request = captureRequests.get(i);
+                ArrayList<Integer> indices = remainingResultIndicesMap.get(request);
+                if (indices == null) {
+                    indices = new ArrayList<>();
+                    remainingResultIndicesMap.put(request, indices);
+                }
+                indices.add(i);
+            }
+
+            TotalCaptureResult[] results = new TotalCaptureResult[captureRequests.size()];
             int i = 0;
             do {
-                result = getTotalCaptureResult(CAPTURE_RESULT_TIMEOUT_MS);
-                if (result.getRequest().equals(myRequest)) {
-                    return result;
+                TotalCaptureResult result = getTotalCaptureResult(CAPTURE_RESULT_TIMEOUT_MS);
+                CaptureRequest request = result.getRequest();
+                ArrayList<Integer> indices = remainingResultIndicesMap.get(request);
+                if (indices != null) {
+                    results[indices.get(0)] = result;
+                    indices.remove(0);
+
+                    // Remove the entry once all results for this request have been fulfilled.
+                    if (indices.isEmpty()) {
+                        remainingResultIndicesMap.remove(request);
+                    }
+                }
+
+                if (remainingResultIndicesMap.isEmpty()) {
+                    return results;
                 }
             } while (i++ < numResultsWait);
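
A usage sketch for the out-of-order matching implemented above, assuming a configured session, a
list of already-built requests, and a background handler (all placeholder names):

    SimpleCaptureCallback resultListener = new SimpleCaptureCallback();
    session.captureBurst(burstRequests, resultListener, backgroundHandler);

    // Each result is mapped back to the index of its originating request, even if
    // results are delivered in a different order than the requests were submitted.
    TotalCaptureResult[] results = resultListener.getTotalCaptureResultsForRequests(
            burstRequests, /*numResultsWait*/ burstRequests.size() * 2);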
 
@@ -310,6 +485,11 @@
         {
             return mQueue.isEmpty();
         }
+
+        public void drain() {
+            mQueue.clear();
+            mNumFramesArrived.getAndSet(0);
+        }
     }
 
     /**
@@ -384,12 +564,12 @@
         return sessionListener.waitAndGetSession(SESSION_CONFIGURE_TIMEOUT_MS);
     }
 
-    public static CameraCaptureSession configureReprocessibleCameraSession(CameraDevice camera,
+    public static CameraCaptureSession configureReprocessableCameraSession(CameraDevice camera,
             InputConfiguration inputConfiguration, List<Surface> outputSurfaces,
             CameraCaptureSession.StateCallback listener, Handler handler)
             throws CameraAccessException {
         BlockingSessionCallback sessionListener = new BlockingSessionCallback(listener);
-        camera.createReprocessibleCaptureSession(inputConfiguration, outputSurfaces,
+        camera.createReprocessableCaptureSession(inputConfiguration, outputSurfaces,
                 sessionListener, handler);
 
         return sessionListener.waitAndGetSession(SESSION_CONFIGURE_TIMEOUT_MS);
@@ -1085,8 +1265,9 @@
         if (src.getFormat() != dst.getFormat()) {
             throw new IllegalArgumentException("Src and dst images should have the same format");
         }
-        if (src.isOpaque() || dst.isOpaque()) {
-            throw new IllegalArgumentException("Opaque image is not copyable");
+        if (src.getFormat() == ImageFormat.PRIVATE ||
+                dst.getFormat() == ImageFormat.PRIVATE) {
+            throw new IllegalArgumentException("PRIVATE format images are not copyable");
         }
 
         // TODO: check the owner of the dst image, it must be from ImageWriter, other source may
@@ -1113,9 +1294,9 @@
      * Checks whether the two images are strongly equal.
      * </p>
      * <p>
-     * Two images are strongly equal if and only if the data, formats, sizes, and
-     * timestamps are same. For opaque images ({@link Image#isOpaque()} returns
-     * true), the image data is not not accessible thus the data comparison is
+     * Two images are strongly equal if and only if the data, formats, sizes,
+     * and timestamps are same. For {@link ImageFormat#PRIVATE PRIVATE} format
+     * images, the image data is not accessible, thus the data comparison is
      * effectively skipped as the number of planes is zero.
      * </p>
      * <p>
diff --git a/tests/tests/hardware/src/android/hardware/camera2/cts/DngCreatorTest.java b/tests/tests/hardware/src/android/hardware/camera2/cts/DngCreatorTest.java
index 807aa29..a8f1b48 100644
--- a/tests/tests/hardware/src/android/hardware/camera2/cts/DngCreatorTest.java
+++ b/tests/tests/hardware/src/android/hardware/camera2/cts/DngCreatorTest.java
@@ -19,15 +19,16 @@
 import android.content.Context;
 import android.graphics.Bitmap;
 import android.graphics.BitmapFactory;
-import android.graphics.BitmapRegionDecoder;
 import android.graphics.ImageFormat;
 import android.graphics.Rect;
 import android.graphics.RectF;
+import android.hardware.camera2.CameraCaptureSession;
 import android.hardware.camera2.CameraCharacteristics;
 import android.hardware.camera2.CameraDevice;
 import android.hardware.camera2.CaptureRequest;
 import android.hardware.camera2.CaptureResult;
 import android.hardware.camera2.DngCreator;
+import android.hardware.camera2.TotalCaptureResult;
 import android.hardware.camera2.cts.helpers.StaticMetadata;
 import android.hardware.camera2.cts.rs.BitmapUtils;
 import android.hardware.camera2.cts.rs.RawConverter;
@@ -37,6 +38,7 @@
 import android.media.ExifInterface;
 import android.media.Image;
 import android.media.ImageReader;
+import android.os.ConditionVariable;
 import android.util.Log;
 import android.util.Pair;
 import android.util.Size;
@@ -51,7 +53,6 @@
 import java.util.List;
 
 import static android.hardware.camera2.cts.helpers.AssertHelpers.*;
-import static junit.framework.Assert.assertTrue;
 
 /**
  * Tests for the DngCreator API.
@@ -61,8 +62,9 @@
     private static final boolean VERBOSE = Log.isLoggable(TAG, Log.VERBOSE);
     private static final String DEBUG_DNG_FILE = "raw16.dng";
 
-    private static final double IMAGE_DIFFERENCE_TOLERANCE = 60;
+    private static final double IMAGE_DIFFERENCE_TOLERANCE = 65;
     private static final int DEFAULT_PATCH_DIMEN = 512;
+    private static final int AE_TIMEOUT_MS = 2000;
 
     @Override
     protected void setUp() throws Exception {
@@ -131,7 +133,7 @@
                 captureReader = createImageReader(activeArraySize, ImageFormat.RAW_SENSOR, 2,
                         captureListener);
                 Pair<Image, CaptureResult> resultPair = captureSingleRawShot(activeArraySize,
-                        captureReader, captureListener);
+                        /*waitForAe*/false, captureReader, captureListener);
                 CameraCharacteristics characteristics = mStaticInfo.getCharacteristics();
 
                 // Test simple writeImage, no header checks
@@ -233,7 +235,7 @@
                 captureListeners.add(previewListener);
 
                 Pair<List<Image>, CaptureResult> resultPair = captureSingleRawShot(activeArraySize,
-                        captureReaders, captureListeners);
+                        captureReaders, /*waitForAe*/false, captureListeners);
                 CameraCharacteristics characteristics = mStaticInfo.getCharacteristics();
 
                 // Test simple writeImage, no header checks
@@ -356,7 +358,7 @@
                 captureListeners.add(jpegListener);
 
                 Pair<List<Image>, CaptureResult> resultPair = captureSingleRawShot(activeArraySize,
-                        captureReaders, captureListeners);
+                        captureReaders, /*waitForAe*/true, captureListeners);
                 CameraCharacteristics characteristics = mStaticInfo.getCharacteristics();
                 Image raw = resultPair.first.get(0);
                 Image jpeg = resultPair.first.get(1);
@@ -369,8 +371,9 @@
                 raw.getPlanes()[0].getBuffer().get(rawPlane);
                 raw.getPlanes()[0].getBuffer().rewind();
                 RawConverter.convertToSRGB(RenderScriptSingleton.getRS(), raw.getWidth(),
-                        raw.getHeight(), rawPlane, characteristics,
-                        resultPair.second, /*offsetX*/0, /*offsetY*/0, /*out*/rawBitmap);
+                        raw.getHeight(), raw.getPlanes()[0].getRowStride(), rawPlane,
+                        characteristics, resultPair.second, /*offsetX*/0, /*offsetY*/0,
+                        /*out*/rawBitmap);
 
                 // Decompress JPEG image to a bitmap
                 byte[] compressedJpegData = CameraTestUtils.getDataFromImage(jpeg);
@@ -494,20 +497,23 @@
         }
     }
 
-    private Pair<Image, CaptureResult> captureSingleRawShot(Size s, ImageReader captureReader,
+    private Pair<Image, CaptureResult> captureSingleRawShot(Size s, boolean waitForAe,
+            ImageReader captureReader,
             CameraTestUtils.SimpleImageReaderListener captureListener) throws Exception {
         List<ImageReader> readers = new ArrayList<ImageReader>();
         readers.add(captureReader);
         List<CameraTestUtils.SimpleImageReaderListener> listeners =
                 new ArrayList<CameraTestUtils.SimpleImageReaderListener>();
         listeners.add(captureListener);
-        Pair<List<Image>, CaptureResult> res = captureSingleRawShot(s, readers, listeners);
+        Pair<List<Image>, CaptureResult> res = captureSingleRawShot(s, readers, waitForAe,
+                listeners);
         return new Pair<Image, CaptureResult>(res.first.get(0), res.second);
     }
 
-    private Pair<List<Image>, CaptureResult> captureSingleRawShot(Size s, List<ImageReader> captureReaders,
+    private Pair<List<Image>, CaptureResult> captureSingleRawShot(Size s,
+            List<ImageReader> captureReaders, boolean waitForAe,
             List<CameraTestUtils.SimpleImageReaderListener> captureListeners) throws Exception {
-        return captureRawShots(s, captureReaders, captureListeners, 1).get(0);
+        return captureRawShots(s, captureReaders, waitForAe, captureListeners, 1).get(0);
     }
 
     /**
@@ -520,8 +526,10 @@
      * @return a list of pairs containing a {@link Image} and {@link CaptureResult} used for
      *          each capture.
      */
-    private List<Pair<List<Image>, CaptureResult>> captureRawShots(Size s, List<ImageReader> captureReaders,
-            List<CameraTestUtils.SimpleImageReaderListener> captureListeners, int numShots) throws Exception {
+    private List<Pair<List<Image>, CaptureResult>> captureRawShots(Size s,
+            List<ImageReader> captureReaders, boolean waitForAe,
+            List<CameraTestUtils.SimpleImageReaderListener> captureListeners,
+            int numShots) throws Exception {
         if (VERBOSE) {
             Log.v(TAG, "captureSingleRawShot - Capturing raw image.");
         }
@@ -540,16 +548,74 @@
         }
         assertTrue("Capture size is supported.", validSize);
 
-
         // Capture images.
-        List<Surface> outputSurfaces = new ArrayList<Surface>();
+        final List<Surface> outputSurfaces = new ArrayList<Surface>();
         for (ImageReader captureReader : captureReaders) {
             Surface captureSurface = captureReader.getSurface();
             outputSurfaces.add(captureSurface);
         }
 
-        CaptureRequest.Builder request = prepareCaptureRequestForSurfaces(outputSurfaces,
-                CameraDevice.TEMPLATE_STILL_CAPTURE);
+        // Set up still capture template targeting JPEG/RAW outputs
+        CaptureRequest.Builder request =
+                mCamera.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE);
+        assertNotNull("Fail to get captureRequest", request);
+        for (Surface surface : outputSurfaces) {
+            request.addTarget(surface);
+        }
+
+        ImageReader previewReader = null;
+        if (waitForAe) {
+            // Also setup a small YUV output for AE metering if needed
+            Size yuvSize = (mOrderedPreviewSizes.size() == 0) ? null :
+                    mOrderedPreviewSizes.get(mOrderedPreviewSizes.size() - 1);
+            assertNotNull("Must support at least one small YUV size.", yuvSize);
+            previewReader = createImageReader(yuvSize, ImageFormat.YUV_420_888,
+                        /*maxNumImages*/2, new CameraTestUtils.ImageDropperListener());
+            outputSurfaces.add(previewReader.getSurface());
+        }
+
+        createSession(outputSurfaces);
+
+        if (waitForAe) {
+            CaptureRequest.Builder precaptureRequest =
+                    mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
+            assertNotNull("Fail to get captureRequest", precaptureRequest);
+            precaptureRequest.addTarget(previewReader.getSurface());
+            precaptureRequest.set(CaptureRequest.CONTROL_MODE,
+                    CaptureRequest.CONTROL_MODE_AUTO);
+            precaptureRequest.set(CaptureRequest.CONTROL_AE_MODE,
+                    CaptureRequest.CONTROL_AE_MODE_ON);
+
+            final ConditionVariable waitForAeCondition = new ConditionVariable(/*isOpen*/false);
+            CameraCaptureSession.CaptureCallback captureCallback =
+                    new CameraCaptureSession.CaptureCallback() {
+                @Override
+                public void onCaptureProgressed(CameraCaptureSession session,
+                        CaptureRequest request, CaptureResult partialResult) {
+                    Integer aeState = partialResult.get(CaptureResult.CONTROL_AE_STATE);
+                    if (aeState != null && aeState == CaptureResult.CONTROL_AE_STATE_CONVERGED) {
+                        waitForAeCondition.open();
+                    }
+                }
+
+                @Override
+                public void onCaptureCompleted(CameraCaptureSession session,
+                        CaptureRequest request, TotalCaptureResult result) {
+                    Integer aeState = result.get(CaptureResult.CONTROL_AE_STATE);
+                    if (aeState != null && aeState == CaptureResult.CONTROL_AE_STATE_CONVERGED) {
+                        waitForAeCondition.open();
+                    }
+                }
+            };
+            startCapture(precaptureRequest.build(), /*repeating*/true, captureCallback, mHandler);
+
+            precaptureRequest.set(CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER,
+                    CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER_START);
+            startCapture(precaptureRequest.build(), /*repeating*/false, captureCallback, mHandler);
+            assertTrue("Timed out waiting for AE to converge",
+                    waitForAeCondition.block(AE_TIMEOUT_MS));
+        }
+
         request.set(CaptureRequest.STATISTICS_LENS_SHADING_MAP_MODE,
                 CaptureRequest.STATISTICS_LENS_SHADING_MAP_MODE_ON);
         CameraTestUtils.SimpleCaptureCallback resultListener =
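Note on the AE wait added above: it relies on android.os.ConditionVariable, where the capture callback opens the condition once CONTROL_AE_STATE reports CONVERGED and the test thread blocks on it with a timeout. A minimal, self-contained sketch of that open/block pattern (the class and method names below are illustrative only, not part of the CTS sources):

    import android.os.ConditionVariable;

    // Sketch of the open/block handshake used for AE convergence above.
    // The callback thread calls open() when the awaited state is observed;
    // the waiting thread blocks with a timeout and treats false as "never converged".
    class ConvergenceWaiter {
        private final ConditionVariable mConverged = new ConditionVariable(/*isOpen*/ false);

        void onStateObserved() {                 // e.g. called from a CaptureCallback
            mConverged.open();                   // releases any thread blocked in await()
        }

        boolean await(long timeoutMs) {          // e.g. called from the test body
            return mConverged.block(timeoutMs);  // true if opened before the timeout
        }
    }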
diff --git a/tests/tests/hardware/src/android/hardware/camera2/cts/ExtendedCameraCharacteristicsTest.java b/tests/tests/hardware/src/android/hardware/camera2/cts/ExtendedCameraCharacteristicsTest.java
index 00a5d66..f504b44 100644
--- a/tests/tests/hardware/src/android/hardware/camera2/cts/ExtendedCameraCharacteristicsTest.java
+++ b/tests/tests/hardware/src/android/hardware/camera2/cts/ExtendedCameraCharacteristicsTest.java
@@ -87,7 +87,7 @@
     private static final int YUV_REPROCESS =
             CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING;
     private static final int OPAQUE_REPROCESS =
-            CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_OPAQUE_REPROCESSING;
+            CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING;
 
     @Override
     public void setContext(Context context) {
@@ -491,7 +491,7 @@
             boolean supportYUV = arrayContains(capabilities,
                     CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING);
             boolean supportOpaque = arrayContains(capabilities,
-                    CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_OPAQUE_REPROCESSING);
+                    CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING);
             StreamConfigurationMap configs =
                     c.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
             Integer maxNumInputStreams =
diff --git a/tests/tests/hardware/src/android/hardware/camera2/cts/ImageWriterTest.java b/tests/tests/hardware/src/android/hardware/camera2/cts/ImageWriterTest.java
index a4d611d..b081660 100644
--- a/tests/tests/hardware/src/android/hardware/camera2/cts/ImageWriterTest.java
+++ b/tests/tests/hardware/src/android/hardware/camera2/cts/ImageWriterTest.java
@@ -28,7 +28,6 @@
 import android.media.Image.Plane;
 import android.media.ImageReader;
 import android.media.ImageWriter;
-import android.os.ConditionVariable;
 import android.util.Log;
 import android.util.Size;
 import android.view.Surface;
@@ -51,7 +50,7 @@
     private static final boolean DEBUG = Log.isLoggable(TAG, Log.DEBUG);
     // Max number of images can be accessed simultaneously from ImageReader.
     private static final int MAX_NUM_IMAGES = 3;
-    private static final int CAMERA_OPAQUE_FORMAT = ImageFormat.PRIVATE;
+    private static final int CAMERA_PRIVATE_FORMAT = ImageFormat.PRIVATE;
     private ImageReader mReaderForWriter;
     private ImageWriter mWriter;
 
@@ -125,34 +124,13 @@
             try {
                 Log.i(TAG, "Testing Camera " + id);
                 openDevice(id);
-                readerWriterFormatTestByCamera(CAMERA_OPAQUE_FORMAT);
+                readerWriterFormatTestByCamera(CAMERA_PRIVATE_FORMAT);
             } finally {
                 closeDevice(id);
             }
         }
     }
 
-    private final class SimpleImageWriterListener implements ImageWriter.ImageListener {
-        private final ConditionVariable imageReleased = new ConditionVariable();
-        @Override
-        public void onInputImageReleased(ImageWriter writer) {
-            if (writer != mWriter) {
-                return;
-            }
-
-            if (VERBOSE) Log.v(TAG, "Input image is released");
-            imageReleased.open();
-        }
-
-        public void waitForImageReleassed(long timeoutMs) {
-            if (imageReleased.block(timeoutMs)) {
-                imageReleased.close();
-            } else {
-                fail("wait for image available timed out after " + timeoutMs + "ms");
-            }
-        }
-    }
-
     private void readerWriterFormatTestByCamera(int format)  throws Exception {
         List<Size> sizes = getSortedSizesForFormat(mCamera.getId(), mCameraManager, format, null);
         Size maxSize = sizes.get(0);
@@ -178,8 +156,8 @@
         Surface surface = mReaderForWriter.getSurface();
         assertNotNull("Surface from ImageReader shouldn't be null", surface);
         mWriter = ImageWriter.newInstance(surface, MAX_NUM_IMAGES);
-        SimpleImageWriterListener writerImageListener = new SimpleImageWriterListener();
-        mWriter.setImageListener(writerImageListener, mHandler);
+        SimpleImageWriterListener writerImageListener = new SimpleImageWriterListener(mWriter);
+        mWriter.setOnImageReleasedListener(writerImageListener, mHandler);
 
         // Start capture: capture 2 images.
         List<Surface> outputSurfaces = new ArrayList<Surface>();
@@ -201,17 +179,19 @@
         Image inputImage = null;
         // Image from the second ImageReader.
         Image outputImage = null;
-        if (format == CAMERA_OPAQUE_FORMAT) {
-            assertTrue("First ImageReader should be opaque",
-                    mReader.isOpaque());
-            assertTrue("Second ImageReader should be opaque",
-                    mReaderForWriter.isOpaque());
-            assertTrue("Format of first ImageReader should be opaque",
-                    mReader.getImageFormat() == CAMERA_OPAQUE_FORMAT);
-            assertTrue(" Format of second ImageReader should be opaque",
-                    mReaderForWriter.getImageFormat() == CAMERA_OPAQUE_FORMAT);
-            assertTrue(" Format of ImageWriter should be opaque",
-                    mWriter.getFormat() == CAMERA_OPAQUE_FORMAT);
+        assertTrue("ImageWriter max images should be " + MAX_NUM_IMAGES,
+                mWriter.getMaxImages() == MAX_NUM_IMAGES);
+        if (format == CAMERA_PRIVATE_FORMAT) {
+            assertTrue("Format of first ImageReader should be PRIVATE",
+                    mReader.getImageFormat() == CAMERA_PRIVATE_FORMAT);
+            assertTrue("Format of second ImageReader should be PRIVATE",
+                    mReaderForWriter.getImageFormat() == CAMERA_PRIVATE_FORMAT);
+            assertTrue("Format of ImageWriter should be PRIVATE",
+                    mWriter.getFormat() == CAMERA_PRIVATE_FORMAT);
 
             // Validate 2 images
             validateOpaqueImages(maxSize, listenerForCamera, listenerForWriter, captureListener,
@@ -255,7 +235,7 @@
             outputImage.close();
 
             // Make sure ImageWriter listener callback is fired.
-            writerImageListener.waitForImageReleassed(CAPTURE_IMAGE_TIMEOUT_MS);
+            writerImageListener.waitForImageReleased(CAPTURE_IMAGE_TIMEOUT_MS);
 
             // Test case 2: Directly inject the image into ImageWriter: works for all formats.
 
@@ -289,7 +269,7 @@
             outputImage.close();
 
             // Make sure ImageWriter listener callback is fired.
-            writerImageListener.waitForImageReleassed(CAPTURE_IMAGE_TIMEOUT_MS);
+            writerImageListener.waitForImageReleased(CAPTURE_IMAGE_TIMEOUT_MS);
         }
 
         stopCapture(/*fast*/false);
@@ -319,24 +299,16 @@
             validateOpaqueImage(outputImage, "First Opaque image output by ImageWriter: ",
                     maxSize, result);
             outputImage.close();
-            writerListener.waitForImageReleassed(CAPTURE_IMAGE_TIMEOUT_MS);
+            writerListener.waitForImageReleased(CAPTURE_IMAGE_TIMEOUT_MS);
         }
     }
 
     private void validateOpaqueImage(Image image, String msg, Size imageSize,
             CaptureResult result) {
         assertNotNull("Opaque image Capture result should not be null", result != null);
-        mCollector.expectTrue(msg + "Opaque image format should be: " + CAMERA_OPAQUE_FORMAT,
-                image.getFormat() == CAMERA_OPAQUE_FORMAT);
-        mCollector.expectTrue(msg + "Opaque image format should be: " + CAMERA_OPAQUE_FORMAT,
-                image.getFormat() == CAMERA_OPAQUE_FORMAT);
+        mCollector.expectImageProperties(msg + "Opaque ", image, CAMERA_PRIVATE_FORMAT,
+                imageSize, result.get(CaptureResult.SENSOR_TIMESTAMP));
         mCollector.expectTrue(msg + "Opaque image number planes should be zero",
                 image.getPlanes().length == 0);
-        mCollector.expectTrue(msg + "Opaque image size should be " + imageSize,
-                image.getWidth() == imageSize.getWidth() &&
-                image.getHeight() == imageSize.getHeight());
-        long timestampNs = result.get(CaptureResult.SENSOR_TIMESTAMP);
-        mCollector.expectTrue(msg + "Opaque image timestamp should be " + timestampNs,
-                image.getTimestamp() == timestampNs);
     }
 }
diff --git a/tests/tests/hardware/src/android/hardware/camera2/cts/PerformanceTest.java b/tests/tests/hardware/src/android/hardware/camera2/cts/PerformanceTest.java
index d4a0e73..3f54a39 100644
--- a/tests/tests/hardware/src/android/hardware/camera2/cts/PerformanceTest.java
+++ b/tests/tests/hardware/src/android/hardware/camera2/cts/PerformanceTest.java
@@ -19,16 +19,19 @@
 import static com.android.ex.camera2.blocking.BlockingSessionCallback.*;
 
 import android.graphics.ImageFormat;
+import android.hardware.camera2.CameraAccessException;
 import android.hardware.camera2.CameraCaptureSession;
 import android.hardware.camera2.CameraCaptureSession.CaptureCallback;
 import android.hardware.camera2.CameraDevice;
 import android.hardware.camera2.CaptureRequest;
 import android.hardware.camera2.CaptureResult;
 import android.hardware.camera2.TotalCaptureResult;
+import android.hardware.camera2.cts.CameraTestUtils.SimpleCaptureCallback;
 import android.hardware.camera2.cts.CameraTestUtils.SimpleImageReaderListener;
 import android.hardware.camera2.cts.helpers.StaticMetadata;
 import android.hardware.camera2.cts.helpers.StaticMetadata.CheckLevel;
 import android.hardware.camera2.cts.testcases.Camera2SurfaceViewTestCase;
+import android.hardware.camera2.params.InputConfiguration;
 import android.util.Log;
 import android.util.Pair;
 import android.util.Size;
@@ -36,10 +39,10 @@
 import android.cts.util.DeviceReportLog;
 import android.media.Image;
 import android.media.ImageReader;
+import android.media.ImageWriter;
 import android.os.ConditionVariable;
 import android.os.SystemClock;
 
-import com.android.cts.util.ReportLog;
 import com.android.cts.util.ResultType;
 import com.android.cts.util.ResultUnit;
 import com.android.cts.util.Stat;
@@ -47,6 +50,7 @@
 import com.android.ex.camera2.exceptions.TimeoutRuntimeException;
 
 import java.util.ArrayList;
+import java.util.Arrays;
 import java.util.List;
 import java.util.concurrent.LinkedBlockingQueue;
 import java.util.concurrent.TimeUnit;
@@ -61,9 +65,28 @@
     private static final int NUM_TEST_LOOPS = 5;
     private static final int NUM_MAX_IMAGES = 4;
     private static final int NUM_RESULTS_WAIT = 30;
+    private static final int[] REPROCESS_FORMATS = {ImageFormat.YUV_420_888, ImageFormat.PRIVATE};
+    private final int MAX_REPROCESS_IMAGES = 10;
+    private final int MAX_JPEG_IMAGES = MAX_REPROCESS_IMAGES;
+    private final int MAX_INPUT_IMAGES = MAX_REPROCESS_IMAGES;
+    // The ZSL queue depth should be larger than the maximum number of simultaneous reprocess
+    // capture requests, so that a reasonable number of candidate images remains in the worst case.
+    // Here we ensure that, in the worst case, at most half of the queue's max images are dequeued.
+    private final int MAX_ZSL_IMAGES = MAX_REPROCESS_IMAGES * 2;
+    private final double REPROCESS_STALL_MARGIN = 0.1;
 
     private DeviceReportLog mReportLog;
 
+    // Used for reading camera output buffers.
+    private ImageReader mCameraZslReader;
+    private SimpleImageReaderListener mCameraZslImageListener;
+    // Used for reprocessing (jpeg) output.
+    private ImageReader mJpegReader;
+    private SimpleImageReaderListener mJpegListener;
+    // Used for reprocessing input.
+    private ImageWriter mWriter;
+    private SimpleCaptureCallback mZslResultListener;
+
     @Override
     protected void setUp() throws Exception {
         mReportLog = new DeviceReportLog();
@@ -121,8 +144,8 @@
                         configureStreamTimes[i] = configureTimeMs - openTimeMs;
 
                         // Blocking start preview (start preview to first image arrives)
-                        CameraTestUtils.SimpleCaptureCallback resultListener =
-                                new CameraTestUtils.SimpleCaptureCallback();
+                        SimpleCaptureCallback resultListener =
+                                new SimpleCaptureCallback();
                         blockingStartPreview(resultListener, imageListener);
                         previewStartedTimeMs = SystemClock.elapsedRealtime();
                         startPreviewTimes[i] = previewStartedTimeMs - configureTimeMs;
@@ -205,8 +228,8 @@
                             mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
                     CaptureRequest.Builder captureBuilder =
                             mCamera.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE);
-                    CameraTestUtils.SimpleCaptureCallback previewResultListener =
-                            new CameraTestUtils.SimpleCaptureCallback();
+                    SimpleCaptureCallback previewResultListener =
+                            new SimpleCaptureCallback();
                     SimpleTimingResultListener captureResultListener =
                             new SimpleTimingResultListener();
                     SimpleImageListener imageListener = new SimpleImageListener();
@@ -252,7 +275,7 @@
                     // simulate real scenario (preview runs a bit)
                     waitForNumResults(previewResultListener, NUM_RESULTS_WAIT);
 
-                    stopPreview();
+                    blockingStopPreview();
 
                 }
                 mReportLog.printArray("Camera " + id
@@ -267,6 +290,11 @@
                 mReportLog.printArray("Camera " + id
                         + ": Camera capture result latency", getResultTimes,
                         ResultType.LOWER_BETTER, ResultUnit.MS);
+
+                // The result will not appear in the CTS report unless a summary is printed.
+                mReportLog.printSummary("Camera capture result average latency for Camera " + id,
+                        Stat.getAverage(getResultTimes),
+                        ResultType.LOWER_BETTER, ResultUnit.MS);
             }
             finally {
                 closeImageReader();
@@ -275,6 +303,338 @@
         }
     }
 
+    /**
+     * Test reprocessing shot-to-shot latency, i.e., from the time a reprocess
+     * request is issued to the time the reprocess image is returned.
+     *
+     */
+    public void testReprocessingLatency() throws Exception {
+        for (String id : mCameraIds) {
+            for (int format : REPROCESS_FORMATS) {
+                if (!isReprocessSupported(id, format)) {
+                    continue;
+                }
+
+                try {
+                    openDevice(id);
+
+                    reprocessingPerformanceTestByCamera(format, /*asyncMode*/false);
+                } finally {
+                    closeReaderWriters();
+                    closeDevice();
+                }
+            }
+        }
+    }
+
+    /**
+     * Test reprocessing throughput, i.e., how many frames can be reprocessed
+     * during a given amount of time.
+     *
+     */
+    public void testReprocessingThroughput() throws Exception {
+        for (String id : mCameraIds) {
+            for (int format : REPROCESS_FORMATS) {
+                if (!isReprocessSupported(id, format)) {
+                    continue;
+                }
+
+                try {
+                    openDevice(id);
+
+                    reprocessingPerformanceTestByCamera(format, /*asyncMode*/true);
+                } finally {
+                    closeReaderWriters();
+                    closeDevice();
+                }
+            }
+        }
+    }
+
+    /**
+     * Test preview stall (frame drops) caused by reprocessing.
+     */
+    public void testReprocessingCaptureStall() throws Exception {
+        for (String id : mCameraIds) {
+            for (int format : REPROCESS_FORMATS) {
+                if (!isReprocessSupported(id, format)) {
+                    continue;
+                }
+
+                try {
+                    openDevice(id);
+
+                    reprocessingCaptureStallTestByCamera(format);
+                } finally {
+                    closeReaderWriters();
+                    closeDevice();
+                }
+            }
+        }
+    }
+
+    private void reprocessingCaptureStallTestByCamera(int reprocessInputFormat) throws Exception {
+        prepareReprocessCapture(reprocessInputFormat);
+
+        // Let it stream for a while before reprocessing
+        startZslStreaming();
+        waitForFrames(NUM_RESULTS_WAIT);
+
+        final int NUM_REPROCESS_TESTED = MAX_REPROCESS_IMAGES / 2;
+        // Prepare several reprocess requests
+        Image[] inputImages = new Image[NUM_REPROCESS_TESTED];
+        CaptureRequest.Builder[] reprocessReqs = new CaptureRequest.Builder[MAX_REPROCESS_IMAGES];
+        for (int i = 0; i < NUM_REPROCESS_TESTED; i++) {
+            inputImages[i] =
+                    mCameraZslImageListener.getImage(CameraTestUtils.CAPTURE_IMAGE_TIMEOUT_MS);
+            TotalCaptureResult zslResult =
+                    mZslResultListener.getCaptureResult(
+                            WAIT_FOR_RESULT_TIMEOUT_MS, inputImages[i].getTimestamp());
+            reprocessReqs[i] = mCamera.createReprocessCaptureRequest(zslResult);
+            reprocessReqs[i].addTarget(mJpegReader.getSurface());
+            mWriter.queueInputImage(inputImages[i]);
+        }
+
+        double[] maxCaptureGapsMs = new double[NUM_REPROCESS_TESTED];
+        double[] averageFrameDurationMs = new double[NUM_REPROCESS_TESTED];
+        Arrays.fill(averageFrameDurationMs, 0.0);
+        final int MAX_REPROCESS_RETURN_FRAME_COUNT = 20;
+        SimpleCaptureCallback reprocessResultListener = new SimpleCaptureCallback();
+        for (int i = 0; i < NUM_REPROCESS_TESTED; i++) {
+            mZslResultListener.drain();
+            CaptureRequest reprocessRequest = reprocessReqs[i].build();
+            mSession.capture(reprocessRequest, reprocessResultListener, mHandler);
+            // Wait for reprocess output jpeg and result come back.
+            reprocessResultListener.getCaptureResultForRequest(reprocessRequest,
+                    CameraTestUtils.CAPTURE_RESULT_TIMEOUT_MS);
+            mJpegListener.getImage(CameraTestUtils.CAPTURE_IMAGE_TIMEOUT_MS);
+            long numFramesMaybeStalled = mZslResultListener.getTotalNumFrames();
+            assertTrue("Reprocess capture result should be returned in "
+                    + MAX_REPROCESS_RETURN_FRAME_COUNT + " frames",
+                    numFramesMaybeStalled <= MAX_REPROCESS_RETURN_FRAME_COUNT);
+
+            // Need to look over a longer window, as the stutter could happen after the
+            // reprocessing output frame is received.
+            long[] timestampGap = new long[MAX_REPROCESS_RETURN_FRAME_COUNT + 1];
+            Arrays.fill(timestampGap, 0);
+            CaptureResult[] results = new CaptureResult[timestampGap.length];
+            long[] frameDurationsNs = new long[timestampGap.length];
+            for (int j = 0; j < results.length; j++) {
+                results[j] = mZslResultListener.getCaptureResult(
+                        CameraTestUtils.CAPTURE_IMAGE_TIMEOUT_MS);
+                if (j > 0) {
+                    timestampGap[j] = results[j].get(CaptureResult.SENSOR_TIMESTAMP) -
+                            results[j - 1].get(CaptureResult.SENSOR_TIMESTAMP);
+                    assertTrue("Time stamp should be monotonically increasing",
+                            timestampGap[j] > 0);
+                }
+                frameDurationsNs[j] = results[j].get(CaptureResult.SENSOR_FRAME_DURATION);
+            }
+
+            if (VERBOSE) {
+                Log.i(TAG, "timestampGap: " + Arrays.toString(timestampGap));
+                Log.i(TAG, "frameDurationsNs: " + Arrays.toString(frameDurationsNs));
+            }
+
+            // Over the candidate results, calculate the average frame duration and the
+            // maximum timestamp gap.
+            Arrays.sort(timestampGap);
+            double maxTimestampGapMs = timestampGap[timestampGap.length - 1] / 1000000.0;
+            for (int m = 0; m < frameDurationsNs.length; m++) {
+                averageFrameDurationMs[i] += (frameDurationsNs[m] / 1000000.0);
+            }
+            averageFrameDurationMs[i] /= frameDurationsNs.length;
+
+            maxCaptureGapsMs[i] = maxTimestampGapMs;
+        }
+
+        String reprocessType = " YUV reprocessing ";
+        if (reprocessInputFormat == ImageFormat.PRIVATE) {
+            reprocessType = " opaque reprocessing ";
+        }
+
+        mReportLog.printArray("Camera " + mCamera.getId()
+                + ":" + reprocessType + " max capture timestamp gaps", maxCaptureGapsMs,
+                ResultType.LOWER_BETTER, ResultUnit.MS);
+        mReportLog.printArray("Camera " + mCamera.getId()
+                + ":" + reprocessType + "capture average frame duration", averageFrameDurationMs,
+                ResultType.LOWER_BETTER, ResultUnit.MS);
+        mReportLog.printSummary("Camera reprocessing average max capture timestamp gaps for Camera "
+                + mCamera.getId(), Stat.getAverage(maxCaptureGapsMs), ResultType.LOWER_BETTER,
+                ResultUnit.MS);
+
+        // The max timestamp gap should be less than
+        // (maxCaptureStall + 1) * averageFrameDuration * (1 + errorMargin);
+        // a worked numeric sketch follows this method.
+        int maxCaptureStallFrames = mStaticInfo.getMaxCaptureStallOrDefault();
+        for (int i = 0; i < maxCaptureGapsMs.length; i++) {
+            double stallDurationBound = averageFrameDurationMs[i] *
+                    (maxCaptureStallFrames + 1) * (1 + REPROCESS_STALL_MARGIN);
+            assertTrue("Max capture stall duration " + maxCaptureGapsMs[i]
+                    + " ms should be no larger than " + stallDurationBound + " ms",
+                    maxCaptureGapsMs[i] <= stallDurationBound);
+        }
+    }
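To make the stall bound concrete, here is a small self-contained sketch with assumed numbers (illustrative only, not taken from any device): at roughly 30 fps the average frame duration is about 33.3 ms, and with a hypothetical maxCaptureStall of 4 frames plus the 10% margin used above, the largest tolerated sensor-timestamp gap works out to about 183 ms.

    // Worked example of the stall-duration bound checked above; all values are assumptions.
    public class StallBoundSketch {
        public static void main(String[] args) {
            double averageFrameDurationMs = 33.3; // assumed ~30 fps ZSL/preview stream
            int maxCaptureStallFrames = 4;        // assumed REPROCESS_MAX_CAPTURE_STALL value
            double stallMargin = 0.1;             // same 10% margin as REPROCESS_STALL_MARGIN
            double bound = averageFrameDurationMs * (maxCaptureStallFrames + 1) * (1 + stallMargin);
            // (4 + 1) * 33.3 * 1.1 ~= 183 ms: the largest allowed gap between sensor timestamps.
            System.out.printf("max allowed capture gap ~= %.1f ms%n", bound);
        }
    }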
+
+    private void reprocessingPerformanceTestByCamera(int reprocessInputFormat, boolean asyncMode)
+            throws Exception {
+        // Prepare the reprocessing capture
+        prepareReprocessCapture(reprocessInputFormat);
+
+        // Start ZSL streaming
+        startZslStreaming();
+        waitForFrames(NUM_RESULTS_WAIT);
+
+        CaptureRequest.Builder[] reprocessReqs = new CaptureRequest.Builder[MAX_REPROCESS_IMAGES];
+        Image[] inputImages = new Image[MAX_REPROCESS_IMAGES];
+        double[] getImageLatenciesMs = new double[MAX_REPROCESS_IMAGES];
+        long startTimeMs;
+        for (int i = 0; i < MAX_REPROCESS_IMAGES; i++) {
+            inputImages[i] =
+                    mCameraZslImageListener.getImage(CameraTestUtils.CAPTURE_IMAGE_TIMEOUT_MS);
+            TotalCaptureResult zslResult =
+                    mZslResultListener.getCaptureResult(
+                            WAIT_FOR_RESULT_TIMEOUT_MS, inputImages[i].getTimestamp());
+            reprocessReqs[i] = mCamera.createReprocessCaptureRequest(zslResult);
+            reprocessReqs[i].addTarget(mJpegReader.getSurface());
+        }
+
+        if (asyncMode) {
+            // Async capture: issue all the reprocess requests as quickly as possible, then
+            // measure the throughput (inter-arrival time) of the output JPEGs.
+            for (int i = 0; i < MAX_REPROCESS_IMAGES; i++) {
+                // Could be slow for YUV reprocessing, do it in advance.
+                mWriter.queueInputImage(inputImages[i]);
+            }
+
+            // Submit the requests
+            for (int i = 0; i < MAX_REPROCESS_IMAGES; i++) {
+                mSession.capture(reprocessReqs[i].build(), null, null);
+            }
+
+            // Get images
+            startTimeMs = SystemClock.elapsedRealtime();
+            for (int i = 0; i < MAX_REPROCESS_IMAGES; i++) {
+                mJpegListener.getImage(CameraTestUtils.CAPTURE_IMAGE_TIMEOUT_MS);
+                getImageLatenciesMs[i] = SystemClock.elapsedRealtime() - startTimeMs;
+                startTimeMs = SystemClock.elapsedRealtime();
+            }
+        } else {
+            // Sync capture: issue reprocess requests one by one, submitting the next only after
+            // the previous captured image has returned. This tests back-to-back capture
+            // performance.
+            for (int i = 0; i < MAX_REPROCESS_IMAGES; i++) {
+                startTimeMs = SystemClock.elapsedRealtime();
+                mWriter.queueInputImage(inputImages[i]);
+                mSession.capture(reprocessReqs[i].build(), null, null);
+                mJpegListener.getImage(CameraTestUtils.CAPTURE_IMAGE_TIMEOUT_MS);
+                getImageLatenciesMs[i] = SystemClock.elapsedRealtime() - startTimeMs;
+            }
+        }
+
+        String reprocessType = " YUV reprocessing ";
+        if (reprocessInputFormat == ImageFormat.PRIVATE) {
+            reprocessType = " opaque reprocessing ";
+        }
+
+        // Report the performance data
+        if (asyncMode) {
+            mReportLog.printArray("Camera " + mCamera.getId()
+                    + ":" + reprocessType + "capture latency", getImageLatenciesMs,
+                    ResultType.LOWER_BETTER, ResultUnit.MS);
+            mReportLog.printSummary("Camera reprocessing average latency for Camera " +
+                    mCamera.getId(), Stat.getAverage(getImageLatenciesMs), ResultType.LOWER_BETTER,
+                    ResultUnit.MS);
+        } else {
+            mReportLog.printArray("Camera " + mCamera.getId()
+                    + ":" + reprocessType + "shot to shot latency", getImageLatenciesMs,
+                    ResultType.LOWER_BETTER, ResultUnit.MS);
+            mReportLog.printSummary("Camera reprocessing shot to shot average latency for Camera " +
+                    mCamera.getId(), Stat.getAverage(getImageLatenciesMs), ResultType.LOWER_BETTER,
+                    ResultUnit.MS);
+        }
+    }
+
+    /**
+     * Start preview and ZSL streaming
+     */
+    private void startZslStreaming() throws Exception {
+        CaptureRequest.Builder zslBuilder =
+                mCamera.createCaptureRequest(CameraDevice.TEMPLATE_ZERO_SHUTTER_LAG);
+        zslBuilder.addTarget(mPreviewSurface);
+        zslBuilder.addTarget(mCameraZslReader.getSurface());
+        mSession.setRepeatingRequest(zslBuilder.build(), mZslResultListener, mHandler);
+    }
+
+    /**
+     * Wait for a certain number of frames; the images and results are drained from the
+     * listeners so that the next reprocess capture can get matched results and images.
+     *
+     * @param numFrameWait The number of frames to wait before returning; 0 means this call
+     *      returns immediately after streaming starts.
+     */
+    private void waitForFrames(int numFrameWait) {
+        if (numFrameWait < 0) {
+            throw new IllegalArgumentException("numFrameWait " + numFrameWait +
+                    " should be non-negative");
+        }
+
+        if (numFrameWait > 0) {
+            // Let the stream run for a while
+            waitForNumResults(mZslResultListener, numFrameWait);
+            // Drain the pending images, to ensure that all future images have an associated
+            // capture result available.
+            mCameraZslImageListener.drain();
+        }
+    }
+
+    private void closeReaderWriters() {
+        CameraTestUtils.closeImageReader(mCameraZslReader);
+        mCameraZslReader = null;
+        CameraTestUtils.closeImageReader(mJpegReader);
+        mJpegReader = null;
+        CameraTestUtils.closeImageWriter(mWriter);
+        mWriter = null;
+    }
+
+    private void prepareReprocessCapture(int inputFormat)
+                    throws CameraAccessException {
+        // 1. Find the right preview and capture sizes.
+        Size maxPreviewSize = mOrderedPreviewSizes.get(0);
+        Size[] supportedInputSizes =
+                mStaticInfo.getAvailableSizesForFormatChecked(inputFormat,
+                StaticMetadata.StreamDirection.Input);
+        Size maxInputSize = CameraTestUtils.getMaxSize(supportedInputSizes);
+        Size maxJpegSize = mOrderedStillSizes.get(0);
+        updatePreviewSurface(maxPreviewSize);
+        mZslResultListener = new SimpleCaptureCallback();
+
+        // 2. Create camera output ImageReaders.
+        // YUV/Opaque output, camera should support output with input size/format
+        mCameraZslImageListener = new SimpleImageReaderListener(
+                /*asyncMode*/true, MAX_ZSL_IMAGES / 2);
+        mCameraZslReader = CameraTestUtils.makeImageReader(
+                maxInputSize, inputFormat, MAX_ZSL_IMAGES, mCameraZslImageListener, mHandler);
+        // Jpeg reprocess output
+        mJpegListener = new SimpleImageReaderListener();
+        mJpegReader = CameraTestUtils.makeImageReader(
+                maxJpegSize, ImageFormat.JPEG, MAX_JPEG_IMAGES, mJpegListener, mHandler);
+
+        // 3. Create the reprocessable camera session.
+        List<Surface> outSurfaces = new ArrayList<Surface>();
+        outSurfaces.add(mPreviewSurface);
+        outSurfaces.add(mCameraZslReader.getSurface());
+        outSurfaces.add(mJpegReader.getSurface());
+        InputConfiguration inputConfig = new InputConfiguration(maxInputSize.getWidth(),
+                maxInputSize.getHeight(), inputFormat);
+        mSessionListener = new BlockingSessionCallback();
+        mSession = CameraTestUtils.configureReprocessableCameraSession(
+                mCamera, inputConfig, outSurfaces, mSessionListener, mHandler);
+
+        // 4. Create an ImageWriter for the reprocess input.
+        mWriter = CameraTestUtils.makeImageWriter(
+                mSession.getInputSurface(), MAX_INPUT_IMAGES, /*listener*/null, /*handler*/null);
+    }
+
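Taken together, prepareReprocessCapture(), startZslStreaming(), and the capture loops above repeat one reprocess round trip. A condensed sketch of that round trip, reusing the fields the test sets up (the helper itself is illustrative and not part of the test):

    // Summary sketch of a single reprocess round trip; field names mirror the test's.
    private Image reprocessOnce() throws Exception {
        // 1. Take a ZSL buffer and its matching capture result from the repeating ZSL stream.
        Image input = mCameraZslImageListener.getImage(CameraTestUtils.CAPTURE_IMAGE_TIMEOUT_MS);
        TotalCaptureResult match = mZslResultListener.getCaptureResult(
                WAIT_FOR_RESULT_TIMEOUT_MS, input.getTimestamp());
        // 2. Build a reprocess request from that result and target the JPEG output.
        CaptureRequest.Builder builder = mCamera.createReprocessCaptureRequest(match);
        builder.addTarget(mJpegReader.getSurface());
        // 3. Hand the ZSL buffer back as the reprocess input, then issue the capture.
        mWriter.queueInputImage(input);
        mSession.capture(builder.build(), /*callback*/ null, mHandler);
        // 4. Wait for the reprocessed JPEG.
        return mJpegListener.getImage(CameraTestUtils.CAPTURE_IMAGE_TIMEOUT_MS);
    }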
     private void blockingStopPreview() throws Exception {
         stopPreview();
         mSessionListener.getStateWaiter().waitForState(SESSION_CLOSED,
@@ -295,19 +655,6 @@
         imageListener.waitForImageAvailable(CameraTestUtils.CAPTURE_IMAGE_TIMEOUT_MS);
     }
 
-    private void blockingCaptureImage(CaptureCallback listener,
-            SimpleImageListener imageListener) throws Exception {
-        if (mReaderSurface == null) {
-            throw new IllegalStateException("reader surface must be initialized first");
-        }
-
-        CaptureRequest.Builder captureBuilder =
-                mCamera.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE);
-        captureBuilder.addTarget(mReaderSurface);
-        mSession.capture(captureBuilder.build(), listener, mHandler);
-        imageListener.waitForImageAvailable(CameraTestUtils.CAPTURE_IMAGE_TIMEOUT_MS);
-    }
-
     /**
      * Configure reader and preview outputs and wait until done.
      */
diff --git a/tests/tests/hardware/src/android/hardware/camera2/cts/ReprocessCaptureTest.java b/tests/tests/hardware/src/android/hardware/camera2/cts/ReprocessCaptureTest.java
index 30c3526..945bb4c 100644
--- a/tests/tests/hardware/src/android/hardware/camera2/cts/ReprocessCaptureTest.java
+++ b/tests/tests/hardware/src/android/hardware/camera2/cts/ReprocessCaptureTest.java
@@ -25,6 +25,7 @@
 import android.hardware.camera2.CameraCharacteristics;
 import android.hardware.camera2.CameraDevice;
 import android.hardware.camera2.CaptureRequest;
+import android.hardware.camera2.CaptureResult;
 import android.hardware.camera2.TotalCaptureResult;
 import android.hardware.camera2.cts.helpers.StaticMetadata;
 import android.hardware.camera2.cts.helpers.StaticMetadata.CheckLevel;
@@ -48,8 +49,6 @@
     private static final String TAG = "ReprocessCaptureTest";
     private static final boolean VERBOSE = Log.isLoggable(TAG, Log.VERBOSE);
     private static final boolean DEBUG = Log.isLoggable(TAG, Log.DEBUG);
-    private static final int MAX_NUM_IMAGE_READER_IMAGES = 3;
-    private static final int MAX_NUM_IMAGE_WRITER_IMAGES = 3;
     private static final int CAPTURE_TIMEOUT_FRAMES = 100;
     private static final int CAPTURE_TIMEOUT_MS = 3000;
     private static final int WAIT_FOR_SURFACE_CHANGE_TIMEOUT_MS = 1000;
@@ -57,16 +56,27 @@
     private static final int PREVIEW_TEMPLATE = CameraDevice.TEMPLATE_PREVIEW;
     private static final int NUM_REPROCESS_TEST_LOOP = 3;
     private static final int NUM_REPROCESS_CAPTURES = 3;
+    private static final int NUM_REPROCESS_BURST = 3;
     private int mDumpFrameCount = 0;
 
     // The image reader for the first regular capture
     private ImageReader mFirstImageReader;
     // The image reader for the reprocess capture
     private ImageReader mSecondImageReader;
+    // A flag indicating whether the regular capture and the reprocess capture share the same image
+    // reader. If it's true, mFirstImageReader should be used for regular and reprocess outputs.
+    private boolean mShareOneImageReader;
     private SimpleImageReaderListener mFirstImageReaderListener;
     private SimpleImageReaderListener mSecondImageReaderListener;
     private Surface mInputSurface;
     private ImageWriter mImageWriter;
+    private SimpleImageWriterListener mImageWriterListener;
+
+    private enum CaptureTestCase {
+        SINGLE_SHOT,
+        BURST,
+        MIXED_BURST
+    }
 
     /**
      * Test YUV_420_888 -> YUV_420_888 with maximal supported sizes
@@ -137,7 +147,8 @@
                 // open Camera device
                 openDevice(id);
                 // no preview
-                testReprocessingAllCombinations(id, null);
+                testReprocessingAllCombinations(id, /*previewSize*/null,
+                        CaptureTestCase.SINGLE_SHOT);
             } finally {
                 closeDevice();
             }
@@ -156,7 +167,8 @@
             try {
                 // open Camera device
                 openDevice(id);
-                testReprocessingAllCombinations(id, mOrderedPreviewSizes.get(0));
+                testReprocessingAllCombinations(id, mOrderedPreviewSizes.get(0),
+                        CaptureTestCase.SINGLE_SHOT);
             } finally {
                 closeDevice();
             }
@@ -234,8 +246,8 @@
                 }
 
                 setupImageReaders(inputSize, inputFormat, reprocessOutputSize,
-                        reprocessOutputFormat);
-                setupReprocessibleSession(/*previewSurface*/null);
+                        reprocessOutputFormat, /*maxImages*/1);
+                setupReprocessableSession(/*previewSurface*/null, /*numImageWriterImages*/1);
 
                 TotalCaptureResult result = submitCaptureRequest(mFirstImageReader.getSurface(),
                         /*inputResult*/null);
@@ -246,11 +258,17 @@
 
                 // recreate the session
                 closeReprossibleSession();
-                setupReprocessibleSession(/*previewSurface*/null);
+                setupReprocessableSession(/*previewSurface*/null, /*numImageWriterImages*/1);
                 try {
+                    TotalCaptureResult reprocessResult;
                     // issue and wait on reprocess capture request
-                    TotalCaptureResult reprocessResult =
-                            submitCaptureRequest(mSecondImageReader.getSurface(), result);
+                    if (mShareOneImageReader) {
+                        reprocessResult =
+                                submitCaptureRequest(mFirstImageReader.getSurface(), result);
+                    } else {
+                        reprocessResult =
+                                submitCaptureRequest(mSecondImageReader.getSurface(), result);
+                    }
                     fail("Camera " + id + ": should get IllegalArgumentException for cross " +
                             "session reprocess capture.");
                 } catch (IllegalArgumentException e) {
@@ -268,8 +286,52 @@
         }
     }
 
-    // todo: test aborting reprocessing captures.
-    // todo: test burst reprocessing captures.
+    /**
+     * Test burst reprocessing captures with and without preview.
+     */
+    public void testBurstReprocessing() throws Exception {
+        for (String id : mCameraIds) {
+            if (!isYuvReprocessSupported(id) && !isOpaqueReprocessSupported(id)) {
+                continue;
+            }
+
+            try {
+                // open Camera device
+                openDevice(id);
+                // no preview
+                testReprocessingAllCombinations(id, /*previewSize*/null, CaptureTestCase.BURST);
+                // with preview
+                testReprocessingAllCombinations(id, mOrderedPreviewSizes.get(0),
+                        CaptureTestCase.BURST);
+            } finally {
+                closeDevice();
+            }
+        }
+    }
+
+    /**
+     * Test burst captures mixed with regular and reprocess captures with and without preview.
+     */
+    public void testMixedBurstReprocessing() throws Exception {
+        for (String id : mCameraIds) {
+            if (!isYuvReprocessSupported(id) && !isOpaqueReprocessSupported(id)) {
+                continue;
+            }
+
+            try {
+                // open Camera device
+                openDevice(id);
+                // no preview
+                testReprocessingAllCombinations(id, /*previewSize*/null,
+                        CaptureTestCase.MIXED_BURST);
+                // with preview
+                testReprocessingAllCombinations(id, mOrderedPreviewSizes.get(0),
+                        CaptureTestCase.MIXED_BURST);
+            } finally {
+                closeDevice();
+            }
+        }
+    }
 
     /**
      * Test the input format and output format with the largest input and output sizes.
@@ -294,8 +356,8 @@
     /**
      * Test all input format, input size, output format, and output size combinations.
      */
-    private void testReprocessingAllCombinations(String cameraId,
-            Size previewSize) throws Exception {
+    private void testReprocessingAllCombinations(String cameraId, Size previewSize,
+            CaptureTestCase captureTestCase) throws Exception {
 
         int[] supportedInputFormats =
                 mStaticInfo.getAvailableFormats(StaticMetadata.StreamDirection.Input);
@@ -314,15 +376,165 @@
                             StaticMetadata.StreamDirection.Output);
 
                     for (Size reprocessOutputSize : supportedReprocessOutputSizes) {
-                        testReprocess(cameraId, inputSize, inputFormat,
-                                reprocessOutputSize, reprocessOutputFormat, previewSize,
-                                NUM_REPROCESS_CAPTURES);
+                        switch (captureTestCase) {
+                            case SINGLE_SHOT:
+                                testReprocess(cameraId, inputSize, inputFormat,
+                                        reprocessOutputSize, reprocessOutputFormat, previewSize,
+                                        NUM_REPROCESS_CAPTURES);
+                                break;
+                            case BURST:
+                                testReprocessBurst(cameraId, inputSize, inputFormat,
+                                        reprocessOutputSize, reprocessOutputFormat, previewSize,
+                                        NUM_REPROCESS_BURST);
+                                break;
+                            case MIXED_BURST:
+                                testReprocessMixedBurst(cameraId, inputSize, inputFormat,
+                                        reprocessOutputSize, reprocessOutputFormat, previewSize,
+                                        NUM_REPROCESS_BURST);
+                                break;
+                            default:
+                                throw new IllegalArgumentException("Invalid capture type");
+                        }
                     }
                 }
             }
         }
     }
 
+    /**
+     * Test burst that is mixed with regular and reprocess capture requests.
+     */
+    private void testReprocessMixedBurst(String cameraId, Size inputSize, int inputFormat,
+            Size reprocessOutputSize, int reprocessOutputFormat, Size previewSize,
+            int numBurst) throws Exception {
+        if (VERBOSE) {
+            Log.v(TAG, "testReprocessMixedBurst: cameraId: " + cameraId + " inputSize: " +
+                    inputSize + " inputFormat: " + inputFormat + " reprocessOutputSize: " +
+                    reprocessOutputSize + " reprocessOutputFormat: " + reprocessOutputFormat +
+                    " previewSize: " + previewSize + " numBurst: " + numBurst);
+        }
+
+        boolean enablePreview = (previewSize != null);
+        ImageResultHolder[] imageResultHolders = new ImageResultHolder[0];
+
+        try {
+            // totalNumBurst = number of regular burst + number of reprocess burst.
+            int totalNumBurst = numBurst * 2;
+
+            if (enablePreview) {
+                updatePreviewSurface(previewSize);
+            } else {
+                mPreviewSurface = null;
+            }
+
+            setupImageReaders(inputSize, inputFormat, reprocessOutputSize, reprocessOutputFormat,
+                totalNumBurst);
+            setupReprocessableSession(mPreviewSurface, /*numImageWriterImages*/numBurst);
+
+            if (enablePreview) {
+                startPreview(mPreviewSurface);
+            }
+
+            // Prepare an array of booleans indicating each capture's type (regular or reprocess)
+            boolean[] isReprocessCaptures = new boolean[totalNumBurst];
+            for (int i = 0; i < totalNumBurst; i++) {
+                // Even indices are reprocess captures, odd indices are regular captures.
+                isReprocessCaptures[i] = ((i & 1) == 0);
+            }
+
+            imageResultHolders = doMixedReprocessBurstCapture(isReprocessCaptures);
+            for (ImageResultHolder holder : imageResultHolders) {
+                Image reprocessedImage = holder.getImage();
+                TotalCaptureResult result = holder.getTotalCaptureResult();
+
+                mCollector.expectImageProperties("testReprocessMixedBurst", reprocessedImage,
+                            reprocessOutputFormat, reprocessOutputSize,
+                            result.get(CaptureResult.SENSOR_TIMESTAMP));
+
+                if (DEBUG) {
+                    Log.d(TAG, String.format("camera %s in %dx%d %d out %dx%d %d",
+                            cameraId, inputSize.getWidth(), inputSize.getHeight(), inputFormat,
+                            reprocessOutputSize.getWidth(), reprocessOutputSize.getHeight(),
+                            reprocessOutputFormat));
+                    dumpImage(reprocessedImage,
+                            "/testReprocessMixedBurst_camera" + cameraId + "_" + mDumpFrameCount);
+                    mDumpFrameCount++;
+                }
+            }
+        } finally {
+            for (ImageResultHolder holder : imageResultHolders) {
+                holder.getImage().close();
+            }
+            closeReprossibleSession();
+            closeImageReaders();
+        }
+    }
+
+    /**
+     * Test burst of reprocess capture requests.
+     */
+    private void testReprocessBurst(String cameraId, Size inputSize, int inputFormat,
+            Size reprocessOutputSize, int reprocessOutputFormat, Size previewSize,
+            int numBurst) throws Exception {
+        if (VERBOSE) {
+            Log.v(TAG, "testReprocessBurst: cameraId: " + cameraId + " inputSize: " +
+                    inputSize + " inputFormat: " + inputFormat + " reprocessOutputSize: " +
+                    reprocessOutputSize + " reprocessOutputFormat: " + reprocessOutputFormat +
+                    " previewSize: " + previewSize + " numBurst: " + numBurst);
+        }
+
+        boolean enablePreview = (previewSize != null);
+        ImageResultHolder[] imageResultHolders = new ImageResultHolder[0];
+
+        try {
+            if (enablePreview) {
+                updatePreviewSurface(previewSize);
+            } else {
+                mPreviewSurface = null;
+            }
+
+            setupImageReaders(inputSize, inputFormat, reprocessOutputSize, reprocessOutputFormat,
+                numBurst);
+            setupReprocessableSession(mPreviewSurface, numBurst);
+
+            if (enablePreview) {
+                startPreview(mPreviewSurface);
+            }
+
+            imageResultHolders = doReprocessBurstCapture(numBurst);
+            for (ImageResultHolder holder : imageResultHolders) {
+                Image reprocessedImage = holder.getImage();
+                TotalCaptureResult result = holder.getTotalCaptureResult();
+
+                mCollector.expectImageProperties("testReprocessBurst", reprocessedImage,
+                            reprocessOutputFormat, reprocessOutputSize,
+                            result.get(CaptureResult.SENSOR_TIMESTAMP));
+
+                if (DEBUG) {
+                    Log.d(TAG, String.format("camera %s in %dx%d %d out %dx%d %d",
+                            cameraId, inputSize.getWidth(), inputSize.getHeight(), inputFormat,
+                            reprocessOutputSize.getWidth(), reprocessOutputSize.getHeight(),
+                            reprocessOutputFormat));
+                    dumpImage(reprocessedImage,
+                            "/testReprocessBurst_camera" + cameraId + "_" + mDumpFrameCount);
+                    mDumpFrameCount++;
+                }
+            }
+        } finally {
+            for (ImageResultHolder holder : imageResultHolders) {
+                holder.getImage().close();
+            }
+            closeReprossibleSession();
+            closeImageReaders();
+        }
+    }
+
+    /**
+     * Test a sequence of reprocess capture requests.
+     */
     private void testReprocess(String cameraId, Size inputSize, int inputFormat,
             Size reprocessOutputSize, int reprocessOutputFormat, Size previewSize,
             int numReprocessCaptures) throws Exception {
@@ -342,57 +554,39 @@
                 mPreviewSurface = null;
             }
 
-            setupImageReaders(inputSize, inputFormat, reprocessOutputSize, reprocessOutputFormat);
-            setupReprocessibleSession(mPreviewSurface);
+            setupImageReaders(inputSize, inputFormat, reprocessOutputSize, reprocessOutputFormat,
+                    /*maxImages*/1);
+            setupReprocessableSession(mPreviewSurface, /*numImageWriterImages*/1);
 
             if (enablePreview) {
                 startPreview(mPreviewSurface);
             }
 
             for (int i = 0; i < numReprocessCaptures; i++) {
-                Image reprocessedImage = null;
+                ImageResultHolder imageResultHolder = null;
 
                 try {
-                    reprocessedImage = doReprocessCapture();
+                    imageResultHolder = doReprocessCapture();
+                    Image reprocessedImage = imageResultHolder.getImage();
+                    TotalCaptureResult result = imageResultHolder.getTotalCaptureResult();
 
-                    assertTrue(String.format("Reprocess output size is %dx%d. Expecting %dx%d.",
-                            reprocessedImage.getWidth(), reprocessedImage.getHeight(),
-                            reprocessOutputSize.getWidth(), reprocessOutputSize.getHeight()),
-                            reprocessedImage.getWidth() == reprocessOutputSize.getWidth() &&
-                            reprocessedImage.getHeight() == reprocessOutputSize.getHeight());
-                    assertTrue(String.format("Reprocess output format is %d. Expecting %d.",
-                            reprocessedImage.getFormat(), reprocessOutputFormat),
-                            reprocessedImage.getFormat() == reprocessOutputFormat);
+                    mCollector.expectImageProperties("testReprocess", reprocessedImage,
+                            reprocessOutputFormat, reprocessOutputSize,
+                            result.get(CaptureResult.SENSOR_TIMESTAMP));
 
                     if (DEBUG) {
-                        String filename = DEBUG_FILE_NAME_BASE + "/reprocessed_camera" + cameraId +
-                                "_" + mDumpFrameCount;
-                        mDumpFrameCount++;
-
-                        switch(reprocessedImage.getFormat()) {
-                            case ImageFormat.JPEG:
-                                filename += ".jpg";
-                                break;
-                            case ImageFormat.NV16:
-                            case ImageFormat.NV21:
-                            case ImageFormat.YUV_420_888:
-                                filename += ".yuv";
-                                break;
-                            default:
-                                filename += "." + reprocessedImage.getFormat();
-                                break;
-                        }
-
-                        Log.d(TAG, "dumping an image to " + filename);
                         Log.d(TAG, String.format("camera %s in %dx%d %d out %dx%d %d",
                                 cameraId, inputSize.getWidth(), inputSize.getHeight(), inputFormat,
                                 reprocessOutputSize.getWidth(), reprocessOutputSize.getHeight(),
                                 reprocessOutputFormat));
-                        dumpFile(filename , getDataFromImage(reprocessedImage));
+
+                        dumpImage(reprocessedImage,
+                                "/testReprocess_camera" + cameraId + "_" + mDumpFrameCount);
+                        mDumpFrameCount++;
                     }
                 } finally {
-                    if (reprocessedImage != null) {
-                        reprocessedImage.close();
+                    if (imageResultHolder != null) {
+                        imageResultHolder.getImage().close();
                     }
                 }
             }
@@ -402,20 +596,37 @@
         }
     }
 
+    /**
+     * Set up the image readers: one for the regular capture (used as the reprocess input) and,
+     * unless the two can share a single reader, one for the reprocess output.
+     */
     private void setupImageReaders(Size inputSize, int inputFormat, Size reprocessOutputSize,
-            int reprocessOutputFormat) {
+            int reprocessOutputFormat, int maxImages) {
 
+        mShareOneImageReader = false;
+        // If the regular output and reprocess output have the same size and format,
+        // they can share one image reader.
+        if (inputFormat == reprocessOutputFormat &&
+                inputSize.equals(reprocessOutputSize)) {
+            maxImages *= 2;
+            mShareOneImageReader = true;
+        }
         // create an ImageReader for the regular capture
         mFirstImageReaderListener = new SimpleImageReaderListener();
-        mFirstImageReader = makeImageReader(inputSize, inputFormat,
-                MAX_NUM_IMAGE_READER_IMAGES, mFirstImageReaderListener, mHandler);
+        mFirstImageReader = makeImageReader(inputSize, inputFormat, maxImages,
+                mFirstImageReaderListener, mHandler);
 
-        // create an ImageReader for the reprocess capture
-        mSecondImageReaderListener = new SimpleImageReaderListener();
-        mSecondImageReader = makeImageReader(reprocessOutputSize, reprocessOutputFormat,
-                MAX_NUM_IMAGE_READER_IMAGES, mSecondImageReaderListener, mHandler);
+        if (!mShareOneImageReader) {
+            // create an ImageReader for the reprocess capture
+            mSecondImageReaderListener = new SimpleImageReaderListener();
+            mSecondImageReader = makeImageReader(reprocessOutputSize, reprocessOutputFormat,
+                    maxImages, mSecondImageReaderListener, mHandler);
+        }
     }
 
+    /**
+     * Close the image readers used for regular and reprocess captures.
+     */
     private void closeImageReaders() {
         CameraTestUtils.closeImageReader(mFirstImageReader);
         mFirstImageReader = null;
@@ -423,27 +634,47 @@
         mSecondImageReader = null;
     }
 
-    private void setupReprocessibleSession(Surface previewSurface) throws Exception {
-        // create a reprocessible capture session
+    /**
+     * Set up a reprocessable session and create an ImageWriter with the session's input surface.
+     */
+    private void setupReprocessableSession(Surface previewSurface, int numImageWriterImages)
+            throws Exception {
+        // create a reprocessable capture session
         List<Surface> outSurfaces = new ArrayList<Surface>();
         outSurfaces.add(mFirstImageReader.getSurface());
-        outSurfaces.add(mSecondImageReader.getSurface());
+        if (!mShareOneImageReader) {
+            outSurfaces.add(mSecondImageReader.getSurface());
+        }
         if (previewSurface != null) {
             outSurfaces.add(previewSurface);
         }
 
         InputConfiguration inputConfig = new InputConfiguration(mFirstImageReader.getWidth(),
                 mFirstImageReader.getHeight(), mFirstImageReader.getImageFormat());
+        assertTrue(String.format("inputConfig is wrong: %dx%d format %d. Expect %dx%d format %d",
+                inputConfig.getWidth(), inputConfig.getHeight(), inputConfig.getFormat(),
+                mFirstImageReader.getWidth(), mFirstImageReader.getHeight(),
+                mFirstImageReader.getImageFormat()),
+                inputConfig.getWidth() == mFirstImageReader.getWidth() &&
+                inputConfig.getHeight() == mFirstImageReader.getHeight() &&
+                inputConfig.getFormat() == mFirstImageReader.getImageFormat());
+
         mSessionListener = new BlockingSessionCallback();
-        mSession = configureReprocessibleCameraSession(mCamera, inputConfig, outSurfaces,
+        mSession = configureReprocessableCameraSession(mCamera, inputConfig, outSurfaces,
                 mSessionListener, mHandler);
 
         // create an ImageWriter
         mInputSurface = mSession.getInputSurface();
         mImageWriter = ImageWriter.newInstance(mInputSurface,
-                MAX_NUM_IMAGE_WRITER_IMAGES);
+                numImageWriterImages);
+
+        mImageWriterListener = new SimpleImageWriterListener(mImageWriter);
+        mImageWriter.setOnImageReleasedListener(mImageWriterListener, mHandler);
     }
 
+    /**
+     * Close the reprocessable session and ImageWriter.
+     */
     private void closeReprossibleSession() {
         mInputSurface = null;
 
@@ -458,20 +689,74 @@
         }
     }
 
-    private Image doReprocessCapture() throws Exception {
-        // issue and wait on regular capture request
-        TotalCaptureResult result = submitCaptureRequest(mFirstImageReader.getSurface(),
-                /*inputResult*/null);
-        Image image = mFirstImageReaderListener.getImage(CAPTURE_TIMEOUT_MS);
+    /**
+     * Do one reprocess capture.
+     */
+    private ImageResultHolder doReprocessCapture() throws Exception {
+        return doReprocessBurstCapture(/*numBurst*/1)[0];
+    }
 
-        // queue the image to image writer
-        mImageWriter.queueInputImage(image);
+    /**
+     * Do a burst of reprocess captures.
+     */
+    private ImageResultHolder[] doReprocessBurstCapture(int numBurst) throws Exception {
+        boolean[] isReprocessCaptures = new boolean[numBurst];
+        for (int i = 0; i < numBurst; i++) {
+            isReprocessCaptures[i] = true;
+        }
 
-        // issue and wait on reprocess capture request
-        TotalCaptureResult reprocessResult =
-                submitCaptureRequest(mSecondImageReader.getSurface(), result);
+        return doMixedReprocessBurstCapture(isReprocessCaptures);
+    }
 
-        return mSecondImageReaderListener.getImage(CAPTURE_TIMEOUT_MS);
+    /**
+     * Do a burst of captures that mixes regular and reprocess captures.
+     *
+     * @param isReprocessCaptures An array whose length is the number of capture requests in the
+     *                            burst; a true element represents a reprocess capture request and
+     *                            a false element represents a regular capture request.
+     */
+    private ImageResultHolder[] doMixedReprocessBurstCapture(boolean[] isReprocessCaptures)
+            throws Exception {
+        if (isReprocessCaptures == null || isReprocessCaptures.length <= 0) {
+            throw new IllegalArgumentException("isReprocessCaptures must have at least 1 capture.");
+        }
+
+        TotalCaptureResult[] results = new TotalCaptureResult[isReprocessCaptures.length];
+        for (int i = 0; i < isReprocessCaptures.length; i++) {
+            // submit a capture and get the result if this entry is a reprocess capture.
+            if (isReprocessCaptures[i]) {
+                results[i] = submitCaptureRequest(mFirstImageReader.getSurface(),
+                        /*inputResult*/null);
+                mImageWriter.queueInputImage(
+                        mFirstImageReaderListener.getImage(CAPTURE_TIMEOUT_MS));
+            }
+        }
+
+        Surface[] outputSurfaces = new Surface[isReprocessCaptures.length];
+        for (int i = 0; i < isReprocessCaptures.length; i++) {
+            if (mShareOneImageReader) {
+                outputSurfaces[i] = mFirstImageReader.getSurface();
+            } else {
+                outputSurfaces[i] = mSecondImageReader.getSurface();
+            }
+        }
+
+        TotalCaptureResult[] finalResults = submitMixedCaptureBurstRequest(outputSurfaces, results);
+
+        ImageResultHolder[] holders = new ImageResultHolder[isReprocessCaptures.length];
+        for (int i = 0; i < isReprocessCaptures.length; i++) {
+            Image image;
+            if (mShareOneImageReader) {
+                image = mFirstImageReaderListener.getImage(CAPTURE_TIMEOUT_MS);
+            } else {
+                image = mSecondImageReaderListener.getImage(CAPTURE_TIMEOUT_MS);
+            }
+            holders[i] = new ImageResultHolder(image, finalResults[i]);
+        }
+
+        return holders;
     }
 
     /**
@@ -487,22 +772,85 @@
      * Issue a capture request and return the result. If inputResult is null, it's a regular
      * request. Otherwise, it's a reprocess request.
      */
-    private TotalCaptureResult submitCaptureRequest(Surface output, TotalCaptureResult inputResult)
-            throws Exception {
-        SimpleCaptureCallback captureCallback = new SimpleCaptureCallback();
-        CaptureRequest.Builder builder;
-        if (inputResult != null) {
-            builder = mCamera.createReprocessCaptureRequest(inputResult);
-        } else {
-            builder = mCamera.createCaptureRequest(CAPTURE_TEMPLATE);
+    private TotalCaptureResult submitCaptureRequest(Surface output,
+            TotalCaptureResult inputResult) throws Exception {
+        Surface[] outputs = new Surface[1];
+        outputs[0] = output;
+        TotalCaptureResult[] inputResults = new TotalCaptureResult[1];
+        inputResults[0] = inputResult;
+
+        return submitMixedCaptureBurstRequest(outputs, inputResults)[0];
+    }
+
+    /**
+     * Submit a burst of mixed regular and reprocess capture requests.
+     *
+     * @param outputs An array of output surfaces. Each output surface is used by one request,
+     *                so the length of the array is the number of requests in the burst.
+     * @param inputResults An array of input results. If it's null, all requests are regular
+     *                     requests. If an element is null, that element represents a regular
+     *                     request. If an element is not null, that element represents a reprocess
+     *                     request.
+     */
+    private TotalCaptureResult[] submitMixedCaptureBurstRequest(Surface[] outputs,
+            TotalCaptureResult[] inputResults) throws Exception {
+        if (outputs == null || outputs.length <= 0) {
+            throw new IllegalArgumentException("outputs must have at least 1 surface");
+        } else if (inputResults != null && inputResults.length != outputs.length) {
+            throw new IllegalArgumentException("The lengths of outputs and inputResults " +
+                    "don't match");
         }
 
-        builder.addTarget(output);
-        CaptureRequest request = builder.build();
-        mSession.capture(request, captureCallback, mHandler);
+        int numReprocessCaptures = 0;
+        SimpleCaptureCallback captureCallback = new SimpleCaptureCallback();
+        ArrayList<CaptureRequest> captureRequests = new ArrayList<>(outputs.length);
 
-        // wait for regular capture result
-        return captureCallback.getTotalCaptureResultForRequest(request, CAPTURE_TIMEOUT_FRAMES);
+        // Prepare a list of capture requests. Whether each request is a regular or reprocess
+        // capture is determined by the corresponding entry in the inputResults array.
+        for (int i = 0; i < outputs.length; i++) {
+            CaptureRequest.Builder builder;
+            boolean isReprocess = (inputResults != null && inputResults[i] != null);
+            if (isReprocess) {
+                builder = mCamera.createReprocessCaptureRequest(inputResults[i]);
+                numReprocessCaptures++;
+            } else {
+                builder = mCamera.createCaptureRequest(CAPTURE_TEMPLATE);
+            }
+            builder.addTarget(outputs[i]);
+            CaptureRequest request = builder.build();
+            assertTrue("Capture request reprocess type " + request.isReprocess() + " is wrong.",
+                request.isReprocess() == isReprocess);
+
+            captureRequests.add(request);
+        }
+
+        if (captureRequests.size() == 1) {
+            mSession.capture(captureRequests.get(0), captureCallback, mHandler);
+        } else {
+            mSession.captureBurst(captureRequests, captureCallback, mHandler);
+        }
+
+        TotalCaptureResult[] results;
+        if (numReprocessCaptures == 0 || numReprocessCaptures == outputs.length) {
+            results = new TotalCaptureResult[outputs.length];
+            // If the requests are not mixed, they should come in order.
+            for (int i = 0; i < results.length; i++){
+                results[i] = captureCallback.getTotalCaptureResultForRequest(
+                        captureRequests.get(i), CAPTURE_TIMEOUT_FRAMES);
+            }
+        } else {
+            // If the requests are mixed, they may not come in order.
+            results = captureCallback.getTotalCaptureResultsForRequests(
+                    captureRequests, CAPTURE_TIMEOUT_FRAMES * captureRequests.size());
+        }
+
+        // Make sure all queued input images have been released back by the camera.
+        for (int i = 0; i < numReprocessCaptures; i++) {
+            mImageWriterListener.waitForImageReleased(CAPTURE_TIMEOUT_MS);
+        }
+
+        return results;
     }
 
     private Size getMaxSize(int format, StaticMetadata.StreamDirection direction) {
@@ -511,18 +859,51 @@
     }
 
     private boolean isYuvReprocessSupported(String cameraId) throws Exception {
-        StaticMetadata info =
-                new StaticMetadata(mCameraManager.getCameraCharacteristics(cameraId),
-                                   CheckLevel.ASSERT, /*collector*/ null);
-        return info.isCapabilitySupported(
-                CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING);
+        return isReprocessSupported(cameraId, ImageFormat.YUV_420_888);
     }
 
     private boolean isOpaqueReprocessSupported(String cameraId) throws Exception {
-        StaticMetadata info =
-                new StaticMetadata(mCameraManager.getCameraCharacteristics(cameraId),
-                                   CheckLevel.ASSERT, /*collector*/ null);
-        return info.isCapabilitySupported(
-                CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_OPAQUE_REPROCESSING);
+        return isReprocessSupported(cameraId, ImageFormat.PRIVATE);
     }
-}
\ No newline at end of file
+
+    private void dumpImage(Image image, String name) {
+        String filename = DEBUG_FILE_NAME_BASE + name;
+        switch(image.getFormat()) {
+            case ImageFormat.JPEG:
+                filename += ".jpg";
+                break;
+            case ImageFormat.NV16:
+            case ImageFormat.NV21:
+            case ImageFormat.YUV_420_888:
+                filename += ".yuv";
+                break;
+            default:
+                filename += "." + image.getFormat();
+                break;
+        }
+
+        Log.d(TAG, "dumping an image to " + filename);
+        dumpFile(filename, getDataFromImage(image));
+    }
+
+    /**
+     * A class that holds an Image and a TotalCaptureResult.
+     */
+    private static class ImageResultHolder {
+        private final Image mImage;
+        private final TotalCaptureResult mResult;
+
+        public ImageResultHolder(Image image, TotalCaptureResult result) {
+            mImage = image;
+            mResult = result;
+        }
+
+        public Image getImage() {
+            return mImage;
+        }
+
+        public TotalCaptureResult getTotalCaptureResult() {
+            return mResult;
+        }
+    }
+}
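
A minimal sketch of how the mixed-burst helpers above fit together, assuming it runs inside the test class after setupImageReaders and setupReprocessableSession have been called; the six-request layout and the logging are illustrative only:

    // Alternate regular and reprocess requests in one burst.
    boolean[] isReprocessCaptures = new boolean[6];
    for (int i = 0; i < isReprocessCaptures.length; i++) {
        isReprocessCaptures[i] = (i % 2 == 1);   // odd slots are reprocess captures
    }

    ImageResultHolder[] holders = doMixedReprocessBurstCapture(isReprocessCaptures);
    try {
        for (ImageResultHolder holder : holders) {
            TotalCaptureResult result = holder.getTotalCaptureResult();
            Log.v(TAG, "Capture timestamp: " + result.get(CaptureResult.SENSOR_TIMESTAMP));
        }
    } finally {
        // Always return the images to their ImageReader so later captures don't stall.
        for (ImageResultHolder holder : holders) {
            holder.getImage().close();
        }
    }
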
diff --git a/tests/tests/hardware/src/android/hardware/camera2/cts/StaticMetadataTest.java b/tests/tests/hardware/src/android/hardware/camera2/cts/StaticMetadataTest.java
index 3076d09..31f9188 100644
--- a/tests/tests/hardware/src/android/hardware/camera2/cts/StaticMetadataTest.java
+++ b/tests/tests/hardware/src/android/hardware/camera2/cts/StaticMetadataTest.java
@@ -380,7 +380,7 @@
                 break;
 
             case REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING:
-            case REQUEST_AVAILABLE_CAPABILITIES_OPAQUE_REPROCESSING:
+            case REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING:
                 // Tested in ExtendedCameraCharacteristicsTest
                 return;
             default:
diff --git a/tests/tests/hardware/src/android/hardware/camera2/cts/SurfaceViewPreviewTest.java b/tests/tests/hardware/src/android/hardware/camera2/cts/SurfaceViewPreviewTest.java
index 01da4c8..7d377d6 100644
--- a/tests/tests/hardware/src/android/hardware/camera2/cts/SurfaceViewPreviewTest.java
+++ b/tests/tests/hardware/src/android/hardware/camera2/cts/SurfaceViewPreviewTest.java
@@ -18,6 +18,8 @@
 
 import static android.hardware.camera2.cts.CameraTestUtils.*;
 
+import android.graphics.ImageFormat;
+import android.view.Surface;
 import android.hardware.camera2.CameraCaptureSession;
 import android.hardware.camera2.CameraCaptureSession.CaptureCallback;
 import android.hardware.camera2.CameraDevice;
@@ -29,6 +31,7 @@
 import android.hardware.camera2.cts.CameraTestUtils.SimpleCaptureCallback;
 import android.hardware.camera2.cts.testcases.Camera2SurfaceViewTestCase;
 import android.util.Log;
+import android.util.Pair;
 import android.util.Range;
 
 import org.mockito.ArgumentCaptor;
@@ -36,6 +39,7 @@
 
 import static org.mockito.Mockito.*;
 
+import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.List;
 
@@ -118,6 +122,192 @@
     }
 
     /**
+     * Test to verify the {@link CameraCaptureSession#prepare} method works correctly, and has the
+     * expected effects on performance.
+     *
+     * - Ensure that prepare() results in onSurfacePrepared() being invoked
+     * - Ensure that prepare() does not cause preview glitches while operating
+     * - Ensure that starting to use a newly-prepared output does not cause additional
+     *   preview glitches to occur
+     */
+    public void testPreparePerformance() throws Throwable {
+        for (int i = 0; i < mCameraIds.length; i++) {
+            try {
+                openDevice(mCameraIds[i]);
+
+                preparePerformanceTestByCamera(mCameraIds[i]);
+            }
+            finally {
+                closeDevice();
+            }
+        }
+    }
+
+    private void preparePerformanceTestByCamera(String cameraId) throws Exception {
+        final int MAX_IMAGES_TO_PREPARE = 10;
+        final int UNKNOWN_LATENCY_RESULT_WAIT = 5;
+        final int MAX_RESULTS_TO_WAIT = 10;
+        final int FRAMES_FOR_AVERAGING = 100;
+        final int PREPARE_TIMEOUT_MS = 10000; // 10 s
+        final float PREPARE_FRAME_RATE_BOUNDS = 0.05f; // fraction allowed difference
+        final float PREPARE_PEAK_RATE_BOUNDS = 0.5f; // fraction allowed difference
+
+        Size maxYuvSize = getSupportedPreviewSizes(cameraId, mCameraManager, null).get(0);
+        Size maxPreviewSize = mOrderedPreviewSizes.get(0);
+
+        // Don't need image data, just drop it right away to minimize overhead
+        ImageDropperListener imageListener = new ImageDropperListener();
+
+        SimpleCaptureCallback resultListener = new SimpleCaptureCallback();
+
+        CaptureRequest.Builder previewRequest =
+                mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
+
+        // Configure outputs and session
+
+        updatePreviewSurface(maxPreviewSize);
+
+        createImageReader(maxYuvSize, ImageFormat.YUV_420_888, MAX_IMAGES_TO_PREPARE, imageListener);
+
+        List<Surface> outputSurfaces = new ArrayList<Surface>();
+        outputSurfaces.add(mPreviewSurface);
+        outputSurfaces.add(mReaderSurface);
+
+        CameraCaptureSession.StateCallback mockSessionListener =
+                mock(CameraCaptureSession.StateCallback.class);
+
+        mSession = configureCameraSession(mCamera, outputSurfaces, mockSessionListener, mHandler);
+
+        previewRequest.addTarget(mPreviewSurface);
+        Range<Integer> maxFpsTarget = mStaticInfo.getAeMaxTargetFpsRange();
+        previewRequest.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, maxFpsTarget);
+
+        mSession.setRepeatingRequest(previewRequest.build(), resultListener, mHandler);
+
+        // Converge AE
+        waitForAeStable(resultListener, UNKNOWN_LATENCY_RESULT_WAIT);
+
+        if (mStaticInfo.isAeLockSupported()) {
+            // Lock AE if possible to improve stability
+            previewRequest.set(CaptureRequest.CONTROL_AE_LOCK, true);
+            mSession.setRepeatingRequest(previewRequest.build(), resultListener, mHandler);
+            waitForResultValue(resultListener, CaptureResult.CONTROL_AE_STATE,
+                    CaptureResult.CONTROL_AE_STATE_LOCKED, MAX_RESULTS_TO_WAIT);
+        }
+
+        // Measure frame rate for a bit
+        Pair<Long, Long> frameDurationStats =
+                measureMeanFrameInterval(resultListener, FRAMES_FOR_AVERAGING, /*prevTimestamp*/ 0);
+
+        Log.i(TAG, String.format("Frame interval avg during normal preview: %f ms, peak %f ms",
+                        frameDurationStats.first / 1e6, frameDurationStats.second / 1e6));
+
+        // Drain results, do prepare
+        resultListener.drain();
+
+        mSession.prepare(mReaderSurface);
+
+        verify(mockSessionListener,
+                timeout(PREPARE_TIMEOUT_MS).times(1)).
+                onSurfacePrepared(eq(mSession), eq(mReaderSurface));
+
+        // Calculate frame rate during prepare
+
+        int resultsReceived = (int) resultListener.getTotalNumFrames();
+        if (resultsReceived > 2) {
+            // Only verify frame rate if there are a couple of results
+            Pair<Long, Long> whilePreparingFrameDurationStats =
+                    measureMeanFrameInterval(resultListener, resultsReceived, /*prevTimestamp*/ 0);
+
+            Log.i(TAG, String.format("Frame interval during prepare avg: %f ms, peak %f ms",
+                            whilePreparingFrameDurationStats.first / 1e6,
+                            whilePreparingFrameDurationStats.second / 1e6));
+
+            if (mStaticInfo.isHardwareLevelLimitedOrBetter()) {
+                mCollector.expectTrue(
+                    String.format("Camera %s: Preview peak frame interval affected by prepare " +
+                            "call: preview avg frame duration: %f ms, peak during prepare: %f ms",
+                            cameraId,
+                            frameDurationStats.first / 1e6,
+                            whilePreparingFrameDurationStats.second / 1e6),
+                    (whilePreparingFrameDurationStats.second <=
+                            frameDurationStats.first * (1 + PREPARE_PEAK_RATE_BOUNDS)));
+                mCollector.expectTrue(
+                    String.format("Camera %s: Preview average frame interval affected by prepare " +
+                            "call: preview avg frame duration: %f ms, during prepare: %f ms",
+                            cameraId,
+                            frameDurationStats.first / 1e6,
+                            whilePreparingFrameDurationStats.first / 1e6),
+                    (whilePreparingFrameDurationStats.first <=
+                            frameDurationStats.first * (1 + PREPARE_FRAME_RATE_BOUNDS)));
+            }
+        }
+
+        resultListener.drain();
+
+        // Get at least one more preview result without prepared target
+        CaptureResult result = resultListener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS);
+        long prevTimestamp = result.get(CaptureResult.SENSOR_TIMESTAMP);
+
+        // Now use the prepared stream and ensure there are no hiccups from using it
+        previewRequest.addTarget(mReaderSurface);
+
+        mSession.setRepeatingRequest(previewRequest.build(), resultListener, mHandler);
+
+        Pair<Long, Long> preparedFrameDurationStats =
+                measureMeanFrameInterval(resultListener, MAX_IMAGES_TO_PREPARE*2, prevTimestamp);
+
+        Log.i(TAG, String.format("Frame interval with prepared stream added avg: %f ms, peak %f ms",
+                        preparedFrameDurationStats.first / 1e6,
+                        preparedFrameDurationStats.second / 1e6));
+
+        if (mStaticInfo.isHardwareLevelLimitedOrBetter()) {
+            mCollector.expectTrue(
+                String.format("Camera %s: Preview peak frame interval affected by use of new " +
+                        "stream: preview avg frame duration: %f ms, peak with new stream: %f ms",
+                        cameraId,
+                        frameDurationStats.first / 1e6, preparedFrameDurationStats.second / 1e6),
+                (preparedFrameDurationStats.second <=
+                        frameDurationStats.first * (1 + PREPARE_PEAK_RATE_BOUNDS)));
+            mCollector.expectTrue(
+                String.format("Camera %s: Preview average frame interval affected by use of new " +
+                        "stream: preview avg frame duration: %f ms, with new stream: %f ms",
+                        cameraId,
+                        frameDurationStats.first / 1e6, preparedFrameDurationStats.first / 1e6),
+                (preparedFrameDurationStats.first <=
+                        frameDurationStats.first * (1 + PREPARE_FRAME_RATE_BOUNDS)));
+        }
+    }
+
+    /**
+     * Measure the inter-frame interval based on SENSOR_TIMESTAMP for frameCount frames from the
+     * provided capture listener.  If prevTimestamp is positive, it is used for the first interval
+     * calculation; otherwise, the first result is used to establish the starting time.
+     *
+     * Returns the mean interval in the first pair entry, and the largest interval in the second
+     * pair entry
+     */
+    Pair<Long, Long> measureMeanFrameInterval(SimpleCaptureCallback resultListener, int frameCount,
+            long prevTimestamp) throws Exception {
+        long summedIntervals = 0;
+        long maxInterval = 0;
+        int measurementCount = frameCount - ((prevTimestamp > 0) ? 0 : 1);
+
+        for (int i = 0; i < frameCount; i++) {
+            CaptureResult result = resultListener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS);
+            long timestamp = result.get(CaptureResult.SENSOR_TIMESTAMP);
+            if (prevTimestamp > 0) {
+                long interval = timestamp - prevTimestamp;
+                if (interval > maxInterval) maxInterval = interval;
+                summedIntervals += interval;
+            }
+            prevTimestamp = timestamp;
+        }
+        return new Pair<Long, Long>(summedIntervals / measurementCount, maxInterval);
+    }
+
+
+    /**
      * Test preview fps range for all supported ranges. The exposure time and frame duration
      * are validated.
      */
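
The prepare() checks above use fractional bounds relative to the baseline mean frame interval returned by measureMeanFrameInterval (mean and peak interval, in nanoseconds). A worked sketch of the same arithmetic, with an assumed 30 fps baseline; the numbers are illustrative, not from any device:

    // Assumed baseline: ~30 fps preview, mean frame interval ~33.3 ms.
    long baselineMeanNs = 33_333_333L;
    // Mean interval while preparing may be at most 5% above the baseline mean.
    long allowedMeanNs = (long) (baselineMeanNs * (1 + 0.05f));   // ~35.0 ms
    // Any single interval (the peak) may be at most 50% above the baseline mean.
    long allowedPeakNs = (long) (baselineMeanNs * (1 + 0.5f));    // ~50.0 ms
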
diff --git a/tests/tests/hardware/src/android/hardware/camera2/cts/helpers/CameraErrorCollector.java b/tests/tests/hardware/src/android/hardware/camera2/cts/helpers/CameraErrorCollector.java
index 0ee5ffc..9f0c012 100644
--- a/tests/tests/hardware/src/android/hardware/camera2/cts/helpers/CameraErrorCollector.java
+++ b/tests/tests/hardware/src/android/hardware/camera2/cts/helpers/CameraErrorCollector.java
@@ -22,6 +22,7 @@
 import android.hardware.camera2.CaptureRequest.Builder;
 import android.hardware.camera2.CaptureResult;
 import android.hardware.camera2.params.MeteringRectangle;
+import android.media.Image;
 import android.util.Log;
 import android.util.Size;
 
@@ -1049,4 +1050,13 @@
         Set<T> sizeSet = new HashSet<T>(list);
         expectTrue(msg + " each element must be distinct", sizeSet.size() == list.size());
     }
+
+    public void expectImageProperties(String msg, Image image, int format, Size size,
+            long timestampNs) {
+        expectEquals(msg + "Image format is wrong.", image.getFormat(), format);
+        expectEquals(msg + "Image width is wrong.", image.getWidth(), size.getWidth());
+        expectEquals(msg + "Image height is wrong.", image.getHeight(), size.getHeight());
+        expectEquals(msg + "Image timestamp is wrong.", image.getTimestamp(), timestampNs);
+    }
+
 }
diff --git a/tests/tests/hardware/src/android/hardware/camera2/cts/helpers/StaticMetadata.java b/tests/tests/hardware/src/android/hardware/camera2/cts/helpers/StaticMetadata.java
index 2acebc4..a22db8f 100644
--- a/tests/tests/hardware/src/android/hardware/camera2/cts/helpers/StaticMetadata.java
+++ b/tests/tests/hardware/src/android/hardware/camera2/cts/helpers/StaticMetadata.java
@@ -64,6 +64,7 @@
     private static final int CONTROL_AE_COMPENSATION_RANGE_DEFAULT_MAX = 2;
     private static final Rational CONTROL_AE_COMPENSATION_STEP_DEFAULT = new Rational(1, 2);
     private static final byte REQUEST_PIPELINE_MAX_DEPTH_MAX = 8;
+    private static final int MAX_REPROCESS_MAX_CAPTURE_STALL = 4;
 
     // TODO: Consider making this work across any metadata object, not just camera characteristics
     private final CameraCharacteristics mCharacteristics;
@@ -1296,6 +1297,30 @@
     }
 
     /**
+     * Get the highest supported target FPS range.
+     * Prioritizes maximizing the min FPS, then the max FPS without lowering min FPS.
+     */
+    public Range<Integer> getAeMaxTargetFpsRange() {
+        Range<Integer>[] fpsRanges = getAeAvailableTargetFpsRangesChecked();
+
+        Range<Integer> targetRange = fpsRanges[0];
+        // Assume unsorted list of target FPS ranges, so use two passes, first maximize min FPS
+        for (Range<Integer> candidateRange : fpsRanges) {
+            if (candidateRange.getLower() > targetRange.getLower()) {
+                targetRange = candidateRange;
+            }
+        }
+        // Then maximize max FPS while not lowering min FPS
+        for (Range<Integer> candidateRange : fpsRanges) {
+            if (candidateRange.getLower() >= targetRange.getLower() &&
+                    candidateRange.getUpper() > targetRange.getUpper()) {
+                targetRange = candidateRange;
+            }
+        }
+        return targetRange;
+    }
+
+    /**
      * Get max frame duration.
      *
      * @return 0 if maxFrameDuration is null
@@ -1902,6 +1927,26 @@
     }
 
     /**
+     * Get maxCaptureStall frames or default value (if value doesn't exist)
+     * @return maxCaptureStall frames or default value.
+     */
+    public int getMaxCaptureStallOrDefault() {
+        Key<Integer> key =
+                CameraCharacteristics.REPROCESS_MAX_CAPTURE_STALL;
+        Integer value = getValueFromKeyNonNull(key);
+
+        if (value == null) {
+            return MAX_REPROCESS_MAX_CAPTURE_STALL;
+        }
+
+        checkTrueForKey(key, " value is out of range ",
+                value >= 0 &&
+                value <= MAX_REPROCESS_MAX_CAPTURE_STALL);
+
+        return value;
+    }
+
+    /**
      * Get the scaler's cropping type (center only or freeform)
      * @return cropping type, return default value (CENTER_ONLY) if value is unavailable
      */
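
To make the two-pass selection in getAeMaxTargetFpsRange concrete, a small sketch with made-up candidate ranges (assuming android.util.Range is imported; the values are not from any real device):

    // Candidates as [min, max]: [15, 30], [30, 30], [7, 60], [30, 60].
    Range<Integer>[] fpsRanges = new Range[] {
            Range.create(15, 30), Range.create(30, 30),
            Range.create(7, 60), Range.create(30, 60)};
    // Pass 1 maximizes the lower bound, landing on a range with min FPS 30.
    // Pass 2 keeps min >= 30 and maximizes the upper bound, so [30, 60] is selected.
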
diff --git a/tests/tests/hardware/src/android/hardware/camera2/cts/rs/RawConverter.java b/tests/tests/hardware/src/android/hardware/camera2/cts/rs/RawConverter.java
index 2cd2469..8ca650f 100644
--- a/tests/tests/hardware/src/android/hardware/camera2/cts/rs/RawConverter.java
+++ b/tests/tests/hardware/src/android/hardware/camera2/cts/rs/RawConverter.java
@@ -163,6 +163,7 @@
      * @param rs a {@link RenderScript} context to use.
      * @param inputWidth width of the input RAW16 image in pixels.
      * @param inputHeight height of the input RAW16 image in pixels.
+     * @param inputStride stride of the input RAW16 image in bytes.
      * @param rawImageInput a byte array containing a RAW16 image.
      * @param staticMetadata the {@link CameraCharacteristics} for this RAW capture.
      * @param dynamicMetadata the {@link CaptureResult} for this RAW capture.
@@ -176,7 +177,7 @@
      *                   image to be rendered.
      */
     public static void convertToSRGB(RenderScript rs, int inputWidth, int inputHeight,
-            byte[] rawImageInput, CameraCharacteristics staticMetadata,
+            int inputStride, byte[] rawImageInput, CameraCharacteristics staticMetadata,
             CaptureResult dynamicMetadata, int outputOffsetX, int outputOffsetY,
             /*out*/Bitmap argbOutput) {
         int cfa = staticMetadata.get(CameraCharacteristics.SENSOR_INFO_COLOR_FILTER_ARRANGEMENT);
@@ -209,7 +210,7 @@
 
         LensShadingMap shadingMap = dynamicMetadata.get(CaptureResult.STATISTICS_LENS_SHADING_CORRECTION_MAP);
 
-        convertToSRGB(rs, inputWidth, inputHeight, cfa, blackLevelPattern, whiteLevel,
+        convertToSRGB(rs, inputWidth, inputHeight, inputStride, cfa, blackLevelPattern, whiteLevel,
                 rawImageInput, ref1, ref2, calib1, calib2, color1, color2,
                 forward1, forward2, neutral, shadingMap, outputOffsetX, outputOffsetY, argbOutput);
     }
@@ -219,8 +220,8 @@
      *
      * @see #convertToSRGB
      */
-    private static void convertToSRGB(RenderScript rs, int inputWidth, int inputHeight, int cfa,
-            int[] blackLevelPattern, int whiteLevel, byte[] rawImageInput,
+    private static void convertToSRGB(RenderScript rs, int inputWidth, int inputHeight,
+            int inputStride, int cfa, int[] blackLevelPattern, int whiteLevel, byte[] rawImageInput,
             int referenceIlluminant1, int referenceIlluminant2, float[] calibrationTransform1,
             float[] calibrationTransform2, float[] colorMatrix1, float[] colorMatrix2,
             float[] forwardTransform1, float[] forwardTransform2, Rational[/*3*/] neutralColorPoint,
@@ -238,6 +239,12 @@
         if (outputOffsetX < 0 || outputOffsetY < 0) {
             throw new IllegalArgumentException("Negative offset passed to convertToSRGB");
         }
+        if ((inputStride / 2) < inputWidth) {
+            throw new IllegalArgumentException("Stride too small.");
+        }
+        if ((inputStride % 2) != 0) {
+            throw new IllegalArgumentException("Invalid stride for RAW16 format, see graphics.h.");
+        }
         int outWidth = argbOutput.getWidth();
         int outHeight = argbOutput.getHeight();
         if (outWidth + outputOffsetX > inputWidth || outHeight + outputOffsetY > inputHeight) {
@@ -314,7 +321,7 @@
 
         // Setup input allocation (16-bit raw pixels)
         Type.Builder typeBuilder = new Type.Builder(rs, Element.U16(rs));
-        typeBuilder.setX(inputWidth);
+        typeBuilder.setX((inputStride / 2));
         typeBuilder.setY(inputHeight);
         Type inputType = typeBuilder.create();
         Allocation input = Allocation.createTyped(rs, inputType);
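
The new inputStride parameter is the RAW16 row stride in bytes; RAW16 stores two bytes per pixel, so a valid stride is an even byte count of at least 2 * width. A minimal sketch of feeding a RAW_SENSOR Image into the updated entry point (rawImage, rs, staticMetadata, dynamicMetadata, and argbOutput are assumed to exist; plane access uses the standard android.media.Image API):

    Image.Plane rawPlane = rawImage.getPlanes()[0];   // RAW_SENSOR images have a single plane
    int inputStride = rawPlane.getRowStride();        // bytes per row; may exceed 2 * width
    ByteBuffer rawBuffer = rawPlane.getBuffer();
    byte[] rawBytes = new byte[rawBuffer.remaining()];
    rawBuffer.get(rawBytes);
    RawConverter.convertToSRGB(rs, rawImage.getWidth(), rawImage.getHeight(), inputStride,
            rawBytes, staticMetadata, dynamicMetadata, /*offsetX*/ 0, /*offsetY*/ 0, argbOutput);
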
diff --git a/tests/tests/hardware/src/android/hardware/camera2/cts/testcases/Camera2AndroidTestCase.java b/tests/tests/hardware/src/android/hardware/camera2/cts/testcases/Camera2AndroidTestCase.java
index e7f1e7a..78370b3 100644
--- a/tests/tests/hardware/src/android/hardware/camera2/cts/testcases/Camera2AndroidTestCase.java
+++ b/tests/tests/hardware/src/android/hardware/camera2/cts/testcases/Camera2AndroidTestCase.java
@@ -294,13 +294,8 @@
             ImageReader.OnImageAvailableListener listener) throws Exception {
 
         ImageReader reader = null;
-        if (format == ImageFormat.PRIVATE) {
-            // Create opaque ImageReader
-            reader = ImageReader.newOpaqueInstance(size.getWidth(), size.getHeight(), maxNumImages);
-        } else {
-            reader = ImageReader.newInstance(size.getWidth(), size.getHeight(),
-                    format, maxNumImages);
-        }
+        reader = ImageReader.newInstance(size.getWidth(), size.getHeight(),
+                format, maxNumImages);
 
         reader.setOnImageAvailableListener(listener, mHandler);
         if (VERBOSE) Log.v(TAG, "Created ImageReader size " + size.toString());
@@ -462,15 +457,6 @@
             // Expected.
         }
 
-        // Image#isOpaque test
-        try {
-            closedImage.isOpaque();
-            fail("Image should throw IllegalStateException when calling isOpaque"
-                    + " after the image is closed");
-        } catch (IllegalStateException e) {
-            // Expected.
-        }
-
         // Image#getCropRect test
         try {
             closedImage.getCropRect();
diff --git a/tests/tests/hardware/src/android/hardware/camera2/cts/testcases/Camera2SurfaceViewTestCase.java b/tests/tests/hardware/src/android/hardware/camera2/cts/testcases/Camera2SurfaceViewTestCase.java
index bcc4061..3ca696b 100644
--- a/tests/tests/hardware/src/android/hardware/camera2/cts/testcases/Camera2SurfaceViewTestCase.java
+++ b/tests/tests/hardware/src/android/hardware/camera2/cts/testcases/Camera2SurfaceViewTestCase.java
@@ -694,4 +694,21 @@
 
         return null;
     }
+
+    protected boolean isReprocessSupported(String cameraId, int format)
+            throws CameraAccessException {
+        if (format != ImageFormat.YUV_420_888 && format != ImageFormat.PRIVATE) {
+            throw new IllegalArgumentException(
+                    "format " + format + " is not supported for reprocessing");
+        }
+
+        StaticMetadata info =
+                new StaticMetadata(mCameraManager.getCameraCharacteristics(cameraId),
+                                   CheckLevel.ASSERT, /*collector*/ null);
+        int cap = CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING;
+        if (format == ImageFormat.PRIVATE) {
+            cap = CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING;
+        }
+        return info.isCapabilitySupported(cap);
+    }
 }
diff --git a/tests/tests/hardware/src/android/hardware/cts/SingleSensorTests.java b/tests/tests/hardware/src/android/hardware/cts/SingleSensorTests.java
index 42cbdfb..0fbd8fa 100644
--- a/tests/tests/hardware/src/android/hardware/cts/SingleSensorTests.java
+++ b/tests/tests/hardware/src/android/hardware/cts/SingleSensorTests.java
@@ -22,6 +22,7 @@
 import android.hardware.cts.helpers.SensorStats;
 import android.hardware.cts.helpers.TestSensorEnvironment;
 import android.hardware.cts.helpers.sensoroperations.TestSensorOperation;
+import android.content.pm.PackageManager;
 
 import java.util.HashMap;
 import java.util.Map;
@@ -106,8 +107,13 @@
     public void testSensorProperties() {
         // sensor type: [getMinDelay()]
         Map<Integer, Object[]> expectedProperties = new HashMap<>(3);
-        expectedProperties.put(Sensor.TYPE_ACCELEROMETER, new Object[]{10000});
-        expectedProperties.put(Sensor.TYPE_GYROSCOPE, new Object[]{10000});
+        if (getContext().getPackageManager().hasSystemFeature(PackageManager.FEATURE_WATCH)) {
+            expectedProperties.put(Sensor.TYPE_ACCELEROMETER, new Object[]{20000});
+            expectedProperties.put(Sensor.TYPE_GYROSCOPE, new Object[]{20000});
+        } else {
+            expectedProperties.put(Sensor.TYPE_ACCELEROMETER, new Object[]{10000});
+            expectedProperties.put(Sensor.TYPE_GYROSCOPE, new Object[]{10000});
+        }
         expectedProperties.put(Sensor.TYPE_MAGNETIC_FIELD, new Object[]{100000});
 
         SensorManager sensorManager =
diff --git a/tests/tests/hardware/src/android/hardware/multiprocess/ErrorLoggingService.java b/tests/tests/hardware/src/android/hardware/multiprocess/ErrorLoggingService.java
new file mode 100644
index 0000000..1b713ba
--- /dev/null
+++ b/tests/tests/hardware/src/android/hardware/multiprocess/ErrorLoggingService.java
@@ -0,0 +1,611 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.hardware.multiprocess;
+
+import android.app.Service;
+import android.content.ComponentName;
+import android.content.Context;
+import android.content.Intent;
+import android.content.ServiceConnection;
+import android.os.AsyncTask;
+import android.os.Bundle;
+import android.os.ConditionVariable;
+import android.os.Handler;
+import android.os.HandlerThread;
+import android.os.IBinder;
+import android.os.Looper;
+import android.os.Message;
+import android.os.Messenger;
+import android.os.Parcel;
+import android.os.Parcelable;
+import android.os.RemoteException;
+import android.util.Log;
+import android.util.Pair;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+import java.util.ListIterator;
+import java.util.concurrent.Callable;
+import java.util.concurrent.ExecutionException;
+import java.util.concurrent.FutureTask;
+import java.util.concurrent.LinkedBlockingQueue;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.TimeoutException;
+
+/**
+ * Service for collecting error messages from other processes.
+ *
+ * <p />
+ * Used by CTS for multi-process error logging.
+ */
+public class ErrorLoggingService extends Service {
+    public static final String TAG = "ErrorLoggingService";
+
+    /**
+     * Receive all currently logged error strings in replyTo Messenger.
+     */
+    public static final int MSG_GET_LOG = 0;
+
+    /**
+     * Append a new error string to the log maintained in this service.
+     */
+    public static final int MSG_LOG_EVENT = 1;
+
+    /**
+     * Logged errors being reported in a replyTo Messenger by this service.
+     */
+    public static final int MSG_LOG_REPORT = 2;
+
+    /**
+     * A list of LogEvent objects recording all errors reported to this service.
+     */
+    private final ArrayList<LogEvent> mLog = new ArrayList<>();
+
+    /**
+     * A list of Messengers waiting for logs for any event.
+     */
+    private final ArrayList<Pair<Integer, Messenger>> mEventWaiters = new ArrayList<>();
+
+    private static final int DO_EVENT_FILTER = 1;
+    private static final String LOG_EVENT = "log_event";
+    private static final String LOG_EVENT_ARRAY = "log_event_array";
+
+
+    /**
+     * The messenger binder used by clients of this service to report/retrieve errors.
+     */
+    private final Messenger mMessenger = new Messenger(new MainHandler(mLog, mEventWaiters));
+
+    @Override
+    public void onDestroy() {
+        super.onDestroy();
+        mLog.clear();
+    }
+
+    @Override
+    public IBinder onBind(Intent intent) {
+        return mMessenger.getBinder();
+    }
+
+    /**
+     * Handler implementing the message interface for this service.
+     */
+    private static class MainHandler extends Handler {
+
+        ArrayList<LogEvent> mErrorLog;
+        ArrayList<Pair<Integer, Messenger>> mEventWaiters;
+
+        MainHandler(ArrayList<LogEvent> log, ArrayList<Pair<Integer, Messenger>> waiters) {
+            mErrorLog = log;
+            mEventWaiters = waiters;
+        }
+
+        private void sendMessages() {
+            if (mErrorLog.size() > 0) {
+                ListIterator<Pair<Integer, Messenger>> iter = mEventWaiters.listIterator();
+                boolean messagesHandled = false;
+                while (iter.hasNext()) {
+                    Pair<Integer, Messenger> elem = iter.next();
+                    for (LogEvent i : mErrorLog) {
+                        if (elem.first == null || elem.first == i.getEvent()) {
+                            Message m = Message.obtain(null, MSG_LOG_REPORT);
+                            Bundle b = m.getData();
+                            b.putParcelableArray(LOG_EVENT_ARRAY,
+                                    mErrorLog.toArray(new LogEvent[mErrorLog.size()]));
+                            m.setData(b);
+                            try {
+                                elem.second.send(m);
+                                messagesHandled = true;
+                            } catch (RemoteException e) {
+                                Log.e(TAG, "Could not report log message to remote, " +
+                                        "received exception from remote: " + e +
+                                        "\n  Original errors: " +
+                                        Arrays.toString(mErrorLog.toArray()));
+                            }
+                            iter.remove();
+                            // The full log has been delivered to this waiter; stop scanning
+                            // further events so the removed waiter isn't touched again.
+                            break;
+                        }
+                    }
+                }
+                if (messagesHandled) {
+                    mErrorLog.clear();
+                }
+            }
+        }
+
+        @Override
+        public void handleMessage(Message msg) {
+            switch(msg.what) {
+                case MSG_GET_LOG:
+                    if (msg.replyTo == null) {
+                        break;
+                    }
+
+                    if (msg.arg1 == DO_EVENT_FILTER) {
+                        mEventWaiters.add(new Pair<Integer, Messenger>(msg.arg2, msg.replyTo));
+                    } else {
+                        mEventWaiters.add(new Pair<Integer, Messenger>(null, msg.replyTo));
+                    }
+
+                    sendMessages();
+
+                    break;
+                case MSG_LOG_EVENT:
+                    Bundle b = msg.getData();
+                    b.setClassLoader(LogEvent.class.getClassLoader());
+                    LogEvent error = b.getParcelable(LOG_EVENT);
+                    mErrorLog.add(error);
+
+                    sendMessages();
+
+                    break;
+                default:
+                    Log.e(TAG, "Unknown message type: " + msg.what);
+                    super.handleMessage(msg);
+            }
+        }
+    }
+
+    /**
+     * Parcelable object to use with logged events.
+     */
+    public static class LogEvent implements Parcelable {
+
+        private final int mEvent;
+        private final String mLogText;
+
+        @Override
+        public int describeContents() {
+            return 0;
+        }
+
+        @Override
+        public void writeToParcel(Parcel out, int flags) {
+            out.writeInt(mEvent);
+            out.writeString(mLogText);
+        }
+
+        public int getEvent() {
+            return mEvent;
+        }
+
+        public String getLogText() {
+            return mLogText;
+        }
+
+        public static final Parcelable.Creator<LogEvent> CREATOR
+                = new Parcelable.Creator<LogEvent>() {
+
+            public LogEvent createFromParcel(Parcel in) {
+                return new LogEvent(in);
+            }
+
+            public LogEvent[] newArray(int size) {
+                return new LogEvent[size];
+            }
+        };
+
+        private LogEvent(Parcel in) {
+            mEvent = in.readInt();
+            mLogText = in.readString();
+        }
+
+        public LogEvent(int id, String msg) {
+            mEvent = id;
+            mLogText = msg;
+        }
+
+        @Override
+        public String toString() {
+            return "LogEvent{" +
+                    "Event=" + mEvent +
+                    ", LogText='" + mLogText + '\'' +
+                    '}';
+        }
+
+        @Override
+        public boolean equals(Object o) {
+            if (this == o) return true;
+            if (o == null || getClass() != o.getClass()) return false;
+
+            LogEvent logEvent = (LogEvent) o;
+
+            if (mEvent != logEvent.mEvent) return false;
+            if (mLogText != null ? !mLogText.equals(logEvent.mLogText) : logEvent.mLogText != null)
+                return false;
+
+            return true;
+        }
+
+        @Override
+        public int hashCode() {
+            int result = mEvent;
+            result = 31 * result + (mLogText != null ? mLogText.hashCode() : 0);
+            return result;
+        }
+    }
+
+    /**
+     * Implementation of Future to use when retrieving error messages from service.
+     *
+     * <p />
+     * To use this, either pass a {@link Runnable} or {@link Callable} in the constructor,
+     * or use the default constructor and set the result externally with {@link #setResult(Object)}.
+     */
+    private static class SettableFuture<T> extends FutureTask<T> {
+
+        public SettableFuture() {
+            super(new Callable<T>() {
+                @Override
+                public T call() throws Exception {
+                    throw new IllegalStateException(
+                            "Empty task, use #setResult instead of calling run.");
+                }
+            });
+        }
+
+        public SettableFuture(Callable<T> callable) {
+            super(callable);
+        }
+
+        public SettableFuture(Runnable runnable, T result) {
+            super(runnable, result);
+        }
+
+        public void setResult(T result) {
+            set(result);
+        }
+    }
+
+    /**
+     * Helper class for setting up and using a connection to {@link ErrorLoggingService}.
+     */
+    public static class ErrorServiceConnection implements AutoCloseable {
+
+        private Messenger mService = null;
+        private boolean mBind = false;
+        private final Object mLock = new Object();
+        private final Context mContext;
+        private final HandlerThread mReplyThread;
+        private ReplyHandler mReplyHandler;
+        private Messenger mReplyMessenger;
+
+        /**
+         * Construct a connection to the {@link ErrorLoggingService} in the given {@link Context}.
+         *
+         * @param context the {@link Context} to bind the service in.
+         */
+        public ErrorServiceConnection(final Context context) {
+            mContext = context;
+            mReplyThread = new HandlerThread("ErrorServiceConnection");
+            mReplyThread.start();
+            mReplyHandler = new ReplyHandler(mReplyThread.getLooper());
+            mReplyMessenger = new Messenger(mReplyHandler);
+        }
+
+        @Override
+        public void close() {
+            stop();
+            mReplyThread.quit();
+            synchronized (mLock) {
+                mService = null;
+                mBind = false;
+                mReplyHandler.cancelAll();
+            }
+        }
+
+        @Override
+        protected void finalize() throws Throwable {
+            close();
+            super.finalize();
+        }
+
+        private static final class ReplyHandler extends Handler {
+
+            private final LinkedBlockingQueue<SettableFuture<List<LogEvent>>> mFuturesQueue =
+                    new LinkedBlockingQueue<>();
+
+            private ReplyHandler(Looper looper) {
+                super(looper);
+            }
+
+            /**
+             * Cancel all pending futures for this handler.
+             */
+            public void cancelAll() {
+                List<SettableFuture<List<LogEvent>>> logFutures = new ArrayList<>();
+                mFuturesQueue.drainTo(logFutures);
+                for (SettableFuture<List<LogEvent>> i : logFutures) {
+                    i.cancel(true);
+                }
+            }
+
+            /**
+             * Cancel a given future, and remove from the pending futures for this handler.
+             *
+             * @param report future to remove.
+             */
+            public void cancel(SettableFuture<List<LogEvent>> report) {
+                mFuturesQueue.remove(report);
+                report.cancel(true);
+            }
+
+            /**
+             * Add future for the next received report from this service.
+             *
+             * @param report a future to get the next received event report from.
+             */
+            public void addFuture(SettableFuture<List<LogEvent>> report) {
+                if (!mFuturesQueue.offer(report)) {
+                    Log.e(TAG, "Could not request another error report, too many requests queued.");
+                }
+            }
+
+            @SuppressWarnings("unchecked")
+            @Override
+            public void handleMessage(Message msg) {
+                switch (msg.what) {
+                    case MSG_LOG_REPORT:
+                        SettableFuture<List<LogEvent>> task = mFuturesQueue.poll();
+                        if (task == null) break;
+                        Bundle b = msg.getData();
+                        b.setClassLoader(LogEvent.class.getClassLoader());
+                        Parcelable[] array = b.getParcelableArray(LOG_EVENT_ARRAY);
+                        LogEvent[] events = Arrays.copyOf(array, array.length, LogEvent[].class);
+                        List<LogEvent> res = Arrays.asList(events);
+                        task.setResult(res);
+                        break;
+                    default:
+                        Log.e(TAG, "Unknown message type: " + msg.what);
+                        super.handleMessage(msg);
+                }
+            }
+        }
+
+        private ServiceConnection mConnection = new ServiceConnection() {
+            @Override
+            public void onServiceConnected(ComponentName componentName, IBinder iBinder) {
+                Log.i(TAG, "Service connected.");
+                synchronized (mLock) {
+                    mService = new Messenger(iBinder);
+                    mBind = true;
+                    mLock.notifyAll();
+                }
+            }
+
+            @Override
+            public void onServiceDisconnected(ComponentName componentName) {
+                Log.i(TAG, "Service disconnected.");
+                synchronized (mLock) {
+                    mService = null;
+                    mBind = false;
+                    mReplyHandler.cancelAll();
+                }
+            }
+        };
+
+        private Messenger blockingGetBoundService() {
+            synchronized (mLock) {
+                if (!mBind) {
+                    mContext.bindService(new Intent(mContext, ErrorLoggingService.class), mConnection,
+                            Context.BIND_AUTO_CREATE);
+                    mBind = true;
+                }
+                try {
+                    while (mService == null && mBind) {
+                        mLock.wait();
+                    }
+                } catch (InterruptedException e) {
+                    Log.e(TAG, "Waiting for error service interrupted: " + e);
+                }
+                if (!mBind) {
+                    Log.w(TAG, "Could not get service, service disconnected.");
+                }
+                return mService;
+            }
+        }
+
+        private Messenger getBoundService() {
+            synchronized (mLock) {
+                if (!mBind) {
+                    mContext.bindService(new Intent(mContext, ErrorLoggingService.class), mConnection,
+                            Context.BIND_AUTO_CREATE);
+                    mBind = true;
+                }
+                return mService;
+            }
+        }
+
+        /**
+         * If the {@link ErrorLoggingService} is not yet bound, begin service connection attempt.
+         *
+         * <p />
+         * Note: This will not block.
+         */
+        public void start() {
+            synchronized (mLock) {
+                if (!mBind) {
+                    mContext.bindService(new Intent(mContext, ErrorLoggingService.class), mConnection,
+                            Context.BIND_AUTO_CREATE);
+                    mBind = true;
+                }
+            }
+        }
+
+        /**
+         * Unbind from the {@link ErrorLoggingService} if it has been bound.
+         *
+         * <p />
+         * Note: This will not block.
+         */
+        public void stop() {
+            synchronized (mLock) {
+                if (mBind) {
+                    mContext.unbindService(mConnection);
+                    mBind = false;
+                }
+            }
+        }
+
+        /**
+         * Send a logged event to the bound {@link ErrorLoggingService}.
+         *
+         * <p />
+         * If the service is not yet bound, this will bind the service and wait until it has been
+         * connected.
+         *
+         * <p />
+         * This is not safe to call from the UI thread, as this will deadlock with the looper used
+         * when connecting the service.
+         *
+         * @param id an int indicating the ID of this event.
+         * @param msg a {@link String} message to send.
+         */
+        public void log(final int id, final String msg) {
+            Messenger service = blockingGetBoundService();
+            Message m = Message.obtain(null, MSG_LOG_EVENT);
+            m.getData().putParcelable(LOG_EVENT, new LogEvent(id, msg));
+            try {
+                service.send(m);
+            } catch (RemoteException e) {
+                Log.e(TAG, "Received exception while logging error: " + e);
+            }
+        }
+
+        /**
+         * Send a logged event to the bound {@link ErrorLoggingService} when it becomes available.
+         *
+         * <p />
+         * If the service is not yet bound, this will bind the service.
+         *
+         * @param id an int indicating the ID of this event.
+         * @param msg a {@link String} message to send.
+         */
+        public void logAsync(final int id, final String msg) {
+            AsyncTask.SERIAL_EXECUTOR.execute(new Runnable() {
+                @Override
+                public void run() {
+                    log(id, msg);
+                }
+            });
+        }
+
+        /**
+         * Retrieve all events logged in the {@link ErrorLoggingService}.
+         *
+         * <p />
+         * If the service is not yet bound, this will bind the service and wait until it has been
+         * connected.  Likewise, after the service has been bound, this method will block until
+         * the given timeout passes or an event is logged in the service.  Passing a negative
+         * timeout is equivalent to using an infinite timeout value.
+         *
+         * <p />
+         * This is not safe to call from the UI thread, as this will deadlock with the looper used
+         * when connecting the service.
+         *
+         * <p />
+         * Note: This method clears the events stored in the bound {@link ErrorLoggingService}.
+         *
+         * @param timeoutMs the number of milliseconds to wait for a logging event.
+         * @return a list of {@link LogEvent} objects reported to the bound
+         *          {@link ErrorLoggingService} since the last call to getLog.
+         *
+         * @throws TimeoutException if the given timeout elapsed with no events logged.
+         */
+        public List<LogEvent> getLog(long timeoutMs) throws TimeoutException {
+            return retrieveLog(false, 0, timeoutMs);
+        }
+
+        /**
+         * Retrieve all events logged in the {@link ErrorLoggingService}.
+         *
+         * <p />
+         * If the service is not yet bound, this will bind the service and wait until it has been
+         * connected.  Likewise, after the service has been bound, this method will block until
+         * the given timeout passes or an event with the given event ID is logged in the service.
+         * Passing a negative timeout is equivalent to using an infinite timeout value.
+         *
+         * <p />
+         * This is not safe to call from the UI thread, as this will deadlock with the looper used
+         * when connecting the service.
+         *
+         * <p />
+         * Note: This method clears the events stored in the bound {@link ErrorLoggingService}.
+         *
+         * @param timeoutMs the number of milliseconds to wait for a logging event.
+         * @param event the ID of the event to wait for.
+         * @return a list of {@link LogEvent} objects reported to the bound
+         *          {@link ErrorLoggingService} since the last call to getLog.
+         *
+         * @throws TimeoutException if the given timeout elapsed with no events of the given type
+         *          logged.
+         */
+        public List<LogEvent> getLog(long timeoutMs, int event) throws TimeoutException {
+            return retrieveLog(true, event, timeoutMs);
+        }
+
+        private List<LogEvent> retrieveLog(boolean hasEvent, int event, long timeout)
+                throws TimeoutException {
+            Messenger service = blockingGetBoundService();
+
+            SettableFuture<List<LogEvent>> task = new SettableFuture<>();
+
+            Message m = (hasEvent) ?
+                    Message.obtain(null, MSG_GET_LOG, DO_EVENT_FILTER, event, null) :
+                    Message.obtain(null, MSG_GET_LOG);
+            m.replyTo = mReplyMessenger;
+
+            synchronized(this) {
+                mReplyHandler.addFuture(task);
+                try {
+                    service.send(m);
+                } catch (RemoteException e) {
+                    Log.e(TAG, "Received exception while retrieving errors: " + e);
+                    return null;
+                }
+            }
+
+            List<LogEvent> res = null;
+            try {
+                res = (timeout < 0) ? task.get() : task.get(timeout, TimeUnit.MILLISECONDS);
+            } catch (InterruptedException|ExecutionException e) {
+                Log.e(TAG, "Received exception while retrieving errors: " + e);
+            }
+            return res;
+        }
+    }
+}
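Note (illustrative sketch, not part of this change): to make the connection helper's lifecycle concrete, a test running off the UI thread would typically drive it as below. The context variable, TAG, and the 5000 ms timeout are assumptions for the example; the class, method, and constant names come from this patch.

    // Hedged usage sketch for ErrorLoggingService.ErrorServiceConnection.
    ErrorLoggingService.ErrorServiceConnection conn =
            new ErrorLoggingService.ErrorServiceConnection(context);
    conn.start();                                              // bind without blocking
    conn.logAsync(TestConstants.EVENT_CAMERA_CONNECT, "demo"); // queue an event off-thread
    try {
        // Blocks until an EVENT_CAMERA_CONNECT arrives or 5000 ms pass; clears the stored log.
        List<ErrorLoggingService.LogEvent> events =
                conn.getLog(5000, TestConstants.EVENT_CAMERA_CONNECT);
        for (ErrorLoggingService.LogEvent e : events) {
            Log.i(TAG, "event=" + e.getEvent() + " text=" + e.getLogText());
        }
    } catch (TimeoutException e) {
        Log.w(TAG, "No matching event before timeout");
    } finally {
        conn.stop();                                           // unbind when finished
    }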
diff --git a/tests/tests/hardware/src/android/hardware/multiprocess/camera/cts/Camera1Activity.java b/tests/tests/hardware/src/android/hardware/multiprocess/camera/cts/Camera1Activity.java
new file mode 100644
index 0000000..5c27111
--- /dev/null
+++ b/tests/tests/hardware/src/android/hardware/multiprocess/camera/cts/Camera1Activity.java
@@ -0,0 +1,93 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.hardware.multiprocess.camera.cts;
+
+import android.app.Activity;
+import android.hardware.Camera;
+import android.hardware.multiprocess.ErrorLoggingService;
+import android.os.Bundle;
+import android.util.Log;
+
+/**
+ * Activity implementing basic access of the Camera1 API.
+ *
+ * <p />
+ * This will log all errors to {@link android.hardware.multiprocess.ErrorLoggingService}.
+ */
+public class Camera1Activity extends Activity {
+    private static final String TAG = "Camera1Activity";
+
+    Camera mCamera;
+    ErrorLoggingService.ErrorServiceConnection mErrorServiceConnection;
+
+    @Override
+    protected void onCreate(Bundle savedInstanceState) {
+        Log.i(TAG, "onCreate called.");
+        super.onCreate(savedInstanceState);
+        mErrorServiceConnection = new ErrorLoggingService.ErrorServiceConnection(this);
+        mErrorServiceConnection.start();
+    }
+
+    @Override
+    protected void onResume() {
+        Log.i(TAG, "onResume called.");
+        super.onResume();
+        try {
+            mCamera = Camera.open();
+            if (mCamera == null) {
+                mErrorServiceConnection.logAsync(TestConstants.EVENT_CAMERA_ERROR, TAG +
+                        " no cameras available.");
+                return; // nothing to set up; avoid dereferencing a null camera below
+            }
+            mCamera.setErrorCallback(new Camera.ErrorCallback() {
+                @Override
+                public void onError(int i, Camera camera) {
+                    if (i == Camera.CAMERA_ERROR_EVICTED) {
+                        mErrorServiceConnection.logAsync(TestConstants.EVENT_CAMERA_EVICTED,
+                                TAG + " camera evicted");
+                        Log.e(TAG, "onError called with event " + i + ", camera evicted");
+                    } else {
+                        mErrorServiceConnection.logAsync(TestConstants.EVENT_CAMERA_ERROR,
+                                TAG + " camera experienced error: " + i);
+                        Log.e(TAG, "onError called with event " + i + ", camera error");
+                    }
+                }
+            });
+            mErrorServiceConnection.logAsync(TestConstants.EVENT_CAMERA_CONNECT,
+                    TAG + " camera connected");
+        } catch (RuntimeException e) {
+            mErrorServiceConnection.logAsync(TestConstants.EVENT_CAMERA_ERROR, TAG +
+                    " camera exception during connection: " + e);
+            Log.e(TAG, "Runtime error: " + e);
+        }
+    }
+
+    @Override
+    protected void onPause() {
+        Log.i(TAG, "onPause called.");
+        super.onPause();
+    }
+
+    @Override
+    protected void onDestroy() {
+        Log.i(TAG, "onDestroy called.");
+        super.onDestroy();
+        if (mErrorServiceConnection != null) {
+            mErrorServiceConnection.stop();
+            mErrorServiceConnection = null;
+        }
+    }
+}
\ No newline at end of file
diff --git a/tests/tests/hardware/src/android/hardware/multiprocess/camera/cts/Camera2Activity.java b/tests/tests/hardware/src/android/hardware/multiprocess/camera/cts/Camera2Activity.java
new file mode 100644
index 0000000..2a78649
--- /dev/null
+++ b/tests/tests/hardware/src/android/hardware/multiprocess/camera/cts/Camera2Activity.java
@@ -0,0 +1,133 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.hardware.multiprocess.camera.cts;
+
+import android.app.Activity;
+import android.content.Context;
+import android.hardware.camera2.CameraAccessException;
+import android.hardware.camera2.CameraDevice;
+import android.hardware.camera2.CameraManager;
+import android.hardware.multiprocess.ErrorLoggingService;
+import android.os.Bundle;
+import android.os.Handler;
+import android.util.Log;
+
+/**
+ * Activity implementing basic access of the Camera2 API.
+ *
+ * <p />
+ * This will log all errors to {@link android.hardware.multiprocess.ErrorLoggingService}.
+ */
+public class Camera2Activity extends Activity {
+    private static final String TAG = "Camera2Activity";
+
+    ErrorLoggingService.ErrorServiceConnection mErrorServiceConnection;
+
+    @Override
+    protected void onCreate(Bundle savedInstanceState) {
+        Log.i(TAG, "onCreate called.");
+        super.onCreate(savedInstanceState);
+        mErrorServiceConnection = new ErrorLoggingService.ErrorServiceConnection(this);
+        mErrorServiceConnection.start();
+    }
+
+    @Override
+    protected void onPause() {
+        Log.i(TAG, "onPause called.");
+        super.onPause();
+    }
+
+    @Override
+    protected void onResume() {
+        Log.i(TAG, "onResume called.");
+        super.onResume();
+
+        try {
+            CameraManager manager = (CameraManager) getSystemService(Context.CAMERA_SERVICE);
+
+            if (manager == null) {
+                mErrorServiceConnection.logAsync(TestConstants.EVENT_CAMERA_ERROR, TAG +
+                        " could not connect camera service");
+                return;
+            }
+            String[] cameraIds = manager.getCameraIdList();
+
+            if (cameraIds == null || cameraIds.length == 0) {
+                mErrorServiceConnection.logAsync(TestConstants.EVENT_CAMERA_ERROR, TAG +
+                        " device reported having no cameras");
+                return;
+            }
+
+            manager.registerAvailabilityCallback(new CameraManager.AvailabilityCallback() {
+                @Override
+                public void onCameraAvailable(String cameraId) {
+                    super.onCameraAvailable(cameraId);
+                    mErrorServiceConnection.logAsync(TestConstants.EVENT_CAMERA_AVAILABLE,
+                            cameraId);
+                    Log.i(TAG, "Camera " + cameraId + " is available");
+                }
+
+                @Override
+                public void onCameraUnavailable(String cameraId) {
+                    super.onCameraUnavailable(cameraId);
+                    mErrorServiceConnection.logAsync(TestConstants.EVENT_CAMERA_UNAVAILABLE,
+                            cameraId);
+                    Log.i(TAG, "Camera " + cameraId + " is unavailable");
+                }
+            }, null);
+
+            final String chosen = cameraIds[0];
+
+            manager.openCamera(chosen, new CameraDevice.StateCallback() {
+                @Override
+                public void onOpened(CameraDevice cameraDevice) {
+                    mErrorServiceConnection.logAsync(TestConstants.EVENT_CAMERA_CONNECT,
+                            chosen);
+                    Log.i(TAG, "Camera " + chosen + " is opened");
+                }
+
+                @Override
+                public void onDisconnected(CameraDevice cameraDevice) {
+                    mErrorServiceConnection.logAsync(TestConstants.EVENT_CAMERA_EVICTED,
+                            chosen);
+                    Log.i(TAG, "Camera " + chosen + " is disconnected");
+                }
+
+                @Override
+                public void onError(CameraDevice cameraDevice, int i) {
+                    mErrorServiceConnection.logAsync(TestConstants.EVENT_CAMERA_ERROR, TAG +
+                            " Camera " + chosen + " experienced error " + i);
+                    Log.e(TAG, "Camera " + chosen + " onError called with error " + i);
+                }
+            }, null);
+        } catch (CameraAccessException e) {
+            mErrorServiceConnection.logAsync(TestConstants.EVENT_CAMERA_ERROR, TAG +
+                    " camera exception during connection: " + e);
+            Log.e(TAG, "Access exception: " + e);
+        }
+    }
+
+    @Override
+    protected void onDestroy() {
+        Log.i(TAG, "onDestroy called.");
+        super.onDestroy();
+        if (mErrorServiceConnection != null) {
+            mErrorServiceConnection.stop();
+            mErrorServiceConnection = null;
+        }
+    }
+}
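Note (hedged, not part of this change): the activity above never closes the CameraDevice it opens, presumably on purpose, since the eviction tests need this remote process to keep holding the camera so the main process can observe the disconnect. For comparison, a non-test activity would normally track and release the device, roughly:

    // Cleanup sketch, illustrative only.
    private CameraDevice mDevice; // set in StateCallback.onOpened(...)

    @Override
    protected void onPause() {
        super.onPause();
        if (mDevice != null) {
            mDevice.close(); // give the camera back when leaving the foreground
            mDevice = null;
        }
    }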
diff --git a/tests/tests/hardware/src/android/hardware/multiprocess/camera/cts/CameraEvictionTest.java b/tests/tests/hardware/src/android/hardware/multiprocess/camera/cts/CameraEvictionTest.java
new file mode 100644
index 0000000..3cf1dc7
--- /dev/null
+++ b/tests/tests/hardware/src/android/hardware/multiprocess/camera/cts/CameraEvictionTest.java
@@ -0,0 +1,494 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.hardware.multiprocess.camera.cts;
+
+import android.app.ActivityManager;
+import android.content.Context;
+import android.content.Intent;
+import android.hardware.Camera;
+import android.hardware.camera2.CameraAccessException;
+import android.hardware.camera2.CameraDevice;
+import android.hardware.camera2.CameraManager;
+import android.hardware.cts.CameraCtsActivity;
+import android.hardware.multiprocess.ErrorLoggingService;
+import android.os.Handler;
+import android.test.ActivityInstrumentationTestCase2;
+import android.util.Log;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+import java.util.Objects;
+import java.util.concurrent.TimeoutException;
+
+import static org.mockito.Mockito.*;
+
+/**
+ * Tests for multi-process camera usage behavior.
+ */
+public class CameraEvictionTest extends ActivityInstrumentationTestCase2<CameraCtsActivity> {
+
+    public static final String TAG = "CameraEvictionTest";
+
+    private static final int OPEN_TIMEOUT = 2000; // Timeout for camera to open (ms).
+    private static final int SETUP_TIMEOUT = 5000; // Remote camera setup timeout (ms).
+    private static final int EVICTION_TIMEOUT = 1000; // Remote camera eviction timeout (ms).
+    private static final int WAIT_TIME = 2000; // Time to wait for process to launch (ms).
+    private static final int UI_TIMEOUT = 10000; // Time to wait for UI event before timeout (ms).
+    ErrorLoggingService.ErrorServiceConnection mErrorServiceConnection;
+
+    private ActivityManager mActivityManager;
+    private Context mContext;
+    private Camera mCamera;
+    private CameraDevice mCameraDevice;
+    private final Object mLock = new Object();
+    private boolean mCompleted = false;
+    private int mProcessPid = -1;
+
+    public CameraEvictionTest() {
+        super(CameraCtsActivity.class);
+    }
+
+    public static class StateCallbackImpl extends CameraDevice.StateCallback {
+        CameraDevice mCameraDevice;
+
+        public StateCallbackImpl() {
+            super();
+        }
+
+        @Override
+        public void onOpened(CameraDevice cameraDevice) {
+            synchronized(this) {
+                mCameraDevice = cameraDevice;
+            }
+            Log.i(TAG, "CameraDevice onOpened called for main CTS test process.");
+        }
+
+        @Override
+        public void onClosed(CameraDevice camera) {
+            super.onClosed(camera);
+            synchronized(this) {
+                mCameraDevice = null;
+            }
+            Log.i(TAG, "CameraDevice onClosed called for main CTS test process.");
+        }
+
+        @Override
+        public void onDisconnected(CameraDevice cameraDevice) {
+            synchronized(this) {
+                mCameraDevice = null;
+            }
+            Log.i(TAG, "CameraDevice onDisconnected called for main CTS test process.");
+
+        }
+
+        @Override
+        public void onError(CameraDevice cameraDevice, int i) {
+            Log.i(TAG, "CameraDevice onError called for main CTS test process with error " +
+                    "code: " + i);
+        }
+
+        public synchronized CameraDevice getCameraDevice() {
+            return mCameraDevice;
+        }
+    }
+
+    @Override
+    protected void setUp() throws Exception {
+        super.setUp();
+
+        mCompleted = false;
+        mContext = getActivity();
+        System.setProperty("dexmaker.dexcache", mContext.getCacheDir().toString());
+        mActivityManager = (ActivityManager) mContext.getSystemService(Context.ACTIVITY_SERVICE);
+        mErrorServiceConnection = new ErrorLoggingService.ErrorServiceConnection(mContext);
+        mErrorServiceConnection.start();
+    }
+
+    @Override
+    protected void tearDown() throws Exception {
+        super.tearDown();
+        if (mProcessPid != -1) {
+            android.os.Process.killProcess(mProcessPid);
+            mProcessPid = -1;
+        }
+        if (mErrorServiceConnection != null) {
+            mErrorServiceConnection.stop();
+            mErrorServiceConnection = null;
+        }
+        if (mCamera != null) {
+            mCamera.release();
+            mCamera = null;
+        }
+        if (mCameraDevice != null) {
+            mCameraDevice.close();
+            mCameraDevice = null;
+        }
+        mContext = null;
+        mActivityManager = null;
+    }
+
+    /**
+     * Test basic eviction scenarios for the Camera1 API.
+     */
+    public void testCamera1ActivityEviction() throws Throwable {
+
+        // Open a camera1 client in the main CTS process's activity
+        final Camera.ErrorCallback mockErrorCb1 = mock(Camera.ErrorCallback.class);
+        final boolean[] skip = {false};
+        runTestOnUiThread(new Runnable() {
+            @Override
+            public void run() {
+                // Open camera
+                mCamera = Camera.open();
+                if (mCamera == null) {
+                    skip[0] = true;
+                } else {
+                    mCamera.setErrorCallback(mockErrorCb1);
+                }
+                notifyFromUI();
+            }
+        });
+        waitForUI();
+
+        if (skip[0]) {
+            Log.i(TAG, "Skipping testCamera1ActivityEviction, device has no cameras.");
+            return;
+        }
+
+        verifyZeroInteractions(mockErrorCb1);
+
+        startRemoteProcess(Camera1Activity.class, "camera1ActivityProcess");
+
+        // Make sure camera was setup correctly in remote activity
+        List<ErrorLoggingService.LogEvent> events = null;
+        try {
+            events = mErrorServiceConnection.getLog(SETUP_TIMEOUT,
+                    TestConstants.EVENT_CAMERA_CONNECT);
+        } finally {
+            if (events != null) assertOnly(TestConstants.EVENT_CAMERA_CONNECT, events);
+        }
+
+        Thread.sleep(WAIT_TIME);
+
+        // Ensure UI thread has a chance to process callbacks.
+        runTestOnUiThread(new Runnable() {
+            @Override
+            public void run() {
+                Log.i("CTS", "Did something on UI thread.");
+                notifyFromUI();
+            }
+        });
+        waitForUI();
+
+        // Make sure we received correct callback in error listener, and nothing else
+        verify(mockErrorCb1, only()).onError(eq(Camera.CAMERA_ERROR_EVICTED), isA(Camera.class));
+        mCamera = null;
+
+        // Try to open the camera again (even though other TOP process holds the camera).
+        final boolean[] pass = {false};
+        runTestOnUiThread(new Runnable() {
+            @Override
+            public void run() {
+                // Open camera
+                try {
+                    mCamera = Camera.open();
+                } catch (RuntimeException e) {
+                    pass[0] = true;
+                }
+                notifyFromUI();
+            }
+        });
+        waitForUI();
+
+        assertTrue("Did not receive exception when opening camera while camera is held by a" +
+                " higher priority client process.", pass[0]);
+
+        // Verify that attempting to open the camera didn't cause anything weird to happen in the
+        // other process.
+        List<ErrorLoggingService.LogEvent> eventList2 = null;
+        boolean timeoutExceptionHit = false;
+        try {
+            eventList2 = mErrorServiceConnection.getLog(EVICTION_TIMEOUT);
+        } catch (TimeoutException e) {
+            timeoutExceptionHit = true;
+        }
+
+        assertNone("Remote camera service received invalid events: ", eventList2);
+        assertTrue("Remote camera service exited early", timeoutExceptionHit);
+        android.os.Process.killProcess(mProcessPid);
+        mProcessPid = -1;
+    }
+
+    /**
+     * Test basic eviction scenarios for the Camera2 API.
+     */
+    public void testBasicCamera2ActivityEviction() throws Throwable {
+        CameraManager manager = (CameraManager) mContext.getSystemService(Context.CAMERA_SERVICE);
+        assertNotNull(manager);
+        String[] cameraIds = manager.getCameraIdList();
+        assertNotEmpty(cameraIds);
+        assertTrue(mContext.getMainLooper() != null);
+
+        // Setup camera manager
+        String chosenCamera = cameraIds[0];
+        Handler cameraHandler = new Handler(mContext.getMainLooper());
+        final CameraManager.AvailabilityCallback mockAvailCb =
+                mock(CameraManager.AvailabilityCallback.class);
+
+        manager.registerAvailabilityCallback(mockAvailCb, cameraHandler);
+
+        Thread.sleep(WAIT_TIME);
+
+        verify(mockAvailCb, times(1)).onCameraAvailable(chosenCamera);
+        verify(mockAvailCb, never()).onCameraUnavailable(chosenCamera);
+
+        // Setup camera device
+        final CameraDevice.StateCallback spyStateCb = spy(new StateCallbackImpl());
+        manager.openCamera(chosenCamera, spyStateCb, cameraHandler);
+
+        verify(spyStateCb, timeout(OPEN_TIMEOUT).times(1)).onOpened(any(CameraDevice.class));
+        verify(spyStateCb, never()).onClosed(any(CameraDevice.class));
+        verify(spyStateCb, never()).onDisconnected(any(CameraDevice.class));
+        verify(spyStateCb, never()).onError(any(CameraDevice.class), anyInt());
+
+        // Open camera from remote process
+        startRemoteProcess(Camera2Activity.class, "camera2ActivityProcess");
+
+        // Verify that the remote camera was opened correctly
+        List<ErrorLoggingService.LogEvent> allEvents = mErrorServiceConnection.getLog(SETUP_TIMEOUT,
+                TestConstants.EVENT_CAMERA_CONNECT);
+        assertNotNull("Camera device not setup in remote process!", allEvents);
+
+        // Filter out relevant events for other camera devices
+        ArrayList<ErrorLoggingService.LogEvent> events = new ArrayList<>();
+        for (ErrorLoggingService.LogEvent e : allEvents) {
+            int eventTag = e.getEvent();
+            if (eventTag == TestConstants.EVENT_CAMERA_UNAVAILABLE ||
+                    eventTag == TestConstants.EVENT_CAMERA_CONNECT ||
+                    eventTag == TestConstants.EVENT_CAMERA_AVAILABLE) {
+                if (!Objects.equals(e.getLogText(), chosenCamera)) {
+                    continue;
+                }
+            }
+            events.add(e);
+        }
+        int[] eventList = new int[events.size()];
+        int eventIdx = 0;
+        for (ErrorLoggingService.LogEvent e : events) {
+            eventList[eventIdx++] = e.getEvent();
+        }
+        String[] actualEvents = TestConstants.convertToStringArray(eventList);
+        String[] expectedEvents = new String[] {TestConstants.EVENT_CAMERA_UNAVAILABLE_STR,
+                TestConstants.EVENT_CAMERA_CONNECT_STR};
+        String[] ignoredEvents = new String[] { TestConstants.EVENT_CAMERA_AVAILABLE_STR,
+                TestConstants.EVENT_CAMERA_UNAVAILABLE_STR };
+        assertOrderedEvents(actualEvents, expectedEvents, ignoredEvents);
+
+        // Verify that the local camera was evicted properly
+        verify(spyStateCb, times(1)).onDisconnected(any(CameraDevice.class));
+        verify(spyStateCb, never()).onClosed(any(CameraDevice.class));
+        verify(spyStateCb, never()).onError(any(CameraDevice.class), anyInt());
+        verify(spyStateCb, times(1)).onOpened(any(CameraDevice.class));
+
+        // Verify that we can no longer open the camera, as it is held by a higher priority process
+        boolean openException = false;
+        try {
+            manager.openCamera(chosenCamera, spyStateCb, cameraHandler);
+        } catch(CameraAccessException e) {
+            assertTrue("Received incorrect camera exception when opening camera: " + e,
+                    e.getReason() == CameraAccessException.CAMERA_IN_USE);
+            openException = true;
+        }
+
+        assertTrue("Didn't receive exception when trying to open camera held by higher priority " +
+                "process.", openException);
+
+        // Verify that attempting to open the camera didn't cause anything weird to happen in the
+        // other process.
+        List<ErrorLoggingService.LogEvent> eventList2 = null;
+        boolean timeoutExceptionHit = false;
+        try {
+            eventList2 = mErrorServiceConnection.getLog(EVICTION_TIMEOUT);
+        } catch (TimeoutException e) {
+            timeoutExceptionHit = true;
+        }
+
+        assertNone("Remote camera service received invalid events: ", eventList2);
+        assertTrue("Remote camera service exited early", timeoutExceptionHit);
+        android.os.Process.killProcess(mProcessPid);
+        mProcessPid = -1;
+    }
+
+    /**
+     * Block until UI thread calls {@link #notifyFromUI()}.
+     * @throws InterruptedException
+     */
+    private void waitForUI() throws InterruptedException {
+        synchronized(mLock) {
+            while (!mCompleted) {
+                mLock.wait();
+            }
+            mCompleted = false;
+        }
+    }
+
+    /**
+     * Wake up any threads waiting in calls to {@link #waitForUI()}.
+     */
+    private void notifyFromUI() {
+        synchronized (mLock) {
+            mCompleted = true;
+            mLock.notifyAll();
+        }
+    }
+
+    /**
+     * Return the PID for the process with the given name in the given list of process info.
+     *
+     * @param processName the name of the process whose PID to return.
+     * @param list a list of {@link ActivityManager.RunningAppProcessInfo} to check.
+     * @return the PID of the given process, or -1 if it was not included in the list.
+     */
+    private static int getPid(String processName,
+                              List<ActivityManager.RunningAppProcessInfo> list) {
+        for (ActivityManager.RunningAppProcessInfo rai : list) {
+            if (processName.equals(rai.processName))
+                return rai.pid;
+        }
+        return -1;
+    }
+
+    /**
+     * Start an activity of the given class running in a remote process with the given name.
+     *
+     * @param klass the class of the {@link android.app.Activity} to start.
+     * @param processName the remote activity name.
+     * @throws InterruptedException
+     */
+    public void startRemoteProcess(java.lang.Class<?> klass, String processName)
+            throws InterruptedException {
+        // Ensure no running activity process with same name
+        String cameraActivityName = mContext.getPackageName() + ":" + processName;
+        List<ActivityManager.RunningAppProcessInfo> list =
+                mActivityManager.getRunningAppProcesses();
+        assertEquals(-1, getPid(cameraActivityName, list));
+
+        // Start activity in a new top foreground process
+        Intent activityIntent = new Intent(mContext, klass);
+        activityIntent.setFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
+        mContext.startActivity(activityIntent);
+        Thread.sleep(WAIT_TIME);
+
+        // Fail if activity isn't running
+        list = mActivityManager.getRunningAppProcesses();
+        mProcessPid = getPid(cameraActivityName, list);
+        assertTrue(-1 != mProcessPid);
+    }
+
+    /**
+     * Assert that there is only one event of the given type in the event list.
+     *
+     * @param event event type to check for.
+     * @param events {@link List} of events.
+     */
+    public static void assertOnly(int event, List<ErrorLoggingService.LogEvent> events) {
+        assertTrue("Remote camera activity never received event: " + event, events != null);
+        for (ErrorLoggingService.LogEvent e : events) {
+            assertFalse("Remote camera activity received invalid event (" + e +
+                    ") while waiting for event: " + event,
+                    e.getEvent() < 0 || e.getEvent() != event);
+        }
+        assertTrue("Remote camera activity never received event: " + event, events.size() >= 1);
+        assertTrue("Remote camera activity received too many " + event + " events, received: " +
+                events.size(), events.size() == 1);
+    }
+
+    /**
+     * Assert there were no logEvents in the given list.
+     *
+     * @param msg message to show on assertion failure.
+     * @param events {@link List} of events.
+     */
+    public static void assertNone(String msg, List<ErrorLoggingService.LogEvent> events) {
+        if (events == null) return;
+        StringBuilder builder = new StringBuilder(msg + "\n");
+        for (ErrorLoggingService.LogEvent e : events) {
+            builder.append(e).append("\n");
+        }
+        assertTrue(builder.toString(), events.isEmpty());
+    }
+
+    /**
+     * Assert that the given array is neither null nor empty.
+     *
+     * @param array array to check.
+     */
+    public static <T> void assertNotEmpty(T[] array) {
+        assertNotNull(array);
+        assertFalse("Array is empty: " + Arrays.toString(array), array.length == 0);
+    }
+
+    /**
+     * Given an 'actual' array of objects, check that the objects given in the 'expected'
+     * array are also present in the 'actual' array in the same order.  Objects in the 'actual'
+     * array that do not match the next expected object are skipped if they appear in the
+     * 'ignored' array; otherwise this assertion fails.
+     *
+     * @param actual the ordered array of objects to check.
+     * @param expected the ordered array of expected objects.
+     * @param ignored the array of objects that will be ignored if present in actual,
+     *                but not in expected (or are out of order).
+     * @param <T> the type of the objects being compared.
+     */
+    public static <T> void assertOrderedEvents(T[] actual, T[] expected, T[] ignored) {
+        assertNotNull(actual);
+        assertNotNull(expected);
+        assertNotNull(ignored);
+
+        int expIndex = 0;
+        int index = 0;
+        for (T i : actual) {
+            // If explicitly expected, move to next
+            if (expIndex < expected.length && Objects.equals(i, expected[expIndex])) {
+                expIndex++;
+                continue;
+            }
+
+            // Otherwise, check whether this event is in the ignored set
+            boolean canIgnore = false;
+            for (T j : ignored) {
+                if (Objects.equals(i, j)) {
+                    canIgnore = true;
+                    break;
+                }
+
+            }
+
+            // Fail if not ignored.
+            assertTrue("Event at index " + index + " in actual array " +
+                    Arrays.toString(actual) + " was unexpected: expected array was " +
+                    Arrays.toString(expected) + ", ignored array was: " +
+                    Arrays.toString(ignored), canIgnore);
+            index++;
+        }
+        assertTrue("Only had " + expIndex + " of " + expected.length +
+                " expected objects in array " + Arrays.toString(actual) + ", expected was " +
+                Arrays.toString(expected), expIndex == expected.length);
+    }
+}
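Note (illustrative sketch, not part of this change): a worked example of the assertOrderedEvents contract, using invented string arrays; only the helper itself comes from this patch. Expected entries must appear in order, and anything else in 'actual' must be listed in 'ignored'.

    // Passes: "connect" then "evicted" appear in order; the stray "available"
    // entries are declared ignorable, so they are skipped.
    CameraEvictionTest.assertOrderedEvents(
            new String[] {"available", "connect", "available", "evicted"},
            new String[] {"connect", "evicted"},
            new String[] {"available"});

    // Fails: "error" is neither the next expected entry nor ignorable.
    // CameraEvictionTest.assertOrderedEvents(
    //         new String[] {"connect", "error", "evicted"},
    //         new String[] {"connect", "evicted"},
    //         new String[] {"available"});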
diff --git a/tests/tests/hardware/src/android/hardware/multiprocess/camera/cts/TestConstants.java b/tests/tests/hardware/src/android/hardware/multiprocess/camera/cts/TestConstants.java
new file mode 100644
index 0000000..2805e02
--- /dev/null
+++ b/tests/tests/hardware/src/android/hardware/multiprocess/camera/cts/TestConstants.java
@@ -0,0 +1,75 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package android.hardware.multiprocess.camera.cts;
+
+/**
+ * Constants used throughout the multi-process unit tests.
+ */
+public class TestConstants {
+
+    public static final int EVENT_CAMERA_ERROR = -1;
+    public static final int EVENT_CAMERA_CONNECT = 1;
+    public static final int EVENT_CAMERA_EVICTED = 2;
+    public static final int EVENT_CAMERA_AVAILABLE = 3;
+    public static final int EVENT_CAMERA_UNAVAILABLE = 4;
+
+    public static final String EVENT_CAMERA_ERROR_STR = "error";
+    public static final String EVENT_CAMERA_CONNECT_STR = "connect";
+    public static final String EVENT_CAMERA_EVICTED_STR = "evicted";
+    public static final String EVENT_CAMERA_AVAILABLE_STR = "available";
+    public static final String EVENT_CAMERA_UNAVAILABLE_STR = "unavailable";
+
+    public static final String EVENT_CAMERA_UNKNOWN_STR = "unknown";
+
+    /**
+     * Convert the given error code to a string.
+     *
+     * @param err error code from {@link TestConstants}.
+     * @return string for this error code.
+     */
+    public static String errToStr(int err) {
+        switch(err) {
+            case EVENT_CAMERA_ERROR:
+                return EVENT_CAMERA_ERROR_STR;
+            case EVENT_CAMERA_CONNECT:
+                return EVENT_CAMERA_CONNECT_STR;
+            case EVENT_CAMERA_EVICTED:
+                return EVENT_CAMERA_EVICTED_STR;
+            case EVENT_CAMERA_AVAILABLE:
+                return EVENT_CAMERA_AVAILABLE_STR;
+            case EVENT_CAMERA_UNAVAILABLE:
+                return EVENT_CAMERA_UNAVAILABLE_STR;
+            default:
+                return EVENT_CAMERA_UNKNOWN_STR + " " + err;
+        }
+    }
+
+    /**
+     * Convert the given array of error codes to an array of strings.
+     *
+     * @param err array of error codes from {@link TestConstants}.
+     * @return string array for the given error codes.
+     */
+    public static String[] convertToStringArray(int[] err) {
+        if (err == null) return null;
+        String[] ret = new String[err.length];
+        for (int i = 0; i < err.length; i++) {
+            ret[i] = errToStr(err[i]);
+        }
+        return ret;
+    }
+
+}
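Note (illustrative sketch, values invented): these helpers are what CameraEvictionTest uses to turn raw event IDs into the readable strings compared by assertOrderedEvents.

    int[] raw = {TestConstants.EVENT_CAMERA_UNAVAILABLE, TestConstants.EVENT_CAMERA_CONNECT};
    String[] readable = TestConstants.convertToStringArray(raw);
    // readable is {"unavailable", "connect"}; unrecognized codes map to "unknown <code>":
    String unknown = TestConstants.errToStr(42); // "unknown 42"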
diff --git a/tests/tests/keystore/src/android/keystore/cts/KeyPairGeneratorSpecTest.java b/tests/tests/keystore/src/android/keystore/cts/KeyPairGeneratorSpecTest.java
index a923844..33c8955 100644
--- a/tests/tests/keystore/src/android/keystore/cts/KeyPairGeneratorSpecTest.java
+++ b/tests/tests/keystore/src/android/keystore/cts/KeyPairGeneratorSpecTest.java
@@ -25,11 +25,6 @@
 import javax.security.auth.x500.X500Principal;
 
 public class KeyPairGeneratorSpecTest extends AndroidTestCase {
-    private static final X500Principal DEFAULT_CERT_SUBJECT = new X500Principal("CN=fake");
-    private static final BigInteger DEFAULT_CERT_SERIAL_NUMBER = new BigInteger("1");
-    private static final Date DEFAULT_CERT_NOT_BEFORE = new Date(0L); // Jan 1 1980
-    private static final Date DEFAULT_CERT_NOT_AFTER = new Date(2461449600000L); // Jan 1 2048
-
     private static final String TEST_ALIAS_1 = "test1";
 
     private static final X500Principal TEST_DN_1 = new X500Principal("CN=test1");
@@ -110,44 +105,56 @@
         }
     }
 
-    public void testBuilder_MissingSubjectDN_Success() throws Exception {
-        KeyPairGeneratorSpec spec = new KeyPairGeneratorSpec.Builder(getContext())
-                .setAlias(TEST_ALIAS_1)
-                .setSerialNumber(SERIAL_1)
-                .setStartDate(NOW)
-                .setEndDate(NOW_PLUS_10_YEARS)
-                .build();
-        assertEquals(DEFAULT_CERT_SUBJECT, spec.getSubjectDN());
+    public void testBuilder_MissingSubjectDN_Failure() throws Exception {
+        try {
+            new KeyPairGeneratorSpec.Builder(getContext())
+                    .setAlias(TEST_ALIAS_1)
+                    .setSerialNumber(SERIAL_1)
+                    .setStartDate(NOW)
+                    .setEndDate(NOW_PLUS_10_YEARS)
+                    .build();
+            fail("Should throw IllegalArgumentException when subject is missing");
+        } catch (IllegalArgumentException expected) {
+        }
     }
 
-    public void testBuilder_MissingSerialNumber_Success() throws Exception {
-        KeyPairGeneratorSpec spec = new KeyPairGeneratorSpec.Builder(getContext())
-                .setAlias(TEST_ALIAS_1)
-                .setSubject(TEST_DN_1)
-                .setStartDate(NOW)
-                .setEndDate(NOW_PLUS_10_YEARS)
-                .build();
-        assertEquals(DEFAULT_CERT_SERIAL_NUMBER, spec.getSerialNumber());
+    public void testBuilder_MissingSerialNumber_Failure() throws Exception {
+        try {
+            new KeyPairGeneratorSpec.Builder(getContext())
+                    .setAlias(TEST_ALIAS_1)
+                    .setSubject(TEST_DN_1)
+                    .setStartDate(NOW)
+                    .setEndDate(NOW_PLUS_10_YEARS)
+                    .build();
+            fail("Should throw IllegalArgumentException when serialNumber is missing");
+        } catch (IllegalArgumentException expected) {
+        }
     }
 
-    public void testBuilder_MissingStartDate_Success() throws Exception {
-        KeyPairGeneratorSpec spec = new KeyPairGeneratorSpec.Builder(getContext())
-                .setAlias(TEST_ALIAS_1)
-                .setSubject(TEST_DN_1)
-                .setSerialNumber(SERIAL_1)
-                .setEndDate(NOW_PLUS_10_YEARS)
-                .build();
-        assertEquals(DEFAULT_CERT_NOT_BEFORE, spec.getStartDate());
+    public void testBuilder_MissingStartDate_Failure() throws Exception {
+        try {
+            new KeyPairGeneratorSpec.Builder(getContext())
+                    .setAlias(TEST_ALIAS_1)
+                    .setSubject(TEST_DN_1)
+                    .setSerialNumber(SERIAL_1)
+                    .setEndDate(NOW_PLUS_10_YEARS)
+                    .build();
+            fail("Should throw IllegalArgumentException when startDate is missing");
+        } catch (IllegalArgumentException expected) {
+        }
     }
 
-    public void testBuilder_MissingEndDate_Success() throws Exception {
-        KeyPairGeneratorSpec spec = new KeyPairGeneratorSpec.Builder(getContext())
-                .setAlias(TEST_ALIAS_1)
-                .setSubject(TEST_DN_1)
-                .setSerialNumber(SERIAL_1)
-                .setStartDate(NOW)
-                .build();
-        assertEquals(DEFAULT_CERT_NOT_AFTER, spec.getEndDate());
+    public void testBuilder_MissingEndDate_Failure() throws Exception {
+        try {
+            new KeyPairGeneratorSpec.Builder(getContext())
+                    .setAlias(TEST_ALIAS_1)
+                    .setSubject(TEST_DN_1)
+                    .setSerialNumber(SERIAL_1)
+                    .setStartDate(NOW)
+                    .build();
+            fail("Should throw IllegalArgumentException when endDate is missing");
+        } catch (IllegalArgumentException expected) {
+        }
     }
 
     public void testBuilder_EndBeforeStart_Failure() throws Exception {
diff --git a/tests/tests/libcorelegacy22/Android.mk b/tests/tests/libcorelegacy22/Android.mk
new file mode 100644
index 0000000..fb3c503
--- /dev/null
+++ b/tests/tests/libcorelegacy22/Android.mk
@@ -0,0 +1,32 @@
+# Copyright (C) 2015 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+LOCAL_PATH:= $(call my-dir)
+
+include $(CLEAR_VARS)
+
+# don't include this package in any target
+LOCAL_MODULE_TAGS := optional
+# and when built explicitly put it in the data partition
+LOCAL_MODULE_PATH := $(TARGET_OUT_DATA_APPS)
+
+LOCAL_STATIC_JAVA_LIBRARIES := ctstestrunner
+
+LOCAL_SRC_FILES := $(call all-java-files-under, src)
+
+LOCAL_PACKAGE_NAME := CtsLibcoreLegacy22TestCases
+
+LOCAL_SDK_VERSION := 22
+
+include $(BUILD_CTS_PACKAGE)
diff --git a/tests/tests/libcorelegacy22/AndroidManifest.xml b/tests/tests/libcorelegacy22/AndroidManifest.xml
new file mode 100644
index 0000000..4ff9ec2
--- /dev/null
+++ b/tests/tests/libcorelegacy22/AndroidManifest.xml
@@ -0,0 +1,35 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!--
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ -->
+
+<manifest xmlns:android="http://schemas.android.com/apk/res/android"
+    package="com.android.cts.libcorelegacy22">
+
+    <uses-permission android:name="android.permission.DISABLE_KEYGUARD" />
+    <uses-permission android:name="android.permission.READ_LOGS" />
+    <application>
+        <uses-library android:name="android.test.runner" />
+    </application>
+
+    <instrumentation android:name="android.support.test.runner.AndroidJUnitRunner"
+                     android:targetPackage="com.android.cts.libcorelegacy22"
+                     android:label="CTS tests of android APIs last available in API 22">
+        <meta-data android:name="listener"
+            android:value="com.android.cts.runner.CtsTestRunListener" />
+    </instrumentation>
+
+</manifest>
+
diff --git a/tests/tests/util/src/android/util/cts/FloatMathTest.java b/tests/tests/libcorelegacy22/src/android/util/cts/FloatMathTest.java
similarity index 62%
rename from tests/tests/util/src/android/util/cts/FloatMathTest.java
rename to tests/tests/libcorelegacy22/src/android/util/cts/FloatMathTest.java
index 4d0b572..6b775fc 100644
--- a/tests/tests/util/src/android/util/cts/FloatMathTest.java
+++ b/tests/tests/libcorelegacy22/src/android/util/cts/FloatMathTest.java
@@ -19,27 +19,44 @@
 import android.util.FloatMath;
 
 public class FloatMathTest extends TestCase {
-    public void testFloatMathMethods() {
-        // ceil
-        assertEquals(8.0f, FloatMath.ceil(7.2f));
-        assertEquals(-6.0f, FloatMath.ceil(-6.3f));
 
-        // floor
+    public void testSqrt() {
+        assertEquals(5.0f, FloatMath.sqrt(25));
+        assertEquals(7, FloatMath.sqrt(49), 0);
+        assertEquals(10, FloatMath.sqrt(100), 0);
+        assertEquals(0, FloatMath.sqrt(0), 0);
+        assertEquals(1, FloatMath.sqrt(1), 0);
+    }
+
+    public void testFloor() {
+        assertEquals(78, FloatMath.floor(78.89f), 0);
+        assertEquals(-79, FloatMath.floor(-78.89f), 0);
         assertEquals(7.0f, FloatMath.floor(7.2f));
         assertEquals(-7.0f, FloatMath.floor(-6.3f));
+    }
 
-        // sin
+    public void testCeil() {
+        assertEquals(79, FloatMath.ceil(78.89f), 0);
+        assertEquals(-78, FloatMath.ceil(-78.89f), 0);
+        assertEquals(8.0f, FloatMath.ceil(7.2f));
+        assertEquals(-6.0f, FloatMath.ceil(-6.3f));
+    }
+
+    public void testCos() {
+        assertEquals(1.0f, FloatMath.cos(0), 0);
+        assertEquals(0.5403023058681398f, FloatMath.cos(1), 0);
+        assertEquals(0.964966f, FloatMath.cos(50));
+        assertEquals(0.69925081f, FloatMath.cos(150));
+        assertEquals(0.964966f, FloatMath.cos(-50));
+    }
+
+    public void testSin() {
+        assertEquals(0.0, FloatMath.sin(0), 0);
+        assertEquals(0.8414709848078965f, FloatMath.sin(1), 0);
         assertEquals(-0.26237485f, FloatMath.sin(50));
         assertEquals(-0.71487643f, FloatMath.sin(150));
         assertEquals(0.26237485f, FloatMath.sin(-50));
 
-        // cos
-        assertEquals(0.964966f, FloatMath.cos(50));
-        assertEquals(0.69925081f, FloatMath.cos(150));
-        assertEquals(0.964966f, FloatMath.cos(-50));
-
-        // sqrt
-        assertEquals(5.0f, FloatMath.sqrt(25));
     }
-
 }
+
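Note (hedged, not stated in the patch itself): android.util.FloatMath is only available through API 22, which is why this test now builds with LOCAL_SDK_VERSION := 22 in the new libcorelegacy22 module. For code that must run on newer API levels, the usual replacement is java.lang.Math with an explicit cast, roughly:

    // Math-based equivalents of the FloatMath calls exercised above (sketch).
    float sqrt  = (float) Math.sqrt(25);       // FloatMath.sqrt(25)
    float floor = (float) Math.floor(78.89f);  // FloatMath.floor(78.89f)
    float ceil  = (float) Math.ceil(78.89f);   // FloatMath.ceil(78.89f)
    float sin   = (float) Math.sin(50);        // FloatMath.sin(50)
    float cos   = (float) Math.cos(50);        // FloatMath.cos(50)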
diff --git a/tests/tests/media/assets/fileSequence0.ts b/tests/tests/media/assets/fileSequence0.ts
new file mode 100644
index 0000000..48f2bcd
--- /dev/null
+++ b/tests/tests/media/assets/fileSequence0.ts
Binary files differ
diff --git a/tests/tests/media/assets/fileSequence1.ts b/tests/tests/media/assets/fileSequence1.ts
new file mode 100644
index 0000000..737fbd0
--- /dev/null
+++ b/tests/tests/media/assets/fileSequence1.ts
Binary files differ
diff --git a/tests/tests/media/assets/prog_index.m3u8 b/tests/tests/media/assets/prog_index.m3u8
new file mode 100644
index 0000000..88f99d3
--- /dev/null
+++ b/tests/tests/media/assets/prog_index.m3u8
@@ -0,0 +1,10 @@
+#EXTM3U
+#EXT-X-TARGETDURATION:10
+#EXT-X-VERSION:3
+#EXT-X-MEDIA-SEQUENCE:0
+#EXT-X-PLAYLIST-TYPE:VOD
+#EXTINF:9.90000,
+fileSequence0.ts
+#EXTINF:10.00000,
+fileSequence1.ts
+#EXT-X-ENDLIST
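Note (illustrative sketch): these three assets form a minimal HLS VOD stream, a playlist referencing two roughly 10-second TS segments. A hedged example of exercising them with MediaPlayer, assuming the assets are served by a local HTTP test server whose base URL is serverUrl; that server and variable are assumptions, not part of this patch.

    MediaPlayer player = new MediaPlayer();
    try {
        player.setDataSource(serverUrl + "/prog_index.m3u8"); // hypothetical local HTTP server
        player.setOnPreparedListener(new MediaPlayer.OnPreparedListener() {
            @Override
            public void onPrepared(MediaPlayer mp) {
                mp.start(); // plays fileSequence0.ts then fileSequence1.ts per the playlist
            }
        });
        player.prepareAsync();
    } catch (IOException e) {
        fail("Could not set HLS data source: " + e);
    }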
diff --git a/tests/tests/media/src/android/media/cts/AudioRecordTest.java b/tests/tests/media/src/android/media/cts/AudioRecordTest.java
index 459e575..d390c14 100644
--- a/tests/tests/media/src/android/media/cts/AudioRecordTest.java
+++ b/tests/tests/media/src/android/media/cts/AudioRecordTest.java
@@ -22,6 +22,7 @@
 import android.content.pm.PackageManager;
 import android.cts.util.CtsAndroidTestCase;
 import android.media.AudioFormat;
+import android.media.AudioManager;
 import android.media.AudioRecord;
 import android.media.AudioRecord.OnRecordPositionUpdateListener;
 import android.media.MediaRecorder;
@@ -29,6 +30,7 @@
 import android.os.Looper;
 import android.os.Message;
 import android.util.Log;
+
 import com.android.cts.util.ReportLog;
 import com.android.cts.util.ResultType;
 import com.android.cts.util.ResultUnit;
@@ -314,6 +316,104 @@
                 AudioFormat.ENCODING_PCM_16BIT);
     }
 
+    // Test AudioRecord.Builder to verify the observed configuration of an AudioRecord built with
+    // an empty Builder matches the documentation / expected values
+    public void testAudioRecordBuilderDefault() throws Exception {
+        // constants for test
+        final String TEST_NAME = "testAudioRecordBuilderDefault";
+        // expected values below match the AudioRecord.Builder documentation
+        final int expectedCapturePreset = MediaRecorder.AudioSource.DEFAULT;
+        final String rateStr = new AudioManager(getContext())
+                .getProperty(AudioManager.PROPERTY_OUTPUT_SAMPLE_RATE);
+        final int expectedRate = Integer.valueOf(rateStr).intValue();
+        final int expectedChannel = AudioFormat.CHANNEL_IN_MONO;
+        final int expectedEncoding = AudioFormat.ENCODING_PCM_16BIT;
+        final int expectedState = AudioRecord.STATE_INITIALIZED;
+        // use builder with default values
+        final AudioRecord rec = new AudioRecord.Builder().build();
+        // save results
+        final int observedRate = rec.getSampleRate();
+        final int observedSource = rec.getAudioSource();
+        final int observedChannel = rec.getChannelConfiguration();
+        final int observedEncoding = rec.getAudioFormat();
+        final int observedState = rec.getState();
+        // release recorder before the test exits (either successfully or with an exception)
+        rec.release();
+        // compare results
+        assertEquals(TEST_NAME + ": default capture preset", expectedCapturePreset, observedSource);
+        assertEquals(TEST_NAME + ": default rate", expectedRate, observedRate);
+        assertEquals(TEST_NAME + ": default channel config", expectedChannel, observedChannel);
+        assertEquals(TEST_NAME + ": default encoding", expectedEncoding, observedEncoding);
+        assertEquals(TEST_NAME + ": state", expectedState, observedState);
+    }
+
+    // Test AudioRecord.Builder to verify the observed configuration of an AudioRecord built with
+    // an incomplete AudioFormat matches the documentation / expected values
+    public void testAudioRecordBuilderPartialFormat() throws Exception {
+        // constants for test
+        final String TEST_NAME = "testAudioRecordBuilderPartialFormat";
+        final int expectedRate = 16000;
+        final int expectedState = AudioRecord.STATE_INITIALIZED;
+        // expected values below match the AudioRecord.Builder documentation
+        final int expectedChannel = AudioFormat.CHANNEL_IN_MONO;
+        final int expectedEncoding = AudioFormat.ENCODING_PCM_16BIT;
+        // use builder with a partial audio format
+        final AudioRecord rec = new AudioRecord.Builder()
+                .setAudioFormat(new AudioFormat.Builder().setSampleRate(expectedRate).build())
+                .build();
+        // save results
+        final int observedRate = rec.getSampleRate();
+        final int observedChannel = rec.getChannelConfiguration();
+        final int observedEncoding = rec.getAudioFormat();
+        final int observedState = rec.getState();
+        // release recorder before the test exits (either successfully or with an exception)
+        rec.release();
+        // compare results
+        assertEquals(TEST_NAME + ": configured rate", expectedRate, observedRate);
+        assertEquals(TEST_NAME + ": default channel config", expectedChannel, observedChannel);
+        assertEquals(TEST_NAME + ": default encoding", expectedEncoding, observedEncoding);
+        assertEquals(TEST_NAME + ": state", expectedState, observedState);
+    }
+
+    // Test AudioRecord.Builder to verify the observed configuration of an AudioRecord matches
+    // the parameters used in the builder
+    public void testAudioRecordBuilderParams() throws Exception {
+        // constants for test
+        final String TEST_NAME = "testAudioRecordBuilderParams";
+        final int expectedRate = 8000;
+        final int expectedChannel = AudioFormat.CHANNEL_IN_MONO;
+        final int expectedChannelCount = 1;
+        final int expectedEncoding = AudioFormat.ENCODING_PCM_16BIT;
+        final int expectedSource = MediaRecorder.AudioSource.VOICE_COMMUNICATION;
+        final int expectedState = AudioRecord.STATE_INITIALIZED;
+        // use builder with expected parameters
+        final AudioRecord rec = new AudioRecord.Builder()
+                .setAudioFormat(new AudioFormat.Builder()
+                        .setSampleRate(expectedRate)
+                        .setChannelMask(expectedChannel)
+                        .setEncoding(expectedEncoding)
+                        .build())
+                .setAudioSource(expectedSource)
+                .build();
+        // save results
+        final int observedRate = rec.getSampleRate();
+        final int observedChannel = rec.getChannelConfiguration();
+        final int observedChannelCount = rec.getChannelCount();
+        final int observedEncoding = rec.getAudioFormat();
+        final int observedSource = rec.getAudioSource();
+        final int observedState = rec.getState();
+        // release recorder before the test exits (either successfully or with an exception)
+        rec.release();
+        // compare results
+        assertEquals(TEST_NAME + ": configured rate", expectedRate, observedRate);
+        assertEquals(TEST_NAME + ": configured channel config", expectedChannel, observedChannel);
+        assertEquals(TEST_NAME + ": configured encoding", expectedEncoding, observedEncoding);
+        assertEquals(TEST_NAME + ": implicit channel count", expectedChannelCount,
+                observedChannelCount);
+        assertEquals(TEST_NAME + ": configured source", expectedSource, observedSource);
+        assertEquals(TEST_NAME + ": state", expectedState, observedState);
+    }
+
     private AudioRecord createAudioRecord(
             int audioSource, int sampleRateInHz,
             int channelConfig, int audioFormat, int bufferSizeInBytes,
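Note (illustrative sketch, not part of this change): as a follow-on to the Builder tests above, once a builder-constructed AudioRecord reports STATE_INITIALIZED, a capture loop is typically along these lines. The 1024-sample buffer is an arbitrary choice for the example.

    final AudioRecord rec = new AudioRecord.Builder().build();
    final short[] buffer = new short[1024];         // arbitrary chunk size for the sketch
    rec.startRecording();
    int read = rec.read(buffer, 0, buffer.length);  // blocking read of 16-bit PCM samples
    rec.stop();
    rec.release();                                  // always release the native resources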
diff --git a/tests/tests/media/src/android/media/cts/AudioTrackTest.java b/tests/tests/media/src/android/media/cts/AudioTrackTest.java
index 0e12f70..403d714 100644
--- a/tests/tests/media/src/android/media/cts/AudioTrackTest.java
+++ b/tests/tests/media/src/android/media/cts/AudioTrackTest.java
@@ -23,8 +23,9 @@
 import android.media.AudioManager;
 import android.media.AudioTimestamp;
 import android.media.AudioTrack;
-import android.media.PlaybackSettings;
+import android.media.PlaybackParams;
 import android.util.Log;
+
 import com.android.cts.util.ReportLog;
 import com.android.cts.util.ResultType;
 import com.android.cts.util.ResultUnit;
@@ -263,6 +264,116 @@
     }
 
     // -----------------------------------------------------------------
+    // AudioTrack construction with Builder
+    // ----------------------------------
+
+    // Test case 1: build AudioTrack with default parameters, test documented default params
+    public void testBuilderDefault() throws Exception {
+        // constants for test
+        final String TEST_NAME = "testBuilderDefault";
+        final int expectedDefaultEncoding = AudioFormat.ENCODING_PCM_16BIT;
+        final int expectedDefaultRate =
+                AudioTrack.getNativeOutputSampleRate(AudioManager.STREAM_MUSIC);
+        final int expectedDefaultChannels = AudioFormat.CHANNEL_OUT_STEREO;
+        // use Builder
+        final int buffSizeInBytes = AudioTrack.getMinBufferSize(
+                expectedDefaultRate, expectedDefaultChannels, expectedDefaultEncoding);
+        final AudioTrack track = new AudioTrack.Builder()
+                .setBufferSizeInBytes(buffSizeInBytes)
+                .build();
+        // save results
+        final int observedState = track.getState();
+        final int observedFormat = track.getAudioFormat();
+        final int observedChannelConf = track.getChannelConfiguration();
+        final int observedRate = track.getSampleRate();
+        // release track before the test exits (either successfully or with an exception)
+        track.release();
+        // compare results
+        assertEquals(TEST_NAME + ": Track initialized", AudioTrack.STATE_INITIALIZED,
+                observedState);
+        assertEquals(TEST_NAME + ": Default track encoding", expectedDefaultEncoding,
+                observedFormat);
+        assertEquals(TEST_NAME + ": Default track channels", expectedDefaultChannels,
+                observedChannelConf);
+        assertEquals(TEST_NAME + ": Default track sample rate", expectedDefaultRate,
+                observedRate);
+    }
+
+    // Test case 2: build AudioTrack with AudioFormat, test it's used
+    public void testBuilderFormat() throws Exception {
+        // constants for test
+        final String TEST_NAME = "testBuilderFormat";
+        final int TEST_RATE = 32000;
+        final int TEST_CHANNELS = AudioFormat.CHANNEL_OUT_STEREO;
+        // use Builder
+        final int buffSizeInBytes = AudioTrack.getMinBufferSize(
+                TEST_RATE, TEST_CHANNELS, AudioFormat.ENCODING_PCM_16BIT);
+        final AudioTrack track = new AudioTrack.Builder()
+                .setAudioAttributes(new AudioAttributes.Builder().build())
+                .setBufferSizeInBytes(buffSizeInBytes)
+                .setAudioFormat(new AudioFormat.Builder()
+                        .setChannelMask(TEST_CHANNELS).setSampleRate(TEST_RATE).build())
+                .build();
+        // save results
+        final int observedState = track.getState();
+        final int observedChannelConf = track.getChannelConfiguration();
+        final int observedRate = track.getSampleRate();
+        // release track before the test exits (either successfully or with an exception)
+        track.release();
+        // compare results
+        assertEquals(TEST_NAME + ": Track initialized", AudioTrack.STATE_INITIALIZED,
+                observedState);
+        assertEquals(TEST_NAME + ": Track channels", TEST_CHANNELS, observedChannelConf);
+        assertEquals(TEST_NAME + ": Track sample rate", TEST_RATE, observedRate);
+    }
+
+    // Test case 3: build AudioTrack with session ID, test it's used
+    public void testBuilderSession() throws Exception {
+        // constants for test
+        final String TEST_NAME = "testBuilderSession";
+        // generate a session ID
+        final int expectedSessionId = new AudioManager(getContext()).generateAudioSessionId();
+        // use builder
+        final AudioTrack track = new AudioTrack.Builder()
+                .setSessionId(expectedSessionId)
+                .build();
+        // save results
+        final int observedSessionId = track.getAudioSessionId();
+        // release track before the test exits (either successfully or with an exception)
+        track.release();
+        // compare results
+        assertEquals(TEST_NAME + ": Assigned track session ID", expectedSessionId,
+                observedSessionId);
+    }
+
+    // Test case 4: build AudioTrack with AudioAttributes built from stream type, test it's used
+    public void testBuilderAttributesStream() throws Exception {
+        // constants for test
+        final String TEST_NAME = "testBuilderAttributesStream";
+        //     use a stream type documented in AudioAttributes.Builder.setLegacyStreamType(int)
+        final int expectedStreamType = AudioManager.STREAM_ALARM;
+        final int expectedContentType = AudioAttributes.CONTENT_TYPE_SPEECH;
+        final AudioAttributes aa = new AudioAttributes.Builder()
+                .setLegacyStreamType(expectedStreamType)
+                .setContentType(expectedContentType)
+                .build();
+        // use builder
+        final AudioTrack track = new AudioTrack.Builder()
+                .setAudioAttributes(aa)
+                .build();
+        // save results
+        final int observedStreamType = track.getStreamType();
+        // release track before the test exits (either successfully or with an exception)
+        track.release();
+        // compare results
+        assertEquals(TEST_NAME + ": track stream type", expectedStreamType, observedStreamType);
+        //    also test content type was preserved in the attributes even though they
+        //     were first configured with a legacy stream type
+        assertEquals(TEST_NAME + ": attributes content type", expectedContentType,
+                aa.getContentType());
+    }
+
+    // -----------------------------------------------------------------
     // Playback head position
     // ----------------------------------
 
@@ -2062,7 +2173,7 @@
             final float speedInc = (speedEnd - speedStart) / testSteps;
             final float pitchInc = (pitchEnd - pitchStart) / testSteps;
 
-            PlaybackSettings playbackSettings = new PlaybackSettings()
+            PlaybackParams playbackParams = new PlaybackParams()
                     .setPitch(pitchStart)
                     .setSpeed(speedStart)
                     .allowDefaults();
@@ -2076,23 +2187,23 @@
             int anticipatedPosition = track.getPlaybackHeadPosition();
             for (int j = 0; j < testSteps; ++j) {
                 // set playback settings
-                final float pitch = playbackSettings.getPitch();
-                final float speed = playbackSettings.getSpeed();
+                final float pitch = playbackParams.getPitch();
+                final float speed = playbackParams.getSpeed();
 
-                track.setPlaybackSettings(playbackSettings);
+                track.setPlaybackParams(playbackParams);
 
                 // verify that settings have changed
-                PlaybackSettings checkSettings = track.getPlaybackSettings();
-                assertEquals(TAG, pitch, checkSettings.getPitch());
-                assertEquals(TAG, speed, checkSettings.getSpeed());
+                PlaybackParams checkParams = track.getPlaybackParams();
+                assertEquals(TAG, pitch, checkParams.getPitch());
+                assertEquals(TAG, speed, checkParams.getSpeed());
 
                 // sleep for playback
                 Thread.sleep(TEST_DELTA_MS);
                 // Log.d(TAG, "position[" + j + "] " + track.getPlaybackHeadPosition());
                 anticipatedPosition +=
-                        playbackSettings.getSpeed() * TEST_DELTA_MS * TEST_SR / 1000;
-                playbackSettings.setPitch(playbackSettings.getPitch() + pitchInc);
-                playbackSettings.setSpeed(playbackSettings.getSpeed() + speedInc);
+                        playbackParams.getSpeed() * TEST_DELTA_MS * TEST_SR / 1000;
+                playbackParams.setPitch(playbackParams.getPitch() + pitchInc);
+                playbackParams.setSpeed(playbackParams.getSpeed() + speedInc);
             }
             final int endPosition = track.getPlaybackHeadPosition();
             final int tolerance100MsInFrames = 100 * TEST_SR / 1000;
diff --git a/tests/tests/media/src/android/media/cts/EncodeDecodeTest.java b/tests/tests/media/src/android/media/cts/EncodeDecodeTest.java
index 5152d98..40934f5 100644
--- a/tests/tests/media/src/android/media/cts/EncodeDecodeTest.java
+++ b/tests/tests/media/src/android/media/cts/EncodeDecodeTest.java
@@ -458,7 +458,7 @@
             encoder.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
             if (inSurf != null) {
                 Log.d(TAG, "using persistent surface");
-                encoder.usePersistentInputSurface(inputSurface.getSurface());
+                encoder.setInputSurface(inputSurface.getSurface());
                 inputSurface.updateSize(mWidth, mHeight);
             } else {
                 inputSurface = new InputSurface(encoder.createInputSurface());
diff --git a/tests/tests/media/src/android/media/cts/MediaCodecCapabilitiesTest.java b/tests/tests/media/src/android/media/cts/MediaCodecCapabilitiesTest.java
index 159d13f..daf55a7 100644
--- a/tests/tests/media/src/android/media/cts/MediaCodecCapabilitiesTest.java
+++ b/tests/tests/media/src/android/media/cts/MediaCodecCapabilitiesTest.java
@@ -39,6 +39,7 @@
 import java.util.HashSet;
 import java.util.Set;
 import java.util.Arrays;
+import java.util.Vector;
 
 /**
  * Basic sanity test of data returned by MediaCodecCapabilities.
@@ -528,4 +529,84 @@
             MediaUtils.skipTest("no non-tunneled/non-secure video decoders found");
         }
     }
+
+    private static MediaFormat createMinFormat(String mime, VideoCapabilities vcaps, int color) {
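+        // Build the smallest supported video format (minimum resolution and bitrate) so that
+        // each codec instance consumes as few resources as possible.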
+        int minWidth = vcaps.getSupportedWidths().getLower();
+        int minHeight = vcaps.getSupportedHeightsFor(minWidth).getLower();
+        int minBitrate = vcaps.getBitrateRange().getLower();
+
+        MediaFormat format = MediaFormat.createVideoFormat(mime, minWidth, minHeight);
+        format.setInteger(MediaFormat.KEY_COLOR_FORMAT, color);
+        format.setInteger(MediaFormat.KEY_BIT_RATE, minBitrate);
+        format.setInteger(MediaFormat.KEY_FRAME_RATE, 10);
+        format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 10);
+        return format;
+    }
+
+    private static int getActualMax(
+            boolean isEncoder, String name, String mime, CodecCapabilities caps, int max) {
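+        // Keep creating and starting codec instances until resources run out, then report how
+        // many instances were successfully started.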
+        int flag = isEncoder ? MediaCodec.CONFIGURE_FLAG_ENCODE : 0;
+        MediaFormat format =
+                createMinFormat(mime, caps.getVideoCapabilities(), caps.colorFormats[0]);
+        Vector<MediaCodec> codecs = new Vector<MediaCodec>();
+        for (int i = 0; i < max; ++i) {
+            try {
+                Log.d(TAG, "Create codec " + name + " #" + i);
+                MediaCodec codec = MediaCodec.createByCodecName(name);
+                codec.configure(format, null, null, flag);
+                codec.start();
+                codecs.add(codec);
+            } catch (IllegalArgumentException e) {
+                fail("Got unexpected IllegalArgumentException " + e.getMessage());
+            } catch (IOException e) {
+                fail("Got unexpected IOException " + e.getMessage());
+            } catch (MediaCodec.CodecException e) {
+                // ERROR_INSUFFICIENT_RESOURCE is expected as the test keeps creating codecs.
+                // Any other exception should be treated as a failure.
+                if (e.getErrorCode() == MediaCodec.CodecException.ERROR_INSUFFICIENT_RESOURCE) {
+                    Log.d(TAG, "Got CodecException with ERROR_INSUFFICIENT_RESOURCE.");
+                    break;
+                } else {
+                    fail("Unexpected CodecException " + e.getDiagnosticInfo());
+                }
+            }
+        }
+        int actualMax = codecs.size();
+        for (int i = 0; i < codecs.size(); ++i) {
+            codecs.get(i).release();
+        }
+        return actualMax;
+    }
+
+    private static boolean shouldTestActual(CodecCapabilities caps) {
+        if (caps.getVideoCapabilities() == null) {
+            // TODO: test audio codecs.
+            return false;
+        }
+        return true;
+    }
+
+    public void testGetMaxSupportedInstances() {
+        MediaCodecList allCodecs = new MediaCodecList(MediaCodecList.ALL_CODECS);
+        for (MediaCodecInfo info : allCodecs.getCodecInfos()) {
+            Log.d(TAG, "codec: " + info.getName());
+            Log.d(TAG, "  isEncoder = " + info.isEncoder());
+
+            String[] types = info.getSupportedTypes();
+            for (int j = 0; j < types.length; ++j) {
+                Log.d(TAG, "calling getCapabilitiesForType " + types[j]);
+                CodecCapabilities caps = info.getCapabilitiesForType(types[j]);
+                int max = caps.getMaxSupportedInstances();
+                Log.d(TAG, "getMaxSupportedInstances returns " + max);
+                assertTrue(max > 0);
+
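+                // Try to create one more instance than advertised; the number that actually
+                // starts must reach at least 90% of the reported maximum.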
+                if (shouldTestActual(caps)) {
+                    int actualMax = getActualMax(
+                            info.isEncoder(), info.getName(), types[j], caps, max + 1);
+                    Log.d(TAG, "actualMax " + actualMax + " vs reported max " + max);
+                    assertTrue(actualMax >= (int)(max * 0.9));
+                }
+            }
+        }
+    }
 }
diff --git a/tests/tests/media/src/android/media/cts/MediaExtractorTest.java b/tests/tests/media/src/android/media/cts/MediaExtractorTest.java
index 7ca498f..9db54ff 100644
--- a/tests/tests/media/src/android/media/cts/MediaExtractorTest.java
+++ b/tests/tests/media/src/android/media/cts/MediaExtractorTest.java
@@ -83,7 +83,7 @@
 
     public void testExtractorFailsIfMediaDataSourceReturnsAnError() throws Exception {
         TestMediaDataSource dataSource = getDataSourceFor(R.raw.testvideo);
-        dataSource.returnFromReadAt(-1);
+        dataSource.returnFromReadAt(-2);
         try {
             mExtractor.setDataSource(dataSource);
             fail("Expected IOException.");
diff --git a/tests/tests/media/src/android/media/cts/MediaMetadataRetrieverTest.java b/tests/tests/media/src/android/media/cts/MediaMetadataRetrieverTest.java
index 622c0ec..562656b 100644
--- a/tests/tests/media/src/android/media/cts/MediaMetadataRetrieverTest.java
+++ b/tests/tests/media/src/android/media/cts/MediaMetadataRetrieverTest.java
@@ -146,7 +146,7 @@
 
     public void testRetrieveFailsIfMediaDataSourceReturnsAnError() throws Exception {
         TestMediaDataSource dataSource = setDataSourceCallback(R.raw.testvideo);
-        dataSource.returnFromReadAt(-1);
+        dataSource.returnFromReadAt(-2);
         assertTrue(mRetriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_TITLE) == null);
     }
 }
diff --git a/tests/tests/media/src/android/media/cts/MediaPlayerTest.java b/tests/tests/media/src/android/media/cts/MediaPlayerTest.java
index 661b815..1fcb8b9 100644
--- a/tests/tests/media/src/android/media/cts/MediaPlayerTest.java
+++ b/tests/tests/media/src/android/media/cts/MediaPlayerTest.java
@@ -30,6 +30,7 @@
 import android.media.MediaPlayer.OnErrorListener;
 import android.media.MediaRecorder;
 import android.media.MediaMetadataRetriever;
+import android.media.PlaybackParams;
 import android.media.TimedText;
 import android.media.audiofx.AudioEffect;
 import android.media.audiofx.Visualizer;
@@ -845,8 +846,7 @@
             mMediaPlayer.seekTo(0);
             Thread.sleep(1000);
             int playTime = 4000;  // The testing clip is about 10 seconds long.
-            mMediaPlayer.setPlaybackRate(playbackRate,
-                                         MediaPlayer.PLAYBACK_RATE_AUDIO_MODE_RESAMPLE);
+            mMediaPlayer.setPlaybackParams(new PlaybackParams().setSpeed(playbackRate));
             mMediaPlayer.start();
             Thread.sleep(playTime);
             assertTrue("MediaPlayer should still be playing", mMediaPlayer.isPlaying());
@@ -1589,7 +1589,7 @@
         mMediaPlayer.setDataSource(dataSource);
         mMediaPlayer.prepare();
 
-        dataSource.returnFromReadAt(-1);
+        dataSource.returnFromReadAt(-2);
         mMediaPlayer.start();
         assertTrue(mOnErrorCalled.waitForSignal());
     }
diff --git a/tests/tests/media/src/android/media/cts/MediaSyncTest.java b/tests/tests/media/src/android/media/cts/MediaSyncTest.java
index 50e12da..dbbda32 100644
--- a/tests/tests/media/src/android/media/cts/MediaSyncTest.java
+++ b/tests/tests/media/src/android/media/cts/MediaSyncTest.java
@@ -31,6 +31,7 @@
 import android.media.MediaFormat;
 import android.media.MediaSync;
 import android.media.MediaTimestamp;
+import android.media.PlaybackParams;
 import android.test.ActivityInstrumentationTestCase2;
 import android.util.Log;
 import android.view.Surface;
@@ -151,12 +152,12 @@
     }
 
     /**
-     * Tests setPlaybackRate is handled correctly for wrong rate.
+     * Tests setPlaybackParams is handled correctly for wrong rate.
      */
-    public void testSetPlaybackRateFail() throws InterruptedException {
+    public void testSetPlaybackParamsFail() throws InterruptedException {
         final float rate = -1.0f;
         try {
-            mMediaSync.setPlaybackRate(rate, MediaSync.PLAYBACK_RATE_AUDIO_MODE_RESAMPLE);
+            mMediaSync.setPlaybackParams(new PlaybackParams().setSpeed(rate));
             fail("playback rate " + rate + " is not handled correctly");
         } catch (IllegalArgumentException e) {
         }
@@ -170,7 +171,7 @@
         mMediaSync.setAudioTrack(mAudioTrack);
 
         try {
-            mMediaSync.setPlaybackRate(rate, MediaSync.PLAYBACK_RATE_AUDIO_MODE_RESAMPLE);
+            mMediaSync.setPlaybackParams(new PlaybackParams().setSpeed(rate));
             fail("With audio track set, playback rate " + rate
                     + " is not handled correctly");
         } catch (IllegalArgumentException e) {
@@ -178,13 +179,13 @@
     }
 
     /**
-     * Tests setPlaybackRate is handled correctly for good rate without audio track set.
+     * Tests setPlaybackParams is handled correctly for good rate without audio track set.
      * The case for good rate with audio track set is tested in testPlaybackRate*.
      */
-    public void testSetPlaybackRateSucceed() throws InterruptedException {
+    public void testSetPlaybackParamsSucceed() throws InterruptedException {
         final float rate = (float)TEST_MAX_SPEED;
         try {
-            mMediaSync.setPlaybackRate(rate, MediaSync.PLAYBACK_RATE_AUDIO_MODE_RESAMPLE);
+            mMediaSync.setPlaybackParams(new PlaybackParams().setSpeed(rate));
         } catch (IllegalArgumentException e) {
             fail("playback rate " + rate + " is not handled correctly");
         }
@@ -201,6 +202,9 @@
         }
     }
 
+    private PlaybackParams PAUSED_RATE = new PlaybackParams().setSpeed(0.f);
+    private PlaybackParams NORMAL_RATE = new PlaybackParams().setSpeed(1.f);
+
     private boolean runCheckAudioBuffer(int inputResourceId, int timeOutMs) {
         final int NUM_LOOPS = 10;
         final Object condition = new Object();
@@ -233,7 +237,7 @@
             }
         }, null);
 
-        mMediaSync.setPlaybackRate(1.0f, MediaSync.PLAYBACK_RATE_AUDIO_MODE_RESAMPLE);
+        mMediaSync.setPlaybackParams(PAUSED_RATE);
 
         synchronized (condition) {
             mDecoderAudio.start();
@@ -247,6 +251,75 @@
     }
 
     /**
+     * Tests flush.
+     */
+    public void testFlush() throws InterruptedException {
+        final int timeOutMs = 5000;
+        boolean completed = runFlush(INPUT_RESOURCE_ID, timeOutMs);
+        if (!completed) {
+            throw new RuntimeException("timed out waiting for flush");
+        }
+    }
+
+    private boolean runFlush(int inputResourceId, int timeOutMs) {
+        final int INDEX_BEFORE_FLUSH = 1;
+        final int INDEX_AFTER_FLUSH = 2;
+        final int BUFFER_SIZE = 1024;
+        final int[] returnedIndex = new int[1];
+        final Object condition = new Object();
+
+        returnedIndex[0] = -1;
+
+        mHasAudio = true;
+        if (mDecoderAudio.setup(inputResourceId, null, Long.MAX_VALUE) == false) {
+            return true;
+        }
+
+        // get audio track.
+        mAudioTrack = mDecoderAudio.getAudioTrack();
+
+        mMediaSync.setAudioTrack(mAudioTrack);
+
+        mMediaSync.setCallback(new MediaSync.Callback() {
+            @Override
+            public void onAudioBufferConsumed(
+                    MediaSync sync, ByteBuffer byteBuffer, int bufferIndex) {
+                synchronized (condition) {
+                    if (returnedIndex[0] == -1) {
+                        returnedIndex[0] = bufferIndex;
+                        condition.notify();
+                    }
+                }
+            }
+        }, null);
+
+        mMediaSync.setOnErrorListener(new MediaSync.OnErrorListener() {
+            @Override
+            public void onError(MediaSync sync, int what, int extra) {
+                fail("got error from media sync (" + what + ", " + extra + ")");
+            }
+        }, null);
+
+        mMediaSync.setPlaybackParams(PAUSED_RATE);
+
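+        // Queue one buffer, flush, then queue a second buffer; only the buffer queued after
+        // the flush should ever be reported as consumed.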
+        ByteBuffer buffer1 = ByteBuffer.allocate(BUFFER_SIZE);
+        ByteBuffer buffer2 = ByteBuffer.allocate(BUFFER_SIZE);
+        mMediaSync.queueAudio(buffer1, INDEX_BEFORE_FLUSH, 0 /* presentationTimeUs */);
+        mMediaSync.flush();
+        mMediaSync.queueAudio(buffer2, INDEX_AFTER_FLUSH, 0 /* presentationTimeUs */);
+
+        synchronized (condition) {
+            mMediaSync.setPlaybackParams(NORMAL_RATE);
+
+            try {
+                condition.wait(timeOutMs);
+            } catch (InterruptedException e) {
+            }
+            return (returnedIndex[0] == INDEX_AFTER_FLUSH);
+        }
+    }
+
+    /**
      * Tests playing back video successfully.
      */
     public void testPlayVideo() throws InterruptedException {
@@ -369,7 +442,7 @@
             mHasAudio = true;
         }
 
-        mMediaSync.setPlaybackRate(playbackRate, MediaSync.PLAYBACK_RATE_AUDIO_MODE_RESAMPLE);
+        mMediaSync.setPlaybackParams(new PlaybackParams().setSpeed(playbackRate));
 
         synchronized (conditionFirstAudioBuffer) {
             if (video) {
@@ -629,7 +702,6 @@
                         mMediaSync.queueAudio(
                                 outputByteBuffer,
                                 index,
-                                info.size,
                                 info.presentationTimeUs);
                     } else {
                         codec.releaseOutputBuffer(index, info.presentationTimeUs * 1000);
diff --git a/tests/tests/media/src/android/media/cts/StreamingMediaPlayerTest.java b/tests/tests/media/src/android/media/cts/StreamingMediaPlayerTest.java
index dd7c1f6..7497da2 100644
--- a/tests/tests/media/src/android/media/cts/StreamingMediaPlayerTest.java
+++ b/tests/tests/media/src/android/media/cts/StreamingMediaPlayerTest.java
@@ -18,12 +18,16 @@
 import android.cts.util.MediaUtils;
 import android.media.MediaFormat;
 import android.media.MediaPlayer;
+import android.media.MediaPlayer.TrackInfo;
+import android.media.TimedMetaData;
 import android.os.Looper;
+import android.os.PowerManager;
 import android.os.SystemClock;
 import android.util.Log;
 import android.webkit.cts.CtsTestServer;
 
 import java.io.IOException;
+import java.util.concurrent.atomic.AtomicInteger;
 
 /**
  * Tests of MediaPlayer streaming capabilities.
@@ -307,6 +311,94 @@
         localHlsTest("hls.m3u8", false, true);
     }
 
+    public void testPlayHlsStreamWithTimedId3() throws Throwable {
+        mServer = new CtsTestServer(mContext);
+        try {
+            // counter must be final if we want to access it inside onTimedMetaData;
+            // use AtomicInteger so we can have a final counter object with mutable integer value.
+            final AtomicInteger counter = new AtomicInteger();
+            String stream_url = mServer.getAssetUrl("prog_index.m3u8");
+            mMediaPlayer.setDataSource(stream_url);
+            mMediaPlayer.setDisplay(getActivity().getSurfaceHolder());
+            mMediaPlayer.setScreenOnWhilePlaying(true);
+            mMediaPlayer.setWakeMode(mContext, PowerManager.PARTIAL_WAKE_LOCK);
+            mMediaPlayer.setOnTimedMetaDataAvailableListener(new MediaPlayer.OnTimedMetaDataAvailableListener() {
+                @Override
+                public void onTimedMetaDataAvailable(MediaPlayer mp, TimedMetaData md) {
+                    counter.incrementAndGet();
+                    int pos = mp.getCurrentPosition();
+                    long timeUs = md.getTimestamp();
+                    byte[] rawData = md.getMetaData();
+                    // Raw data contains an id3 tag holding the decimal string representation of
+                    // the associated time stamp rounded to the closest half second.
+
+                    int offset = 0;
+                    offset += 3; // "ID3"
+                    offset += 2; // version
+                    offset += 1; // flags
+                    offset += 4; // size
+                    offset += 4; // "TXXX"
+                    offset += 4; // frame size
+                    offset += 2; // frame flags
+                    offset += 1; // "\x03" : UTF-8 encoded Unicode
+                    offset += 1; // "\x00" : null-terminated empty description
+
+                    int length = rawData.length;
+                    length -= offset;
+                    length -= 1; // "\x00" : terminating null
+
+                    String data = new String(rawData, offset, length);
+                    int dataTimeUs = Integer.parseInt(data);
+                    assertTrue("Timed ID3 timestamp does not match content",
+                            Math.abs(dataTimeUs - timeUs) < 500000);
+                    assertTrue("Timed ID3 arrives after timestamp", pos * 1000 < timeUs);
+                }
+            });
+
+            final Object completion = new Object();
+            mMediaPlayer.setOnCompletionListener(new MediaPlayer.OnCompletionListener() {
+                int run;
+                @Override
+                public void onCompletion(MediaPlayer mp) {
+                    if (run++ == 0) {
+                        mMediaPlayer.seekTo(0);
+                        mMediaPlayer.start();
+                    } else {
+                        mMediaPlayer.stop();
+                        synchronized (completion) {
+                            completion.notify();
+                        }
+                    }
+                }
+            });
+
+            mMediaPlayer.prepare();
+            mMediaPlayer.start();
+            assertTrue("MediaPlayer not playing", mMediaPlayer.isPlaying());
+
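+            // Find the timed metadata (ID3) track and select it so that the
+            // onTimedMetaDataAvailable callbacks start arriving.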
+            int i = -1;
+            TrackInfo[] trackInfos = mMediaPlayer.getTrackInfo();
+            for (i = 0; i < trackInfos.length; i++) {
+                TrackInfo trackInfo = trackInfos[i];
+                if (trackInfo.getTrackType() == TrackInfo.MEDIA_TRACK_TYPE_METADATA) {
+                    break;
+                }
+            }
+            assertTrue("Stream has no timed ID3 track", i >= 0);
+            mMediaPlayer.selectTrack(i);
+
+            synchronized (completion) {
+                completion.wait();
+            }
+
+            // There are a total of 19 metadata access units in the test stream; every one of them
+            // should be received twice: once before the seek and once after.
+            assertTrue("Incorrect number of timed ID3s recieved", counter.get() == 38);
+        } finally {
+            mServer.shutdown();
+        }
+    }
+
     private static class WorkerWithPlayer implements Runnable {
         private final Object mLock = new Object();
         private Looper mLooper;
diff --git a/tests/tests/media/src/android/media/cts/TestMediaDataSource.java b/tests/tests/media/src/android/media/cts/TestMediaDataSource.java
index 87b4c59..a10840b 100644
--- a/tests/tests/media/src/android/media/cts/TestMediaDataSource.java
+++ b/tests/tests/media/src/android/media/cts/TestMediaDataSource.java
@@ -28,7 +28,7 @@
 /**
  * A MediaDataSource that reads from a byte array for use in tests.
  */
-public class TestMediaDataSource implements MediaDataSource {
+public class TestMediaDataSource extends MediaDataSource {
     private static final String TAG = "TestMediaDataSource";
 
     private byte[] mData;
@@ -62,29 +62,30 @@
     }
 
     @Override
-    public synchronized int readAt(long offset, byte[] buffer, int size) {
+    public synchronized int readAt(long position, byte[] buffer, int offset, int size)
+            throws IOException {
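+        // MediaDataSource contract: read up to |size| bytes into |buffer| starting at |offset|,
+        // from source position |position|; return the number of bytes read, or -1 at EOF.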
         if (mThrowFromReadAt) {
-            throw new RuntimeException("Test exception from readAt()");
+            throw new IOException("Test exception from readAt()");
         }
         if (mReturnFromReadAt != null) {
             return mReturnFromReadAt;
         }
 
         // Clamp reads past the end of the source.
-        if (offset >= mData.length) {
-            return 0;
+        if (position >= mData.length) {
+            return -1; // -1 indicates EOF
         }
-        if (offset + size > mData.length) {
-            size -= (offset + size) - mData.length;
+        if (position + size > mData.length) {
+            size -= (position + size) - mData.length;
         }
-        System.arraycopy(mData, (int)offset, buffer, 0, size);
+        System.arraycopy(mData, (int)position, buffer, offset, size);
         return size;
     }
 
     @Override
-    public synchronized long getSize() {
+    public synchronized long getSize() throws IOException {
         if (mThrowFromGetSize) {
-            throw new RuntimeException("Test exception from getSize()");
+            throw new IOException("Test exception from getSize()");
         }
         if (mReturnFromGetSize != null) {
             return mReturnFromGetSize;
diff --git a/tests/tests/renderscript/src/android/renderscript/cts/IntrinsicBLAS.java b/tests/tests/renderscript/src/android/renderscript/cts/IntrinsicBLAS.java
index ff5bf84..f6b3176 100644
--- a/tests/tests/renderscript/src/android/renderscript/cts/IntrinsicBLAS.java
+++ b/tests/tests/renderscript/src/android/renderscript/cts/IntrinsicBLAS.java
@@ -1999,14 +1999,8 @@
             if (cM != cN) {
                 return false;
             }
-            if (TransA != ScriptIntrinsicBLAS.NO_TRANSPOSE) {
-                if (aN != cM) {
-                    return false;
-                }
-            } else {
-                if (aM != cM) {
-                    return false;
-                }
+            if (aM != cM) {
+                return false;
             }
         } else if (A != null && B != null) {
             // A and B only
diff --git a/tests/tests/renderscript/src/android/renderscript/cts/rsAllocationCopyTest.java b/tests/tests/renderscript/src/android/renderscript/cts/rsAllocationCopyTest.java
new file mode 100644
index 0000000..f74fa38
--- /dev/null
+++ b/tests/tests/renderscript/src/android/renderscript/cts/rsAllocationCopyTest.java
@@ -0,0 +1,539 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.renderscript.cts;
+
+import android.renderscript.Allocation;
+import android.renderscript.Element;
+import android.renderscript.Type;
+import java.util.Random;
+import android.util.Log;
+
+public class rsAllocationCopyTest extends RSBaseCompute {
+
+    public void test_rsAllocationCopy1D_Byte() {
+        Random random = new Random(0x172d8ab9);
+        int width = random.nextInt(512);
+        int arr_len = width;
+        int offset = random.nextInt(arr_len);
+        int count = random.nextInt(arr_len - offset);
+
+        byte[] inArray = new byte[arr_len];
+        byte[] outArray = new byte[arr_len];
+        random.nextBytes(inArray);
+
+        Type.Builder typeBuilder = new Type.Builder(mRS, Element.I8(mRS));
+        typeBuilder.setX(width);
+        Allocation aIn = Allocation.createTyped(mRS, typeBuilder.create());
+        Allocation aOut = Allocation.createTyped(mRS, typeBuilder.create());
+        aIn.copyFrom(inArray);
+        aOut.copyFrom(outArray);
+
+        ScriptC_rsallocationcopy s = new ScriptC_rsallocationcopy(mRS);
+        s.set_aIn1D(aIn);
+        s.set_aOut1D(aOut);
+        s.set_xOff(offset);
+        s.set_xCount(count);
+        s.invoke_test1D();
+        mRS.finish();
+        aOut.copyTo(outArray);
+
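+        // Elements inside [offset, offset + count) must match the input; everything outside
+        // the copied range must still be zero.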
+        boolean result = true;
+        for (int i = 0; i < arr_len; i++) {
+            if (offset <= i && i < offset + count) {
+                if (inArray[i] != outArray[i]) {
+                    result = false;
+                    break;
+                }
+            } else {
+                if (outArray[i] != 0) {
+                    result = false;
+                    break;
+                }
+            }
+        }
+        assertTrue("test_rsAllocationCopy1D_Byte failed, output array does not match input",
+                   result);
+    }
+
+    public void test_rsAllocationCopy1D_Short() {
+        Random random = new Random(0x172d8ab9);
+        int width = random.nextInt(512);
+        int arr_len = width;
+        int offset = random.nextInt(arr_len);
+        int count = random.nextInt(arr_len - offset);
+
+        short[] inArray = new short[arr_len];
+        short[] outArray = new short[arr_len];
+        for (int i = 0; i < arr_len; i++) {
+            inArray[i] = (short)random.nextInt();
+        }
+
+        Type.Builder typeBuilder = new Type.Builder(mRS, Element.I16(mRS));
+        typeBuilder.setX(width);
+        Allocation aIn = Allocation.createTyped(mRS, typeBuilder.create());
+        Allocation aOut = Allocation.createTyped(mRS, typeBuilder.create());
+        aIn.copyFrom(inArray);
+        aOut.copyFrom(outArray);
+
+        ScriptC_rsallocationcopy s = new ScriptC_rsallocationcopy(mRS);
+        s.set_aIn1D(aIn);
+        s.set_aOut1D(aOut);
+        s.set_xOff(offset);
+        s.set_xCount(count);
+        s.invoke_test1D();
+        mRS.finish();
+        aOut.copyTo(outArray);
+
+        boolean result = true;
+        for (int i = 0; i < arr_len; i++) {
+            if (offset <= i && i < offset + count) {
+                if (inArray[i] != outArray[i]) {
+                    result = false;
+                    break;
+                }
+            } else {
+                if (outArray[i] != 0) {
+                    result = false;
+                    break;
+                }
+            }
+        }
+        assertTrue("test_rsAllocationCopy1D_Short failed, output array does not match input",
+                   result);
+    }
+
+    public void test_rsAllocationCopy1D_Int() {
+        Random random = new Random(0x172d8ab9);
+        int width = random.nextInt(512);
+        int arr_len = width;
+        int offset = random.nextInt(arr_len);
+        int count = random.nextInt(arr_len - offset);
+
+        int[] inArray = new int[arr_len];
+        int[] outArray = new int[arr_len];
+        for (int i = 0; i < arr_len; i++) {
+            inArray[i] = random.nextInt();
+        }
+
+        Type.Builder typeBuilder = new Type.Builder(mRS, Element.I32(mRS));
+        typeBuilder.setX(width);
+        Allocation aIn = Allocation.createTyped(mRS, typeBuilder.create());
+        Allocation aOut = Allocation.createTyped(mRS, typeBuilder.create());
+        aIn.copyFrom(inArray);
+        aOut.copyFrom(outArray);
+
+        ScriptC_rsallocationcopy s = new ScriptC_rsallocationcopy(mRS);
+        s.set_aIn1D(aIn);
+        s.set_aOut1D(aOut);
+        s.set_xOff(offset);
+        s.set_xCount(count);
+        s.invoke_test1D();
+        mRS.finish();
+        aOut.copyTo(outArray);
+
+        boolean result = true;
+        for (int i = 0; i < arr_len; i++) {
+            if (offset <= i && i < offset + count) {
+                if (inArray[i] != outArray[i]) {
+                    result = false;
+                    break;
+                }
+            } else {
+                if (outArray[i] != 0) {
+                    result = false;
+                    break;
+                }
+            }
+        }
+        assertTrue("test_rsAllocationCopy1D_Int failed, output array does not match input",
+                   result);
+    }
+
+    public void test_rsAllocationCopy1D_Float() {
+        Random random = new Random(0x172d8ab9);
+        int width = random.nextInt(512);
+        int arr_len = width;
+        int offset = random.nextInt(arr_len);
+        int count = random.nextInt(arr_len - offset);
+
+        float[] inArray = new float[arr_len];
+        float[] outArray = new float[arr_len];
+        for (int i = 0; i < arr_len; i++) {
+            inArray[i] = random.nextFloat();
+        }
+
+        Type.Builder typeBuilder = new Type.Builder(mRS, Element.F32(mRS));
+        typeBuilder.setX(width);
+        Allocation aIn = Allocation.createTyped(mRS, typeBuilder.create());
+        Allocation aOut = Allocation.createTyped(mRS, typeBuilder.create());
+        aIn.copyFrom(inArray);
+        aOut.copyFrom(outArray);
+
+        ScriptC_rsallocationcopy s = new ScriptC_rsallocationcopy(mRS);
+        s.set_aIn1D(aIn);
+        s.set_aOut1D(aOut);
+        s.set_xOff(offset);
+        s.set_xCount(count);
+        s.invoke_test1D();
+        mRS.finish();
+        aOut.copyTo(outArray);
+
+
+        boolean result = true;
+        for (int i = 0; i < arr_len; i++) {
+            if (offset <= i && i < offset + count) {
+                if (inArray[i] != outArray[i]) {
+                    result = false;
+                    break;
+                }
+            } else {
+                if (outArray[i] != 0) {
+                    result = false;
+                    break;
+                }
+            }
+        }
+        assertTrue("test_rsAllocationCopy1D_Float failed, output array does not match input",
+                   result);
+    }
+
+    public void test_rsAllocationCopy1D_Long() {
+        Random random = new Random(0x172d8ab9);
+        int width = random.nextInt(512);
+        int arr_len = width;
+        int offset = random.nextInt(arr_len);
+        int count = random.nextInt(arr_len - offset);
+
+        long[] inArray = new long[arr_len];
+        long[] outArray = new long[arr_len];
+        for (int i = 0; i < arr_len; i++) {
+            inArray[i] = random.nextLong();
+        }
+
+        Type.Builder typeBuilder = new Type.Builder(mRS, Element.I64(mRS));
+        typeBuilder.setX(width);
+        Allocation aIn = Allocation.createTyped(mRS, typeBuilder.create());
+        Allocation aOut = Allocation.createTyped(mRS, typeBuilder.create());
+        aIn.copyFrom(inArray);
+        aOut.copyFrom(outArray);
+
+        ScriptC_rsallocationcopy s = new ScriptC_rsallocationcopy(mRS);
+        s.set_aIn1D(aIn);
+        s.set_aOut1D(aOut);
+        s.set_xOff(offset);
+        s.set_xCount(count);
+        s.invoke_test1D();
+        mRS.finish();
+        aOut.copyTo(outArray);
+
+        boolean result = true;
+        for (int i = 0; i < arr_len; i++) {
+            if (offset <= i && i < offset + count) {
+                if (inArray[i] != outArray[i]) {
+                    result = false;
+                    break;
+                }
+            } else {
+                if (outArray[i] != 0) {
+                    result = false;
+                    break;
+                }
+            }
+        }
+        assertTrue("test_rsAllocationCopy1D_Long failed, output array does not match input",
+                   result);
+    }
+
+
+    public void test_rsAllocationCopy2D_Byte() {
+        Random random = new Random(0x172d8ab9);
+        int width = random.nextInt(128);
+        int height = random.nextInt(128);
+        int xOff = random.nextInt(width);
+        int yOff = random.nextInt(height);
+        int xCount = random.nextInt(width - xOff);
+        int yCount = random.nextInt(height - yOff);
+        int arr_len = width * height;
+
+        byte[] inArray = new byte[arr_len];
+        byte[] outArray = new byte[arr_len];
+        random.nextBytes(inArray);
+
+        Type.Builder typeBuilder = new Type.Builder(mRS, Element.I8(mRS));
+        typeBuilder.setX(width).setY(height);
+        Allocation aIn = Allocation.createTyped(mRS, typeBuilder.create());
+        Allocation aOut = Allocation.createTyped(mRS, typeBuilder.create());
+        aIn.copyFrom(inArray);
+        aOut.copyFrom(outArray);
+
+        ScriptC_rsallocationcopy s = new ScriptC_rsallocationcopy(mRS);
+        s.set_aIn2D(aIn);
+        s.set_aOut2D(aOut);
+        s.set_xOff(xOff);
+        s.set_yOff(yOff);
+        s.set_xCount(xCount);
+        s.set_yCount(yCount);
+        s.invoke_test2D();
+        mRS.finish();
+        aOut.copyTo(outArray);
+
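+        // Cells inside the copied sub-rectangle must match the input; all other cells must
+        // still be zero.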
+        boolean result = true;
+        for (int i = 0; i < height; i++) {
+            for (int j = 0; j < width; j++) {
+                int pos = i * width + j;
+                if (yOff <= i && i < yOff + yCount &&
+                    xOff <= j && j < xOff + xCount) {
+                    if (inArray[pos] != outArray[pos]) {
+                        result = false;
+                        break;
+                    }
+                } else {
+                    if (outArray[pos] != 0) {
+                        result = false;
+                        break;
+                    }
+                }
+            }
+        }
+        assertTrue("test_rsAllocationCopy2D_Byte failed, output array does not match input",
+                   result);
+    }
+
+    public void test_rsAllocationCopy2D_Short() {
+        Random random = new Random(0x172d8ab9);
+        int width = random.nextInt(128);
+        int height = random.nextInt(128);
+        int xOff = random.nextInt(width);
+        int yOff = random.nextInt(height);
+        int xCount = random.nextInt(width - xOff);
+        int yCount = random.nextInt(height - yOff);
+        int arr_len = width * height;
+
+        short[] inArray = new short[arr_len];
+        short[] outArray = new short[arr_len];
+        for (int i = 0; i < arr_len; i++) {
+            inArray[i] = (short)random.nextInt();
+        }
+
+        Type.Builder typeBuilder = new Type.Builder(mRS, Element.I16(mRS));
+        typeBuilder.setX(width).setY(height);
+        Allocation aIn = Allocation.createTyped(mRS, typeBuilder.create());
+        Allocation aOut = Allocation.createTyped(mRS, typeBuilder.create());
+        aIn.copyFrom(inArray);
+        aOut.copyFrom(outArray);
+
+        ScriptC_rsallocationcopy s = new ScriptC_rsallocationcopy(mRS);
+        s.set_aIn2D(aIn);
+        s.set_aOut2D(aOut);
+        s.set_xOff(xOff);
+        s.set_yOff(yOff);
+        s.set_xCount(xCount);
+        s.set_yCount(yCount);
+        s.invoke_test2D();
+        mRS.finish();
+        aOut.copyTo(outArray);
+
+        boolean result = true;
+        for (int i = 0; i < height; i++) {
+            for (int j = 0; j < width; j++) {
+                int pos = i * width + j;
+                if (yOff <= i && i < yOff + yCount &&
+                    xOff <= j && j < xOff + xCount) {
+                    if (inArray[pos] != outArray[pos]) {
+                        result = false;
+                        break;
+                    }
+                } else {
+                    if (outArray[pos] != 0) {
+                        result = false;
+                        break;
+                    }
+                }
+            }
+        }
+        assertTrue("test_rsAllocationCopy2D_Short failed, output array does not match input",
+                   result);
+    }
+
+    public void test_rsAllocationCopy2D_Int() {
+        Random random = new Random(0x172d8ab9);
+        int width = random.nextInt(128);
+        int height = random.nextInt(128);
+        int xOff = random.nextInt(width);
+        int yOff = random.nextInt(height);
+        int xCount = random.nextInt(width - xOff);
+        int yCount = random.nextInt(height - yOff);
+        int arr_len = width * height;
+
+        int[] inArray = new int[arr_len];
+        int[] outArray = new int[arr_len];
+        for (int i = 0; i < arr_len; i++) {
+            inArray[i] = random.nextInt();
+        }
+
+        Type.Builder typeBuilder = new Type.Builder(mRS, Element.I32(mRS));
+        typeBuilder.setX(width).setY(height);
+        Allocation aIn = Allocation.createTyped(mRS, typeBuilder.create());
+        Allocation aOut = Allocation.createTyped(mRS, typeBuilder.create());
+        aIn.copyFrom(inArray);
+        aOut.copyFrom(outArray);
+
+        ScriptC_rsallocationcopy s = new ScriptC_rsallocationcopy(mRS);
+        s.set_aIn2D(aIn);
+        s.set_aOut2D(aOut);
+        s.set_xOff(xOff);
+        s.set_yOff(yOff);
+        s.set_xCount(xCount);
+        s.set_yCount(yCount);
+        s.invoke_test2D();
+        mRS.finish();
+        aOut.copyTo(outArray);
+
+        boolean result = true;
+        for (int i = 0; i < height; i++) {
+            for (int j = 0; j < width; j++) {
+                int pos = i * width + j;
+                if (yOff <= i && i < yOff + yCount &&
+                    xOff <= j && j < xOff + xCount) {
+                    if (inArray[pos] != outArray[pos]) {
+                        result = false;
+                        break;
+                    }
+                } else {
+                    if (outArray[pos] != 0) {
+                        result = false;
+                        break;
+                    }
+                }
+            }
+        }
+        assertTrue("test_rsAllocationCopy2D_Int failed, output array does not match input",
+                   result);
+    }
+
+    public void test_rsAllocationCopy2D_Float() {
+        Random random = new Random(0x172d8ab9);
+        int width = random.nextInt(128);
+        int height = random.nextInt(128);
+        int xOff = random.nextInt(width);
+        int yOff = random.nextInt(height);
+        int xCount = random.nextInt(width - xOff);
+        int yCount = random.nextInt(height - yOff);
+        int arr_len = width * height;
+
+        float[] inArray = new float[arr_len];
+        float[] outArray = new float[arr_len];
+        for (int i = 0; i < arr_len; i++) {
+            inArray[i] = random.nextFloat();
+        }
+
+        Type.Builder typeBuilder = new Type.Builder(mRS, Element.F32(mRS));
+        typeBuilder.setX(width).setY(height);
+        Allocation aIn = Allocation.createTyped(mRS, typeBuilder.create());
+        Allocation aOut = Allocation.createTyped(mRS, typeBuilder.create());
+        aIn.copyFrom(inArray);
+        aOut.copyFrom(outArray);
+
+        ScriptC_rsallocationcopy s = new ScriptC_rsallocationcopy(mRS);
+        s.set_aIn2D(aIn);
+        s.set_aOut2D(aOut);
+        s.set_xOff(xOff);
+        s.set_yOff(yOff);
+        s.set_xCount(xCount);
+        s.set_yCount(yCount);
+        s.invoke_test2D();
+        mRS.finish();
+        aOut.copyTo(outArray);
+
+        boolean result = true;
+        for (int i = 0; i < height; i++) {
+            for (int j = 0; j < width; j++) {
+                int pos = i * width + j;
+                if (yOff <= i && i < yOff + yCount &&
+                    xOff <= j && j < xOff + xCount) {
+                    if (inArray[pos] != outArray[pos]) {
+                        result = false;
+                        break;
+                    }
+                } else {
+                    if (outArray[pos] != 0) {
+                        result = false;
+                        break;
+                    }
+                }
+            }
+        }
+        assertTrue("test_rsAllocationCopy2D_Float failed, output array does not match input",
+                   result);
+    }
+
+    public void test_rsAllocationCopy2D_Long() {
+        Random random = new Random(0x172d8ab9);
+        int width = random.nextInt(128);
+        int height = random.nextInt(128);
+        int xOff = random.nextInt(width);
+        int yOff = random.nextInt(height);
+        int xCount = random.nextInt(width - xOff);
+        int yCount = random.nextInt(height - yOff);
+        int arr_len = width * height;
+
+        long[] inArray = new long[arr_len];
+        long[] outArray = new long[arr_len];
+        for (int i = 0; i < arr_len; i++) {
+            inArray[i] = random.nextLong();
+        }
+
+        Type.Builder typeBuilder = new Type.Builder(mRS, Element.I64(mRS));
+        typeBuilder.setX(width).setY(height);
+        Allocation aIn = Allocation.createTyped(mRS, typeBuilder.create());
+        Allocation aOut = Allocation.createTyped(mRS, typeBuilder.create());
+        aIn.copyFrom(inArray);
+        aOut.copyFrom(outArray);
+
+        ScriptC_rsallocationcopy s = new ScriptC_rsallocationcopy(mRS);
+        s.set_aIn2D(aIn);
+        s.set_aOut2D(aOut);
+        s.set_xOff(xOff);
+        s.set_yOff(yOff);
+        s.set_xCount(xCount);
+        s.set_yCount(yCount);
+        s.invoke_test2D();
+        mRS.finish();
+        aOut.copyTo(outArray);
+
+        boolean result = true;
+        for (int i = 0; i < height; i++) {
+            for (int j = 0; j < width; j++) {
+                int pos = i * width + j;
+                if (yOff <= i && i < yOff + yCount &&
+                    xOff <= j && j < xOff + xCount) {
+                    if (inArray[pos] != outArray[pos]) {
+                        result = false;
+                        break;
+                    }
+                } else {
+                    if (outArray[pos] != 0) {
+                        result = false;
+                        break;
+                    }
+                }
+            }
+        }
+        assertTrue("test_rsAllocationCopy2D_Long failed, output array does not match input",
+                   result);
+    }
+}
diff --git a/tests/tests/renderscript/src/android/renderscript/cts/rsallocationcopy.rs b/tests/tests/renderscript/src/android/renderscript/cts/rsallocationcopy.rs
new file mode 100644
index 0000000..4d76493
--- /dev/null
+++ b/tests/tests/renderscript/src/android/renderscript/cts/rsallocationcopy.rs
@@ -0,0 +1,19 @@
+#include "shared.rsh"
+
+rs_allocation aIn1D;
+rs_allocation aOut1D;
+rs_allocation aIn2D;
+rs_allocation aOut2D;
+
+int xOff = 0;
+int yOff = 0;
+int xCount = 0;
+int yCount = 0;
+
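+// Copy xCount elements starting at xOff from aIn1D into aOut1D at the same offset.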
+void test1D() {
+    rsAllocationCopy1DRange(aOut1D, xOff, 0, xCount, aIn1D, xOff, 0);
+}
+
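+// Copy an xCount x yCount region starting at (xOff, yOff) from aIn2D into aOut2D at the
+// same position.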
+void test2D() {
+    rsAllocationCopy2DRange(aOut2D, xOff, yOff, 0, 0, xCount, yCount, aIn2D, xOff, yOff, 0, 0);
+}
diff --git a/tests/tests/security/jni/android_security_cts_NativeCodeTest.cpp b/tests/tests/security/jni/android_security_cts_NativeCodeTest.cpp
index 3b63ba9..00765c6 100644
--- a/tests/tests/security/jni/android_security_cts_NativeCodeTest.cpp
+++ b/tests/tests/security/jni/android_security_cts_NativeCodeTest.cpp
@@ -212,7 +212,7 @@
     bool      vulnerable = false;
 
     if (nvmap >= 0) {
-        if (0 >= ioctl(nvmap, NVMAP_IOC_FROM_ID)) {
+        if (0 == ioctl(nvmap, NVMAP_IOC_FROM_ID)) {
             /* IOCTL succeeded */
             vulnerable = true;
         }
diff --git a/tests/tests/speech/src/android/speech/tts/cts/StubTextToSpeechService.java b/tests/tests/speech/src/android/speech/tts/cts/StubTextToSpeechService.java
index 7b5baca..88bdc74 100644
--- a/tests/tests/speech/src/android/speech/tts/cts/StubTextToSpeechService.java
+++ b/tests/tests/speech/src/android/speech/tts/cts/StubTextToSpeechService.java
@@ -20,8 +20,12 @@
 import android.speech.tts.SynthesisRequest;
 import android.speech.tts.TextToSpeech;
 import android.speech.tts.TextToSpeechService;
+import android.speech.tts.TtsEngines;
 import android.util.Log;
 
+import java.util.ArrayList;
+import java.util.Locale;
+
 /**
  * Stub implementation of {@link TextToSpeechService}. Used for testing the
  * TTS engine API.
@@ -32,6 +36,17 @@
     // Object that onSynthesizeText will #wait on, if set to non-null
     public static volatile Object sSynthesizeTextWait;
 
+    private ArrayList<Locale> supportedLanguages = new ArrayList<Locale>();
+    private ArrayList<Locale> supportedCountries = new ArrayList<Locale>();
+    private ArrayList<Locale> GBFallbacks = new ArrayList<Locale>();
+
+    public StubTextToSpeechService() {
+        supportedLanguages.add(new Locale("eng"));
+        supportedCountries.add(new Locale("eng", "USA"));
+        supportedCountries.add(new Locale("eng", "GBR"));
+        GBFallbacks.add(new Locale("eng", "NZL"));
+    }
+
     @Override
     protected String[] onGetLanguage() {
         return new String[] { "eng", "USA", "" };
@@ -39,12 +54,19 @@
 
     @Override
     protected int onIsLanguageAvailable(String lang, String country, String variant) {
-        return TextToSpeech.LANG_AVAILABLE;
+        if (supportedCountries.contains(new Locale(lang, country))) {
+            return TextToSpeech.LANG_COUNTRY_AVAILABLE;
+        }
+        if (supportedLanguages.contains(new Locale(lang))) {
+            return TextToSpeech.LANG_AVAILABLE;
+        }
+
+        return TextToSpeech.LANG_NOT_SUPPORTED;
     }
 
     @Override
     protected int onLoadLanguage(String lang, String country, String variant) {
-        return TextToSpeech.LANG_AVAILABLE;
+        return onIsLanguageAvailable(lang, country, variant);
     }
 
     @Override
@@ -77,4 +99,20 @@
         }
     }
 
+    @Override
+    public String onGetDefaultVoiceNameFor(String lang, String country, String variant) {
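+        // Supported language-country pairs map to their normalized locale tag; other English
+        // requests fall back to en-GB for the listed regions and to en-US otherwise.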
+        Locale locale = new Locale(lang, country);
+        if (supportedCountries.contains(locale)) {
+            return TtsEngines.normalizeTTSLocale(locale).toLanguageTag();
+        }
+        if (lang.equals("eng")) {
+            if (GBFallbacks.contains(new Locale(lang, country))) {
+                return "en-GB";
+            } else {
+                return "en-US";
+            }
+        }
+        return super.onGetDefaultVoiceNameFor(lang, country, variant);
+    }
+
 }
diff --git a/tests/tests/speech/src/android/speech/tts/cts/TextToSpeechTest.java b/tests/tests/speech/src/android/speech/tts/cts/TextToSpeechTest.java
index c83304c..013a5ea 100644
--- a/tests/tests/speech/src/android/speech/tts/cts/TextToSpeechTest.java
+++ b/tests/tests/speech/src/android/speech/tts/cts/TextToSpeechTest.java
@@ -94,6 +94,25 @@
         return false;
     }
 
+    private void assertContainsEngine(String engine, List<TextToSpeech.EngineInfo> engines) {
+        for (TextToSpeech.EngineInfo engineInfo : engines) {
+            if (engineInfo.name.equals(engine)) {
+                return;
+            }
+        }
+        fail("Engine " + engine + " not found");
+    }
+
+    private HashMap<String, String> createParams() {
+        HashMap<String, String> params = new HashMap<String,String>();
+        params.put(TextToSpeech.Engine.KEY_PARAM_UTTERANCE_ID, UTTERANCE_ID);
+        return params;
+    }
+
+    private boolean waitForUtterance() throws InterruptedException {
+        return mTts.waitForComplete(UTTERANCE_ID);
+    }
+
     public void testSynthesizeToFile() throws Exception {
         if (mTts == null) {
             return;
@@ -124,7 +143,6 @@
         assertTrue("speak() completion timeout", waitForUtterance());
     }
 
-
     public void testSpeakStop() throws Exception {
         getTts().stop();
         final int iterations = 20;
@@ -157,24 +175,4 @@
         assertNotNull("getEngines() returned null", engines);
         assertContainsEngine(TextToSpeechWrapper.MOCK_TTS_ENGINE, engines);
     }
-
-    private void assertContainsEngine(String engine, List<TextToSpeech.EngineInfo> engines) {
-        for (TextToSpeech.EngineInfo engineInfo : engines) {
-            if (engineInfo.name.equals(engine)) {
-                return;
-            }
-        }
-        fail("Engine " + engine + " not found");
-    }
-
-    private HashMap<String, String> createParams() {
-        HashMap<String, String> params = new HashMap<String,String>();
-        params.put(TextToSpeech.Engine.KEY_PARAM_UTTERANCE_ID, UTTERANCE_ID);
-        return params;
-    }
-
-    private boolean waitForUtterance() throws InterruptedException {
-        return mTts.waitForComplete(UTTERANCE_ID);
-    }
-
 }
diff --git a/tests/tests/telephony/src/android/telephony/cts/SmsManagerTest.java b/tests/tests/telephony/src/android/telephony/cts/SmsManagerTest.java
index 39f5177..d3d15a5 100644
--- a/tests/tests/telephony/src/android/telephony/cts/SmsManagerTest.java
+++ b/tests/tests/telephony/src/android/telephony/cts/SmsManagerTest.java
@@ -49,6 +49,10 @@
         "three separate messages.This is a very long text. This text should be broken " +
         "into three separate messages.This is a very long text. This text should be " +
         "broken into three separate messages.";;
+    private static final String LONG_TEXT_WITH_32BIT_CHARS =
+        "Long dkkshsh jdjsusj kbsksbdf jfkhcu hhdiwoqiwyrygrvn?*?*!\";:'/,."
+        + "__?9#9292736&4;\"$+$+((]\\[\\℅©℅™^®°¥°¥=¢£}}£∆~¶~÷|√×."
+        + " 😯😆😉😇😂😀👕🎓😀👙🐕🐀🐶🐰🐩⛪⛲ ";
 
     private static final String SMS_SEND_ACTION = "CTS_SMS_SEND_ACTION";
     private static final String SMS_DELIVERY_ACTION = "CTS_SMS_DELIVERY_ACTION";
@@ -209,17 +213,28 @@
     public void testDivideMessage() {
         ArrayList<String> dividedMessages = divideMessage(LONG_TEXT);
         assertNotNull(dividedMessages);
-        int numParts;
         if (TelephonyUtils.isSkt(mTelephonyManager)) {
-            assertTrue(isComplete(dividedMessages, 5) || isComplete(dividedMessages, 3));
+            assertTrue(isComplete(dividedMessages, 5, LONG_TEXT)
+                    || isComplete(dividedMessages, 3, LONG_TEXT));
         } else if (TelephonyUtils.isKt(mTelephonyManager)) {
-            assertTrue(isComplete(dividedMessages, 4) || isComplete(dividedMessages, 3));
+            assertTrue(isComplete(dividedMessages, 4, LONG_TEXT)
+                    || isComplete(dividedMessages, 3, LONG_TEXT));
         } else {
-            assertTrue(isComplete(dividedMessages, 3));
+            assertTrue(isComplete(dividedMessages, 3, LONG_TEXT));
         }
     }
 
-    private boolean isComplete(List<String> dividedMessages, int numParts) {
+    public void testDivideUnicodeMessage() {
+        ArrayList<String> dividedMessages = divideMessage(LONG_TEXT_WITH_32BIT_CHARS);
+        assertNotNull(dividedMessages);
+        assertTrue(isComplete(dividedMessages, 3, LONG_TEXT_WITH_32BIT_CHARS));
+        for (String messagePiece : dividedMessages) {
+            assertFalse(Character.isHighSurrogate(
+                    messagePiece.charAt(messagePiece.length() - 1)));
+        }
+    }
+
+    private boolean isComplete(List<String> dividedMessages, int numParts, String longText) {
         if (dividedMessages.size() != numParts) {
             return false;
         }
@@ -228,7 +243,7 @@
         for (int i = 0; i < numParts; i++) {
             actualMessage += dividedMessages.get(i);
         }
-        return LONG_TEXT.equals(actualMessage);
+        return longText.equals(actualMessage);
     }
 
     public void testSendMessages() throws InterruptedException {
@@ -369,9 +384,10 @@
                 Bundle bundle = intent.getExtras();
                 if (bundle != null) {
                     Object[] obj = (Object[]) bundle.get("pdus");
+                    String format = bundle.getString("format");
                     SmsMessage[] message = new SmsMessage[obj.length];
                     for (int i = 0; i < obj.length; i++) {
-                        message[i] = SmsMessage.createFromPdu((byte[]) obj[i]);
+                        message[i] = SmsMessage.createFromPdu((byte[]) obj[i], format);
                     }
 
                     for (SmsMessage currentMessage : message) {
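testDivideUnicodeMessage above asserts that divideMessage() never splits a surrogate pair: no part may end on a high (leading) surrogate, otherwise the 32-bit emoji characters in LONG_TEXT_WITH_32BIT_CHARS would be corrupted when the parts are reassembled. A standalone sketch of that check, applicable to any already-divided list of parts (the helper class is illustrative, not part of this patch):

import java.util.List;

final class SurrogateSplitCheck {
    // Returns true if every part ends on a complete code point, i.e. no part ends with a
    // high surrogate whose matching low surrogate begins the next part.
    static boolean endsOnCompleteCodePoints(List<String> parts) {
        for (String part : parts) {
            if (!part.isEmpty()
                    && Character.isHighSurrogate(part.charAt(part.length() - 1))) {
                return false;
            }
        }
        return true;
    }
}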
diff --git a/tests/tests/telephony/src/android/telephony/cts/SmsMessageTest.java b/tests/tests/telephony/src/android/telephony/cts/SmsMessageTest.java
index ff7b097..d90e394 100644
--- a/tests/tests/telephony/src/android/telephony/cts/SmsMessageTest.java
+++ b/tests/tests/telephony/src/android/telephony/cts/SmsMessageTest.java
@@ -62,6 +62,10 @@
     private static final long TIMESTAMP_MILLIS = 1149631383000l;
     private static final int SEPTETS_SKT = 80;
     private static final int SEPTETS_KT = 90;
+    private static final String LONG_TEXT_WITH_32BIT_CHARS =
+        "Long dkkshsh jdjsusj kbsksbdf jfkhcu hhdiwoqiwyrygrvn?*?*!\";:'/,."
+        + "__?9#9292736&4;\"$+$+((]\\[\\℅©℅™^®°¥°¥=¢£}}£∆~¶~÷|√×."
+        + " 😯😆😉😇😂😀👕🎓😀👙🐕🐀🐶🐰🐩⛪⛲ ";
 
     @Override
     protected void setUp() throws Exception {
@@ -71,7 +75,6 @@
         mPackageManager = getContext().getPackageManager();
     }
 
-    @SuppressWarnings("deprecation")
     public void testCreateFromPdu() throws Exception {
         if (!mPackageManager.hasSystemFeature(PackageManager.FEATURE_TELEPHONY)
                 || mPackageManager.hasSystemFeature(PackageManager.FEATURE_TELEPHONY_CDMA)) {
@@ -80,7 +83,8 @@
         }
 
         String pdu = "07916164260220F0040B914151245584F600006060605130308A04D4F29C0E";
-        SmsMessage sms = SmsMessage.createFromPdu(hexStringToByteArray(pdu));
+        SmsMessage sms = SmsMessage.createFromPdu(hexStringToByteArray(pdu),
+                SmsMessage.FORMAT_3GPP);
         assertEquals(SCA1, sms.getServiceCenterAddress());
         assertEquals(OA1, sms.getOriginatingAddress());
         assertEquals(MESSAGE_BODY1, sms.getMessageBody());
@@ -88,7 +92,7 @@
         int[] result = SmsMessage.calculateLength(sms.getMessageBody(), true);
         assertEquals(SMS_NUMBER1, result[0]);
         assertEquals(sms.getMessageBody().length(), result[1]);
-        assertRemaining(sms.getMessageBody().length(), result[2]);
+        assertRemaining(sms.getMessageBody().length(), result[2], SmsMessage.MAX_USER_DATA_SEPTETS);
         assertEquals(SmsMessage.ENCODING_7BIT, result[3]);
         assertEquals(pdu, toHexString(sms.getPdu()));
 
@@ -106,13 +110,13 @@
         assertEquals(TIMESTAMP_MILLIS, sms.getTimestampMillis());
 
         // Test create from null Pdu
-        sms = SmsMessage.createFromPdu(null);
+        sms = SmsMessage.createFromPdu(null, SmsMessage.FORMAT_3GPP);
         assertNotNull(sms);
 
-        //Test create from long Pdu
+        // Test create from long Pdu
         pdu = "07912160130310F2040B915121927786F300036060924180008A0DA"
             + "8695DAC2E8FE9296A794E07";
-        sms = SmsMessage.createFromPdu(hexStringToByteArray(pdu));
+        sms = SmsMessage.createFromPdu(hexStringToByteArray(pdu), SmsMessage.FORMAT_3GPP);
         assertEquals(SCA2, sms.getServiceCenterAddress());
         assertEquals(OA2, sms.getOriginatingAddress());
         assertEquals(MESSAGE_BODY2, sms.getMessageBody());
@@ -120,30 +124,30 @@
         result = SmsMessage.calculateLength(msgBody, false);
         assertEquals(SMS_NUMBER2, result[0]);
         assertEquals(sms.getMessageBody().length(), result[1]);
-        assertRemaining(sms.getMessageBody().length(), result[2]);
+        assertRemaining(sms.getMessageBody().length(), result[2], SmsMessage.MAX_USER_DATA_SEPTETS);
         assertEquals(SmsMessage.ENCODING_7BIT, result[3]);
 
         // Test createFromPdu Ucs to Sms
         pdu = "07912160130300F4040B914151245584"
             + "F600087010807121352B10212200A900AE00680065006C006C006F";
-        sms = SmsMessage.createFromPdu(hexStringToByteArray(pdu));
+        sms = SmsMessage.createFromPdu(hexStringToByteArray(pdu), SmsMessage.FORMAT_3GPP);
         assertEquals(MESSAGE_BODY3, sms.getMessageBody());
         result = SmsMessage.calculateLength(sms.getMessageBody(), true);
         assertEquals(SMS_NUMBER3, result[0]);
         assertEquals(sms.getMessageBody().length(), result[1]);
-        assertRemaining(sms.getMessageBody().length(), result[2]);
+        assertRemaining(sms.getMessageBody().length(), result[2], SmsMessage.MAX_USER_DATA_SEPTETS);
         assertEquals(SmsMessage.ENCODING_7BIT, result[3]);
     }
 
-    private void assertRemaining(int messageLength, int remaining) {
+    private void assertRemaining(int messageLength, int remaining, int maxChars) {
         if (TelephonyUtils.isSkt(mTelephonyManager)) {
             assertTrue(checkRemaining(SEPTETS_SKT, messageLength, remaining)
-                    || checkRemaining(SmsMessage.MAX_USER_DATA_SEPTETS, messageLength, remaining));
+                    || checkRemaining(maxChars, messageLength, remaining));
         } else if (TelephonyUtils.isKt(mTelephonyManager)) {
             assertTrue(checkRemaining(SEPTETS_KT, messageLength, remaining)
-                    || checkRemaining(SmsMessage.MAX_USER_DATA_SEPTETS, messageLength, remaining));
+                    || checkRemaining(maxChars, messageLength, remaining));
         } else {
-            assertTrue(checkRemaining(SmsMessage.MAX_USER_DATA_SEPTETS, messageLength, remaining));
+            assertTrue(checkRemaining(maxChars, messageLength, remaining));
         }
     }
 
@@ -160,7 +164,8 @@
 
         // "set MWI flag"
         String pdu = "07912160130310F20404D0110041006060627171118A0120";
-        SmsMessage sms = SmsMessage.createFromPdu(hexStringToByteArray(pdu));
+        SmsMessage sms = SmsMessage.createFromPdu(hexStringToByteArray(pdu),
+                SmsMessage.FORMAT_3GPP);
         assertTrue(sms.isReplace());
         assertEquals(OA3, sms.getOriginatingAddress());
         assertEquals(MESSAGE_BODY4, sms.getMessageBody());
@@ -168,12 +173,12 @@
 
         // "clear mwi flag"
         pdu = "07912160130310F20404D0100041006021924193352B0120";
-        sms = SmsMessage.createFromPdu(hexStringToByteArray(pdu));
+        sms = SmsMessage.createFromPdu(hexStringToByteArray(pdu), SmsMessage.FORMAT_3GPP);
         assertTrue(sms.isMWIClearMessage());
 
         // "clear MWI flag"
         pdu = "07912160130310F20404D0100041006060627161058A0120";
-        sms = SmsMessage.createFromPdu(hexStringToByteArray(pdu));
+        sms = SmsMessage.createFromPdu(hexStringToByteArray(pdu), SmsMessage.FORMAT_3GPP);
         assertTrue(sms.isReplace());
         assertEquals(OA4, sms.getOriginatingAddress());
         assertEquals(MESSAGE_BODY5, sms.getMessageBody());
@@ -181,13 +186,13 @@
 
         // "set MWI flag"
         pdu = "07912180958750F84401800500C87020026195702B06040102000200";
-        sms = SmsMessage.createFromPdu(hexStringToByteArray(pdu));
+        sms = SmsMessage.createFromPdu(hexStringToByteArray(pdu), SmsMessage.FORMAT_3GPP);
         assertTrue(sms.isMWISetMessage());
         assertTrue(sms.isMwiDontStore());
 
         // "clear mwi flag"
         pdu = "07912180958750F84401800500C07020027160112B06040102000000";
-        sms = SmsMessage.createFromPdu(hexStringToByteArray(pdu));
+        sms = SmsMessage.createFromPdu(hexStringToByteArray(pdu), SmsMessage.FORMAT_3GPP);
 
         assertTrue(sms.isMWIClearMessage());
         assertTrue(sms.isMwiDontStore());
@@ -206,7 +211,8 @@
             + "66C414141414D7741414236514141414141008D908918802B3135313232393737"
             + "3638332F545950453D504C4D4E008A808E022B918805810306977F83687474703"
             + "A2F2F36";
-        SmsMessage sms = SmsMessage.createFromPdu(hexStringToByteArray(pdu));
+        SmsMessage sms = SmsMessage.createFromPdu(hexStringToByteArray(pdu),
+                SmsMessage.FORMAT_3GPP);
         byte[] userData = sms.getUserData();
         assertNotNull(userData);
     }
@@ -265,7 +271,8 @@
         String pdu = "07914151551512f204038105f300007011103164638a28e6f71b50c687db" +
                          "7076d9357eb7412f7a794e07cdeb6275794c07bde8e5391d247e93f3";
 
-        SmsMessage sms = SmsMessage.createFromPdu(hexStringToByteArray(pdu));
+        SmsMessage sms = SmsMessage.createFromPdu(hexStringToByteArray(pdu),
+                SmsMessage.FORMAT_3GPP);
         assertEquals(SCA4, sms.getServiceCenterAddress());
         assertTrue(sms.isEmail());
         assertEquals(EMAIL_ADD, sms.getEmailFrom());
@@ -277,7 +284,7 @@
 
         pdu = "07914151551512f204038105f400007011103105458a29e6f71b50c687db" +
                         "7076d9357eb741af0d0a442fcfe9c23739bfe16d289bdee6b5f1813629";
-        sms = SmsMessage.createFromPdu(hexStringToByteArray(pdu));
+        sms = SmsMessage.createFromPdu(hexStringToByteArray(pdu), SmsMessage.FORMAT_3GPP);
         assertEquals(SCA3, sms.getServiceCenterAddress());
         assertTrue(sms.isEmail());
         assertEquals(OA, sms.getDisplayOriginatingAddress());
@@ -286,6 +293,22 @@
         assertEquals(MB, sms.getEmailBody());
     }
 
+    public void testCalculateLength() throws Exception {
+        if (!mPackageManager.hasSystemFeature(PackageManager.FEATURE_TELEPHONY)) {
+            return;
+        }
+
+        int[] result = SmsMessage.calculateLength(LONG_TEXT_WITH_32BIT_CHARS, false);
+        assertEquals(3, result[0]);
+        assertEquals(LONG_TEXT_WITH_32BIT_CHARS.length(), result[1]);
+        assertRemaining(LONG_TEXT_WITH_32BIT_CHARS.length(), result[2],
+                // 3 parts, each with (SmsMessage.MAX_USER_DATA_BYTES_WITH_HEADER / 2) 16-bit
+                // characters. We need to subtract one because a 32-bit character crosses the
+                // boundary of 2 parts.
+                3 * SmsMessage.MAX_USER_DATA_BYTES_WITH_HEADER / 2 - 1);
+        assertEquals(SmsMessage.ENCODING_16BIT, result[3]);
+    }
+
     private final static char[] HEX_DIGITS = { '0', '1', '2', '3', '4', '5', '6', '7', '8', '9',
             'A', 'B', 'C', 'D', 'E', 'F' };
 
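The changes above migrate the tests from the deprecated createFromPdu(byte[]) to the overload that names the PDU format, and testCalculateLength() reads the int[4] returned by calculateLength(): parts, code units used, code units remaining, and encoding. A hedged sketch of that usage; the PDU bytes are supplied by the caller rather than invented here:

import android.telephony.SmsMessage;

final class SmsParseSketch {
    // Parses a 3GPP (GSM) PDU with the non-deprecated overload and summarizes
    // what calculateLength() reports for its body.
    static void describe(byte[] pdu) {
        SmsMessage sms = SmsMessage.createFromPdu(pdu, SmsMessage.FORMAT_3GPP);
        int[] len = SmsMessage.calculateLength(sms.getMessageBody(), false);
        int parts = len[0];       // number of SMS parts needed
        int codeUnits = len[1];   // code units used by the text
        int remaining = len[2];   // code units still free in the last part
        int encoding = len[3];    // e.g. SmsMessage.ENCODING_7BIT or ENCODING_16BIT
        System.out.println(parts + " part(s), " + codeUnits + " used, "
                + remaining + " free, encoding=" + encoding);
    }
}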
diff --git a/tests/tests/telephony/src/android/telephony/cts/SubscriptionManagerTest.java b/tests/tests/telephony/src/android/telephony/cts/SubscriptionManagerTest.java
index 412bdc4..9b08397 100644
--- a/tests/tests/telephony/src/android/telephony/cts/SubscriptionManagerTest.java
+++ b/tests/tests/telephony/src/android/telephony/cts/SubscriptionManagerTest.java
@@ -52,89 +52,6 @@
         super.tearDown();
     }
 
-    // OnSubscriptionsChange event gets triggered when subscriptions present are either
-    // added/removed or when their contents are changed. Performing this test only when contents of
-    // SubscriptionInfoRecord are changed so that OnSubscriptionsChange event can be simulated. It
-    // is difficult to generate the event manually without pulling in/out sim card so generating
-    // testcase when there are no Subscriptions present is skipped.
-    public void testAddOnSubscriptionsChangedListener () throws Throwable {
-        if (mCm.getNetworkInfo(ConnectivityManager.TYPE_MOBILE) == null) {
-            Log.d(TAG, "Skipping test that requires ConnectivityManager.TYPE_MOBILE");
-            return;
-        }
-        final List<SubscriptionInfo> subList = mSubscriptionManager.getActiveSubscriptionInfoList();
-        if (subList == null || subList.size() == 0) {
-            Log.d(TAG, "Skipping test when there are no active subscriptions");
-            return;
-        }
-
-        TestThread t = new TestThread(new Runnable() {
-            public void run() {
-                Looper.prepare();
-
-                mListener = new SubscriptionManager.OnSubscriptionsChangedListener() {
-                    @Override
-                    public void onSubscriptionsChanged() {
-                        synchronized(mLock) {
-                            mOnSubscriptionsChangedCalled = true;
-                            mLock.notify();
-                        }
-                    }
-                };
-                mSubscriptionManager.addOnSubscriptionsChangedListener(mListener);
-                // Simulate onSubscriptionsChanged event
-                mSubscriptionManager.setDisplayName("Test1", subList.get(0).getSubscriptionId());
-                Looper.loop();
-            }
-        });
-        mOnSubscriptionsChangedCalled = false;
-        t.start();
-        synchronized (mLock) {
-            while (!mOnSubscriptionsChangedCalled) {
-                mLock.wait();
-            }
-        }
-        assertTrue(mOnSubscriptionsChangedCalled);
-    }
-
-    public void testRemoveOnSubscriptionsChangedListener () throws Throwable {
-        if (mCm.getNetworkInfo(ConnectivityManager.TYPE_MOBILE) == null) {
-            Log.d(TAG, "Skipping test that requires ConnectivityManager.TYPE_MOBILE");
-            return;
-        }
-        final List<SubscriptionInfo> subList = mSubscriptionManager.getActiveSubscriptionInfoList();
-        if (subList == null || subList.size() == 0) {
-            Log.d(TAG, "Skipping test when there are no active subscriptions");
-            return;
-        }
-        TestThread t = new TestThread(new Runnable() {
-            public void run() {
-                Looper.prepare();
-                mListener = new SubscriptionManager.OnSubscriptionsChangedListener() {
-                    @Override
-                    public void onSubscriptionsChanged() {
-                        synchronized(mLock) {
-                            mOnSubscriptionsChangedCalled = true;
-                            mLock.notify();
-                        }
-                    }
-                };
-                // unregister the listener
-                mSubscriptionManager.removeOnSubscriptionsChangedListener(mListener);
-                // Simulate onSubscriptionsChanged event
-                mSubscriptionManager.setDisplayName("Test2", subList.get(0).getSubscriptionId());
-                Looper.loop();
-            }
-        });
-
-        mOnSubscriptionsChangedCalled = false;
-        t.start();
-        synchronized (mLock) {
-            mLock.wait(TOLERANCE);
-        }
-        assertFalse(mOnSubscriptionsChangedCalled);
-    }
-
     public void testGetActiveSubscriptionInfoCount() {
         if (mCm.getNetworkInfo(ConnectivityManager.TYPE_MOBILE) == null) {
             Log.d(TAG, "Skipping test that requires ConnectivityManager.TYPE_MOBILE");
diff --git a/tests/tests/uirendering/res/layout/simple_shadow_layout.xml b/tests/tests/uirendering/res/layout/simple_shadow_layout.xml
new file mode 100644
index 0000000..2f21df0
--- /dev/null
+++ b/tests/tests/uirendering/res/layout/simple_shadow_layout.xml
@@ -0,0 +1,26 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!--
+     Copyright (C) 2015 The Android Open Source Project
+
+       Licensed under the Apache License, Version 2.0 (the "License");
+       you may not use this file except in compliance with the License.
+       You may obtain a copy of the License at
+
+            http://www.apache.org/licenses/LICENSE-2.0
+
+       Unless required by applicable law or agreed to in writing, software
+       distributed under the License is distributed on an "AS IS" BASIS,
+       WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+       See the License for the specific language governing permissions and
+       limitations under the License.
+-->
+<FrameLayout xmlns:android="http://schemas.android.com/apk/res/android"
+    android:layout_width="@dimen/test_width"
+    android:layout_height="@dimen/test_height">
+    <View android:layout_width="40px"
+        android:layout_height="40px"
+        android:translationX="25px"
+        android:translationY="25px"
+        android:elevation="10dp"
+        android:background="#fff" />
+</FrameLayout>
\ No newline at end of file
diff --git a/tests/tests/uirendering/src/android/uirendering/cts/testclasses/ShadowTests.java b/tests/tests/uirendering/src/android/uirendering/cts/testclasses/ShadowTests.java
new file mode 100644
index 0000000..878d5ae
--- /dev/null
+++ b/tests/tests/uirendering/src/android/uirendering/cts/testclasses/ShadowTests.java
@@ -0,0 +1,49 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package android.uirendering.cts.testclasses;
+
+import android.graphics.Color;
+import android.graphics.Point;
+import android.uirendering.cts.bitmapverifiers.SamplePointVerifier;
+
+import com.android.cts.uirendering.R;
+
+import android.test.suitebuilder.annotation.SmallTest;
+import android.uirendering.cts.testinfrastructure.ActivityTestBase;
+
+public class ShadowTests extends ActivityTestBase {
+    @SmallTest
+    public void testShadowLayout() {
+        createTest()
+                .addLayout(R.layout.simple_shadow_layout, null, true /* HW only */)
+                .runWithVerifier(
+                new SamplePointVerifier(
+                        new Point[] {
+                                // view area
+                                new Point(25, 64),
+                                new Point(64, 64),
+                                // shadow area
+                                new Point(25, 65),
+                                new Point(64, 65)
+                        },
+                        new int[] {
+                                Color.WHITE,
+                                Color.WHITE,
+                                Color.rgb(222, 222, 222),
+                                Color.rgb(222, 222, 222),
+                        }));
+    }
+}
\ No newline at end of file
diff --git a/tests/tests/view/res/layout/view_layout.xml b/tests/tests/view/res/layout/view_layout.xml
index fa817dc..e6e1550 100644
--- a/tests/tests/view/res/layout/view_layout.xml
+++ b/tests/tests/view/res/layout/view_layout.xml
@@ -34,6 +34,7 @@
         android:layout_height="200px"
         android:scrollbars="horizontal|vertical"
         android:fadingEdge="horizontal|vertical"
+        android:scrollIndicators="top|bottom"
         android:fadingEdgeLength="20px"/>
 
     <android.view.cts.MockView
diff --git a/tests/tests/view/src/android/view/cts/ViewStubTest.java b/tests/tests/view/src/android/view/cts/ViewStubTest.java
index 53e251a..cbe498f 100644
--- a/tests/tests/view/src/android/view/cts/ViewStubTest.java
+++ b/tests/tests/view/src/android/view/cts/ViewStubTest.java
@@ -157,13 +157,16 @@
 
     public void testAccessInflatedId() {
         ViewStub viewStub = new ViewStub(mContext);
-        assertEquals(0, viewStub.getInflatedId());
+        assertEquals("Default ViewStub inflated ID is View.NO_ID",
+                View.NO_ID, viewStub.getInflatedId());
 
         viewStub.setInflatedId(R.id.inflated_id);
-        assertEquals(R.id.inflated_id, viewStub.getInflatedId());
+        assertEquals("Set ViewStub inflated ID to package resource ID",
+                R.id.inflated_id, viewStub.getInflatedId());
 
-        viewStub.setInflatedId(-1);
-        assertEquals(-1, viewStub.getInflatedId());
+        viewStub.setInflatedId(View.NO_ID);
+        assertEquals("Set ViewStub inflated ID to View.NO_ID",
+                View.NO_ID, viewStub.getInflatedId());
     }
 
     @UiThreadTest
diff --git a/tests/tests/view/src/android/view/cts/ViewTest.java b/tests/tests/view/src/android/view/cts/ViewTest.java
index 8194682..3024e40 100644
--- a/tests/tests/view/src/android/view/cts/ViewTest.java
+++ b/tests/tests/view/src/android/view/cts/ViewTest.java
@@ -2077,24 +2077,24 @@
         assertEquals(viewId, container.keyAt(0));
 
         container.clear();
-        container.put(viewId, new BaseSavedState(BaseSavedState.EMPTY_STATE));
+        container.put(viewId, new android.graphics.Rect());
         try {
             view.restoreHierarchyState(container);
-            fail("should throw IllegalArgumentException");
+            fail("Parcelable state must be an AbsSaveState, should throw IllegalArgumentException");
         } catch (IllegalArgumentException e) {
             // expected
         }
 
         try {
             view.restoreHierarchyState(null);
-            fail("should throw NullPointerException");
+            fail("Cannot pass null to restoreHierarchyState(), should throw NullPointerException");
         } catch (NullPointerException e) {
             // expected
         }
 
         try {
             view.saveHierarchyState(null);
-            fail("should throw NullPointerException");
+            fail("Cannot pass null to saveHierarchyState(), should throw NullPointerException");
         } catch (NullPointerException e) {
             // expected
         }
@@ -2434,7 +2434,6 @@
         assertFalse(view.hasCalledDrawableStateChanged());
         view.setPressed(true);
         assertTrue(view.hasCalledDrawableStateChanged());
-        assertFalse(view.hasCalledOnCreateDrawableState());
         assertTrue(Arrays.equals(MockView.getPressedEnabledStateSet(), view.getDrawableState()));
         assertTrue(view.hasCalledOnCreateDrawableState());
 
@@ -2445,7 +2444,6 @@
         view.refreshDrawableState();
         assertTrue(view.hasCalledDrawableStateChanged());
         assertTrue(mMockParent.hasChildDrawableStateChanged());
-        assertFalse(view.hasCalledOnCreateDrawableState());
         assertTrue(Arrays.equals(MockView.getPressedEnabledStateSet(), view.getDrawableState()));
         assertTrue(view.hasCalledOnCreateDrawableState());
     }
@@ -3111,6 +3109,24 @@
         assertEquals(fadingEdgeLength, view.getVerticalFadingEdgeLength());
     }
 
+    @UiThreadTest
+    public void testScrollIndicators() {
+        MockView view = (MockView) mActivity.findViewById(R.id.scroll_view);
+
+        assertEquals("Set indicators match those specified in XML",
+                View.SCROLL_INDICATOR_TOP | View.SCROLL_INDICATOR_BOTTOM,
+                view.getScrollIndicators());
+
+        view.setScrollIndicators(0);
+        assertEquals("Cleared indicators", 0, view.getScrollIndicators());
+
+        view.setScrollIndicators(View.SCROLL_INDICATOR_START | View.SCROLL_INDICATOR_RIGHT);
+        assertEquals("Set start and right indicators",
+                View.SCROLL_INDICATOR_START | View.SCROLL_INDICATOR_RIGHT,
+                view.getScrollIndicators());
+    }
+
     public void testOnStartAndFinishTemporaryDetach() throws Throwable {
         final MockListView listView = new MockListView(mActivity);
         List<String> items = Lists.newArrayList("1", "2", "3");
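testScrollIndicators above exercises the scroll-indicator flags that back the new android:scrollIndicators attribute. The two-argument setScrollIndicators(int, int) overload updates only the flags named in the mask, which is useful for toggling one edge without disturbing the others; a brief sketch, assuming some already-obtained View (the helper class is illustrative):

import android.view.View;

final class ScrollIndicatorSketch {
    // Enables the top indicator and clears the bottom one, leaving all other
    // indicator flags on the view untouched.
    static void showTopOnly(View v) {
        v.setScrollIndicators(
                View.SCROLL_INDICATOR_TOP,
                View.SCROLL_INDICATOR_TOP | View.SCROLL_INDICATOR_BOTTOM);
        // getScrollIndicators() now reports SCROLL_INDICATOR_TOP set and
        // SCROLL_INDICATOR_BOTTOM cleared.
    }
}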
diff --git a/tests/tests/webkit/src/android/webkit/cts/PostMessageTest.java b/tests/tests/webkit/src/android/webkit/cts/PostMessageTest.java
new file mode 100644
index 0000000..e393bb6
--- /dev/null
+++ b/tests/tests/webkit/src/android/webkit/cts/PostMessageTest.java
@@ -0,0 +1,222 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.webkit.cts;
+
+import android.cts.util.NullWebViewUtils;
+import android.cts.util.PollingCheck;
+import android.net.Uri;
+import android.test.ActivityInstrumentationTestCase2;
+import android.test.UiThreadTest;
+import android.webkit.WebMessage;
+import android.webkit.WebMessagePort;
+import android.webkit.WebView;
+
+import java.util.concurrent.CountDownLatch;
+import junit.framework.Assert;
+
+public class PostMessageTest extends ActivityInstrumentationTestCase2<WebViewCtsActivity> {
+    public static final long TIMEOUT = 20000L;
+
+    private WebView mWebView;
+    private WebViewOnUiThread mOnUiThread;
+
+    private static final String WEBVIEW_MESSAGE = "from_webview";
+    private static final String BASE_URI = "http://www.example.com";
+
+    public PostMessageTest() {
+        super("com.android.cts.webkit", WebViewCtsActivity.class);
+    }
+
+    @Override
+    protected void setUp() throws Exception {
+        super.setUp();
+        final WebViewCtsActivity activity = getActivity();
+        mWebView = activity.getWebView();
+        if (mWebView != null) {
+            mOnUiThread = new WebViewOnUiThread(this, mWebView);
+            mOnUiThread.getSettings().setJavaScriptEnabled(true);
+        }
+    }
+
+    @Override
+    protected void tearDown() throws Exception {
+        if (mOnUiThread != null) {
+            mOnUiThread.cleanUp();
+        }
+        super.tearDown();
+    }
+
+    private static final String TITLE_FROM_POST_MESSAGE =
+            "<!DOCTYPE html><html><body>"
+            + "    <script>"
+            + "        var received = '';"
+            + "        onmessage = function (e) {"
+            + "            received += e.data;"
+            + "            document.title = received; };"
+            + "    </script>"
+            + "</body></html>";
+
+    // Acks each received message from the message channel with a seq number.
+    private static final String CHANNEL_MESSAGE =
+            "<!DOCTYPE html><html><body>"
+            + "    <script>"
+            + "        var counter = 0;"
+            + "        onmessage = function (e) {"
+            + "            var myPort = e.ports[0];"
+            + "            myPort.onmessage = function (f) {"
+            + "                myPort.postMessage(f.data + counter++);"
+            + "            }"
+            + "        }"
+            + "   </script>"
+            + "</body></html>";
+
+    private void loadPage(String data) {
+        mOnUiThread.loadDataWithBaseURLAndWaitForCompletion(BASE_URI, data,
+                "text/html", "UTF-8", null);
+    }
+
+    private void waitForTitle(final String title) {
+        new PollingCheck(TIMEOUT) {
+            @Override
+            protected boolean check() {
+                return mOnUiThread.getTitle().equals(title);
+            }
+        }.run();
+    }
+
+    // Post a string message to main frame and make sure it is received.
+    public void testSimpleMessageToMainFrame() throws Throwable {
+        if (!NullWebViewUtils.isWebViewAvailable()) {
+            return;
+        }
+        loadPage(TITLE_FROM_POST_MESSAGE);
+        WebMessage message = new WebMessage(WEBVIEW_MESSAGE);
+        mOnUiThread.postMessageToMainFrame(message, Uri.parse(BASE_URI));
+        waitForTitle(WEBVIEW_MESSAGE);
+    }
+
+    // Post multiple messages to main frame and make sure they are received in
+    // correct order.
+    public void testMultipleMessagesToMainFrame() throws Throwable {
+        if (!NullWebViewUtils.isWebViewAvailable()) {
+            return;
+        }
+        loadPage(TITLE_FROM_POST_MESSAGE);
+        for (int i = 0; i < 10; i++) {
+            mOnUiThread.postMessageToMainFrame(new WebMessage(Integer.toString(i)),
+                    Uri.parse(BASE_URI));
+        }
+        waitForTitle("0123456789");
+    }
+
+    // Create a message channel and make sure it can be used for data transfer to/from js.
+    public void testMessageChannel() throws Throwable {
+        if (!NullWebViewUtils.isWebViewAvailable()) {
+            return;
+        }
+        loadPage(CHANNEL_MESSAGE);
+        final WebMessagePort[] channel = mOnUiThread.createWebMessageChannel();
+        WebMessage message = new WebMessage(WEBVIEW_MESSAGE, new WebMessagePort[]{channel[1]});
+        mOnUiThread.postMessageToMainFrame(message, Uri.parse(BASE_URI));
+        final int messageCount = 3;
+        final CountDownLatch latch = new CountDownLatch(messageCount);
+        runTestOnUiThread(new Runnable() {
+            @Override
+            public void run() {
+                for (int i = 0; i < messageCount; i++) {
+                    channel[0].postMessage(new WebMessage(WEBVIEW_MESSAGE + i));
+                }
+                channel[0].setWebMessageCallback(new WebMessagePort.WebMessageCallback() {
+                    @Override
+                    public void onMessage(WebMessagePort port, WebMessage message) {
+                        int i = messageCount - (int)latch.getCount();
+                        assertEquals(WEBVIEW_MESSAGE + i + i, message.getData());
+                        latch.countDown();
+                    }
+                });
+            }
+        });
+        // Wait for all the responses to arrive.
+        boolean ignore = latch.await(TIMEOUT, java.util.concurrent.TimeUnit.MILLISECONDS);
+    }
+
+    // Test that a message port that is closed cannot be used to send a message.
+    public void testClose() throws Throwable {
+        if (!NullWebViewUtils.isWebViewAvailable()) {
+            return;
+        }
+        loadPage(CHANNEL_MESSAGE);
+        final WebMessagePort[] channel = mOnUiThread.createWebMessageChannel();
+        WebMessage message = new WebMessage(WEBVIEW_MESSAGE, new WebMessagePort[]{channel[1]});
+        mOnUiThread.postMessageToMainFrame(message, Uri.parse(BASE_URI));
+        runTestOnUiThread(new Runnable() {
+            @Override
+            public void run() {
+                try {
+                    channel[0].close();
+                    channel[0].postMessage(new WebMessage(WEBVIEW_MESSAGE));
+                } catch (IllegalStateException ex) {
+                    // expect to receive an exception
+                    return;
+                }
+                Assert.fail("A closed port cannot be used to transfer messages");
+            }
+         });
+    }
+
+    // Sends a new message channel from JS to Java.
+    private static final String CHANNEL_FROM_JS =
+            "<!DOCTYPE html><html><body>"
+            + "    <script>"
+            + "        var counter = 0;"
+            + "        var mc = new MessageChannel();"
+            + "        var received = '';"
+            + "        mc.port1.onmessage = function (e) {"
+            + "               received = e.data;"
+            + "               document.title = e.data;"
+            + "        };"
+            + "        onmessage = function (e) {"
+            + "            var myPort = e.ports[0];"
+            + "            myPort.postMessage('', [mc.port2]);"
+            + "        };"
+            + "   </script>"
+            + "</body></html>";
+
+    // Test that a message port created in JS can be received and used for message transfer.
+    public void testReceiveMessagePort() throws Throwable {
+        final String hello = "HELLO";
+        if (!NullWebViewUtils.isWebViewAvailable()) {
+            return;
+        }
+        loadPage(CHANNEL_FROM_JS);
+        final WebMessagePort[] channel = mOnUiThread.createWebMessageChannel();
+        WebMessage message = new WebMessage(WEBVIEW_MESSAGE, new WebMessagePort[]{channel[1]});
+        mOnUiThread.postMessageToMainFrame(message, Uri.parse(BASE_URI));
+        runTestOnUiThread(new Runnable() {
+            @Override
+            public void run() {
+                channel[0].setWebMessageCallback(new WebMessagePort.WebMessageCallback() {
+                    @Override
+                    public void onMessage(WebMessagePort port, WebMessage message) {
+                        message.getPorts()[0].postMessage(new WebMessage(hello));
+                    }
+                });
+            }
+        });
+        waitForTitle(hello);
+    }
+}
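The tests above drive the new WebMessagePort API: createWebMessageChannel() returns a pair of entangled ports, one of which is handed to the page via a posted WebMessage, after which either end can post and receive messages. A condensed sketch of the underlying framework calls (the CTS WebViewOnUiThread wrapper is omitted); it assumes it runs on the UI thread against a page that listens for the transferred port:

import android.net.Uri;
import android.webkit.WebMessage;
import android.webkit.WebMessagePort;
import android.webkit.WebView;

final class MessageChannelSketch {
    // Transfers channel[1] to the main frame and keeps channel[0] on the Java side.
    static void openChannel(WebView webView) {
        final WebMessagePort[] channel = webView.createWebMessageChannel();
        webView.postWebMessage(
                new WebMessage("hello", new WebMessagePort[] { channel[1] }),
                Uri.parse("http://www.example.com"));
        channel[0].setWebMessageCallback(new WebMessagePort.WebMessageCallback() {
            @Override
            public void onMessage(WebMessagePort port, WebMessage message) {
                // Replies from the page arrive here.
            }
        });
        channel[0].postMessage(new WebMessage("ping"));
    }
}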
diff --git a/tests/tests/webkit/src/android/webkit/cts/WebViewClientTest.java b/tests/tests/webkit/src/android/webkit/cts/WebViewClientTest.java
index b053784..0697429 100644
--- a/tests/tests/webkit/src/android/webkit/cts/WebViewClientTest.java
+++ b/tests/tests/webkit/src/android/webkit/cts/WebViewClientTest.java
@@ -30,7 +30,6 @@
 import android.webkit.WebResourceError;
 import android.webkit.WebResourceRequest;
 import android.webkit.WebResourceResponse;
-import android.webkit.WebResourceResponseBase;
 import android.webkit.WebSettings;
 import android.webkit.WebView;
 import android.webkit.WebViewClient;
@@ -538,7 +537,7 @@
         private boolean mOnLoadResourceCalled;
         private int mOnReceivedErrorCode;
         private WebResourceError mOnReceivedResourceError;
-        private WebResourceResponseBase mOnReceivedHttpError;
+        private WebResourceResponse mOnReceivedHttpError;
         private boolean mOnFormResubmissionCalled;
         private boolean mDoUpdateVisitedHistoryCalled;
         private boolean mOnReceivedHttpAuthRequestCalled;
@@ -571,7 +570,7 @@
             return mOnReceivedResourceError;
         }
 
-        public WebResourceResponseBase hasOnReceivedHttpError() {
+        public WebResourceResponse hasOnReceivedHttpError() {
             return mOnReceivedHttpError;
         }
 
@@ -640,7 +639,7 @@
 
         @Override
         public void onReceivedHttpError(WebView view,  WebResourceRequest request,
-                WebResourceResponseBase errorResponse) {
+                WebResourceResponse errorResponse) {
             super.onReceivedHttpError(view, request, errorResponse);
             mOnReceivedHttpError = errorResponse;
         }
diff --git a/tests/tests/webkit/src/android/webkit/cts/WebViewTest.java b/tests/tests/webkit/src/android/webkit/cts/WebViewTest.java
index c5b77a2..dba2243 100755
--- a/tests/tests/webkit/src/android/webkit/cts/WebViewTest.java
+++ b/tests/tests/webkit/src/android/webkit/cts/WebViewTest.java
@@ -2466,7 +2466,7 @@
 
         mOnUiThread.loadUrl("about:blank");
 
-        mOnUiThread.insertVisualStateCallback(kRequest, new VisualStateCallback() {
+        mOnUiThread.postVisualStateCallback(kRequest, new VisualStateCallback() {
             public void onComplete(long requestId) {
                 assertEquals(kRequest, requestId);
                 callbackLatch.countDown();
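The rename above follows the final framework name, WebView.postVisualStateCallback(). A minimal sketch of the call outside the CTS wrapper, assuming a WebView used on the UI thread (the helper class is illustrative):

import android.webkit.WebView;

final class VisualStateSketch {
    // Requests a callback once the DOM state at the time of this call has been drawn.
    static void requestCallback(WebView webView, final long requestId) {
        webView.postVisualStateCallback(requestId, new WebView.VisualStateCallback() {
            @Override
            public void onComplete(long id) {
                // id == requestId; safe to, for example, reveal the WebView now.
            }
        });
    }
}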
diff --git a/tools/tradefed-host/src/com/android/cts/tradefed/testtype/DeqpTestRunner.java b/tools/tradefed-host/src/com/android/cts/tradefed/testtype/DeqpTestRunner.java
index 677bc43..43aaf98 100644
--- a/tools/tradefed-host/src/com/android/cts/tradefed/testtype/DeqpTestRunner.java
+++ b/tools/tradefed-host/src/com/android/cts/tradefed/testtype/DeqpTestRunner.java
@@ -19,6 +19,9 @@
 import com.android.tradefed.testtype.IBuildReceiver;
 import com.android.tradefed.testtype.IDeviceTest;
 import com.android.tradefed.testtype.IRemoteTest;
+import com.android.tradefed.util.IRunUtil;
+import com.android.tradefed.util.RunInterruptedException;
+import com.android.tradefed.util.RunUtil;
 
 import java.io.File;
 import java.io.FileNotFoundException;
@@ -49,8 +52,8 @@
     private static final String DEQP_ONDEVICE_APK = "com.drawelements.deqp.apk";
     private static final String DEQP_ONDEVICE_PKG = "com.drawelements.deqp";
     private static final String INCOMPLETE_LOG_MESSAGE = "Crash: Incomplete test log";
-    private static final String DEVICE_LOST_MESSAGE = "Crash: Device lost";
     private static final String SKIPPED_INSTANCE_LOG_MESSAGE = "Configuration skipped";
+    private static final String NOT_EXECUTABLE_LOG_MESSAGE = "Abort: Test cannot be executed";
     private static final String CASE_LIST_FILE_NAME = "/sdcard/dEQP-TestCaseList.txt";
     private static final String LOG_FILE_NAME = "/sdcard/TestLog.qpa";
     public static final String FEATURE_LANDSCAPE = "android.hardware.screen.landscape";
@@ -67,12 +70,14 @@
     private final Collection<TestIdentifier> mRemainingTests;
     private final Map<TestIdentifier, Set<BatchRunConfiguration>> mTestInstances;
     private final TestInstanceResultListener mInstanceListerner = new TestInstanceResultListener();
+    private final Map<TestIdentifier, Integer> mTestInstabilityRatings;
     private IAbi mAbi;
     private CtsBuildHelper mCtsBuild;
     private boolean mLogData = false;
     private ITestDevice mDevice;
     private Set<String> mDeviceFeatures;
     private Map<String, Boolean> mConfigQuerySupportCache = new HashMap<>();
+    private IRunUtil mRunUtil = RunUtil.getDefault();
 
     private IRecovery mDeviceRecovery = new Recovery();
     {
@@ -85,6 +90,7 @@
         mName = name;
         mRemainingTests = new LinkedList<>(tests); // avoid modifying arguments
         mTestInstances = parseTestInstances(tests, testInstances);
+        mTestInstabilityRatings = new HashMap<>();
     }
 
     /**
@@ -145,6 +151,15 @@
         mDeviceRecovery = deviceRecovery;
     }
 
+    /**
+     * Set IRunUtil.
+     *
+     * Exposed for unit testing.
+     */
+    public void setRunUtil(IRunUtil runUtil) {
+        mRunUtil = runUtil;
+    }
+
     private static final class CapabilityQueryFailureException extends Exception {
     };
 
@@ -253,15 +268,15 @@
         /**
          * Forward result to sink
          */
-        private void forwardFinalizedPendingResult() {
-            if (mRemainingTests.contains(mCurrentTestId)) {
-                final PendingResult result = mPendingResults.get(mCurrentTestId);
+        private void forwardFinalizedPendingResult(TestIdentifier testId) {
+            if (mRemainingTests.contains(testId)) {
+                final PendingResult result = mPendingResults.get(testId);
 
-                mPendingResults.remove(mCurrentTestId);
-                mRemainingTests.remove(mCurrentTestId);
+                mPendingResults.remove(testId);
+                mRemainingTests.remove(testId);
 
                 // Forward results to the sink
-                mSink.testStarted(mCurrentTestId);
+                mSink.testStarted(testId);
 
                 // Test Log
                 if (mLogData) {
@@ -270,9 +285,8 @@
                         final ByteArrayInputStreamSource source
                                 = new ByteArrayInputStreamSource(entry.getValue().getBytes());
 
-                        mSink.testLog(mCurrentTestId.getClassName() + "."
-                                + mCurrentTestId.getTestName() + "@" + entry.getKey().getId(),
-                                LogDataType.XML, source);
+                        mSink.testLog(testId.getClassName() + "." + testId.getTestName() + "@"
+                                + entry.getKey().getId(), LogDataType.XML, source);
 
                         source.cancel();
                     }
@@ -292,11 +306,11 @@
                         errorLog.append(entry.getValue());
                     }
 
-                    mSink.testFailed(mCurrentTestId, errorLog.toString());
+                    mSink.testFailed(testId, errorLog.toString());
                 }
 
                 final Map<String, String> emptyMap = Collections.emptyMap();
-                mSink.testEnded(mCurrentTestId, emptyMap);
+                mSink.testEnded(testId, emptyMap);
             }
         }
 
@@ -346,10 +360,29 @@
             result.errorMessages.put(mRunConfig, SKIPPED_INSTANCE_LOG_MESSAGE);
             result.remainingConfigs.remove(mRunConfig);
 
+            // Pending result finished, report result
             if (result.remainingConfigs.isEmpty()) {
-                // fake as if we actually run the test
-                mCurrentTestId = testId;
-                forwardFinalizedPendingResult();
+                forwardFinalizedPendingResult(testId);
+            }
+        }
+
+        /**
+         * Fake failure of an instance with the current config.
+         */
+        public void abortTest(TestIdentifier testId, String errorMessage) {
+            final PendingResult result = mPendingResults.get(testId);
+
+            // Mark as executed
+            result.allInstancesPassed = false;
+            result.errorMessages.put(mRunConfig, errorMessage);
+            result.remainingConfigs.remove(mRunConfig);
+
+            // Pending result finished, report result
+            if (result.remainingConfigs.isEmpty()) {
+                forwardFinalizedPendingResult(testId);
+            }
+
+            if (testId.equals(mCurrentTestId)) {
                 mCurrentTestId = null;
             }
         }
@@ -402,7 +435,7 @@
 
                 // Pending result finished, report result
                 if (result.remainingConfigs.isEmpty()) {
-                    forwardFinalizedPendingResult();
+                    forwardFinalizedPendingResult(mCurrentTestId);
                 }
             } else {
                 CLog.w("Got unexpected end of %s", mCurrentTestId);
@@ -461,7 +494,7 @@
 
                 // Pending result finished, report result
                 if (result.remainingConfigs.isEmpty()) {
-                    forwardFinalizedPendingResult();
+                    forwardFinalizedPendingResult(mCurrentTestId);
                 }
             } else {
                 CLog.w("Got unexpected termination of %s", mCurrentTestId);
@@ -506,41 +539,20 @@
         }
 
         /**
-         * Signal listener that batch ended to flush incomplete results.
+         * Signal listener that batch ended and forget incomplete results.
          */
         public void endBatch() {
             // end open test if when stream ends
             if (mCurrentTestId != null) {
-                final Map<String, String> emptyMap = Collections.emptyMap();
-                handleEndTestCase(emptyMap);
-            }
-        }
-
-        /**
-         * Signal listener that device just died.
-         */
-        public void onDeviceLost() {
-            if (mCurrentTestId != null) {
-                final PendingResult result = mPendingResults.get(mCurrentTestId);
-
-                if (result == null) {
-                    CLog.e("Device lost in invalid state: %s", mCurrentTestId);
-                    return;
+                // The current instance was removed from remainingConfigs when the case
+                // started; mark it as pending again.
+                if (mPendingResults.get(mCurrentTestId) != null) {
+                    mPendingResults.get(mCurrentTestId).remainingConfigs.add(mRunConfig);
+                } else {
+                    CLog.w("Got unexpected internal state of %s", mCurrentTestId);
                 }
-
-                // kill current test
-                result.allInstancesPassed = false;
-                result.errorMessages.put(mRunConfig, DEVICE_LOST_MESSAGE);
-
-                if (mLogData && mCurrentTestLog != null && mCurrentTestLog.length() > 0) {
-                    result.testLogs.put(mRunConfig, mCurrentTestLog);
-                }
-
-                // finish all pending instances
-                result.remainingConfigs.clear();
-                forwardFinalizedPendingResult();
-                mCurrentTestId = null;
             }
+            mCurrentTestId = null;
         }
     }
 
@@ -553,6 +565,8 @@
         private Map<String, String> mValues;
         private String mCurrentName;
         private String mCurrentValue;
+        private int mResultCode;
+        private boolean mGotExitValue = false;
 
 
         public InstrumentationParser(TestInstanceResultListener listener) {
@@ -592,6 +606,13 @@
 
                     mCurrentName = line.substring(nameBegin, nameEnd);
                     mCurrentValue = line.substring(valueBegin);
+                } else if (line.startsWith("INSTRUMENTATION_CODE: ")) {
+                    try {
+                        mResultCode = Integer.parseInt(line.substring(22));
+                        mGotExitValue = true;
+                    } catch (NumberFormatException ex) {
+                        CLog.w("Instrumentation code format unexpected");
+                    }
                 } else if (mCurrentValue != null) {
                     mCurrentValue = mCurrentValue + line;
                 }
@@ -623,6 +644,20 @@
         public boolean isCancelled() {
             return false;
         }
+
+        /**
+         * Returns whether target instrumentation exited normally.
+         */
+        public boolean wasSuccessful() {
+            return mGotExitValue;
+        }
+
+        /**
+         * Returns the Instrumentation return code.
+         */
+        public int getResultCode() {
+            return mResultCode;
+        }
     }
 
     /**
@@ -665,10 +700,16 @@
             return false;
         }
 
+        /**
+         * Returns whether target instrumentation exited normally.
+         */
         public boolean wasSuccessful() {
             return mGotExitValue;
         }
 
+        /**
+         * Returns the Instrumentation return code.
+         */
         public int getResultCode() {
             return mResultCode;
         }
@@ -740,6 +781,7 @@
      */
     public static class Recovery implements IRecovery {
         private int RETRY_COOLDOWN_MS = 6000; // 6 seconds
+        private int PROCESS_KILL_WAIT_MS = 1000; // 1 second
 
         private static enum MachineState {
             WAIT, // recover by waiting
@@ -752,6 +794,9 @@
         private ITestDevice mDevice;
         private ISleepProvider mSleepProvider;
 
+        private static class ProcessKillFailureException extends Exception {
+        }
+
         /**
          * {@inheritDoc}
          */
@@ -835,6 +880,9 @@
                     } catch (DeviceNotAvailableException ex) {
                         // chain forward
                         recoverComLinkKilled();
+                    } catch (ProcessKillFailureException ex) {
+                        // chain forward
+                        recoverComLinkKilled();
                     }
                     break;
 
@@ -849,6 +897,9 @@
                     } catch (DeviceNotAvailableException ex) {
                         // chain forward
                         recoverComLinkKilled();
+                    } catch (ProcessKillFailureException ex) {
+                        // chain forward
+                        recoverComLinkKilled();
                     }
                     break;
 
@@ -876,7 +927,8 @@
             mSleepProvider.sleep(RETRY_COOLDOWN_MS);
         }
 
-        private void killDeqpProcess() throws DeviceNotAvailableException {
+        private Iterable<Integer> getDeqpProcessPids() throws DeviceNotAvailableException {
+            final List<Integer> pids = new ArrayList<Integer>(2);
             final String processes = mDevice.executeShellCommand("ps | grep com.drawelements");
             final String[] lines = processes.split("(\\r|\\n)+");
             for (String line : lines) {
@@ -885,15 +937,29 @@
                     continue;
                 }
 
-                final int processId;
                 try {
-                    processId = Integer.parseInt(fields[1], 10);
+                    final int processId = Integer.parseInt(fields[1], 10);
+                    pids.add(processId);
                 } catch (NumberFormatException ex) {
                     continue;
                 }
+            }
+            return pids;
+        }
 
+        private void killDeqpProcess() throws DeviceNotAvailableException,
+                ProcessKillFailureException {
+            for (Integer processId : getDeqpProcessPids()) {
                 mDevice.executeShellCommand(String.format("kill -9 %d", processId));
             }
+
+            mSleepProvider.sleep(PROCESS_KILL_WAIT_MS);
+
+            // check that processes actually died
+            if (getDeqpProcessPids().iterator().hasNext()) {
+                // a process is still alive, killing failed
+                throw new ProcessKillFailureException();
+            }
         }
 
         public void recoverDevice() throws DeviceNotAvailableException {
@@ -1068,83 +1134,180 @@
         return generateTestCaseTrieFromPaths(testPaths);
     }
 
-    /**
-     * Executes tests on the device.
-     */
-    private void runTests() throws DeviceNotAvailableException, CapabilityQueryFailureException {
-        mDeviceRecovery.setDevice(mDevice);
+    private static class TestBatch {
+        public BatchRunConfiguration config;
+        public List<TestIdentifier> tests;
+    }
 
-        while (!mRemainingTests.isEmpty()) {
-            // select tests for the batch
-            final ArrayList<TestIdentifier> batchTests = new ArrayList<>(TESTCASE_BATCH_LIMIT);
-            for (TestIdentifier test : mRemainingTests) {
-                batchTests.add(test);
-                if (batchTests.size() >= TESTCASE_BATCH_LIMIT) {
+    private TestBatch selectRunBatch() {
+        return selectRunBatch(mRemainingTests, null);
+    }
+
+    /**
+     * Creates a TestBatch from the given tests, or returns null if no tests remain.
+     *
+     * @param pool List of tests to select from
+     * @param requiredConfig Select only instances with a pending requiredConfig, or null to select
+     *        any run configuration.
+     */
+    private TestBatch selectRunBatch(Collection<TestIdentifier> pool,
+            BatchRunConfiguration requiredConfig) {
+        // select one test (leading test) that is going to be executed and then pack along as many
+        // other compatible instances as possible.
+
+        TestIdentifier leadingTest = null;
+        for (TestIdentifier test : pool) {
+            if (!mRemainingTests.contains(test)) {
+                continue;
+            }
+            if (requiredConfig != null &&
+                    !mInstanceListerner.isPendingTestInstance(test, requiredConfig)) {
+                continue;
+            }
+            leadingTest = test;
+            break;
+        }
+
+        // no remaining tests?
+        if (leadingTest == null) {
+            return null;
+        }
+
+        BatchRunConfiguration leadingTestConfig = null;
+        if (requiredConfig != null) {
+            leadingTestConfig = requiredConfig;
+        } else {
+            for (BatchRunConfiguration runConfig : getTestRunConfigs(leadingTest)) {
+                if (mInstanceListerner.isPendingTestInstance(leadingTest, runConfig)) {
+                    leadingTestConfig = runConfig;
                     break;
                 }
             }
+        }
 
-            // find union of all run configurations
-            final Set<BatchRunConfiguration> allConfigs = new LinkedHashSet<>();
-            for (TestIdentifier test : batchTests) {
-                allConfigs.addAll(getTestRunConfigs(test));
+        // test pending <=> test has a pending config
+        if (leadingTestConfig == null) {
+            throw new AssertionError("search postcondition failed");
+        }
+
+        final int leadingInstability = getTestInstabilityRating(leadingTest);
+
+        final TestBatch runBatch = new TestBatch();
+        runBatch.config = leadingTestConfig;
+        runBatch.tests = new ArrayList<>();
+        runBatch.tests.add(leadingTest);
+
+        for (TestIdentifier test : pool) {
+            if (test == leadingTest) {
+                // do not re-select the leading tests
+                continue;
             }
-
-            // prepare instance listener
-            for (TestIdentifier test : batchTests) {
-                mInstanceListerner.setTestInstances(test, getTestRunConfigs(test));
+            if (!mInstanceListerner.isPendingTestInstance(test, leadingTestConfig)) {
+                // select only compatible
+                continue;
             }
-
-            // run batch for all configurations
-            for (BatchRunConfiguration runConfig : allConfigs) {
-                final ArrayList<TestIdentifier> relevantTests =
-                        new ArrayList<>(TESTCASE_BATCH_LIMIT);
-
-                // run only for declared run configs and only if test has not already
-                // been attempted to run
-                for (TestIdentifier test : batchTests) {
-                    if (mInstanceListerner.isPendingTestInstance(test, runConfig)) {
-                        relevantTests.add(test);
-                    }
-                }
-
-                if (!relevantTests.isEmpty()) {
-                    runTestRunBatch(relevantTests, runConfig);
-                }
+            if (getTestInstabilityRating(test) != leadingInstability) {
+                // Pack along only cases in the same stability category. Packing less stable
+                // tests along jeopardizes the stability of this run, while packing more stable
+                // tests along jeopardizes their stability rating.
+                continue;
             }
+            if (runBatch.tests.size() >= getBatchSizeLimitForInstability(leadingInstability)) {
+                // batch size is limited.
+                break;
+            }
+            runBatch.tests.add(test);
+        }
+
+        return runBatch;
+    }
+
+    private int getBatchNumPendingCases(TestBatch batch) {
+        int numPending = 0;
+        for (TestIdentifier test : batch.tests) {
+            if (mInstanceListerner.isPendingTestInstance(test, batch.config)) {
+                ++numPending;
+            }
+        }
+        return numPending;
+    }
+
+    private int getBatchSizeLimitForInstability(int batchInstabilityRating) {
+        // reduce group size exponentially down to one
+        return Math.max(1, TESTCASE_BATCH_LIMIT / (1 << batchInstabilityRating));
+    }
+
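    // A worked illustration of the clamp above, assuming a hypothetical limit of 1000 (the real
    // TESTCASE_BATCH_LIMIT is defined elsewhere in this class):
    //
    //     Math.max(1, 1000 / (1 << 0))  == 1000   // stable test, full batch
    //     Math.max(1, 1000 / (1 << 1))  == 500    // failed once, half batch
    //     Math.max(1, 1000 / (1 << 10)) == 1      // repeatedly unstable, runs alone
    //
    // Running alone is what lets executeTestRunBatchRun always make progress.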
+    private int getTestInstabilityRating(TestIdentifier testId) {
+        if (mTestInstabilityRatings.containsKey(testId)) {
+            return mTestInstabilityRatings.get(testId);
+        } else {
+            return 0;
         }
     }
 
-    private void runTestRunBatch(Collection<TestIdentifier> tests, BatchRunConfiguration runConfig)
-            throws DeviceNotAvailableException, CapabilityQueryFailureException {
-        boolean isSupportedConfig = true;
+    private void recordTestInstability(TestIdentifier testId) {
+        mTestInstabilityRatings.put(testId, getTestInstabilityRating(testId) + 1);
+    }
 
+    private void clearTestInstability(TestIdentifier testId) {
+        mTestInstabilityRatings.put(testId, 0);
+    }
+
+    /**
+     * Executes all tests on the device.
+     */
+    private void runTests() throws DeviceNotAvailableException, CapabilityQueryFailureException {
+        for (;;) {
+            TestBatch batch = selectRunBatch();
+
+            if (batch == null) {
+                break;
+            }
+
+            runTestRunBatch(batch);
+        }
+    }
+
+    /**
+     * Runs a TestBatch by either faking it or executing it on a device.
+     */
+    private void runTestRunBatch(TestBatch batch) throws DeviceNotAvailableException,
+            CapabilityQueryFailureException {
+        // prepare instance listener
+        mInstanceListerner.setCurrentConfig(batch.config);
+        for (TestIdentifier test : batch.tests) {
+            mInstanceListerner.setTestInstances(test, getTestRunConfigs(test));
+        }
+
+        // execute only if config is executable, else fake results
+        if (isSupportedRunConfiguration(batch.config)) {
+            executeTestRunBatch(batch);
+        } else {
+            fakePassTestRunBatch(batch);
+        }
+    }
+
+    private boolean isSupportedRunConfiguration(BatchRunConfiguration runConfig)
+            throws DeviceNotAvailableException, CapabilityQueryFailureException {
         // orientation support
         if (!BatchRunConfiguration.ROTATION_UNSPECIFIED.equals(runConfig.getRotation())) {
             final Set<String> features = getDeviceFeatures(mDevice);
 
             if (isPortraitClassRotation(runConfig.getRotation()) &&
                     !features.contains(FEATURE_PORTRAIT)) {
-                isSupportedConfig = false;
+                return false;
             }
             if (isLandscapeClassRotation(runConfig.getRotation()) &&
                     !features.contains(FEATURE_LANDSCAPE)) {
-                isSupportedConfig = false;
+                return false;
             }
         }
 
-        // renderability support for OpenGL ES tests
-        if (isSupportedConfig && isOpenGlEsPackage()) {
-            isSupportedConfig = isSupportedGlesRenderConfig(runConfig);
-        }
-
-        mInstanceListerner.setCurrentConfig(runConfig);
-
-        // execute only if config is executable, else fake results
-        if (isSupportedConfig) {
-            executeTestRunBatch(tests, runConfig);
+        if (isOpenGlEsPackage()) {
+            // renderability support for OpenGL ES tests
+            return isSupportedGlesRenderConfig(runConfig);
         } else {
-            fakePassTestRunBatch(tests, runConfig);
+            return true;
         }
     }
 
@@ -1186,9 +1349,68 @@
         }
     }
 
-    private void executeTestRunBatch(Collection<TestIdentifier> tests,
-            BatchRunConfiguration runConfig) throws DeviceNotAvailableException {
-        final String testCases = generateTestCaseTrie(tests);
+    /**
+     * Executes the given test batch on a device.
+     */
+    private void executeTestRunBatch(TestBatch batch) throws DeviceNotAvailableException {
+        // attempt full run once
+        executeTestRunBatchRun(batch);
+
+        // split the remaining tests into two sub-batches and execute both. This will terminate
+        // since executeTestRunBatchRun always makes progress for a batch of size 1.
+        final ArrayList<TestIdentifier> pendingTests = new ArrayList<>();
+
+        for (TestIdentifier test : batch.tests) {
+            if (mInstanceListerner.isPendingTestInstance(test, batch.config)) {
+                pendingTests.add(test);
+            }
+        }
+
+        final int divisorNdx = pendingTests.size() / 2;
+        final List<TestIdentifier> headList = pendingTests.subList(0, divisorNdx);
+        final List<TestIdentifier> tailList = pendingTests.subList(divisorNdx, pendingTests.size());
+
+        // head
+        for (;;) {
+            TestBatch subBatch = selectRunBatch(headList, batch.config);
+
+            if (subBatch == null) {
+                break;
+            }
+
+            executeTestRunBatch(subBatch);
+        }
+
+        // tail
+        for (;;) {
+            TestBatch subBatch = selectRunBatch(tailList, batch.config);
+
+            if (subBatch == null) {
+                break;
+            }
+
+            executeTestRunBatch(subBatch);
+        }
+
+        if (getBatchNumPendingCases(batch) != 0) {
+            throw new AssertionError("executeTestRunBatch postcondition failed");
+        }
+    }
+
+    /**
+     * Runs one execution pass over the given batch.
+     *
+     * Tries to run the batch. Always makes progress (executes instances or modifies stability
+     * scores).
+     */
+    private void executeTestRunBatchRun(TestBatch batch) throws DeviceNotAvailableException {
+        if (getBatchNumPendingCases(batch) != batch.tests.size()) {
+            throw new AssertionError("executeTestRunBatchRun precondition failed");
+        }
+
+        checkInterrupted(); // throws if interrupted
+
+        final String testCases = generateTestCaseTrie(batch.tests);
 
         mDevice.executeShellCommand("rm " + CASE_LIST_FILE_NAME);
         mDevice.executeShellCommand("rm " + LOG_FILE_NAME);
@@ -1201,7 +1423,7 @@
         deqpCmdLine.append("--deqp-caselist-file=");
         deqpCmdLine.append(CASE_LIST_FILE_NAME);
         deqpCmdLine.append(" ");
-        deqpCmdLine.append(getRunConfigDisplayCmdLine(runConfig));
+        deqpCmdLine.append(getRunConfigDisplayCmdLine(batch.config));
 
         // If we are not logging data, do not bother outputting the images from the test exe.
         if (!mLogData) {
@@ -1228,40 +1450,88 @@
             parser.flush();
         }
 
-        try {
-            final boolean progressedSinceLastCall =
-                    mInstanceListerner.getCurrentTestId() != null ||
-                    getNumRemainingInstances() < numRemainingInstancesBefore;
+        final boolean progressedSinceLastCall = mInstanceListerner.getCurrentTestId() != null ||
+                getNumRemainingInstances() < numRemainingInstancesBefore;
 
-            if (progressedSinceLastCall) {
-                mDeviceRecovery.onExecutionProgressed();
-            }
+        if (progressedSinceLastCall) {
+            mDeviceRecovery.onExecutionProgressed();
+        }
 
-            if (interruptingError == null) {
-                // execution finished successfully, do nothing
-            } else if (interruptingError instanceof AdbComLinkOpenError) {
+        // interrupted, try to recover
+        if (interruptingError != null) {
+            if (interruptingError instanceof AdbComLinkOpenError) {
                 mDeviceRecovery.recoverConnectionRefused();
             } else if (interruptingError instanceof AdbComLinkKilledError) {
                 mDeviceRecovery.recoverComLinkKilled();
+            } else if (interruptingError instanceof RunInterruptedException) {
+                // external run interruption request. Terminate immediately.
+                throw (RunInterruptedException)interruptingError;
             } else {
                 CLog.e(interruptingError);
                 throw new RuntimeException(interruptingError);
             }
-        } catch (DeviceNotAvailableException ex) {
-            // Device lost. We must signal the tradedef by rethrowing this execption. However,
-            // there is a possiblity that the device loss was caused by the currently run test
-            // instance. Since CtsTest is unaware of tests with only some instances executed,
-            // continuing the session after device has recovered will create a new DeqpTestRunner
-            // with current test in its run queue and this will cause the re-execution of this same
-            // instance. If the instance reliably can kill the device, the CTS cannot recover.
-            //
-            // Prevent this by terminating ALL instances of a tests if any of them causes a device
-            // loss.
-            mInstanceListerner.onDeviceLost();
-            throw ex;
-        } finally {
-            mInstanceListerner.endBatch();
+
+            // recoverXXX did not throw => recovery succeeded
+        } else if (!parser.wasSuccessful()) {
+            mDeviceRecovery.recoverComLinkKilled();
+            // recoverXXX did not throw => recovery succeeded
         }
+
+        // Progress guarantees.
+        if (batch.tests.size() == 1) {
+            final TestIdentifier onlyTest = batch.tests.iterator().next();
+            final boolean wasTestExecuted =
+                    !mInstanceListerner.isPendingTestInstance(onlyTest, batch.config) &&
+                    mInstanceListerner.getCurrentTestId() == null;
+            final boolean wasLinkFailure = !parser.wasSuccessful() || interruptingError != null;
+
+            // Link failures can be caused by external events; require at least two observations
+            // before bailing.
+            if (!wasTestExecuted && (!wasLinkFailure || getTestInstabilityRating(onlyTest) > 0)) {
+                recordTestInstability(onlyTest);
+                // If we cannot finish the test, mark the case as a crash.
+                //
+                // If we couldn't even start the test, fail the test instance as non-executable.
+                // This is required so that consistently crashing or non-existent tests will
+                // not cause futile (non-terminating) re-execution attempts.
+                if (mInstanceListerner.getCurrentTestId() != null) {
+                    mInstanceListerner.abortTest(onlyTest, INCOMPLETE_LOG_MESSAGE);
+                } else {
+                    mInstanceListerner.abortTest(onlyTest, NOT_EXECUTABLE_LOG_MESSAGE);
+                }
+            } else if (wasTestExecuted) {
+                clearTestInstability(onlyTest);
+            }
+        } else {
+            // Analyze results to update test stability ratings. If there is no interrupting test
+            // logged, increase the instability rating of all remaining tests. If there is an
+            // interrupting test logged, increase only its instability rating.
+            //
+            // A successful run of tests clears instability rating.
+            if (mInstanceListerner.getCurrentTestId() == null) {
+                for (TestIdentifier test : batch.tests) {
+                    if (mInstanceListerner.isPendingTestInstance(test, batch.config)) {
+                        recordTestInstability(test);
+                    } else {
+                        clearTestInstability(test);
+                    }
+                }
+            } else {
+                recordTestInstability(mInstanceListerner.getCurrentTestId());
+                for (TestIdentifier test : batch.tests) {
+                    // \note: isPendingTestInstance is false for getCurrentTestId. Current ID is
+                    // considered 'running' and will be restored to 'pending' in endBatch().
+                    if (!test.equals(mInstanceListerner.getCurrentTestId()) &&
+                            !mInstanceListerner.isPendingTestInstance(test, batch.config)) {
+                        clearTestInstability(test);
+                    }
+                }
+            }
+        }
+
+        mInstanceListerner.endBatch();
     }
 
     private static String getRunConfigDisplayCmdLine(BatchRunConfiguration runConfig) {
@@ -1303,13 +1573,21 @@
     }
 
     /**
+     * Checks if this execution has been marked as interrupted and throws if it has.
+     */
+    private void checkInterrupted() throws RunInterruptedException {
+        // Work around the API. RunUtil::checkInterrupted is private but we can call it indirectly
+        // by calling sleep() with a value <= 0.
+        mRunUtil.sleep(0);
+    }
+
+    /**
      * Pass given batch tests without running it
      */
-    private void fakePassTestRunBatch(Collection<TestIdentifier> tests,
-            BatchRunConfiguration runConfig) {
-        for (TestIdentifier test : tests) {
+    private void fakePassTestRunBatch(TestBatch batch) {
+        for (TestIdentifier test : batch.tests) {
             CLog.d("Skipping test '%s' invocation in config '%s'", test.toString(),
-                    runConfig.getId());
+                    batch.config.getId());
             mInstanceListerner.skipTest(test);
         }
     }
@@ -1543,6 +1821,7 @@
                 installTestApk();
 
                 mInstanceListerner.setSink(listener);
+                mDeviceRecovery.setDevice(mDevice);
                 runTests();
 
                 uninstallTestApk();
diff --git a/tools/tradefed-host/tests/src/com/android/cts/tradefed/testtype/DeqpTestRunnerTest.java b/tools/tradefed-host/tests/src/com/android/cts/tradefed/testtype/DeqpTestRunnerTest.java
index 5a05049..7ec09c9 100644
--- a/tools/tradefed-host/tests/src/com/android/cts/tradefed/testtype/DeqpTestRunnerTest.java
+++ b/tools/tradefed-host/tests/src/com/android/cts/tradefed/testtype/DeqpTestRunnerTest.java
@@ -27,6 +27,8 @@
 import com.android.tradefed.device.ITestDevice;
 import com.android.tradefed.result.ITestInvocationListener;
 import com.android.tradefed.testtype.IAbi;
+import com.android.tradefed.util.IRunUtil;
+import com.android.tradefed.util.RunInterruptedException;
 
 import junit.framework.TestCase;
 
@@ -71,6 +73,43 @@
         DEFAULT_INSTANCE_ARGS.iterator().next().put("surfacetype", "window");
     }
 
+    private static class StubRecovery implements DeqpTestRunner.IRecovery {
+        /**
+         * {@inheritDoc}
+         */
+        @Override
+        public void setSleepProvider(DeqpTestRunner.ISleepProvider sleepProvider) {
+        }
+
+        /**
+         * {@inheritDoc}
+         */
+        @Override
+        public void setDevice(ITestDevice device) {
+        }
+
+        /**
+         * {@inheritDoc}
+         */
+        @Override
+        public void onExecutionProgressed() {
+        }
+
+        /**
+         * {@inheritDoc}
+         */
+        @Override
+        public void recoverConnectionRefused() throws DeviceNotAvailableException {
+        }
+
+        /**
+         * {@inheritDoc}
+         */
+        @Override
+        public void recoverComLinkKilled() throws DeviceNotAvailableException {
+        }
+    };
+
     /**
      * {@inheritDoc}
      */
@@ -512,6 +551,125 @@
     }
 
     /**
+     * Test running an unexecutable test.
+     */
+    public void testRun_unexecutableTests() throws Exception {
+        final String instrumentationAnswerNoExecs =
+                "INSTRUMENTATION_STATUS: dEQP-SessionInfo-Name=releaseName\r\n"
+                + "INSTRUMENTATION_STATUS: dEQP-EventType=SessionInfo\r\n"
+                + "INSTRUMENTATION_STATUS: dEQP-SessionInfo-Value=2014.x\r\n"
+                + "INSTRUMENTATION_STATUS_CODE: 0\r\n"
+                + "INSTRUMENTATION_STATUS: dEQP-SessionInfo-Name=releaseId\r\n"
+                + "INSTRUMENTATION_STATUS: dEQP-EventType=SessionInfo\r\n"
+                + "INSTRUMENTATION_STATUS: dEQP-SessionInfo-Value=0xcafebabe\r\n"
+                + "INSTRUMENTATION_STATUS_CODE: 0\r\n"
+                + "INSTRUMENTATION_STATUS: dEQP-SessionInfo-Name=targetName\r\n"
+                + "INSTRUMENTATION_STATUS: dEQP-EventType=SessionInfo\r\n"
+                + "INSTRUMENTATION_STATUS: dEQP-SessionInfo-Value=android\r\n"
+                + "INSTRUMENTATION_STATUS_CODE: 0\r\n"
+                + "INSTRUMENTATION_STATUS: dEQP-EventType=BeginSession\r\n"
+                + "INSTRUMENTATION_STATUS_CODE: 0\r\n"
+                + "INSTRUMENTATION_STATUS: dEQP-EventType=EndSession\r\n"
+                + "INSTRUMENTATION_STATUS_CODE: 0\r\n"
+                + "INSTRUMENTATION_CODE: 0\r\n";
+
+        final TestIdentifier[] testIds = {
+                new TestIdentifier("dEQP-GLES3.missing", "no"),
+                new TestIdentifier("dEQP-GLES3.missing", "nope"),
+                new TestIdentifier("dEQP-GLES3.missing", "donotwant"),
+        };
+
+        final String[] testPaths = {
+                "dEQP-GLES3.missing.no",
+                "dEQP-GLES3.missing.nope",
+                "dEQP-GLES3.missing.donotwant",
+        };
+
+        ITestDevice mockDevice = EasyMock.createMock(ITestDevice.class);
+        ITestInvocationListener mockListener
+                = EasyMock.createStrictMock(ITestInvocationListener.class);
+        IDevice mockIDevice = EasyMock.createMock(IDevice.class);
+
+        Collection<TestIdentifier> tests = new ArrayList<TestIdentifier>();
+        Map<TestIdentifier, List<Map<String, String>>> instances = new HashMap<>();
+
+        for (TestIdentifier id : testIds) {
+            tests.add(id);
+            instances.put(id, DEFAULT_INSTANCE_ARGS);
+        }
+
+        DeqpTestRunner deqpTest = new DeqpTestRunner(NAME, NAME, tests, instances);
+        deqpTest.setAbi(UnitTests.ABI);
+
+        int version = 3 << 16;
+        EasyMock.expect(mockDevice.getProperty("ro.opengles.version"))
+                .andReturn(Integer.toString(version)).atLeastOnce();
+
+        EasyMock.expect(mockDevice.uninstallPackage(EasyMock.eq(DEQP_ONDEVICE_PKG))).andReturn("")
+                .once();
+        EasyMock.expect(mockDevice.installPackage(EasyMock.<File>anyObject(),
+                EasyMock.eq(true), EasyMock.eq(AbiUtils.createAbiFlag(UnitTests.ABI.getName()))))
+                .andReturn(null).once();
+
+        expectRenderConfigQuery(mockDevice, 3, 0);
+
+        String commandLine = String.format(
+                "--deqp-caselist-file=%s --deqp-gl-config-name=rgba8888d24s8 "
+                + "--deqp-screen-rotation=unspecified "
+                + "--deqp-surface-type=window "
+                + "--deqp-log-images=disable "
+                + "--deqp-watchdog=enable",
+                CASE_LIST_FILE_NAME);
+
+        // first try
+        runInstrumentationLineAndAnswer(mockDevice, mockIDevice,
+                "{dEQP-GLES3{missing{no,nope,donotwant}}}", commandLine, instrumentationAnswerNoExecs);
+
+        // splitting begins
+        runInstrumentationLineAndAnswer(mockDevice, mockIDevice,
+                "{dEQP-GLES3{missing{no}}}", commandLine, instrumentationAnswerNoExecs);
+        runInstrumentationLineAndAnswer(mockDevice, mockIDevice,
+                "{dEQP-GLES3{missing{nope,donotwant}}}", commandLine, instrumentationAnswerNoExecs);
+        runInstrumentationLineAndAnswer(mockDevice, mockIDevice,
+                "{dEQP-GLES3{missing{nope}}}", commandLine, instrumentationAnswerNoExecs);
+        runInstrumentationLineAndAnswer(mockDevice, mockIDevice,
+                "{dEQP-GLES3{missing{donotwant}}}", commandLine, instrumentationAnswerNoExecs);
+
+        mockListener.testRunStarted(ID, testPaths.length);
+        EasyMock.expectLastCall().once();
+
+        for (int i = 0; i < testPaths.length; i++) {
+            mockListener.testStarted(EasyMock.eq(testIds[i]));
+            EasyMock.expectLastCall().once();
+
+            mockListener.testFailed(EasyMock.eq(testIds[i]),
+                    EasyMock.eq("=== with config {glformat=rgba8888d24s8,rotation=unspecified,surfacetype=window} ===\n"
+                    + "Abort: Test cannot be executed"));
+            EasyMock.expectLastCall().once();
+
+            mockListener.testEnded(EasyMock.eq(testIds[i]),
+                    EasyMock.<Map<String, String>>notNull());
+            EasyMock.expectLastCall().once();
+        }
+
+        mockListener.testRunEnded(EasyMock.anyLong(), EasyMock.<Map<String, String>>notNull());
+        EasyMock.expectLastCall().once();
+
+        EasyMock.expect(mockDevice.uninstallPackage(EasyMock.eq(DEQP_ONDEVICE_PKG))).andReturn("")
+                .once();
+
+        EasyMock.replay(mockDevice, mockIDevice);
+        EasyMock.replay(mockListener);
+
+        deqpTest.setDevice(mockDevice);
+        deqpTest.setBuildHelper(new StubCtsBuildHelper());
+        deqpTest.run(mockListener);
+
+        EasyMock.verify(mockListener);
+        EasyMock.verify(mockDevice, mockIDevice);
+    }
+
+    /**
      * Test that test are left unexecuted if pm list query fails
      */
     public void testRun_queryPmListFailure()
@@ -1011,7 +1169,7 @@
      * Test dEQP with multiple instances
      */
     public void testRun_multipleInstances() throws Exception {
-        final String instrumentationAnswerConfigA =
+        final String instrumentationAnswerConfigAPass1 =
                 "INSTRUMENTATION_STATUS: dEQP-SessionInfo-Name=releaseName\r\n"
                 + "INSTRUMENTATION_STATUS: dEQP-EventType=SessionInfo\r\n"
                 + "INSTRUMENTATION_STATUS: dEQP-SessionInfo-Value=2014.x\r\n"
@@ -1042,10 +1200,30 @@
                 + "INSTRUMENTATION_STATUS: dEQP-TestCaseResult-Details=Pass\r\n"
                 + "INSTRUMENTATION_STATUS: dEQP-EventType=TestCaseResult\r\n"
                 + "INSTRUMENTATION_STATUS_CODE: 0\r\n"
+                + "INSTRUMENTATION_STATUS: dEQP-EventType=EndTestCase\r\n"
+                + "INSTRUMENTATION_STATUS_CODE: 0\r\n"
                 + "INSTRUMENTATION_STATUS: dEQP-EventType=BeginTestCase\r\n"
                 + "INSTRUMENTATION_STATUS: dEQP-BeginTestCase-TestCasePath=dEQP-GLES3.instances.crashtwo\r\n"
                 + "INSTRUMENTATION_STATUS_CODE: 0\r\n"; // early eof
-        final String instrumentationAnswerConfigB =
+        final String instrumentationAnswerConfigAPass2 =
+                "INSTRUMENTATION_STATUS: dEQP-SessionInfo-Name=releaseName\r\n"
+                + "INSTRUMENTATION_STATUS: dEQP-EventType=SessionInfo\r\n"
+                + "INSTRUMENTATION_STATUS: dEQP-SessionInfo-Value=2014.x\r\n"
+                + "INSTRUMENTATION_STATUS_CODE: 0\r\n"
+                + "INSTRUMENTATION_STATUS: dEQP-SessionInfo-Name=releaseId\r\n"
+                + "INSTRUMENTATION_STATUS: dEQP-EventType=SessionInfo\r\n"
+                + "INSTRUMENTATION_STATUS: dEQP-SessionInfo-Value=0xcafebabe\r\n"
+                + "INSTRUMENTATION_STATUS_CODE: 0\r\n"
+                + "INSTRUMENTATION_STATUS: dEQP-SessionInfo-Name=targetName\r\n"
+                + "INSTRUMENTATION_STATUS: dEQP-EventType=SessionInfo\r\n"
+                + "INSTRUMENTATION_STATUS: dEQP-SessionInfo-Value=android\r\n"
+                + "INSTRUMENTATION_STATUS_CODE: 0\r\n"
+                + "INSTRUMENTATION_STATUS: dEQP-EventType=BeginSession\r\n"
+                + "INSTRUMENTATION_STATUS_CODE: 0\r\n"
+                + "INSTRUMENTATION_STATUS: dEQP-EventType=BeginTestCase\r\n"
+                + "INSTRUMENTATION_STATUS: dEQP-BeginTestCase-TestCasePath=dEQP-GLES3.instances.crashtwo\r\n"
+                + "INSTRUMENTATION_STATUS_CODE: 0\r\n"; // early eof
+        final String instrumentationAnswerConfigBPass1 =
                 "INSTRUMENTATION_STATUS: dEQP-SessionInfo-Name=releaseName\r\n"
                 + "INSTRUMENTATION_STATUS: dEQP-EventType=SessionInfo\r\n"
                 + "INSTRUMENTATION_STATUS: dEQP-SessionInfo-Value=2014.x\r\n"
@@ -1070,12 +1248,6 @@
                 + "INSTRUMENTATION_STATUS: dEQP-EventType=EndTestCase\r\n"
                 + "INSTRUMENTATION_STATUS_CODE: 0\r\n"
                 + "INSTRUMENTATION_STATUS: dEQP-EventType=BeginTestCase\r\n"
-                + "INSTRUMENTATION_STATUS: dEQP-BeginTestCase-TestCasePath=dEQP-GLES3.instances.crashtwo\r\n"
-                + "INSTRUMENTATION_STATUS_CODE: 0\r\n"
-                + "INSTRUMENTATION_STATUS: dEQP-TerminateTestCase-Reason=Magic\r\n"
-                + "INSTRUMENTATION_STATUS: dEQP-EventType=TerminateTestCase\r\n"
-                + "INSTRUMENTATION_STATUS_CODE: 0\r\n"
-                + "INSTRUMENTATION_STATUS: dEQP-EventType=BeginTestCase\r\n"
                 + "INSTRUMENTATION_STATUS: dEQP-BeginTestCase-TestCasePath=dEQP-GLES3.instances.skipone\r\n"
                 + "INSTRUMENTATION_STATUS_CODE: 0\r\n"
                 + "INSTRUMENTATION_STATUS: dEQP-TestCaseResult-Code=Pass\r\n"
@@ -1087,7 +1259,31 @@
                 + "INSTRUMENTATION_STATUS: dEQP-EventType=EndSession\r\n"
                 + "INSTRUMENTATION_STATUS_CODE: 0\r\n"
                 + "INSTRUMENTATION_CODE: 0\r\n";
-        final String instrumentationAnswerConfigC =
+        final String instrumentationAnswerConfigBPass2 =
+                "INSTRUMENTATION_STATUS: dEQP-SessionInfo-Name=releaseName\r\n"
+                + "INSTRUMENTATION_STATUS: dEQP-EventType=SessionInfo\r\n"
+                + "INSTRUMENTATION_STATUS: dEQP-SessionInfo-Value=2014.x\r\n"
+                + "INSTRUMENTATION_STATUS_CODE: 0\r\n"
+                + "INSTRUMENTATION_STATUS: dEQP-SessionInfo-Name=releaseId\r\n"
+                + "INSTRUMENTATION_STATUS: dEQP-EventType=SessionInfo\r\n"
+                + "INSTRUMENTATION_STATUS: dEQP-SessionInfo-Value=0xcafebabe\r\n"
+                + "INSTRUMENTATION_STATUS_CODE: 0\r\n"
+                + "INSTRUMENTATION_STATUS: dEQP-SessionInfo-Name=targetName\r\n"
+                + "INSTRUMENTATION_STATUS: dEQP-EventType=SessionInfo\r\n"
+                + "INSTRUMENTATION_STATUS: dEQP-SessionInfo-Value=android\r\n"
+                + "INSTRUMENTATION_STATUS_CODE: 0\r\n"
+                + "INSTRUMENTATION_STATUS: dEQP-EventType=BeginSession\r\n"
+                + "INSTRUMENTATION_STATUS_CODE: 0\r\n"
+                + "INSTRUMENTATION_STATUS: dEQP-EventType=BeginTestCase\r\n"
+                + "INSTRUMENTATION_STATUS: dEQP-BeginTestCase-TestCasePath=dEQP-GLES3.instances.crashtwo\r\n"
+                + "INSTRUMENTATION_STATUS_CODE: 0\r\n"
+                + "INSTRUMENTATION_STATUS: dEQP-TerminateTestCase-Reason=Magic\r\n"
+                + "INSTRUMENTATION_STATUS: dEQP-EventType=TerminateTestCase\r\n"
+                + "INSTRUMENTATION_STATUS_CODE: 0\r\n"
+                + "INSTRUMENTATION_STATUS: dEQP-EventType=EndSession\r\n"
+                + "INSTRUMENTATION_STATUS_CODE: 0\r\n"
+                + "INSTRUMENTATION_CODE: 0\r\n";
+        final String instrumentationAnswerConfigCPass1 =
                 "INSTRUMENTATION_STATUS: dEQP-SessionInfo-Name=releaseName\r\n"
                 + "INSTRUMENTATION_STATUS: dEQP-EventType=SessionInfo\r\n"
                 + "INSTRUMENTATION_STATUS: dEQP-SessionInfo-Value=2014.x\r\n"
@@ -1111,6 +1307,24 @@
                 + "INSTRUMENTATION_STATUS_CODE: 0\r\n"
                 + "INSTRUMENTATION_STATUS: dEQP-EventType=EndTestCase\r\n"
                 + "INSTRUMENTATION_STATUS_CODE: 0\r\n"
+                + "INSTRUMENTATION_STATUS: dEQP-EventType=EndSession\r\n"
+                + "INSTRUMENTATION_STATUS_CODE: 0\r\n"
+                + "INSTRUMENTATION_CODE: 0\r\n";
+        final String instrumentationAnswerConfigCPass2 =
+                "INSTRUMENTATION_STATUS: dEQP-SessionInfo-Name=releaseName\r\n"
+                + "INSTRUMENTATION_STATUS: dEQP-EventType=SessionInfo\r\n"
+                + "INSTRUMENTATION_STATUS: dEQP-SessionInfo-Value=2014.x\r\n"
+                + "INSTRUMENTATION_STATUS_CODE: 0\r\n"
+                + "INSTRUMENTATION_STATUS: dEQP-SessionInfo-Name=releaseId\r\n"
+                + "INSTRUMENTATION_STATUS: dEQP-EventType=SessionInfo\r\n"
+                + "INSTRUMENTATION_STATUS: dEQP-SessionInfo-Value=0xcafebabe\r\n"
+                + "INSTRUMENTATION_STATUS_CODE: 0\r\n"
+                + "INSTRUMENTATION_STATUS: dEQP-SessionInfo-Name=targetName\r\n"
+                + "INSTRUMENTATION_STATUS: dEQP-EventType=SessionInfo\r\n"
+                + "INSTRUMENTATION_STATUS: dEQP-SessionInfo-Value=android\r\n"
+                + "INSTRUMENTATION_STATUS_CODE: 0\r\n"
+                + "INSTRUMENTATION_STATUS: dEQP-EventType=BeginSession\r\n"
+                + "INSTRUMENTATION_STATUS_CODE: 0\r\n"
                 + "INSTRUMENTATION_STATUS: dEQP-EventType=BeginTestCase\r\n"
                 + "INSTRUMENTATION_STATUS: dEQP-BeginTestCase-TestCasePath=dEQP-GLES3.instances.crashtwo\r\n"
                 + "INSTRUMENTATION_STATUS_CODE: 0\r\n"
@@ -1186,10 +1400,11 @@
             tests.add(id);
         }
 
-        ITestDevice mockDevice = EasyMock.createMock(ITestDevice.class);
         ITestInvocationListener mockListener
                 = EasyMock.createStrictMock(ITestInvocationListener.class);
-        IDevice mockIDevice = EasyMock.createMock(IDevice.class);
+        IMocksControl orderedControl = EasyMock.createStrictControl();
+        ITestDevice mockDevice = orderedControl.createMock(ITestDevice.class);
+        IDevice mockIDevice = orderedControl.createMock(IDevice.class);
 
         DeqpTestRunner deqpTest = new DeqpTestRunner(NAME, NAME, tests, instances);
         deqpTest.setAbi(UnitTests.ABI);
@@ -1199,8 +1414,6 @@
         int version = 3 << 16;
         EasyMock.expect(mockDevice.getProperty("ro.opengles.version"))
                 .andReturn(Integer.toString(version)).atLeastOnce();
-        EasyMock.expect(mockDevice.executeShellCommand("pm list features")).andReturn(ALL_FEATURES)
-                .anyTimes();
 
         EasyMock.expect(mockDevice.uninstallPackage(EasyMock.eq(DEQP_ONDEVICE_PKG))).
             andReturn("").once();
@@ -1218,7 +1431,7 @@
                 + "--deqp-gl-major-version=3 "
                 + "--deqp-gl-minor-version=0", "Yes");
 
-        // run config A
+        // run config A - first pass
         runInstrumentationLineAndAnswer(mockDevice, mockIDevice,
                 "{dEQP-GLES3{instances{passall,failone,crashtwo}}}",
                 "--deqp-caselist-file=" + CASE_LIST_FILE_NAME
@@ -1226,9 +1439,23 @@
                 + "--deqp-screen-rotation=unspecified "
                 + "--deqp-surface-type=window "
                 + "--deqp-log-images=disable "
-                + "--deqp-watchdog=enable", instrumentationAnswerConfigA);
+                + "--deqp-watchdog=enable", instrumentationAnswerConfigAPass1);
+
+        // run config A - second pass
+        runInstrumentationLineAndAnswer(mockDevice, mockIDevice,
+                "{dEQP-GLES3{instances{crashtwo}}}",
+                "--deqp-caselist-file=" + CASE_LIST_FILE_NAME
+                + " --deqp-gl-config-name=rgba8888d24s8 "
+                + "--deqp-screen-rotation=unspecified "
+                + "--deqp-surface-type=window "
+                + "--deqp-log-images=disable "
+                + "--deqp-watchdog=enable", instrumentationAnswerConfigAPass2);
 
         // query for config B
+
+        EasyMock.expect(mockDevice.executeShellCommand("pm list features")).andReturn(ALL_FEATURES)
+                .once();
+
         expectRenderConfigQueryAndReturn(mockDevice,
                 "--deqp-gl-config-name=rgba8888d24s8 "
                 + "--deqp-screen-rotation=90 "
@@ -1236,15 +1463,15 @@
                 + "--deqp-gl-major-version=3 "
                 + "--deqp-gl-minor-version=0", "Yes");
 
-        // run for config B
+        // run for config B - first pass
         runInstrumentationLineAndAnswer(mockDevice, mockIDevice,
-                "{dEQP-GLES3{instances{passall,crashtwo,skipone}}}",
+                "{dEQP-GLES3{instances{passall,skipone}}}",
                 "--deqp-caselist-file=" + CASE_LIST_FILE_NAME
                 + " --deqp-gl-config-name=rgba8888d24s8 "
                 + "--deqp-screen-rotation=90 "
                 + "--deqp-surface-type=window "
                 + "--deqp-log-images=disable "
-                + "--deqp-watchdog=enable", instrumentationAnswerConfigB);
+                + "--deqp-watchdog=enable", instrumentationAnswerConfigBPass1);
 
         // query for config C
         expectRenderConfigQueryAndReturn(mockDevice,
@@ -1254,15 +1481,35 @@
                 + "--deqp-gl-major-version=3 "
                 + "--deqp-gl-minor-version=0", "Yes");
 
-        // run for config C
+        // run for config C - first pass
         runInstrumentationLineAndAnswer(mockDevice, mockIDevice,
-                "{dEQP-GLES3{instances{failone,crashtwo}}}",
+                "{dEQP-GLES3{instances{failone}}}",
                 "--deqp-caselist-file=" + CASE_LIST_FILE_NAME
                 + " --deqp-gl-config-name=rgba8888d24s8 "
                 + "--deqp-screen-rotation=180 "
                 + "--deqp-surface-type=window "
                 + "--deqp-log-images=disable "
-                + "--deqp-watchdog=enable" , instrumentationAnswerConfigC);
+                + "--deqp-watchdog=enable", instrumentationAnswerConfigCPass1);
+
+        // run for config C - second pass
+        runInstrumentationLineAndAnswer(mockDevice, mockIDevice,
+                "{dEQP-GLES3{instances{crashtwo}}}",
+                "--deqp-caselist-file=" + CASE_LIST_FILE_NAME
+                + " --deqp-gl-config-name=rgba8888d24s8 "
+                + "--deqp-screen-rotation=180 "
+                + "--deqp-surface-type=window "
+                + "--deqp-log-images=disable "
+                + "--deqp-watchdog=enable", instrumentationAnswerConfigCPass2);
+
+        // run for config B - second pass (crashtwo has been deferred due to its instability)
+        runInstrumentationLineAndAnswer(mockDevice, mockIDevice,
+                "{dEQP-GLES3{instances{crashtwo}}}",
+                "--deqp-caselist-file=" + CASE_LIST_FILE_NAME
+                + " --deqp-gl-config-name=rgba8888d24s8 "
+                + "--deqp-screen-rotation=90 "
+                + "--deqp-surface-type=window "
+                + "--deqp-log-images=disable "
+                + "--deqp-watchdog=enable", instrumentationAnswerConfigBPass2);
 
         // query for unsupported config
         expectRenderConfigQueryAndReturn(mockDevice,
@@ -1321,11 +1568,12 @@
         mockListener.testRunEnded(EasyMock.anyLong(), EasyMock.<Map<String, String>>notNull());
         EasyMock.expectLastCall().once();
 
-        EasyMock.replay(mockDevice, mockIDevice);
+        orderedControl.replay();
         EasyMock.replay(mockListener);
+        deqpTest.setRecovery(new StubRecovery());
         deqpTest.run(mockListener);
         EasyMock.verify(mockListener);
-        EasyMock.verify(mockDevice, mockIDevice);
+        orderedControl.verify();
     }
 
     private void testMultipleInstancesLossOfDeviceMidInstance(final boolean recoverySuccessful)
@@ -1377,7 +1625,6 @@
                 + "INSTRUMENTATION_STATUS_CODE: 0\r\n"; // early <EOF>
 
         final TestIdentifier testId = new TestIdentifier("dEQP-GLES3.loss", "instance");
-        final String testPath = "dEQP-GLES3.loss.instance";
 
         Map<String,String> supportedConfigA = new HashMap<>();
         supportedConfigA.put("glconfig", "rgba8888d24s8");
@@ -1512,6 +1759,16 @@
             mockRecovery.recoverComLinkKilled();
             EasyMock.expectLastCall().once();
 
+            // retry running config B
+            runInstrumentationLineAndAnswer(mockDevice, mockIDevice,
+                    "{dEQP-GLES3{loss{instance}}}",
+                    "--deqp-caselist-file=" + CASE_LIST_FILE_NAME
+                    + " --deqp-gl-config-name=rgba8888d24s8 "
+                    + "--deqp-screen-rotation=90 "
+                    + "--deqp-surface-type=window "
+                    + "--deqp-log-images=disable "
+                    + "--deqp-watchdog=enable", instrumentationAnswerFine);
+
             // query config C
             expectRenderConfigQueryAndReturn(mockDevice,
                     "--deqp-gl-config-name=rgba8888d24s8 "
@@ -1534,20 +1791,14 @@
         mockListener.testRunStarted(ID, 1);
         EasyMock.expectLastCall().once();
 
-        mockListener.testStarted(EasyMock.eq(testId));
-        EasyMock.expectLastCall().once();
-
-        final String crashDescription = (recoverySuccessful) ? ("Incomplete test log") : ("Device lost");
-        mockListener.testFailed(testId,
-                "=== with config {glformat=rgba8888d24s8,rotation=90,surfacetype=window} ===\n"
-                + "Crash: " + crashDescription);
-        EasyMock.expectLastCall().once();
-
-        mockListener.testEnded(EasyMock.eq(testId), EasyMock.<Map<String, String>>notNull());
-        EasyMock.expectLastCall().once();
-
-        // run is ended successfully only if device is available
+        // result is reported only if device is available
         if (recoverySuccessful) {
+            mockListener.testStarted(EasyMock.eq(testId));
+            EasyMock.expectLastCall().once();
+
+            mockListener.testEnded(EasyMock.eq(testId), EasyMock.<Map<String, String>>notNull());
+            EasyMock.expectLastCall().once();
+
             mockListener.testRunEnded(EasyMock.anyLong(), EasyMock.<Map<String, String>>notNull());
             EasyMock.expectLastCall().once();
         }
@@ -1620,15 +1871,20 @@
         EasyMock.expectLastCall().once();
     }
 
-    private void setRecoveryExpectationKillProcess(RecoverableTestDevice mockDevice)
-            throws DeviceNotAvailableException {
+    private void setRecoveryExpectationKillProcess(RecoverableTestDevice mockDevice,
+            DeqpTestRunner.ISleepProvider mockSleepProvider) throws DeviceNotAvailableException {
         EasyMock.expect(mockDevice.executeShellCommand(EasyMock.contains("ps"))).
                 andReturn("root 1234 com.drawelement.deqp").once();
 
         EasyMock.expect(mockDevice.executeShellCommand(EasyMock.eq("kill -9 1234"))).
                 andReturn("").once();
-    }
 
+        // Recovery checks if kill failed
+        mockSleepProvider.sleep(EasyMock.gt(0));
+        EasyMock.expectLastCall().once();
+        EasyMock.expect(mockDevice.executeShellCommand(EasyMock.contains("ps"))).
+                andReturn("").once();
+    }
 
     private void setRecoveryExpectationRecovery(RecoverableTestDevice mockDevice)
             throws DeviceNotAvailableException {
@@ -1664,11 +1920,11 @@
         switch (numConsecutiveErrors) {
             case 0:
                 setRecoveryExpectationWait(mockSleepProvider);
-                setRecoveryExpectationKillProcess(mockDevice);
+                setRecoveryExpectationKillProcess(mockDevice, mockSleepProvider);
                 return 1;
             case 1:
                 setRecoveryExpectationRecovery(mockDevice);
-                setRecoveryExpectationKillProcess(mockDevice);
+                setRecoveryExpectationKillProcess(mockDevice, mockSleepProvider);
                 return 2;
             case 2:
                 setRecoveryExpectationReboot(mockDevice);
@@ -1839,6 +2095,223 @@
                 RecoveryEvent.PROGRESS);
     }
 
+    /**
+     * Test recovery if process cannot be killed
+     */
+    public void testRecovery_unkillableProcess() throws Exception {
+        DeqpTestRunner.Recovery recovery = new DeqpTestRunner.Recovery();
+        IMocksControl orderedControl = EasyMock.createStrictControl();
+        RecoverableTestDevice mockDevice = orderedControl.createMock(RecoverableTestDevice.class);
+        DeqpTestRunner.ISleepProvider mockSleepProvider =
+                orderedControl.createMock(DeqpTestRunner.ISleepProvider.class);
+
+        // recovery attempts to kill the process after a timeout
+        mockSleepProvider.sleep(EasyMock.gt(0));
+        EasyMock.expect(mockDevice.executeShellCommand(EasyMock.contains("ps"))).
+                andReturn("root 1234 com.drawelement.deqp").once();
+        EasyMock.expect(mockDevice.executeShellCommand(EasyMock.eq("kill -9 1234"))).
+                andReturn("").once();
+
+        // Recovery checks if kill failed
+        mockSleepProvider.sleep(EasyMock.gt(0));
+        EasyMock.expectLastCall().once();
+        EasyMock.expect(mockDevice.executeShellCommand(EasyMock.contains("ps"))).
+                andReturn("root 1234 com.drawelement.deqp").once();
+
+        // Recovery resets the connection
+        mockDevice.recoverDevice();
+        EasyMock.expectLastCall().once();
+
+        // and attempts to kill the process again
+        EasyMock.expect(mockDevice.executeShellCommand(EasyMock.contains("ps"))).
+                andReturn("root 1234 com.drawelement.deqp").once();
+        EasyMock.expect(mockDevice.executeShellCommand(EasyMock.eq("kill -9 1234"))).
+                andReturn("").once();
+
+        // Recovery checks if kill failed
+        mockSleepProvider.sleep(EasyMock.gt(0));
+        EasyMock.expectLastCall().once();
+        EasyMock.expect(mockDevice.executeShellCommand(EasyMock.contains("ps"))).
+                andReturn("root 1234 com.drawelement.deqp").once();
+
+        // recovery reboots the device
+        mockDevice.reboot();
+        EasyMock.expectLastCall().once();
+
+        orderedControl.replay();
+        recovery.setDevice(mockDevice);
+        recovery.setSleepProvider(mockSleepProvider);
+        recovery.recoverComLinkKilled();
+        orderedControl.verify();
+    }
+
+    /**
+     * Test external interruption before batch run.
+     */
+    public void testInterrupt_killBeforeBatch() throws Exception {
+        final TestIdentifier testId = new TestIdentifier("dEQP-GLES3.interrupt", "test");
+
+        Collection<TestIdentifier> tests = new ArrayList<TestIdentifier>();
+        tests.add(testId);
+
+        Map<TestIdentifier, List<Map<String, String>>> instance = new HashMap<>();
+        instance.put(testId, DEFAULT_INSTANCE_ARGS);
+
+        ITestInvocationListener mockListener
+                = EasyMock.createStrictMock(ITestInvocationListener.class);
+        ITestDevice mockDevice = EasyMock.createMock(ITestDevice.class);
+        IDevice mockIDevice = EasyMock.createMock(IDevice.class);
+        IRunUtil mockRunUtil = EasyMock.createMock(IRunUtil.class);
+
+        DeqpTestRunner deqpTest = new DeqpTestRunner(NAME, NAME, tests, instance);
+        deqpTest.setAbi(UnitTests.ABI);
+        deqpTest.setDevice(mockDevice);
+        deqpTest.setBuildHelper(new StubCtsBuildHelper());
+        deqpTest.setRunUtil(mockRunUtil);
+
+        int version = 3 << 16;
+        EasyMock.expect(mockDevice.getProperty("ro.opengles.version"))
+                .andReturn(Integer.toString(version)).atLeastOnce();
+
+        EasyMock.expect(mockDevice.uninstallPackage(EasyMock.eq(DEQP_ONDEVICE_PKG))).
+            andReturn("").once();
+
+        EasyMock.expect(mockDevice.installPackage(EasyMock.<File>anyObject(),
+                EasyMock.eq(true),
+                EasyMock.eq(AbiUtils.createAbiFlag(UnitTests.ABI.getName())))).andReturn(null)
+                .once();
+
+        expectRenderConfigQuery(mockDevice,
+                "--deqp-gl-config-name=rgba8888d24s8 --deqp-screen-rotation=unspecified "
+                + "--deqp-surface-type=window --deqp-gl-major-version=3 "
+                + "--deqp-gl-minor-version=0");
+
+        mockRunUtil.sleep(0);
+        EasyMock.expectLastCall().andThrow(new RunInterruptedException());
+
+        mockListener.testRunStarted(ID, 1);
+        EasyMock.expectLastCall().once();
+
+        EasyMock.replay(mockDevice, mockIDevice);
+        EasyMock.replay(mockListener);
+        EasyMock.replay(mockRunUtil);
+        try {
+            deqpTest.run(mockListener);
+            fail("expected RunInterruptedException");
+        } catch (RunInterruptedException ex) {
+            // expected
+        }
+        EasyMock.verify(mockRunUtil);
+        EasyMock.verify(mockListener);
+        EasyMock.verify(mockDevice, mockIDevice);
+    }
+
+    /**
+     * Test external interruption in testFailed().
+     */
+    public void testInterrupt_killReportTestFailed() throws Exception {
+        final TestIdentifier testId = new TestIdentifier("dEQP-GLES3.interrupt", "test");
+        final String testPath = "dEQP-GLES3.interrupt.test";
+        final String testTrie = "{dEQP-GLES3{interrupt{test}}}";
+        final String output = "INSTRUMENTATION_STATUS: dEQP-SessionInfo-Name=releaseName\r\n"
+                + "INSTRUMENTATION_STATUS: dEQP-EventType=SessionInfo\r\n"
+                + "INSTRUMENTATION_STATUS: dEQP-SessionInfo-Value=2014.x\r\n"
+                + "INSTRUMENTATION_STATUS_CODE: 0\r\n"
+                + "INSTRUMENTATION_STATUS: dEQP-SessionInfo-Name=releaseId\r\n"
+                + "INSTRUMENTATION_STATUS: dEQP-EventType=SessionInfo\r\n"
+                + "INSTRUMENTATION_STATUS: dEQP-SessionInfo-Value=0xcafebabe\r\n"
+                + "INSTRUMENTATION_STATUS_CODE: 0\r\n"
+                + "INSTRUMENTATION_STATUS: dEQP-SessionInfo-Name=targetName\r\n"
+                + "INSTRUMENTATION_STATUS: dEQP-EventType=SessionInfo\r\n"
+                + "INSTRUMENTATION_STATUS: dEQP-SessionInfo-Value=android\r\n"
+                + "INSTRUMENTATION_STATUS_CODE: 0\r\n"
+                + "INSTRUMENTATION_STATUS: dEQP-EventType=BeginSession\r\n"
+                + "INSTRUMENTATION_STATUS_CODE: 0\r\n"
+                + "INSTRUMENTATION_STATUS: dEQP-EventType=BeginTestCase\r\n"
+                + "INSTRUMENTATION_STATUS: dEQP-BeginTestCase-TestCasePath=" + testPath + "\r\n"
+                + "INSTRUMENTATION_STATUS_CODE: 0\r\n"
+                + "INSTRUMENTATION_STATUS: dEQP-TestCaseResult-Code=Fail\r\n"
+                + "INSTRUMENTATION_STATUS: dEQP-TestCaseResult-Details=Fail\r\n"
+                + "INSTRUMENTATION_STATUS: dEQP-EventType=TestCaseResult\r\n"
+                + "INSTRUMENTATION_STATUS_CODE: 0\r\n"
+                + "INSTRUMENTATION_STATUS: dEQP-EventType=EndTestCase\r\n"
+                + "INSTRUMENTATION_STATUS_CODE: 0\r\n"
+                + "INSTRUMENTATION_STATUS: dEQP-EventType=EndSession\r\n"
+                + "INSTRUMENTATION_STATUS_CODE: 0\r\n"
+                + "INSTRUMENTATION_CODE: 0\r\n";
+
+        Collection<TestIdentifier> tests = new ArrayList<TestIdentifier>();
+        tests.add(testId);
+
+        Map<TestIdentifier, List<Map<String, String>>> instance = new HashMap<>();
+        instance.put(testId, DEFAULT_INSTANCE_ARGS);
+
+        ITestInvocationListener mockListener
+                = EasyMock.createStrictMock(ITestInvocationListener.class);
+        ITestDevice mockDevice = EasyMock.createMock(ITestDevice.class);
+        IDevice mockIDevice = EasyMock.createMock(IDevice.class);
+        IRunUtil mockRunUtil = EasyMock.createMock(IRunUtil.class);
+
+        DeqpTestRunner deqpTest = new DeqpTestRunner(NAME, NAME, tests, instance);
+        deqpTest.setAbi(UnitTests.ABI);
+        deqpTest.setDevice(mockDevice);
+        deqpTest.setBuildHelper(new StubCtsBuildHelper());
+        deqpTest.setRunUtil(mockRunUtil);
+
+        int version = 3 << 16;
+        EasyMock.expect(mockDevice.getProperty("ro.opengles.version"))
+                .andReturn(Integer.toString(version)).atLeastOnce();
+
+        EasyMock.expect(mockDevice.uninstallPackage(EasyMock.eq(DEQP_ONDEVICE_PKG))).
+            andReturn("").once();
+
+        EasyMock.expect(mockDevice.installPackage(EasyMock.<File>anyObject(),
+                EasyMock.eq(true),
+                EasyMock.eq(AbiUtils.createAbiFlag(UnitTests.ABI.getName())))).andReturn(null)
+                .once();
+
+        expectRenderConfigQuery(mockDevice,
+                "--deqp-gl-config-name=rgba8888d24s8 --deqp-screen-rotation=unspecified "
+                + "--deqp-surface-type=window --deqp-gl-major-version=3 "
+                + "--deqp-gl-minor-version=0");
+
+        mockRunUtil.sleep(0);
+        EasyMock.expectLastCall().once();
+
+        String commandLine = String.format(
+                "--deqp-caselist-file=%s --deqp-gl-config-name=rgba8888d24s8 "
+                + "--deqp-screen-rotation=unspecified "
+                + "--deqp-surface-type=window "
+                + "--deqp-log-images=disable "
+                + "--deqp-watchdog=enable",
+                CASE_LIST_FILE_NAME);
+
+        runInstrumentationLineAndAnswer(mockDevice, mockIDevice, testTrie, commandLine,
+                output);
+
+        mockListener.testRunStarted(ID, 1);
+        EasyMock.expectLastCall().once();
+
+        mockListener.testStarted(EasyMock.eq(testId));
+        EasyMock.expectLastCall().once();
+
+        mockListener.testFailed(EasyMock.eq(testId), EasyMock.<String>notNull());
+        EasyMock.expectLastCall().andThrow(new RunInterruptedException());
+
+        EasyMock.replay(mockDevice, mockIDevice);
+        EasyMock.replay(mockListener);
+        EasyMock.replay(mockRunUtil);
+        try {
+            deqpTest.run(mockListener);
+            fail("expected RunInterruptedException");
+        } catch (RunInterruptedException ex) {
+            // expected
+        }
+        EasyMock.verify(mockRunUtil);
+        EasyMock.verify(mockListener);
+        EasyMock.verify(mockDevice, mockIDevice);
+    }
+
     private void runInstrumentationLineAndAnswer(ITestDevice mockDevice, IDevice mockIDevice,
             final String testTrie, final String cmd, final String output) throws Exception {
         EasyMock.expect(mockDevice.executeShellCommand(EasyMock.eq("rm " + CASE_LIST_FILE_NAME)))
diff --git a/tools/utils/buildCts.py b/tools/utils/buildCts.py
index 5ecede8..79a3354 100755
--- a/tools/utils/buildCts.py
+++ b/tools/utils/buildCts.py
@@ -267,6 +267,7 @@
     #                   different deqp sets in different plans.
     plan.ExcludeTests('com.drawelements.deqp.gles3', ReadFileLines(os.path.join(self.test_root, 'deqp/gles3-temporary-failures.txt')))
     plan.ExcludeTests('com.drawelements.deqp.gles31', ReadFileLines(os.path.join(self.test_root, 'deqp/gles31-temporary-failures.txt')))
+    plan.ExcludeTests('com.drawelements.deqp.egl', ReadFileLines(os.path.join(self.test_root, 'deqp/egl-temporary-failures.txt')))
     self.__WritePlan(plan, 'CTS-DEQP')
 
     # CTS - sub plan for new test packages added for staging