am 816391ba: Fix cts-tradefed\'s Java version check.

* commit '816391bab3bb68ebaefd6849f6a0e6678d0d8fe7':
  Fix cts-tradefed's Java version check.
diff --git a/CtsTestCaseList.mk b/CtsTestCaseList.mk
index 31dc2fd..80e1efc 100644
--- a/CtsTestCaseList.mk
+++ b/CtsTestCaseList.mk
@@ -171,6 +171,7 @@
     CtsAdbTests \
     CtsAppSecurityTests \
     CtsDevicePolicyManagerTestCases \
+    CtsDumpsysHostTestCases \
     CtsHostJank \
     CtsHostUi \
     CtsMonkeyTestCases \
diff --git a/apps/CameraITS/.gitignore b/apps/CameraITS/.gitignore
new file mode 100644
index 0000000..259969b
--- /dev/null
+++ b/apps/CameraITS/.gitignore
@@ -0,0 +1,11 @@
+# Ignore files that are created as a result of running the ITS tests.
+
+*.json
+*.yuv
+*.jpg
+*.jpeg
+*.png
+*.pyc
+its.target.cfg
+.DS_Store
+
diff --git a/apps/CameraITS/Android.mk b/apps/CameraITS/Android.mk
new file mode 100644
index 0000000..8f7ed7c
--- /dev/null
+++ b/apps/CameraITS/Android.mk
@@ -0,0 +1,31 @@
+# Copyright (C) 2014 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+its-dir-name := CameraITS
+its-dir := $(HOST_OUT)/$(its-dir-name)
+its-build-stamp := $(its-dir)/build_stamp
+
+camera-its: $(its-build-stamp)
+
+.PHONY: camera-its
+
+$(its-dir): $(its-build-stamp)
+
+$(its-build-stamp): $(ACP)
+	echo $(its-dir)
+	mkdir -p $(its-dir)
+	$(ACP) -rfp cts/apps/$(its-dir-name)/* $(its-dir)
+	rm $(its-dir)/Android.mk
+	touch $@
diff --git a/apps/CameraITS/CameraITS.pdf b/apps/CameraITS/CameraITS.pdf
new file mode 100644
index 0000000..0d10bae
--- /dev/null
+++ b/apps/CameraITS/CameraITS.pdf
Binary files differ
diff --git a/apps/CameraITS/build/envsetup.sh b/apps/CameraITS/build/envsetup.sh
new file mode 100644
index 0000000..6069341
--- /dev/null
+++ b/apps/CameraITS/build/envsetup.sh
@@ -0,0 +1,45 @@
+# Copyright 2013 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# This file should be sourced from bash. Sets environment variables for
+# running tests, and also checks that a number of dependencies are present
+# and that the unit tests for the modules passed (indicating that the setup
+# is correct).
+
+[[ "${BASH_SOURCE[0]}" != "${0}" ]] || \
+    { echo ">> Script must be sourced with 'source $0'" >&2; exit 1; }
+
+command -v adb >/dev/null 2>&1 || \
+    echo ">> Require adb executable to be in path" >&2
+
+command -v python >/dev/null 2>&1 || \
+    echo ">> Require python executable to be in path" >&2
+
+python -V 2>&1 | grep -q "Python 2.7" || \
+    echo ">> Require python 2.7" >&2
+
+for M in numpy PIL Image matplotlib pylab cv2 scipy.stats scipy.spatial
+do
+    python -c "import $M" >/dev/null 2>&1 || \
+        echo ">> Require Python $M module" >&2
+done
+
+export PYTHONPATH="$PWD/pymodules:$PYTHONPATH"
+
+for M in device objects image caps dng target error
+do
+    python "pymodules/its/$M.py" 2>&1 | grep -q "OK" || \
+        echo ">> Unit test for $M failed" >&2
+done
+
diff --git a/apps/CameraITS/pymodules/its/__init__.py b/apps/CameraITS/pymodules/its/__init__.py
new file mode 100644
index 0000000..59058be
--- /dev/null
+++ b/apps/CameraITS/pymodules/its/__init__.py
@@ -0,0 +1,14 @@
+# Copyright 2013 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
diff --git a/apps/CameraITS/pymodules/its/caps.py b/apps/CameraITS/pymodules/its/caps.py
new file mode 100644
index 0000000..b713db9
--- /dev/null
+++ b/apps/CameraITS/pymodules/its/caps.py
@@ -0,0 +1,221 @@
+# Copyright 2014 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import unittest
+import its.objects
+import sys
+
+
+def skip_unless(cond):
+    """Skips the test if the condition is false.
+
+    If a test is skipped, then it is exited and returns the special code
+    of 101 to the calling shell, which can be used by an external test
+    harness to differentiate a skip from a pass or fail.
+
+    Args:
+        cond: Boolean, which must be true for the test to not skip.
+
+    Returns:
+        Nothing.
+    """
+    SKIP_RET_CODE = 101
+
+    if not cond:
+        print "Test skipped"
+        sys.exit(SKIP_RET_CODE)
+
+
+def full(props):
+    """Returns whether a device is a FULL capability camera2 device.
+
+    Args:
+        props: Camera properties object.
+
+    Returns:
+        Boolean.
+    """
+    return props.has_key("android.info.supportedHardwareLevel") and \
+           props["android.info.supportedHardwareLevel"] == 1
+
+def limited(props):
+    """Returns whether a device is a LIMITED capability camera2 device.
+
+    Args:
+        props: Camera properties object.
+
+    Returns:
+        Boolean.
+    """
+    return props.has_key("android.info.supportedHardwareLevel") and \
+           props["android.info.supportedHardwareLevel"] == 0
+
+def legacy(props):
+    """Returns whether a device is a LEGACY capability camera2 device.
+
+    Args:
+        props: Camera properties object.
+
+    Returns:
+        Boolean.
+    """
+    return props.has_key("android.info.supportedHardwareLevel") and \
+           props["android.info.supportedHardwareLevel"] == 2
+
+def manual_sensor(props):
+    """Returns whether a device supports MANUAL_SENSOR capabilities.
+
+    Args:
+        props: Camera properties object.
+
+    Returns:
+        Boolean.
+    """
+    return    props.has_key("android.request.availableCapabilities") and \
+              1 in props["android.request.availableCapabilities"] \
+           or full(props)
+
+def manual_post_proc(props):
+    """Returns whether a device supports MANUAL_POST_PROCESSING capabilities.
+
+    Args:
+        props: Camera properties object.
+
+    Returns:
+        Boolean.
+    """
+    return    props.has_key("android.request.availableCapabilities") and \
+              2 in props["android.request.availableCapabilities"] \
+           or full(props)
+
+def raw(props):
+    """Returns whether a device supports RAW capabilities.
+
+    Args:
+        props: Camera properties object.
+
+    Returns:
+        Boolean.
+    """
+    return props.has_key("android.request.availableCapabilities") and \
+           3 in props["android.request.availableCapabilities"]
+
+def raw16(props):
+    """Returns whether a device supports RAW16 output.
+
+    Args:
+        props: Camera properties object.
+
+    Returns:
+        Boolean.
+    """
+    return len(its.objects.get_available_output_sizes("raw", props)) > 0
+
+def raw10(props):
+    """Returns whether a device supports RAW10 output.
+
+    Args:
+        props: Camera properties object.
+
+    Returns:
+        Boolean.
+    """
+    return len(its.objects.get_available_output_sizes("raw10", props)) > 0
+
+def sensor_fusion(props):
+    """Returns whether the camera and motion sensor timestamps for the device
+    are in the same time domain and can be compared directly.
+
+    Args:
+        props: Camera properties object.
+
+    Returns:
+        Boolean.
+    """
+    return props.has_key("android.sensor.info.timestampSource") and \
+           props["android.sensor.info.timestampSource"] == 1
+
+def read_3a(props):
+    """Return whether a device supports reading out the following 3A settings:
+        sensitivity
+        exposure time
+        awb gain
+        awb cct
+        focus distance
+
+    Args:
+        props: Camera properties object.
+
+    Returns:
+        Boolean.
+    """
+    # TODO: check available result keys explicitly
+    return manual_sensor(props) and manual_post_proc(props)
+
+def compute_target_exposure(props):
+    """Return whether a device supports target exposure computation in its.target module.
+
+    Args:
+        props: Camera properties object.
+
+    Returns:
+        Boolean.
+    """
+    return manual_sensor(props) and manual_post_proc(props)
+
+def freeform_crop(props):
+    """Returns whether a device supports freeform cropping.
+
+    Args:
+        props: Camera properties object.
+
+    Return:
+        Boolean.
+    """
+    return props.has_key("android.scaler.croppingType") and \
+           props["android.scaler.croppingType"] == 1
+
+def flash(props):
+    """Returns whether a device supports flash control.
+
+    Args:
+        props: Camera properties object.
+
+    Return:
+        Boolean.
+    """
+    return props.has_key("android.flash.info.available") and \
+           props["android.flash.info.available"] == 1
+
+
+def per_frame_control(props):
+    """Returns whether a device supports per frame control
+
+    Args:
+        props: Camera properties object.
+
+    Return:
+        Boolean.
+    """
+    return props.has_key("android.sync.maxLatency") and \
+           props["android.sync.maxLatency"] == 0
+
+class __UnitTest(unittest.TestCase):
+    """Run a suite of unit tests on this module.
+    """
+    # TODO: Add more unit tests.
+
+if __name__ == '__main__':
+    unittest.main()
+
diff --git a/apps/CameraITS/pymodules/its/device.py b/apps/CameraITS/pymodules/its/device.py
new file mode 100644
index 0000000..beba0ae
--- /dev/null
+++ b/apps/CameraITS/pymodules/its/device.py
@@ -0,0 +1,545 @@
+# Copyright 2013 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import its.error
+import os
+import os.path
+import sys
+import re
+import json
+import time
+import unittest
+import socket
+import subprocess
+import hashlib
+import numpy
+
+class ItsSession(object):
+    """Controls a device over adb to run ITS scripts.
+
+    The script importing this module (on the host machine) prepares JSON
+    objects encoding CaptureRequests, specifying sets of parameters to use
+    when capturing an image using the Camera2 APIs. This class encapsulates
+    sending the requests to the device, monitoring the device's progress, and
+    copying the resultant captures back to the host machine when done. TCP
+    forwarded over adb is the transport mechanism used.
+
+    The device must have CtsVerifier.apk installed.
+
+    Attributes:
+        sock: The open socket.
+    """
+
+    # Open a connection to localhost:6000, forwarded to port 6000 on the device.
+    # TODO: Support multiple devices running over different TCP ports.
+    IPADDR = '127.0.0.1'
+    PORT = 6000
+    BUFFER_SIZE = 4096
+
+    # Seconds timeout on each socket operation.
+    SOCK_TIMEOUT = 10.0
+
+    PACKAGE = 'com.android.cts.verifier.camera.its'
+    INTENT_START = 'com.android.cts.verifier.camera.its.START'
+    ACTION_ITS_RESULT = 'com.android.cts.verifier.camera.its.ACTION_ITS_RESULT'
+    EXTRA_SUCCESS = 'camera.its.extra.SUCCESS'
+
+    # TODO: Handle multiple connected devices.
+    ADB = "adb -d"
+
+    # Definitions for some of the common output format options for do_capture().
+    # Each gets images of full resolution for each requested format.
+    CAP_RAW = {"format":"raw"}
+    CAP_DNG = {"format":"dng"}
+    CAP_YUV = {"format":"yuv"}
+    CAP_JPEG = {"format":"jpeg"}
+    CAP_RAW_YUV = [{"format":"raw"}, {"format":"yuv"}]
+    CAP_DNG_YUV = [{"format":"dng"}, {"format":"yuv"}]
+    CAP_RAW_JPEG = [{"format":"raw"}, {"format":"jpeg"}]
+    CAP_DNG_JPEG = [{"format":"dng"}, {"format":"jpeg"}]
+    CAP_YUV_JPEG = [{"format":"yuv"}, {"format":"jpeg"}]
+    CAP_RAW_YUV_JPEG = [{"format":"raw"}, {"format":"yuv"}, {"format":"jpeg"}]
+    CAP_DNG_YUV_JPEG = [{"format":"dng"}, {"format":"yuv"}, {"format":"jpeg"}]
+
+    # Method to handle the case where the service isn't already running.
+    # This occurs when a test is invoked directly from the command line, rather
+    # than as a part of a separate test harness which is setting up the device
+    # and the TCP forwarding.
+    def __pre_init(self):
+
+        # This also includes the optional reboot handling: if the user
+        # provides a "reboot" or "reboot=N" arg, then reboot the device,
+        # waiting for N seconds (default 30) before returning.
+        for s in sys.argv[1:]:
+            if s[:6] == "reboot":
+                duration = 30
+                if len(s) > 7 and s[6] == "=":
+                    duration = int(s[7:])
+                print "Rebooting device"
+                _run("%s reboot" % (ItsSession.ADB));
+                _run("%s wait-for-device" % (ItsSession.ADB))
+                time.sleep(duration)
+                print "Reboot complete"
+
+        # TODO: Figure out why "--user 0" is needed, and fix the problem.
+        _run('%s shell am force-stop --user 0 %s' % (ItsSession.ADB, self.PACKAGE))
+        _run(('%s shell am startservice --user 0 -t text/plain '
+              '-a %s') % (ItsSession.ADB, self.INTENT_START))
+
+        # Wait until the socket is ready to accept a connection.
+        proc = subprocess.Popen(
+                ItsSession.ADB.split() + ["logcat"],
+                stdout=subprocess.PIPE)
+        logcat = proc.stdout
+        while True:
+            line = logcat.readline().strip()
+            if line.find('ItsService ready') >= 0:
+                break
+        proc.kill()
+
+        # Setup the TCP-over-ADB forwarding.
+        _run('%s forward tcp:%d tcp:%d' % (ItsSession.ADB,self.PORT,self.PORT))
+
+    def __init__(self):
+        if "noinit" not in sys.argv:
+            self.__pre_init()
+        self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+        self.sock.connect((self.IPADDR, self.PORT))
+        self.sock.settimeout(self.SOCK_TIMEOUT)
+        self.__close_camera()
+        self.__open_camera()
+
+    def __del__(self):
+        if hasattr(self, 'sock') and self.sock:
+            self.__close_camera()
+            self.sock.close()
+
+    def __enter__(self):
+        return self
+
+    def __exit__(self, type, value, traceback):
+        return False
+
+    def __read_response_from_socket(self):
+        # Read a line (newline-terminated) string serialization of JSON object.
+        chars = []
+        while len(chars) == 0 or chars[-1] != '\n':
+            ch = self.sock.recv(1)
+            if len(ch) == 0:
+                # Socket was probably closed; otherwise don't get empty strings
+                raise its.error.Error('Problem with socket on device side')
+            chars.append(ch)
+        line = ''.join(chars)
+        jobj = json.loads(line)
+        # Optionally read a binary buffer of a fixed size.
+        buf = None
+        if jobj.has_key("bufValueSize"):
+            n = jobj["bufValueSize"]
+            buf = bytearray(n)
+            view = memoryview(buf)
+            while n > 0:
+                nbytes = self.sock.recv_into(view, n)
+                view = view[nbytes:]
+                n -= nbytes
+            buf = numpy.frombuffer(buf, dtype=numpy.uint8)
+        return jobj, buf
+
+    def __open_camera(self):
+        # Get the camera ID to open as an argument.
+        camera_id = 0
+        for s in sys.argv[1:]:
+            if s[:7] == "camera=" and len(s) > 7:
+                camera_id = int(s[7:])
+        cmd = {"cmdName":"open", "cameraId":camera_id}
+        self.sock.send(json.dumps(cmd) + "\n")
+        data,_ = self.__read_response_from_socket()
+        if data['tag'] != 'cameraOpened':
+            raise its.error.Error('Invalid command response')
+
+    def __close_camera(self):
+        cmd = {"cmdName":"close"}
+        self.sock.send(json.dumps(cmd) + "\n")
+        data,_ = self.__read_response_from_socket()
+        if data['tag'] != 'cameraClosed':
+            raise its.error.Error('Invalid command response')
+
+    def do_vibrate(self, pattern):
+        """Cause the device to vibrate to a specific pattern.
+
+        Args:
+            pattern: Durations (ms) for which to turn on or off the vibrator.
+                The first value indicates the number of milliseconds to wait
+                before turning the vibrator on. The next value indicates the
+                number of milliseconds for which to keep the vibrator on
+                before turning it off. Subsequent values alternate between
+                durations in milliseconds to turn the vibrator off or to turn
+                the vibrator on.
+
+        Returns:
+            Nothing.
+        """
+        cmd = {}
+        cmd["cmdName"] = "doVibrate"
+        cmd["pattern"] = pattern
+        self.sock.send(json.dumps(cmd) + "\n")
+        data,_ = self.__read_response_from_socket()
+        if data['tag'] != 'vibrationStarted':
+            raise its.error.Error('Invalid command response')
+
+    def start_sensor_events(self):
+        """Start collecting sensor events on the device.
+
+        See get_sensor_events for more info.
+
+        Returns:
+            Nothing.
+        """
+        cmd = {}
+        cmd["cmdName"] = "startSensorEvents"
+        self.sock.send(json.dumps(cmd) + "\n")
+        data,_ = self.__read_response_from_socket()
+        if data['tag'] != 'sensorEventsStarted':
+            raise its.error.Error('Invalid command response')
+
+    def get_sensor_events(self):
+        """Get a trace of all sensor events on the device.
+
+        The trace starts when the start_sensor_events function is called. If
+        the test runs for a long time after this call, then the device's
+        internal memory can fill up. Calling get_sensor_events gets all events
+        from the device, and then stops the device from collecting events and
+        clears the internal buffer; to start again, the start_sensor_events
+        call must be used again.
+
+        Events from the accelerometer, compass, and gyro are returned; each
+        has a timestamp and x,y,z values.
+
+        Note that sensor events are only produced if the device isn't in its
+        standby mode (i.e., if the screen is on).
+
+        Returns:
+            A Python dictionary with three keys ("accel", "mag", "gyro") each
+            of which maps to a list of objects containing "time","x","y","z"
+            keys.
+        """
+        cmd = {}
+        cmd["cmdName"] = "getSensorEvents"
+        self.sock.send(json.dumps(cmd) + "\n")
+        data,_ = self.__read_response_from_socket()
+        if data['tag'] != 'sensorEvents':
+            raise its.error.Error('Invalid command response')
+        return data['objValue']
+
+    def get_camera_properties(self):
+        """Get the camera properties object for the device.
+
+        Returns:
+            The Python dictionary object for the CameraProperties object.
+        """
+        cmd = {}
+        cmd["cmdName"] = "getCameraProperties"
+        self.sock.send(json.dumps(cmd) + "\n")
+        data,_ = self.__read_response_from_socket()
+        if data['tag'] != 'cameraProperties':
+            raise its.error.Error('Invalid command response')
+        return data['objValue']['cameraProperties']
+
+    def do_3a(self, regions_ae=[[0,0,1,1,1]],
+                    regions_awb=[[0,0,1,1,1]],
+                    regions_af=[[0,0,1,1,1]],
+                    do_ae=True, do_awb=True, do_af=True,
+                    lock_ae=False, lock_awb=False,
+                    get_results=False,
+                    ev_comp=0):
+        """Perform a 3A operation on the device.
+
+        Triggers some or all of AE, AWB, and AF, and returns once they have
+        converged. Uses the vendor 3A that is implemented inside the HAL.
+
+        Throws an assertion if 3A fails to converge.
+
+        Args:
+            regions_ae: List of weighted AE regions.
+            regions_awb: List of weighted AWB regions.
+            regions_af: List of weighted AF regions.
+            do_ae: Trigger AE and wait for it to converge.
+            do_awb: Wait for AWB to converge.
+            do_af: Trigger AF and wait for it to converge.
+            lock_ae: Request AE lock after convergence, and wait for it.
+            lock_awb: Request AWB lock after convergence, and wait for it.
+            get_results: Return the 3A results from this function.
+            ev_comp: An EV compensation value to use when running AE.
+
+        Region format in args:
+            Arguments are lists of weighted regions; each weighted region is a
+            list of 5 values, [x,y,w,h, wgt], and each argument is a list of
+            these 5-value lists. The coordinates are given as normalized
+            rectangles (x,y,w,h) specifying the region. For example:
+                [[0.0, 0.0, 1.0, 0.5, 5], [0.0, 0.5, 1.0, 0.5, 10]].
+            Weights are non-negative integers.
+
+        Returns:
+            Five values are returned if get_results is true:
+            * AE sensitivity; None if do_ae is False
+            * AE exposure time; None if do_ae is False
+            * AWB gains (list); None if do_awb is False
+            * AWB transform (list); None if do_awb is false
+            * AF focus position; None if do_af is false
+            Otherwise, it returns five None values.
+        """
+        print "Running vendor 3A on device"
+        cmd = {}
+        cmd["cmdName"] = "do3A"
+        cmd["regions"] = {"ae": sum(regions_ae, []),
+                          "awb": sum(regions_awb, []),
+                          "af": sum(regions_af, [])}
+        cmd["triggers"] = {"ae": do_ae, "af": do_af}
+        if lock_ae:
+            cmd["aeLock"] = True
+        if lock_awb:
+            cmd["awbLock"] = True
+        if ev_comp != 0:
+            cmd["evComp"] = ev_comp
+        self.sock.send(json.dumps(cmd) + "\n")
+
+        # Wait for each specified 3A to converge.
+        ae_sens = None
+        ae_exp = None
+        awb_gains = None
+        awb_transform = None
+        af_dist = None
+        converged = False
+        while True:
+            data,_ = self.__read_response_from_socket()
+            vals = data['strValue'].split()
+            if data['tag'] == 'aeResult':
+                ae_sens, ae_exp = [int(i) for i in vals]
+            elif data['tag'] == 'afResult':
+                af_dist = float(vals[0])
+            elif data['tag'] == 'awbResult':
+                awb_gains = [float(f) for f in vals[:4]]
+                awb_transform = [float(f) for f in vals[4:]]
+            elif data['tag'] == '3aConverged':
+                converged = True
+            elif data['tag'] == '3aDone':
+                break
+            else:
+                raise its.error.Error('Invalid command response')
+        if converged and not get_results:
+            return None,None,None,None,None
+        if (do_ae and ae_sens == None or do_awb and awb_gains == None
+                or do_af and af_dist == None or not converged):
+            raise its.error.Error('3A failed to converge')
+        return ae_sens, ae_exp, awb_gains, awb_transform, af_dist
+
+    def do_capture(self, cap_request, out_surfaces=None):
+        """Issue capture request(s), and read back the image(s) and metadata.
+
+        The main top-level function for capturing one or more images using the
+        device. Captures a single image if cap_request is a single object, and
+        captures a burst if it is a list of objects.
+
+        The out_surfaces field can specify the width(s), height(s), and
+        format(s) of the captured image. The formats may be "yuv", "jpeg",
+        "dng", "raw", or "raw10". The default is a YUV420 frame ("yuv")
+        corresponding to a full sensor frame.
+
+        Note that one or more surfaces can be specified, allowing a capture to
+        request images back in multiple formats (e.g.) raw+yuv, raw+jpeg,
+        yuv+jpeg, raw+yuv+jpeg. If the size is omitted for a surface, the
+        default is the largest resolution available for the format of that
+        surface. At most one output surface can be specified for a given format,
+        and raw+dng, raw10+dng, and raw+raw10 are not supported as combinations.
+
+        Example of a single capture request:
+
+            {
+                "android.sensor.exposureTime": 100*1000*1000,
+                "android.sensor.sensitivity": 100
+            }
+
+        Example of a list of capture requests:
+
+            [
+                {
+                    "android.sensor.exposureTime": 100*1000*1000,
+                    "android.sensor.sensitivity": 100
+                },
+                {
+                    "android.sensor.exposureTime": 100*1000*1000,
+                    "android.sensor.sensitivity": 200
+                }
+            ]
+
+        Examples of output surface specifications:
+
+            {
+                "width": 640,
+                "height": 480,
+                "format": "yuv"
+            }
+
+            [
+                {
+                    "format": "jpeg"
+                },
+                {
+                    "format": "raw"
+                }
+            ]
+
+        The following variables defined in this class are shortcuts for
+        specifying one or more formats where each output is the full size for
+        that format; they can be used as values for the out_surfaces arguments:
+
+            CAP_RAW
+            CAP_DNG
+            CAP_YUV
+            CAP_JPEG
+            CAP_RAW_YUV
+            CAP_DNG_YUV
+            CAP_RAW_JPEG
+            CAP_DNG_JPEG
+            CAP_YUV_JPEG
+            CAP_RAW_YUV_JPEG
+            CAP_DNG_YUV_JPEG
+
+        If multiple formats are specified, then this function returns multiple
+        capture objects, one for each requested format. If multiple formats and
+        multiple captures (i.e. a burst) are specified, then this function
+        returns multiple lists of capture objects. In both cases, the order of
+        the returned objects matches the order of the requested formats in the
+        out_surfaces parameter. For example:
+
+            yuv_cap            = do_capture( req1                           )
+            yuv_cap            = do_capture( req1,        yuv_fmt           )
+            yuv_cap,  raw_cap  = do_capture( req1,        [yuv_fmt,raw_fmt] )
+            yuv_caps           = do_capture( [req1,req2], yuv_fmt           )
+            yuv_caps, raw_caps = do_capture( [req1,req2], [yuv_fmt,raw_fmt] )
+
+        Args:
+            cap_request: The Python dict/list specifying the capture(s), which
+                will be converted to JSON and sent to the device.
+            out_surfaces: (Optional) specifications of the output image formats
+                and sizes to use for each capture.
+
+        Returns:
+            An object, list of objects, or list of lists of objects, where each
+            object contains the following fields:
+            * data: the image data as a numpy array of bytes.
+            * width: the width of the captured image.
+            * height: the height of the captured image.
+            * format: image the format, in ["yuv","jpeg","raw","raw10","dng"].
+            * metadata: the capture result object (Python dictionary).
+        """
+        cmd = {}
+        cmd["cmdName"] = "doCapture"
+        if not isinstance(cap_request, list):
+            cmd["captureRequests"] = [cap_request]
+        else:
+            cmd["captureRequests"] = cap_request
+        if out_surfaces is not None:
+            if not isinstance(out_surfaces, list):
+                cmd["outputSurfaces"] = [out_surfaces]
+            else:
+                cmd["outputSurfaces"] = out_surfaces
+            formats = [c["format"] if c.has_key("format") else "yuv"
+                       for c in cmd["outputSurfaces"]]
+            formats = [s if s != "jpg" else "jpeg" for s in formats]
+        else:
+            formats = ['yuv']
+        ncap = len(cmd["captureRequests"])
+        nsurf = 1 if out_surfaces is None else len(cmd["outputSurfaces"])
+        if len(formats) > len(set(formats)):
+            raise its.error.Error('Duplicate format requested')
+        if "dng" in formats and "raw" in formats or \
+                "dng" in formats and "raw10" in formats or \
+                "raw" in formats and "raw10" in formats:
+            raise its.error.Error('Different raw formats not supported')
+        print "Capturing %d frame%s with %d format%s [%s]" % (
+                  ncap, "s" if ncap>1 else "", nsurf, "s" if nsurf>1 else "",
+                  ",".join(formats))
+        self.sock.send(json.dumps(cmd) + "\n")
+
+        # Wait for ncap*nsurf images and ncap metadata responses.
+        # Assume that captures come out in the same order as requested in
+        # the burst, however individual images of different formats can come
+        # out in any order for that capture.
+        nbufs = 0
+        bufs = {"yuv":[], "raw":[], "raw10":[], "dng":[], "jpeg":[]}
+        mds = []
+        widths = None
+        heights = None
+        while nbufs < ncap*nsurf or len(mds) < ncap:
+            jsonObj,buf = self.__read_response_from_socket()
+            if jsonObj['tag'] in ['jpegImage', 'yuvImage', 'rawImage', \
+                    'raw10Image', 'dngImage'] and buf is not None:
+                fmt = jsonObj['tag'][:-5]
+                bufs[fmt].append(buf)
+                nbufs += 1
+            elif jsonObj['tag'] == 'captureResults':
+                mds.append(jsonObj['objValue']['captureResult'])
+                outputs = jsonObj['objValue']['outputs']
+                widths = [out['width'] for out in outputs]
+                heights = [out['height'] for out in outputs]
+            else:
+                # Just ignore other tags
+                None
+        rets = []
+        for j,fmt in enumerate(formats):
+            objs = []
+            for i in range(ncap):
+                obj = {}
+                obj["data"] = bufs[fmt][i]
+                obj["width"] = widths[j]
+                obj["height"] = heights[j]
+                obj["format"] = fmt
+                obj["metadata"] = mds[i]
+                objs.append(obj)
+            rets.append(objs if ncap>1 else objs[0])
+        return rets if len(rets)>1 else rets[0]
+
+def report_result(camera_id, success):
+    """Send a pass/fail result to the device, via an intent.
+
+    Args:
+        camera_id: The ID string of the camera for which to report pass/fail.
+        success: Boolean, indicating if the result was pass or fail.
+
+    Returns:
+        Nothing.
+    """
+    resultstr = "%s=%s" % (camera_id, 'True' if success else 'False')
+    _run(('%s shell am broadcast '
+          '-a %s --es %s %s') % (ItsSession.ADB, ItsSession.ACTION_ITS_RESULT,
+          ItsSession.EXTRA_SUCCESS, resultstr))
+
+
+def _run(cmd):
+    """Replacement for os.system, with hiding of stdout+stderr messages.
+    """
+    with open(os.devnull, 'wb') as devnull:
+        subprocess.check_call(
+                cmd.split(), stdout=devnull, stderr=subprocess.STDOUT)
+
+class __UnitTest(unittest.TestCase):
+    """Run a suite of unit tests on this module.
+    """
+
+    # TODO: Add some unit tests.
+    None
+
+if __name__ == '__main__':
+    unittest.main()
+
diff --git a/apps/CameraITS/pymodules/its/dng.py b/apps/CameraITS/pymodules/its/dng.py
new file mode 100644
index 0000000..f331d02
--- /dev/null
+++ b/apps/CameraITS/pymodules/its/dng.py
@@ -0,0 +1,174 @@
+# Copyright 2014 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import numpy
+import numpy.linalg
+import unittest
+
# Illuminant IDs. These index the per-illuminant matrix lists below
# (ordered [A, D65]) in compute_cm_fm and compute_asn.
A = 0    # CIE standard illuminant A (incandescent).
D65 = 1  # CIE standard illuminant D65 (daylight).
+
def compute_cm_fm(illuminant, gains, ccm, cal):
    """Compute the ColorMatrix (CM) and ForwardMatrix (FM).

    Given a captured shot of a grey chart illuminated by either a D65 or a
    standard A illuminant, the HAL will produce the WB gains and transform,
    in the android.colorCorrection.gains and android.colorCorrection.transform
    tags respectively. These values have both golden module and per-unit
    calibration baked in.

    This function is used to take the per-unit gains, ccm, and calibration
    matrix, and compute the values that the DNG ColorMatrix and ForwardMatrix
    for the specified illuminant should be. These CM and FM values should be
    the same for all DNG files captured by all units of the same model (e.g.
    all Nexus 5 units). The calibration matrix should be the same for all DNGs
    saved by the same unit, but will differ unit-to-unit.

    Args:
        illuminant: 0 (A) or 1 (D65).
        gains: White balance gains, as a list of 4 floats.
        ccm: White balance transform matrix, as a list of 9 floats.
        cal: Per-unit calibration matrix, as a list of 9 floats.

    Returns:
        CM: The 3x3 ColorMatrix for the specified illuminant, as a numpy array
        FM: The 3x3 ForwardMatrix for the specified illuminant, as a numpy array
    """
    # --- Standard matrices. ---

    # sRGB -> XYZ matrix ("W"). See: http://www.brucelindbloom.com/
    srgb_to_xyz = numpy.array([
        [ 0.4124564,  0.3575761,  0.1804375],
        [ 0.2126729,  0.7151522,  0.0721750],
        [ 0.0193339,  0.1191920,  0.9503041]])

    # Chromatic adaptation ("HH") from D65 (since sRGB's ref white is D65)
    # to D50 (since CIE XYZ's ref white is D50).
    adapt_d65_to_d50 = numpy.array([
        [ 1.0478112,  0.0228866, -0.0501270],
        [ 0.0295424,  0.9904844, -0.0170491],
        [-0.0092345,  0.0150436,  0.7521316]])

    # Chromatic adaptation ("H") from D65 (because sRGB's reference white is
    # D65) to the calibration illuminant: the identity for a D65 illuminant,
    # and the linear Bradford D65->A adaptation for the A illuminant.
    # See: http://www.brucelindbloom.com/
    adapt_to_illum = [
        numpy.array([
            [ 1.2164557,  0.1109905, -0.1549325],
            [ 0.1533326,  0.9152313, -0.0559953],
            [-0.0239469,  0.0358984,  0.3147529]]),
        numpy.eye(3),
        ][illuminant]

    # --- Per-model values (statics in the HAL camera properties, shared
    # by all units of a particular phone/camera). ---

    # "G": diagonal white-balance gain matrix built from the R,G,B gains.
    gain_mat = numpy.diag([gains[0], gains[1], gains[3]])

    # "S": the CCM itself, reshaped to 3x3.
    ccm_mat = numpy.array([ccm[0:3], ccm[3:6], ccm[6:9]])

    # --- Per-unit values. ---

    # "CC": the per-unit calibration matrix for the given illuminant.
    cal_mat = numpy.array([cal[0:3], cal[3:6], cal[6:9]])

    # --- Derived matrices; these should match up with the DNG-related
    # matrices provided by the HAL:
    #   CM = inv(H * W * S * G * CC)
    #   FM = HH * W * S
    product = numpy.dot(adapt_to_illum, srgb_to_xyz)
    product = numpy.dot(product, ccm_mat)
    product = numpy.dot(product, gain_mat)
    product = numpy.dot(product, cal_mat)
    CM = numpy.linalg.inv(product)
    FM = numpy.dot(numpy.dot(adapt_d65_to_d50, srgb_to_xyz), ccm_mat)

    # Normalize CM so that it maps the D50 (PCS) white point to a maximum
    # component value of 1.
    CM = CM / max(numpy.dot(CM, (0.9642957, 1.0, 0.8251046)))

    return CM, FM
+
def compute_asn(illuminant, cal, CM):
    """Compute the AsShotNeutral DNG value.

    This value is the only dynamic DNG value; the ForwardMatrix, ColorMatrix,
    and CalibrationMatrix values should be the same for every DNG saved by
    a given unit. The AsShotNeutral depends on the scene white balance
    estimate.

    This function computes what the DNG AsShotNeutral values should be, for
    a given ColorMatrix (which is computed from the WB gains and CCM for a
    shot taken of a grey chart under either A or D65 illuminants) and the
    per-unit calibration matrix.

    Args:
        illuminant: 0 (A) or 1 (D65).
        cal: Per-unit calibration matrix, as a list of 9 floats.
        CM: The computed 3x3 ColorMatrix for the illuminant, as a numpy array.

    Returns:
        ASN: The AsShotNeutral value, as a length-3 numpy array.
    """
    # XYZ coordinates of the calibration illuminants, ordered [A, D65].
    # See: Wyszecki & Stiles, "Color Science", second edition.
    white_xyz = [
        numpy.array([1.098675, 1.0, 0.355916]),   # A
        numpy.array([0.950456, 1.0, 1.089058]),   # D65
        ][illuminant]

    # "CC": the per-unit calibration matrix, reshaped to 3x3.
    cal_mat = numpy.array([cal[0:3], cal[3:6], cal[6:9]])

    # ASN = CC * CM * XYZCAL, normalized so the max vector element is 1.0.
    asn = numpy.dot(numpy.dot(cal_mat, CM), white_xyz)
    return asn / max(asn)
+
class __UnitTest(unittest.TestCase):
    """Run a suite of unit tests on this module.
    """
    # Intentionally empty; the docstring serves as the class body.
    # TODO: Add more unit tests.
+
# Running this module directly executes its (currently empty) unit tests.
if __name__ == '__main__':
    unittest.main()
+
diff --git a/apps/CameraITS/pymodules/its/error.py b/apps/CameraITS/pymodules/its/error.py
new file mode 100644
index 0000000..884389b
--- /dev/null
+++ b/apps/CameraITS/pymodules/its/error.py
@@ -0,0 +1,26 @@
+# Copyright 2013 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import unittest
+
class Error(Exception):
    """Generic error type raised by the ITS python modules."""
    pass
+
class __UnitTest(unittest.TestCase):
    """Run a suite of unit tests on this module.
    """
    # Intentionally empty; the docstring serves as the class body.
+
# Running this module directly executes its (currently empty) unit tests.
if __name__ == '__main__':
    unittest.main()
+
diff --git a/apps/CameraITS/pymodules/its/image.py b/apps/CameraITS/pymodules/its/image.py
new file mode 100644
index 0000000..b3bdb65
--- /dev/null
+++ b/apps/CameraITS/pymodules/its/image.py
@@ -0,0 +1,747 @@
+# Copyright 2013 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
import matplotlib
# Select a non-interactive backend; must be done before importing pylab.
matplotlib.use('Agg')

import copy
import cStringIO
import math
import sys
import unittest

import Image
import numpy
import pylab
import scipy.stats

import its.error
# its.objects is required by convert_raw_to_rgb_image (rational_to_float);
# it was missing from the original import list, causing a NameError at
# runtime on the raw conversion path.
import its.objects
+
# Default 3x3 CCM mapping (Y,U,V) to (R,G,B): the classic full-range
# YCbCr -> RGB conversion coefficients.
DEFAULT_YUV_TO_RGB_CCM = numpy.matrix([
                                [1.000,  0.000,  1.402],
                                [1.000, -0.344, -0.714],
                                [1.000,  1.772,  0.000]])

# Offsets subtracted from the Y,U,V channel values before applying the CCM.
DEFAULT_YUV_OFFSETS = numpy.array([0, 128, 128])

# 16-bit LUT encoding a 1/2.2 gamma curve, and its inverse (linearizing)
# 2.2 curve. range() is used instead of the Python-2-only xrange() for
# portability; the comprehension materializes a list either way.
DEFAULT_GAMMA_LUT = numpy.array(
        [math.floor(65535 * math.pow(i/65535.0, 1/2.2) + 0.5)
         for i in range(65536)])

DEFAULT_INVGAMMA_LUT = numpy.array(
        [math.floor(65535 * math.pow(i/65535.0, 2.2) + 0.5)
         for i in range(65536)])

# Maximum number of entries allowed in a LUT passed to apply_lut_to_image.
MAX_LUT_SIZE = 65536
+
def convert_capture_to_rgb_image(cap,
                                 ccm_yuv_to_rgb=DEFAULT_YUV_TO_RGB_CCM,
                                 yuv_off=DEFAULT_YUV_OFFSETS,
                                 props=None):
    """Convert a captured image object to a RGB image.

    Args:
        cap: A capture object as returned by its.device.do_capture.
        ccm_yuv_to_rgb: (Optional) the 3x3 CCM to convert from YUV to RGB.
        yuv_off: (Optional) offsets to subtract from each of Y,U,V values.
        props: (Optional) camera properties object (of static values);
            required for processing raw images.

    Returns:
        RGB float-3 image array, with pixel values in [0.0, 1.0].
    """
    w = cap["width"]
    h = cap["height"]
    if cap["format"] == "raw10":
        assert(props is not None)
        # Expand raw10 to a raw-16 capture, then fall through to "raw".
        cap = unpack_raw10_capture(cap, props)
    if cap["format"] == "yuv":
        # Planar YUV420: w*h luma bytes, then w*h/4 U and w*h/4 V bytes.
        # Floor division keeps slice indices integral on Python 3 too.
        y = cap["data"][0:w*h]
        u = cap["data"][w*h:w*h*5//4]
        v = cap["data"][w*h*5//4:w*h*6//4]
        # Pass the CCM and offsets through; previously they were accepted
        # by this function but silently ignored on the YUV path.
        return convert_yuv420_to_rgb_image(y, u, v, w, h,
                                           ccm_yuv_to_rgb, yuv_off)
    elif cap["format"] == "jpeg":
        return decompress_jpeg_to_rgb_image(cap["data"])
    elif cap["format"] == "raw":
        assert(props is not None)
        r,gr,gb,b = convert_capture_to_planes(cap, props)
        return convert_raw_to_rgb_image(r,gr,gb,b, props, cap["metadata"])
    else:
        raise its.error.Error('Invalid format %s' % (cap["format"]))
+
def unpack_raw10_capture(cap, props):
    """Unpack a raw-10 capture to a raw-16 capture.

    Args:
        cap: A raw-10 capture object.
        props: Camera properties object.

    Returns:
        New capture object with raw-16 data.
    """
    # Data is packed as 4x10b pixels in 5 bytes, with the first 4 bytes
    # holding the MSBs of the pixels, and the 5th byte holding 4x2b LSBs.
    w,h = cap["width"], cap["height"]
    if w % 4 != 0:
        raise its.error.Error('Invalid raw-10 buffer width')
    # Deep-copy so the caller's capture object is left untouched.
    cap = copy.deepcopy(cap)
    # Each packed row is w*5/4 bytes; floor division keeps the reshape
    # dimension an int on Python 3 as well as Python 2.
    cap["data"] = unpack_raw10_image(cap["data"].reshape(h,w*5//4))
    cap["format"] = "raw"
    return cap
+
def unpack_raw10_image(img):
    """Unpack a raw-10 image to a raw-16 image.

    Output image will have the 10 LSBs filled in each 16b word, and the 6 MSBs
    will be set to zero.

    Args:
        img: A raw-10 image, as a uint8 numpy array.

    Returns:
        Image as a uint16 numpy array, with all row padding stripped.
    """
    if img.shape[1] % 5 != 0:
        raise its.error.Error('Invalid raw-10 buffer width')
    # Floor division keeps the computed dimensions as ints under both
    # Python 2 and Python 3 ("/" yields a float on Python 3).
    w = img.shape[1]*4//5
    h = img.shape[0]
    # Cut out the 4x8b MSBs and shift to bits [10:2] in 16b words.
    msbs = numpy.delete(img, numpy.s_[4::5], 1)
    msbs = msbs.astype(numpy.uint16)
    msbs = numpy.left_shift(msbs, 2)
    msbs = msbs.reshape(h,w)
    # Cut out the 4x2b LSBs and put each in bits [2:0] of their own 8b words.
    lsbs = img[::, 4::5].reshape(h,w//4)
    lsbs = numpy.right_shift(
            numpy.packbits(numpy.unpackbits(lsbs).reshape(h,w//4,4,2),3), 6)
    lsbs = lsbs.reshape(h,w)
    # Fuse the MSBs and LSBs back together.
    img16 = numpy.bitwise_or(msbs, lsbs).reshape(h,w)
    return img16
+
def convert_capture_to_planes(cap, props=None):
    """Convert a captured image object to separate image planes.

    Decompose an image into multiple images, corresponding to different planes.

    For YUV420 captures ("yuv"):
        Returns Y,U,V planes, where the Y plane is full-res and the U,V planes
        are each 1/2 x 1/2 of the full res.

    For Bayer captures ("raw" or "raw10"):
        Returns planes in the order R,Gr,Gb,B, regardless of the Bayer pattern
        layout. Each plane is 1/2 x 1/2 of the full res.

    For JPEG captures ("jpeg"):
        Returns R,G,B full-res planes.

    Args:
        cap: A capture object as returned by its.device.do_capture.
        props: (Optional) camera properties object (of static values);
            required for processing raw images.

    Returns:
        A tuple of float numpy arrays (one per plane), consisting of pixel
            values in the range [0.0, 1.0].
    """
    w = cap["width"]
    h = cap["height"]
    if cap["format"] == "raw10":
        assert(props is not None)
        cap = unpack_raw10_capture(cap, props)
    if cap["format"] == "yuv":
        # Floor division ("//") keeps slice and reshape sizes integral on
        # Python 3 as well as Python 2.
        y = cap["data"][0:w*h]
        u = cap["data"][w*h:w*h*5//4]
        v = cap["data"][w*h*5//4:w*h*6//4]
        return ((y.astype(numpy.float32) / 255.0).reshape(h, w, 1),
                (u.astype(numpy.float32) / 255.0).reshape(h//2, w//2, 1),
                (v.astype(numpy.float32) / 255.0).reshape(h//2, w//2, 1))
    elif cap["format"] == "jpeg":
        rgb = decompress_jpeg_to_rgb_image(cap["data"]).reshape(w*h*3)
        return (rgb[::3].reshape(h,w,1),
                rgb[1::3].reshape(h,w,1),
                rgb[2::3].reshape(h,w,1))
    elif cap["format"] == "raw":
        assert(props is not None)
        white_level = float(props['android.sensor.info.whiteLevel'])
        img = numpy.ndarray(shape=(h*w,), dtype='<u2',
                            buffer=cap["data"][0:w*h*2])
        img = img.astype(numpy.float32).reshape(h,w) / white_level
        # De-interleave the 2x2 Bayer mosaic into four half-res planes,
        # then reorder them into the canonical R,Gr,Gb,B order.
        imgs = [img[::2].reshape(w*h//2)[::2].reshape(h//2,w//2,1),
                img[::2].reshape(w*h//2)[1::2].reshape(h//2,w//2,1),
                img[1::2].reshape(w*h//2)[::2].reshape(h//2,w//2,1),
                img[1::2].reshape(w*h//2)[1::2].reshape(h//2,w//2,1)]
        idxs = get_canonical_cfa_order(props)
        return [imgs[i] for i in idxs]
    else:
        raise its.error.Error('Invalid format %s' % (cap["format"]))
+
def get_canonical_cfa_order(props):
    """Returns a mapping from the Bayer 2x2 top-left grid in the CFA to
    the standard order R,Gr,Gb,B.

    Args:
        props: Camera properties object.

    Returns:
        List of 4 integers, corresponding to the positions in the 2x2 top-
            left Bayer grid of R,Gr,Gb,B, where the 2x2 grid is labeled as
            0,1,2,3 in row major order.
    """
    # Note that raw streams aren't croppable, so the cropRegion doesn't need
    # to be considered when determining the top-left pixel color.
    pattern = props['android.sensor.info.colorFilterArrangement']
    # Positions of R,Gr,Gb,B in the 2x2 grid, per CFA enum value.
    layouts = {
        0: [0,1,2,3],  # RGGB
        1: [1,0,3,2],  # GRBG
        2: [2,3,0,1],  # GBRG
        3: [3,2,1,0],  # BGGR
    }
    if pattern not in layouts:
        raise its.error.Error("Not supported")
    return layouts[pattern]
+
def get_gains_in_canonical_order(props, gains):
    """Reorders the gains tuple to the canonical R,Gr,Gb,B order.

    Args:
        props: Camera properties object.
        gains: List of 4 values, in R,G_even,G_odd,B order.

    Returns:
        List of gains values, in R,Gr,Gb,B order.
    """
    pattern = props['android.sensor.info.colorFilterArrangement']
    if pattern == 0 or pattern == 1:
        # RGGB or GRBG: G_even already corresponds to Gr, so the input
        # order is already canonical.
        return gains
    if pattern == 2 or pattern == 3:
        # GBRG or BGGR: G_even corresponds to Gb, so swap the two greens.
        return [gains[0], gains[2], gains[1], gains[3]]
    raise its.error.Error("Not supported")
+
def convert_raw_to_rgb_image(r_plane, gr_plane, gb_plane, b_plane,
                             props, cap_res):
    """Convert a Bayer raw-16 image to an RGB image.

    Includes some extremely rudimentary demosaicking and color processing
    operations; the output of this function shouldn't be used for any image
    quality analysis.

    Args:
        r_plane,gr_plane,gb_plane,b_plane: Numpy arrays for each color plane
            in the Bayer image, with pixels in the [0.0, 1.0] range.
        props: Camera properties object.
        cap_res: Capture result (metadata) object.

    Returns:
        RGB float-3 image array, with pixel values in [0.0, 1.0]
    """
    # Values required for the RAW to RGB conversion.
    assert(props is not None)
    white_level = float(props['android.sensor.info.whiteLevel'])
    black_levels = props['android.sensor.blackLevelPattern']
    gains = cap_res['android.colorCorrection.gains']
    ccm = cap_res['android.colorCorrection.transform']

    # Reorder black levels and gains to R,Gr,Gb,B, to match the order
    # of the planes.
    idxs = get_canonical_cfa_order(props)
    black_levels = [black_levels[i] for i in idxs]
    gains = get_gains_in_canonical_order(props, gains)

    # Convert CCM from rational to float, as numpy arrays.
    # NOTE(review): its.objects is not imported at the top of this module in
    # this patch; verify the import exists, else this line raises NameError.
    ccm = numpy.array(its.objects.rational_to_float(ccm)).reshape(3,3)

    # Need to scale the image back to the full [0,1] range after subtracting
    # the black level from each pixel.
    scale = white_level / (white_level - max(black_levels))

    # Three-channel black levels, normalized to [0,1] by white_level.
    # Indices [0,1,3] select R, Gr, and B; the two green planes are averaged
    # into a single G channel below, which uses the Gr black level and gain.
    black_levels = numpy.array([b/white_level for b in [
            black_levels[i] for i in [0,1,3]]])

    # Three-channel gains.
    gains = numpy.array([gains[i] for i in [0,1,3]])

    h,w = r_plane.shape[:2]
    # Stack R, (Gr+Gb)/2, B into an HxWx3 image; black-subtract, rescale,
    # apply per-channel WB gains, clip to [0,1], then apply the 3x3 CCM.
    img = numpy.dstack([r_plane,(gr_plane+gb_plane)/2.0,b_plane])
    img = (((img.reshape(h,w,3) - black_levels) * scale) * gains).clip(0.0,1.0)
    img = numpy.dot(img.reshape(w*h,3), ccm.T).reshape(h,w,3).clip(0.0,1.0)
    return img
+
def convert_yuv420_to_rgb_image(y_plane, u_plane, v_plane,
                                w, h,
                                ccm_yuv_to_rgb=DEFAULT_YUV_TO_RGB_CCM,
                                yuv_off=DEFAULT_YUV_OFFSETS):
    """Convert a YUV420 8-bit planar image to an RGB image.

    Args:
        y_plane: The packed 8-bit Y plane.
        u_plane: The packed 8-bit U plane.
        v_plane: The packed 8-bit V plane.
        w: The width of the image.
        h: The height of the image.
        ccm_yuv_to_rgb: (Optional) the 3x3 CCM to convert from YUV to RGB.
        yuv_off: (Optional) offsets to subtract from each of Y,U,V values.

    Returns:
        RGB float-3 image array, with pixel values in [0.0, 1.0].
    """
    # Offset the channels; U,V become signed (centered at zero).
    y = numpy.subtract(y_plane, yuv_off[0])
    u = numpy.subtract(u_plane, yuv_off[1]).view(numpy.int8)
    v = numpy.subtract(v_plane, yuv_off[2]).view(numpy.int8)
    # U and V are quarter-res; upsample 2x in each direction to full size.
    # Floor division keeps the reshape dimensions integral on Python 3.
    u = u.reshape(h//2, w//2).repeat(2, axis=1).repeat(2, axis=0)
    v = v.reshape(h//2, w//2).repeat(2, axis=1).repeat(2, axis=0)
    yuv = numpy.dstack([y, u.reshape(w*h), v.reshape(w*h)])
    flt = numpy.empty([h, w, 3], dtype=numpy.float32)
    flt.reshape(w*h*3)[:] = yuv.reshape(h*w*3)[:]
    # Apply the CCM and clamp to the 8-bit range, then normalize to [0,1].
    flt = numpy.dot(flt.reshape(w*h,3), ccm_yuv_to_rgb.T).clip(0, 255)
    rgb = numpy.empty([h, w, 3], dtype=numpy.uint8)
    rgb.reshape(w*h*3)[:] = flt.reshape(w*h*3)[:]
    return rgb.astype(numpy.float32) / 255.0
+
def load_yuv420_to_rgb_image(yuv_fname,
                             w, h,
                             ccm_yuv_to_rgb=DEFAULT_YUV_TO_RGB_CCM,
                             yuv_off=DEFAULT_YUV_OFFSETS):
    """Load a YUV420 image file, and return as an RGB image.

    Args:
        yuv_fname: The path of the YUV420 file.
        w: The width of the image.
        h: The height of the image.
        ccm_yuv_to_rgb: (Optional) the 3x3 CCM to convert from YUV to RGB.
        yuv_off: (Optional) offsets to subtract from each of Y,U,V values.

    Returns:
        RGB float-3 image array, with pixel values in [0.0, 1.0].
    """
    with open(yuv_fname, "rb") as f:
        # File layout is the Y plane followed by V then U; each chroma
        # plane holds w*h/4 bytes (floor division keeps the read counts
        # integral on Python 3 as well as Python 2).
        y = numpy.fromfile(f, numpy.uint8, w*h, "")
        v = numpy.fromfile(f, numpy.uint8, w*h//4, "")
        u = numpy.fromfile(f, numpy.uint8, w*h//4, "")
        return convert_yuv420_to_rgb_image(y,u,v,w,h,ccm_yuv_to_rgb,yuv_off)
+
def load_yuv420_to_yuv_planes(yuv_fname, w, h):
    """Load a YUV420 image file, and return separate Y, U, and V plane images.

    Args:
        yuv_fname: The path of the YUV420 file.
        w: The width of the image.
        h: The height of the image.

    Returns:
        Separate Y, U, and V images as float-1 Numpy arrays, pixels in [0,1].
        Note that pixel (0,0,0) is not black, since U,V pixels are centered at
        0.5, and also that the Y and U,V plane images returned are different
        sizes (due to chroma subsampling in the YUV420 format).
    """
    with open(yuv_fname, "rb") as f:
        # File layout is the Y plane followed by V then U; each chroma
        # plane holds w*h/4 bytes. Floor division keeps the read counts and
        # reshape dimensions integral on Python 3 as well as Python 2.
        y = numpy.fromfile(f, numpy.uint8, w*h, "")
        v = numpy.fromfile(f, numpy.uint8, w*h//4, "")
        u = numpy.fromfile(f, numpy.uint8, w*h//4, "")
        return ((y.astype(numpy.float32) / 255.0).reshape(h, w, 1),
                (u.astype(numpy.float32) / 255.0).reshape(h//2, w//2, 1),
                (v.astype(numpy.float32) / 255.0).reshape(h//2, w//2, 1))
+
def decompress_jpeg_to_rgb_image(jpeg_buffer):
    """Decompress a JPEG-compressed image, returning as an RGB image.

    Args:
        jpeg_buffer: The JPEG stream.

    Returns:
        A numpy array for the RGB image, with pixels in [0,1].
    """
    # Decode via PIL from an in-memory stream; PIL's size is (width, height).
    decoded = Image.open(cStringIO.StringIO(jpeg_buffer))
    width = decoded.size[0]
    height = decoded.size[1]
    # Normalize the 8-bit samples into [0,1] floats.
    return numpy.array(decoded).reshape(height, width, 3) / 255.0
+
def apply_lut_to_image(img, lut):
    """Applies a LUT to every pixel in a float image array.

    Internally converts to a 16b integer image, since the LUT can work with up
    to 16b->16b mappings (i.e. values in the range [0,65535]). The lut can also
    have fewer than 65536 entries, however it must be sized as a power of 2
    (and for smaller luts, the scale must match the bitdepth).

    For a 16b lut of 65536 entries, the operation performed is:

        lut[r * 65535] / 65535 -> r'
        lut[g * 65535] / 65535 -> g'
        lut[b * 65535] / 65535 -> b'

    For a 10b lut of 1024 entries, the operation becomes:

        lut[r * 1023] / 1023 -> r'
        lut[g * 1023] / 1023 -> g'
        lut[b * 1023] / 1023 -> b'

    Args:
        img: Numpy float image array, with pixel values in [0,1].
        lut: Numpy table encoding a LUT, mapping 16b integer values.

    Returns:
        Float image array after applying LUT to each pixel.
    """
    size = len(lut)
    # The LUT must be non-empty, no larger than 16 bits, and a power of two.
    is_pow2 = size > 0 and (size & (size - 1)) == 0
    if not is_pow2 or size > MAX_LUT_SIZE:
        raise its.error.Error('Invalid arg LUT size: %d' % (size))
    scale = float(size - 1)
    indices = (img * scale).astype(numpy.uint16)
    return (lut[indices] / scale).astype(numpy.float32)
+
def apply_matrix_to_image(img, mat):
    """Multiplies a 3x3 matrix with each float-3 image pixel.

    Each pixel is considered a column vector, and is left-multiplied by
    the given matrix:

        [     ]   r    r'
        [ mat ] * g -> g'
        [     ]   b    b'

    Args:
        img: Numpy float image array, with pixel values in [0,1].
        mat: Numpy 3x3 matrix.

    Returns:
        The numpy float-3 image array resulting from the matrix mult.
    """
    height = img.shape[0]
    width = img.shape[1]
    # Flatten to an (h*w)x3 pixel list, multiply by mat^T on the right
    # (equivalent to mat * pixel for each pixel), then restore the shape.
    transformed = numpy.dot(img.reshape(height * width, 3), mat.T)
    result = numpy.empty([height, width, 3], dtype=numpy.float32)
    result.reshape(width * height * 3)[:] = transformed.reshape(
            width * height * 3)[:]
    return result
+
def get_image_patch(img, xnorm, ynorm, wnorm, hnorm):
    """Get a patch (tile) of an image.

    Args:
        img: Numpy float image array, with pixel values in [0,1].
        xnorm,ynorm,wnorm,hnorm: Normalized (in [0,1]) coords for the tile.

    Returns:
        Float image array of the patch.
    """
    hfull = img.shape[0]
    wfull = img.shape[1]
    # math.ceil/math.floor return floats on Python 2; cast to int so the
    # values are usable as array indices on Python 3 / modern numpy too.
    xtile = int(math.ceil(xnorm * wfull))
    ytile = int(math.ceil(ynorm * hfull))
    wtile = int(math.floor(wnorm * wfull))
    htile = int(math.floor(hnorm * hfull))
    return img[ytile:ytile+htile,xtile:xtile+wtile,:].copy()
+
def compute_image_means(img):
    """Calculate the mean of each color channel in the image.

    Args:
        img: Numpy float image array, with pixel values in [0,1].

    Returns:
        A list of mean values, one per color channel in the image.
    """
    chans = img.shape[2]
    # range() (not the Python-2-only xrange()) for portability; the channel
    # count is tiny, so there is no efficiency concern.
    return [numpy.mean(img[:,:,i], dtype=numpy.float64)
            for i in range(chans)]
+
def compute_image_variances(img):
    """Calculate the variance of each color channel in the image.

    Args:
        img: Numpy float image array, with pixel values in [0,1].

    Returns:
        A list of variance values, one per color channel in the image.
    """
    chans = img.shape[2]
    # range() (not the Python-2-only xrange()) for portability. Note the
    # original docstring incorrectly said "mean values"; this returns the
    # per-channel variances.
    return [numpy.var(img[:,:,i], dtype=numpy.float64)
            for i in range(chans)]
+
def write_image(img, fname, apply_gamma=False):
    """Save a float-3 numpy array image to a file.

    Supported formats: PNG, JPEG, and others; see PIL docs for more.

    Image can be 3-channel, which is interpreted as RGB, or can be 1-channel,
    which is greyscale.

    Can optionally specify that the image should be gamma-encoded prior to
    writing it out; this should be done if the image contains linear pixel
    values, to make the image look "normal".

    Args:
        img: Numpy image array data.
        fname: Path of file to save to; the extension specifies the format.
        apply_gamma: (Optional) apply gamma to the image prior to writing it.
    """
    if apply_gamma:
        img = apply_lut_to_image(img, DEFAULT_GAMMA_LUT)
    height, width, chans = img.shape
    # Quantize [0,1] floats to 8-bit samples.
    bytedata = (img * 255.0).astype(numpy.uint8)
    if chans == 3:
        Image.fromarray(bytedata, "RGB").save(fname)
    elif chans == 1:
        # Replicate the single channel into R=G=B for a grey RGB image.
        grey3 = bytedata.repeat(3).reshape(height, width, 3)
        Image.fromarray(grey3, "RGB").save(fname)
    else:
        raise its.error.Error('Unsupported image type')
+
def downscale_image(img, f):
    """Shrink an image by a given integer factor.

    This function computes output pixel values by averaging over rectangular
    regions of the input image; it doesn't skip or sample pixels, and all input
    image pixels are evenly weighted.

    If the downscaling factor doesn't cleanly divide the width and/or height,
    then the remaining pixels on the right or bottom edge are discarded prior
    to the downscaling.

    Args:
        img: The input image as an ndarray.
        f: The downscaling factor, which should be an integer.

    Returns:
        The new (downscaled) image, as an ndarray.
    """
    h,w,chans = img.shape
    f = int(f)
    assert(f >= 1)
    # Truncate the right/bottom edges so both dimensions divide evenly.
    # Floor division ("//") keeps all sizes as ints on Python 3 as well
    # as Python 2; range() replaces the Python-2-only xrange().
    h = (h//f)*f
    w = (w//f)*f
    img = img[0:h:,0:w:,::]
    chs = []
    for i in range(chans):
        # Average fxf cells: first along rows, then along columns.
        ch = img.reshape(h*w*chans)[i::chans].reshape(h,w)
        ch = ch.reshape(h,w//f,f).mean(2).reshape(h,w//f)
        ch = ch.T.reshape(w//f,h//f,f).mean(2).T.reshape(h//f,w//f)
        chs.append(ch.reshape(h*w//(f*f)))
    img = numpy.vstack(chs).T.reshape(h//f,w//f,chans)
    return img
+
def __get_color_checker_patch(img, xc,yc, patch_size):
    # Extract a square tile of side patch_size centered at (xc,yc).
    # Floor division keeps the radius an int on Python 3 as well.
    r = patch_size//2
    tile = img[yc-r:yc+r:, xc-r:xc+r:, ::]
    return tile
+
def __measure_color_checker_patch(img, xc,yc, patch_size):
    # Mean value of each color channel over the tile centered at (xc,yc).
    patch = __get_color_checker_patch(img, xc,yc, patch_size)
    return patch.mean(1).mean(0)
+
+def get_color_checker_chart_patches(img, debug_fname_prefix=None):
+    """Return the center coords of each patch in a color checker chart.
+
+    Assumptions:
+    * Chart is vertical or horizontal w.r.t. camera, but not diagonal.
+    * Chart is (roughly) planar-parallel to the camera.
+    * Chart is centered in frame (roughly).
+    * Around/behind chart is white/grey background.
+    * The only black pixels in the image are from the chart.
+    * Chart is 100% visible and contained within image.
+    * No other objects within image.
+    * Image is well-exposed.
+    * Standard color checker chart with standard-sized black borders.
+
+    The values returned are in the coordinate system of the chart; that is,
+    patch (0,0) is the brown patch that is in the chart's top-left corner when
+    it is in the normal upright/horizontal orientation. (The chart may be any
+    of the four main orientations in the image.)
+
+    Args:
+        img: Input image, as a numpy array with pixels in [0,1].
+        debug_fname_prefix: If not None, the (string) name of a file prefix to
+            use to save a number of debug images for visualizing the output of
+            this function; can be used to see if the patches are being found
+            successfully.
+
+    Returns:
+        6x4 list of lists of integer (x,y) coords of the center of each patch,
+        ordered in the "chart order" (6x4 row major).
+    """
+
+    # Shrink the original image.
+    DOWNSCALE_FACTOR = 4
+    img_small = downscale_image(img, DOWNSCALE_FACTOR)
+
+    # Make a threshold image, which is 1.0 where the image is black,
+    # and 0.0 elsewhere.
+    BLACK_PIXEL_THRESH = 0.2
+    mask_img = scipy.stats.threshold(
+                img_small.max(2), BLACK_PIXEL_THRESH, 1.1, 0.0)
+    mask_img = 1.0 - scipy.stats.threshold(mask_img, -0.1, 0.1, 1.0)
+
+    if debug_fname_prefix is not None:
+        h,w = mask_img.shape
+        write_image(img, debug_fname_prefix+"_0.jpg")
+        write_image(mask_img.repeat(3).reshape(h,w,3),
+                debug_fname_prefix+"_1.jpg")
+
+    # Mask image flattened to a single row or column (by averaging).
+    # Also apply a threshold to these arrays.
+    FLAT_PIXEL_THRESH = 0.05
+    flat_row = mask_img.mean(0)
+    flat_col = mask_img.mean(1)
+    flat_row = [0 if v < FLAT_PIXEL_THRESH else 1 for v in flat_row]
+    flat_col = [0 if v < FLAT_PIXEL_THRESH else 1 for v in flat_col]
+
+    # Start and end of the non-zero region of the flattened row/column.
+    flat_row_nonzero = [i for i in range(len(flat_row)) if flat_row[i]>0]
+    flat_col_nonzero = [i for i in range(len(flat_col)) if flat_col[i]>0]
+    flat_row_min, flat_row_max = min(flat_row_nonzero), max(flat_row_nonzero)
+    flat_col_min, flat_col_max = min(flat_col_nonzero), max(flat_col_nonzero)
+
+    # Orientation of chart, and number of grid cells horz. and vertically.
+    orient = "h" if flat_row_max-flat_row_min>flat_col_max-flat_col_min else "v"
+    xgrids = 6 if orient=="h" else 4
+    ygrids = 6 if orient=="v" else 4
+
+    # Get better bounds on the patches region, lopping off some of the excess
+    # black border.
+    HRZ_BORDER_PAD_FRAC = 0.0138
+    VERT_BORDER_PAD_FRAC = 0.0395
+    xpad = HRZ_BORDER_PAD_FRAC if orient=="h" else VERT_BORDER_PAD_FRAC
+    ypad = HRZ_BORDER_PAD_FRAC if orient=="v" else VERT_BORDER_PAD_FRAC
+    xchart = flat_row_min + (flat_row_max - flat_row_min) * xpad
+    ychart = flat_col_min + (flat_col_max - flat_col_min) * ypad
+    wchart = (flat_row_max - flat_row_min) * (1 - 2*xpad)
+    hchart = (flat_col_max - flat_col_min) * (1 - 2*ypad)
+
+    # Get the colors of the 4 corner patches, in clockwise order, by measuring
+    # the average value of a small patch at each of the 4 patch centers.
+    colors = []
+    centers = []
+    for (x,y) in [(0,0), (xgrids-1,0), (xgrids-1,ygrids-1), (0,ygrids-1)]:
+        xc = xchart + (x + 0.5)*wchart/xgrids
+        yc = ychart + (y + 0.5)*hchart/ygrids
+        xc = int(xc * DOWNSCALE_FACTOR + 0.5)
+        yc = int(yc * DOWNSCALE_FACTOR + 0.5)
+        centers.append((xc,yc))
+        chan_means = __measure_color_checker_patch(img, xc,yc, 32)
+        colors.append(sum(chan_means) / len(chan_means))
+
+    # The brightest corner is the white patch, the darkest is the black patch.
+    # The black patch should be counter-clockwise from the white patch.
+    white_patch_index = None
+    for i in range(4):
+        if colors[i] == max(colors) and \
+                colors[(i-1+4)%4] == min(colors):
+            white_patch_index = i%4
+    assert(white_patch_index is not None)
+
+    # Return the coords of the origin (top-left when the chart is in the normal
+    # upright orientation) patch's center, and the vector displacement to the
+    # center of the second patch on the first row of the chart (when in the
+    # normal upright orientation).
+    origin_index = (white_patch_index+1)%4
+    prev_index = (origin_index-1+4)%4
+    next_index = (origin_index+1)%4
+    origin_center = centers[origin_index]
+    prev_center = centers[prev_index]
+    next_center = centers[next_index]
+    vec_across = tuple([(next_center[i]-origin_center[i])/5.0 for i in [0,1]])
+    vec_down = tuple([(prev_center[i]-origin_center[i])/3.0 for i in [0,1]])
+
+    # Compute the center of each patch.
+    patches = [[],[],[],[]]
+    for yi in range(4):
+        for xi in range(6):
+            x0,y0 = origin_center
+            dxh,dyh = vec_across
+            dxv,dyv = vec_down
+            xc = int(x0 + dxh*xi + dxv*yi)
+            yc = int(y0 + dyh*xi + dyv*yi)
+            patches[yi].append((xc,yc))
+
+    # Sanity check: test that the R,G,B,black,white patches are correct.
+    sanity_failed = False
+    patch_info = [(2,2,[0]), # Red
+                  (2,1,[1]), # Green
+                  (2,0,[2]), # Blue
+                  (3,0,[0,1,2]), # White
+                  (3,5,[])] # Black
+    for i in range(len(patch_info)):
+        yi,xi,high_chans = patch_info[i]
+        low_chans = [i for i in [0,1,2] if i not in high_chans]
+        xc,yc = patches[yi][xi]
+        means = __measure_color_checker_patch(img, xc,yc, 64)
+        if (min([means[i] for i in high_chans]+[1]) < \
+                max([means[i] for i in low_chans]+[0])):
+            sanity_failed = True
+
+    if debug_fname_prefix is not None:
+        gridimg = numpy.zeros([4*(32+2), 6*(32+2), 3])
+        for yi in range(4):
+            for xi in range(6):
+                xc,yc = patches[yi][xi]
+                tile = __get_color_checker_patch(img, xc,yc, 32)
+                gridimg[yi*(32+2)+1:yi*(32+2)+1+32,
+                        xi*(32+2)+1:xi*(32+2)+1+32, :] = tile
+        write_image(gridimg, debug_fname_prefix+"_2.png")
+
+    assert(not sanity_failed)
+
+    return patches
+
+class __UnitTest(unittest.TestCase):
+    """Run a suite of unit tests on this module.
+    """
+
+    # TODO: Add more unit tests.
+
+    def test_apply_matrix_to_image(self):
+        """Unit test for apply_matrix_to_image.
+
+        Test by using a canned set of values on a 1x1 pixel image.
+
+            [ 1 2 3 ]   [ 0.1 ]   [ 1.4 ]
+            [ 4 5 6 ] * [ 0.2 ] = [ 3.2 ]
+            [ 7 8 9 ]   [ 0.3 ]   [ 5.0 ]
+               mat         x         y
+        """
+        mat = numpy.array([[1,2,3],[4,5,6],[7,8,9]])
+        x = numpy.array([0.1,0.2,0.3]).reshape(1,1,3)
+        y = apply_matrix_to_image(x, mat).reshape(3).tolist()
+        y_ref = [1.4,3.2,5.0]
+        passed = all([math.fabs(y[i] - y_ref[i]) < 0.001 for i in xrange(3)])
+        self.assertTrue(passed)
+
+    def test_apply_lut_to_image(self):
+        """ Unit test for apply_lut_to_image.
+
+        Test by using a canned set of values on a 1x1 pixel image. The LUT will
+        simply double the value of the index:
+
+            lut[x] = 2*x
+        """
+        lut = numpy.array([2*i for i in xrange(65536)])
+        x = numpy.array([0.1,0.2,0.3]).reshape(1,1,3)
+        y = apply_lut_to_image(x, lut).reshape(3).tolist()
+        y_ref = [0.2,0.4,0.6]
+        passed = all([math.fabs(y[i] - y_ref[i]) < 0.001 for i in xrange(3)])
+        self.assertTrue(passed)
+
+if __name__ == '__main__':
+    unittest.main()
+
diff --git a/apps/CameraITS/pymodules/its/objects.py b/apps/CameraITS/pymodules/its/objects.py
new file mode 100644
index 0000000..a531f3b
--- /dev/null
+++ b/apps/CameraITS/pymodules/its/objects.py
@@ -0,0 +1,243 @@
+# Copyright 2013 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+import os.path
+import sys
+import re
+import json
+import tempfile
+import time
+import unittest
+import subprocess
+import math
+
+def int_to_rational(i):
+    """Function to convert Python integers to Camera2 rationals.
+
+    Args:
+        i: Python integer or list of integers.
+
+    Returns:
+        Python dictionary or list of dictionaries representing the given int(s)
+        as rationals with denominator=1.
+    """
+    if isinstance(i, list):
+        return [{"numerator":val, "denominator":1} for val in i]
+    else:
+        return {"numerator":i, "denominator":1}
+
+def float_to_rational(f, denom=128):
+    """Function to convert Python floats to Camera2 rationals.
+
+    Args:
+        f: Python float or list of floats.
+        denom: (Optional) the denominator to use in the output rationals.
+
+    Returns:
+        Python dictionary or list of dictionaries representing the given
+        float(s) as rationals.
+    """
+    if isinstance(f, list):
+        return [{"numerator":math.floor(val*denom+0.5), "denominator":denom}
+                for val in f]
+    else:
+        return {"numerator":math.floor(f*denom+0.5), "denominator":denom}
+
+def rational_to_float(r):
+    """Function to convert Camera2 rational objects to Python floats.
+
+    Args:
+        r: Rational or list of rationals, as Python dictionaries.
+
+    Returns:
+        Float or list of floats.
+    """
+    if isinstance(r, list):
+        return [float(val["numerator"]) / float(val["denominator"])
+                for val in r]
+    else:
+        return float(r["numerator"]) / float(r["denominator"])
+
+def manual_capture_request(sensitivity, exp_time, linear_tonemap=False):
+    """Return a capture request with everything set to manual.
+
+    Uses identity/unit color correction, and the default tonemap curve.
+    Optionally, the tonemap can be specified as being linear.
+
+    Args:
+        sensitivity: The sensitivity value to populate the request with.
+        exp_time: The exposure time, in nanoseconds, to populate the request
+            with.
+        linear_tonemap: [Optional] whether a linear tonemap should be used
+            in this request.
+
+    Returns:
+        The default manual capture request, ready to be passed to the
+        its.device.do_capture function.
+    """
+    req = {
+        "android.control.captureIntent": 6,
+        "android.control.mode": 0,
+        "android.control.aeMode": 0,
+        "android.control.awbMode": 0,
+        "android.control.afMode": 0,
+        "android.control.effectMode": 0,
+        "android.sensor.frameDuration": 0,
+        "android.sensor.sensitivity": sensitivity,
+        "android.sensor.exposureTime": exp_time,
+        "android.colorCorrection.mode": 0,
+        "android.colorCorrection.transform":
+                int_to_rational([1,0,0, 0,1,0, 0,0,1]),
+        "android.colorCorrection.gains": [1,1,1,1],
+        "android.tonemap.mode": 1,
+        "android.shading.mode": 1
+        }
+    if linear_tonemap:
+        req["android.tonemap.mode"] = 0
+        req["android.tonemap.curveRed"] = [0.0,0.0, 1.0,1.0]
+        req["android.tonemap.curveGreen"] = [0.0,0.0, 1.0,1.0]
+        req["android.tonemap.curveBlue"] = [0.0,0.0, 1.0,1.0]
+    return req
+
+def auto_capture_request():
+    """Return a capture request with everything set to auto.
+    """
+    return {
+        "android.control.mode": 1,
+        "android.control.aeMode": 1,
+        "android.control.awbMode": 1,
+        "android.control.afMode": 1,
+        "android.colorCorrection.mode": 1,
+        "android.tonemap.mode": 1,
+        }
+
+def get_available_output_sizes(fmt, props):
+    """Return a sorted list of available output sizes for a given format.
+
+    Args:
+        fmt: the output format, as a string in ["jpg", "yuv", "raw"].
+        props: the object returned from its.device.get_camera_properties().
+
+    Returns:
+        A sorted list of (w,h) tuples (sorted large-to-small).
+    """
+    fmt_codes = {"raw":0x20, "raw10":0x25, "yuv":0x23, "jpg":0x100, "jpeg":0x100}
+    configs = props['android.scaler.streamConfigurationMap']\
+                   ['availableStreamConfigurations']
+    fmt_configs = [cfg for cfg in configs if cfg['format'] == fmt_codes[fmt]]
+    out_configs = [cfg for cfg in fmt_configs if cfg['input'] == False]
+    out_sizes = [(cfg['width'],cfg['height']) for cfg in out_configs]
+    out_sizes.sort(reverse=True)
+    return out_sizes
+
+def set_filter_off_or_fast_if_possible(props, req, available_modes, filter):
+    """ Check and set controlKey to off or fast in req
+
+    Args:
+        props: the object returned from its.device.get_camera_properties().
+        req: the input request.
+        available_modes: the key to check available modes.
+        filter: the filter key
+
+    Returns:
+        None. control_key will be set to OFF or FAST if possible.
+    """
+    if props.has_key(available_modes):
+        if 0 in props[available_modes]:
+            req[filter] = 0
+        elif 1 in props[available_modes]:
+            req[filter] = 1
+
+def get_fastest_manual_capture_settings(props):
+    """Return a capture request and format spec for the fastest capture.
+
+    Args:
+        props: the object returned from its.device.get_camera_properties().
+
+    Returns:
+        Two values, the first is a capture request, and the second is an output
+        format specification, for the fastest possible (legal) capture that
+        can be performed on this device (with the smallest output size).
+    """
+    fmt = "yuv"
+    size = get_available_output_sizes(fmt, props)[-1]
+    out_spec = {"format":fmt, "width":size[0], "height":size[1]}
+    s = min(props['android.sensor.info.sensitivityRange'])
+    e = min(props['android.sensor.info.exposureTimeRange'])
+    req = manual_capture_request(s,e)
+
+    set_filter_off_or_fast_if_possible(props, req,
+        "android.noiseReduction.availableNoiseReductionModes",
+        "android.noiseReduction.mode")
+    set_filter_off_or_fast_if_possible(props, req,
+        "android.colorCorrection.availableAberrationModes",
+        "android.colorCorrection.aberrationMode")
+    set_filter_off_or_fast_if_possible(props, req,
+        "android.hotPixel.availableHotPixelModes",
+        "android.hotPixel.mode")
+    set_filter_off_or_fast_if_possible(props, req,
+        "android.edge.availableEdgeModes",
+        "android.edge.mode")
+
+    return req, out_spec
+
+def get_max_digital_zoom(props):
+    """Returns the maximum amount of zooming possible by the camera device.
+
+    Args:
+        props: the object returned from its.device.get_camera_properties().
+
+    Return:
+        A float indicating the maximum amount of zooming possible by the
+        camera device.
+    """
+
+    maxz = 1.0
+
+    if props.has_key("android.scaler.availableMaxDigitalZoom"):
+        maxz = props["android.scaler.availableMaxDigitalZoom"]
+
+    return maxz
+
+
+class __UnitTest(unittest.TestCase):
+    """Run a suite of unit tests on this module.
+    """
+
+    def test_int_to_rational(self):
+        """Unit test for int_to_rational.
+        """
+        self.assertEqual(int_to_rational(10),
+                         {"numerator":10,"denominator":1})
+        self.assertEqual(int_to_rational([1,2]),
+                         [{"numerator":1,"denominator":1},
+                          {"numerator":2,"denominator":1}])
+
+    def test_float_to_rational(self):
+        """Unit test for float_to_rational.
+        """
+        self.assertEqual(float_to_rational(0.5001, 64),
+                        {"numerator":32, "denominator":64})
+
+    def test_rational_to_float(self):
+        """Unit test for rational_to_float.
+        """
+        self.assertTrue(
+                abs(rational_to_float({"numerator":32,"denominator":64})-0.5)
+                < 0.0001)
+
+if __name__ == '__main__':
+    unittest.main()
+
diff --git a/apps/CameraITS/pymodules/its/target.py b/apps/CameraITS/pymodules/its/target.py
new file mode 100644
index 0000000..3715f34
--- /dev/null
+++ b/apps/CameraITS/pymodules/its/target.py
@@ -0,0 +1,266 @@
+# Copyright 2013 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import its.device
+import its.image
+import its.objects
+import os
+import os.path
+import sys
+import json
+import unittest
+import json
+
+CACHE_FILENAME = "its.target.cfg"
+
+def __do_target_exposure_measurement(its_session):
+    """Use device 3A and captured shots to determine scene exposure.
+
+    Creates a new ITS device session (so this function should not be called
+    while another session to the device is open).
+
+    Assumes that the camera is pointed at a scene that is reasonably uniform
+    and reasonably lit -- that is, an appropriate target for running the ITS
+    tests that assume such uniformity.
+
+    Measures the scene using device 3A and then by taking a shot to hone in on
+    the exact exposure level that will result in a center 10% by 10% patch of
+    the scene having an intensity level of 0.5 (in the pixel range of [0,1])
+    when a linear tonemap is used. That is, the pixels coming off the sensor
+    should be at approximately 50% intensity (however note that it's actually
+    the luma value in the YUV image that is being targeted to 50%).
+
+    The computed exposure value is the product of the sensitivity (ISO) and
+    exposure time (ns) to achieve that sensor exposure level.
+
+    Args:
+        its_session: Holds an open device session.
+
+    Returns:
+        The measured product of sensitivity and exposure time that results in
+            the luma channel of captured shots having an intensity of 0.5.
+    """
+    print "Measuring target exposure"
+
+    # Get AE+AWB lock first, so the auto values in the capture result are
+    # populated properly.
+    r = [[0.45, 0.45, 0.1, 0.1, 1]]
+    sens, exp_time, gains, xform, _ \
+            = its_session.do_3a(r,r,r,do_af=False,get_results=True)
+
+    # Convert the transform to rational.
+    xform_rat = [{"numerator":int(100*x),"denominator":100} for x in xform]
+
+    # Linear tonemap
+    tmap = sum([[i/63.0,i/63.0] for i in range(64)], [])
+
+    # Capture a manual shot with this exposure, using a linear tonemap.
+    # Use the gains+transform returned by the AWB pass.
+    req = its.objects.manual_capture_request(sens, exp_time)
+    req["android.tonemap.mode"] = 0
+    req["android.tonemap.curveRed"] = tmap
+    req["android.tonemap.curveGreen"] = tmap
+    req["android.tonemap.curveBlue"] = tmap
+    req["android.colorCorrection.transform"] = xform_rat
+    req["android.colorCorrection.gains"] = gains
+    cap = its_session.do_capture(req)
+
+    # Compute the mean luma of a center patch.
+    yimg,uimg,vimg = its.image.convert_capture_to_planes(cap)
+    tile = its.image.get_image_patch(yimg, 0.45, 0.45, 0.1, 0.1)
+    luma_mean = its.image.compute_image_means(tile)
+
+    # Compute the exposure value that would result in a luma of 0.5.
+    return sens * exp_time * 0.5 / luma_mean[0]
+
+def __set_cached_target_exposure(exposure):
+    """Saves the given exposure value to a cached location.
+
+    Once a value is cached, a call to __get_cached_target_exposure will return
+    the value, even from a subsequent test/script run. That is, the value is
+    persisted.
+
+    The value is persisted in a JSON file in the current directory (from which
+    the script calling this function is run).
+
+    Args:
+        exposure: The value to cache.
+    """
+    print "Setting cached target exposure"
+    with open(CACHE_FILENAME, "w") as f:
+        f.write(json.dumps({"exposure":exposure}))
+
+def __get_cached_target_exposure():
+    """Get the cached exposure value.
+
+    Returns:
+        The cached exposure value, or None if there is no valid cached value.
+    """
+    try:
+        with open(CACHE_FILENAME, "r") as f:
+            o = json.load(f)
+            return o["exposure"]
+    except:
+        return None
+
+def clear_cached_target_exposure():
+    """If there is a cached exposure value, clear it.
+    """
+    if os.path.isfile(CACHE_FILENAME):
+        os.remove(CACHE_FILENAME)
+
+def set_hardcoded_exposure(exposure):
+    """Set a hard-coded exposure value, rather than relying on measurements.
+
+    The exposure value is the product of sensitivity (ISO) and exposure time
+    (ns) that will result in a center-patch luma value of 0.5 (using a linear
+    tonemap) for the scene that the camera is pointing at.
+
+    If bringing up a new HAL implementation and the ability to use the device to
+    measure the scene isn't there yet (e.g. device 3A doesn't work), then a
+    cache file of the appropriate name can be manually created and populated
+    with a hard-coded value using this function.
+
+    Args:
+        exposure: The hard-coded exposure value to set.
+    """
+    __set_cached_target_exposure(exposure)
+
+def get_target_exposure(its_session=None):
+    """Get the target exposure to use.
+
+    If there is a cached value and if the "target" command line parameter is
+    present, then return the cached value. Otherwise, measure a new value from
+    the scene, cache it, then return it.
+
+    Args:
+        its_session: Optional, holding an open device session.
+
+    Returns:
+        The target exposure value.
+    """
+    cached_exposure = None
+    for s in sys.argv[1:]:
+        if s == "target":
+            cached_exposure = __get_cached_target_exposure()
+    if cached_exposure is not None:
+        print "Using cached target exposure"
+        return cached_exposure
+    if its_session is None:
+        with its.device.ItsSession() as cam:
+            measured_exposure = __do_target_exposure_measurement(cam)
+    else:
+        measured_exposure = __do_target_exposure_measurement(its_session)
+    __set_cached_target_exposure(measured_exposure)
+    return measured_exposure
+
+def get_target_exposure_combos(its_session=None):
+    """Get a set of legal combinations of target (exposure time, sensitivity).
+
+    Gets the target exposure value, which is a product of sensitivity (ISO) and
+    exposure time, and returns equivalent tuples of (exposure time,sensitivity)
+    that are all legal and that correspond to the four extrema in this 2D param
+    space, as well as to two "middle" points.
+
+    Will open a device session if its_session is None.
+
+    Args:
+        its_session: Optional, holding an open device session.
+
+    Returns:
+        Object containing six legal (exposure time, sensitivity) tuples, keyed
+        by the following strings:
+            "minExposureTime"
+            "midExposureTime"
+            "maxExposureTime"
+            "minSensitivity"
+            "midSensitivity"
+            "maxSensitivity
+    """
+    if its_session is None:
+        with its.device.ItsSession() as cam:
+            exposure = get_target_exposure(cam)
+            props = cam.get_camera_properties()
+    else:
+        exposure = get_target_exposure(its_session)
+        props = its_session.get_camera_properties()
+
+    sens_range = props['android.sensor.info.sensitivityRange']
+    exp_time_range = props['android.sensor.info.exposureTimeRange']
+
+    # Combo 1: smallest legal exposure time.
+    e1_expt = exp_time_range[0]
+    e1_sens = exposure / e1_expt
+    if e1_sens > sens_range[1]:
+        e1_sens = sens_range[1]
+        e1_expt = exposure / e1_sens
+
+    # Combo 2: largest legal exposure time.
+    e2_expt = exp_time_range[1]
+    e2_sens = exposure / e2_expt
+    if e2_sens < sens_range[0]:
+        e2_sens = sens_range[0]
+        e2_expt = exposure / e2_sens
+
+    # Combo 3: smallest legal sensitivity.
+    e3_sens = sens_range[0]
+    e3_expt = exposure / e3_sens
+    if e3_expt > exp_time_range[1]:
+        e3_expt = exp_time_range[1]
+        e3_sens = exposure / e3_expt
+
+    # Combo 4: largest legal sensitivity.
+    e4_sens = sens_range[1]
+    e4_expt = exposure / e4_sens
+    if e4_expt < exp_time_range[0]:
+        e4_expt = exp_time_range[0]
+        e4_sens = exposure / e4_expt
+
+    # Combo 5: middle exposure time.
+    e5_expt = (exp_time_range[0] + exp_time_range[1]) / 2.0
+    e5_sens = exposure / e5_expt
+    if e5_sens > sens_range[1]:
+        e5_sens = sens_range[1]
+        e5_expt = exposure / e5_sens
+    if e5_sens < sens_range[0]:
+        e5_sens = sens_range[0]
+        e5_expt = exposure / e5_sens
+
+    # Combo 6: middle sensitivity.
+    e6_sens = (sens_range[0] + sens_range[1]) / 2.0
+    e6_expt = exposure / e6_sens
+    if e6_expt > exp_time_range[1]:
+        e6_expt = exp_time_range[1]
+        e6_sens = exposure / e6_expt
+    if e6_expt < exp_time_range[0]:
+        e6_expt = exp_time_range[0]
+        e6_sens = exposure / e6_expt
+
+    return {
+        "minExposureTime" : (int(e1_expt), int(e1_sens)),
+        "maxExposureTime" : (int(e2_expt), int(e2_sens)),
+        "minSensitivity" : (int(e3_expt), int(e3_sens)),
+        "maxSensitivity" : (int(e4_expt), int(e4_sens)),
+        "midExposureTime" : (int(e5_expt), int(e5_sens)),
+        "midSensitivity" : (int(e6_expt), int(e6_sens))
+        }
+
+class __UnitTest(unittest.TestCase):
+    """Run a suite of unit tests on this module.
+    """
+    # TODO: Add some unit tests.
+
+if __name__ == '__main__':
+    unittest.main()
+
diff --git a/apps/CameraITS/tests/dng_noise_model/DngNoiseModel.pdf b/apps/CameraITS/tests/dng_noise_model/DngNoiseModel.pdf
new file mode 100644
index 0000000..01389fa
--- /dev/null
+++ b/apps/CameraITS/tests/dng_noise_model/DngNoiseModel.pdf
Binary files differ
diff --git a/apps/CameraITS/tests/dng_noise_model/dng_noise_model.py b/apps/CameraITS/tests/dng_noise_model/dng_noise_model.py
new file mode 100644
index 0000000..19b6c92
--- /dev/null
+++ b/apps/CameraITS/tests/dng_noise_model/dng_noise_model.py
@@ -0,0 +1,187 @@
+# Copyright 2014 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import its.device
+import its.objects
+import its.image
+import pprint
+import pylab
+import os.path
+import matplotlib
+import matplotlib.pyplot
+import numpy
+import math
+
+def main():
+    """Compute the DNG noise model from a color checker chart.
+
+    TODO: Make this more robust; some manual futzing may be needed.
+    """
+    NAME = os.path.basename(__file__).split(".")[0]
+
+    with its.device.ItsSession() as cam:
+
+        props = cam.get_camera_properties()
+
+        white_level = float(props['android.sensor.info.whiteLevel'])
+        black_levels = props['android.sensor.blackLevelPattern']
+        idxs = its.image.get_canonical_cfa_order(props)
+        black_levels = [black_levels[i] for i in idxs]
+
+        # Expose for the scene with min sensitivity
+        sens_min, sens_max = props['android.sensor.info.sensitivityRange']
+        s_ae,e_ae,awb_gains,awb_ccm,_  = cam.do_3a(get_results=True)
+        s_e_prod = s_ae * e_ae
+
+        # Make the image brighter since the script looks at linear Bayer
+        # raw patches rather than gamma-encoded YUV patches (and the AE
+        # probably under-exposes a little for this use-case).
+        s_e_prod *= 2
+
+        # Capture raw frames across the full sensitivity range.
+        NUM_SENS_STEPS = 9
+        sens_step = int((sens_max - sens_min - 1) / float(NUM_SENS_STEPS))
+        reqs = []
+        sens = []
+        for s in range(sens_min, sens_max, sens_step):
+            e = int(s_e_prod / float(s))
+            req = its.objects.manual_capture_request(s, e)
+            req["android.colorCorrection.transform"] = \
+                    its.objects.float_to_rational(awb_ccm)
+            req["android.colorCorrection.gains"] = awb_gains
+            reqs.append(req)
+            sens.append(s)
+
+        caps = cam.do_capture(reqs, cam.CAP_RAW)
+
+        # A list of the (x,y) coords of the center pixel of a collection of
+        # patches of a color checker chart. Each patch should be uniform,
+        # however the actual color doesn't matter. Note that the coords are
+        # relative to the *converted* RGB image, which is 1/2 x 1/2 of the
+        # full size; convert back to full.
+        img = its.image.convert_capture_to_rgb_image(caps[0], props=props)
+        patches = its.image.get_color_checker_chart_patches(img, NAME+"_debug")
+        patches = [(2*x,2*y) for (x,y) in sum(patches,[])]
+
+        lines = []
+        for iouter, (s,cap) in enumerate(zip(sens,caps)):
+            # For each capture, compute the mean value in each patch, for each
+            # Bayer plane; discard patches where pixels are close to clamped.
+            # Also compute the variance.
+            CLAMP_THRESH = 0.2
+            planes = its.image.convert_capture_to_planes(cap, props)
+            points = []
+            for i,plane in enumerate(planes):
+                plane = (plane * white_level - black_levels[i]) / (
+                        white_level - black_levels[i])
+                for j,(x,y) in enumerate(patches):
+                    tile = plane[y/2-16:y/2+16:,x/2-16:x/2+16:,::]
+                    mean = its.image.compute_image_means(tile)[0]
+                    var = its.image.compute_image_variances(tile)[0]
+                    if (mean > CLAMP_THRESH and mean < 1.0-CLAMP_THRESH):
+                        # Each point is a (mean,variance) tuple for a patch;
+                        # for a given ISO, there should be a linear
+                        # relationship between these values.
+                        points.append((mean,var))
+
+            # Fit a line to the points, with a line equation: y = mx + b.
+            # This line is the relationship between mean and variance (i.e.)
+            # between signal level and noise, for this particular sensor.
+            # In the DNG noise model, the gradient (m) is "S", and the offset
+            # (b) is "O".
+            points.sort()
+            xs = [x for (x,y) in points]
+            ys = [y for (x,y) in points]
+            m,b = numpy.polyfit(xs, ys, 1)
+            lines.append((s,m,b))
+            print s, "->", m, b
+
+            # TODO: Clean up these checks (which currently fail in some cases).
+            # Some sanity checks:
+            # * Noise levels should increase with brightness.
+            # * Extrapolating to a black image, the noise should be positive.
+            # Basically, the "b" value should correspond to the read noise,
+            # which is the noise level if the sensor was operating in zero
+            # light.
+            #assert(m > 0)
+            #assert(b >= 0)
+
+            if iouter == 0:
+                pylab.plot(xs, ys, 'r', label="Measured")
+                pylab.plot([0,xs[-1]],[b,m*xs[-1]+b],'b', label="Fit")
+            else:
+                pylab.plot(xs, ys, 'r')
+                pylab.plot([0,xs[-1]],[b,m*xs[-1]+b],'b')
+
+        pylab.xlabel("Mean")
+        pylab.ylabel("Variance")
+        pylab.legend()
+        matplotlib.pyplot.savefig("%s_plot_mean_vs_variance.png" % (NAME))
+
+        # Now fit a line across the (m,b) line parameters for each sensitivity.
+        # The gradient (m) params are fit to the "S" line, and the offset (b)
+        # params are fit to the "O" line, both as a function of sensitivity.
+        gains = [d[0] for d in lines]
+        Ss = [d[1] for d in lines]
+        Os = [d[2] for d in lines]
+        mS,bS = numpy.polyfit(gains, Ss, 1)
+        mO,bO = numpy.polyfit(gains, Os, 1)
+
+        # Plot curve "O" as 10x, so it fits in the same scale as curve "S".
+        fig = matplotlib.pyplot.figure()
+        pylab.plot(gains, [10*o for o in Os], 'r', label="Measured")
+        pylab.plot([gains[0],gains[-1]],
+                [10*mO*gains[0]+10*bO, 10*mO*gains[-1]+10*bO],'r--',label="Fit")
+        pylab.plot(gains, Ss, 'b', label="Measured")
+        pylab.plot([gains[0],gains[-1]], [mS*gains[0]+bS,mS*gains[-1]+bS],'b--',
+                label="Fit")
+        pylab.xlabel("Sensitivity")
+        pylab.ylabel("Model parameter: S (blue), O x10 (red)")
+        pylab.legend()
+        matplotlib.pyplot.savefig("%s_plot_S_O.png" % (NAME))
+
+        print """
+        /* Generated test code to dump a table of data for external validation
+         * of the noise model parameters.
+         */
+        #include <stdio.h>
+        #include <assert.h>
+        double compute_noise_model_entry_S(int sens);
+        double compute_noise_model_entry_O(int sens);
+        int main(void) {
+            int sens;
+            for (sens = %d; sens <= %d; sens += 100) {
+                double o = compute_noise_model_entry_O(sens);
+                double s = compute_noise_model_entry_S(sens);
+                printf("%%d,%%lf,%%lf\\n", sens, o, s);
+            }
+            return 0;
+        }
+
+        /* Generated functions to map a given sensitivity to the O and S noise
+         * model parameters in the DNG noise model.
+         */
+        double compute_noise_model_entry_S(int sens) {
+            double s = %e * sens + %e;
+            return s < 0.0 ? 0.0 : s;
+        }
+        double compute_noise_model_entry_O(int sens) {
+            double o = %e * sens + %e;
+            return o < 0.0 ? 0.0 : o;
+        }
+        """%(sens_min,sens_max,mS,bS,mO,bO)
+
+if __name__ == '__main__':
+    main()
+
diff --git a/apps/CameraITS/tests/inprog/scene2/README b/apps/CameraITS/tests/inprog/scene2/README
new file mode 100644
index 0000000..3a0953f
--- /dev/null
+++ b/apps/CameraITS/tests/inprog/scene2/README
@@ -0,0 +1,8 @@
+Scene 2 requires a camera lab with controlled illuminants, for example
+light sources capable of producing D65, D50, A, TL84, etc. illumination.
+Specific charts may also be required, for example grey cards, color
+checker charts, and resolution charts. The individual tests will specify
+the setup that they require.
+
+If a test requires that the camera be in any particular orientation, it will
+specify this too. Otherwise, the camera can be in either portrait or landscape.
diff --git a/apps/CameraITS/tests/inprog/scene2/test_dng_tags.py b/apps/CameraITS/tests/inprog/scene2/test_dng_tags.py
new file mode 100644
index 0000000..0c96ca7
--- /dev/null
+++ b/apps/CameraITS/tests/inprog/scene2/test_dng_tags.py
@@ -0,0 +1,94 @@
+# Copyright 2014 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import its.image
+import its.device
+import its.dng
+import its.objects
+import numpy
+import os.path
+
+def main():
+    """Test that the DNG tags are internally self-consistent.
+    """
+    NAME = os.path.basename(__file__).split(".")[0]
+
+    with its.device.ItsSession() as cam:
+        props = cam.get_camera_properties()
+
+        # Assumes that illuminant 1 is D65, and illuminant 2 is standard A.
+        # TODO: Generalize DNG tags check for any provided illuminants.
+        illum_code = [21, 17] # D65, A
+        illum_str = ['D65', 'A']
+        ref_str = ['android.sensor.referenceIlluminant%d'%(i) for i in [1,2]]
+        cm_str = ['android.sensor.colorTransform%d'%(i) for i in [1,2]]
+        fm_str = ['android.sensor.forwardMatrix%d'%(i) for i in [1,2]]
+        cal_str = ['android.sensor.calibrationTransform%d'%(i) for i in [1,2]]
+        dng_illum = [its.dng.D65, its.dng.A]
+
+        for i in [0,1]:
+            assert(props[ref_str[i]] == illum_code[i])
+            raw_input("\n[Point camera at grey card under %s and press ENTER]"%(
+                    illum_str[i]))
+
+            cam.do_3a(do_af=False)
+            cap = cam.do_capture(its.objects.auto_capture_request())
+            gains = cap["metadata"]["android.colorCorrection.gains"]
+            ccm = its.objects.rational_to_float(
+                    cap["metadata"]["android.colorCorrection.transform"])
+            cal = its.objects.rational_to_float(props[cal_str[i]])
+            print "HAL reported gains:\n", numpy.array(gains)
+            print "HAL reported ccm:\n", numpy.array(ccm).reshape(3,3)
+            print "HAL reported cal:\n", numpy.array(cal).reshape(3,3)
+
+            # Dump the image.
+            img = its.image.convert_capture_to_rgb_image(cap)
+            its.image.write_image(img, "%s_%s.jpg" % (NAME, illum_str[i]))
+
+            # Compute the matrices that are expected under this illuminant from
+            # the HAL-reported WB gains, CCM, and calibration matrix.
+            cm, fm = its.dng.compute_cm_fm(dng_illum[i], gains, ccm, cal)
+            asn = its.dng.compute_asn(dng_illum[i], cal, cm)
+            print "Expected ColorMatrix:\n", cm
+            print "Expected ForwardMatrix:\n", fm
+            print "Expected AsShotNeutral:\n", asn
+
+            # Get the matrices that are reported by the HAL for this
+            # illuminant.
+            cm_ref = numpy.array(its.objects.rational_to_float(
+                    props[cm_str[i]])).reshape(3,3)
+            fm_ref = numpy.array(its.objects.rational_to_float(
+                    props[fm_str[i]])).reshape(3,3)
+            asn_ref = numpy.array(its.objects.rational_to_float(
+                    cap['metadata']['android.sensor.neutralColorPoint']))
+            print "Reported ColorMatrix:\n", cm_ref
+            print "Reported ForwardMatrix:\n", fm_ref
+            print "Reported AsShotNeutral:\n", asn_ref
+
+            # The color matrix may be scaled (between the reported and
+            # expected values).
+            cm_scale = cm.mean(1).mean(0) / cm_ref.mean(1).mean(0)
+            print "ColorMatrix scale factor:", cm_scale
+
+            # Compute the deltas between reported and expected.
+            print "Ratios in ColorMatrix:\n", cm / cm_ref
+            print "Deltas in ColorMatrix (after normalizing):\n", cm/cm_scale - cm_ref
+            print "Deltas in ForwardMatrix:\n", fm - fm_ref
+            print "Deltas in AsShotNeutral:\n", asn - asn_ref
+
+            # TODO: Add pass/fail test on DNG matrices.
+
+if __name__ == '__main__':
+    main()
+
diff --git a/apps/CameraITS/tests/inprog/test_3a_remote.py b/apps/CameraITS/tests/inprog/test_3a_remote.py
new file mode 100644
index 0000000..c76ff6d
--- /dev/null
+++ b/apps/CameraITS/tests/inprog/test_3a_remote.py
@@ -0,0 +1,70 @@
+# Copyright 2013 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import its.image
+import its.device
+import its.objects
+import os.path
+import pprint
+import math
+import numpy
+import matplotlib.pyplot
+import mpl_toolkits.mplot3d
+
+def main():
+    """Run 3A remotely (from this script).
+    """
+    NAME = os.path.basename(__file__).split(".")[0]
+
+    with its.device.ItsSession() as cam:
+        props = cam.get_camera_properties()
+        w_map = props["android.lens.info.shadingMapSize"]["width"]
+        h_map = props["android.lens.info.shadingMapSize"]["height"]
+
+        # TODO: Test for 3A convergence, and exit this test once converged.
+
+        triggered = False
+        while True:
+            req = its.objects.auto_capture_request()
+            req["android.statistics.lensShadingMapMode"] = 1
+            req['android.control.aePrecaptureTrigger'] = (0 if triggered else 1)
+            req['android.control.afTrigger'] = (0 if triggered else 1)
+            triggered = True
+
+            cap = cam.do_capture(req)
+
+            ae_state = cap["metadata"]["android.control.aeState"]
+            awb_state = cap["metadata"]["android.control.awbState"]
+            af_state = cap["metadata"]["android.control.afState"]
+            gains = cap["metadata"]["android.colorCorrection.gains"]
+            transform = cap["metadata"]["android.colorCorrection.transform"]
+            exp_time = cap["metadata"]['android.sensor.exposureTime']
+            lsc_map = cap["metadata"]["android.statistics.lensShadingMap"]
+            foc_dist = cap["metadata"]['android.lens.focusDistance']
+            foc_range = cap["metadata"]['android.lens.focusRange']
+
+            print "States (AE,AWB,AF):", ae_state, awb_state, af_state
+            print "Gains:", gains
+            print "Transform:", [its.objects.rational_to_float(t)
+                                 for t in transform]
+            print "AE region:", cap["metadata"]['android.control.aeRegions']
+            print "AF region:", cap["metadata"]['android.control.afRegions']
+            print "AWB region:", cap["metadata"]['android.control.awbRegions']
+            print "LSC map:", w_map, h_map, lsc_map[:8]
+            print "Focus (dist,range):", foc_dist, foc_range
+            print ""
+
+if __name__ == '__main__':
+    main()
+
diff --git a/apps/CameraITS/tests/inprog/test_black_level.py b/apps/CameraITS/tests/inprog/test_black_level.py
new file mode 100644
index 0000000..37dab94
--- /dev/null
+++ b/apps/CameraITS/tests/inprog/test_black_level.py
@@ -0,0 +1,99 @@
+# Copyright 2013 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import its.image
+import its.device
+import its.objects
+import pylab
+import os.path
+import matplotlib
+import matplotlib.pyplot
+import numpy
+
+def main():
+    """Black level consistency test.
+
+    Test: capture dark frames and check if black level correction is done
+    correctly.
+    1. Black level should be roughly consistent for repeating shots.
+    2. Noise distribution should be roughly centered at black level.
+
+    Shoot with the camera covered (i.e. dark/black). The test varies the
+    sensitivity parameter.
+    """
+    NAME = os.path.basename(__file__).split(".")[0]
+
+    NUM_REPEAT = 3
+    NUM_STEPS = 3
+
+    # Only check the center part where LSC has little effects.
+    R = 200
+
+    # The most frequent pixel value in each image; assume this is the black
+    # level, since the images are all dark (shot with the lens covered).
+    ymodes = []
+    umodes = []
+    vmodes = []
+
+    with its.device.ItsSession() as cam:
+        props = cam.get_camera_properties()
+        sens_range = props['android.sensor.info.sensitivityRange']
+        sens_step = (sens_range[1] - sens_range[0]) / float(NUM_STEPS-1)
+        sensitivities = [sens_range[0] + i*sens_step for i in range(NUM_STEPS)]
+        print "Sensitivities:", sensitivities
+
+        for si, s in enumerate(sensitivities):
+            for rep in xrange(NUM_REPEAT):
+                req = its.objects.manual_capture_request(100, 1*1000*1000)
+                req["android.blackLevel.lock"] = True
+                req["android.sensor.sensitivity"] = s
+                cap = cam.do_capture(req)
+                yimg,uimg,vimg = its.image.convert_capture_to_planes(cap)
+                w = cap["width"]
+                h = cap["height"]
+
+                # Magnify the noise in saved images to help visualize.
+                its.image.write_image(yimg * 2,
+                                      "%s_s=%05d_y.jpg" % (NAME, s), True)
+                its.image.write_image(numpy.absolute(uimg - 0.5) * 2,
+                                      "%s_s=%05d_u.jpg" % (NAME, s), True)
+
+                yimg = yimg[w/2-R:w/2+R, h/2-R:h/2+R]
+                uimg = uimg[w/4-R/2:w/4+R/2, w/4-R/2:w/4+R/2]
+                vimg = vimg[w/4-R/2:w/4+R/2, w/4-R/2:w/4+R/2]
+                yhist,_ = numpy.histogram(yimg*255, 256, (0,256))
+                ymodes.append(numpy.argmax(yhist))
+                uhist,_ = numpy.histogram(uimg*255, 256, (0,256))
+                umodes.append(numpy.argmax(uhist))
+                vhist,_ = numpy.histogram(vimg*255, 256, (0,256))
+                vmodes.append(numpy.argmax(vhist))
+
+                # Take 32 bins from Y, U, and V.
+                # Histograms of U and V are cropped at the center of 128.
+                pylab.plot(range(32), yhist.tolist()[0:32], 'rgb'[si])
+                pylab.plot(range(32), uhist.tolist()[112:144], 'rgb'[si]+'--')
+                pylab.plot(range(32), vhist.tolist()[112:144], 'rgb'[si]+'--')
+
+    pylab.xlabel("DN: Y[0:32], U[112:144], V[112:144]")
+    pylab.ylabel("Pixel count")
+    pylab.title("Histograms for different sensitivities")
+    matplotlib.pyplot.savefig("%s_plot_histograms.png" % (NAME))
+
+    print "Y black levels:", ymodes
+    print "U black levels:", umodes
+    print "V black levels:", vmodes
+
+if __name__ == '__main__':
+    main()
+
diff --git a/apps/CameraITS/tests/inprog/test_blc_lsc.py b/apps/CameraITS/tests/inprog/test_blc_lsc.py
new file mode 100644
index 0000000..ce120a2
--- /dev/null
+++ b/apps/CameraITS/tests/inprog/test_blc_lsc.py
@@ -0,0 +1,106 @@
+# Copyright 2013 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import its.image
+import its.device
+import its.objects
+import pylab
+import os.path
+import matplotlib
+import matplotlib.pyplot
+
+def main():
+    """Test that BLC and LSC look reasonable.
+    """
+    NAME = os.path.basename(__file__).split(".")[0]
+
+    r_means_center = []
+    g_means_center = []
+    b_means_center = []
+    r_means_corner = []
+    g_means_corner = []
+    b_means_corner = []
+
+    with its.device.ItsSession() as cam:
+        props = cam.get_camera_properties()
+        expt_range = props['android.sensor.info.exposureTimeRange']
+
+        # Get AE+AWB lock first, so the auto values in the capture result are
+        # populated properly.
+        r = [[0,0,1,1,1]]
+        ae_sen,ae_exp,awb_gains,awb_transform,_ \
+                = cam.do_3a(r,r,r,do_af=False,get_results=True)
+        print "AE:", ae_sen, ae_exp / 1000000.0
+        print "AWB:", awb_gains, awb_transform
+
+        # Set analog gain (sensitivity) to 800
+        ae_exp = ae_exp * ae_sen / 800
+        ae_sen = 800
+
+        # Capture range of exposures from 1/100x to 4x of AE estimate.
+        exposures = [ae_exp*x/100.0 for x in [1]+range(10,401,40)]
+        exposures = [e for e in exposures
+                     if e >= expt_range[0] and e <= expt_range[1]]
+
+        # Convert the transform back to rational.
+        awb_transform_rat = its.objects.float_to_rational(awb_transform)
+
+        # Linear tonemap
+        tmap = sum([[i/63.0,i/63.0] for i in range(64)], [])
+
+        reqs = []
+        for e in exposures:
+            req = its.objects.manual_capture_request(ae_sen,e)
+            req["android.tonemap.mode"] = 0
+            req["android.tonemap.curveRed"] = tmap
+            req["android.tonemap.curveGreen"] = tmap
+            req["android.tonemap.curveBlue"] = tmap
+            req["android.colorCorrection.transform"] = awb_transform_rat
+            req["android.colorCorrection.gains"] = awb_gains
+            reqs.append(req)
+
+        caps = cam.do_capture(reqs)
+        for i,cap in enumerate(caps):
+            img = its.image.convert_capture_to_rgb_image(cap)
+            its.image.write_image(img, "%s_i=%d.jpg"%(NAME, i))
+
+            tile_center = its.image.get_image_patch(img, 0.45, 0.45, 0.1, 0.1)
+            rgb_means = its.image.compute_image_means(tile_center)
+            r_means_center.append(rgb_means[0])
+            g_means_center.append(rgb_means[1])
+            b_means_center.append(rgb_means[2])
+
+            tile_corner = its.image.get_image_patch(img, 0.0, 0.0, 0.1, 0.1)
+            rgb_means = its.image.compute_image_means(tile_corner)
+            r_means_corner.append(rgb_means[0])
+            g_means_corner.append(rgb_means[1])
+            b_means_corner.append(rgb_means[2])
+
+    fig = matplotlib.pyplot.figure()
+    pylab.plot(exposures, r_means_center, 'r')
+    pylab.plot(exposures, g_means_center, 'g')
+    pylab.plot(exposures, b_means_center, 'b')
+    pylab.ylim([0,1])
+    matplotlib.pyplot.savefig("%s_plot_means_center.png" % (NAME))
+
+    fig = matplotlib.pyplot.figure()
+    pylab.plot(exposures, r_means_corner, 'r')
+    pylab.plot(exposures, g_means_corner, 'g')
+    pylab.plot(exposures, b_means_corner, 'b')
+    pylab.ylim([0,1])
+    matplotlib.pyplot.savefig("%s_plot_means_corner.png" % (NAME))
+
+if __name__ == '__main__':
+    main()
+
diff --git a/apps/CameraITS/tests/inprog/test_burst_sameness_auto.py b/apps/CameraITS/tests/inprog/test_burst_sameness_auto.py
new file mode 100644
index 0000000..fdf72be
--- /dev/null
+++ b/apps/CameraITS/tests/inprog/test_burst_sameness_auto.py
@@ -0,0 +1,91 @@
+# Copyright 2014 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import its.image
+import its.caps
+import its.device
+import its.objects
+import os.path
+import numpy
+
+def main():
+    """Take long bursts of images and check that they're all identical.
+
+    Assumes a static scene. Can be used to identify if there are sporadic
+    frames that are processed differently or have artifacts, or if 3A isn't
+    stable, since this test converges 3A at the start but doesn't lock 3A
+    throughout capture.
+    """
+    NAME = os.path.basename(__file__).split(".")[0]
+
+    BURST_LEN = 50
+    BURSTS = 5
+    FRAMES = BURST_LEN * BURSTS
+
+    SPREAD_THRESH = 0.03
+
+    with its.device.ItsSession() as cam:
+
+        # Capture at the smallest resolution.
+        props = cam.get_camera_properties()
+        its.caps.skip_unless(its.caps.manual_sensor(props))
+
+        _, fmt = its.objects.get_fastest_manual_capture_settings(props)
+        w,h = fmt["width"], fmt["height"]
+
+        # Converge 3A prior to capture.
+        cam.do_3a(lock_ae=True, lock_awb=True)
+
+        # After 3A has converged, lock AE+AWB for the duration of the test.
+        req = its.objects.auto_capture_request()
+        req["android.blackLevel.lock"] = True
+        req["android.control.awbLock"] = True
+        req["android.control.aeLock"] = True
+
+        # Capture bursts of YUV shots.
+        # Get the mean values of a center patch for each.
+        # Also build a 4D array, which is an array of all RGB images.
+        r_means = []
+        g_means = []
+        b_means = []
+        imgs = numpy.empty([FRAMES,h,w,3])
+        for j in range(BURSTS):
+            caps = cam.do_capture([req]*BURST_LEN, [fmt])
+            for i,cap in enumerate(caps):
+                n = j*BURST_LEN + i
+                imgs[n] = its.image.convert_capture_to_rgb_image(cap)
+                tile = its.image.get_image_patch(imgs[n], 0.45, 0.45, 0.1, 0.1)
+                means = its.image.compute_image_means(tile)
+                r_means.append(means[0])
+                g_means.append(means[1])
+                b_means.append(means[2])
+
+        # Dump all images.
+        print "Dumping images"
+        for i in range(FRAMES):
+            its.image.write_image(imgs[i], "%s_frame%03d.jpg"%(NAME,i))
+
+        # The mean image.
+        img_mean = imgs.mean(0)
+        its.image.write_image(img_mean, "%s_mean.jpg"%(NAME))
+
+        # Pass/fail based on center patch similarity.
+        for means in [r_means, g_means, b_means]:
+            spread = max(means) - min(means)
+            print spread
+            assert(spread < SPREAD_THRESH)
+
+if __name__ == '__main__':
+    main()
+
diff --git a/apps/CameraITS/tests/inprog/test_burst_sameness_fullres_auto.py b/apps/CameraITS/tests/inprog/test_burst_sameness_fullres_auto.py
new file mode 100644
index 0000000..a8d1d45
--- /dev/null
+++ b/apps/CameraITS/tests/inprog/test_burst_sameness_fullres_auto.py
@@ -0,0 +1,91 @@
+# Copyright 2014 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import its.image
+import its.device
+import its.objects
+import os.path
+import numpy
+import pylab
+import matplotlib
+import matplotlib.pyplot
+
+def main():
+    """Take long bursts of images and check that they're all identical.
+
+    Assumes a static scene. Can be used to identify if there are sporadic
+    frames that are processed differently or have artifacts, or if 3A isn't
+    stable, since this test converges 3A at the start but doesn't lock 3A
+    throughout capture.
+    """
+    NAME = os.path.basename(__file__).split(".")[0]
+
+    BURST_LEN = 6
+    BURSTS = 2
+    FRAMES = BURST_LEN * BURSTS
+
+    DELTA_THRESH = 0.1
+
+    with its.device.ItsSession() as cam:
+
+        # Capture at full resolution.
+        props = cam.get_camera_properties()
+        w,h = its.objects.get_available_output_sizes("yuv", props)[0]
+
+        # Converge 3A prior to capture.
+        cam.do_3a(lock_ae=True, lock_awb=True)
+
+        # After 3A has converged, lock AE+AWB for the duration of the test.
+        req = its.objects.auto_capture_request()
+        req["android.blackLevel.lock"] = True
+        req["android.control.awbLock"] = True
+        req["android.control.aeLock"] = True
+
+        # Capture bursts of YUV shots.
+        # Build a 4D array, which is an array of all RGB images after down-
+        # scaling them by a factor of 4x4.
+        imgs = numpy.empty([FRAMES,h/4,w/4,3])
+        for j in range(BURSTS):
+            caps = cam.do_capture([req]*BURST_LEN)
+            for i,cap in enumerate(caps):
+                n = j*BURST_LEN + i
+                imgs[n] = its.image.downscale_image(
+                        its.image.convert_capture_to_rgb_image(cap), 4)
+
+        # Dump all images.
+        print "Dumping images"
+        for i in range(FRAMES):
+            its.image.write_image(imgs[i], "%s_frame%03d.jpg"%(NAME,i))
+
+        # The mean image.
+        img_mean = imgs.mean(0)
+        its.image.write_image(img_mean, "%s_mean.jpg"%(NAME))
+
+        # Compute the deltas of each image from the mean image; this test
+        # passes if none of the deltas are large.
+        print "Computing frame differences"
+        delta_maxes = []
+        for i in range(FRAMES):
+            deltas = (imgs[i] - img_mean).reshape(h*w*3/16)
+            delta_max_pos = numpy.max(deltas)
+            delta_max_neg = numpy.min(deltas)
+            delta_maxes.append(max(abs(delta_max_pos), abs(delta_max_neg)))
+        max_delta_max = max(delta_maxes)
+        print "Frame %d has largest diff %f" % (
+                delta_maxes.index(max_delta_max), max_delta_max)
+        assert(max_delta_max < DELTA_THRESH)
+
+if __name__ == '__main__':
+    main()
+
diff --git a/apps/CameraITS/tests/inprog/test_crop_region.py b/apps/CameraITS/tests/inprog/test_crop_region.py
new file mode 100644
index 0000000..396603f
--- /dev/null
+++ b/apps/CameraITS/tests/inprog/test_crop_region.py
@@ -0,0 +1,67 @@
+# Copyright 2014 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+import its.image
+import its.device
+import its.objects
+
+
+def main():
+    """Takes shots with different sensor crop regions.
+    """
+    name = os.path.basename(__file__).split(".")[0]
+
+    # Regions specified here in x,y,w,h normalized form.
+    regions = [[0.0, 0.0, 0.5, 0.5], # top left
+               [0.0, 0.5, 0.5, 0.5], # bottom left
+               [0.1, 0.9, 0.5, 1.0]] # right side (top + bottom)
+
+    with its.device.ItsSession() as cam:
+        props = cam.get_camera_properties()
+        r = props['android.sensor.info.pixelArraySize']
+        w = r['width']
+        h = r['height']
+
+        # Capture a full frame first.
+        reqs = [its.objects.auto_capture_request()]
+        print "Capturing img0 with the full sensor region"
+
+        # Capture a frame for each of the regions.
+        for i,region in enumerate(regions):
+            req = its.objects.auto_capture_request()
+            req['android.scaler.cropRegion'] = {
+                    "left": int(region[0] * w),
+                    "top": int(region[1] * h),
+                    "right": int((region[0]+region[2])*w),
+                    "bottom": int((region[1]+region[3])*h)}
+            reqs.append(req)
+            crop = req['android.scaler.cropRegion']
+            print "Capturing img%d with crop: %d,%d %dx%d"%(i+1,
+                    crop["left"],crop["top"],
+                    crop["right"]-crop["left"],crop["bottom"]-crop["top"])
+
+        cam.do_3a()
+        caps = cam.do_capture(reqs)
+
+        for i,cap in enumerate(caps):
+            img = its.image.convert_capture_to_rgb_image(cap)
+            crop = cap["metadata"]['android.scaler.cropRegion']
+            its.image.write_image(img, "%s_img%d.jpg"%(name,i))
+            print "Captured img%d with crop: %d,%d %dx%d"%(i,
+                    crop["left"],crop["top"],
+                    crop["right"]-crop["left"],crop["bottom"]-crop["top"])
+
+if __name__ == '__main__':
+    main()
diff --git a/apps/CameraITS/tests/inprog/test_faces.py b/apps/CameraITS/tests/inprog/test_faces.py
new file mode 100644
index 0000000..228dac8
--- /dev/null
+++ b/apps/CameraITS/tests/inprog/test_faces.py
@@ -0,0 +1,41 @@
+# Copyright 2014 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import its.image
+import its.device
+import its.objects
+import os.path
+
+def main():
+    """Test face detection.
+    """
+    NAME = os.path.basename(__file__).split(".")[0]
+
+    with its.device.ItsSession() as cam:
+        cam.do_3a()
+        req = its.objects.auto_capture_request()
+        req['android.statistics.faceDetectMode'] = 2
+        caps = cam.do_capture([req]*5)
+        for i,cap in enumerate(caps):
+            md = cap['metadata']
+            print "Frame %d face metadata:" % i
+            print "  Ids:", md['android.statistics.faceIds']
+            print "  Landmarks:", md['android.statistics.faceLandmarks']
+            print "  Rectangles:", md['android.statistics.faceRectangles']
+            print "  Scores:", md['android.statistics.faceScores']
+            print ""
+
+if __name__ == '__main__':
+    main()
+
diff --git a/apps/CameraITS/tests/inprog/test_param_black_level_lock.py b/apps/CameraITS/tests/inprog/test_param_black_level_lock.py
new file mode 100644
index 0000000..7d0be92
--- /dev/null
+++ b/apps/CameraITS/tests/inprog/test_param_black_level_lock.py
@@ -0,0 +1,76 @@
+# Copyright 2013 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import its.image
+import its.device
+import its.objects
+import pylab
+import os.path
+import matplotlib
+import matplotlib.pyplot
+import numpy
+
+def main():
+    """Test that when the black level is locked, it doesn't change.
+
+    Shoot with the camera covered (i.e. dark/black). The test varies the
+    sensitivity parameter and checks if the black level changes.
+    """
+    NAME = os.path.basename(__file__).split(".")[0]
+
+    NUM_STEPS = 5
+
+    req = {
+        "android.blackLevel.lock": True,
+        "android.control.mode": 0,
+        "android.control.aeMode": 0,
+        "android.control.awbMode": 0,
+        "android.control.afMode": 0,
+        "android.sensor.frameDuration": 0,
+        "android.sensor.exposureTime": 10*1000*1000
+        }
+
+    # The most frequent pixel value in each image; assume this is the black
+    # level, since the images are all dark (shot with the lens covered).
+    modes = []
+
+    with its.device.ItsSession() as cam:
+        props = cam.get_camera_properties()
+        sens_range = props['android.sensor.info.sensitivityRange']
+        sensitivities = range(sens_range[0],
+                              sens_range[1]+1,
+                              int((sens_range[1] - sens_range[0]) / NUM_STEPS))
+        for si, s in enumerate(sensitivities):
+            req["android.sensor.sensitivity"] = s
+            cap = cam.do_capture(req)
+            yimg,_,_ = its.image.convert_capture_to_planes(cap)
+            hist,_ = numpy.histogram(yimg*255, 256, (0,256))
+            modes.append(numpy.argmax(hist))
+
+            # Add this histogram to a plot; solid for shots without BL
+            # lock, dashes for shots with BL lock
+            pylab.plot(range(16), hist.tolist()[:16])
+
+    pylab.xlabel("Luma DN, showing [0:16] out of full [0:256] range")
+    pylab.ylabel("Pixel count")
+    pylab.title("Histograms for different sensitivities")
+    matplotlib.pyplot.savefig("%s_plot_histograms.png" % (NAME))
+
+    # Check that the black levels are all the same.
+    print "Black levels:", modes
+    assert(all([modes[i] == modes[0] for i in range(len(modes))]))
+
+if __name__ == '__main__':
+    main()
+
diff --git a/apps/CameraITS/tests/inprog/test_param_edge_mode.py b/apps/CameraITS/tests/inprog/test_param_edge_mode.py
new file mode 100644
index 0000000..e928f21
--- /dev/null
+++ b/apps/CameraITS/tests/inprog/test_param_edge_mode.py
@@ -0,0 +1,48 @@
+# Copyright 2013 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import its.image
+import its.device
+import its.objects
+import pylab
+import os.path
+import matplotlib
+import matplotlib.pyplot
+
def main():
    """Test that the android.edge.mode parameter is applied.

    Captures one shot per edge mode (0, 1, 2) with every other control held
    fixed, and saves each frame to disk for manual inspection.
    """
    test_name = os.path.basename(__file__).split(".")[0]

    # Fully manual request so that edge mode is the only varying control.
    base_req = {
        "android.control.mode": 0,
        "android.control.aeMode": 0,
        "android.control.awbMode": 0,
        "android.control.afMode": 0,
        "android.sensor.frameDuration": 0,
        "android.sensor.exposureTime": 30*1000*1000,
        "android.sensor.sensitivity": 100
        }

    with its.device.ItsSession() as cam:
        # Converge 3A first; the returned estimates themselves are unused.
        cam.do_3a(get_results=True)
        for edge_mode in (0, 1, 2):
            base_req["android.edge.mode"] = edge_mode
            frame = cam.do_capture(base_req)
            rgb = its.image.convert_capture_to_rgb_image(frame)
            its.image.write_image(rgb, "%s_mode=%d.jpg" % (test_name, edge_mode))

if __name__ == '__main__':
    main()
+
diff --git a/apps/CameraITS/tests/inprog/test_test_patterns.py b/apps/CameraITS/tests/inprog/test_test_patterns.py
new file mode 100644
index 0000000..f75b141
--- /dev/null
+++ b/apps/CameraITS/tests/inprog/test_test_patterns.py
@@ -0,0 +1,41 @@
+# Copyright 2014 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import its.image
+import its.device
+import its.objects
+import os.path
+
def main():
    """Test sensor test patterns.

    Steps through sensor test-pattern modes 1..5, capturing each pattern
    twice and saving the second (stabilized) frame for manual inspection.
    There is no automated pass/fail check on the image content.
    """
    NAME = os.path.basename(__file__).split(".")[0]

    with its.device.ItsSession() as cam:
        for i in range(1,6):
            req = its.objects.manual_capture_request(100, 10*1000*1000)
            # Per-channel pattern data; only consumed by some pattern modes.
            req['android.sensor.testPatternData'] = [40, 100, 160, 220]
            req['android.sensor.testPatternMode'] = i

            # Capture the shot twice, and use the second one, so the pattern
            # will have stabilized.
            caps = cam.do_capture([req]*2)

            img = its.image.convert_capture_to_rgb_image(caps[1])
            its.image.write_image(img, "%s_pattern=%d.jpg" % (NAME, i))

if __name__ == '__main__':
    main()
+
diff --git a/apps/CameraITS/tests/scene0/test_camera_properties.py b/apps/CameraITS/tests/scene0/test_camera_properties.py
new file mode 100644
index 0000000..eb638f0
--- /dev/null
+++ b/apps/CameraITS/tests/scene0/test_camera_properties.py
@@ -0,0 +1,43 @@
+# Copyright 2013 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import its.caps
+import its.device
+import its.objects
+import pprint
+
+def main():
+    """Basic test to query and print out camera properties.
+    """
+
+    with its.device.ItsSession() as cam:
+        props = cam.get_camera_properties()
+
+        pprint.pprint(props)
+
+        its.caps.skip_unless(its.caps.manual_sensor(props))
+
+        # Test that a handful of required keys are present.
+        assert(props.has_key('android.sensor.info.sensitivityRange'))
+        assert(props.has_key('android.sensor.orientation'))
+        assert(props.has_key('android.scaler.streamConfigurationMap'))
+        assert(props.has_key('android.lens.facing'))
+
+        print "JPG sizes:", its.objects.get_available_output_sizes("jpg", props)
+        print "RAW sizes:", its.objects.get_available_output_sizes("raw", props)
+        print "YUV sizes:", its.objects.get_available_output_sizes("yuv", props)
+
+if __name__ == '__main__':
+    main()
+
diff --git a/apps/CameraITS/tests/scene0/test_capture_result_dump.py b/apps/CameraITS/tests/scene0/test_capture_result_dump.py
new file mode 100644
index 0000000..6646557
--- /dev/null
+++ b/apps/CameraITS/tests/scene0/test_capture_result_dump.py
@@ -0,0 +1,40 @@
+# Copyright 2014 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import its.caps
+import its.image
+import its.device
+import its.objects
+import its.target
+import pprint
+
def main():
    """Test that a capture result is returned from a manual capture; dump it.
    """

    with its.device.ItsSession() as cam:
        # Image content is unimportant here; only the returned metadata is
        # inspected, so use the fastest manual capture settings available.
        props = cam.get_camera_properties()
        its.caps.skip_unless(its.caps.manual_sensor(props))

        request, out_fmt = its.objects.get_fastest_manual_capture_settings(props)
        result = cam.do_capture(request, out_fmt)
        pprint.pprint(result["metadata"])

        # No pass/fail check; test passes if it completes.

if __name__ == '__main__':
    main()
+
diff --git a/apps/CameraITS/tests/scene0/test_gyro_bias.py b/apps/CameraITS/tests/scene0/test_gyro_bias.py
new file mode 100644
index 0000000..7ea90c3
--- /dev/null
+++ b/apps/CameraITS/tests/scene0/test_gyro_bias.py
@@ -0,0 +1,78 @@
+# Copyright 2014 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import its.image
+import its.caps
+import its.device
+import its.objects
+import its.target
+import time
+import pylab
+import os.path
+import matplotlib
+import matplotlib.pyplot
+import numpy
+
+def main():
+    """Test if the gyro has stable output when device is stationary.
+    """
+    NAME = os.path.basename(__file__).split(".")[0]
+
+    # Number of samples averaged together, in the plot.
+    N = 20
+
+    # Pass/fail thresholds for gyro drift
+    MEAN_THRESH = 0.01
+    VAR_THRESH = 0.001
+
+    with its.device.ItsSession() as cam:
+        props = cam.get_camera_properties()
+        # Only run test if the appropriate caps are claimed.
+        its.caps.skip_unless(its.caps.sensor_fusion(props))
+
+        print "Collecting gyro events"
+        cam.start_sensor_events()
+        time.sleep(5)
+        gyro_events = cam.get_sensor_events()["gyro"]
+
+    nevents = (len(gyro_events) / N) * N
+    gyro_events = gyro_events[:nevents]
+    times = numpy.array([(e["time"] - gyro_events[0]["time"])/1000000000.0
+                         for e in gyro_events])
+    xs = numpy.array([e["x"] for e in gyro_events])
+    ys = numpy.array([e["y"] for e in gyro_events])
+    zs = numpy.array([e["z"] for e in gyro_events])
+
+    # Group samples into size-N groups and average each together, to get rid
+    # of individual random spikes in the data.
+    times = times[N/2::N]
+    xs = xs.reshape(nevents/N, N).mean(1)
+    ys = ys.reshape(nevents/N, N).mean(1)
+    zs = zs.reshape(nevents/N, N).mean(1)
+
+    pylab.plot(times, xs, 'r', label="x")
+    pylab.plot(times, ys, 'g', label="y")
+    pylab.plot(times, zs, 'b', label="z")
+    pylab.xlabel("Time (seconds)")
+    pylab.ylabel("Gyro readings (mean of %d samples)"%(N))
+    pylab.legend()
+    matplotlib.pyplot.savefig("%s_plot.png" % (NAME))
+
+    for samples in [xs,ys,zs]:
+        assert(samples.mean() < MEAN_THRESH)
+        assert(numpy.var(samples) < VAR_THRESH)
+
+if __name__ == '__main__':
+    main()
+
diff --git a/apps/CameraITS/tests/scene0/test_jitter.py b/apps/CameraITS/tests/scene0/test_jitter.py
new file mode 100644
index 0000000..82e8e38
--- /dev/null
+++ b/apps/CameraITS/tests/scene0/test_jitter.py
@@ -0,0 +1,65 @@
+# Copyright 2014 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import its.image
+import its.caps
+import its.device
+import its.objects
+import os.path
+import pylab
+import matplotlib
+import matplotlib.pyplot
+
def main():
    """Measure jitter in camera timestamps.

    Captures a 50-frame manual burst and checks that the millisecond deltas
    between consecutive sensor timestamps have a large enough average and a
    small enough variance / worst-case deviation.
    """
    NAME = os.path.basename(__file__).split(".")[0]

    # Pass/fail thresholds
    MIN_AVG_FRAME_DELTA = 30 # at least 30ms delta between frames
    MAX_VAR_FRAME_DELTA = 0.01 # variance of frame deltas
    MAX_FRAME_DELTA_JITTER = 0.3 # max ms gap from the average frame delta

    with its.device.ItsSession() as cam:
        props = cam.get_camera_properties()
        its.caps.skip_unless(its.caps.manual_sensor(props))

        req, fmt = its.objects.get_fastest_manual_capture_settings(props)
        caps = cam.do_capture([req]*50, [fmt])

        # Print out the millisecond delta between the start of each exposure
        tstamps = [c['metadata']['android.sensor.timestamp'] for c in caps]
        deltas = [tstamps[i]-tstamps[i-1] for i in range(1,len(tstamps))]
        deltas_ms = [d/1000000.0 for d in deltas]  # ns -> ms
        avg = sum(deltas_ms) / len(deltas_ms)
        # Population variance: E[d^2] - E[d]^2.
        var = sum([d*d for d in deltas_ms]) / len(deltas_ms) - avg * avg
        # Worst-case deviations below/above the average delta.
        range0 = min(deltas_ms) - avg
        range1 = max(deltas_ms) - avg
        print "Average:", avg
        print "Variance:", var
        print "Jitter range:", range0, "to", range1

        # Draw a plot.
        pylab.plot(range(len(deltas_ms)), deltas_ms)
        matplotlib.pyplot.savefig("%s_deltas.png" % (NAME))

        # Test for pass/fail.
        assert(avg > MIN_AVG_FRAME_DELTA)
        assert(var < MAX_VAR_FRAME_DELTA)
        assert(abs(range0) < MAX_FRAME_DELTA_JITTER)
        assert(abs(range1) < MAX_FRAME_DELTA_JITTER)

if __name__ == '__main__':
    main()
+
diff --git a/apps/CameraITS/tests/scene0/test_metadata.py b/apps/CameraITS/tests/scene0/test_metadata.py
new file mode 100644
index 0000000..b4ca4cb
--- /dev/null
+++ b/apps/CameraITS/tests/scene0/test_metadata.py
@@ -0,0 +1,98 @@
+# Copyright 2014 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import its.image
+import its.device
+import its.objects
+import its.target
+import its.caps
+
def main():
    """Test the validity of some metadata entries.

    Looks at capture results and at the camera characteristics objects.
    """
    # check()/getval() (defined below) eval their expression strings against
    # these module-level globals, so they must be published here.
    global md, props, failed

    with its.device.ItsSession() as cam:
        # Arbitrary capture request exposure values; image content is not
        # important for this test, only the metadata.
        props = cam.get_camera_properties()
        auto_req = its.objects.auto_capture_request()
        cap = cam.do_capture(auto_req)
        md = cap["metadata"]

    print "Hardware level"
    print "  Legacy:", its.caps.legacy(props)
    print "  Limited:", its.caps.limited(props)
    print "  Full:", its.caps.full(props)
    print "Capabilities"
    print "  Manual sensor:", its.caps.manual_sensor(props)
    print "  Manual post-proc:", its.caps.manual_post_proc(props)
    print "  Raw:", its.caps.raw(props)
    print "  Sensor fusion:", its.caps.sensor_fusion(props)

    # Test: hardware level should be a valid value.
    check('props.has_key("android.info.supportedHardwareLevel")')
    check('props["android.info.supportedHardwareLevel"] is not None')
    check('props["android.info.supportedHardwareLevel"] in [0,1,2]')
    # Level 1 is treated as FULL by the croppingType check at the bottom.
    full = getval('props["android.info.supportedHardwareLevel"]') == 1

    # Test: rollingShutterSkew, and frameDuration tags must all be present,
    # and rollingShutterSkew must be greater than zero and smaller than all
    # of the possible frame durations.
    check('md.has_key("android.sensor.frameDuration")')
    check('md["android.sensor.frameDuration"] is not None')
    check('md.has_key("android.sensor.rollingShutterSkew")')
    check('md["android.sensor.rollingShutterSkew"] is not None')
    check('md["android.sensor.frameDuration"] > '
          'md["android.sensor.rollingShutterSkew"] > 0')

    # Test: timestampSource must be a valid value.
    check('props.has_key("android.sensor.info.timestampSource")')
    check('props["android.sensor.info.timestampSource"] is not None')
    check('props["android.sensor.info.timestampSource"] in [0,1]')

    # Test: croppingType must be a valid value, and for full devices, it
    # must be FREEFORM=1.
    check('props.has_key("android.scaler.croppingType")')
    check('props["android.scaler.croppingType"] is not None')
    check('props["android.scaler.croppingType"] in [0,1]')
    if full:
        check('props["android.scaler.croppingType"] == 1')

    # check() latches the module-level `failed` flag rather than raising,
    # so fail the test here if any individual check failed.
    assert(not failed)
+
def getval(expr, default=None):
    """Evaluate *expr* against the module globals.

    Returns the evaluated value, or *default* if evaluation raises for any
    reason (missing key, undefined name, etc.).
    """
    try:
        return eval(expr)
    # Catch Exception, not bare except: a bare except would also swallow
    # SystemExit/KeyboardInterrupt.
    except Exception:
        return default
+
+failed = False
+def check(expr):
+    global md, props, failed
+    try:
+        if eval(expr):
+            print "Passed>", expr
+        else:
+            print "Failed>>", expr
+            failed = True
+    except:
+        print "Failed>>", expr
+        failed = True
+
+if __name__ == '__main__':
+    main()
+
diff --git a/apps/CameraITS/tests/scene0/test_param_sensitivity_burst.py b/apps/CameraITS/tests/scene0/test_param_sensitivity_burst.py
new file mode 100644
index 0000000..a6a5214
--- /dev/null
+++ b/apps/CameraITS/tests/scene0/test_param_sensitivity_burst.py
@@ -0,0 +1,48 @@
+# Copyright 2013 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import its.image
+import its.caps
+import its.device
+import its.objects
+import its.target
+
def main():
    """Test that the android.sensor.sensitivity parameter is applied properly
    within a burst. Inspects the output metadata only (not the image data).
    """

    NUM_STEPS = 3

    with its.device.ItsSession() as cam:
        props = cam.get_camera_properties()
        its.caps.skip_unless(its.caps.manual_sensor(props) and
                             its.caps.per_frame_control(props))

        # Build a burst that sweeps sensitivity across the supported range,
        # using the shortest supported exposure time for every shot.
        sens_range = props['android.sensor.info.sensitivityRange']
        sens_step = (sens_range[1] - sens_range[0]) / NUM_STEPS
        sens_list = range(sens_range[0], sens_range[1], sens_step)
        exp_time = min(props['android.sensor.info.exposureTimeRange'])
        burst = [its.objects.manual_capture_request(s, exp_time)
                 for s in sens_list]
        _, fmt = its.objects.get_fastest_manual_capture_settings(props)

        # Each result must echo exactly the sensitivity that was requested.
        caps = cam.do_capture(burst, fmt)
        for i, cap in enumerate(caps):
            reported = cap["metadata"]["android.sensor.sensitivity"]
            assert(sens_list[i] == reported)

if __name__ == '__main__':
    main()
+
diff --git a/apps/CameraITS/tests/scene0/test_sensor_events.py b/apps/CameraITS/tests/scene0/test_sensor_events.py
new file mode 100644
index 0000000..5973de2
--- /dev/null
+++ b/apps/CameraITS/tests/scene0/test_sensor_events.py
@@ -0,0 +1,42 @@
+# Copyright 2014 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import its.device
+import its.caps
+import time
+
+def main():
+    """Basic test to query and print out sensor events.
+
+    Test will only work if the screen is on (i.e.) the device isn't in standby.
+    Pass if some of each event are received.
+    """
+
+    with its.device.ItsSession() as cam:
+        props = cam.get_camera_properties()
+        # Only run test if the appropriate caps are claimed.
+        its.caps.skip_unless(its.caps.sensor_fusion(props))
+
+        cam.start_sensor_events()
+        time.sleep(1)
+        events = cam.get_sensor_events()
+        print "Events over 1s: %d gyro, %d accel, %d mag"%(
+                len(events["gyro"]), len(events["accel"]), len(events["mag"]))
+        assert(len(events["gyro"]) > 0)
+        assert(len(events["accel"]) > 0)
+        assert(len(events["mag"]) > 0)
+
+if __name__ == '__main__':
+    main()
+
diff --git a/apps/CameraITS/tests/scene0/test_unified_timestamps.py b/apps/CameraITS/tests/scene0/test_unified_timestamps.py
new file mode 100644
index 0000000..019e6c5
--- /dev/null
+++ b/apps/CameraITS/tests/scene0/test_unified_timestamps.py
@@ -0,0 +1,65 @@
+# Copyright 2014 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import its.device
+import its.objects
+import its.caps
+import time
+
def main():
    """Test if image and motion sensor events are in the same time domain.

    Captures an image, then collects ~0.5s of motion sensor events, then
    captures a second image; every motion timestamp must fall strictly
    between the two image timestamps, which can only hold if both use the
    same clock.
    """

    with its.device.ItsSession() as cam:
        props = cam.get_camera_properties()

        # Only run test if the appropriate caps are claimed.
        its.caps.skip_unless(its.caps.sensor_fusion(props))

        # Get the timestamp of a captured image.
        req, fmt = its.objects.get_fastest_manual_capture_settings(props)
        cap = cam.do_capture(req, fmt)
        ts_image0 = cap['metadata']['android.sensor.timestamp']

        # Get the timestamps of motion events.
        print "Reading sensor measurements"
        cam.start_sensor_events()
        time.sleep(0.5)
        events = cam.get_sensor_events()
        assert(len(events["gyro"]) > 0)
        assert(len(events["accel"]) > 0)
        assert(len(events["mag"]) > 0)
        # First and last timestamp of each motion stream.
        ts_gyro0 = events["gyro"][0]["time"]
        ts_gyro1 = events["gyro"][-1]["time"]
        ts_accel0 = events["accel"][0]["time"]
        ts_accel1 = events["accel"][-1]["time"]
        ts_mag0 = events["mag"][0]["time"]
        ts_mag1 = events["mag"][-1]["time"]

        # Get the timestamp of another image.
        cap = cam.do_capture(req, fmt)
        ts_image1 = cap['metadata']['android.sensor.timestamp']

        print "Image timestamps:", ts_image0, ts_image1
        print "Gyro timestamps:", ts_gyro0, ts_gyro1
        print "Accel timestamps:", ts_accel0, ts_accel1
        print "Mag timestamps:", ts_mag0, ts_mag1

        # The motion timestamps must be between the two image timestamps.
        assert ts_image0 < min(ts_gyro0, ts_accel0, ts_mag0) < ts_image1
        assert ts_image0 < max(ts_gyro1, ts_accel1, ts_mag1) < ts_image1

if __name__ == '__main__':
    main()
+
diff --git a/apps/CameraITS/tests/scene1/test_3a.py b/apps/CameraITS/tests/scene1/test_3a.py
new file mode 100644
index 0000000..08cd747
--- /dev/null
+++ b/apps/CameraITS/tests/scene1/test_3a.py
@@ -0,0 +1,40 @@
+# Copyright 2013 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import its.device
+import its.caps
+
+def main():
+    """Basic test for bring-up of 3A.
+
+    To pass, 3A must converge. Check that the returned 3A values are legal.
+    """
+
+    with its.device.ItsSession() as cam:
+        props = cam.get_camera_properties()
+        its.caps.skip_unless(its.caps.read_3a(props))
+
+        sens, exp, gains, xform, focus = cam.do_3a(get_results=True)
+        print "AE: sensitivity %d, exposure %dms" % (sens, exp/1000000)
+        print "AWB: gains", gains, "transform", xform
+        print "AF: distance", focus
+        assert(sens > 0)
+        assert(exp > 0)
+        assert(len(gains) == 4)
+        assert(len(xform) == 9)
+        assert(focus >= 0)
+
+if __name__ == '__main__':
+    main()
+
diff --git a/apps/CameraITS/tests/scene1/test_ae_precapture_trigger.py b/apps/CameraITS/tests/scene1/test_ae_precapture_trigger.py
new file mode 100644
index 0000000..563cebd
--- /dev/null
+++ b/apps/CameraITS/tests/scene1/test_ae_precapture_trigger.py
@@ -0,0 +1,80 @@
+# Copyright 2014 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import its.device
+import its.caps
+import its.objects
+import its.target
+
# AE must converge within this number of auto requests under scene1
THRESH_AE_CONVERGE = 8

def main():
    """Test the AE state machine when using the precapture trigger.

    Verifies that AE stays INACTIVE while it is disabled (even when a
    precapture trigger is sent), reports a legal state after auto requests
    with and without the trigger, and finally converges within
    THRESH_AE_CONVERGE auto requests.
    """

    # android.control.aeState values.
    INACTIVE = 0
    SEARCHING = 1
    CONVERGED = 2
    LOCKED = 3
    FLASHREQUIRED = 4
    PRECAPTURE = 5

    with its.device.ItsSession() as cam:
        props = cam.get_camera_properties()
        its.caps.skip_unless(its.caps.compute_target_exposure(props) and
                             its.caps.per_frame_control(props))

        _,fmt = its.objects.get_fastest_manual_capture_settings(props)

        # Capture 5 manual requests, with AE disabled, and the last request
        # has an AE precapture trigger (which should be ignored since AE is
        # disabled).
        manual_reqs = []
        e, s = its.target.get_target_exposure_combos(cam)["midExposureTime"]
        manual_req = its.objects.manual_capture_request(s,e)
        manual_req['android.control.aeMode'] = 0 # Off
        manual_reqs += [manual_req]*4
        precap_req = its.objects.manual_capture_request(s,e)
        precap_req['android.control.aeMode'] = 0 # Off
        precap_req['android.control.aePrecaptureTrigger'] = 1 # Start
        manual_reqs.append(precap_req)
        caps = cam.do_capture(manual_reqs, fmt)
        for cap in caps:
            # With AE off, the state must never leave INACTIVE.
            assert(cap['metadata']['android.control.aeState'] == INACTIVE)

        # Capture an auto request and verify the AE state; no trigger.
        auto_req = its.objects.auto_capture_request()
        auto_req['android.control.aeMode'] = 1  # On
        cap = cam.do_capture(auto_req, fmt)
        state = cap['metadata']['android.control.aeState']
        print "AE state after auto request:", state
        assert(state in [SEARCHING, CONVERGED])

        # Capture with auto request with a precapture trigger.
        auto_req['android.control.aePrecaptureTrigger'] = 1  # Start
        cap = cam.do_capture(auto_req, fmt)
        state = cap['metadata']['android.control.aeState']
        print "AE state after auto request with precapture trigger:", state
        assert(state in [SEARCHING, CONVERGED, PRECAPTURE])

        # Capture some more auto requests, and AE should converge.
        auto_req['android.control.aePrecaptureTrigger'] = 0
        caps = cam.do_capture([auto_req] * THRESH_AE_CONVERGE, fmt)
        state = caps[-1]['metadata']['android.control.aeState']
        print "AE state after auto request:", state
        assert(state == CONVERGED)

if __name__ == '__main__':
    main()
diff --git a/apps/CameraITS/tests/scene1/test_auto_vs_manual.py b/apps/CameraITS/tests/scene1/test_auto_vs_manual.py
new file mode 100644
index 0000000..a9efa0b
--- /dev/null
+++ b/apps/CameraITS/tests/scene1/test_auto_vs_manual.py
@@ -0,0 +1,94 @@
+# Copyright 2014 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import its.image
+import its.caps
+import its.device
+import its.objects
+import os.path
+import math
+
def main():
    """Capture auto and manual shots that should look the same.

    Manual shots taken with just manual WB, and also with manual WB+tonemap.

    In all cases, the general color/look of the shots should be the same,
    however there can be variations in brightness/contrast due to different
    "auto" ISP blocks that may be disabled in the manual flows.
    """
    NAME = os.path.basename(__file__).split(".")[0]

    with its.device.ItsSession() as cam:
        props = cam.get_camera_properties()
        its.caps.skip_unless(its.caps.manual_sensor(props) and
                             its.caps.manual_post_proc(props) and
                             its.caps.per_frame_control(props))

        # Converge 3A and get the estimates.
        # do_3a returns sensitivity, exposure (printed as ms below, so
        # presumably ns), AWB gains, AWB transform, and focus distance.
        sens, exp, gains, xform, focus = cam.do_3a(get_results=True)
        xform_rat = its.objects.float_to_rational(xform)
        print "AE sensitivity %d, exposure %dms" % (sens, exp/1000000.0)
        print "AWB gains", gains
        print "AWB transform", xform
        print "AF distance", focus

        # Auto capture.
        req = its.objects.auto_capture_request()
        cap_auto = cam.do_capture(req)
        img_auto = its.image.convert_capture_to_rgb_image(cap_auto)
        its.image.write_image(img_auto, "%s_auto.jpg" % (NAME))
        xform_a = its.objects.rational_to_float(
                cap_auto["metadata"]["android.colorCorrection.transform"])
        gains_a = cap_auto["metadata"]["android.colorCorrection.gains"]
        print "Auto gains:", gains_a
        print "Auto transform:", xform_a

        # Manual capture 1: WB
        req = its.objects.manual_capture_request(sens, exp)
        req["android.colorCorrection.transform"] = xform_rat
        req["android.colorCorrection.gains"] = gains
        cap_man1 = cam.do_capture(req)
        img_man1 = its.image.convert_capture_to_rgb_image(cap_man1)
        its.image.write_image(img_man1, "%s_manual_wb.jpg" % (NAME))
        xform_m1 = its.objects.rational_to_float(
                cap_man1["metadata"]["android.colorCorrection.transform"])
        gains_m1 = cap_man1["metadata"]["android.colorCorrection.gains"]
        print "Manual wb gains:", gains_m1
        print "Manual wb transform:", xform_m1

        # Manual capture 2: WB + tonemap
        # NOTE: deliberately reuses (and further constrains) the manual WB
        # request built above, adding a tonemap curve on top of it.
        # The curve is a 64-point gamma (exponent 1/2.2) encoded as
        # interleaved (input, output) pairs.
        gamma = sum([[i/63.0,math.pow(i/63.0,1/2.2)] for i in xrange(64)],[])
        req["android.tonemap.mode"] = 0
        req["android.tonemap.curveRed"] = gamma
        req["android.tonemap.curveGreen"] = gamma
        req["android.tonemap.curveBlue"] = gamma
        cap_man2 = cam.do_capture(req)
        img_man2 = its.image.convert_capture_to_rgb_image(cap_man2)
        its.image.write_image(img_man2, "%s_manual_wb_tm.jpg" % (NAME))
        xform_m2 = its.objects.rational_to_float(
                cap_man2["metadata"]["android.colorCorrection.transform"])
        gains_m2 = cap_man2["metadata"]["android.colorCorrection.gains"]
        print "Manual wb+tm gains:", gains_m2
        print "Manual wb+tm transform:", xform_m2

        # Check that the WB gains and transform reported in each capture
        # result match with the original AWB estimate from do_3a.
        for g,x in [(gains_a,xform_a),(gains_m1,xform_m1),(gains_m2,xform_m2)]:
            assert(all([abs(xform[i] - x[i]) < 0.05 for i in range(9)]))
            assert(all([abs(gains[i] - g[i]) < 0.05 for i in range(4)]))

if __name__ == '__main__':
    main()
+
diff --git a/apps/CameraITS/tests/scene1/test_black_white.py b/apps/CameraITS/tests/scene1/test_black_white.py
new file mode 100644
index 0000000..68d7de6
--- /dev/null
+++ b/apps/CameraITS/tests/scene1/test_black_white.py
@@ -0,0 +1,85 @@
+# Copyright 2013 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import its.image
+import its.caps
+import its.device
+import its.objects
+import pylab
+import os.path
+import matplotlib
+import matplotlib.pyplot
+
+def main():
+    """Test that the device will produce full black+white images.
+    """
+    NAME = os.path.basename(__file__).split(".")[0]
+
+    r_means = []
+    g_means = []
+    b_means = []
+
+    with its.device.ItsSession() as cam:
+        props = cam.get_camera_properties()
+        its.caps.skip_unless(its.caps.manual_sensor(props) and
+                             its.caps.per_frame_control(props))
+
+        expt_range = props['android.sensor.info.exposureTimeRange']
+        sens_range = props['android.sensor.info.sensitivityRange']
+
+        # Take a shot with very low ISO and exposure time. Expect it to
+        # be black.
+        print "Black shot: sens = %d, exp time = %.4fms" % (
+                sens_range[0], expt_range[0]/1000000.0)
+        req = its.objects.manual_capture_request(sens_range[0], expt_range[0])
+        cap = cam.do_capture(req)
+        img = its.image.convert_capture_to_rgb_image(cap)
+        its.image.write_image(img, "%s_black.jpg" % (NAME))
+        tile = its.image.get_image_patch(img, 0.45, 0.45, 0.1, 0.1)
+        black_means = its.image.compute_image_means(tile)
+        r_means.append(black_means[0])
+        g_means.append(black_means[1])
+        b_means.append(black_means[2])
+        print "Dark pixel means:", black_means
+
+        # Take a shot with very high ISO and exposure time. Expect it to
+        # be white.
+        print "White shot: sens = %d, exp time = %.2fms" % (
+                sens_range[1], expt_range[1]/1000000.0)
+        req = its.objects.manual_capture_request(sens_range[1], expt_range[1])
+        cap = cam.do_capture(req)
+        img = its.image.convert_capture_to_rgb_image(cap)
+        its.image.write_image(img, "%s_white.jpg" % (NAME))
+        tile = its.image.get_image_patch(img, 0.45, 0.45, 0.1, 0.1)
+        white_means = its.image.compute_image_means(tile)
+        r_means.append(white_means[0])
+        g_means.append(white_means[1])
+        b_means.append(white_means[2])
+        print "Bright pixel means:", white_means
+
+        # Draw a plot.
+        pylab.plot([0,1], r_means, 'r')
+        pylab.plot([0,1], g_means, 'g')
+        pylab.plot([0,1], b_means, 'b')
+        pylab.ylim([0,1])
+        matplotlib.pyplot.savefig("%s_plot_means.png" % (NAME))
+
+        for val in black_means:
+            assert(val < 0.025)
+        for val in white_means:
+            assert(val > 0.975)
+
+if __name__ == '__main__':
+    main()
+
diff --git a/apps/CameraITS/tests/scene1/test_burst_sameness_manual.py b/apps/CameraITS/tests/scene1/test_burst_sameness_manual.py
new file mode 100644
index 0000000..edb8995
--- /dev/null
+++ b/apps/CameraITS/tests/scene1/test_burst_sameness_manual.py
@@ -0,0 +1,85 @@
+# Copyright 2014 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import its.image
+import its.caps
+import its.device
+import its.objects
+import its.target
+import os.path
+import numpy
+
+def main():
+    """Take long bursts of images and check that they're all identical.
+
+    Assumes a static scene. Can be used to identify if there are sporadic
+    frames that are processed differently or have artifacts. Uses manual
+    capture settings.
+    """
+    NAME = os.path.basename(__file__).split(".")[0]
+
+    BURST_LEN = 50
+    BURSTS = 5
+    FRAMES = BURST_LEN * BURSTS
+
+    SPREAD_THRESH = 0.03
+
+    with its.device.ItsSession() as cam:
+
+        # Capture at the smallest resolution.
+        props = cam.get_camera_properties()
+        its.caps.skip_unless(its.caps.manual_sensor(props) and
+                             its.caps.per_frame_control(props))
+
+        _, fmt = its.objects.get_fastest_manual_capture_settings(props)
+        e, s = its.target.get_target_exposure_combos(cam)["minSensitivity"]
+        req = its.objects.manual_capture_request(s, e)
+        w,h = fmt["width"], fmt["height"]
+
+        # Capture bursts of YUV shots.
+        # Get the mean values of a center patch for each.
+        # Also build a 4D array, which is an array of all RGB images.
+        r_means = []
+        g_means = []
+        b_means = []
+        imgs = numpy.empty([FRAMES,h,w,3])
+        for j in range(BURSTS):
+            caps = cam.do_capture([req]*BURST_LEN, [fmt])
+            for i,cap in enumerate(caps):
+                n = j*BURST_LEN + i
+                imgs[n] = its.image.convert_capture_to_rgb_image(cap)
+                tile = its.image.get_image_patch(imgs[n], 0.45, 0.45, 0.1, 0.1)
+                means = its.image.compute_image_means(tile)
+                r_means.append(means[0])
+                g_means.append(means[1])
+                b_means.append(means[2])
+
+        # Dump all images.
+        print "Dumping images"
+        for i in range(FRAMES):
+            its.image.write_image(imgs[i], "%s_frame%03d.jpg"%(NAME,i))
+
+        # The mean image.
+        img_mean = imgs.mean(0)
+        its.image.write_image(img_mean, "%s_mean.jpg"%(NAME))
+
+        # Pass/fail based on center patch similarity.
+        for means in [r_means, g_means, b_means]:
+            spread = max(means) - min(means)
+            print spread
+            assert(spread < SPREAD_THRESH)
+
+if __name__ == '__main__':
+    main()
+
diff --git a/apps/CameraITS/tests/scene1/test_capture_result.py b/apps/CameraITS/tests/scene1/test_capture_result.py
new file mode 100644
index 0000000..331d1cd
--- /dev/null
+++ b/apps/CameraITS/tests/scene1/test_capture_result.py
@@ -0,0 +1,213 @@
+# Copyright 2013 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import its.image
+import its.caps
+import its.device
+import its.objects
+import os.path
+import numpy
+import matplotlib.pyplot
+
+# Required for 3d plot to work
+import mpl_toolkits.mplot3d
+
+def main():
+    """Test that valid data comes back in CaptureResult objects.
+    """
+    global NAME, auto_req, manual_req, w_map, h_map
+    global manual_tonemap, manual_transform, manual_gains, manual_region
+    global manual_exp_time, manual_sensitivity, manual_gains_ok
+
+    NAME = os.path.basename(__file__).split(".")[0]
+
+    with its.device.ItsSession() as cam:
+        props = cam.get_camera_properties()
+        its.caps.skip_unless(its.caps.manual_sensor(props) and
+                             its.caps.manual_post_proc(props) and
+                             its.caps.per_frame_control(props))
+
+        manual_tonemap = [0,0, 1,1] # Linear
+        manual_transform = its.objects.int_to_rational([1,2,3, 4,5,6, 7,8,9])
+        manual_gains = [1,2,3,4]
+        manual_region = [{"x":8,"y":8,"width":128,"height":128,"weight":1}]
+        manual_exp_time = min(props['android.sensor.info.exposureTimeRange'])
+        manual_sensitivity = min(props['android.sensor.info.sensitivityRange'])
+
+        # The camera HAL may not support different gains for two G channels.
+        manual_gains_ok = [[1,2,3,4],[1,2,2,4],[1,3,3,4]]
+
+        auto_req = its.objects.auto_capture_request()
+        auto_req["android.statistics.lensShadingMapMode"] = 1
+
+        manual_req = {
+            "android.control.mode": 0,
+            "android.control.aeMode": 0,
+            "android.control.awbMode": 0,
+            "android.control.afMode": 0,
+            "android.sensor.frameDuration": 0,
+            "android.sensor.sensitivity": manual_sensitivity,
+            "android.sensor.exposureTime": manual_exp_time,
+            "android.colorCorrection.mode": 0,
+            "android.colorCorrection.transform": manual_transform,
+            "android.colorCorrection.gains": manual_gains,
+            "android.tonemap.mode": 0,
+            "android.tonemap.curveRed": manual_tonemap,
+            "android.tonemap.curveGreen": manual_tonemap,
+            "android.tonemap.curveBlue": manual_tonemap,
+            "android.control.aeRegions": manual_region,
+            "android.control.afRegions": manual_region,
+            "android.control.awbRegions": manual_region,
+            "android.statistics.lensShadingMapMode":1
+            }
+
+        w_map = props["android.lens.info.shadingMapSize"]["width"]
+        h_map = props["android.lens.info.shadingMapSize"]["height"]
+
+        print "Testing auto capture results"
+        lsc_map_auto = test_auto(cam, w_map, h_map)
+        print "Testing manual capture results"
+        test_manual(cam, w_map, h_map, lsc_map_auto)
+        print "Testing auto capture results again"
+        test_auto(cam, w_map, h_map)
+
+# A very loose definition for two floats being close to each other;
+# there may be different interpolation and rounding used to get the
+# two values, and all this test is looking at is whether there is
+# something obviously broken; it's not looking for a perfect match.
+def is_close_float(n1, n2):
+    return abs(n1 - n2) < 0.05
+
+def is_close_rational(n1, n2):
+    return is_close_float(its.objects.rational_to_float(n1),
+                          its.objects.rational_to_float(n2))
+
+def draw_lsc_plot(w_map, h_map, lsc_map, name):
+    for ch in range(4):
+        fig = matplotlib.pyplot.figure()
+        ax = fig.gca(projection='3d')
+        xs = numpy.array([range(w_map)] * h_map).reshape(h_map, w_map)
+        ys = numpy.array([[i]*w_map for i in range(h_map)]).reshape(
+                h_map, w_map)
+        zs = numpy.array(lsc_map[ch::4]).reshape(h_map, w_map)
+        ax.plot_wireframe(xs, ys, zs)
+        matplotlib.pyplot.savefig("%s_plot_lsc_%s_ch%d.png"%(NAME,name,ch))
+
+def test_auto(cam, w_map, h_map):
+    # Get 3A lock first, so the auto values in the capture result are
+    # populated properly.
+    rect = [[0,0,1,1,1]]
+    cam.do_3a(rect, rect, rect, do_af=False)
+
+    cap = cam.do_capture(auto_req)
+    cap_res = cap["metadata"]
+
+    gains = cap_res["android.colorCorrection.gains"]
+    transform = cap_res["android.colorCorrection.transform"]
+    exp_time = cap_res['android.sensor.exposureTime']
+    lsc_map = cap_res["android.statistics.lensShadingMap"]
+    ctrl_mode = cap_res["android.control.mode"]
+
+    print "Control mode:", ctrl_mode
+    print "Gains:", gains
+    print "Transform:", [its.objects.rational_to_float(t)
+                         for t in transform]
+    print "AE region:", cap_res['android.control.aeRegions']
+    print "AF region:", cap_res['android.control.afRegions']
+    print "AWB region:", cap_res['android.control.awbRegions']
+    print "LSC map:", w_map, h_map, lsc_map[:8]
+
+    assert(ctrl_mode == 1)
+
+    # Color correction gain and transform must be valid.
+    assert(len(gains) == 4)
+    assert(len(transform) == 9)
+    assert(all([g > 0 for g in gains]))
+    assert(all([t["denominator"] != 0 for t in transform]))
+
+    # Color correction should not match the manual settings.
+    assert(any([not is_close_float(gains[i], manual_gains[i])
+                for i in xrange(4)]))
+    assert(any([not is_close_rational(transform[i], manual_transform[i])
+                for i in xrange(9)]))
+
+    # Exposure time must be valid.
+    assert(exp_time > 0)
+
+    # Lens shading map must be valid.
+    assert(w_map > 0 and h_map > 0 and w_map * h_map * 4 == len(lsc_map))
+    assert(all([m >= 1 for m in lsc_map]))
+
+    draw_lsc_plot(w_map, h_map, lsc_map, "auto")
+
+    return lsc_map
+
+def test_manual(cam, w_map, h_map, lsc_map_auto):
+    cap = cam.do_capture(manual_req)
+    cap_res = cap["metadata"]
+
+    gains = cap_res["android.colorCorrection.gains"]
+    transform = cap_res["android.colorCorrection.transform"]
+    curves = [cap_res["android.tonemap.curveRed"],
+              cap_res["android.tonemap.curveGreen"],
+              cap_res["android.tonemap.curveBlue"]]
+    exp_time = cap_res['android.sensor.exposureTime']
+    lsc_map = cap_res["android.statistics.lensShadingMap"]
+    ctrl_mode = cap_res["android.control.mode"]
+
+    print "Control mode:", ctrl_mode
+    print "Gains:", gains
+    print "Transform:", [its.objects.rational_to_float(t)
+                         for t in transform]
+    print "Tonemap:", curves[0][1::16]
+    print "AE region:", cap_res['android.control.aeRegions']
+    print "AF region:", cap_res['android.control.afRegions']
+    print "AWB region:", cap_res['android.control.awbRegions']
+    print "LSC map:", w_map, h_map, lsc_map[:8]
+
+    assert(ctrl_mode == 0)
+
+    # Color correction gain and transform must be valid.
+    # Color correction gains and transform should be the same size and
+    # values as the manually set values.
+    assert(len(gains) == 4)
+    assert(len(transform) == 9)
+    assert( all([is_close_float(gains[i], manual_gains_ok[0][i])
+                 for i in xrange(4)]) or
+            all([is_close_float(gains[i], manual_gains_ok[1][i])
+                 for i in xrange(4)]) or
+            all([is_close_float(gains[i], manual_gains_ok[2][i])
+                 for i in xrange(4)]))
+    assert(all([is_close_rational(transform[i], manual_transform[i])
+                for i in xrange(9)]))
+
+    # Tonemap must be valid.
+    # The returned tonemap must be linear.
+    for c in curves:
+        assert(len(c) > 0)
+        assert(all([is_close_float(c[i], c[i+1])
+                    for i in xrange(0,len(c),2)]))
+
+    # Exposure time must be close to the requested exposure time.
+    assert(is_close_float(exp_time/1000000.0, manual_exp_time/1000000.0))
+
+    # Lens shading map must be valid.
+    assert(w_map > 0 and h_map > 0 and w_map * h_map * 4 == len(lsc_map))
+    assert(all([m >= 1 for m in lsc_map]))
+
+    draw_lsc_plot(w_map, h_map, lsc_map, "manual")
+
+if __name__ == '__main__':
+    main()
+
diff --git a/apps/CameraITS/tests/scene1/test_crop_region_raw.py b/apps/CameraITS/tests/scene1/test_crop_region_raw.py
new file mode 100644
index 0000000..9fc52cb
--- /dev/null
+++ b/apps/CameraITS/tests/scene1/test_crop_region_raw.py
@@ -0,0 +1,153 @@
+# Copyright 2014 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import its.image
+import its.caps
+import its.device
+import its.objects
+import its.target
+import numpy
+import os.path
+
+
+def check_crop_region(expected, reported, active, err_threshold):
+    """Check if the reported region is within the tolerance.
+
+    Args:
+        expected: expected crop region
+        reported: reported crop region
+        active: active resolution
+        err_threshold: error threshold for the active resolution
+    """
+
+    ex = (active["right"] - active["left"]) * err_threshold
+    ey = (active["bottom"] - active["top"]) * err_threshold
+
+    assert ((abs(expected["left"] - reported["left"]) <= ex) and
+            (abs(expected["right"] - reported["right"]) <= ex) and
+            (abs(expected["top"] - reported["top"]) <= ey) and
+            (abs(expected["bottom"] - reported["bottom"]) <= ey))
+
+def main():
+    """Test that raw streams are not croppable.
+    """
+    NAME = os.path.basename(__file__).split(".")[0]
+
+    DIFF_THRESH = 0.05
+    CROP_REGION_ERROR_THRESHOLD = 0.01
+
+    with its.device.ItsSession() as cam:
+        props = cam.get_camera_properties()
+        its.caps.skip_unless(its.caps.compute_target_exposure(props) and
+                             its.caps.raw16(props) and
+                             its.caps.per_frame_control(props))
+
+        a = props['android.sensor.info.activeArraySize']
+        ax, ay = a["left"], a["top"]
+        aw, ah = a["right"] - a["left"], a["bottom"] - a["top"]
+        print "Active sensor region: (%d,%d %dx%d)" % (ax, ay, aw, ah)
+
+        full_region = {
+            "left": 0,
+            "top": 0,
+            "right": aw,
+            "bottom": ah
+        }
+
+        # Capture without a crop region.
+        # Use a manual request with a linear tonemap so that the YUV and RAW
+        # should look the same (once converted by the its.image module).
+        e, s = its.target.get_target_exposure_combos(cam)["minSensitivity"]
+        req = its.objects.manual_capture_request(s,e, True)
+        cap1_raw, cap1_yuv = cam.do_capture(req, cam.CAP_RAW_YUV)
+
+        # Calculate a center crop region.
+        zoom = min(3.0, its.objects.get_max_digital_zoom(props))
+        assert(zoom >= 1)
+        cropw = aw / zoom
+        croph = ah / zoom
+
+        req["android.scaler.cropRegion"] = {
+            "left": aw / 2 - cropw / 2,
+            "top": ah / 2 - croph / 2,
+            "right": aw / 2 + cropw / 2,
+            "bottom": ah / 2 + croph / 2
+        }
+
+        # when both YUV and RAW are requested, the crop region that's
+        # applied to YUV should be reported.
+        crop_region = req["android.scaler.cropRegion"]
+        if crop_region == full_region:
+            crop_region_err_thresh = 0.0
+        else:
+            crop_region_err_thresh = CROP_REGION_ERROR_THRESHOLD
+
+        cap2_raw, cap2_yuv = cam.do_capture(req, cam.CAP_RAW_YUV)
+
+        imgs = {}
+        for s, cap, cr, err_delta in [("yuv_full", cap1_yuv, full_region, 0),
+                      ("raw_full", cap1_raw, full_region, 0),
+                      ("yuv_crop", cap2_yuv, crop_region, crop_region_err_thresh),
+                      ("raw_crop", cap2_raw, crop_region, crop_region_err_thresh)]:
+            img = its.image.convert_capture_to_rgb_image(cap, props=props)
+            its.image.write_image(img, "%s_%s.jpg" % (NAME, s))
+            r = cap["metadata"]["android.scaler.cropRegion"]
+            x, y = r["left"], r["top"]
+            w, h = r["right"] - r["left"], r["bottom"] - r["top"]
+            imgs[s] = img
+            print "Crop on %s: (%d,%d %dx%d)" % (s, x, y, w, h)
+            check_crop_region(cr, r, a, err_delta)
+
+        # Also check the image content; 3 of the 4 shots should match.
+        # Note that all the shots are RGB below; the variable names correspond
+        # to what was captured.
+        # Average the images down 4x4 -> 1 prior to comparison to smooth out
+        # noise.
+        # Shrink the YUV images an additional 2x2 -> 1 to account for the size
+        # reduction that the raw images went through in the RGB conversion.
+        imgs2 = {}
+        for s,img in imgs.iteritems():
+            h,w,ch = img.shape
+            m = 4
+            if s in ["yuv_full", "yuv_crop"]:
+                m = 8
+            img = img.reshape(h/m,m,w/m,m,3).mean(3).mean(1).reshape(h/m,w/m,3)
+            imgs2[s] = img
+            print s, img.shape
+
+        # Strip any border pixels from the raw shots (since the raw images may
+        # be larger than the YUV images). Assume a symmetric padded border.
+        xpad = (imgs2["raw_full"].shape[1] - imgs2["yuv_full"].shape[1]) / 2
+        ypad = (imgs2["raw_full"].shape[0] - imgs2["yuv_full"].shape[0]) / 2
+        wyuv = imgs2["yuv_full"].shape[1]
+        hyuv = imgs2["yuv_full"].shape[0]
+        imgs2["raw_full"]=imgs2["raw_full"][ypad:ypad+hyuv:,xpad:xpad+wyuv:,::]
+        imgs2["raw_crop"]=imgs2["raw_crop"][ypad:ypad+hyuv:,xpad:xpad+wyuv:,::]
+        print "Stripping padding before comparison:", xpad, ypad
+
+        for s,img in imgs2.iteritems():
+            its.image.write_image(img, "%s_comp_%s.jpg" % (NAME, s))
+
+        # Compute image diffs.
+        diff_yuv = numpy.fabs((imgs2["yuv_full"] - imgs2["yuv_crop"])).mean()
+        diff_raw = numpy.fabs((imgs2["raw_full"] - imgs2["raw_crop"])).mean()
+        print "YUV diff (crop vs. non-crop):", diff_yuv
+        print "RAW diff (crop vs. non-crop):", diff_raw
+
+        assert(diff_yuv > DIFF_THRESH)
+        assert(diff_raw < DIFF_THRESH)
+
+if __name__ == '__main__':
+    main()
+
diff --git a/apps/CameraITS/tests/scene1/test_crop_regions.py b/apps/CameraITS/tests/scene1/test_crop_regions.py
new file mode 100644
index 0000000..6d3dad1
--- /dev/null
+++ b/apps/CameraITS/tests/scene1/test_crop_regions.py
@@ -0,0 +1,106 @@
+# Copyright 2014 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import its.image
+import its.caps
+import its.device
+import its.objects
+import its.target
+import os.path
+import numpy
+
+def main():
+    """Test that crop regions work.
+    """
+    NAME = os.path.basename(__file__).split(".")[0]
+
+    # A list of 5 regions, specified in normalized (x,y,w,h) coords.
+    # The regions correspond to: TL, TR, BL, BR, CENT
+    REGIONS = [(0.0, 0.0, 0.5, 0.5),
+               (0.5, 0.0, 0.5, 0.5),
+               (0.0, 0.5, 0.5, 0.5),
+               (0.5, 0.5, 0.5, 0.5),
+               (0.25, 0.25, 0.5, 0.5)]
+
+    with its.device.ItsSession() as cam:
+        props = cam.get_camera_properties()
+        its.caps.skip_unless(its.caps.compute_target_exposure(props) and
+                             its.caps.freeform_crop(props) and
+                             its.caps.per_frame_control(props))
+
+        a = props['android.sensor.info.activeArraySize']
+        ax, ay = a["left"], a["top"]
+        aw, ah = a["right"] - a["left"], a["bottom"] - a["top"]
+        e, s = its.target.get_target_exposure_combos(cam)["minSensitivity"]
+        print "Active sensor region (%d,%d %dx%d)" % (ax, ay, aw, ah)
+
+        # Uses a 2x digital zoom.
+        assert(its.objects.get_max_digital_zoom(props) >= 2)
+
+        # Capture a full frame.
+        req = its.objects.manual_capture_request(s,e)
+        cap_full = cam.do_capture(req)
+        img_full = its.image.convert_capture_to_rgb_image(cap_full)
+        its.image.write_image(img_full, "%s_full.jpg" % (NAME))
+        wfull, hfull = cap_full["width"], cap_full["height"]
+
+        # Capture a burst of crop region frames.
+        # Note that each region is 1/2x1/2 of the full frame, and is digitally
+        # zoomed into the full size output image, so must be downscaled (below)
+        # by 2x when compared to a tile of the full image.
+        reqs = []
+        for x,y,w,h in REGIONS:
+            req = its.objects.manual_capture_request(s,e)
+            req["android.scaler.cropRegion"] = {
+                    "top": int(ah * y),
+                    "left": int(aw * x),
+                    "right": int(aw * (x + w)),
+                    "bottom": int(ah * (y + h))}
+            reqs.append(req)
+        caps_regions = cam.do_capture(reqs)
+        match_failed = False
+        for i,cap in enumerate(caps_regions):
+            a = cap["metadata"]["android.scaler.cropRegion"]
+            ax, ay = a["left"], a["top"]
+            aw, ah = a["right"] - a["left"], a["bottom"] - a["top"]
+
+            # Match this crop image against each of the five regions of
+            # the full image, to find the best match (which should be
+            # the region that corresponds to this crop image).
+            img_crop = its.image.convert_capture_to_rgb_image(cap)
+            img_crop = its.image.downscale_image(img_crop, 2)
+            its.image.write_image(img_crop, "%s_crop%d.jpg" % (NAME, i))
+            min_diff = None
+            min_diff_region = None
+            for j,(x,y,w,h) in enumerate(REGIONS):
+                tile_full = its.image.get_image_patch(img_full, x,y,w,h)
+                wtest = min(tile_full.shape[1], aw)
+                htest = min(tile_full.shape[0], ah)
+                tile_full = tile_full[0:htest:, 0:wtest:, ::]
+                tile_crop = img_crop[0:htest:, 0:wtest:, ::]
+                its.image.write_image(tile_full, "%s_fullregion%d.jpg"%(NAME,j))
+                diff = numpy.fabs(tile_full - tile_crop).mean()
+                if min_diff is None or diff < min_diff:
+                    min_diff = diff
+                    min_diff_region = j
+            if i != min_diff_region:
+                match_failed = True
+            print "Crop image %d (%d,%d %dx%d) best match with region %d"%(
+                    i, ax, ay, aw, ah, min_diff_region)
+
+        assert(not match_failed)
+
+if __name__ == '__main__':
+    main()
+
diff --git a/apps/CameraITS/tests/scene1/test_dng_noise_model.py b/apps/CameraITS/tests/scene1/test_dng_noise_model.py
new file mode 100644
index 0000000..51270b6
--- /dev/null
+++ b/apps/CameraITS/tests/scene1/test_dng_noise_model.py
@@ -0,0 +1,114 @@
+# Copyright 2014 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import its.device
+import its.caps
+import its.objects
+import its.image
+import os.path
+import pylab
+import matplotlib
+import matplotlib.pyplot
+
+def main():
+    """Verify that the DNG raw model parameters are correct.
+    """
+    NAME = os.path.basename(__file__).split(".")[0]
+
+    NUM_STEPS = 4
+
+    # Pass if the difference between expected and computed variances is small,
+    # defined as being within an absolute variance delta of 0.0005, or within
+    # 20% of the expected variance, whichever is larger; this is to allow the
+    # test to pass in the presence of some randomness (since this test is
+    # measuring noise of a small patch) and some imperfect scene conditions
+    # (since ITS doesn't require a perfectly uniformly lit scene).
+    DIFF_THRESH = 0.0005
+    FRAC_THRESH = 0.2
+
+    with its.device.ItsSession() as cam:
+
+        props = cam.get_camera_properties()
+        its.caps.skip_unless(its.caps.raw(props) and
+                             its.caps.raw16(props) and
+                             its.caps.manual_sensor(props) and
+                             its.caps.read_3a(props) and
+                             its.caps.per_frame_control(props))
+
+        white_level = float(props['android.sensor.info.whiteLevel'])
+        black_levels = props['android.sensor.blackLevelPattern']
+        cfa_idxs = its.image.get_canonical_cfa_order(props)
+        black_levels = [black_levels[i] for i in cfa_idxs]
+
+        # Expose for the scene with min sensitivity
+        sens_min, sens_max = props['android.sensor.info.sensitivityRange']
+        sens_step = (sens_max - sens_min) / NUM_STEPS
+        s_ae,e_ae,_,_,_  = cam.do_3a(get_results=True)
+        s_e_prod = s_ae * e_ae
+        sensitivities = range(sens_min, sens_max, sens_step)
+
+        var_expected = [[],[],[],[]]
+        var_measured = [[],[],[],[]]
+        for sens in sensitivities:
+
+            # Capture a raw frame with the desired sensitivity.
+            exp = int(s_e_prod / float(sens))
+            req = its.objects.manual_capture_request(sens, exp)
+            cap = cam.do_capture(req, cam.CAP_RAW)
+
+            # Test each raw color channel (R, GR, GB, B):
+            noise_profile = cap["metadata"]["android.sensor.noiseProfile"]
+            assert((len(noise_profile)) == 4)
+            for ch in range(4):
+                # Get the noise model parameters for this channel of this shot.
+                s,o = noise_profile[cfa_idxs[ch]]
+
+                # Get a center tile of the raw channel, and compute the mean.
+                # Use a very small patch to ensure gross uniformity (i.e. so
+                # non-uniform lighting or vignetting doesn't affect the variance
+                # calculation).
+                plane = its.image.convert_capture_to_planes(cap, props)[ch]
+                plane = (plane * white_level - black_levels[ch]) / (
+                        white_level - black_levels[ch])
+                tile = its.image.get_image_patch(plane, 0.49,0.49,0.02,0.02)
+                mean = tile.mean()
+
+                # Calculate the expected variance based on the model, and the
+                # measured variance from the tile.
+                var_measured[ch].append(
+                        its.image.compute_image_variances(tile)[0])
+                var_expected[ch].append(s * mean + o)
+
+    for ch in range(4):
+        pylab.plot(sensitivities, var_expected[ch], "rgkb"[ch],
+                label=["R","GR","GB","B"][ch]+" expected")
+        pylab.plot(sensitivities, var_measured[ch], "rgkb"[ch]+"--",
+                label=["R", "GR", "GB", "B"][ch]+" measured")
+    pylab.xlabel("Sensitivity")
+    pylab.ylabel("Center patch variance")
+    pylab.legend(loc=2)
+    matplotlib.pyplot.savefig("%s_plot.png" % (NAME))
+
+    # Pass/fail check.
+    for ch in range(4):
+        diffs = [var_measured[ch][i] - var_expected[ch][i]
+                 for i in range(NUM_STEPS)]
+        print "Diffs (%s):"%(["R","GR","GB","B"][ch]), diffs
+        for i,diff in enumerate(diffs):
+            thresh = max(DIFF_THRESH, FRAC_THRESH * var_expected[ch][i])
+            assert(diff <= thresh)
+
+if __name__ == '__main__':
+    main()
+
diff --git a/apps/CameraITS/tests/scene1/test_ev_compensation_advanced.py b/apps/CameraITS/tests/scene1/test_ev_compensation_advanced.py
new file mode 100644
index 0000000..6341c67
--- /dev/null
+++ b/apps/CameraITS/tests/scene1/test_ev_compensation_advanced.py
@@ -0,0 +1,83 @@
+# Copyright 2014 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import its.image
+import its.device
+import its.caps
+import its.objects
+import os.path
+import pylab
+import matplotlib
+import matplotlib.pyplot
+import numpy
+
+def main():
+    """Tests that EV compensation is applied.
+    """
+    NAME = os.path.basename(__file__).split(".")[0]
+
+    MAX_LUMA_DELTA_THRESH = 0.02
+
+    with its.device.ItsSession() as cam:
+        props = cam.get_camera_properties()
+        its.caps.skip_unless(its.caps.manual_sensor(props) and
+                             its.caps.manual_post_proc(props) and
+                             its.caps.per_frame_control(props))
+
+        evs = range(-4,5)
+        lumas = []
+        for ev in evs:
+            # Re-converge 3A, and lock AE once converged. Skip the AF trigger,
+            # as a dark/bright scene could make AF convergence fail, and this
+            # test doesn't care about image sharpness.
+            cam.do_3a(ev_comp=ev, lock_ae=True, do_af=False)
+
+            # Capture a single shot with the same EV comp and locked AE.
+            req = its.objects.auto_capture_request()
+            req['android.control.aeExposureCompensation'] = ev
+            req["android.control.aeLock"] = True
+            # Use linear tone curve to avoid brightness being impacted
+            # by tone curves.
+            req["android.tonemap.mode"] = 0
+            req["android.tonemap.curveRed"] = [0.0,0.0, 1.0,1.0]
+            req["android.tonemap.curveGreen"] = [0.0,0.0, 1.0,1.0]
+            req["android.tonemap.curveBlue"] = [0.0,0.0, 1.0,1.0]
+            cap = cam.do_capture(req)
+            y = its.image.convert_capture_to_planes(cap)[0]
+            tile = its.image.get_image_patch(y, 0.45,0.45,0.1,0.1)
+            lumas.append(its.image.compute_image_means(tile)[0])
+
+        ev_step_size_in_stops = its.objects.rational_to_float(
+                props['android.control.aeCompensationStep'])
+        luma_increase_per_step = pow(2, ev_step_size_in_stops)
+        print "ev_step_size_in_stops", ev_step_size_in_stops
+        imid = len(lumas) / 2
+        expected_lumas = [lumas[imid] / pow(luma_increase_per_step, i)
+                          for i in range(imid , 0, -1)]  + \
+                         [lumas[imid] * pow(luma_increase_per_step, i-imid)
+                          for i in range(imid, len(evs))]
+
+        pylab.plot(evs, lumas, 'r')
+        pylab.plot(evs, expected_lumas, 'b')
+        matplotlib.pyplot.savefig("%s_plot_means.png" % (NAME))
+
+        luma_diffs = [expected_lumas[i] - lumas[i] for i in range(len(evs))]
+        max_diff = max(abs(i) for i in luma_diffs)
+        avg_diff = abs(numpy.array(luma_diffs)).mean()
+        print "Max delta between modeled and measured lumas:", max_diff
+        print "Avg delta between modeled and measured lumas:", avg_diff
+        assert(max_diff < MAX_LUMA_DELTA_THRESH)
+
+if __name__ == '__main__':
+    main()
diff --git a/apps/CameraITS/tests/scene1/test_ev_compensation_basic.py b/apps/CameraITS/tests/scene1/test_ev_compensation_basic.py
new file mode 100644
index 0000000..13f318f
--- /dev/null
+++ b/apps/CameraITS/tests/scene1/test_ev_compensation_basic.py
@@ -0,0 +1,60 @@
+# Copyright 2014 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import its.image
+import its.device
+import its.objects
+import os.path
+import pylab
+import matplotlib
+import matplotlib.pyplot
+import numpy
+
+def main():
+    """Tests that EV compensation is applied.
+    """
+    NAME = os.path.basename(__file__).split(".")[0]
+
+    with its.device.ItsSession() as cam:
+        props = cam.get_camera_properties()
+
+        evs = range(-4,5)
+        lumas = []
+        for ev in evs:
+            # Re-converge 3A, and lock AE once converged. Skip the AF trigger,
+            # as a dark/bright scene could make AF convergence fail, and this
+            # test doesn't care about image sharpness.
+            cam.do_3a(ev_comp=ev, lock_ae=True, do_af=False)
+
+            # Capture a single shot with the same EV comp and locked AE.
+            req = its.objects.auto_capture_request()
+            req['android.control.aeExposureCompensation'] = ev
+            req["android.control.aeLock"] = True
+            cap = cam.do_capture(req)
+            y = its.image.convert_capture_to_planes(cap)[0]
+            tile = its.image.get_image_patch(y, 0.45,0.45,0.1,0.1)
+            lumas.append(its.image.compute_image_means(tile)[0])
+
+        pylab.plot(evs, lumas, 'r')
+        matplotlib.pyplot.savefig("%s_plot_means.png" % (NAME))
+
+        luma_diffs = numpy.diff(lumas)
+        min_luma_diffs = min(luma_diffs)
+        print "Min of the luma value difference between adjacent ev comp: ", \
+                min_luma_diffs
+        # All luma brightness should be increasing with increasing ev comp.
+        assert(min_luma_diffs > 0)
+
+if __name__ == '__main__':
+    main()
diff --git a/apps/CameraITS/tests/scene1/test_exposure.py b/apps/CameraITS/tests/scene1/test_exposure.py
new file mode 100644
index 0000000..c55e7ad
--- /dev/null
+++ b/apps/CameraITS/tests/scene1/test_exposure.py
@@ -0,0 +1,91 @@
+# Copyright 2013 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import its.image
+import its.caps
+import its.device
+import its.objects
+import its.target
+import pylab
+import numpy
+import os.path
+import matplotlib
+import matplotlib.pyplot
+
+def main():
+    """Test that a constant exposure is seen as ISO and exposure time vary.
+
+    Take a series of shots that have ISO and exposure time chosen to balance
+    each other; result should be the same brightness, but over the sequence
+    the images should get noisier.
+    """
+    NAME = os.path.basename(__file__).split(".")[0]
+
+    THRESHOLD_MAX_OUTLIER_DIFF = 0.1
+    THRESHOLD_MIN_LEVEL = 0.1
+    THRESHOLD_MAX_LEVEL = 0.9
+    THRESHOLD_MAX_ABS_GRAD = 0.001
+
+    mults = []
+    r_means = []
+    g_means = []
+    b_means = []
+
+    with its.device.ItsSession() as cam:
+        props = cam.get_camera_properties()
+        its.caps.skip_unless(its.caps.compute_target_exposure(props) and
+                             its.caps.per_frame_control(props))
+
+        e,s = its.target.get_target_exposure_combos(cam)["minSensitivity"]
+        expt_range = props['android.sensor.info.exposureTimeRange']
+        sens_range = props['android.sensor.info.sensitivityRange']
+
+        m = 1
+        while s*m < sens_range[1] and e/m > expt_range[0]:
+            mults.append(m)
+            req = its.objects.manual_capture_request(s*m, e/m)
+            cap = cam.do_capture(req)
+            img = its.image.convert_capture_to_rgb_image(cap)
+            its.image.write_image(img, "%s_mult=%02d.jpg" % (NAME, m))
+            tile = its.image.get_image_patch(img, 0.45, 0.45, 0.1, 0.1)
+            rgb_means = its.image.compute_image_means(tile)
+            r_means.append(rgb_means[0])
+            g_means.append(rgb_means[1])
+            b_means.append(rgb_means[2])
+            m = m + 4
+
+    # Draw a plot.
+    pylab.plot(mults, r_means, 'r')
+    pylab.plot(mults, g_means, 'g')
+    pylab.plot(mults, b_means, 'b')
+    pylab.ylim([0,1])
+    matplotlib.pyplot.savefig("%s_plot_means.png" % (NAME))
+
+    # Check for linearity. For each R,G,B channel, fit a line y=mx+b, and
+    # assert that the gradient is close to 0 (flat) and that there are no
+    # crazy outliers. Also ensure that the images aren't clamped to 0 or 1
+    # (which would make them look like flat lines).
+    for chan in xrange(3):
+        values = [r_means, g_means, b_means][chan]
+        m, b = numpy.polyfit(mults, values, 1).tolist()
+        print "Channel %d line fit (y = mx+b): m = %f, b = %f" % (chan, m, b)
+        assert(abs(m) < THRESHOLD_MAX_ABS_GRAD)
+        assert(b > THRESHOLD_MIN_LEVEL and b < THRESHOLD_MAX_LEVEL)
+        for v in values:
+            assert(v > THRESHOLD_MIN_LEVEL and v < THRESHOLD_MAX_LEVEL)
+            assert(abs(v - b) < THRESHOLD_MAX_OUTLIER_DIFF)
+
+if __name__ == '__main__':
+    main()
+
diff --git a/apps/CameraITS/tests/scene1/test_format_combos.py b/apps/CameraITS/tests/scene1/test_format_combos.py
new file mode 100644
index 0000000..1b40826
--- /dev/null
+++ b/apps/CameraITS/tests/scene1/test_format_combos.py
@@ -0,0 +1,124 @@
+# Copyright 2014 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import its.image
+import its.caps
+import its.device
+import its.objects
+import its.error
+import its.target
+import sys
+import os
+import os.path
+
+# Change this to True, to have the test break at the first failure.
+stop_at_first_failure = False
+
+def main():
+    """Test different combinations of output formats.
+    """
+    NAME = os.path.basename(__file__).split(".")[0]
+
+    with its.device.ItsSession() as cam:
+
+        props = cam.get_camera_properties()
+        its.caps.skip_unless(its.caps.compute_target_exposure(props) and
+                             its.caps.raw16(props))
+
+        successes = []
+        failures = []
+
+        # Two different requests: auto, and manual.
+        e, s = its.target.get_target_exposure_combos(cam)["midExposureTime"]
+        req_aut = its.objects.auto_capture_request()
+        req_man = its.objects.manual_capture_request(s, e)
+        reqs = [req_aut, # R0
+                req_man] # R1
+
+        # 10 different combos of output formats; some are single surfaces, and
+        # some are multiple surfaces.
+        wyuv,hyuv = its.objects.get_available_output_sizes("yuv", props)[-1]
+        wjpg,hjpg = its.objects.get_available_output_sizes("jpg", props)[-1]
+        fmt_yuv_prev = {"format":"yuv", "width":wyuv, "height":hyuv}
+        fmt_yuv_full = {"format":"yuv"}
+        fmt_jpg_prev = {"format":"jpeg","width":wjpg, "height":hjpg}
+        fmt_jpg_full = {"format":"jpeg"}
+        fmt_raw_full = {"format":"raw"}
+        fmt_combos =[
+                [fmt_yuv_prev],                             # F0
+                [fmt_yuv_full],                             # F1
+                [fmt_jpg_prev],                             # F2
+                [fmt_jpg_full],                             # F3
+                [fmt_raw_full],                             # F4
+                [fmt_yuv_prev, fmt_jpg_prev],               # F5
+                [fmt_yuv_prev, fmt_jpg_full],               # F6
+                [fmt_yuv_prev, fmt_raw_full],               # F7
+                [fmt_yuv_prev, fmt_jpg_prev, fmt_raw_full], # F8
+                [fmt_yuv_prev, fmt_jpg_full, fmt_raw_full]] # F9
+
+        # Two different burst lengths: single frame, and 3 frames.
+        burst_lens = [1, # B0
+                      3] # B1
+
+        # There are 2x10x2=40 different combinations. Run through them all.
+        n = 0
+        for r,req in enumerate(reqs):
+            for f,fmt_combo in enumerate(fmt_combos):
+                for b,burst_len in enumerate(burst_lens):
+                    try:
+                        caps = cam.do_capture([req]*burst_len, fmt_combo)
+                        successes.append((n,r,f,b))
+                        print "==> Success[%02d]: R%d F%d B%d" % (n,r,f,b)
+
+                        # Dump the captures out to jpegs.
+                        if not isinstance(caps, list):
+                            caps = [caps]
+                        elif isinstance(caps[0], list):
+                            caps = sum(caps, [])
+                        for c,cap in enumerate(caps):
+                            img = its.image.convert_capture_to_rgb_image(cap,
+                                    props=props)
+                            its.image.write_image(img,
+                                    "%s_n%02d_r%d_f%d_b%d_c%d.jpg"%(NAME,n,r,f,b,c))
+
+                    except Exception as e:
+                        print e
+                        print "==> Failure[%02d]: R%d F%d B%d" % (n,r,f,b)
+                        failures.append((n,r,f,b))
+                        if stop_at_first_failure:
+                            sys.exit(0)
+                    n += 1
+
+        num_fail = len(failures)
+        num_success = len(successes)
+        num_total = len(reqs)*len(fmt_combos)*len(burst_lens)
+        num_not_run = num_total - num_success - num_fail
+
+        print "\nFailures (%d / %d):" % (num_fail, num_total)
+        for (n,r,f,b) in failures:
+            print "  %02d: R%d F%d B%d" % (n,r,f,b)
+        print "\nSuccesses (%d / %d):" % (num_success, num_total)
+        for (n,r,f,b) in successes:
+            print "  %02d: R%d F%d B%d" % (n,r,f,b)
+        if num_not_run > 0:
+            print "\nNumber of tests not run: %d / %d" % (num_not_run, num_total)
+        print ""
+
+        # The test passes if all the combinations successfully capture.
+        assert(num_fail == 0)
+        assert(num_success == num_total)
+
+if __name__ == '__main__':
+    main()
+
diff --git a/apps/CameraITS/tests/scene1/test_jpeg.py b/apps/CameraITS/tests/scene1/test_jpeg.py
new file mode 100644
index 0000000..25c2038
--- /dev/null
+++ b/apps/CameraITS/tests/scene1/test_jpeg.py
@@ -0,0 +1,63 @@
+# Copyright 2013 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import its.image
+import its.caps
+import its.device
+import its.objects
+import its.target
+import os.path
+import math
+
+def main():
+    """Test that converted YUV images and device JPEG images look the same.
+    """
+    NAME = os.path.basename(__file__).split(".")[0]
+
+    THRESHOLD_MAX_RMS_DIFF = 0.01
+
+    with its.device.ItsSession() as cam:
+        props = cam.get_camera_properties()
+        its.caps.skip_unless(its.caps.compute_target_exposure(props) and
+                             its.caps.per_frame_control(props))
+
+        e, s = its.target.get_target_exposure_combos(cam)["midExposureTime"]
+        req = its.objects.manual_capture_request(s, e, True)
+
+        # YUV
+        size = its.objects.get_available_output_sizes("yuv", props)[0]
+        out_surface = {"width":size[0], "height":size[1], "format":"yuv"}
+        cap = cam.do_capture(req, out_surface)
+        img = its.image.convert_capture_to_rgb_image(cap)
+        its.image.write_image(img, "%s_fmt=yuv.jpg" % (NAME))
+        tile = its.image.get_image_patch(img, 0.45, 0.45, 0.1, 0.1)
+        rgb0 = its.image.compute_image_means(tile)
+
+        # JPEG
+        size = its.objects.get_available_output_sizes("jpg", props)[0]
+        out_surface = {"width":size[0], "height":size[1], "format":"jpg"}
+        cap = cam.do_capture(req, out_surface)
+        img = its.image.decompress_jpeg_to_rgb_image(cap["data"])
+        its.image.write_image(img, "%s_fmt=jpg.jpg" % (NAME))
+        tile = its.image.get_image_patch(img, 0.45, 0.45, 0.1, 0.1)
+        rgb1 = its.image.compute_image_means(tile)
+
+        rms_diff = math.sqrt(
+                sum([pow(rgb0[i] - rgb1[i], 2.0) for i in range(3)]) / 3.0)
+        print "RMS difference:", rms_diff
+        assert(rms_diff < THRESHOLD_MAX_RMS_DIFF)
+
+if __name__ == '__main__':
+    main()
+
diff --git a/apps/CameraITS/tests/scene1/test_latching.py b/apps/CameraITS/tests/scene1/test_latching.py
new file mode 100644
index 0000000..3bc4356
--- /dev/null
+++ b/apps/CameraITS/tests/scene1/test_latching.py
@@ -0,0 +1,90 @@
+# Copyright 2013 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import its.image
+import its.caps
+import its.device
+import its.objects
+import its.target
+import pylab
+import os.path
+import matplotlib
+import matplotlib.pyplot
+
+def main():
+    """Test that settings latch on the right frame.
+
+    Takes a bunch of shots using back-to-back requests, varying the capture
+    request parameters between shots. Checks that the images that come back
+    have the expected properties.
+    """
+    NAME = os.path.basename(__file__).split(".")[0]
+
+    with its.device.ItsSession() as cam:
+        props = cam.get_camera_properties()
+        its.caps.skip_unless(its.caps.full(props) and
+                             its.caps.per_frame_control(props))
+
+        _,fmt = its.objects.get_fastest_manual_capture_settings(props)
+        e, s = its.target.get_target_exposure_combos(cam)["midExposureTime"]
+        e /= 2.0
+
+        r_means = []
+        g_means = []
+        b_means = []
+
+        reqs = [
+            its.objects.manual_capture_request(s,  e,   True),
+            its.objects.manual_capture_request(s,  e,   True),
+            its.objects.manual_capture_request(s*2,e,   True),
+            its.objects.manual_capture_request(s*2,e,   True),
+            its.objects.manual_capture_request(s,  e,   True),
+            its.objects.manual_capture_request(s,  e,   True),
+            its.objects.manual_capture_request(s,  e*2, True),
+            its.objects.manual_capture_request(s,  e,   True),
+            its.objects.manual_capture_request(s*2,e,   True),
+            its.objects.manual_capture_request(s,  e,   True),
+            its.objects.manual_capture_request(s,  e*2, True),
+            its.objects.manual_capture_request(s,  e,   True),
+            its.objects.manual_capture_request(s,  e*2, True),
+            its.objects.manual_capture_request(s,  e*2, True),
+            ]
+
+        caps = cam.do_capture(reqs, fmt)
+        for i,cap in enumerate(caps):
+            img = its.image.convert_capture_to_rgb_image(cap)
+            its.image.write_image(img, "%s_i=%02d.jpg" % (NAME, i))
+            tile = its.image.get_image_patch(img, 0.45, 0.45, 0.1, 0.1)
+            rgb_means = its.image.compute_image_means(tile)
+            r_means.append(rgb_means[0])
+            g_means.append(rgb_means[1])
+            b_means.append(rgb_means[2])
+
+        # Draw a plot.
+        idxs = range(len(r_means))
+        pylab.plot(idxs, r_means, 'r')
+        pylab.plot(idxs, g_means, 'g')
+        pylab.plot(idxs, b_means, 'b')
+        pylab.ylim([0,1])
+        matplotlib.pyplot.savefig("%s_plot_means.png" % (NAME))
+
+        g_avg = sum(g_means) / len(g_means)
+        g_ratios = [g / g_avg for g in g_means]
+        g_hilo = [g>1.0 for g in g_ratios]
+        assert(g_hilo == [False, False, True, True, False, False, True,
+                          False, True, False, True, False, True, True])
+
+if __name__ == '__main__':
+    main()
+
diff --git a/apps/CameraITS/tests/scene1/test_linearity.py b/apps/CameraITS/tests/scene1/test_linearity.py
new file mode 100644
index 0000000..a9063a9
--- /dev/null
+++ b/apps/CameraITS/tests/scene1/test_linearity.py
@@ -0,0 +1,98 @@
+# Copyright 2013 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import its.image
+import its.caps
+import its.device
+import its.objects
+import its.target
+import numpy
+import math
+import pylab
+import os.path
+import matplotlib
+import matplotlib.pyplot
+
+def main():
+    """Test that device processing can be inverted to linear pixels.
+
+    Captures a sequence of shots with the device pointed at a uniform
+    target. Attempts to invert all the ISP processing to get back to
+    linear R,G,B pixel data.
+    """
+    NAME = os.path.basename(__file__).split(".")[0]
+
+    RESIDUAL_THRESHOLD = 0.00005
+
+    # The HAL3.2 spec requires that curves up to 64 control points in length
+    # must be supported.
+    L = 64
+    LM1 = float(L-1)
+
+    gamma_lut = numpy.array(
+            sum([[i/LM1, math.pow(i/LM1, 1/2.2)] for i in xrange(L)], []))
+    inv_gamma_lut = numpy.array(
+            sum([[i/LM1, math.pow(i/LM1, 2.2)] for i in xrange(L)], []))
+
+    with its.device.ItsSession() as cam:
+        props = cam.get_camera_properties()
+        its.caps.skip_unless(its.caps.compute_target_exposure(props) and
+                             its.caps.per_frame_control(props))
+
+        e,s = its.target.get_target_exposure_combos(cam)["midSensitivity"]
+        s /= 2
+        sens_range = props['android.sensor.info.sensitivityRange']
+        sensitivities = [s*1.0/3.0, s*2.0/3.0, s, s*4.0/3.0, s*5.0/3.0]
+        sensitivities = [s for s in sensitivities
+                if s > sens_range[0] and s < sens_range[1]]
+
+        req = its.objects.manual_capture_request(0, e)
+        req["android.blackLevel.lock"] = True
+        req["android.tonemap.mode"] = 0
+        req["android.tonemap.curveRed"] = gamma_lut.tolist()
+        req["android.tonemap.curveGreen"] = gamma_lut.tolist()
+        req["android.tonemap.curveBlue"] = gamma_lut.tolist()
+
+        r_means = []
+        g_means = []
+        b_means = []
+
+        for sens in sensitivities:
+            req["android.sensor.sensitivity"] = sens
+            cap = cam.do_capture(req)
+            img = its.image.convert_capture_to_rgb_image(cap)
+            its.image.write_image(
+                    img, "%s_sens=%04d.jpg" % (NAME, sens))
+            img = its.image.apply_lut_to_image(img, inv_gamma_lut[1::2] * LM1)
+            tile = its.image.get_image_patch(img, 0.45, 0.45, 0.1, 0.1)
+            rgb_means = its.image.compute_image_means(tile)
+            r_means.append(rgb_means[0])
+            g_means.append(rgb_means[1])
+            b_means.append(rgb_means[2])
+
+        pylab.plot(sensitivities, r_means, 'r')
+        pylab.plot(sensitivities, g_means, 'g')
+        pylab.plot(sensitivities, b_means, 'b')
+        pylab.ylim([0,1])
+        matplotlib.pyplot.savefig("%s_plot_means.png" % (NAME))
+
+        # Check that each plot is actually linear.
+        for means in [r_means, g_means, b_means]:
+            line,residuals,_,_,_  = numpy.polyfit(range(5),means,1,full=True)
+            print "Line: m=%f, b=%f, resid=%f"%(line[0], line[1], residuals[0])
+            assert(residuals[0] < RESIDUAL_THRESHOLD)
+
+if __name__ == '__main__':
+    main()
+
diff --git a/apps/CameraITS/tests/scene1/test_locked_burst.py b/apps/CameraITS/tests/scene1/test_locked_burst.py
new file mode 100644
index 0000000..5cea30c
--- /dev/null
+++ b/apps/CameraITS/tests/scene1/test_locked_burst.py
@@ -0,0 +1,92 @@
+# Copyright 2014 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import its.image
+import its.device
+import its.objects
+import os.path
+import numpy
+import pylab
+import matplotlib
+import matplotlib.pyplot
+
+def main():
+    """Test 3A lock + YUV burst (using auto settings).
+
+    This is a test that is designed to pass even on limited devices that
+    don't have MANUAL_SENSOR or PER_FRAME_CONTROLS. (They must be able to
+    capture bursts with full res @ full frame rate to pass, however).
+    """
+    NAME = os.path.basename(__file__).split(".")[0]
+
+    BURST_LEN = 8
+    SPREAD_THRESH = 0.005
+    FPS_MAX_DIFF = 2.0
+
+    with its.device.ItsSession() as cam:
+        props = cam.get_camera_properties()
+
+        # Converge 3A prior to capture.
+        cam.do_3a(do_af=True, lock_ae=True, lock_awb=True)
+
+        # After 3A has converged, lock AE+AWB for the duration of the test.
+        req = its.objects.auto_capture_request()
+        req["android.control.awbLock"] = True
+        req["android.control.aeLock"] = True
+
+        # Capture bursts of YUV shots.
+        # Get the mean values of a center patch for each.
+        r_means = []
+        g_means = []
+        b_means = []
+        caps = cam.do_capture([req]*BURST_LEN)
+        for i,cap in enumerate(caps):
+            img = its.image.convert_capture_to_rgb_image(cap)
+            its.image.write_image(img, "%s_frame%d.jpg"%(NAME,i))
+            tile = its.image.get_image_patch(img, 0.45, 0.45, 0.1, 0.1)
+            means = its.image.compute_image_means(tile)
+            r_means.append(means[0])
+            g_means.append(means[1])
+            b_means.append(means[2])
+
+        # Pass/fail based on center patch similarity.
+        for means in [r_means, g_means, b_means]:
+            spread = max(means) - min(means)
+            print "Patch mean spread", spread, \
+                   " (min/max: ",  min(means), "/", max(means), ")"
+            assert(spread < SPREAD_THRESH)
+
+        # Also ensure that the burst was at full frame rate.
+        fmt_code = 0x23
+        configs = props['android.scaler.streamConfigurationMap']\
+                       ['availableStreamConfigurations']
+        min_duration = None
+        for cfg in configs:
+            if cfg['format'] == fmt_code and cfg['input'] == False and \
+                    cfg['width'] == caps[0]["width"] and \
+                    cfg['height'] == caps[0]["height"]:
+                min_duration = cfg["minFrameDuration"]
+        assert(min_duration is not None)
+        tstamps = [c['metadata']['android.sensor.timestamp'] for c in caps]
+        deltas = [tstamps[i]-tstamps[i-1] for i in range(1,len(tstamps))]
+        actual_fps = 1.0 / (max(deltas) / 1000000000.0)
+        actual_fps_max = 1.0 / (min(deltas) / 1000000000.0)
+        max_fps = 1.0 / (min_duration / 1000000000.0)
+        print "Measure FPS min/max", actual_fps, "/", actual_fps_max
+        print "FPS measured %.1f, max advertized %.1f" %(actual_fps, max_fps)
+        assert(max_fps - FPS_MAX_DIFF <= actual_fps <= max_fps + FPS_MAX_DIFF)
+
+if __name__ == '__main__':
+    main()
+
diff --git a/apps/CameraITS/tests/scene1/test_param_color_correction.py b/apps/CameraITS/tests/scene1/test_param_color_correction.py
new file mode 100644
index 0000000..b7fdc7b
--- /dev/null
+++ b/apps/CameraITS/tests/scene1/test_param_color_correction.py
@@ -0,0 +1,104 @@
+# Copyright 2013 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import its.image
+import its.caps
+import its.device
+import its.objects
+import its.target
+import pylab
+import os.path
+import matplotlib
+import matplotlib.pyplot
+
+def main():
+    """Test that the android.colorCorrection.* params are applied when set.
+
+    Takes shots with different transform and gains values, and tests that
+    they look correspondingly different. The transform and gains are chosen
+    to make the output go redder or bluer.
+
+    Uses a linear tonemap.
+    """
+    NAME = os.path.basename(__file__).split(".")[0]
+
+    THRESHOLD_MAX_DIFF = 0.1
+
+    with its.device.ItsSession() as cam:
+        props = cam.get_camera_properties()
+        its.caps.skip_unless(its.caps.compute_target_exposure(props) and
+                             its.caps.per_frame_control(props))
+
+        # Baseline request
+        e, s = its.target.get_target_exposure_combos(cam)["midSensitivity"]
+        req = its.objects.manual_capture_request(s, e, True)
+        req["android.colorCorrection.mode"] = 0
+
+        # Transforms:
+        # 1. Identity
+        # 2. Identity
+        # 3. Boost blue
+        transforms = [its.objects.int_to_rational([1,0,0, 0,1,0, 0,0,1]),
+                      its.objects.int_to_rational([1,0,0, 0,1,0, 0,0,1]),
+                      its.objects.int_to_rational([1,0,0, 0,1,0, 0,0,2])]
+
+        # Gains:
+        # 1. Unit
+        # 2. Boost red
+        # 3. Unit
+        gains = [[1,1,1,1], [2,1,1,1], [1,1,1,1]]
+
+        r_means = []
+        g_means = []
+        b_means = []
+
+        # Capture requests:
+        # 1. With unit gains, and identity transform.
+        # 2. With a higher red gain, and identity transform.
+        # 3. With unit gains, and a transform that boosts blue.
+        for i in range(len(transforms)):
+            req["android.colorCorrection.transform"] = transforms[i]
+            req["android.colorCorrection.gains"] = gains[i]
+            cap = cam.do_capture(req)
+            img = its.image.convert_capture_to_rgb_image(cap)
+            its.image.write_image(img, "%s_req=%d.jpg" % (NAME, i))
+            tile = its.image.get_image_patch(img, 0.45, 0.45, 0.1, 0.1)
+            rgb_means = its.image.compute_image_means(tile)
+            r_means.append(rgb_means[0])
+            g_means.append(rgb_means[1])
+            b_means.append(rgb_means[2])
+            ratios = [rgb_means[0] / rgb_means[1], rgb_means[2] / rgb_means[1]]
+            print "Means = ", rgb_means, "   Ratios =", ratios
+
+        # Draw a plot.
+        domain = range(len(transforms))
+        pylab.plot(domain, r_means, 'r')
+        pylab.plot(domain, g_means, 'g')
+        pylab.plot(domain, b_means, 'b')
+        pylab.ylim([0,1])
+        matplotlib.pyplot.savefig("%s_plot_means.png" % (NAME))
+
+        # Expect G0 == G1 == G2, R0 == 0.5*R1 == R2, B0 == B1 == 0.5*B2
+        # Also need to ensure that the image is not clamped to white/black.
+        assert(all(g_means[i] > 0.2 and g_means[i] < 0.8 for i in xrange(3)))
+        assert(abs(g_means[1] - g_means[0]) < THRESHOLD_MAX_DIFF)
+        assert(abs(g_means[2] - g_means[1]) < THRESHOLD_MAX_DIFF)
+        assert(abs(r_means[2] - r_means[0]) < THRESHOLD_MAX_DIFF)
+        assert(abs(r_means[1] - 2.0 * r_means[0]) < THRESHOLD_MAX_DIFF)
+        assert(abs(b_means[1] - b_means[0]) < THRESHOLD_MAX_DIFF)
+        assert(abs(b_means[2] - 2.0 * b_means[0]) < THRESHOLD_MAX_DIFF)
+
+if __name__ == '__main__':
+    main()
+
diff --git a/apps/CameraITS/tests/scene1/test_param_exposure_time.py b/apps/CameraITS/tests/scene1/test_param_exposure_time.py
new file mode 100644
index 0000000..e6078d9
--- /dev/null
+++ b/apps/CameraITS/tests/scene1/test_param_exposure_time.py
@@ -0,0 +1,68 @@
+# Copyright 2013 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import its.image
+import its.caps
+import its.device
+import its.objects
+import its.target
+import pylab
+import os.path
+import matplotlib
+import matplotlib.pyplot
+
+def main():
+    """Test that the android.sensor.exposureTime parameter is applied.
+    """
+    NAME = os.path.basename(__file__).split(".")[0]
+
+    exp_times = []
+    r_means = []
+    g_means = []
+    b_means = []
+
+    with its.device.ItsSession() as cam:
+        props = cam.get_camera_properties()
+        its.caps.skip_unless(its.caps.compute_target_exposure(props) and
+                             its.caps.per_frame_control(props))
+
+        e,s = its.target.get_target_exposure_combos(cam)["midExposureTime"]
+        for i,e_mult in enumerate([0.8, 0.9, 1.0, 1.1, 1.2]):
+            req = its.objects.manual_capture_request(s, e * e_mult, True)
+            cap = cam.do_capture(req)
+            img = its.image.convert_capture_to_rgb_image(cap)
+            its.image.write_image(
+                    img, "%s_frame%d.jpg" % (NAME, i))
+            tile = its.image.get_image_patch(img, 0.45, 0.45, 0.1, 0.1)
+            rgb_means = its.image.compute_image_means(tile)
+            exp_times.append(e * e_mult)
+            r_means.append(rgb_means[0])
+            g_means.append(rgb_means[1])
+            b_means.append(rgb_means[2])
+
+    # Draw a plot.
+    pylab.plot(exp_times, r_means, 'r')
+    pylab.plot(exp_times, g_means, 'g')
+    pylab.plot(exp_times, b_means, 'b')
+    pylab.ylim([0,1])
+    matplotlib.pyplot.savefig("%s_plot_means.png" % (NAME))
+
+    # Test for pass/fail: check that each shot is brighter than the previous.
+    for means in [r_means, g_means, b_means]:
+        for i in range(len(means)-1):
+            assert(means[i+1] > means[i])
+
+if __name__ == '__main__':
+    main()
+
diff --git a/apps/CameraITS/tests/scene1/test_param_flash_mode.py b/apps/CameraITS/tests/scene1/test_param_flash_mode.py
new file mode 100644
index 0000000..aae56aa
--- /dev/null
+++ b/apps/CameraITS/tests/scene1/test_param_flash_mode.py
@@ -0,0 +1,66 @@
+# Copyright 2013 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import its.image
+import its.caps
+import its.device
+import its.objects
+import its.target
+import os.path
+
+def main():
+    """Test that the android.flash.mode parameter is applied.
+    """
+    NAME = os.path.basename(__file__).split(".")[0]
+
+    with its.device.ItsSession() as cam:
+        props = cam.get_camera_properties()
+        its.caps.skip_unless(its.caps.compute_target_exposure(props) and
+                             its.caps.flash(props) and
+                             its.caps.per_frame_control(props))
+
+        flash_modes_reported = []
+        flash_states_reported = []
+        g_means = []
+
+        # Manually set the exposure to be a little on the dark side, so that
+        # it should be obvious whether the flash fired or not, and use a
+        # linear tonemap.
+        e, s = its.target.get_target_exposure_combos(cam)["midExposureTime"]
+        e /= 4
+        req = its.objects.manual_capture_request(s, e, True)
+
+        for f in [0,1,2]:
+            req["android.flash.mode"] = f
+            cap = cam.do_capture(req)
+            flash_modes_reported.append(cap["metadata"]["android.flash.mode"])
+            flash_states_reported.append(cap["metadata"]["android.flash.state"])
+            img = its.image.convert_capture_to_rgb_image(cap)
+            its.image.write_image(img, "%s_mode=%d.jpg" % (NAME, f))
+            tile = its.image.get_image_patch(img, 0.45, 0.45, 0.1, 0.1)
+            rgb = its.image.compute_image_means(tile)
+            g_means.append(rgb[1])
+
+        assert(flash_modes_reported == [0,1,2])
+        assert(flash_states_reported[0] not in [3,4])
+        assert(flash_states_reported[1] in [3,4])
+        assert(flash_states_reported[2] in [3,4])
+
+        print "G brightnesses:", g_means
+        assert(g_means[1] > g_means[0])
+        assert(g_means[2] > g_means[0])
+
+if __name__ == '__main__':
+    main()
+
diff --git a/apps/CameraITS/tests/scene1/test_param_noise_reduction.py b/apps/CameraITS/tests/scene1/test_param_noise_reduction.py
new file mode 100644
index 0000000..f5176a7
--- /dev/null
+++ b/apps/CameraITS/tests/scene1/test_param_noise_reduction.py
@@ -0,0 +1,99 @@
+# Copyright 2013 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import its.image
+import its.caps
+import its.device
+import its.objects
+import its.target
+import pylab
+import os.path
+import matplotlib
+import matplotlib.pyplot
+
+def main():
+    """Test that the android.noiseReduction.mode param is applied when set.
+
+    Capture images with the camera dimly lit. Uses a high analog gain to
+    ensure the captured image is noisy.
+
+    Captures three images, for NR off, "fast", and "high quality".
+    Also captures an image with low gain and NR off, and uses the variance
+    of this as the baseline.
+    """
+    NAME = os.path.basename(__file__).split(".")[0]
+
+    # List of variances for Y,U,V.
+    variances = [[],[],[]]
+
+    # Reference (baseline) variance for each of Y,U,V.
+    ref_variance = []
+
+    nr_modes_reported = []
+
+    with its.device.ItsSession() as cam:
+        props = cam.get_camera_properties()
+        its.caps.skip_unless(its.caps.compute_target_exposure(props) and
+                             its.caps.per_frame_control(props))
+
+        # NR mode 0 with low gain
+        # This shot serves as the low-noise baseline for each plane.
+        e, s = its.target.get_target_exposure_combos(cam)["minSensitivity"]
+        req = its.objects.manual_capture_request(s, e)
+        req["android.noiseReduction.mode"] = 0
+        cap = cam.do_capture(req)
+        its.image.write_image(
+                its.image.convert_capture_to_rgb_image(cap),
+                "%s_low_gain.jpg" % (NAME))
+        planes = its.image.convert_capture_to_planes(cap)
+        # Center-patch variance of each of the three planes.
+        for j in range(3):
+            img = planes[j]
+            tile = its.image.get_image_patch(img, 0.45, 0.45, 0.1, 0.1)
+            ref_variance.append(its.image.compute_image_variances(tile)[0])
+        print "Ref variances:", ref_variance
+
+        for i in range(3):
+            # NR modes 0, 1, 2 with high gain
+            e, s = its.target.get_target_exposure_combos(cam)["maxSensitivity"]
+            req = its.objects.manual_capture_request(s, e)
+            req["android.noiseReduction.mode"] = i
+            cap = cam.do_capture(req)
+            # The HAL must echo the requested NR mode in the result metadata.
+            nr_modes_reported.append(
+                    cap["metadata"]["android.noiseReduction.mode"])
+            its.image.write_image(
+                    its.image.convert_capture_to_rgb_image(cap),
+                    "%s_high_gain_nr=%d.jpg" % (NAME, i))
+            planes = its.image.convert_capture_to_planes(cap)
+            # Normalize each plane's variance by its low-gain baseline so
+            # the planes are comparable.
+            for j in range(3):
+                img = planes[j]
+                tile = its.image.get_image_patch(img, 0.45, 0.45, 0.1, 0.1)
+                variance = its.image.compute_image_variances(tile)[0]
+                variances[j].append(variance / ref_variance[j])
+        print "Variances with NR mode [0,1,2]:", variances
+
+    # Draw a plot.
+    for j in range(3):
+        pylab.plot(range(3), variances[j], "rgb"[j])
+    matplotlib.pyplot.savefig("%s_plot_variances.png" % (NAME))
+
+    # The reported NR modes must match what was requested.
+    assert(nr_modes_reported == [0,1,2])
+
+    # Check that the variance of the NR=0 image is higher than for the
+    # NR=1 and NR=2 images.
+    for j in range(3):
+        for i in range(1,3):
+            assert(variances[j][i] < variances[j][0])
+
+if __name__ == '__main__':
+    main()
+
diff --git a/apps/CameraITS/tests/scene1/test_param_sensitivity.py b/apps/CameraITS/tests/scene1/test_param_sensitivity.py
new file mode 100644
index 0000000..d6b44a2
--- /dev/null
+++ b/apps/CameraITS/tests/scene1/test_param_sensitivity.py
@@ -0,0 +1,73 @@
+# Copyright 2013 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import its.image
+import its.caps
+import its.device
+import its.objects
+import its.target
+import pylab
+import os.path
+import matplotlib
+import matplotlib.pyplot
+
+def main():
+    """Test that the android.sensor.sensitivity parameter is applied.
+    """
+    NAME = os.path.basename(__file__).split(".")[0]
+
+    NUM_STEPS = 5
+
+    sensitivities = None
+    r_means = []
+    g_means = []
+    b_means = []
+
+    with its.device.ItsSession() as cam:
+        props = cam.get_camera_properties()
+        its.caps.skip_unless(its.caps.compute_target_exposure(props) and
+                             its.caps.per_frame_control(props))
+
+        expt,_ = its.target.get_target_exposure_combos(cam)["midSensitivity"]
+        sens_range = props['android.sensor.info.sensitivityRange']
+        sens_step = (sens_range[1] - sens_range[0]) / float(NUM_STEPS-1)
+        sensitivities = [sens_range[0] + i * sens_step for i in range(NUM_STEPS)]
+
+        for s in sensitivities:
+            req = its.objects.manual_capture_request(s, expt)
+            cap = cam.do_capture(req)
+            img = its.image.convert_capture_to_rgb_image(cap)
+            its.image.write_image(
+                    img, "%s_iso=%04d.jpg" % (NAME, s))
+            tile = its.image.get_image_patch(img, 0.45, 0.45, 0.1, 0.1)
+            rgb_means = its.image.compute_image_means(tile)
+            r_means.append(rgb_means[0])
+            g_means.append(rgb_means[1])
+            b_means.append(rgb_means[2])
+
+    # Draw a plot.
+    pylab.plot(sensitivities, r_means, 'r')
+    pylab.plot(sensitivities, g_means, 'g')
+    pylab.plot(sensitivities, b_means, 'b')
+    pylab.ylim([0,1])
+    matplotlib.pyplot.savefig("%s_plot_means.png" % (NAME))
+
+    # Test for pass/fail: check that each shot is brighter than the previous.
+    for means in [r_means, g_means, b_means]:
+        for i in range(len(means)-1):
+            assert(means[i+1] > means[i])
+
+if __name__ == '__main__':
+    main()
+
diff --git a/apps/CameraITS/tests/scene1/test_param_tonemap_mode.py b/apps/CameraITS/tests/scene1/test_param_tonemap_mode.py
new file mode 100644
index 0000000..8c8e626
--- /dev/null
+++ b/apps/CameraITS/tests/scene1/test_param_tonemap_mode.py
@@ -0,0 +1,103 @@
+# Copyright 2013 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import its.image
+import its.caps
+import its.device
+import its.objects
+import its.target
+import os
+import os.path
+
+def main():
+    """Test that the android.tonemap.mode param is applied.
+
+    Applies different tonemap curves to each R,G,B channel, and checks
+    that the output images are modified as expected.
+    """
+    NAME = os.path.basename(__file__).split(".")[0]
+
+    # Minimum ratio increase between channels in Test 1, and maximum
+    # allowed per-channel difference between curve sizes in Test 2.
+    THRESHOLD_RATIO_MIN_DIFF = 0.1
+    THRESHOLD_DIFF_MAX_DIFF = 0.05
+
+    # The HAL3.2 spec requires that curves up to 64 control points in length
+    # must be supported. Test 1 below uses 32-point curves; Test 2 compares
+    # 32-point against 64-point curves.
+    L = 32
+    LM1 = float(L-1)
+
+    with its.device.ItsSession() as cam:
+        props = cam.get_camera_properties()
+        its.caps.skip_unless(its.caps.compute_target_exposure(props) and
+                             its.caps.per_frame_control(props))
+
+        # Use half the target mid exposure time.
+        e, s = its.target.get_target_exposure_combos(cam)["midExposureTime"]
+        e /= 2
+
+        # Test 1: that the tonemap curves have the expected effect. Take two
+        # shots, with n in [0,1], where each has a linear tonemap, with the
+        # n=1 shot having a steeper gradient. The gradient for each R,G,B
+        # channel increases (i.e. R[n=1] should be brighter than R[n=0],
+        # and G[n=1] should be brighter than G[n=0] by a larger margin, etc.)
+        rgb_means = []
+
+        for n in [0,1]:
+            req = its.objects.manual_capture_request(s,e)
+            req["android.tonemap.mode"] = 0
+            # Curves are flattened [x0,y0, x1,y1, ...] lists. For n=1 the
+            # gradients are 1.5x (R), 2.0x (G) and 2.5x (B), clamped to 1.0.
+            req["android.tonemap.curveRed"] = (
+                    sum([[i/LM1, min(1.0,(1+0.5*n)*i/LM1)] for i in range(L)], []))
+            req["android.tonemap.curveGreen"] = (
+                    sum([[i/LM1, min(1.0,(1+1.0*n)*i/LM1)] for i in range(L)], []))
+            req["android.tonemap.curveBlue"] = (
+                    sum([[i/LM1, min(1.0,(1+1.5*n)*i/LM1)] for i in range(L)], []))
+            cap = cam.do_capture(req)
+            img = its.image.convert_capture_to_rgb_image(cap)
+            its.image.write_image(
+                    img, "%s_n=%d.jpg" %(NAME, n))
+            tile = its.image.get_image_patch(img, 0.45, 0.45, 0.1, 0.1)
+            rgb_means.append(its.image.compute_image_means(tile))
+
+        # Brightness gain per channel between the two shots must be ordered
+        # R < G < B, matching the increasing curve gradients above.
+        rgb_ratios = [rgb_means[1][i] / rgb_means[0][i] for i in xrange(3)]
+        print "Test 1: RGB ratios:", rgb_ratios
+        assert(rgb_ratios[0] + THRESHOLD_RATIO_MIN_DIFF < rgb_ratios[1])
+        assert(rgb_ratios[1] + THRESHOLD_RATIO_MIN_DIFF < rgb_ratios[2])
+
+
+        # Test 2: that the length of the tonemap curve (i.e. number of control
+        # points) doesn't affect the output.
+        rgb_means = []
+
+        for size in [32,64]:
+            m = float(size-1)
+            # Identity curve with the given number of control points.
+            curve = sum([[i/m, i/m] for i in range(size)], [])
+            req = its.objects.manual_capture_request(s,e)
+            req["android.tonemap.mode"] = 0
+            req["android.tonemap.curveRed"] = curve
+            req["android.tonemap.curveGreen"] = curve
+            req["android.tonemap.curveBlue"] = curve
+            cap = cam.do_capture(req)
+            img = its.image.convert_capture_to_rgb_image(cap)
+            its.image.write_image(
+                    img, "%s_size=%02d.jpg" %(NAME, size))
+            tile = its.image.get_image_patch(img, 0.45, 0.45, 0.1, 0.1)
+            rgb_means.append(its.image.compute_image_means(tile))
+
+        # Per-channel means for the two curve sizes must agree closely.
+        rgb_diffs = [rgb_means[1][i] - rgb_means[0][i] for i in xrange(3)]
+        print "Test 2: RGB diffs:", rgb_diffs
+        assert(abs(rgb_diffs[0]) < THRESHOLD_DIFF_MAX_DIFF)
+        assert(abs(rgb_diffs[1]) < THRESHOLD_DIFF_MAX_DIFF)
+        assert(abs(rgb_diffs[2]) < THRESHOLD_DIFF_MAX_DIFF)
+
+if __name__ == '__main__':
+    main()
+
diff --git a/apps/CameraITS/tests/scene1/test_raw_burst_sensitivity.py b/apps/CameraITS/tests/scene1/test_raw_burst_sensitivity.py
new file mode 100644
index 0000000..6c2b5c1
--- /dev/null
+++ b/apps/CameraITS/tests/scene1/test_raw_burst_sensitivity.py
@@ -0,0 +1,85 @@
+# Copyright 2014 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import its.device
+import its.caps
+import its.objects
+import its.image
+import os.path
+import pylab
+import matplotlib
+import matplotlib.pyplot
+
+def main():
+    """Capture a set of raw images with increasing gains and measure the noise.
+
+    Capture raw-only, in a burst.
+    """
+    NAME = os.path.basename(__file__).split(".")[0]
+
+    # Each shot must be 1% noisier (by the variance metric) than the previous
+    # one.
+    VAR_THRESH = 1.01
+
+    NUM_STEPS = 5
+
+    with its.device.ItsSession() as cam:
+
+        props = cam.get_camera_properties()
+        its.caps.skip_unless(its.caps.raw16(props) and
+                             its.caps.manual_sensor(props) and
+                             its.caps.read_3a(props) and
+                             its.caps.per_frame_control(props))
+
+        # Expose for the scene with min sensitivity
+        sens_min, sens_max = props['android.sensor.info.sensitivityRange']
+        sens_step = (sens_max - sens_min) / NUM_STEPS
+        s_ae,e_ae,_,_,_  = cam.do_3a(get_results=True)
+        s_e_prod = s_ae * e_ae
+
+        reqs = []
+        settings = []
+        for s in range(sens_min, sens_max, sens_step):
+            e = int(s_e_prod / float(s))
+            req = its.objects.manual_capture_request(s, e)
+            reqs.append(req)
+            settings.append((s,e))
+
+        caps = cam.do_capture(reqs, cam.CAP_RAW)
+
+        variances = []
+        for i,cap in enumerate(caps):
+            (s,e) = settings[i]
+
+            # Measure the variance. Each shot should be noisier than the
+            # previous shot (as the gain is increasing).
+            plane = its.image.convert_capture_to_planes(cap, props)[1]
+            tile = its.image.get_image_patch(plane, 0.45,0.45,0.1,0.1)
+            var = its.image.compute_image_variances(tile)[0]
+            variances.append(var)
+
+            img = its.image.convert_capture_to_rgb_image(cap, props=props)
+            its.image.write_image(img, "%s_s=%05d_var=%f.jpg" % (NAME,s,var))
+            print "s=%d, e=%d, var=%e"%(s,e,var)
+
+        pylab.plot(range(len(variances)), variances)
+        matplotlib.pyplot.savefig("%s_variances.png" % (NAME))
+
+        # Test that each shot is noisier than the previous one.
+        for i in range(len(variances) - 1):
+            assert(variances[i] < variances[i+1] / VAR_THRESH)
+
+if __name__ == '__main__':
+    main()
+
diff --git a/apps/CameraITS/tests/scene1/test_raw_sensitivity.py b/apps/CameraITS/tests/scene1/test_raw_sensitivity.py
new file mode 100644
index 0000000..14c5eb0
--- /dev/null
+++ b/apps/CameraITS/tests/scene1/test_raw_sensitivity.py
@@ -0,0 +1,78 @@
+# Copyright 2014 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import its.device
+import its.caps
+import its.objects
+import its.image
+import os.path
+import pylab
+import matplotlib
+import matplotlib.pyplot
+
+def main():
+    """Capture a set of raw images with increasing gains and measure the noise.
+    """
+    NAME = os.path.basename(__file__).split(".")[0]
+
+    # Each shot must be 1% noisier (by the variance metric) than the previous
+    # one.
+    VAR_THRESH = 1.01
+
+    NUM_STEPS = 5
+
+    with its.device.ItsSession() as cam:
+
+        props = cam.get_camera_properties()
+        its.caps.skip_unless(its.caps.raw16(props) and
+                             its.caps.manual_sensor(props) and
+                             its.caps.read_3a(props) and
+                             its.caps.per_frame_control(props))
+
+        # Expose for the scene with min sensitivity
+        sens_min, sens_max = props['android.sensor.info.sensitivityRange']
+        sens_step = (sens_max - sens_min) / NUM_STEPS
+        s_ae,e_ae,_,_,_  = cam.do_3a(get_results=True)
+        s_e_prod = s_ae * e_ae
+
+        variances = []
+        for s in range(sens_min, sens_max, sens_step):
+
+            e = int(s_e_prod / float(s))
+            req = its.objects.manual_capture_request(s, e)
+
+            # Capture raw+yuv, but only look at the raw.
+            cap,_ = cam.do_capture(req, cam.CAP_RAW_YUV)
+
+            # Measure the variance. Each shot should be noisier than the
+            # previous shot (as the gain is increasing).
+            plane = its.image.convert_capture_to_planes(cap, props)[1]
+            tile = its.image.get_image_patch(plane, 0.45,0.45,0.1,0.1)
+            var = its.image.compute_image_variances(tile)[0]
+            variances.append(var)
+
+            img = its.image.convert_capture_to_rgb_image(cap, props=props)
+            its.image.write_image(img, "%s_s=%05d_var=%f.jpg" % (NAME,s,var))
+            print "s=%d, e=%d, var=%e"%(s,e,var)
+
+        pylab.plot(range(len(variances)), variances)
+        matplotlib.pyplot.savefig("%s_variances.png" % (NAME))
+
+        # Test that each shot is noisier than the previous one.
+        for i in range(len(variances) - 1):
+            assert(variances[i] < variances[i+1] / VAR_THRESH)
+
+if __name__ == '__main__':
+    main()
+
diff --git a/apps/CameraITS/tests/scene1/test_tonemap_sequence.py b/apps/CameraITS/tests/scene1/test_tonemap_sequence.py
new file mode 100644
index 0000000..18ca506
--- /dev/null
+++ b/apps/CameraITS/tests/scene1/test_tonemap_sequence.py
@@ -0,0 +1,70 @@
+# Copyright 2014 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import its.image
+import its.caps
+import its.device
+import its.objects
+import os.path
+import numpy
+
+def main():
+    """Test a sequence of shots with different tonemap curves.
+    """
+    NAME = os.path.basename(__file__).split(".")[0]
+
+    # There should be 3 identical frames followed by a different set of
+    # 3 identical frames.
+    MAX_SAME_DELTA = 0.01
+    MIN_DIFF_DELTA = 0.10
+
+    with its.device.ItsSession() as cam:
+        props = cam.get_camera_properties()
+        its.caps.skip_unless(its.caps.manual_sensor(props) and
+                             its.caps.manual_post_proc(props) and
+                             its.caps.per_frame_control(props))
+
+        sens, exp_time, _,_,_ = cam.do_3a(do_af=False,get_results=True)
+
+        means = []
+
+        # Capture 3 manual shots with a linear tonemap.
+        req = its.objects.manual_capture_request(sens, exp_time, True)
+        for i in [0,1,2]:
+            cap = cam.do_capture(req)
+            img = its.image.convert_capture_to_rgb_image(cap)
+            its.image.write_image(img, "%s_i=%d.jpg" % (NAME, i))
+            tile = its.image.get_image_patch(img, 0.45, 0.45, 0.1, 0.1)
+            means.append(tile.mean(0).mean(0))
+
+        # Capture 3 manual shots with the default tonemap.
+        req = its.objects.manual_capture_request(sens, exp_time, False)
+        for i in [3,4,5]:
+            cap = cam.do_capture(req)
+            img = its.image.convert_capture_to_rgb_image(cap)
+            its.image.write_image(img, "%s_i=%d.jpg" % (NAME, i))
+            tile = its.image.get_image_patch(img, 0.45, 0.45, 0.1, 0.1)
+            means.append(tile.mean(0).mean(0))
+
+        # Compute the delta between each consecutive frame pair.
+        deltas = [numpy.max(numpy.fabs(means[i+1]-means[i])) \
+                  for i in range(len(means)-1)]
+        print "Deltas between consecutive frames:", deltas
+
+        assert(all([abs(deltas[i]) < MAX_SAME_DELTA for i in [0,1,3,4]]))
+        assert(abs(deltas[2]) > MIN_DIFF_DELTA)
+
+if __name__ == '__main__':
+    main()
+
diff --git a/apps/CameraITS/tests/scene1/test_yuv_jpeg_all.py b/apps/CameraITS/tests/scene1/test_yuv_jpeg_all.py
new file mode 100644
index 0000000..1b278ef
--- /dev/null
+++ b/apps/CameraITS/tests/scene1/test_yuv_jpeg_all.py
@@ -0,0 +1,84 @@
+# Copyright 2013 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import its.image
+import its.caps
+import its.device
+import its.objects
+import its.target
+import os.path
+import math
+
+def main():
+    """Test that the reported sizes and formats for image capture work.
+    """
+    NAME = os.path.basename(__file__).split(".")[0]
+
+    THRESHOLD_MAX_RMS_DIFF = 0.03
+
+    with its.device.ItsSession() as cam:
+        props = cam.get_camera_properties()
+        its.caps.skip_unless(its.caps.compute_target_exposure(props) and
+                             its.caps.per_frame_control(props))
+
+        # Use a manual request with a linear tonemap so that the YUV and JPEG
+        # should look the same (once converted by the its.image module).
+        e, s = its.target.get_target_exposure_combos(cam)["midExposureTime"]
+        req = its.objects.manual_capture_request(s, e, True)
+
+        rgbs = []
+
+        for size in its.objects.get_available_output_sizes("yuv", props):
+            out_surface = {"width":size[0], "height":size[1], "format":"yuv"}
+            cap = cam.do_capture(req, out_surface)
+            assert(cap["format"] == "yuv")
+            assert(cap["width"] == size[0])
+            assert(cap["height"] == size[1])
+            print "Captured YUV %dx%d" % (cap["width"], cap["height"])
+            img = its.image.convert_capture_to_rgb_image(cap)
+            its.image.write_image(img, "%s_yuv_w%d_h%d.jpg"%(
+                    NAME,size[0],size[1]))
+            tile = its.image.get_image_patch(img, 0.45, 0.45, 0.1, 0.1)
+            rgb = its.image.compute_image_means(tile)
+            rgbs.append(rgb)
+
+        for size in its.objects.get_available_output_sizes("jpg", props):
+            out_surface = {"width":size[0], "height":size[1], "format":"jpg"}
+            cap = cam.do_capture(req, out_surface)
+            assert(cap["format"] == "jpeg")
+            assert(cap["width"] == size[0])
+            assert(cap["height"] == size[1])
+            img = its.image.decompress_jpeg_to_rgb_image(cap["data"])
+            its.image.write_image(img, "%s_jpg_w%d_h%d.jpg"%(
+                    NAME,size[0], size[1]))
+            assert(img.shape[0] == size[1])
+            assert(img.shape[1] == size[0])
+            assert(img.shape[2] == 3)
+            print "Captured JPEG %dx%d" % (cap["width"], cap["height"])
+            tile = its.image.get_image_patch(img, 0.45, 0.45, 0.1, 0.1)
+            rgb = its.image.compute_image_means(tile)
+            rgbs.append(rgb)
+
+        max_diff = 0
+        rgb0 = rgbs[0]
+        for rgb1 in rgbs[1:]:
+            rms_diff = math.sqrt(
+                    sum([pow(rgb0[i] - rgb1[i], 2.0) for i in range(3)]) / 3.0)
+            max_diff = max(max_diff, rms_diff)
+        print "Max RMS difference:", max_diff
+        assert(rms_diff < THRESHOLD_MAX_RMS_DIFF)
+
+if __name__ == '__main__':
+    main()
+
diff --git a/apps/CameraITS/tests/scene1/test_yuv_plus_dng.py b/apps/CameraITS/tests/scene1/test_yuv_plus_dng.py
new file mode 100644
index 0000000..33e7763
--- /dev/null
+++ b/apps/CameraITS/tests/scene1/test_yuv_plus_dng.py
@@ -0,0 +1,47 @@
+# Copyright 2014 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import its.image
+import its.caps
+import its.device
+import its.objects
+import os.path
+
+def main():
+    """Test capturing a single frame as both DNG and YUV outputs.
+    """
+    NAME = os.path.basename(__file__).split(".")[0]
+
+    with its.device.ItsSession() as cam:
+        props = cam.get_camera_properties()
+        its.caps.skip_unless(its.caps.raw(props) and
+                             its.caps.read_3a(props))
+
+        cam.do_3a()
+
+        req = its.objects.auto_capture_request()
+        cap_dng, cap_yuv = cam.do_capture(req, cam.CAP_DNG_YUV)
+
+        img = its.image.convert_capture_to_rgb_image(cap_yuv)
+        its.image.write_image(img, "%s.jpg" % (NAME))
+
+        with open("%s.dng"%(NAME), "wb") as f:
+            f.write(cap_dng["data"])
+
+        # No specific pass/fail check; test is assumed to have succeeded if
+        # it completes.
+
+if __name__ == '__main__':
+    main()
+
diff --git a/apps/CameraITS/tests/scene1/test_yuv_plus_jpeg.py b/apps/CameraITS/tests/scene1/test_yuv_plus_jpeg.py
new file mode 100644
index 0000000..6daa243
--- /dev/null
+++ b/apps/CameraITS/tests/scene1/test_yuv_plus_jpeg.py
@@ -0,0 +1,61 @@
+# Copyright 2014 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import its.image
+import its.caps
+import its.device
+import its.objects
+import its.target
+import os.path
+import math
+
+def main():
+    """Test capturing a single frame as both YUV and JPEG outputs.
+    """
+    NAME = os.path.basename(__file__).split(".")[0]
+
+    THRESHOLD_MAX_RMS_DIFF = 0.01
+
+    fmt_yuv =  {"format":"yuv"}
+    fmt_jpeg = {"format":"jpeg"}
+
+    with its.device.ItsSession() as cam:
+        props = cam.get_camera_properties()
+        its.caps.skip_unless(its.caps.compute_target_exposure(props))
+
+        # Use a manual request with a linear tonemap so that the YUV and JPEG
+        # should look the same (once converted by the its.image module).
+        e, s = its.target.get_target_exposure_combos(cam)["midExposureTime"]
+        req = its.objects.manual_capture_request(s, e, True)
+
+        cap_yuv, cap_jpeg = cam.do_capture(req, [fmt_yuv, fmt_jpeg])
+
+        img = its.image.convert_capture_to_rgb_image(cap_yuv, True)
+        its.image.write_image(img, "%s_yuv.jpg" % (NAME))
+        tile = its.image.get_image_patch(img, 0.45, 0.45, 0.1, 0.1)
+        rgb0 = its.image.compute_image_means(tile)
+
+        img = its.image.convert_capture_to_rgb_image(cap_jpeg, True)
+        its.image.write_image(img, "%s_jpeg.jpg" % (NAME))
+        tile = its.image.get_image_patch(img, 0.45, 0.45, 0.1, 0.1)
+        rgb1 = its.image.compute_image_means(tile)
+
+        rms_diff = math.sqrt(
+                sum([pow(rgb0[i] - rgb1[i], 2.0) for i in range(3)]) / 3.0)
+        print "RMS difference:", rms_diff
+        assert(rms_diff < THRESHOLD_MAX_RMS_DIFF)
+
+if __name__ == '__main__':
+    main()
+
diff --git a/apps/CameraITS/tests/scene1/test_yuv_plus_raw.py b/apps/CameraITS/tests/scene1/test_yuv_plus_raw.py
new file mode 100644
index 0000000..eb01c1a
--- /dev/null
+++ b/apps/CameraITS/tests/scene1/test_yuv_plus_raw.py
@@ -0,0 +1,62 @@
+# Copyright 2014 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import its.image
+import its.caps
+import its.device
+import its.objects
+import its.target
+import os.path
+import math
+
+def main():
+    """Test capturing a single frame as both RAW and YUV outputs.
+    """
+    NAME = os.path.basename(__file__).split(".")[0]
+
+    THRESHOLD_MAX_RMS_DIFF = 0.035
+
+    with its.device.ItsSession() as cam:
+        props = cam.get_camera_properties()
+        its.caps.skip_unless(its.caps.compute_target_exposure(props) and
+                             its.caps.raw16(props) and
+                             its.caps.per_frame_control(props))
+
+        # Use a manual request with a linear tonemap so that the YUV and RAW
+        # should look the same (once converted by the its.image module).
+        e, s = its.target.get_target_exposure_combos(cam)["midExposureTime"]
+        req = its.objects.manual_capture_request(s, e, True)
+
+        cap_raw, cap_yuv = cam.do_capture(req, cam.CAP_RAW_YUV)
+
+        img = its.image.convert_capture_to_rgb_image(cap_yuv)
+        its.image.write_image(img, "%s_yuv.jpg" % (NAME), True)
+        tile = its.image.get_image_patch(img, 0.45, 0.45, 0.1, 0.1)
+        rgb0 = its.image.compute_image_means(tile)
+
+        # Raw shots are 1/2 x 1/2 smaller after conversion to RGB, so scale the
+        # tile appropriately.
+        img = its.image.convert_capture_to_rgb_image(cap_raw, props=props)
+        its.image.write_image(img, "%s_raw.jpg" % (NAME), True)
+        tile = its.image.get_image_patch(img, 0.475, 0.475, 0.05, 0.05)
+        rgb1 = its.image.compute_image_means(tile)
+
+        rms_diff = math.sqrt(
+                sum([pow(rgb0[i] - rgb1[i], 2.0) for i in range(3)]) / 3.0)
+        print "RMS difference:", rms_diff
+        assert(rms_diff < THRESHOLD_MAX_RMS_DIFF)
+
+if __name__ == '__main__':
+    main()
+
diff --git a/apps/CameraITS/tests/scene1/test_yuv_plus_raw10.py b/apps/CameraITS/tests/scene1/test_yuv_plus_raw10.py
new file mode 100644
index 0000000..910a8ea
--- /dev/null
+++ b/apps/CameraITS/tests/scene1/test_yuv_plus_raw10.py
@@ -0,0 +1,63 @@
+# Copyright 2014 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import its.image
+import its.caps
+import its.device
+import its.objects
+import its.target
+import os.path
+import math
+
+def main():
+    """Test capturing a single frame as both RAW10 and YUV outputs.
+    """
+    NAME = os.path.basename(__file__).split(".")[0]
+
+    THRESHOLD_MAX_RMS_DIFF = 0.035
+
+    with its.device.ItsSession() as cam:
+        props = cam.get_camera_properties()
+        its.caps.skip_unless(its.caps.compute_target_exposure(props) and
+                             its.caps.raw10(props) and
+                             its.caps.per_frame_control(props))
+
+        # Use a manual request with a linear tonemap so that the YUV and RAW
+        # should look the same (once converted by the its.image module).
+        e, s = its.target.get_target_exposure_combos(cam)["midExposureTime"]
+        req = its.objects.manual_capture_request(s, e, True)
+
+        cap_raw, cap_yuv = cam.do_capture(req,
+                [{"format":"raw10"}, {"format":"yuv"}])
+
+        img = its.image.convert_capture_to_rgb_image(cap_yuv)
+        its.image.write_image(img, "%s_yuv.jpg" % (NAME), True)
+        tile = its.image.get_image_patch(img, 0.45, 0.45, 0.1, 0.1)
+        rgb0 = its.image.compute_image_means(tile)
+
+        # Raw shots are 1/2 x 1/2 smaller after conversion to RGB, so scale the
+        # tile appropriately.
+        img = its.image.convert_capture_to_rgb_image(cap_raw, props=props)
+        its.image.write_image(img, "%s_raw.jpg" % (NAME), True)
+        tile = its.image.get_image_patch(img, 0.475, 0.475, 0.05, 0.05)
+        rgb1 = its.image.compute_image_means(tile)
+
+        rms_diff = math.sqrt(
+                sum([pow(rgb0[i] - rgb1[i], 2.0) for i in range(3)]) / 3.0)
+        print "RMS difference:", rms_diff
+        assert(rms_diff < THRESHOLD_MAX_RMS_DIFF)
+
+if __name__ == '__main__':
+    main()
+
diff --git a/apps/CameraITS/tests/sensor_fusion/SensorFusion.pdf b/apps/CameraITS/tests/sensor_fusion/SensorFusion.pdf
new file mode 100644
index 0000000..2e390c7
--- /dev/null
+++ b/apps/CameraITS/tests/sensor_fusion/SensorFusion.pdf
Binary files differ
diff --git a/apps/CameraITS/tests/sensor_fusion/test_sensor_fusion.py b/apps/CameraITS/tests/sensor_fusion/test_sensor_fusion.py
new file mode 100644
index 0000000..49f47a9
--- /dev/null
+++ b/apps/CameraITS/tests/sensor_fusion/test_sensor_fusion.py
@@ -0,0 +1,377 @@
+# Copyright 2014 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import its.image
+import its.device
+import its.objects
+import time
+import math
+import pylab
+import os.path
+import matplotlib
+import matplotlib.pyplot
+import json
+import Image
+import numpy
+import cv2
+import bisect
+import scipy.spatial
+import sys
+
+NAME = os.path.basename(__file__).split(".")[0]
+
+# Capture 210 QVGA frames (which is 7s at 30fps)
+N = 210
+W,H = 320,240
+
+FEATURE_PARAMS = dict( maxCorners = 50,
+                       qualityLevel = 0.3,
+                       minDistance = 7,
+                       blockSize = 7 )
+
+LK_PARAMS = dict( winSize  = (15, 15),
+                  maxLevel = 2,
+                  criteria = (cv2.TERM_CRITERIA_EPS | cv2.TERM_CRITERIA_COUNT,
+                        10, 0.03))
+
+# Constants to convert between different time units (for clarity).
+SEC_TO_NSEC = 1000*1000*1000.0
+SEC_TO_MSEC = 1000.0
+MSEC_TO_NSEC = 1000*1000.0
+MSEC_TO_SEC = 1/1000.0
+NSEC_TO_SEC = 1/(1000*1000*1000.0)
+NSEC_TO_MSEC = 1/(1000*1000.0)
+
+# Pass/fail thresholds.
+THRESH_MAX_CORR_DIST = 0.005
+THRESH_MAX_SHIFT_MS = 2
+THRESH_MIN_ROT = 0.001
+
+def main():
+    """Test if image and motion sensor events are well synchronized.
+
+    The instructions for running this test are in the SensorFusion.pdf file in
+    the same directory as this test.
+
+    The command-line argument "replay" may be optionally provided. Without this
+    argument, the test will collect a new set of camera+gyro data from the
+    device and then analyze it (and it will also dump this data to files in the
+    current directory). If the "replay" argument is provided, then the script
+    will instead load the dumped data from a previous run and analyze that
+    instead. This can be helpful for developers who are digging for additional
+    information on their measurements.
+    """
+
+    # Collect or load the camera+gyro data. All gyro events as well as camera
+    # timestamps are in the "events" dictionary, and "frames" is a list of
+    # RGB images as numpy arrays.
+    if "replay" not in sys.argv:
+        events, frames = collect_data()
+    else:
+        events, frames = load_data()
+
+    # Compute the camera rotation displacements (rad) between each pair of
+    # adjacent frames.
+    cam_times = get_cam_times(events["cam"])
+    cam_rots = get_cam_rotations(frames)
+    if max(abs(cam_rots)) < THRESH_MIN_ROT:
+        print "Device wasn't moved enough"
+        assert(0)
+
+    # Find the best offset (time-shift) to align the gyro and camera motion
+    # traces; this function integrates the shifted gyro data between camera
+    # samples for a range of candidate shift values, and returns the shift that
+    # result in the best correlation.
+    offset = get_best_alignment_offset(cam_times, cam_rots, events["gyro"])
+
+    # Plot the camera and gyro traces after applying the best shift.
+    cam_times = cam_times + offset*SEC_TO_NSEC
+    gyro_rots = get_gyro_rotations(events["gyro"], cam_times)
+    plot_rotations(cam_rots, gyro_rots)
+
+    # Pass/fail based on the offset and also the correlation distance.
+    dist = scipy.spatial.distance.correlation(cam_rots,gyro_rots)
+    print "Best correlation of %f at shift of %.2fms"%(dist, offset*SEC_TO_MSEC)
+    assert(dist < THRESH_MAX_CORR_DIST)
+    assert(abs(offset) < THRESH_MAX_SHIFT_MS*MSEC_TO_SEC)
+
+def get_best_alignment_offset(cam_times, cam_rots, gyro_events):
+    """Find the best offset to align the camera and gyro traces.
+
+    Uses a correlation distance metric between the curves, where a smaller
+    value means that the curves are better-correlated.
+
+    Args:
+        cam_times: Array of N camera times, one for each frame.
+        cam_rots: Array of N-1 camera rotation displacements (rad).
+        gyro_events: List of gyro event objects.
+
+    Returns:
+        Offset (seconds) of the best alignment.
+    """
+    # Measure the corr. dist. over a shift of up to +/- 100ms (1ms step size).
+    # Get the shift corresponding to the best (lowest) score.
+    candidates = range(-100,101)
+    dists = []
+    for shift in candidates:
+        times = cam_times + shift*MSEC_TO_NSEC
+        gyro_rots = get_gyro_rotations(gyro_events, times)
+        dists.append(scipy.spatial.distance.correlation(cam_rots,gyro_rots))
+    best_corr_dist = min(dists)
+    best_shift = candidates[dists.index(best_corr_dist)]
+
+    # Fit a curve to the corr. dist. data to measure the minima more
+    # accurately, by looking at the correlation distances within a range of
+    # +/- 20ms from the measured best score; note that this will use fewer
+    # than the full +/- 20 range for the curve fit if the measured score
+    # (which is used as the center of the fit) is within 20ms of the edge of
+    # the +/- 100ms candidate range.
+    i = len(dists)/2 + best_shift
+    candidates = candidates[i-20:i+21]
+    dists = dists[i-20:i+21]
+    a,b,c = numpy.polyfit(candidates, dists, 2)
+    exact_best_shift = -b/(2*a)
+    if abs(best_shift - exact_best_shift) > 2.0 or a <= 0 or c <= 0:
+        print "Test failed; bad fit to time-shift curve"
+        assert(0)
+
+    xfit = [x/10.0 for x in xrange(candidates[0]*10,candidates[-1]*10)]
+    yfit = [a*x*x+b*x+c for x in xfit]
+    fig = matplotlib.pyplot.figure()
+    pylab.plot(candidates, dists, 'r', label="data")
+    pylab.plot(xfit, yfit, 'b', label="fit")
+    pylab.plot([exact_best_shift+x for x in [-0.1,0,0.1]], [0,0.01,0], 'b')
+    pylab.xlabel("Relative horizontal shift between curves (ms)")
+    pylab.ylabel("Correlation distance")
+    pylab.legend()
+    matplotlib.pyplot.savefig("%s_plot_shifts.png" % (NAME))
+
+    return exact_best_shift * MSEC_TO_SEC
+
+def plot_rotations(cam_rots, gyro_rots):
+    """Save a plot of the camera vs. gyro rotational measurements.
+
+    Args:
+        cam_rots: Array of N-1 camera rotation measurements (rad).
+        gyro_rots: Array of N-1 gyro rotation measurements (rad).
+    """
+    # For the plot, scale the rotations to be in degrees.
+    fig = matplotlib.pyplot.figure()
+    cam_rots = cam_rots * (360/(2*math.pi))
+    gyro_rots = gyro_rots * (360/(2*math.pi))
+    pylab.plot(range(len(cam_rots)), cam_rots, 'r', label="camera")
+    pylab.plot(range(len(gyro_rots)), gyro_rots, 'b', label="gyro")
+    pylab.legend()
+    pylab.xlabel("Camera frame number")
+    pylab.ylabel("Angular displacement between adjacent camera frames (deg)")
+    pylab.xlim([0, len(cam_rots)])
+    matplotlib.pyplot.savefig("%s_plot.png" % (NAME))
+
+def get_gyro_rotations(gyro_events, cam_times):
+    """Get the rotation values of the gyro.
+
+    Integrates the gyro data between each camera frame to compute an angular
+    displacement. Uses simple Euler approximation to implement the
+    integration.
+
+    Args:
+        gyro_events: List of gyro event objects.
+        cam_times: Array of N camera times, one for each frame.
+
+    Returns:
+        Array of N-1 gyro rotation measurements (rad).
+    """
+    all_times = numpy.array([e["time"] for e in gyro_events])
+    all_rots = numpy.array([e["z"] for e in gyro_events])
+    gyro_rots = []
+    # Integrate the gyro data between each pair of camera frame times.
+    for icam in range(len(cam_times)-1):
+        # Get the window of gyro samples within the current pair of frames.
+        tcam0 = cam_times[icam]
+        tcam1 = cam_times[icam+1]
+        igyrowindow0 = bisect.bisect(all_times, tcam0)
+        igyrowindow1 = bisect.bisect(all_times, tcam1)
+        sgyro = 0
+        # Integrate samples within the window.
+        for igyro in range(igyrowindow0, igyrowindow1):
+            vgyro0 = all_rots[igyro]
+            vgyro1 = all_rots[igyro+1]
+            tgyro0 = all_times[igyro]
+            tgyro1 = all_times[igyro+1]
+            vgyro = 0.5 * (vgyro0 + vgyro1)
+            deltatgyro = (tgyro1 - tgyro0) * NSEC_TO_SEC
+            sgyro += vgyro * deltatgyro
+        # Handle the fractional intervals at the sides of the window.
+        for side,igyro in enumerate([igyrowindow0-1, igyrowindow1]):
+            vgyro0 = all_rots[igyro]
+            vgyro1 = all_rots[igyro+1]
+            tgyro0 = all_times[igyro]
+            tgyro1 = all_times[igyro+1]
+            vgyro = 0.5 * (vgyro0 + vgyro1)
+            deltatgyro = (tgyro1 - tgyro0) * NSEC_TO_SEC
+            if side == 0:
+                f = (tcam0 - tgyro0) / (tgyro1 - tgyro0)
+                sgyro += vgyro * deltatgyro * (1.0 - f)
+            else:
+                f = (tcam1 - tgyro0) / (tgyro1 - tgyro0)
+                sgyro += vgyro * deltatgyro * f
+        gyro_rots.append(sgyro)
+    gyro_rots = numpy.array(gyro_rots)
+    return gyro_rots
+
+def get_cam_rotations(frames):
+    """Get the rotations of the camera between each pair of frames.
+
+    Takes N frames and returns N-1 angular displacements corresponding to the
+    rotations between adjacent pairs of frames, in radians.
+
+    Args:
+        frames: List of N images (as RGB numpy arrays).
+
+    Returns:
+        Array of N-1 camera rotation measurements (rad).
+    """
+    gframes = []
+    for frame in frames:
+        frame = (frame * 255.0).astype(numpy.uint8)
+        gframes.append(cv2.cvtColor(frame, cv2.COLOR_RGB2GRAY))
+    rots = []
+    for i in range(1,len(gframes)):
+        gframe0 = gframes[i-1]
+        gframe1 = gframes[i]
+        p0 = cv2.goodFeaturesToTrack(gframe0, mask=None, **FEATURE_PARAMS)
+        p1,st,_ = cv2.calcOpticalFlowPyrLK(gframe0, gframe1, p0, None,
+                **LK_PARAMS)
+        tform = procrustes_rotation(p0[st==1], p1[st==1])
+        # TODO: Choose the sign for the rotation so the cam matches the gyro
+        rot = -math.atan2(tform[0, 1], tform[0, 0])
+        rots.append(rot)
+        if i == 1:
+            # Save a debug visualization of the features that are being
+            # tracked in the first frame.
+            frame = frames[i]
+            for x,y in p0[st==1]:
+                cv2.circle(frame, (x,y), 3, (100,100,255), -1)
+            its.image.write_image(frame, "%s_features.jpg"%(NAME))
+    return numpy.array(rots)
+
+def get_cam_times(cam_events):
+    """Get the camera frame times.
+
+    Args:
+        cam_events: List of (start_exposure, exposure_time, readout_duration)
+            tuples, one per captured frame, with times in nanoseconds.
+
+    Returns:
+        frame_times: Array of N times, one corresponding to the "middle" of
+            the exposure of each frame.
+    """
+    # Assign a time to each frame that assumes that the image is instantly
+    # captured in the middle of its exposure.
+    starts = numpy.array([start for start,exptime,readout in cam_events])
+    exptimes = numpy.array([exptime for start,exptime,readout in cam_events])
+    readouts = numpy.array([readout for start,exptime,readout in cam_events])
+    frame_times = starts + (exptimes + readouts) / 2.0
+    return frame_times
+
+def load_data():
+    """Load a set of previously captured data.
+
+    Returns:
+        events: Dictionary containing all gyro events and cam timestamps.
+        frames: List of RGB images as numpy arrays.
+    """
+    with open("%s_events.txt"%(NAME), "r") as f:
+        events = json.loads(f.read())
+    n = len(events["cam"])
+    frames = []
+    for i in range(n):
+        img = Image.open("%s_frame%03d.jpg"%(NAME,i))
+        w,h = img.size[0:2]
+        frames.append(numpy.array(img).reshape(h,w,3) / 255.0)
+    return events, frames
+
+def collect_data():
+    """Capture a new set of data from the device.
+
+    Captures both motion data and camera frames, while the user is moving
+    the device in a prescribed manner.
+
+    Returns:
+        events: Dictionary containing all gyro events and cam timestamps.
+        frames: List of RGB images as numpy arrays.
+    """
+    with its.device.ItsSession() as cam:
+        print "Starting sensor event collection"
+        cam.start_sensor_events()
+
+        # Sleep a few seconds for gyro events to stabilize.
+        time.sleep(5)
+
+        # TODO: Ensure that OIS is disabled; set to DISABLE and wait some time.
+
+        # Capture the frames.
+        props = cam.get_camera_properties()
+        fmt = {"format":"yuv", "width":W, "height":H}
+        s,e,_,_,_ = cam.do_3a(get_results=True)
+        req = its.objects.manual_capture_request(s, e)
+        print "Capturing %dx%d with sens. %d, exp. time %.1fms" % (
+                W, H, s, e*NSEC_TO_MSEC)
+        caps = cam.do_capture([req]*N, fmt)
+
+        # Get the gyro events.
+        print "Reading out sensor events"
+        gyro = cam.get_sensor_events()["gyro"]
+
+        # Combine the events into a single structure.
+        print "Dumping event data"
+        starts = [c["metadata"]["android.sensor.timestamp"] for c in caps]
+        exptimes = [c["metadata"]["android.sensor.exposureTime"] for c in caps]
+        readouts = [c["metadata"]["android.sensor.rollingShutterSkew"]
+                    for c in caps]
+        events = {"gyro": gyro, "cam": zip(starts,exptimes,readouts)}
+        with open("%s_events.txt"%(NAME), "w") as f:
+            f.write(json.dumps(events))
+
+        # Convert the frames to RGB.
+        print "Dumping frames"
+        frames = []
+        for i,c in enumerate(caps):
+            img = its.image.convert_capture_to_rgb_image(c)
+            frames.append(img)
+            its.image.write_image(img, "%s_frame%03d.jpg"%(NAME,i))
+
+        return events, frames
+
+def procrustes_rotation(X, Y):
+    """
+    Procrustes analysis determines a linear transformation (translation,
+    reflection, orthogonal rotation and scaling) of the points in Y to best
+    conform them to the points in matrix X, using the sum of squared errors
+    as the goodness of fit criterion.
+
+    Args:
+        X, Y: Matrices of target and input coordinates.
+
+    Returns:
+        The rotation component of the transformation that maps X to Y.
+    """
+    X0 = (X-X.mean(0)) / numpy.sqrt(((X-X.mean(0))**2.0).sum())
+    Y0 = (Y-Y.mean(0)) / numpy.sqrt(((Y-Y.mean(0))**2.0).sum())
+    U,s,Vt = numpy.linalg.svd(numpy.dot(X0.T, Y0),full_matrices=False)
+    return numpy.dot(Vt.T, U.T)
+
+if __name__ == '__main__':
+    main()
+
diff --git a/apps/CameraITS/tests/tutorial.py b/apps/CameraITS/tests/tutorial.py
new file mode 100644
index 0000000..c266d14
--- /dev/null
+++ b/apps/CameraITS/tests/tutorial.py
@@ -0,0 +1,188 @@
+# Copyright 2014 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# --------------------------------------------------------------------------- #
+# The Google Python style guide should be used for scripts:                   #
+# http://google-styleguide.googlecode.com/svn/trunk/pyguide.html              #
+# --------------------------------------------------------------------------- #
+
+# The ITS modules that are in the pymodules/its/ directory. To see formatted
+# docs, use the "pydoc" command:
+#
+# > pydoc its.image
+#
+import its.image
+import its.device
+import its.objects
+import its.target
+
+# Standard Python modules.
+import os.path
+import pprint
+import math
+
+# Modules from the numpy, scipy, and matplotlib libraries. These are used for
+# the image processing code, and images are represented as numpy arrays.
+import pylab
+import numpy
+import matplotlib
+import matplotlib.pyplot
+
+# Each script has a "main" function.
+def main():
+
+    # Each script has a string description of what it does. This is the first
+    # entry inside the main function.
+    """Tutorial script to show how to use the ITS infrastructure.
+    """
+
+    # A convention in each script is to use the filename (without the extension)
+    # as the name of the test, when printing results to the screen or dumping
+    # files.
+    NAME = os.path.basename(__file__).split(".")[0]
+
+    # The standard way to open a session with a connected camera device. This
+    # creates a cam object which encapsulates the session and which is active
+    # within the scope of the "with" block; when the block exits, the camera
+    # session is closed.
+    with its.device.ItsSession() as cam:
+
+        # Get the static properties of the camera device. Returns a Python
+        # associative array object; print it to the console.
+        props = cam.get_camera_properties()
+        pprint.pprint(props)
+
+        # Grab a YUV frame with manual exposure of sensitivity = 200, exposure
+        # duration = 50ms.
+        req = its.objects.manual_capture_request(200, 50*1000*1000)
+        cap = cam.do_capture(req)
+
+        # Print the properties of the captured frame; width and height are
+        # integers, and the metadata is a Python associative array object.
+        print "Captured image width:", cap["width"]
+        print "Captured image height:", cap["height"]
+        pprint.pprint(cap["metadata"])
+
+        # The captured image is YUV420. Convert to RGB, and save as a file.
+        rgbimg = its.image.convert_capture_to_rgb_image(cap)
+        its.image.write_image(rgbimg, "%s_rgb_1.jpg" % (NAME))
+
+        # Can also get the Y,U,V planes separately; save these to greyscale
+        # files.
+        yimg,uimg,vimg = its.image.convert_capture_to_planes(cap)
+        its.image.write_image(yimg, "%s_y_plane_1.jpg" % (NAME))
+        its.image.write_image(uimg, "%s_u_plane_1.jpg" % (NAME))
+        its.image.write_image(vimg, "%s_v_plane_1.jpg" % (NAME))
+
+        # Run 3A on the device. In this case, just use the entire image as the
+        # 3A region, and run each of AWB,AE,AF. Can also change the region and
+        # specify independently for each of AE,AWB,AF whether it should run.
+        #
+        # NOTE: This may fail, if the camera isn't pointed at a reasonable
+        # target scene. If it fails, the script will end. The logcat messages
+        # can be inspected to see the status of 3A running on the device.
+        #
+        # > adb logcat -s 'ItsService:v'
+        #
+        # If this keeps on failing, try also rebooting the device before
+        # running the test.
+        sens, exp, gains, xform, focus = cam.do_3a(get_results=True)
+        print "AE: sensitivity %d, exposure %dms" % (sens, exp/1000000.0)
+        print "AWB: gains", gains, "transform", xform
+        print "AF: distance", focus
+
+        # Grab a new manual frame, using the 3A values, and convert it to RGB
+        # and save it to a file too. Note that the "req" object is just a
+        # Python dictionary that is pre-populated by the its.objects module
+        # functions (in this case a default manual capture), and the key/value
+        # pairs in the object can be used to set any field of the capture
+        # request. Here, the AWB gains and transform (CCM) are being used.
+        # Note that the CCM transform is in a rational format in capture
+        # requests, meaning it is an object with integer numerators and
+        # denominators. The 3A routine returns simple floats instead, however,
+        # so a conversion from float to rational must be performed.
+        req = its.objects.manual_capture_request(sens, exp)
+        xform_rat = its.objects.float_to_rational(xform)
+
+        req["android.colorCorrection.transform"] = xform_rat
+        req["android.colorCorrection.gains"] = gains
+        cap = cam.do_capture(req)
+        rgbimg = its.image.convert_capture_to_rgb_image(cap)
+        its.image.write_image(rgbimg, "%s_rgb_2.jpg" % (NAME))
+
+        # Print out the actual capture request object that was used.
+        pprint.pprint(req)
+
+        # Images are numpy arrays. The dimensions are (h,w,3) when indexing,
+        # in the case of RGB images. Greyscale images are (h,w,1). Pixels are
+        # generally float32 values in the [0,1] range, however some of the
+        # helper functions in its.image deal with the packed YUV420 and other
+        # formats of images that come from the device (and convert them to
+        # float32).
+        # Print the dimensions of the image, and the top-left pixel value,
+        # which is an array of 3 floats.
+        print "RGB image dimensions:", rgbimg.shape
+        print "RGB image top-left pixel:", rgbimg[0,0]
+
+        # Grab a center tile from the image; this returns a new image. Save
+        # this tile image. In this case, the tile is the middle 10% x 10%
+        # rectangle.
+        tile = its.image.get_image_patch(rgbimg, 0.45, 0.45, 0.1, 0.1)
+        its.image.write_image(tile, "%s_rgb_2_tile.jpg" % (NAME))
+
+        # Compute the mean values of the center tile image.
+        rgb_means = its.image.compute_image_means(tile)
+        print "RGB means:", rgb_means
+
+        # Apply a lookup table to the image, and save the new version. The LUT
+        # is basically a tonemap, and can be used to implement a gamma curve.
+        # In this case, the LUT is used to double the value of each pixel.
+        lut = numpy.array([2*i for i in xrange(65536)])
+        rgbimg_lut = its.image.apply_lut_to_image(rgbimg, lut)
+        its.image.write_image(rgbimg_lut, "%s_rgb_2_lut.jpg" % (NAME))
+
+        # Apply a 3x3 matrix to the image, and save the new version. The matrix
+        # is a numpy array, in row major order, and the pixel values are right-
+        # multiplied to it (when considered as column vectors). The example
+        # matrix here just boosts the blue channel by 10%.
+        mat = numpy.array([[1, 0, 0  ],
+                           [0, 1, 0  ],
+                           [0, 0, 1.1]])
+        rgbimg_mat = its.image.apply_matrix_to_image(rgbimg, mat)
+        its.image.write_image(rgbimg_mat, "%s_rgb_2_mat.jpg" % (NAME))
+
+        # Compute a histogram of the luma image, in 256 buckets.
+        yimg,_,_ = its.image.convert_capture_to_planes(cap)
+        hist,_ = numpy.histogram(yimg*255, 256, (0,256))
+
+        # Plot the histogram using matplotlib, and save as a PNG image.
+        pylab.plot(range(256), hist.tolist())
+        pylab.xlabel("Luma DN")
+        pylab.ylabel("Pixel count")
+        pylab.title("Histogram of luma channel of captured image")
+        matplotlib.pyplot.savefig("%s_histogram.png" % (NAME))
+
+        # Capture a frame to be returned as a JPEG. Load it as an RGB image,
+        # then save it back as a JPEG.
+        cap = cam.do_capture(req, cam.CAP_JPEG)
+        rgbimg = its.image.convert_capture_to_rgb_image(cap)
+        its.image.write_image(rgbimg, "%s_jpg.jpg" % (NAME))
+        r,g,b = its.image.convert_capture_to_planes(cap)
+        its.image.write_image(r, "%s_r.jpg" % (NAME))
+
+# This is the standard boilerplate in each test that allows the script to both
+# be executed directly and imported as a module.
+if __name__ == '__main__':
+    main()
+
diff --git a/apps/CameraITS/tools/config.py b/apps/CameraITS/tools/config.py
new file mode 100644
index 0000000..6e83412
--- /dev/null
+++ b/apps/CameraITS/tools/config.py
@@ -0,0 +1,66 @@
+# Copyright 2013 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import its.device
+import its.target
+import sys
+
+def main():
+    """Set the target exposure.
+
+    This program is just a wrapper around the its.target module, to allow the
+    functions in it to be invoked from the command line.
+
+    Usage:
+        python config.py        - Measure the target exposure, and cache it.
+        python config.py EXP    - Hard-code (and cache) the target exposure.
+
+    The "reboot" or "reboot=<N>" and "camera=<N>" arguments may also be
+    provided, just as with all the test scripts. The "target" argument is
+    may also be provided but it has no effect on this script since the cached
+    exposure value is cleared regardless.
+
+    If no exposure value is provided, the camera will be used to measure
+    the scene and set a level that will result in the luma (with linear
+    tonemap) being at the 0.5 level. This requires camera 3A and capture
+    to be functioning.
+
+    For bring-up purposes, the exposure value may be manually set to a hard-
+    coded value, without the camera having to be able to perform 3A (or even
+    capture a shot reliably).
+    """
+
+    # Command line args, ignoring any args that will be passed down to the
+    # ItsSession constructor.
+    args = [s for s in sys.argv if s[:6] not in \
+            ["reboot", "camera", "target", "noinit"]]
+
+    if len(args) == 1:
+        with its.device.ItsSession() as cam:
+            # Automatically measure target exposure.
+            its.target.clear_cached_target_exposure()
+            exposure = its.target.get_target_exposure(cam)
+    elif len(args) == 2:
+        # Hard-code the target exposure.
+        exposure = int(args[1])
+        its.target.set_hardcoded_exposure(exposure)
+    else:
+        print "Usage: python %s [EXPOSURE]"
+        sys.exit(0)
+    print "New target exposure set to", exposure
+    print "This corresponds to %dms at ISO 100" % int(exposure/100/1000000.0)
+
+if __name__ == '__main__':
+    main()
+
diff --git a/apps/CameraITS/tools/run_all_tests.py b/apps/CameraITS/tools/run_all_tests.py
new file mode 100644
index 0000000..f5a53b1
--- /dev/null
+++ b/apps/CameraITS/tools/run_all_tests.py
@@ -0,0 +1,118 @@
+# Copyright 2014 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+import os.path
+import tempfile
+import subprocess
+import time
+import sys
+import its.device
+
+def main():
+    """Run all the automated tests, saving intermediate files, and producing
+    a summary/report of the results.
+
+    Script should be run from the top-level CameraITS directory.
+    """
+
+    SKIP_RET_CODE = 101
+
+    # Not yet mandated tests
+    NOT_YET_MANDATED = {
+        "scene0":[
+            "test_jitter"
+        ],
+        "scene1":[
+            "test_ae_precapture_trigger",
+            "test_black_white",
+            "test_crop_region_raw",
+            "test_ev_compensation_advanced",
+            "test_ev_compensation_basic",
+            "test_locked_burst",
+            "test_yuv_plus_jpeg"
+        ]
+    }
+
+    # Get all the scene0 and scene1 tests, which can be run using the same
+    # physical setup.
+    scenes = ["scene0", "scene1"]
+    tests = []
+    for d in scenes:
+        tests += [(d,s[:-3],os.path.join("tests", d, s))
+                  for s in os.listdir(os.path.join("tests",d))
+                  if s[-3:] == ".py"]
+    tests.sort()
+
+    # Make output directories to hold the generated files.
+    topdir = tempfile.mkdtemp()
+    for d in scenes:
+        os.mkdir(os.path.join(topdir, d))
+    print "Saving output files to:", topdir, "\n"
+
+    # determine camera id
+    camera_id = 0
+    for s in sys.argv[1:]:
+        if s[:7] == "camera=" and len(s) > 7:
+            camera_id = s[7:]
+
+    # Run each test, capturing stdout and stderr.
+    numpass = 0
+    numskip = 0
+    numnotmandatedfail = 0
+    numfail = 0
+    for (scene,testname,testpath) in tests:
+        cmd = ['python', os.path.join(os.getcwd(),testpath)] + sys.argv[1:]
+        outdir = os.path.join(topdir,scene)
+        outpath = os.path.join(outdir,testname+"_stdout.txt")
+        errpath = os.path.join(outdir,testname+"_stderr.txt")
+        t0 = time.time()
+        with open(outpath,"w") as fout, open(errpath,"w") as ferr:
+            retcode = subprocess.call(cmd,stderr=ferr,stdout=fout,cwd=outdir)
+        t1 = time.time()
+
+        if retcode == 0:
+            retstr = "PASS "
+            numpass += 1
+        elif retcode == SKIP_RET_CODE:
+            retstr = "SKIP "
+            numskip += 1
+        elif retcode != 0 and testname in NOT_YET_MANDATED[scene]:
+            retstr = "FAIL*"
+            numnotmandatedfail += 1
+        else:
+            retstr = "FAIL "
+            numfail += 1
+
+        print "%s %s/%s [%.1fs]" % (retstr, scene, testname, t1-t0)
+
+    if numskip > 0:
+        skipstr = ", %d test%s skipped" % (numskip, "s" if numskip > 1 else "")
+    else:
+        skipstr = ""
+
+    print "\n%d / %d tests passed (%.1f%%)%s" % (
+            numpass + numnotmandatedfail, len(tests) - numskip,
+            100.0 * float(numpass + numnotmandatedfail) / (len(tests) - numskip)
+                if len(tests) != numskip else 100.0,
+            skipstr)
+
+    if numnotmandatedfail > 0:
+        print "(*) tests are not yet mandated"
+
+    its.device.report_result(camera_id, numfail == 0)
+
+if __name__ == '__main__':
+    main()
+
diff --git a/apps/CtsVerifier/Android.mk b/apps/CtsVerifier/Android.mk
index 460b88a..e370c81 100644
--- a/apps/CtsVerifier/Android.mk
+++ b/apps/CtsVerifier/Android.mk
@@ -25,7 +25,7 @@
 
 LOCAL_SRC_FILES := $(call all-java-files-under, src) $(call all-Iaidl-files-under, src)
 
-LOCAL_STATIC_JAVA_LIBRARIES := cts-sensors-tests ctstestrunner
+LOCAL_STATIC_JAVA_LIBRARIES := cts-sensors-tests ctstestrunner android-ex-camera2
 
 LOCAL_PACKAGE_NAME := CtsVerifier
 
@@ -78,12 +78,15 @@
 ifeq ($(HOST_OS),linux)
 $(verifier-zip) : $(HOST_OUT)/bin/cts-usb-accessory
 endif
+$(verifier-zip) : $(HOST_OUT)/CameraITS
+
 $(verifier-zip) : $(call intermediates-dir-for,APPS,CtsVerifier)/package.apk | $(ACP)
 		$(hide) mkdir -p $(verifier-dir)
 		$(hide) $(ACP) -fp $< $(verifier-dir)/CtsVerifier.apk
 ifeq ($(HOST_OS),linux)
 		$(hide) $(ACP) -fp $(HOST_OUT)/bin/cts-usb-accessory $(verifier-dir)/cts-usb-accessory
 endif
+		$(hide) $(ACP) -fpr $(HOST_OUT)/CameraITS $(verifier-dir)
 		$(hide) cd $(cts-dir) && zip -rq $(verifier-dir-name) $(verifier-dir-name)
 
 ifneq ($(filter cts, $(MAKECMDGOALS)),)
diff --git a/apps/CtsVerifier/AndroidManifest.xml b/apps/CtsVerifier/AndroidManifest.xml
index b6abe30..98044e4 100644
--- a/apps/CtsVerifier/AndroidManifest.xml
+++ b/apps/CtsVerifier/AndroidManifest.xml
@@ -17,12 +17,13 @@
 
 <manifest xmlns:android="http://schemas.android.com/apk/res/android"
       package="com.android.cts.verifier"
-      android:versionCode="4"
-      android:versionName="5.0_r1">
+      android:versionCode="5"
+      android:versionName="5.0_r1.91">
 
     <uses-sdk android:minSdkVersion="19" android:targetSdkVersion="21"/>
 
     <uses-permission android:name="android.permission.ACCESS_FINE_LOCATION" />
+    <uses-permission android:name="android.permission.ACCESS_NETWORK_STATE" />
     <uses-permission android:name="android.permission.ACCESS_WIFI_STATE" />
     <uses-permission android:name="android.permission.BLUETOOTH" />
     <uses-permission android:name="android.permission.BLUETOOTH_ADMIN" />
@@ -34,6 +35,11 @@
     <uses-permission android:name="android.permission.INTERNET" />
     <uses-permission android:name="android.permission.NFC" />
     <uses-permission android:name="android.permission.VIBRATE" />
+    <uses-feature android:name="android.hardware.camera" android:required="false"/>
+    <uses-feature android:name="android.hardware.camera.flash" android:required="false"/>
+    <uses-feature android:name="android.hardware.sensor.accelerometer" android:required="false" />
+    <uses-feature android:name="android.hardware.sensor.compass" android:required="false" />
+    <uses-feature android:name="android.hardware.sensor.gyroscope" android:required="false" />
     <uses-feature android:name="android.hardware.camera.front"
                   android:required="false" />
     <uses-feature android:name="android.hardware.camera.autofocus"
@@ -717,6 +723,15 @@
             <meta-data android:name="android.nfc.cardemulation.host_apdu_service" android:resource="@xml/access_prefix_aid_list"/>
         </service>
 
+        <!-- Service used for Camera ITS tests -->
+        <service android:name=".camera.its.ItsService" >
+            <intent-filter>
+                <action android:name="com.android.cts.verifier.camera.its.START"/>
+                <category android:name="android.intent.category.DEFAULT" />
+                <data android:mimeType="text/plain" />
+            </intent-filter>
+        </service>
+
         <!--
             A DeviceAdmin receiver for sensor tests, it allows sensor tests to turn off the screen.
         -->
@@ -1040,6 +1055,17 @@
                     android:value="android.hardware.camera.any"/>
         </activity>
 
+        <activity android:name=".camera.its.ItsTestActivity"
+                  android:label="@string/camera_its_test"
+                  android:configChanges="keyboardHidden|orientation|screenSize">
+            <intent-filter>
+                <action android:name="android.intent.action.MAIN" />
+                <category android:name="android.cts.intent.category.MANUAL_TEST" />
+            </intent-filter>
+            <meta-data android:name="test_category" android:value="@string/test_category_camera" />
+            <meta-data android:name="test_required_features" android:value="android.hardware.camera.any" />
+        </activity>
+
         <activity android:name=".usb.UsbAccessoryTestActivity"
                 android:label="@string/usb_accessory_test"
                 android:configChanges="keyboardHidden|orientation|screenSize">
@@ -1085,7 +1111,7 @@
             <meta-data android:name="test_category" android:value="@string/test_category_notifications" />
         </activity>
 
-        <activity android:name=".notifications.NotificationAttentionManagementVerifierActivity"
+        <activity android:name=".notifications.AttentionManagementVerifierActivity"
                 android:label="@string/attention_test">
             <intent-filter>
                 <action android:name="android.intent.action.MAIN" />
@@ -1103,7 +1129,8 @@
             </intent-filter>
         </service>
 
-        <service  android:name=".notifications.NotificationListenerVerifierActivity$DismissService"/>
+        <service  android:name=".notifications.InteractiveVerifierActivity$DismissService"/>
+
         <activity android:name=".security.CAInstallNotificationVerifierActivity"
                 android:label="@string/cacert_test">
             <intent-filter>
@@ -1419,6 +1446,9 @@
         <service android:name=".jobscheduler.MockJobService"
             android:permission="android.permission.BIND_JOB_SERVICE"/>
 
+        <!-- Used by the SensorTestScreenManipulator to reset the screen timeout after turn off. -->
+        <activity android:name=".os.TimeoutResetActivity"/>
+
     </application>
 
 </manifest>
diff --git a/apps/CtsVerifier/res/drawable-hdpi/fs_clock.png b/apps/CtsVerifier/res/drawable-hdpi/fs_clock.png
new file mode 100644
index 0000000..209d78e
--- /dev/null
+++ b/apps/CtsVerifier/res/drawable-hdpi/fs_clock.png
Binary files differ
diff --git a/apps/CtsVerifier/res/drawable-hdpi/ic_stat_alice.png b/apps/CtsVerifier/res/drawable-hdpi/ic_stat_alice.png
new file mode 100644
index 0000000..e4eea4b
--- /dev/null
+++ b/apps/CtsVerifier/res/drawable-hdpi/ic_stat_alice.png
Binary files differ
diff --git a/apps/CtsVerifier/res/drawable-hdpi/ic_stat_bob.png b/apps/CtsVerifier/res/drawable-hdpi/ic_stat_bob.png
new file mode 100644
index 0000000..c67ff4f
--- /dev/null
+++ b/apps/CtsVerifier/res/drawable-hdpi/ic_stat_bob.png
Binary files differ
diff --git a/apps/CtsVerifier/res/drawable-hdpi/ic_stat_charlie.png b/apps/CtsVerifier/res/drawable-hdpi/ic_stat_charlie.png
new file mode 100644
index 0000000..71afa3e
--- /dev/null
+++ b/apps/CtsVerifier/res/drawable-hdpi/ic_stat_charlie.png
Binary files differ
diff --git a/apps/CtsVerifier/res/drawable-mdpi/fs_clock.png b/apps/CtsVerifier/res/drawable-mdpi/fs_clock.png
new file mode 100644
index 0000000..209d78e
--- /dev/null
+++ b/apps/CtsVerifier/res/drawable-mdpi/fs_clock.png
Binary files differ
diff --git a/apps/CtsVerifier/res/drawable-mdpi/ic_stat_alice.png b/apps/CtsVerifier/res/drawable-mdpi/ic_stat_alice.png
new file mode 100644
index 0000000..3717827
--- /dev/null
+++ b/apps/CtsVerifier/res/drawable-mdpi/ic_stat_alice.png
Binary files differ
diff --git a/apps/CtsVerifier/res/drawable-mdpi/ic_stat_bob.png b/apps/CtsVerifier/res/drawable-mdpi/ic_stat_bob.png
new file mode 100644
index 0000000..f266312
--- /dev/null
+++ b/apps/CtsVerifier/res/drawable-mdpi/ic_stat_bob.png
Binary files differ
diff --git a/apps/CtsVerifier/res/drawable-mdpi/ic_stat_charlie.png b/apps/CtsVerifier/res/drawable-mdpi/ic_stat_charlie.png
new file mode 100644
index 0000000..49c4b9a
--- /dev/null
+++ b/apps/CtsVerifier/res/drawable-mdpi/ic_stat_charlie.png
Binary files differ
diff --git a/apps/CtsVerifier/res/layout-land/sensor_test.xml b/apps/CtsVerifier/res/layout-land/sensor_test.xml
index 293b4b0..f547978 100644
--- a/apps/CtsVerifier/res/layout-land/sensor_test.xml
+++ b/apps/CtsVerifier/res/layout-land/sensor_test.xml
@@ -13,41 +13,46 @@
      See the License for the specific language governing permissions and
      limitations under the License.
 -->
-<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
-        android:orientation="vertical"
-        android:layout_width="match_parent"
-        android:layout_height="match_parent"
-        >
-
-    <LinearLayout
-            android:orientation="horizontal"
+<android.support.wearable.view.BoxInsetLayout xmlns:android="http://schemas.android.com/apk/res/android"
+    xmlns:app="http://schemas.android.com/apk/res-auto"
+    android:layout_width="match_parent"
+    android:layout_height="match_parent">
+    <LinearLayout app:layout_box="all"
+            android:orientation="vertical"
             android:layout_width="match_parent"
-            android:layout_height="0dp"
-            android:layout_weight="1">
+            android:layout_height="match_parent"
+            >
 
-        <ScrollView
-                android:id="@+id/log_scroll_view"
-                android:fillViewport="true"
-                android:layout_height="match_parent"
-                android:layout_width="0dp"
+        <LinearLayout
+                android:orientation="horizontal"
+                android:layout_width="match_parent"
+                android:layout_height="0dp"
                 android:layout_weight="1">
 
-            <LinearLayout
-                    android:id="@+id/log_layout"
-                    android:orientation="vertical"
-                    android:layout_width="match_parent"
-                    android:layout_height="match_parent"/>
+            <ScrollView
+                    android:id="@+id/log_scroll_view"
+                    android:fillViewport="true"
+                    android:layout_height="match_parent"
+                    android:layout_width="0dp"
+                    android:layout_weight="1">
 
-        </ScrollView>
+                <LinearLayout
+                        android:id="@+id/log_layout"
+                        android:orientation="vertical"
+                        android:layout_width="match_parent"
+                        android:layout_height="match_parent"/>
 
-        <android.opengl.GLSurfaceView android:id="@+id/gl_surface_view"
-                android:visibility="gone"
-                android:layout_width="0dp"
-                android:layout_height="match_parent"
-                android:layout_weight="1"/>
+            </ScrollView>
+
+            <android.opengl.GLSurfaceView android:id="@+id/gl_surface_view"
+                    android:visibility="gone"
+                    android:layout_width="0dp"
+                    android:layout_height="match_parent"
+                    android:layout_weight="1"/>
+
+        </LinearLayout>
+
+        <include layout="@layout/snsr_next_button" />
 
     </LinearLayout>
-
-    <include layout="@layout/snsr_next_button" />
-
-</LinearLayout>
+</android.support.wearable.view.BoxInsetLayout>
diff --git a/apps/CtsVerifier/res/layout-port/sensor_test.xml b/apps/CtsVerifier/res/layout-port/sensor_test.xml
index eac5357..b4eca4d 100644
--- a/apps/CtsVerifier/res/layout-port/sensor_test.xml
+++ b/apps/CtsVerifier/res/layout-port/sensor_test.xml
@@ -13,30 +13,35 @@
      See the License for the specific language governing permissions and
      limitations under the License.
 -->
-<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
-        android:orientation="vertical"
-        android:layout_width="match_parent"
-        android:layout_height="match_parent">
+<android.support.wearable.view.BoxInsetLayout xmlns:android="http://schemas.android.com/apk/res/android"
+    xmlns:app="http://schemas.android.com/apk/res-auto"
+    android:layout_width="match_parent"
+    android:layout_height="match_parent">
+    <LinearLayout app:layout_box="all"
+            android:orientation="vertical"
+            android:layout_width="match_parent"
+            android:layout_height="match_parent">
 
-    <ScrollView android:id="@+id/log_scroll_view"
-            android:fillViewport="true"
-            android:layout_height="0dp"
-            android:layout_weight="1"
-            android:layout_width="match_parent">
+        <ScrollView android:id="@+id/log_scroll_view"
+                android:fillViewport="true"
+                android:layout_height="0dp"
+                android:layout_weight="1"
+                android:layout_width="match_parent">
 
-        <LinearLayout android:id="@+id/log_layout"
-                android:orientation="vertical"
-                android:layout_height="match_parent"
+            <LinearLayout android:id="@+id/log_layout"
+                    android:orientation="vertical"
+                    android:layout_height="match_parent"
+                    android:layout_width="match_parent"/>
+
+        </ScrollView>
+
+        <android.opengl.GLSurfaceView android:id="@+id/gl_surface_view"
+                android:visibility="gone"
+                android:layout_height="0dp"
+                android:layout_weight="1"
                 android:layout_width="match_parent"/>
 
-    </ScrollView>
+        <include layout="@layout/snsr_next_button"/>
 
-    <android.opengl.GLSurfaceView android:id="@+id/gl_surface_view"
-            android:visibility="gone"
-            android:layout_height="0dp"
-            android:layout_weight="1"
-            android:layout_width="match_parent"/>
-
-    <include layout="@layout/snsr_next_button"/>
-
-</LinearLayout>
+    </LinearLayout>
+</android.support.wearable.view.BoxInsetLayout>
diff --git a/apps/CtsVerifier/res/layout/ble_advertiser_hardware_scan_filter.xml b/apps/CtsVerifier/res/layout/ble_advertiser_hardware_scan_filter.xml
index ce3e1e1..a545727 100644
--- a/apps/CtsVerifier/res/layout/ble_advertiser_hardware_scan_filter.xml
+++ b/apps/CtsVerifier/res/layout/ble_advertiser_hardware_scan_filter.xml
@@ -19,63 +19,56 @@
         android:orientation="vertical"
         android:padding="10dip"
         >
-
-    <LinearLayout android:orientation="vertical"
+    <ScrollView xmlns:android="http://schemas.android.com/apk/res/android"
             android:layout_width="match_parent"
             android:layout_height="wrap_content"
-            android:layout_centerInParent="true"
-            >
-        <TextView android:text="@string/ble_advertiser_scannable"
-                android:layout_width="wrap_content"
-                android:layout_height="wrap_content"
-        />
-        <TextView android:text="@string/ble_advertiser_scannable_instruction"
-                android:layout_width="wrap_content"
-                android:layout_height="wrap_content"
-        />
-        <LinearLayout android:orientation="horizontal"
-                android:layout_width="match_parent"
-                android:layout_height="wrap_content"
-                >
-            <Button android:id="@+id/ble_advertiser_scannable_start"
-                    android:layout_width="wrap_content"
-                    android:layout_height="wrap_content"
-                    android:text="@string/ble_advertiser_start"
-                    />
-            <Button android:id="@+id/ble_advertiser_scannable_stop"
-                    android:layout_width="wrap_content"
-                    android:layout_height="wrap_content"
-                    android:text="@string/ble_advertiser_stop"
-                    />
-        </LinearLayout>
-        <TextView android:text="@string/ble_advertiser_unscannable"
-                android:layout_width="wrap_content"
-                android:layout_height="wrap_content"
-        />
-        <TextView android:text="@string/ble_advertiser_unscannable_instruction"
-                android:layout_width="wrap_content"
-                android:layout_height="wrap_content"
-        />
-        <LinearLayout android:orientation="horizontal"
-                android:layout_width="match_parent"
-                android:layout_height="wrap_content"
-                >
-            <Button android:id="@+id/ble_advertiser_unscannable_start"
-                    android:layout_width="wrap_content"
-                    android:layout_height="wrap_content"
-                    android:text="@string/ble_advertiser_start"
-                    />
-            <Button android:id="@+id/ble_advertiser_unscannable_stop"
-                    android:layout_width="wrap_content"
-                    android:layout_height="wrap_content"
-                    android:text="@string/ble_advertiser_stop"
-                    />
-        </LinearLayout>
-    </LinearLayout>
+            android:scrollbars="vertical">
 
-    <include android:layout_width="match_parent"
-            android:layout_height="wrap_content"
-            android:layout_alignParentBottom="true"
-            layout="@layout/pass_fail_buttons"
-            />
+        <LinearLayout android:orientation="vertical"
+                android:layout_width="match_parent"
+                android:layout_height="wrap_content"
+                android:layout_centerInParent="true">
+            <TextView android:text="@string/ble_advertiser_scannable"
+                    android:layout_width="wrap_content"
+                    android:layout_height="wrap_content"/>
+            <TextView android:text="@string/ble_advertiser_scannable_instruction"
+                    android:layout_width="wrap_content"
+                    android:layout_height="wrap_content"/>
+            <LinearLayout android:orientation="horizontal"
+                    android:layout_width="match_parent"
+                    android:layout_height="wrap_content">
+                <Button android:id="@+id/ble_advertiser_scannable_start"
+                        android:layout_width="wrap_content"
+                        android:layout_height="wrap_content"
+                        android:text="@string/ble_advertiser_start"/>
+                <Button android:id="@+id/ble_advertiser_scannable_stop"
+                        android:layout_width="wrap_content"
+                        android:layout_height="wrap_content"
+                        android:text="@string/ble_advertiser_stop"/>
+            </LinearLayout>
+            <TextView android:text="@string/ble_advertiser_unscannable"
+                    android:layout_width="wrap_content"
+                    android:layout_height="wrap_content"/>
+            <TextView android:text="@string/ble_advertiser_unscannable_instruction"
+                    android:layout_width="wrap_content"
+                    android:layout_height="wrap_content"/>
+            <LinearLayout android:orientation="horizontal"
+                    android:layout_width="match_parent"
+                    android:layout_height="wrap_content">
+                <Button android:id="@+id/ble_advertiser_unscannable_start"
+                        android:layout_width="wrap_content"
+                        android:layout_height="wrap_content"
+                        android:text="@string/ble_advertiser_start"/>
+                <Button android:id="@+id/ble_advertiser_unscannable_stop"
+                        android:layout_width="wrap_content"
+                        android:layout_height="wrap_content"
+                        android:text="@string/ble_advertiser_stop"/>
+            </LinearLayout>
+
+            <include android:layout_width="match_parent"
+                    android:layout_height="wrap_content"
+                    android:layout_alignParentBottom="true"
+                    layout="@layout/pass_fail_buttons"/>
+        </LinearLayout>
+    </ScrollView>
 </RelativeLayout>
diff --git a/apps/CtsVerifier/res/layout/ble_advertiser_power_level.xml b/apps/CtsVerifier/res/layout/ble_advertiser_power_level.xml
index ec3284d..c8e0133 100644
--- a/apps/CtsVerifier/res/layout/ble_advertiser_power_level.xml
+++ b/apps/CtsVerifier/res/layout/ble_advertiser_power_level.xml
@@ -19,31 +19,34 @@
         android:orientation="vertical"
         android:padding="10dip"
         >
-
-    <TextView android:text="@string/ble_advertiser_power_level_instruction"
-            android:layout_width="wrap_content"
-            android:layout_height="wrap_content"
-    />
-    <LinearLayout android:orientation="horizontal"
+    <ScrollView xmlns:android="http://schemas.android.com/apk/res/android"
+            android:layout_width="fill_parent"
+            android:layout_height="wrap_content">
+        <LinearLayout android:orientation="vertical"
             android:layout_width="match_parent"
-            android:layout_height="wrap_content"
-            android:layout_centerInParent="true"
-            >
-        <Button android:id="@+id/ble_power_level_start"
-                android:layout_width="wrap_content"
-                android:layout_height="wrap_content"
-                android:text="@string/ble_advertiser_start"
-                />
-        <Button android:id="@+id/ble_power_level_stop"
-                android:layout_width="wrap_content"
-                android:layout_height="wrap_content"
-                android:text="@string/ble_advertiser_stop"
-                />
-    </LinearLayout>
+            android:layout_height="wrap_content">
+            <TextView android:text="@string/ble_advertiser_power_level_instruction"
+                    android:id="@+id/ble_advertiser_power_level_instruction"
+                    android:layout_width="wrap_content"
+                    android:layout_height="wrap_content"
+                    android:scrollbars="vertical"/>
+            <LinearLayout android:orientation="horizontal"
+                    android:layout_below="@+id/ble_advertiser_power_level_instruction"
+                    android:layout_width="match_parent"
+                    android:layout_height="wrap_content">
+                <Button android:id="@+id/ble_power_level_start"
+                        android:layout_width="wrap_content"
+                        android:layout_height="wrap_content"
+                        android:text="@string/ble_advertiser_start"/>
+                <Button android:id="@+id/ble_power_level_stop"
+                        android:layout_width="wrap_content"
+                        android:layout_height="wrap_content"
+                        android:text="@string/ble_advertiser_stop"/>
+            </LinearLayout>
 
-    <include android:layout_width="match_parent"
-            android:layout_height="wrap_content"
-            android:layout_alignParentBottom="true"
-            layout="@layout/pass_fail_buttons"
-            />
+            <include android:layout_width="match_parent"
+                    android:layout_height="wrap_content"
+                    layout="@layout/pass_fail_buttons"/>
+        </LinearLayout>
+    </ScrollView>
 </RelativeLayout>
diff --git a/apps/CtsVerifier/res/layout/ble_client_connect.xml b/apps/CtsVerifier/res/layout/ble_client_connect.xml
index 54a0a99..30b4edb 100644
--- a/apps/CtsVerifier/res/layout/ble_client_connect.xml
+++ b/apps/CtsVerifier/res/layout/ble_client_connect.xml
@@ -20,22 +20,19 @@
         android:padding="10dip"
         >
 
-    <LinearLayout android:orientation="horizontal"
+    <LinearLayout android:orientation="vertical"
             android:layout_width="match_parent"
             android:layout_height="wrap_content"
             android:layout_centerInParent="true"
             >
-        <EditText android:id="@+id/ble_address"
-                android:layout_weight="1"
-                android:layout_width="0dp"
-                android:layout_height="wrap_content"
-                android:hint="@string/ble_address"
-                />
-        <Button android:id="@+id/ble_connect"
+        <Button android:id="@+id/ble_scan_start"
                 android:layout_width="wrap_content"
                 android:layout_height="wrap_content"
-                android:text="@string/ble_connect"
-                />
+                android:text="@string/ble_scan_start"/>
+        <Button android:id="@+id/ble_scan_stop"
+                android:layout_width="wrap_content"
+                android:layout_height="wrap_content"
+                android:text="@string/ble_scan_stop"/>
     </LinearLayout>
 
     <include android:layout_width="match_parent"
@@ -43,4 +40,4 @@
             android:layout_alignParentBottom="true"
             layout="@layout/pass_fail_buttons"
             />
-</RelativeLayout>
\ No newline at end of file
+</RelativeLayout>
diff --git a/apps/CtsVerifier/res/layout/ble_client_read_write.xml b/apps/CtsVerifier/res/layout/ble_client_read_write.xml
index 7edba62..a263916 100644
--- a/apps/CtsVerifier/res/layout/ble_client_read_write.xml
+++ b/apps/CtsVerifier/res/layout/ble_client_read_write.xml
@@ -32,6 +32,7 @@
                     android:layout_width="0dp"
                     android:layout_weight="1"
                     android:layout_height="wrap_content"
+                    android:text="@string/ble_test_text"
                     android:hint="@string/ble_write_hint"
                     android:padding="10dip"
                     />
@@ -67,4 +68,4 @@
             android:layout_alignParentBottom="true"
             layout="@layout/pass_fail_buttons"
             />
-</RelativeLayout>
\ No newline at end of file
+</RelativeLayout>
diff --git a/apps/CtsVerifier/res/layout/ble_reliable_write.xml b/apps/CtsVerifier/res/layout/ble_reliable_write.xml
index 7db78ff..05b1812 100644
--- a/apps/CtsVerifier/res/layout/ble_reliable_write.xml
+++ b/apps/CtsVerifier/res/layout/ble_reliable_write.xml
@@ -27,6 +27,7 @@
         <EditText android:id="@+id/write_text"
                 android:layout_width="match_parent"
                 android:layout_height="wrap_content"
+                android:text="@string/ble_test_text"
                 android:hint="@string/ble_write_hint"
                 android:padding="5dip"
                 />
@@ -60,4 +61,4 @@
             android:layout_alignParentBottom="true"
             layout="@layout/pass_fail_buttons"
             />
-</RelativeLayout>
\ No newline at end of file
+</RelativeLayout>
diff --git a/apps/CtsVerifier/res/layout/ble_scanner_hardware_scan_filter.xml b/apps/CtsVerifier/res/layout/ble_scanner_hardware_scan_filter.xml
index f356ded..dabd640 100644
--- a/apps/CtsVerifier/res/layout/ble_scanner_hardware_scan_filter.xml
+++ b/apps/CtsVerifier/res/layout/ble_scanner_hardware_scan_filter.xml
@@ -17,40 +17,41 @@
         android:layout_width="match_parent"
         android:layout_height="match_parent"
         android:orientation="vertical"
-        android:padding="10dip"
-        >
-    <TextView android:text="@string/ble_scanner_scan_filter_instruction"
+        android:padding="10dip">
+    <ScrollView xmlns:android="http://schemas.android.com/apk/res/android"
             android:layout_width="wrap_content"
-            android:layout_height="wrap_content"
-    />
-    <LinearLayout android:orientation="vertical"
-            android:layout_width="match_parent"
-            android:layout_height="wrap_content"
-            android:layout_centerInParent="true"
-            >
-        <LinearLayout android:orientation="horizontal"
-                android:layout_width="match_parent"
-                android:layout_height="wrap_content"
-                >
-            <Button android:id="@+id/ble_scan_with_filter"
+            android:layout_height="wrap_content">
+        <LinearLayout android:orientation="vertical"
+                android:layout_width="wrap_content"
+                android:layout_height="wrap_content">
+            <TextView android:text="@string/ble_scanner_scan_filter_instruction"
                     android:layout_width="wrap_content"
+                    android:layout_height="wrap_content"/>
+            <LinearLayout android:orientation="vertical"
+                    android:layout_width="match_parent"
                     android:layout_height="wrap_content"
-                    android:text="@string/ble_scan_with_filter"
-                    />
-            <Button android:id="@+id/ble_scan_without_filter"
-                    android:layout_width="wrap_content"
+                    android:layout_centerInParent="true">
+                <LinearLayout android:orientation="vertical"
+                        android:layout_width="match_parent"
+                        android:layout_height="wrap_content">
+                    <Button android:id="@+id/ble_scan_with_filter"
+                            android:layout_width="wrap_content"
+                            android:layout_height="wrap_content"
+                            android:text="@string/ble_scan_with_filter"/>
+                    <Button android:id="@+id/ble_scan_without_filter"
+                            android:layout_width="wrap_content"
+                            android:layout_height="wrap_content"
+                            android:text="@string/ble_scan_without_filter"/>
+                </LinearLayout>
+                <ListView android:id="@+id/ble_scan_result_list"
+                        android:layout_height="wrap_content"
+                        android:layout_width="match_parent">
+                </ListView>
+            </LinearLayout>
+            <include android:layout_width="match_parent"
                     android:layout_height="wrap_content"
-                    android:text="@string/ble_scan_without_filter"
-                    />
+                    android:layout_alignParentBottom="true"
+                    layout="@layout/pass_fail_buttons"/>
         </LinearLayout>
-        <ListView android:id="@+id/ble_scan_result_list"
-                android:layout_height="wrap_content"
-                android:layout_width="match_parent">
-        </ListView>
-    </LinearLayout>
-    <include android:layout_width="match_parent"
-            android:layout_height="wrap_content"
-            android:layout_alignParentBottom="true"
-            layout="@layout/pass_fail_buttons"
-    />
+    </ScrollView>
 </RelativeLayout>
diff --git a/apps/CtsVerifier/res/layout/ble_scanner_power_level.xml b/apps/CtsVerifier/res/layout/ble_scanner_power_level.xml
index b240db6..c24dbb4 100644
--- a/apps/CtsVerifier/res/layout/ble_scanner_power_level.xml
+++ b/apps/CtsVerifier/res/layout/ble_scanner_power_level.xml
@@ -19,154 +19,139 @@
         android:orientation="vertical"
         android:padding="10dip"
         >
-    <TextView android:text="@string/ble_scanner_power_level_instruction"
+    <ScrollView xmlns:android="http://schemas.android.com/apk/res/android"
             android:layout_width="wrap_content"
-            android:layout_height="wrap_content"
-            android:id="@+id/ble_scanner_power_level_instruction"
-    />
-    <LinearLayout android:orientation="vertical"
-            android:layout_width="match_parent"
-            android:layout_height="wrap_content"
-            android:layout_below="@+id/ble_scanner_power_level_instruction"
-            android:layout_centerInParent="true"
-            android:padding="10dp"
-            >
-        <LinearLayout android:orientation="horizontal"
-                android:layout_width="match_parent"
-                android:layout_height="wrap_content"
-                android:layout_centerInParent="true"
-                >
-            <TextView android:text="@string/ble_ultra_low"
-                  android:layout_width="100dp"
-                  android:layout_height="wrap_content"
-            />
-            <TextView android:id="@+id/ble_ultra_low_mac"
-                  android:layout_width="200dp"
-                  android:layout_height="wrap_content"
-            />
-            <TextView android:id="@+id/ble_ultra_low_rssi"
-                  android:layout_width="100dp"
-                  android:layout_height="wrap_content"
-            />
-        </LinearLayout>
-        <LinearLayout android:orientation="horizontal"
+            android:layout_height="wrap_content">
+        <LinearLayout android:orientation="vertical"
                 android:layout_width="match_parent"
                 android:layout_height="wrap_content">
-            <TextView android:layout_width="100dp"
-                  android:layout_height="wrap_content"/>
-            <TextView android:id="@+id/ble_ultra_low_count"
-                    android:layout_width="100dp"
+            <TextView android:text="@string/ble_scanner_power_level_instruction"
+                    android:layout_width="wrap_content"
                     android:layout_height="wrap_content"
+                    android:id="@+id/ble_scanner_power_level_instruction"
             />
-            <TextView android:id="@+id/ble_ultra_low_set_power"
-                    android:layout_width="100dp"
+            <HorizontalScrollView
+                    android:layout_width="wrap_content"
+                    android:layout_height="wrap_content">
+                <LinearLayout android:orientation="vertical"
+                        android:layout_width="match_parent"
+                        android:layout_height="wrap_content"
+                        android:layout_below="@+id/ble_scanner_power_level_instruction"
+                        android:layout_centerInParent="true"
+                        android:padding="10dp">
+                    <LinearLayout android:orientation="horizontal"
+                            android:layout_width="match_parent"
+                            android:layout_height="wrap_content"
+                            android:layout_centerInParent="true">
+                        <TextView android:text="@string/ble_ultra_low"
+                                android:layout_width="100dp"
+                                android:layout_height="wrap_content"/>
+                        <TextView android:id="@+id/ble_ultra_low_mac"
+                                android:layout_width="200dp"
+                                android:layout_height="wrap_content"/>
+                        <TextView android:id="@+id/ble_ultra_low_rssi"
+                                android:layout_width="100dp"
+                                android:layout_height="wrap_content"/>
+                    </LinearLayout>
+                    <LinearLayout android:orientation="horizontal"
+                            android:layout_width="match_parent"
+                            android:layout_height="wrap_content">
+                        <TextView android:layout_width="100dp"
+                                android:layout_height="wrap_content"/>
+                        <TextView android:id="@+id/ble_ultra_low_count"
+                                android:layout_width="100dp"
+                                android:layout_height="wrap_content"/>
+                        <TextView android:id="@+id/ble_ultra_low_set_power"
+                                android:layout_width="100dp"
+                                android:layout_height="wrap_content"/>
+                    </LinearLayout>
+                    <LinearLayout android:orientation="horizontal"
+                            android:layout_width="match_parent"
+                            android:layout_height="wrap_content"
+                            android:layout_centerInParent="true">
+                        <TextView android:text="@string/ble_low"
+                                android:layout_width="100dp"
+                                android:layout_height="wrap_content"/>
+                        <TextView android:id="@+id/ble_low_mac"
+                                android:layout_width="200dp"
+                                android:layout_height="wrap_content"/>
+                        <TextView android:id="@+id/ble_low_rssi"
+                                android:layout_width="100dp"
+                                android:layout_height="wrap_content"/>
+                    </LinearLayout>
+                    <LinearLayout android:orientation="horizontal"
+                            android:layout_width="match_parent"
+                            android:layout_height="wrap_content">
+                        <TextView android:layout_width="100dp"
+                                android:layout_height="wrap_content"/>
+                        <TextView android:id="@+id/ble_low_count"
+                                android:layout_width="100dp"
+                                android:layout_height="wrap_content"/>
+                        <TextView android:id="@+id/ble_low_set_power"
+                                android:layout_width="100dp"
+                                android:layout_height="wrap_content"/>
+                    </LinearLayout>
+                    <LinearLayout android:orientation="horizontal"
+                            android:layout_width="match_parent"
+                            android:layout_height="wrap_content"
+                            android:layout_centerInParent="true">
+                        <TextView android:text="@string/ble_medium"
+                                android:layout_width="100dp"
+                                android:layout_height="wrap_content"/>
+                        <TextView android:id="@+id/ble_medium_mac"
+                                android:layout_width="200dp"
+                                android:layout_height="wrap_content"/>
+                        <TextView android:id="@+id/ble_medium_rssi"
+                                android:layout_width="100dp"
+                                android:layout_height="wrap_content"/>
+                    </LinearLayout>
+                    <LinearLayout android:orientation="horizontal"
+                            android:layout_width="match_parent"
+                            android:layout_height="wrap_content">
+                        <TextView android:layout_width="100dp"
+                                android:layout_height="wrap_content"/>
+                        <TextView android:id="@+id/ble_medium_count"
+                                android:layout_width="100dp"
+                                android:layout_height="wrap_content"/>
+                        <TextView android:id="@+id/ble_medium_set_power"
+                                android:layout_width="100dp"
+                                android:layout_height="wrap_content"/>
+                    </LinearLayout>
+                    <LinearLayout android:orientation="horizontal"
+                            android:layout_width="match_parent"
+                            android:layout_height="wrap_content"
+                            android:layout_centerInParent="true">
+                        <TextView android:text="@string/ble_high"
+                                android:layout_width="100dp"
+                                android:layout_height="wrap_content"/>
+                        <TextView android:id="@+id/ble_high_mac"
+                                android:layout_width="200dp"
+                                android:layout_height="wrap_content"/>
+                        <TextView android:id="@+id/ble_high_rssi"
+                                android:layout_width="100dp"
+                                android:layout_height="wrap_content"/>
+                    </LinearLayout>
+                    <LinearLayout android:orientation="horizontal"
+                            android:layout_width="match_parent"
+                            android:layout_height="wrap_content">
+                        <TextView android:layout_width="100dp"
+                                android:layout_height="wrap_content"/>
+                        <TextView android:id="@+id/ble_high_count"
+                                android:layout_width="100dp"
+                                android:layout_height="wrap_content"/>
+                        <TextView android:id="@+id/ble_high_set_power"
+                                android:layout_width="100dp"
+                                android:layout_height="wrap_content"/>
+                    </LinearLayout>
+                </LinearLayout>
+            </HorizontalScrollView>
+            <TextView android:id="@+id/ble_timer"
+                    android:layout_width="fill_parent"
+                    android:layout_height="wrap_content" />
+            <include android:layout_width="match_parent"
                     android:layout_height="wrap_content"
-            />
+                    android:layout_alignParentBottom="true"
+                    layout="@layout/pass_fail_buttons"/>
         </LinearLayout>
-        <LinearLayout android:orientation="horizontal"
-                android:layout_width="match_parent"
-                android:layout_height="wrap_content"
-                android:layout_centerInParent="true"
-                >
-            <TextView android:text="@string/ble_low"
-                  android:layout_width="100dp"
-                  android:layout_height="wrap_content"
-            />
-            <TextView android:id="@+id/ble_low_mac"
-                  android:layout_width="200dp"
-                  android:layout_height="wrap_content"
-            />
-            <TextView android:id="@+id/ble_low_rssi"
-                  android:layout_width="100dp"
-                  android:layout_height="wrap_content"
-            />
-        </LinearLayout>
-        <LinearLayout android:orientation="horizontal"
-                android:layout_width="match_parent"
-                android:layout_height="wrap_content">
-            <TextView android:layout_width="100dp"
-                  android:layout_height="wrap_content"/>
-            <TextView android:id="@+id/ble_low_count"
-                    android:layout_width="100dp"
-                    android:layout_height="wrap_content"
-            />
-            <TextView android:id="@+id/ble_low_set_power"
-                    android:layout_width="100dp"
-                    android:layout_height="wrap_content"
-            />
-        </LinearLayout>
-        <LinearLayout android:orientation="horizontal"
-                android:layout_width="match_parent"
-                android:layout_height="wrap_content"
-                android:layout_centerInParent="true"
-                >
-            <TextView android:text="@string/ble_medium"
-                  android:layout_width="100dp"
-                  android:layout_height="wrap_content"
-            />
-            <TextView android:id="@+id/ble_medium_mac"
-                  android:layout_width="200dp"
-                  android:layout_height="wrap_content"
-            />
-            <TextView android:id="@+id/ble_medium_rssi"
-                  android:layout_width="100dp"
-                  android:layout_height="wrap_content"
-            />
-        </LinearLayout>
-        <LinearLayout android:orientation="horizontal"
-                android:layout_width="match_parent"
-                android:layout_height="wrap_content">
-            <TextView android:layout_width="100dp"
-                  android:layout_height="wrap_content"/>
-            <TextView android:id="@+id/ble_medium_count"
-                    android:layout_width="100dp"
-                    android:layout_height="wrap_content"
-            />
-            <TextView android:id="@+id/ble_medium_set_power"
-                    android:layout_width="100dp"
-                    android:layout_height="wrap_content"
-            />
-        </LinearLayout>
-        <LinearLayout android:orientation="horizontal"
-                android:layout_width="match_parent"
-                android:layout_height="wrap_content"
-                android:layout_centerInParent="true"
-                >
-            <TextView android:text="@string/ble_high"
-                  android:layout_width="100dp"
-                  android:layout_height="wrap_content"
-            />
-            <TextView android:id="@+id/ble_high_mac"
-                  android:layout_width="200dp"
-                  android:layout_height="wrap_content"
-            />
-            <TextView android:id="@+id/ble_high_rssi"
-                  android:layout_width="100dp"
-                  android:layout_height="wrap_content"
-            />
-        </LinearLayout>
-        <LinearLayout android:orientation="horizontal"
-                android:layout_width="match_parent"
-                android:layout_height="wrap_content">
-            <TextView android:layout_width="100dp"
-                  android:layout_height="wrap_content"/>
-            <TextView android:id="@+id/ble_high_count"
-                    android:layout_width="100dp"
-                    android:layout_height="wrap_content"
-            />
-            <TextView android:id="@+id/ble_high_set_power"
-                    android:layout_width="100dp"
-                    android:layout_height="wrap_content"
-            />
-        </LinearLayout>
-        <TextView android:id="@+id/ble_timer"
-                android:layout_width="fill_parent"
-                android:layout_height="wrap_content" />
-    </LinearLayout>
-
-    <include android:layout_width="match_parent"
-            android:layout_height="wrap_content"
-            android:layout_alignParentBottom="true"
-            layout="@layout/pass_fail_buttons"
-            />
+    </ScrollView>
 </RelativeLayout>
diff --git a/apps/CtsVerifier/res/layout/bt_device_picker.xml b/apps/CtsVerifier/res/layout/bt_device_picker.xml
index ecca0e5..48a4b43 100644
--- a/apps/CtsVerifier/res/layout/bt_device_picker.xml
+++ b/apps/CtsVerifier/res/layout/bt_device_picker.xml
@@ -19,6 +19,13 @@
         android:orientation="vertical"
         >
 
+    <ProgressBar android:id="@+id/bt_progress_bar"
+            android:indeterminate="true"
+            android:layout_height="4dp"
+            android:layout_width="match_parent"
+            style="@android:style/Widget.DeviceDefault.ProgressBar.Horizontal"
+            />
+
     <TextView android:layout_width="match_parent"
             android:layout_height="wrap_content"
             android:text="@string/bt_paired_devices"
diff --git a/apps/CtsVerifier/res/layout/bt_messages.xml b/apps/CtsVerifier/res/layout/bt_messages.xml
index cb46811..1504431 100644
--- a/apps/CtsVerifier/res/layout/bt_messages.xml
+++ b/apps/CtsVerifier/res/layout/bt_messages.xml
@@ -18,6 +18,14 @@
         android:layout_width="match_parent"
         android:layout_height="match_parent"
         >
+
+    <ProgressBar android:id="@+id/bt_progress_bar"
+        android:indeterminate="true"
+        android:layout_height="4dp"
+        android:layout_width="match_parent"
+        style="@android:style/Widget.DeviceDefault.ProgressBar.Horizontal"
+        />
+
     <TextView android:layout_width="match_parent"
             android:layout_height="wrap_content"
             android:text="@string/bt_sent_messages"
diff --git a/apps/CtsVerifier/res/layout/ca_boot_notify.xml b/apps/CtsVerifier/res/layout/ca_boot_notify.xml
index e9309d4..0ceece1 100644
--- a/apps/CtsVerifier/res/layout/ca_boot_notify.xml
+++ b/apps/CtsVerifier/res/layout/ca_boot_notify.xml
@@ -14,56 +14,60 @@
      See the License for the specific language governing permissions and
      limitations under the License.
 -->
-
-<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
-  android:orientation="vertical" android:layout_width="fill_parent"
-  android:layout_height="fill_parent">
-
-  <ScrollView
+<android.support.wearable.view.BoxInsetLayout xmlns:android="http://schemas.android.com/apk/res/android"
+    xmlns:app="http://schemas.android.com/apk/res-auto"
     android:layout_width="match_parent"
-    android:layout_height="match_parent"
-    android:layout_alignParentTop="true" >
+    android:layout_height="match_parent">
+    <LinearLayout app:layout_box="all"
+      android:orientation="vertical" android:layout_width="fill_parent"
+      android:layout_height="fill_parent">
 
-    <LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
-      android:orientation="vertical"
-      android:layout_width="fill_parent"
-      android:layout_height="wrap_content">
+      <ScrollView
+        android:layout_width="match_parent"
+        android:layout_height="match_parent"
+        android:layout_alignParentTop="true" >
 
-      <TextView
-          android:id="@+id/check_cert_desc"
-          android:layout_width="wrap_content"
-          android:layout_height="wrap_content"
-          android:text="@string/caboot_check_cert_installed"/>
+        <LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
+          android:orientation="vertical"
+          android:layout_width="fill_parent"
+          android:layout_height="wrap_content">
 
-      <Button android:id="@+id/check_creds"
-          android:layout_width="wrap_content"
-          android:layout_height="wrap_content"
-          android:text="@string/caboot_check_creds" />
+          <TextView
+              android:id="@+id/check_cert_desc"
+              android:layout_width="wrap_content"
+              android:layout_height="wrap_content"
+              android:text="@string/caboot_check_cert_installed"/>
 
-      <TextView
-          android:id="@+id/need_to_install_cert"
-          android:layout_width="wrap_content"
-          android:layout_height="wrap_content"
-          android:text="@string/caboot_if_not_installed"/>
+          <Button android:id="@+id/check_creds"
+              android:layout_width="wrap_content"
+              android:layout_height="wrap_content"
+              android:text="@string/caboot_check_creds" />
 
-      <Button android:id="@+id/install"
-          android:layout_width="wrap_content"
-          android:layout_height="wrap_content"
-          android:text="@string/caboot_install_cert" />
+          <TextView
+              android:id="@+id/need_to_install_cert"
+              android:layout_width="wrap_content"
+              android:layout_height="wrap_content"
+              android:text="@string/caboot_if_not_installed"/>
 
-      <TextView
-          android:id="@+id/reboot"
-          android:layout_width="wrap_content"
-          android:layout_height="wrap_content"
-          android:text="@string/caboot_reboot_desc"/>
+          <Button android:id="@+id/install"
+              android:layout_width="wrap_content"
+              android:layout_height="wrap_content"
+              android:text="@string/caboot_install_cert" />
 
-      <TextView
-          android:id="@+id/after_reboot"
-          android:layout_width="wrap_content"
-          android:layout_height="wrap_content"
-          android:text="@string/caboot_after_boot"/>
+          <TextView
+              android:id="@+id/reboot"
+              android:layout_width="wrap_content"
+              android:layout_height="wrap_content"
+              android:text="@string/caboot_reboot_desc"/>
 
-      <include layout="@layout/pass_fail_buttons" />
+          <TextView
+              android:id="@+id/after_reboot"
+              android:layout_width="wrap_content"
+              android:layout_height="wrap_content"
+              android:text="@string/caboot_after_boot"/>
+
+          <include layout="@layout/pass_fail_buttons" />
+        </LinearLayout>
+      </ScrollView>
     </LinearLayout>
-  </ScrollView>
-</LinearLayout>
+</android.support.wearable.view.BoxInsetLayout>
diff --git a/apps/CtsVerifier/res/layout/ca_main.xml b/apps/CtsVerifier/res/layout/ca_main.xml
index 467ed01..274430d 100644
--- a/apps/CtsVerifier/res/layout/ca_main.xml
+++ b/apps/CtsVerifier/res/layout/ca_main.xml
@@ -14,65 +14,68 @@
      See the License for the specific language governing permissions and
      limitations under the License.
 -->
-
-<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
-  android:orientation="vertical" android:layout_width="fill_parent"
-  android:layout_height="fill_parent">
+<android.support.wearable.view.BoxInsetLayout xmlns:android="http://schemas.android.com/apk/res/android"
+    xmlns:app="http://schemas.android.com/apk/res-auto"
+    android:layout_width="match_parent"
+    android:layout_height="match_parent">
+    <LinearLayout app:layout_box="all"
+      android:orientation="vertical" android:layout_width="fill_parent"
+      android:layout_height="fill_parent">
 
 
-  <LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
-    android:orientation="horizontal" android:layout_width="fill_parent"
-    android:layout_height="wrap_content">
-    <!--Button android:id="@+id/focusmodesbutton" android:layout_width="0px"
-      android:layout_height="wrap_content" android:text="@string/ca_focus_modes_label"
-      android:layout_weight="1" /-->
-    <Button android:id="@+id/findcheckerboardbutton" android:layout_width="0px"
-      android:layout_height="wrap_content" android:text="@string/ca_find_checkerboard_label"
-      android:layout_weight="1" />
+      <LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
+        android:orientation="horizontal" android:layout_width="fill_parent"
+        android:layout_height="wrap_content">
+        <!--Button android:id="@+id/focusmodesbutton" android:layout_width="0px"
+          android:layout_height="wrap_content" android:text="@string/ca_focus_modes_label"
+          android:layout_weight="1" /-->
+        <Button android:id="@+id/findcheckerboardbutton" android:layout_width="0px"
+          android:layout_height="wrap_content" android:text="@string/ca_find_checkerboard_label"
+          android:layout_weight="1" />
 
-    <Button android:id="@+id/meteringbutton" android:layout_width="0px"
-      android:layout_height="wrap_content" android:text="@string/ca_metering_label"
-      android:layout_weight="1" />
+        <Button android:id="@+id/meteringbutton" android:layout_width="0px"
+          android:layout_height="wrap_content" android:text="@string/ca_metering_label"
+          android:layout_weight="1" />
 
-    <Button android:id="@+id/exposurecompensationbutton" android:layout_width="0px"
-      android:layout_height="wrap_content" android:text="@string/ca_exposure_test_label"
-      android:layout_weight="1"/>
+        <Button android:id="@+id/exposurecompensationbutton" android:layout_width="0px"
+          android:layout_height="wrap_content" android:text="@string/ca_exposure_test_label"
+          android:layout_weight="1"/>
 
-    <Button android:id="@+id/whitebalancebutton" android:layout_width="0px"
-      android:layout_height="wrap_content" android:text="@string/ca_wb_test_label"
-      android:layout_weight="1" />
+        <Button android:id="@+id/whitebalancebutton" android:layout_width="0px"
+          android:layout_height="wrap_content" android:text="@string/ca_wb_test_label"
+          android:layout_weight="1" />
 
-    <Button android:id="@+id/lockbutton" android:layout_width="0px"
-      android:layout_height="wrap_content" android:text="@string/ca_lock_test_label"
-      android:layout_weight="1" />
-  </LinearLayout>
+        <Button android:id="@+id/lockbutton" android:layout_width="0px"
+          android:layout_height="wrap_content" android:text="@string/ca_lock_test_label"
+          android:layout_weight="1" />
+      </LinearLayout>
 
-  <LinearLayout android:orientation="horizontal"
-    android:layout_width="fill_parent" android:layout_height="0px"
-    android:layout_weight="1">
+      <LinearLayout android:orientation="horizontal"
+        android:layout_width="fill_parent" android:layout_height="0px"
+        android:layout_weight="1">
 
-    <SurfaceView android:id="@+id/cameraview" android:layout_height="fill_parent"
-      android:layout_width="wrap_content"
-      android:layout_weight="0" />
+        <SurfaceView android:id="@+id/cameraview" android:layout_height="fill_parent"
+          android:layout_width="wrap_content"
+          android:layout_weight="0" />
 
-    <LinearLayout android:orientation="vertical"
-      android:layout_width="fill_parent" android:layout_height="match_parent"
-      android:layout_weight="1">
+        <LinearLayout android:orientation="vertical"
+          android:layout_width="fill_parent" android:layout_height="match_parent"
+          android:layout_weight="1">
 
-       <ListView android:id="@+id/ca_tests"
+           <ListView android:id="@+id/ca_tests"
+                android:layout_width="fill_parent"
+                android:layout_height="wrap_content"
+                android:layout_weight="1"
+                android:layout_marginLeft="10px"/>
+
+          <ImageView android:id="@+id/resultview" android:layout_height="wrap_content"
             android:layout_width="fill_parent"
-            android:layout_height="wrap_content"
-            android:layout_weight="1"
-            android:layout_marginLeft="10px"/>
+            android:layout_weight="1" />
+        </LinearLayout>
 
-      <ImageView android:id="@+id/resultview" android:layout_height="wrap_content"
-        android:layout_width="fill_parent"
-        android:layout_weight="1" />
+      </LinearLayout>
+
+      <include layout="@layout/pass_fail_buttons" />
+
     </LinearLayout>
-
-  </LinearLayout>
-
-  <include layout="@layout/pass_fail_buttons" />
-
-</LinearLayout>
-
+</android.support.wearable.view.BoxInsetLayout>
diff --git a/apps/CtsVerifier/res/layout/cainstallnotify_main.xml b/apps/CtsVerifier/res/layout/cainstallnotify_main.xml
index 16882bd..6cb6160 100644
--- a/apps/CtsVerifier/res/layout/cainstallnotify_main.xml
+++ b/apps/CtsVerifier/res/layout/cainstallnotify_main.xml
@@ -14,32 +14,37 @@
      See the License for the specific language governing permissions and
      limitations under the License.
 -->
-<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
+<android.support.wearable.view.BoxInsetLayout xmlns:android="http://schemas.android.com/apk/res/android"
+    xmlns:app="http://schemas.android.com/apk/res-auto"
     android:layout_width="match_parent"
-    android:layout_height="match_parent"
-    android:orientation="vertical"
-    android:padding="10dip" >
-
-    <ScrollView
-        android:id="@+id/ca_notify_test_scroller"
+    android:layout_height="match_parent">
+    <LinearLayout xmlns:app="http://schemas.android.com/apk/res-auto"
         android:layout_width="match_parent"
-        android:layout_height="0dp"
-        android:layout_weight="1"
+        android:layout_height="match_parent"
         android:orientation="vertical"
         android:padding="10dip" >
 
-        <LinearLayout
-            android:id="@+id/ca_notify_test_items"
+        <ScrollView
+            android:id="@+id/ca_notify_test_scroller"
+            android:layout_width="match_parent"
+            android:layout_height="0dp"
+            android:layout_weight="1"
+            android:orientation="vertical"
+            android:padding="10dip" >
+
+            <LinearLayout
+                android:id="@+id/ca_notify_test_items"
+                android:layout_width="match_parent"
+                android:layout_height="wrap_content"
+                android:orientation="vertical" >
+            </LinearLayout>
+        </ScrollView>
+
+        <include
             android:layout_width="match_parent"
             android:layout_height="wrap_content"
-            android:orientation="vertical" >
-        </LinearLayout>
-    </ScrollView>
+            android:layout_weight="0"
+            layout="@layout/pass_fail_buttons" />
 
-    <include
-        android:layout_width="match_parent"
-        android:layout_height="wrap_content"
-        android:layout_weight="0"
-        layout="@layout/pass_fail_buttons" />
-
-</LinearLayout>
\ No newline at end of file
+    </LinearLayout>
+</android.support.wearable.view.BoxInsetLayout>
diff --git a/apps/CtsVerifier/res/layout/fs_main.xml b/apps/CtsVerifier/res/layout/fs_main.xml
index 7473f0f..8a78c81 100644
--- a/apps/CtsVerifier/res/layout/fs_main.xml
+++ b/apps/CtsVerifier/res/layout/fs_main.xml
@@ -13,29 +13,34 @@
      See the License for the specific language governing permissions and
      limitations under the License.
 -->
-<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
-         android:orientation="vertical"
-         android:layout_width="match_parent"
-         android:layout_height="match_parent">
+<android.support.wearable.view.BoxInsetLayout xmlns:android="http://schemas.android.com/apk/res/android"
+    xmlns:app="http://schemas.android.com/apk/res-auto"
+    android:layout_width="match_parent"
+    android:layout_height="match_parent">
+    <LinearLayout app:layout_box="all"
+             android:orientation="vertical"
+             android:layout_width="match_parent"
+             android:layout_height="match_parent">
 
-     <TextView android:id="@+id/fs_warnings"
-               android:layout_width="wrap_content"
-               android:layout_height="wrap_content"
-               android:text="@string/empty"/>
+         <TextView android:id="@+id/fs_warnings"
+                   android:layout_width="wrap_content"
+                   android:layout_height="wrap_content"
+                   android:text="@string/empty"/>
 
-     <ListView android:id="@id/android:list"
-               android:layout_width="match_parent"
-               android:layout_height="match_parent"
-               android:background="#000000"
-               android:layout_weight="1"
-               android:drawSelectorOnTop="false"/>
+         <ListView android:id="@id/android:list"
+                   android:layout_width="match_parent"
+                   android:layout_height="match_parent"
+                   android:background="#000000"
+                   android:layout_weight="1"
+                   android:drawSelectorOnTop="false"/>
 
-     <TextView android:id="@id/android:empty"
-               android:layout_width="match_parent"
-               android:layout_height="match_parent"
-               android:background="#000000"
-               android:text="@string/fs_no_data"/>
+         <TextView android:id="@id/android:empty"
+                   android:layout_width="match_parent"
+                   android:layout_height="match_parent"
+                   android:background="#000000"
+                   android:text="@string/fs_no_data"/>
 
-    <include layout="@layout/pass_fail_buttons" />
+        <include layout="@layout/pass_fail_buttons" />
 
-</LinearLayout>
+    </LinearLayout>
+</android.support.wearable.view.BoxInsetLayout>
diff --git a/apps/CtsVerifier/res/layout/intent_driven_test.xml b/apps/CtsVerifier/res/layout/intent_driven_test.xml
index 00c1cf6..bd9e4ca 100644
--- a/apps/CtsVerifier/res/layout/intent_driven_test.xml
+++ b/apps/CtsVerifier/res/layout/intent_driven_test.xml
@@ -1,30 +1,36 @@
 <?xml version="1.0" encoding="utf-8"?>
 
-<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
+<android.support.wearable.view.BoxInsetLayout xmlns:android="http://schemas.android.com/apk/res/android"
+    xmlns:app="http://schemas.android.com/apk/res-auto"
     android:layout_width="match_parent"
-    android:layout_height="match_parent"
-    android:orientation="vertical">
-
-  <ScrollView
-      android:layout_width="match_parent"
-      android:layout_height="0dp"
-      android:layout_weight="1">
-    <TextView android:id="@+id/info"
+    android:layout_height="match_parent">
+    <LinearLayout app:layout_box="all"
         android:layout_width="match_parent"
-        android:layout_height="wrap_content"
-        android:textSize="18sp"
-        android:padding="5dp"
-        android:text="@string/dc_start_alarm_test_info"/>
-  </ScrollView>
+        android:layout_height="match_parent"
+        android:orientation="vertical">
 
-  <LinearLayout android:id="@+id/buttons"
-      android:orientation="horizontal"
-      android:layout_width="wrap_content"
-      android:layout_height="wrap_content"/>
+        <ScrollView
+            android:layout_width="match_parent"
+            android:layout_height="0dp"
+            android:layout_weight="1">
+            <TextView android:id="@+id/info"
+                android:layout_width="match_parent"
+                android:layout_height="wrap_content"
+                android:textSize="18sp"
+                android:padding="5dp"
+                android:text="@string/dc_start_alarm_test_info"/>
+        </ScrollView>
 
-    <LinearLayout
-        android:layout_width="match_parent"
-        android:layout_height="wrap_content">
-      <include layout="@layout/pass_fail_buttons"/>
-  </LinearLayout>
-</LinearLayout>
+        <LinearLayout android:id="@+id/buttons"
+            android:orientation="horizontal"
+            android:layout_width="wrap_content"
+            android:layout_height="wrap_content"/>
+
+        <LinearLayout
+            android:layout_width="match_parent"
+            android:layout_height="wrap_content">
+
+            <include layout="@layout/pass_fail_buttons"/>
+        </LinearLayout>
+    </LinearLayout>
+</android.support.wearable.view.BoxInsetLayout>
diff --git a/apps/CtsVerifier/res/layout/its_main.xml b/apps/CtsVerifier/res/layout/its_main.xml
new file mode 100644
index 0000000..2f5eade
--- /dev/null
+++ b/apps/CtsVerifier/res/layout/its_main.xml
@@ -0,0 +1,24 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright (C) 2014 The Android Open Source Project
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+          http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+-->
+<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
+              android:orientation="vertical"
+              android:layout_width="match_parent"
+              android:layout_height="match_parent"
+    >
+
+    <include layout="@layout/pass_fail_buttons" />
+
+</LinearLayout>
diff --git a/apps/CtsVerifier/res/layout/js_charging.xml b/apps/CtsVerifier/res/layout/js_charging.xml
index 4c0e552..8d9ed1d 100644
--- a/apps/CtsVerifier/res/layout/js_charging.xml
+++ b/apps/CtsVerifier/res/layout/js_charging.xml
@@ -1,67 +1,76 @@
 <?xml version="1.0" encoding="utf-8"?>
-
-<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
-    android:orientation="vertical" android:layout_width="match_parent"
+<android.support.wearable.view.BoxInsetLayout xmlns:android="http://schemas.android.com/apk/res/android"
+    xmlns:app="http://schemas.android.com/apk/res-auto"
+    android:layout_width="match_parent"
     android:layout_height="match_parent">
-    <TextView
+    <ScrollView app:layout_box="all"
         android:layout_width="match_parent"
-        android:layout_height="wrap_content"
-        android:text="@string/js_test_description"
-        android:layout_margin="@dimen/js_padding"/>
-    <TextView
-        android:layout_width="match_parent"
-        android:layout_height="wrap_content"
-        android:layout_margin="@dimen/js_padding"
-        android:text="@string/js_charging_description_1"
-        android:textStyle="bold"/>
-    <Button
-        android:id="@+id/js_charging_start_test_button"
-        android:layout_width="wrap_content"
-        android:layout_height="wrap_content"
-        android:layout_gravity="center"
-        android:text="@string/js_start_test_text"
-        android:onClick="startTest"
-        android:enabled="false"/>
+        android:layout_height="match_parent">
+        <LinearLayout
+            android:orientation="vertical"
+            android:layout_width="match_parent"
+            android:layout_height="match_parent">
+            <TextView
+                android:layout_width="match_parent"
+                android:layout_height="wrap_content"
+                android:text="@string/js_test_description"
+                android:layout_margin="@dimen/js_padding"/>
+            <TextView
+                android:layout_width="match_parent"
+                android:layout_height="wrap_content"
+                android:layout_margin="@dimen/js_padding"
+                android:text="@string/js_charging_description_1"
+                android:textStyle="bold"/>
+            <Button
+                android:id="@+id/js_charging_start_test_button"
+                android:layout_width="wrap_content"
+                android:layout_height="wrap_content"
+                android:layout_gravity="center"
+                android:text="@string/js_start_test_text"
+                android:onClick="startTest"
+                android:enabled="false"/>
 
-    <LinearLayout
-        android:layout_width="wrap_content"
-        android:layout_height="wrap_content"
-        android:layout_marginTop="@dimen/js_padding"
-        android:layout_marginBottom="@dimen/js_padding">
-        <ImageView
-            android:id="@+id/charging_off_test_image"
-            android:layout_width="wrap_content"
-            android:layout_height="wrap_content"
-            android:src="@drawable/fs_indeterminate"
-            android:layout_marginRight="@dimen/js_padding"/>
-        <TextView
-            android:layout_width="wrap_content"
-            android:layout_height="wrap_content"
-            android:text="@string/js_charging_off_test"
-            android:textSize="16dp"/>
-    </LinearLayout>
-    <TextView
-        android:layout_width="match_parent"
-        android:layout_height="wrap_content"
-        android:layout_margin="@dimen/js_padding"
-        android:text="@string/js_charging_description_2"
-        android:textStyle="bold"/>
-    <LinearLayout
-        android:layout_width="wrap_content"
-        android:layout_height="wrap_content"
-        android:layout_marginTop="@dimen/js_padding"
-        android:layout_marginBottom="@dimen/js_padding">
-        <ImageView
-            android:id="@+id/charging_on_test_image"
-            android:layout_width="wrap_content"
-            android:layout_height="wrap_content"
-            android:src="@drawable/fs_indeterminate"
-            android:layout_marginRight="@dimen/js_padding"/>
-        <TextView
-            android:layout_width="wrap_content"
-            android:layout_height="wrap_content"
-            android:text="@string/js_charging_on_test"
-            android:textSize="16dp"/>
-    </LinearLayout>
-    <include layout="@layout/pass_fail_buttons" />
-</LinearLayout>
\ No newline at end of file
+            <LinearLayout
+                android:layout_width="wrap_content"
+                android:layout_height="wrap_content"
+                android:layout_marginTop="@dimen/js_padding"
+                android:layout_marginBottom="@dimen/js_padding">
+                <ImageView
+                    android:id="@+id/charging_off_test_image"
+                    android:layout_width="wrap_content"
+                    android:layout_height="wrap_content"
+                    android:src="@drawable/fs_indeterminate"
+                    android:layout_marginRight="@dimen/js_padding"/>
+                <TextView
+                    android:layout_width="wrap_content"
+                    android:layout_height="wrap_content"
+                    android:text="@string/js_charging_off_test"
+                    android:textSize="16dp"/>
+            </LinearLayout>
+            <TextView
+                android:layout_width="match_parent"
+                android:layout_height="wrap_content"
+                android:layout_margin="@dimen/js_padding"
+                android:text="@string/js_charging_description_2"
+                android:textStyle="bold"/>
+            <LinearLayout
+                android:layout_width="wrap_content"
+                android:layout_height="wrap_content"
+                android:layout_marginTop="@dimen/js_padding"
+                android:layout_marginBottom="@dimen/js_padding">
+                <ImageView
+                    android:id="@+id/charging_on_test_image"
+                    android:layout_width="wrap_content"
+                    android:layout_height="wrap_content"
+                    android:src="@drawable/fs_indeterminate"
+                    android:layout_marginRight="@dimen/js_padding"/>
+                <TextView
+                    android:layout_width="wrap_content"
+                    android:layout_height="wrap_content"
+                    android:text="@string/js_charging_on_test"
+                    android:textSize="16dp"/>
+            </LinearLayout>
+            <include layout="@layout/pass_fail_buttons" />
+        </LinearLayout>
+    </ScrollView>
+</android.support.wearable.view.BoxInsetLayout>
diff --git a/apps/CtsVerifier/res/layout/js_connectivity.xml b/apps/CtsVerifier/res/layout/js_connectivity.xml
index 5208c18..b0e2824 100644
--- a/apps/CtsVerifier/res/layout/js_connectivity.xml
+++ b/apps/CtsVerifier/res/layout/js_connectivity.xml
@@ -1,83 +1,91 @@
 <?xml version="1.0" encoding="utf-8"?>
-
-<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
-    android:orientation="vertical" android:layout_width="match_parent"
+<android.support.wearable.view.BoxInsetLayout xmlns:android="http://schemas.android.com/apk/res/android"
+    xmlns:app="http://schemas.android.com/apk/res-auto"
+    android:layout_width="match_parent"
     android:layout_height="match_parent">
-    <TextView
+    <ScrollView app:layout_box="all"
         android:layout_width="match_parent"
-        android:layout_height="wrap_content"
-        android:text="@string/js_test_description"
-        android:layout_margin="@dimen/js_padding"/>
+        android:layout_height="match_parent">
+        <LinearLayout
+            android:orientation="vertical" android:layout_width="match_parent"
+            android:layout_height="match_parent">
+            <TextView
+                android:layout_width="match_parent"
+                android:layout_height="wrap_content"
+                android:text="@string/js_test_description"
+                android:layout_margin="@dimen/js_padding"/>
 
-    <TextView
-        android:layout_width="match_parent"
-        android:layout_height="wrap_content"
-        android:text="@string/js_connectivity_description_1"
-        android:layout_margin="@dimen/js_padding"
-        android:textStyle="bold"/>
+            <TextView
+                android:layout_width="match_parent"
+                android:layout_height="wrap_content"
+                android:text="@string/js_connectivity_description_1"
+                android:layout_margin="@dimen/js_padding"
+                android:textStyle="bold"/>
 
-    <Button
-        android:id="@+id/js_connectivity_start_test_button"
-        android:layout_width="wrap_content"
-        android:layout_height="wrap_content"
-        android:layout_gravity="center"
-        android:text="@string/js_start_test_text"
-        android:onClick="startTest"
-        android:enabled="false"/>
+            <Button
+                android:id="@+id/js_connectivity_start_test_button"
+                android:layout_width="wrap_content"
+                android:layout_height="wrap_content"
+                android:layout_gravity="center"
+                android:text="@string/js_start_test_text"
+                android:onClick="startTest"
+                android:enabled="false"/>
 
-    <LinearLayout
-        android:layout_width="wrap_content"
-        android:layout_height="wrap_content"
-        android:layout_marginTop="@dimen/js_padding"
-        android:layout_marginBottom="@dimen/js_padding">
-        <ImageView
-            android:id="@+id/connectivity_off_test_unmetered_image"
-            android:layout_width="wrap_content"
-            android:layout_height="wrap_content"
-            android:src="@drawable/fs_indeterminate"
-            android:layout_marginRight="@dimen/js_padding"/>
-        <TextView
-            android:layout_width="wrap_content"
-            android:layout_height="wrap_content"
-            android:text="@string/js_unmetered_connectivity_test"
-            android:textSize="16dp"/>
-    </LinearLayout>
+            <LinearLayout
+                android:layout_width="wrap_content"
+                android:layout_height="wrap_content"
+                android:layout_marginTop="@dimen/js_padding"
+                android:layout_marginBottom="@dimen/js_padding">
+                <ImageView
+                    android:id="@+id/connectivity_off_test_unmetered_image"
+                    android:layout_width="wrap_content"
+                    android:layout_height="wrap_content"
+                    android:src="@drawable/fs_indeterminate"
+                    android:layout_marginRight="@dimen/js_padding"/>
+                <TextView
+                    android:layout_width="wrap_content"
+                    android:layout_height="wrap_content"
+                    android:text="@string/js_unmetered_connectivity_test"
+                    android:textSize="16dp"/>
+            </LinearLayout>
 
-    <LinearLayout
-        android:layout_width="wrap_content"
-        android:layout_height="wrap_content"
-        android:layout_marginTop="@dimen/js_padding"
-        android:layout_marginBottom="@dimen/js_padding">
-        <ImageView
-            android:id="@+id/connectivity_off_test_any_connectivity_image"
-            android:layout_width="wrap_content"
-            android:layout_height="wrap_content"
-            android:src="@drawable/fs_indeterminate"
-            android:layout_marginRight="@dimen/js_padding"/>
-        <TextView
-            android:layout_width="wrap_content"
-            android:layout_height="wrap_content"
-            android:text="@string/js_any_connectivity_test"
-            android:textSize="16dp"/>
-    </LinearLayout>
+            <LinearLayout
+                android:layout_width="wrap_content"
+                android:layout_height="wrap_content"
+                android:layout_marginTop="@dimen/js_padding"
+                android:layout_marginBottom="@dimen/js_padding">
+                <ImageView
+                    android:id="@+id/connectivity_off_test_any_connectivity_image"
+                    android:layout_width="wrap_content"
+                    android:layout_height="wrap_content"
+                    android:src="@drawable/fs_indeterminate"
+                    android:layout_marginRight="@dimen/js_padding"/>
+                <TextView
+                    android:layout_width="wrap_content"
+                    android:layout_height="wrap_content"
+                    android:text="@string/js_any_connectivity_test"
+                    android:textSize="16dp"/>
+            </LinearLayout>
 
-    <LinearLayout
-        android:layout_width="wrap_content"
-        android:layout_height="wrap_content"
-        android:layout_marginTop="@dimen/js_padding"
-        android:layout_marginBottom="@dimen/js_padding">
-        <ImageView
-            android:id="@+id/connectivity_off_test_no_connectivity_image"
-            android:layout_width="wrap_content"
-            android:layout_height="wrap_content"
-            android:src="@drawable/fs_indeterminate"
-            android:layout_marginRight="@dimen/js_padding"/>
-        <TextView
-            android:layout_width="wrap_content"
-            android:layout_height="wrap_content"
-            android:text="@string/js_no_connectivity_test"
-            android:textSize="16dp"/>
-    </LinearLayout>
+            <LinearLayout
+                android:layout_width="wrap_content"
+                android:layout_height="wrap_content"
+                android:layout_marginTop="@dimen/js_padding"
+                android:layout_marginBottom="@dimen/js_padding">
+                <ImageView
+                    android:id="@+id/connectivity_off_test_no_connectivity_image"
+                    android:layout_width="wrap_content"
+                    android:layout_height="wrap_content"
+                    android:src="@drawable/fs_indeterminate"
+                    android:layout_marginRight="@dimen/js_padding"/>
+                <TextView
+                    android:layout_width="wrap_content"
+                    android:layout_height="wrap_content"
+                    android:text="@string/js_no_connectivity_test"
+                    android:textSize="16dp"/>
+            </LinearLayout>
 
-    <include layout="@layout/pass_fail_buttons" />
-</LinearLayout>
\ No newline at end of file
+            <include layout="@layout/pass_fail_buttons" />
+        </LinearLayout>
+    </ScrollView>
+</android.support.wearable.view.BoxInsetLayout>
diff --git a/apps/CtsVerifier/res/layout/js_idle.xml b/apps/CtsVerifier/res/layout/js_idle.xml
index 90e55ec..4277173 100644
--- a/apps/CtsVerifier/res/layout/js_idle.xml
+++ b/apps/CtsVerifier/res/layout/js_idle.xml
@@ -1,63 +1,71 @@
 <?xml version="1.0" encoding="utf-8"?>
-
-<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
-    android:orientation="vertical" android:layout_width="match_parent"
+<android.support.wearable.view.BoxInsetLayout xmlns:android="http://schemas.android.com/apk/res/android"
+    xmlns:app="http://schemas.android.com/apk/res-auto"
+    android:layout_width="match_parent"
     android:layout_height="match_parent">
-
-    <TextView
+    <ScrollView app:layout_box="all"
         android:layout_width="match_parent"
-        android:layout_height="wrap_content"
-        android:text="@string/js_test_description"
-        android:layout_margin="@dimen/js_padding"/>
-    <TextView
-        android:layout_width="match_parent"
-        android:layout_height="wrap_content"
-        android:text="@string/js_idle_description_1"
-        android:layout_margin="@dimen/js_padding"
-        android:textStyle="bold"/>
+        android:layout_height="match_parent">
+        <LinearLayout
+            android:orientation="vertical"
+            android:layout_width="match_parent"
+            android:layout_height="match_parent">
+            <TextView
+                android:layout_width="match_parent"
+                android:layout_height="wrap_content"
+                android:text="@string/js_test_description"
+                android:layout_margin="@dimen/js_padding"/>
+            <TextView
+                android:layout_width="match_parent"
+                android:layout_height="wrap_content"
+                android:text="@string/js_idle_description_1"
+                android:layout_margin="@dimen/js_padding"
+                android:textStyle="bold"/>
 
-    <Button
-        android:id="@+id/js_idle_start_test_button"
-        android:layout_width="wrap_content"
-        android:layout_height="wrap_content"
-        android:layout_gravity="center"
-        android:text="@string/js_start_test_text"
-        android:onClick="startTest"
-        android:enabled="false"/>
+            <Button
+                android:id="@+id/js_idle_start_test_button"
+                android:layout_width="wrap_content"
+                android:layout_height="wrap_content"
+                android:layout_gravity="center"
+                android:text="@string/js_start_test_text"
+                android:onClick="startTest"
+                android:enabled="false"/>
 
-    <LinearLayout
-        android:layout_width="wrap_content"
-        android:layout_height="wrap_content"
-        android:layout_marginTop="@dimen/js_padding"
-        android:layout_marginBottom="@dimen/js_padding">
-        <ImageView
-            android:id="@+id/idle_off_test_image"
-            android:layout_width="wrap_content"
-            android:layout_height="wrap_content"
-            android:src="@drawable/fs_indeterminate"
-            android:layout_marginRight="@dimen/js_padding"/>
-        <TextView
-            android:layout_width="wrap_content"
-            android:layout_height="wrap_content"
-            android:text="@string/js_idle_item_idle_off"
-            android:textSize="16dp"/>
-    </LinearLayout>
-    <LinearLayout
-        android:layout_width="wrap_content"
-        android:layout_height="wrap_content"
-        android:layout_marginTop="@dimen/js_padding"
-        android:layout_marginBottom="@dimen/js_padding">
-        <ImageView
-            android:id="@+id/idle_on_test_image"
-            android:layout_width="wrap_content"
-            android:layout_height="wrap_content"
-            android:src="@drawable/fs_indeterminate"
-            android:layout_marginRight="@dimen/js_padding"/>
-        <TextView
-            android:layout_width="wrap_content"
-            android:layout_height="wrap_content"
-            android:text="@string/js_idle_item_idle_on"
-            android:textSize="16dp"/>
-    </LinearLayout>
-    <include layout="@layout/pass_fail_buttons" />
-</LinearLayout>
\ No newline at end of file
+            <LinearLayout
+                android:layout_width="wrap_content"
+                android:layout_height="wrap_content"
+                android:layout_marginTop="@dimen/js_padding"
+                android:layout_marginBottom="@dimen/js_padding">
+                <ImageView
+                    android:id="@+id/idle_off_test_image"
+                    android:layout_width="wrap_content"
+                    android:layout_height="wrap_content"
+                    android:src="@drawable/fs_indeterminate"
+                    android:layout_marginRight="@dimen/js_padding"/>
+                <TextView
+                    android:layout_width="wrap_content"
+                    android:layout_height="wrap_content"
+                    android:text="@string/js_idle_item_idle_off"
+                    android:textSize="16dp"/>
+            </LinearLayout>
+            <LinearLayout
+                android:layout_width="wrap_content"
+                android:layout_height="wrap_content"
+                android:layout_marginTop="@dimen/js_padding"
+                android:layout_marginBottom="@dimen/js_padding">
+                <ImageView
+                    android:id="@+id/idle_on_test_image"
+                    android:layout_width="wrap_content"
+                    android:layout_height="wrap_content"
+                    android:src="@drawable/fs_indeterminate"
+                    android:layout_marginRight="@dimen/js_padding"/>
+                <TextView
+                    android:layout_width="wrap_content"
+                    android:layout_height="wrap_content"
+                    android:text="@string/js_idle_item_idle_on"
+                    android:textSize="16dp"/>
+            </LinearLayout>
+            <include layout="@layout/pass_fail_buttons" />
+        </LinearLayout>
+    </ScrollView>
+</android.support.wearable.view.BoxInsetLayout>
diff --git a/apps/CtsVerifier/res/layout/location_mode_main.xml b/apps/CtsVerifier/res/layout/location_mode_main.xml
index fde6aba..1768434 100644
--- a/apps/CtsVerifier/res/layout/location_mode_main.xml
+++ b/apps/CtsVerifier/res/layout/location_mode_main.xml
@@ -14,32 +14,37 @@
      See the License for the specific language governing permissions and
      limitations under the License.
 -->
-<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
+<android.support.wearable.view.BoxInsetLayout xmlns:android="http://schemas.android.com/apk/res/android"
+    xmlns:app="http://schemas.android.com/apk/res-auto"
     android:layout_width="match_parent"
-    android:layout_height="match_parent"
-    android:orientation="vertical"
-    android:padding="10dip" >
-
-    <ScrollView
-        android:id="@+id/test_scroller"
+    android:layout_height="match_parent">
+    <LinearLayout app:layout_box="all"
         android:layout_width="match_parent"
-        android:layout_height="0dp"
-        android:layout_weight="1"
+        android:layout_height="match_parent"
         android:orientation="vertical"
         android:padding="10dip" >
 
-        <LinearLayout
-            android:id="@+id/test_items"
+        <ScrollView
+            android:id="@+id/test_scroller"
+            android:layout_width="match_parent"
+            android:layout_height="0dp"
+            android:layout_weight="1"
+            android:orientation="vertical"
+            android:padding="10dip" >
+
+            <LinearLayout
+                android:id="@+id/test_items"
+                android:layout_width="match_parent"
+                android:layout_height="wrap_content"
+                android:orientation="vertical" >
+            </LinearLayout>
+        </ScrollView>
+
+        <include
             android:layout_width="match_parent"
             android:layout_height="wrap_content"
-            android:orientation="vertical" >
-        </LinearLayout>
-    </ScrollView>
+            android:layout_weight="0"
+            layout="@layout/pass_fail_buttons" />
 
-    <include
-        android:layout_width="match_parent"
-        android:layout_height="wrap_content"
-        android:layout_weight="0"
-        layout="@layout/pass_fail_buttons" />
-
-</LinearLayout>
+    </LinearLayout>
+</android.support.wearable.view.BoxInsetLayout>
diff --git a/apps/CtsVerifier/res/layout/pa_main.xml b/apps/CtsVerifier/res/layout/pa_main.xml
index 76cb7d4..832af71 100644
--- a/apps/CtsVerifier/res/layout/pa_main.xml
+++ b/apps/CtsVerifier/res/layout/pa_main.xml
@@ -13,19 +13,24 @@
      See the License for the specific language governing permissions and
      limitations under the License.
 -->
-<RelativeLayout xmlns:android="http://schemas.android.com/apk/res/android"
+<android.support.wearable.view.BoxInsetLayout xmlns:android="http://schemas.android.com/apk/res/android"
+    xmlns:app="http://schemas.android.com/apk/res-auto"
     android:layout_width="match_parent"
-    android:layout_height="match_parent" >
-
-    <include
-        android:id="@+id/pass_fail_buttons"
-        android:layout_gravity="top"
-        layout="@layout/pass_fail_buttons" />
-
-    <TextureView
-        android:id="@+id/texture_view"
+    android:layout_height="match_parent">
+    <RelativeLayout app:layout_box="all"
         android:layout_width="match_parent"
-        android:layout_height="match_parent"
-        android:layout_below="@id/pass_fail_buttons" />
+        android:layout_height="match_parent" >
 
-</RelativeLayout>
+        <include
+            android:id="@+id/pass_fail_buttons"
+            android:layout_gravity="top"
+            layout="@layout/pass_fail_buttons" />
+
+        <TextureView
+            android:id="@+id/texture_view"
+            android:layout_width="match_parent"
+            android:layout_height="match_parent"
+            android:layout_below="@id/pass_fail_buttons" />
+
+    </RelativeLayout>
+</android.support.wearable.view.BoxInsetLayout>
diff --git a/apps/CtsVerifier/res/layout/pass_fail_buttons.xml b/apps/CtsVerifier/res/layout/pass_fail_buttons.xml
index 5eec539..b269dcd 100644
--- a/apps/CtsVerifier/res/layout/pass_fail_buttons.xml
+++ b/apps/CtsVerifier/res/layout/pass_fail_buttons.xml
@@ -13,28 +13,31 @@
      See the License for the specific language governing permissions and
      limitations under the License.
 -->
-<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android" 
-        android:orientation="horizontal"
+<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
         android:layout_width="match_parent"
-        android:layout_height="wrap_content">
+        android:layout_height="wrap_content"
+        android:orientation="horizontal">
 
     <ImageButton android:id="@+id/pass_button"
             android:layout_width="wrap_content"
             android:layout_height="wrap_content"
-            android:layout_weight="1"            
+            android:layout_weight="1"
+            android:contentDescription="@string/pass_button_text"
             android:src="@drawable/fs_good"/>
-            
+
     <ImageButton android:id="@+id/info_button"
             android:layout_width="wrap_content"
             android:layout_height="wrap_content"
             android:layout_weight="1"
+            android:contentDescription="@string/info_button_text"
             android:src="@drawable/fs_indeterminate"
             android:visibility="gone"/>
 
     <ImageButton android:id="@+id/fail_button"
             android:layout_width="wrap_content"
             android:layout_height="wrap_content"
-            android:layout_weight="1"            
+            android:layout_weight="1"
+            android:contentDescription="@string/fail_button_text"
             android:src="@drawable/fs_error"/>
-            
+
 </LinearLayout>
diff --git a/apps/CtsVerifier/res/layout/pass_fail_list.xml b/apps/CtsVerifier/res/layout/pass_fail_list.xml
index 0b247f4..cdd40e1 100644
--- a/apps/CtsVerifier/res/layout/pass_fail_list.xml
+++ b/apps/CtsVerifier/res/layout/pass_fail_list.xml
@@ -13,24 +13,30 @@
      See the License for the specific language governing permissions and
      limitations under the License.
 -->
-<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
-        android:orientation="vertical"
-        android:layout_width="match_parent"
-        android:layout_height="match_parent"
-        >
+<android.support.wearable.view.BoxInsetLayout xmlns:android="http://schemas.android.com/apk/res/android"
+    xmlns:app="http://schemas.android.com/apk/res-auto"
+    android:layout_width="match_parent"
+    android:layout_height="match_parent">
 
-    <ListView android:id="@id/android:list"
+    <LinearLayout app:layout_box="all"
+            android:orientation="vertical"
             android:layout_width="match_parent"
             android:layout_height="match_parent"
-            android:layout_weight="1"
-            />
+            >
 
-    <TextView android:id="@id/android:empty"
-            android:layout_width="match_parent"
-            android:layout_height="match_parent"
-            android:layout_weight="1"
-            />
+        <ListView android:id="@id/android:list"
+                android:layout_width="match_parent"
+                android:layout_height="match_parent"
+                android:layout_weight="1"
+                />
 
-    <include layout="@layout/pass_fail_buttons" />
+        <TextView android:id="@id/android:empty"
+                android:layout_width="match_parent"
+                android:layout_height="match_parent"
+                android:layout_weight="1"
+                />
 
-</LinearLayout>
+        <include layout="@layout/pass_fail_buttons" />
+
+    </LinearLayout>
+</android.support.wearable.view.BoxInsetLayout>
diff --git a/apps/CtsVerifier/res/layout/poa_main.xml b/apps/CtsVerifier/res/layout/poa_main.xml
index 578a6a6..41bade0 100644
--- a/apps/CtsVerifier/res/layout/poa_main.xml
+++ b/apps/CtsVerifier/res/layout/poa_main.xml
@@ -13,17 +13,22 @@
      See the License for the specific language governing permissions and
      limitations under the License.
 -->
-<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
-    android:layout_width="fill_parent"
-    android:layout_height="fill_parent"
-    android:orientation="vertical" >
+<android.support.wearable.view.BoxInsetLayout xmlns:android="http://schemas.android.com/apk/res/android"
+    xmlns:app="http://schemas.android.com/apk/res-auto"
+    android:layout_width="match_parent"
+    android:layout_height="match_parent">
+    <LinearLayout app:layout_box="all"
+        android:layout_width="fill_parent"
+        android:layout_height="fill_parent"
+        android:orientation="vertical" >
 
-    <include layout="@layout/pass_fail_buttons" />
+        <include layout="@layout/pass_fail_buttons" />
 
-    <TextView
-        android:id="@+id/poa_status_text"
-        android:layout_width="match_parent"
-        android:layout_height="match_parent"
-        android:textAppearance="@style/InstructionsFont" />
+        <TextView
+            android:id="@+id/poa_status_text"
+            android:layout_width="match_parent"
+            android:layout_height="match_parent"
+            android:textAppearance="@style/InstructionsFont" />
 
-</LinearLayout>
\ No newline at end of file
+    </LinearLayout>
+</android.support.wearable.view.BoxInsetLayout>
diff --git a/apps/CtsVerifier/res/layout/pwa_widgets.xml b/apps/CtsVerifier/res/layout/pwa_widgets.xml
index 4bfcec6..537fc32 100644
--- a/apps/CtsVerifier/res/layout/pwa_widgets.xml
+++ b/apps/CtsVerifier/res/layout/pwa_widgets.xml
@@ -13,16 +13,19 @@
      See the License for the specific language governing permissions and
      limitations under the License.
 -->
-<FrameLayout xmlns:android="http://schemas.android.com/apk/res/android"
+<android.support.wearable.view.BoxInsetLayout xmlns:android="http://schemas.android.com/apk/res/android"
+    xmlns:app="http://schemas.android.com/apk/res-auto"
     android:layout_width="match_parent"
-    android:layout_height="match_parent" >
+    android:layout_height="match_parent">
 
      <TextureView
+         app:layout_box="all"
          android:id="@+id/texture_view"
          android:layout_width="match_parent"
          android:layout_height="match_parent" />
 
      <LinearLayout
+         app:layout_box="all"
          android:layout_width="fill_parent"
          android:layout_height="wrap_content"
          android:orientation="vertical" >
@@ -70,4 +73,4 @@
          </LinearLayout>
      </LinearLayout>
 
-</FrameLayout>
+</android.support.wearable.view.BoxInsetLayout>
diff --git a/apps/CtsVerifier/res/layout/test_list_footer.xml b/apps/CtsVerifier/res/layout/test_list_footer.xml
new file mode 100644
index 0000000..fdb8e43
--- /dev/null
+++ b/apps/CtsVerifier/res/layout/test_list_footer.xml
@@ -0,0 +1,38 @@
+<?xml version="1.0" encoding="utf-8"?>
+
+<!--
+  ~ Copyright (C) 2014 The Android Open Source Project
+  ~
+  ~ Licensed under the Apache License, Version 2.0 (the "License");
+  ~ you may not use this file except in compliance with the License.
+  ~ You may obtain a copy of the License at
+  ~
+  ~      http://www.apache.org/licenses/LICENSE-2.0
+  ~
+  ~ Unless required by applicable law or agreed to in writing, software
+  ~ distributed under the License is distributed on an "AS IS" BASIS,
+  ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  ~ See the License for the specific language governing permissions and
+  ~ limitations under the License
+  -->
+<GridLayout xmlns:android="http://schemas.android.com/apk/res/android"
+    android:orientation="horizontal"
+    android:layout_width="match_parent"
+    android:layout_height="wrap_content">
+
+    <Button
+        android:id="@+id/clear"
+        android:text="@string/clear"
+        android:layout_width="wrap_content"
+        android:layout_height="wrap_content" />
+    <Button
+        android:id="@+id/view"
+        android:text="@string/view"
+        android:layout_width="wrap_content"
+        android:layout_height="wrap_content" />
+    <Button
+        android:id="@+id/export"
+        android:text="@string/export"
+        android:layout_width="wrap_content"
+        android:layout_height="wrap_content" />
+</GridLayout>
diff --git a/apps/CtsVerifier/res/values/strings.xml b/apps/CtsVerifier/res/values/strings.xml
index 3225bcf..e687087 100644
--- a/apps/CtsVerifier/res/values/strings.xml
+++ b/apps/CtsVerifier/res/values/strings.xml
@@ -217,6 +217,7 @@
     <string name="ble_waiting_notification">Waiting on notification</string>
     <string name="ble_read_rssi">Read RSSI</string>
     <string name="ble_disconnect">Disconnect</string>
+    <string name="ble_test_text">TEST</string>
 
     <!-- BLE server side strings -->
     <string name="ble_server_service_name">Bluetooth LE GATT Server Handler Service</string>
@@ -266,6 +267,8 @@
     <string name="ble_scanner_scan_filter_instruction">Scan filter is to scan data with service UUID = 0x6666 only. If you scan without scan filter, data with service UUID = 0x5555 and 0x6666 will show up on screen.\nFor monsoon test:\n\tClick scan with filter, lock the screen, connect to monsoon. It will not wake up when advertiser is advertising unscannable data packets, but will show a peak in power usage when advertiser is advertising scannable data.\nFor logcat test:\n\tClick scan with filter, logcat the scanner. No data will be received by GattService when advertiser is advertising unscannable data.</string>
     <string name="ble_scan_with_filter">Scan with filter</string>
     <string name="ble_scan_without_filter">Scan without filter</string>
+    <string name="ble_scan_start">Start scan</string>
+    <string name="ble_scan_stop">Stop scan</string>
 
     <!-- Strings for FeatureSummaryActivity -->
     <string name="feature_summary">Hardware/Software Feature Summary</string>
@@ -769,6 +772,30 @@
     <string name="usb_test_passed">Received all expected messages. Pass button enabled!</string>
     <string name="usb_file_descriptor_error">Could not open file descriptor for USB accessory... try reconnecting and restarting the accessory?</string>
 
+    <!-- Strings for the Camera ITS test activity -->
+    <string name="camera_its_test">Camera ITS Test</string>
+    <string name="camera_its_test_info">
+        1. Connect your Android device to a computer with adb installed via a USB cable.
+        \n\n2. Setup the CameraITS test environment by following the setup instructions in the
+        README file found in the CameraITS directory included in the CTS Verifier bundle
+        (cd CameraITS; source build/envsetup.sh;).
+        \n\n3. Setup the test scene described in the CameraITS README file, and aim the camera
+        at it.
+        \n\n4. Run the full ITS test suite on all possible camera Ids.
+        (cd CameraITS; python tools/run_all_tests.py camera=[cameraId]).  Once all
+        of the tests have been run, the \'PASS\' button will be enabled if all of the tests have
+        succeeded.  Please note that these tests can take 20+ minutes to run.
+    </string>
+    <string name="no_camera_manager">
+        No camera manager exists!  This test device is in a bad state.
+    </string>
+    <string name="all_legacy_devices">
+        All cameras on this device are LEGACY mode only - ITS tests will only be applied to LIMITED
+        or better devices.  \'PASS\' button enabled.
+    </string>
+    <string name="its_test_passed">All Camera ITS tests passed.  Pass button enabled!</string>
+    <string name="its_test_failed">Some Camera ITS tests failed.</string>
+
     <!-- Strings for StreamingVideoActivity -->
     <string name="streaming_video">Streaming Video Quality Verifier</string>
     <string name="streaming_video_info">This is a test for assessing the quality of streaming videos.  Play each stream and verify that the video is smooth and in sync with the audio, and that there are no quality problems.</string>
@@ -1282,7 +1309,7 @@
 
     <string name="js_charging_test">Charging Constraints</string>
     <string name="js_charging_instructions">Verify the behaviour of the JobScheduler API for when the device is on power and unplugged from power. Simply follow the on-screen instructions.</string>
-    <string name="js_charging_description_1">Unplug the phone in order to begin.</string>
+    <string name="js_charging_description_1">Unplug the device in order to begin.</string>
     <string name="js_charging_off_test">Device not charging will not execute a job with a charging constraint.</string>
     <string name="js_charging_on_test">Device when charging will execute a job with a charging constraint.</string>
     <string name="js_charging_description_2">After the above test has passed, plug the device back in to continue. If the above failed, you can simply fail this test.</string>
diff --git a/apps/CtsVerifier/src/android/support/wearable/view/BoxInsetLayout.java b/apps/CtsVerifier/src/android/support/wearable/view/BoxInsetLayout.java
index 95bac11..81e6dd0 100644
--- a/apps/CtsVerifier/src/android/support/wearable/view/BoxInsetLayout.java
+++ b/apps/CtsVerifier/src/android/support/wearable/view/BoxInsetLayout.java
@@ -18,6 +18,8 @@
 
 import com.android.cts.verifier.R;
 
+import android.annotation.TargetApi;
+import android.os.Build;
 import android.content.Context;
 import android.content.res.TypedArray;
 import android.graphics.Rect;
@@ -38,6 +40,7 @@
  * The {@code layout_box} attribute is ignored on a device with a rectangular
  * screen.
  */
+@TargetApi(Build.VERSION_CODES.KITKAT_WATCH)
 public class BoxInsetLayout extends FrameLayout {
 
     private static float FACTOR = 0.146467f; //(1 - sqrt(2)/2)/2
@@ -187,9 +190,9 @@
             int totalMargin = 0;
             // BoxInset is a padding. Ignore margin when we want to do BoxInset.
             if (mLastKnownRound && ((lp.boxedEdges & LayoutParams.BOX_LEFT) != 0)) {
-                totalPadding = boxInset;
+                totalPadding += boxInset;
             } else {
-                totalMargin = plwf + lp.leftMargin;
+                totalMargin += plwf + lp.leftMargin;
             }
             if (mLastKnownRound && ((lp.boxedEdges & LayoutParams.BOX_RIGHT) != 0)) {
                 totalPadding += boxInset;
@@ -206,10 +209,12 @@
             }
 
             // adjust height
+            totalPadding = 0;
+            totalMargin = 0;
             if (mLastKnownRound && ((lp.boxedEdges & LayoutParams.BOX_TOP) != 0)) {
-                totalPadding = boxInset;
+                totalPadding += boxInset;
             } else {
-                totalMargin = ptwf + lp.topMargin;
+                totalMargin += ptwf + lp.topMargin;
             }
             if (mLastKnownRound && ((lp.boxedEdges & LayoutParams.BOX_BOTTOM) != 0)) {
                 totalPadding += boxInset;
@@ -236,7 +241,7 @@
     }
 
     private void layoutBoxChildren(int left, int top, int right, int bottom,
-            boolean forceLeftGravity) {
+                                  boolean forceLeftGravity) {
         final int count = getChildCount();
         int boxInset = (int)(FACTOR * Math.max(right - left, bottom - top));
 
@@ -272,55 +277,79 @@
                 int paddingTop = child.getPaddingTop();
                 int paddingBottom = child.getPaddingBottom();
 
-                switch (absoluteGravity & Gravity.HORIZONTAL_GRAVITY_MASK) {
-                    case Gravity.CENTER_HORIZONTAL:
-                        childLeft = parentLeft + (parentRight - parentLeft - width) / 2 +
-                                lp.leftMargin - lp.rightMargin;
-                        break;
-                    case Gravity.RIGHT:
-                        if (!forceLeftGravity) {
-                            if (mLastKnownRound
-                                    && ((lp.boxedEdges & LayoutParams.BOX_RIGHT) != 0)) {
-                                paddingRight = boxInset;
-                                childLeft = right - left - width;
-                            } else {
-                                childLeft = parentRight - width - lp.rightMargin;
-                            }
+                // If the child's width is match_parent, we ignore gravity and set boxInset padding
+                // on both sides, with a left position of 0.
+                if (lp.width == LayoutParams.MATCH_PARENT) {
+                    if (mLastKnownRound && ((lp.boxedEdges & LayoutParams.BOX_LEFT) != 0)) {
+                        paddingLeft = boxInset;
+                    }
+                    if (mLastKnownRound && ((lp.boxedEdges & LayoutParams.BOX_RIGHT) != 0)) {
+                        paddingRight = boxInset;
+                    }
+                    childLeft = 0;
+                } else {
+                    switch (absoluteGravity & Gravity.HORIZONTAL_GRAVITY_MASK) {
+                        case Gravity.CENTER_HORIZONTAL:
+                            childLeft = parentLeft + (parentRight - parentLeft - width) / 2 +
+                                    lp.leftMargin - lp.rightMargin;
                             break;
-                        }
-                    case Gravity.LEFT:
-                    default:
-                        if (mLastKnownRound && ((lp.boxedEdges & LayoutParams.BOX_LEFT) != 0)) {
-                            paddingLeft = boxInset;
-                            childLeft = 0;
-                        } else {
-                            childLeft = parentLeft + lp.leftMargin;
-                        }
+                        case Gravity.RIGHT:
+                            if (!forceLeftGravity) {
+                                if (mLastKnownRound
+                                        && ((lp.boxedEdges & LayoutParams.BOX_RIGHT) != 0)) {
+                                    paddingRight = boxInset;
+                                    childLeft = right - left - width;
+                                } else {
+                                    childLeft = parentRight - width - lp.rightMargin;
+                                }
+                                break;
+                            }
+                        case Gravity.LEFT:
+                        default:
+                            if (mLastKnownRound && ((lp.boxedEdges & LayoutParams.BOX_LEFT) != 0)) {
+                                paddingLeft = boxInset;
+                                childLeft = 0;
+                            } else {
+                                childLeft = parentLeft + lp.leftMargin;
+                            }
+                    }
                 }
 
-                switch (verticalGravity) {
-                    case Gravity.TOP:
-                        if (mLastKnownRound && ((lp.boxedEdges & LayoutParams.BOX_TOP) != 0)) {
-                            paddingTop = boxInset;
-                            childTop = 0;
-                        } else {
+                // If the child's height is match_parent, we ignore gravity and set boxInset padding
+                // on both top and bottom, with a top position of 0.
+                if (lp.height == LayoutParams.MATCH_PARENT) {
+                    if (mLastKnownRound && ((lp.boxedEdges & LayoutParams.BOX_TOP) != 0)) {
+                        paddingTop = boxInset;
+                    }
+                    if (mLastKnownRound && ((lp.boxedEdges & LayoutParams.BOX_BOTTOM) != 0)) {
+                        paddingBottom = boxInset;
+                    }
+                    childTop = 0;
+                } else {
+                    switch (verticalGravity) {
+                        case Gravity.TOP:
+                            if (mLastKnownRound && ((lp.boxedEdges & LayoutParams.BOX_TOP) != 0)) {
+                                paddingTop = boxInset;
+                                childTop = 0;
+                            } else {
+                                childTop = parentTop + lp.topMargin;
+                            }
+                            break;
+                        case Gravity.CENTER_VERTICAL:
+                            childTop = parentTop + (parentBottom - parentTop - height) / 2 +
+                                    lp.topMargin - lp.bottomMargin;
+                            break;
+                        case Gravity.BOTTOM:
+                            if (mLastKnownRound && ((lp.boxedEdges & LayoutParams.BOX_BOTTOM) != 0)) {
+                                paddingBottom = boxInset;
+                                childTop = bottom - top - height;
+                            } else {
+                                childTop = parentBottom - height - lp.bottomMargin;
+                            }
+                            break;
+                        default:
                             childTop = parentTop + lp.topMargin;
-                        }
-                        break;
-                    case Gravity.CENTER_VERTICAL:
-                        childTop = parentTop + (parentBottom - parentTop - height) / 2 +
-                                lp.topMargin - lp.bottomMargin;
-                        break;
-                    case Gravity.BOTTOM:
-                        if (mLastKnownRound && ((lp.boxedEdges & LayoutParams.BOX_BOTTOM) != 0)) {
-                            paddingBottom = boxInset;
-                            childTop = bottom - top - height;
-                        } else {
-                            childTop = parentBottom - height - lp.bottomMargin;
-                        }
-                        break;
-                    default:
-                        childTop = parentTop + lp.topMargin;
+                    }
                 }
 
                 child.setPadding(paddingLeft, paddingTop, paddingRight, paddingBottom);
diff --git a/apps/CtsVerifier/src/com/android/cts/verifier/PassFailButtons.java b/apps/CtsVerifier/src/com/android/cts/verifier/PassFailButtons.java
index 444a250..ab119bd 100644
--- a/apps/CtsVerifier/src/com/android/cts/verifier/PassFailButtons.java
+++ b/apps/CtsVerifier/src/com/android/cts/verifier/PassFailButtons.java
@@ -32,6 +32,7 @@
 import android.view.View;
 import android.view.View.OnClickListener;
 import android.widget.ImageButton;
+import android.widget.Toast;
 
 /**
  * {@link Activity}s to handle clicks to the pass and fail buttons of the pass fail buttons layout.
@@ -242,8 +243,25 @@
             }
         };
 
-        activity.findViewById(R.id.pass_button).setOnClickListener(clickListener);
-        activity.findViewById(R.id.fail_button).setOnClickListener(clickListener);
+        View passButton = activity.findViewById(R.id.pass_button);
+        passButton.setOnClickListener(clickListener);
+        passButton.setOnLongClickListener(new View.OnLongClickListener() {
+            @Override
+            public boolean onLongClick(View view) {
+                Toast.makeText(activity, R.string.pass_button_text, Toast.LENGTH_SHORT).show();
+                return true;
+            }
+        });
+
+        View failButton = activity.findViewById(R.id.fail_button);
+        failButton.setOnClickListener(clickListener);
+        failButton.setOnLongClickListener(new View.OnLongClickListener() {
+            @Override
+            public boolean onLongClick(View view) {
+                Toast.makeText(activity, R.string.fail_button_text, Toast.LENGTH_SHORT).show();
+                return true;
+            }
+        });
     }
 
     private static void setInfo(final android.app.Activity activity, final int titleId,
@@ -257,6 +275,13 @@
                 showInfoDialog(activity, titleId, messageId, viewId);
             }
         });
+        infoButton.setOnLongClickListener(new View.OnLongClickListener() {
+            @Override
+            public boolean onLongClick(View view) {
+                Toast.makeText(activity, R.string.info_button_text, Toast.LENGTH_SHORT).show();
+                return true;
+            }
+        });
 
         // Show the info dialog if the user has never seen it before.
         if (!hasSeenInfoDialog(activity)) {
diff --git a/apps/CtsVerifier/src/com/android/cts/verifier/TestListActivity.java b/apps/CtsVerifier/src/com/android/cts/verifier/TestListActivity.java
index 43d300a..8cfc6df 100644
--- a/apps/CtsVerifier/src/com/android/cts/verifier/TestListActivity.java
+++ b/apps/CtsVerifier/src/com/android/cts/verifier/TestListActivity.java
@@ -23,19 +23,42 @@
 import android.view.Menu;
 import android.view.MenuInflater;
 import android.view.MenuItem;
+import android.view.View;
+import android.view.Window;
 import android.widget.Toast;
 
 import java.io.IOException;
 
 /** Top-level {@link ListActivity} for launching tests and managing results. */
-public class TestListActivity extends AbstractTestListActivity {
+public class TestListActivity extends AbstractTestListActivity implements View.OnClickListener {
 
     private static final String TAG = TestListActivity.class.getSimpleName();
 
     @Override
+    public void onClick (View v) {
+        handleMenuItemSelected(v.getId());
+    }
+
+    @Override
     protected void onCreate(Bundle savedInstanceState) {
         super.onCreate(savedInstanceState);
+
+        if (!isTaskRoot()) {
+            finish();
+        }
+
         setTitle(getString(R.string.title_version, Version.getVersionName(this)));
+
+        if (!getWindow().hasFeature(Window.FEATURE_ACTION_BAR)) {
+            View footer = getLayoutInflater().inflate(R.layout.test_list_footer, null);
+
+            footer.findViewById(R.id.clear).setOnClickListener(this);
+            footer.findViewById(R.id.view).setOnClickListener(this);
+            footer.findViewById(R.id.export).setOnClickListener(this);
+
+            getListView().addFooterView(footer);
+        }
+
         setTestListAdapter(new ManifestTestListAdapter(this, null));
     }
 
@@ -48,22 +71,7 @@
 
     @Override
     public boolean onOptionsItemSelected(MenuItem item) {
-        switch (item.getItemId()) {
-            case R.id.clear:
-                handleClearItemSelected();
-                return true;
-
-            case R.id.view:
-                handleViewItemSelected();
-                return true;
-
-            case R.id.export:
-                handleExportItemSelected();
-                return true;
-
-            default:
-                return super.onOptionsItemSelected(item);
-        }
+        return handleMenuItemSelected(item.getItemId()) ? true : super.onOptionsItemSelected(item);
     }
 
     private void handleClearItemSelected() {
@@ -86,4 +94,23 @@
     private void handleExportItemSelected() {
         new ReportExporter(this, mAdapter).execute();
     }
+
+    private boolean handleMenuItemSelected(int id) {
+        switch (id) {
+            case R.id.clear:
+                handleClearItemSelected();
+                return true;
+
+            case R.id.view:
+                handleViewItemSelected();
+                return true;
+
+            case R.id.export:
+                handleExportItemSelected();
+                return true;
+
+            default:
+                return false;
+        }
+    }
 }
diff --git a/apps/CtsVerifier/src/com/android/cts/verifier/bluetooth/BleClientConnectActivity.java b/apps/CtsVerifier/src/com/android/cts/verifier/bluetooth/BleClientConnectActivity.java
index fb351b1..4e1c268 100755
--- a/apps/CtsVerifier/src/com/android/cts/verifier/bluetooth/BleClientConnectActivity.java
+++ b/apps/CtsVerifier/src/com/android/cts/verifier/bluetooth/BleClientConnectActivity.java
@@ -45,22 +45,25 @@
                          R.string.ble_client_send_connect_info, -1);
         getPassButton().setEnabled(false);
 
-        mEditText = (EditText) findViewById(R.id.ble_address);
-
-        ((Button) findViewById(R.id.ble_connect)).setOnClickListener(new OnClickListener() {
+        ((Button) findViewById(R.id.ble_scan_start)).setOnClickListener(new OnClickListener() {
             @Override
             public void onClick(View v) {
-                String address = mEditText.getText().toString();
-                if (!BluetoothAdapter.checkBluetoothAddress(address)) {
-                    showMessage("Invalid bluetooth address.");
-                } else {
-                    Intent intent = new Intent(BleClientConnectActivity.this,
-                                               BleClientService.class);
-                    intent.putExtra(BleClientService.EXTRA_COMMAND,
-                                    BleClientService.COMMAND_CONNECT);
-                    intent.putExtra(BluetoothDevice.EXTRA_DEVICE, address);
-                    startService(intent);
-                }
+                Intent intent = new Intent(BleClientConnectActivity.this,
+                        BleClientService.class);
+                intent.putExtra(BleClientService.EXTRA_COMMAND,
+                        BleClientService.COMMAND_SCAN_START);
+                startService(intent);
+            }
+        });
+
+        ((Button) findViewById(R.id.ble_scan_stop)).setOnClickListener(new OnClickListener() {
+            @Override
+            public void onClick(View v) {
+                Intent intent = new Intent(BleClientConnectActivity.this,
+                        BleClientService.class);
+                intent.putExtra(BleClientService.EXTRA_COMMAND,
+                        BleClientService.COMMAND_SCAN_STOP);
+                startService(intent);
             }
         });
 
diff --git a/apps/CtsVerifier/src/com/android/cts/verifier/bluetooth/BleClientService.java b/apps/CtsVerifier/src/com/android/cts/verifier/bluetooth/BleClientService.java
index 556ad06..6765362 100644
--- a/apps/CtsVerifier/src/com/android/cts/verifier/bluetooth/BleClientService.java
+++ b/apps/CtsVerifier/src/com/android/cts/verifier/bluetooth/BleClientService.java
@@ -16,6 +16,7 @@
 
 package com.android.cts.verifier.bluetooth;
 
+import java.util.Arrays;
 import java.util.UUID;
 import java.util.List;
 
@@ -29,10 +30,16 @@
 import android.bluetooth.BluetoothGattService;
 import android.bluetooth.BluetoothManager;
 import android.bluetooth.BluetoothProfile;
+import android.bluetooth.le.BluetoothLeScanner;
+import android.bluetooth.le.ScanCallback;
+import android.bluetooth.le.ScanFilter;
+import android.bluetooth.le.ScanResult;
+import android.bluetooth.le.ScanSettings;
 import android.content.Context;
 import android.content.Intent;
 import android.os.Handler;
 import android.os.IBinder;
+import android.os.ParcelUuid;
 import android.util.Log;
 import android.widget.Toast;
 
@@ -53,6 +60,8 @@
     public static final int COMMAND_BEGIN_WRITE = 9;
     public static final int COMMAND_EXECUTE_WRITE = 10;
     public static final int COMMAND_ABORT_RELIABLE = 11;
+    public static final int COMMAND_SCAN_START = 12;
+    public static final int COMMAND_SCAN_STOP = 13;
 
     public static final String BLE_BLUETOOTH_CONNECTED =
             "com.android.cts.verifier.bluetooth.BLE_BLUETOOTH_CONNECTED";
@@ -102,6 +111,8 @@
     private BluetoothDevice mDevice;
     private BluetoothGatt mBluetoothGatt;
     private Handler mHandler;
+    private Context mContext;
+    private BluetoothLeScanner mScanner;
 
     @Override
     public void onCreate() {
@@ -110,6 +121,8 @@
         mBluetoothManager = (BluetoothManager) getSystemService(Context.BLUETOOTH_SERVICE);
         mBluetoothAdapter = mBluetoothManager.getAdapter();
         mHandler = new Handler();
+        mContext = this;
+        mScanner = mBluetoothAdapter.getBluetoothLeScanner();
     }
 
     @Override
@@ -128,6 +141,7 @@
         super.onDestroy();
         mBluetoothGatt.disconnect();
         mBluetoothGatt.close();
+        stopScan();
     }
 
     private void handleIntent(Intent intent) {
@@ -177,6 +191,12 @@
             case COMMAND_ABORT_RELIABLE:
                 if (mBluetoothGatt != null) mBluetoothGatt.abortReliableWrite(mDevice);
                 break;
+            case COMMAND_SCAN_START:
+                startScan();
+                break;
+            case COMMAND_SCAN_STOP:
+                stopScan();
+                break;
             default:
                 showMessage("Unrecognized command: " + command);
                 break;
@@ -343,8 +363,8 @@
         @Override
         public void onCharacteristicWrite(BluetoothGatt gatt,
                                           BluetoothGattCharacteristic characteristic, int status) {
-            if (DEBUG) Log.d(TAG, "onCharacteristicWrite: characteristic.val=" + characteristic.getStringValue(0)
-                                  + " status=" + status);
+            if (DEBUG) Log.d(TAG, "onCharacteristicWrite: characteristic.val="
+                    + characteristic.getStringValue(0) + " status=" + status);
             BluetoothGattCharacteristic mCharacteristic = getCharacteristic(CHARACTERISTIC_UUID);
             if ((status == BluetoothGatt.GATT_SUCCESS) &&
                 (characteristic.getStringValue(0).equals(mCharacteristic.getStringValue(0)))) {
@@ -387,4 +407,25 @@
             if (status == BluetoothGatt.GATT_SUCCESS) notifyReadRemoteRssi(rssi);
         }
     };
-}
\ No newline at end of file
+
+    private final ScanCallback mScanCallback = new ScanCallback() {
+        @Override
+        public void onScanResult(int callbackType, ScanResult result) {
+            mBluetoothGatt = result.getDevice().connectGatt(mContext, false, mGattCallbacks);
+        }
+    };
+
+    private void startScan() {
+        if (DEBUG) Log.d(TAG, "startScan");
+        List<ScanFilter> filter = Arrays.asList(new ScanFilter.Builder().setServiceUuid(
+                new ParcelUuid(BleServerService.ADV_SERVICE_UUID)).build());
+        ScanSettings setting = new ScanSettings.Builder()
+                .setScanMode(ScanSettings.SCAN_MODE_LOW_POWER).build();
+        mScanner.startScan(filter, setting, mScanCallback);
+    }
+
+    private void stopScan() {
+        if (DEBUG) Log.d(TAG, "stopScan");
+        mScanner.stopScan(mScanCallback);
+    }
+}
diff --git a/apps/CtsVerifier/src/com/android/cts/verifier/bluetooth/BleReadWriteActivity.java b/apps/CtsVerifier/src/com/android/cts/verifier/bluetooth/BleReadWriteActivity.java
index 22233ef..8041ce0 100644
--- a/apps/CtsVerifier/src/com/android/cts/verifier/bluetooth/BleReadWriteActivity.java
+++ b/apps/CtsVerifier/src/com/android/cts/verifier/bluetooth/BleReadWriteActivity.java
@@ -124,4 +124,4 @@
             }
         }
     };
-}
\ No newline at end of file
+}
diff --git a/apps/CtsVerifier/src/com/android/cts/verifier/bluetooth/BleReliableWriteActivity.java b/apps/CtsVerifier/src/com/android/cts/verifier/bluetooth/BleReliableWriteActivity.java
index c7460b5..9b65bb4 100644
--- a/apps/CtsVerifier/src/com/android/cts/verifier/bluetooth/BleReliableWriteActivity.java
+++ b/apps/CtsVerifier/src/com/android/cts/verifier/bluetooth/BleReliableWriteActivity.java
@@ -114,4 +114,4 @@
             }
         }
     };
-}
\ No newline at end of file
+}
diff --git a/apps/CtsVerifier/src/com/android/cts/verifier/bluetooth/BleServerService.java b/apps/CtsVerifier/src/com/android/cts/verifier/bluetooth/BleServerService.java
index 91b3a6c..8718f57 100644
--- a/apps/CtsVerifier/src/com/android/cts/verifier/bluetooth/BleServerService.java
+++ b/apps/CtsVerifier/src/com/android/cts/verifier/bluetooth/BleServerService.java
@@ -33,10 +33,15 @@
 import android.bluetooth.BluetoothGattService;
 import android.bluetooth.BluetoothManager;
 import android.bluetooth.BluetoothProfile;
+import android.bluetooth.le.AdvertiseCallback;
+import android.bluetooth.le.AdvertiseData;
+import android.bluetooth.le.AdvertiseSettings;
+import android.bluetooth.le.BluetoothLeAdvertiser;
 import android.content.Context;
 import android.content.Intent;
 import android.os.Handler;
 import android.os.IBinder;
+import android.os.ParcelUuid;
 import android.util.Log;
 import android.widget.Toast;
 
@@ -76,6 +81,8 @@
             UUID.fromString("00009997-0000-1000-8000-00805f9b34fb");
     private static final UUID DESCRIPTOR_UUID =
             UUID.fromString("00009996-0000-1000-8000-00805f9b34fb");
+    public static final UUID ADV_SERVICE_UUID=
+            UUID.fromString("00003333-0000-1000-8000-00805f9b34fb");
 
     private BluetoothManager mBluetoothManager;
     private BluetoothGattServer mGattServer;
@@ -84,12 +91,14 @@
     private Timer mNotificationTimer;
     private Handler mHandler;
     private String mReliableWriteValue;
+    private BluetoothLeAdvertiser mAdvertiser;
 
     @Override
     public void onCreate() {
         super.onCreate();
 
         mBluetoothManager = (BluetoothManager) getSystemService(Context.BLUETOOTH_SERVICE);
+        mAdvertiser = mBluetoothManager.getAdapter().getBluetoothLeAdvertiser();
         mGattServer = mBluetoothManager.openGattServer(this, mCallbacks);
         mService = createService();
         if (mGattServer != null) {
@@ -106,6 +115,7 @@
 
     @Override
     public int onStartCommand(Intent intent, int flags, int startId) {
+        startAdvertise();
         return START_NOT_STICKY;
     }
 
@@ -117,6 +127,7 @@
     @Override
     public void onDestroy() {
         super.onDestroy();
+        stopAdvertise();
         if (mGattServer == null) {
            return;
         }
@@ -366,5 +377,26 @@
             }
         }
     };
+
+    private void startAdvertise() {
+        if (DEBUG) Log.d(TAG, "startAdvertise");
+        AdvertiseData data = new AdvertiseData.Builder()
+            .addServiceData(new ParcelUuid(ADV_SERVICE_UUID), new byte[]{1,2,3})
+            .addServiceUuid(new ParcelUuid(ADV_SERVICE_UUID))
+            .build();
+        AdvertiseSettings setting = new AdvertiseSettings.Builder()
+            .setAdvertiseMode(AdvertiseSettings.ADVERTISE_MODE_LOW_LATENCY)
+            .setTxPowerLevel(AdvertiseSettings.ADVERTISE_TX_POWER_MEDIUM)
+            .setConnectable(true)
+            .build();
+        mAdvertiser.startAdvertising(setting, data, mAdvertiseCallback);
+    }
+
+    private void stopAdvertise() {
+        if (DEBUG) Log.d(TAG, "stopAdvertise");
+        mAdvertiser.stopAdvertising(mAdvertiseCallback);
+    }
+
+    private final AdvertiseCallback mAdvertiseCallback = new AdvertiseCallback(){};
 }
 
diff --git a/apps/CtsVerifier/src/com/android/cts/verifier/bluetooth/DevicePickerActivity.java b/apps/CtsVerifier/src/com/android/cts/verifier/bluetooth/DevicePickerActivity.java
index be71f66..a5dea4b 100644
--- a/apps/CtsVerifier/src/com/android/cts/verifier/bluetooth/DevicePickerActivity.java
+++ b/apps/CtsVerifier/src/com/android/cts/verifier/bluetooth/DevicePickerActivity.java
@@ -29,12 +29,12 @@
 import android.content.IntentFilter;
 import android.os.Bundle;
 import android.view.View;
-import android.view.Window;
 import android.view.View.OnClickListener;
 import android.widget.AdapterView;
 import android.widget.ArrayAdapter;
 import android.widget.Button;
 import android.widget.ListView;
+import android.widget.ProgressBar;
 import android.widget.TextView;
 import android.widget.AdapterView.OnItemClickListener;
 
@@ -61,12 +61,15 @@
 
     private TextView mEmptyNewView;
 
+    private ProgressBar mProgressBar;
+
     @Override
     protected void onCreate(Bundle savedInstanceState) {
         super.onCreate(savedInstanceState);
-        requestWindowFeature(Window.FEATURE_INDETERMINATE_PROGRESS);
         setContentView(R.layout.bt_device_picker);
 
+        mProgressBar = (ProgressBar) findViewById(R.id.bt_progress_bar);
+
         mPairedDevicesAdapter = new ArrayAdapter<Device>(this, R.layout.bt_device_name);
         ListView pairedDevicesListView = (ListView) findViewById(R.id.bt_paired_devices);
         pairedDevicesListView.setAdapter(mPairedDevicesAdapter);
@@ -182,10 +185,10 @@
         public void onReceive(Context context, Intent intent) {
             if (BluetoothAdapter.ACTION_DISCOVERY_STARTED.equals(intent.getAction())) {
                 mEmptyNewView.setText(R.string.bt_scanning);
-                setProgressBarIndeterminateVisibility(true);
+                mProgressBar.setVisibility(View.VISIBLE);
             } else if (BluetoothAdapter.ACTION_DISCOVERY_FINISHED.equals(intent.getAction())) {
                 mEmptyNewView.setText(R.string.bt_no_devices);
-                setProgressBarIndeterminateVisibility(false);
+                mProgressBar.setVisibility(View.INVISIBLE);
             } else if (BluetoothDevice.ACTION_FOUND.equals(intent.getAction())) {
                 BluetoothDevice device = intent.getParcelableExtra(BluetoothDevice.EXTRA_DEVICE);
                 if (device.getBondState() != BluetoothDevice.BOND_BONDED) {
diff --git a/apps/CtsVerifier/src/com/android/cts/verifier/bluetooth/MessageTestActivity.java b/apps/CtsVerifier/src/com/android/cts/verifier/bluetooth/MessageTestActivity.java
index 2c6324b..4e0b78f 100644
--- a/apps/CtsVerifier/src/com/android/cts/verifier/bluetooth/MessageTestActivity.java
+++ b/apps/CtsVerifier/src/com/android/cts/verifier/bluetooth/MessageTestActivity.java
@@ -33,10 +33,10 @@
 import android.os.Message;
 import android.view.View;
 import android.view.View.OnClickListener;
-import android.view.Window;
 import android.widget.ArrayAdapter;
 import android.widget.Button;
 import android.widget.ListView;
+import android.widget.ProgressBar;
 import android.widget.TextView;
 import android.widget.Toast;
 
@@ -71,6 +71,8 @@
 
     private AlertDialog mInstructionsDialog;
 
+    private ProgressBar mProgressBar;
+
     private String mDeviceAddress;
 
     private final boolean mSecure;
@@ -89,10 +91,11 @@
     @Override
     protected void onCreate(Bundle savedInstanceState) {
         super.onCreate(savedInstanceState);
-        requestWindowFeature(Window.FEATURE_INDETERMINATE_PROGRESS);
         setContentView(R.layout.bt_messages);
         setPassFailButtonClickListeners();
 
+        mProgressBar = (ProgressBar) findViewById(R.id.bt_progress_bar);
+
         if (mServer) {
             setTitle(mSecure ? R.string.bt_secure_server : R.string.bt_insecure_server);
         } else {
@@ -217,18 +220,18 @@
         switch (state) {
             case BluetoothChatService.STATE_LISTEN:
                 setEmptyViewText(R.string.bt_waiting);
-                setProgressBarIndeterminateVisibility(true);
+                mProgressBar.setVisibility(View.VISIBLE);
                 showInstructionsDialog();
                 break;
 
             case BluetoothChatService.STATE_CONNECTING:
                 setEmptyViewText(R.string.bt_connecting);
-                setProgressBarIndeterminateVisibility(true);
+                mProgressBar.setVisibility(View.VISIBLE);
                 break;
 
             case BluetoothChatService.STATE_CONNECTED:
                 setEmptyViewText(R.string.bt_no_messages);
-                setProgressBarIndeterminateVisibility(false);
+                mProgressBar.setVisibility(View.INVISIBLE);
 
                 hideInstructionsDialog();
                 sendInitialMessageFromClient();
@@ -236,7 +239,7 @@
 
             case BluetoothChatService.STATE_NONE:
                 setEmptyViewText(R.string.bt_no_messages);
-                setProgressBarIndeterminateVisibility(false);
+                mProgressBar.setVisibility(View.INVISIBLE);
                 break;
         }
     }
diff --git a/apps/CtsVerifier/src/com/android/cts/verifier/camera/its/ItsException.java b/apps/CtsVerifier/src/com/android/cts/verifier/camera/its/ItsException.java
new file mode 100644
index 0000000..d390bb1
--- /dev/null
+++ b/apps/CtsVerifier/src/com/android/cts/verifier/camera/its/ItsException.java
@@ -0,0 +1,34 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.cts.verifier.camera.its;
+
+/**
+ * All exceptions are converted to ItsExceptions.
+ */
+class ItsException extends Exception {
+    public ItsException(Throwable cause) {
+        super(cause);
+    }
+
+    public ItsException(String message, Throwable cause) {
+        super(message, cause);
+    }
+
+    public ItsException(String message) {
+        super(message);
+    }
+}
diff --git a/apps/CtsVerifier/src/com/android/cts/verifier/camera/its/ItsSerializer.java b/apps/CtsVerifier/src/com/android/cts/verifier/camera/its/ItsSerializer.java
new file mode 100644
index 0000000..cf8365a
--- /dev/null
+++ b/apps/CtsVerifier/src/com/android/cts/verifier/camera/its/ItsSerializer.java
@@ -0,0 +1,714 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.cts.verifier.camera.its;
+
+import android.graphics.Point;
+import android.graphics.Rect;
+import android.hardware.camera2.CameraCharacteristics;
+import android.hardware.camera2.CameraDevice;
+import android.hardware.camera2.CameraMetadata;
+import android.hardware.camera2.CaptureResult;
+import android.hardware.camera2.CaptureRequest;
+import android.hardware.camera2.TotalCaptureResult;
+import android.hardware.camera2.params.BlackLevelPattern;
+import android.hardware.camera2.params.ColorSpaceTransform;
+import android.hardware.camera2.params.Face;
+import android.hardware.camera2.params.LensShadingMap;
+import android.hardware.camera2.params.MeteringRectangle;
+import android.hardware.camera2.params.RggbChannelVector;
+import android.hardware.camera2.params.StreamConfigurationMap;
+import android.hardware.camera2.params.TonemapCurve;
+import android.location.Location;
+import android.util.Log;
+import android.util.Pair;
+import android.util.Rational;
+import android.util.Size;
+import android.util.SizeF;
+import android.util.Range;
+
+import org.json.JSONArray;
+import org.json.JSONObject;
+
+import java.lang.reflect.Array;
+import java.lang.reflect.Field;
+import java.lang.reflect.GenericArrayType;
+import java.lang.reflect.Modifier;
+import java.lang.reflect.ParameterizedType;
+import java.lang.reflect.Type;
+import java.util.Arrays;
+import java.util.LinkedList;
+import java.util.List;
+
+/**
+ * Class to deal with serializing and deserializing between JSON and Camera2 objects.
+ */
+public class ItsSerializer {
+    public static final String TAG = ItsSerializer.class.getSimpleName();
+
+    private static class MetadataEntry {
+        public MetadataEntry(String k, Object v) {
+            key = k;
+            value = v;
+        }
+        public String key;
+        public Object value;
+    }
+
+    @SuppressWarnings("unchecked")
+    private static Object serializeRational(Rational rat) throws org.json.JSONException {
+        JSONObject ratObj = new JSONObject();
+        ratObj.put("numerator", rat.getNumerator());
+        ratObj.put("denominator", rat.getDenominator());
+        return ratObj;
+    }
+
+    @SuppressWarnings("unchecked")
+    private static Object serializeSize(Size size) throws org.json.JSONException {
+        JSONObject sizeObj = new JSONObject();
+        sizeObj.put("width", size.getWidth());
+        sizeObj.put("height", size.getHeight());
+        return sizeObj;
+    }
+
+    @SuppressWarnings("unchecked")
+    private static Object serializeSizeF(SizeF size) throws org.json.JSONException {
+        JSONObject sizeObj = new JSONObject();
+        sizeObj.put("width", size.getWidth());
+        sizeObj.put("height", size.getHeight());
+        return sizeObj;
+    }
+
+    @SuppressWarnings("unchecked")
+    private static Object serializeRect(Rect rect) throws org.json.JSONException {
+        JSONObject rectObj = new JSONObject();
+        rectObj.put("left", rect.left);
+        rectObj.put("right", rect.right);
+        rectObj.put("top", rect.top);
+        rectObj.put("bottom", rect.bottom);
+        return rectObj;
+    }
+
+    private static Object serializePoint(Point point) throws org.json.JSONException {
+        JSONObject pointObj = new JSONObject();
+        pointObj.put("x", point.x);
+        pointObj.put("y", point.y);
+        return pointObj;
+    }
+
+    @SuppressWarnings("unchecked")
+    private static Object serializeFace(Face face)
+            throws org.json.JSONException {
+        JSONObject faceObj = new JSONObject();
+        faceObj.put("bounds", serializeRect(face.getBounds()));
+        faceObj.put("score", face.getScore());
+        faceObj.put("id", face.getId());
+        faceObj.put("leftEye", serializePoint(face.getLeftEyePosition()));
+        faceObj.put("rightEye", serializePoint(face.getRightEyePosition()));
+        faceObj.put("mouth", serializePoint(face.getMouthPosition()));
+        return faceObj;
+    }
+
+    @SuppressWarnings("unchecked")
+    private static Object serializeStreamConfigurationMap(
+            StreamConfigurationMap map)
+            throws org.json.JSONException {
+        // TODO: Serialize the rest of the StreamConfigurationMap fields.
+        JSONObject mapObj = new JSONObject();
+        JSONArray cfgArray = new JSONArray();
+        int fmts[] = map.getOutputFormats();
+        if (fmts != null) {
+            for (int fi = 0; fi < Array.getLength(fmts); fi++) {
+                Size sizes[] = map.getOutputSizes(fmts[fi]);
+                if (sizes != null) {
+                    for (int si = 0; si < Array.getLength(sizes); si++) {
+                        JSONObject obj = new JSONObject();
+                        obj.put("format", fmts[fi]);
+                        obj.put("width",sizes[si].getWidth());
+                        obj.put("height", sizes[si].getHeight());
+                        obj.put("input", false);
+                        obj.put("minFrameDuration",
+                                map.getOutputMinFrameDuration(fmts[fi],sizes[si]));
+                        cfgArray.put(obj);
+                    }
+                }
+            }
+        }
+        mapObj.put("availableStreamConfigurations", cfgArray);
+        return mapObj;
+    }
+
+    @SuppressWarnings("unchecked")
+    private static Object serializeMeteringRectangle(MeteringRectangle rect)
+            throws org.json.JSONException {
+        JSONObject rectObj = new JSONObject();
+        rectObj.put("x", rect.getX());
+        rectObj.put("y", rect.getY());
+        rectObj.put("width", rect.getWidth());
+        rectObj.put("height", rect.getHeight());
+        rectObj.put("weight", rect.getMeteringWeight());
+        return rectObj;
+    }
+
+    @SuppressWarnings("unchecked")
+    private static Object serializePair(Pair pair)
+            throws org.json.JSONException {
+        JSONArray pairObj = new JSONArray();
+        pairObj.put(pair.first);
+        pairObj.put(pair.second);
+        return pairObj;
+    }
+
+    @SuppressWarnings("unchecked")
+    private static Object serializeRange(Range range)
+            throws org.json.JSONException {
+        JSONArray rangeObj = new JSONArray();
+        rangeObj.put(range.getLower());
+        rangeObj.put(range.getUpper());
+        return rangeObj;
+    }
+
+    @SuppressWarnings("unchecked")
+    private static Object serializeColorSpaceTransform(ColorSpaceTransform xform)
+            throws org.json.JSONException {
+        JSONArray xformObj = new JSONArray();
+        for (int row = 0; row < 3; row++) {
+            for (int col = 0; col < 3; col++) {
+                xformObj.put(serializeRational(xform.getElement(col,row)));
+            }
+        }
+        return xformObj;
+    }
+
+    @SuppressWarnings("unchecked")
+    private static Object serializeTonemapCurve(TonemapCurve curve)
+            throws org.json.JSONException {
+        JSONObject curveObj = new JSONObject();
+        String names[] = {"red", "green", "blue"};
+        for (int ch = 0; ch < 3; ch++) {
+            JSONArray curveArr = new JSONArray();
+            int len = curve.getPointCount(ch);
+            for (int i = 0; i < len; i++) {
+                curveArr.put(curve.getPoint(ch,i).x);
+                curveArr.put(curve.getPoint(ch,i).y);
+            }
+            curveObj.put(names[ch], curveArr);
+        }
+        return curveObj;
+    }
+
+    @SuppressWarnings("unchecked")
+    private static Object serializeRggbChannelVector(RggbChannelVector vec)
+            throws org.json.JSONException {
+        JSONArray vecObj = new JSONArray();
+        vecObj.put(vec.getRed());
+        vecObj.put(vec.getGreenEven());
+        vecObj.put(vec.getGreenOdd());
+        vecObj.put(vec.getBlue());
+        return vecObj;
+    }
+
+    @SuppressWarnings("unchecked")
+    private static Object serializeBlackLevelPattern(BlackLevelPattern pat)
+            throws org.json.JSONException {
+        int patVals[] = new int[4];
+        pat.copyTo(patVals, 0);
+        JSONArray patObj = new JSONArray();
+        patObj.put(patVals[0]);
+        patObj.put(patVals[1]);
+        patObj.put(patVals[2]);
+        patObj.put(patVals[3]);
+        return patObj;
+    }
+
+    @SuppressWarnings("unchecked")
+    private static Object serializeLocation(Location loc)
+            throws org.json.JSONException {
+        return loc.toString();
+    }
+
+    @SuppressWarnings("unchecked")
+    private static Object serializeLensShadingMap(LensShadingMap map)
+            throws org.json.JSONException {
+        JSONArray mapObj = new JSONArray();
+        for (int row = 0; row < map.getRowCount(); row++) {
+            for (int col = 0; col < map.getColumnCount(); col++) {
+                for (int ch = 0; ch < 4; ch++) {
+                    mapObj.put(map.getGainFactor(ch, col, row));
+                }
+            }
+        }
+        return mapObj;
+    }
+
+    private static String getKeyName(Object keyObj) throws ItsException {
+        if (keyObj.getClass() == CaptureResult.Key.class
+                || keyObj.getClass() == TotalCaptureResult.class) {
+            return ((CaptureResult.Key)keyObj).getName();
+        } else if (keyObj.getClass() == CaptureRequest.Key.class) {
+            return ((CaptureRequest.Key)keyObj).getName();
+        } else if (keyObj.getClass() == CameraCharacteristics.Key.class) {
+            return ((CameraCharacteristics.Key)keyObj).getName();
+        }
+        throw new ItsException("Invalid key object");
+    }
+
+    private static Object getKeyValue(CameraMetadata md, Object keyObj) throws ItsException {
+        if (md.getClass() == CaptureResult.class || md.getClass() == TotalCaptureResult.class) {
+            return ((CaptureResult)md).get((CaptureResult.Key)keyObj);
+        } else if (md.getClass() == CaptureRequest.class) {
+            return ((CaptureRequest)md).get((CaptureRequest.Key)keyObj);
+        } else if (md.getClass() == CameraCharacteristics.class) {
+            return ((CameraCharacteristics)md).get((CameraCharacteristics.Key)keyObj);
+        }
+        throw new ItsException("Invalid key object");
+    }
+
+    @SuppressWarnings("unchecked")
+    private static MetadataEntry serializeEntry(Type keyType, Object keyObj, CameraMetadata md)
+            throws ItsException {
+        String keyName = getKeyName(keyObj);
+
+        try {
+            Object keyValue = getKeyValue(md, keyObj);
+            if (keyValue == null) {
+                return new MetadataEntry(keyName, JSONObject.NULL);
+            } else if (keyType == Float.class) {
+                // The JSON serializer doesn't handle floating point NaN or Inf.
+                if (((Float)keyValue).isInfinite() || ((Float)keyValue).isNaN()) {
+                    Logt.w(TAG, "Inf/NaN floating point value serialized: " + keyName);
+                    return null;
+                }
+                return new MetadataEntry(keyName, keyValue);
+            } else if (keyType == Integer.class || keyType == Long.class || keyType == Byte.class ||
+                       keyType == Boolean.class || keyType == String.class) {
+                return new MetadataEntry(keyName, keyValue);
+            } else if (keyType == Rational.class) {
+                return new MetadataEntry(keyName, serializeRational((Rational)keyValue));
+            } else if (keyType == Size.class) {
+                return new MetadataEntry(keyName, serializeSize((Size)keyValue));
+            } else if (keyType == SizeF.class) {
+                return new MetadataEntry(keyName, serializeSizeF((SizeF)keyValue));
+            } else if (keyType == Rect.class) {
+                return new MetadataEntry(keyName, serializeRect((Rect)keyValue));
+            } else if (keyType == Face.class) {
+                return new MetadataEntry(keyName, serializeFace((Face)keyValue));
+            } else if (keyType == StreamConfigurationMap.class) {
+                return new MetadataEntry(keyName,
+                        serializeStreamConfigurationMap((StreamConfigurationMap)keyValue));
+            } else if (keyType instanceof ParameterizedType &&
+                    ((ParameterizedType)keyType).getRawType() == Range.class) {
+                return new MetadataEntry(keyName, serializeRange((Range)keyValue));
+            } else if (keyType == ColorSpaceTransform.class) {
+                return new MetadataEntry(keyName,
+                        serializeColorSpaceTransform((ColorSpaceTransform)keyValue));
+            } else if (keyType == MeteringRectangle.class) {
+                return new MetadataEntry(keyName,
+                        serializeMeteringRectangle((MeteringRectangle)keyValue));
+            } else if (keyType == Location.class) {
+                return new MetadataEntry(keyName,
+                        serializeLocation((Location)keyValue));
+            } else if (keyType == RggbChannelVector.class) {
+                return new MetadataEntry(keyName,
+                        serializeRggbChannelVector((RggbChannelVector)keyValue));
+            } else if (keyType == BlackLevelPattern.class) {
+                return new MetadataEntry(keyName,
+                        serializeBlackLevelPattern((BlackLevelPattern)keyValue));
+            } else if (keyType == TonemapCurve.class) {
+                return new MetadataEntry(keyName,
+                        serializeTonemapCurve((TonemapCurve)keyValue));
+            } else if (keyType == Point.class) {
+                return new MetadataEntry(keyName,
+                        serializePoint((Point)keyValue));
+            } else if (keyType == LensShadingMap.class) {
+                return new MetadataEntry(keyName,
+                        serializeLensShadingMap((LensShadingMap)keyValue));
+            } else {
+                Logt.w(TAG, String.format("Serializing unsupported key type: " + keyType));
+                return null;
+            }
+        } catch (org.json.JSONException e) {
+            throw new ItsException("JSON error for key: " + keyName + ": ", e);
+        }
+    }
+
+    @SuppressWarnings("unchecked")
+    private static MetadataEntry serializeArrayEntry(Type keyType, Object keyObj, CameraMetadata md)
+            throws ItsException {
+        String keyName = getKeyName(keyObj);
+        try {
+            Object keyValue = getKeyValue(md, keyObj);
+            if (keyValue == null) {
+                return new MetadataEntry(keyName, JSONObject.NULL);
+            }
+            int arrayLen = Array.getLength(keyValue);
+            Type elmtType = ((GenericArrayType)keyType).getGenericComponentType();
+            if (elmtType == int.class  || elmtType == float.class || elmtType == byte.class ||
+                elmtType == long.class || elmtType == double.class || elmtType == boolean.class) {
+                return new MetadataEntry(keyName, new JSONArray(keyValue));
+            } else if (elmtType == Rational.class) {
+                JSONArray jsonArray = new JSONArray();
+                for (int i = 0; i < arrayLen; i++) {
+                    jsonArray.put(serializeRational((Rational)Array.get(keyValue,i)));
+                }
+                return new MetadataEntry(keyName, jsonArray);
+            } else if (elmtType == Size.class) {
+                JSONArray jsonArray = new JSONArray();
+                for (int i = 0; i < arrayLen; i++) {
+                    jsonArray.put(serializeSize((Size)Array.get(keyValue,i)));
+                }
+                return new MetadataEntry(keyName, jsonArray);
+            } else if (elmtType == Rect.class) {
+                JSONArray jsonArray = new JSONArray();
+                for (int i = 0; i < arrayLen; i++) {
+                    jsonArray.put(serializeRect((Rect)Array.get(keyValue,i)));
+                }
+                return new MetadataEntry(keyName, jsonArray);
+            } else if (elmtType == Face.class) {
+                JSONArray jsonArray = new JSONArray();
+                for (int i = 0; i < arrayLen; i++) {
+                    jsonArray.put(serializeFace((Face)Array.get(keyValue, i)));
+                }
+                return new MetadataEntry(keyName, jsonArray);
+            } else if (elmtType == StreamConfigurationMap.class) {
+                JSONArray jsonArray = new JSONArray();
+                for (int i = 0; i < arrayLen; i++) {
+                    jsonArray.put(serializeStreamConfigurationMap(
+                            (StreamConfigurationMap)Array.get(keyValue,i)));
+                }
+                return new MetadataEntry(keyName, jsonArray);
+            } else if (elmtType instanceof ParameterizedType &&
+                    ((ParameterizedType)elmtType).getRawType() == Range.class) {
+                JSONArray jsonArray = new JSONArray();
+                for (int i = 0; i < arrayLen; i++) {
+                    jsonArray.put(serializeRange((Range)Array.get(keyValue,i)));
+                }
+                return new MetadataEntry(keyName, jsonArray);
+            } else if (elmtType instanceof ParameterizedType &&
+                    ((ParameterizedType)elmtType).getRawType() == Pair.class) {
+                JSONArray jsonArray = new JSONArray();
+                for (int i = 0; i < arrayLen; i++) {
+                    jsonArray.put(serializePair((Pair)Array.get(keyValue,i)));
+                }
+                return new MetadataEntry(keyName, jsonArray);
+            } else if (elmtType == MeteringRectangle.class) {
+                JSONArray jsonArray = new JSONArray();
+                for (int i = 0; i < arrayLen; i++) {
+                    jsonArray.put(serializeMeteringRectangle(
+                            (MeteringRectangle)Array.get(keyValue,i)));
+                }
+                return new MetadataEntry(keyName, jsonArray);
+            } else if (elmtType == Location.class) {
+                JSONArray jsonArray = new JSONArray();
+                for (int i = 0; i < arrayLen; i++) {
+                    jsonArray.put(serializeLocation((Location)Array.get(keyValue,i)));
+                }
+                return new MetadataEntry(keyName, jsonArray);
+            } else if (elmtType == RggbChannelVector.class) {
+                JSONArray jsonArray = new JSONArray();
+                for (int i = 0; i < arrayLen; i++) {
+                    jsonArray.put(serializeRggbChannelVector(
+                            (RggbChannelVector)Array.get(keyValue,i)));
+                }
+                return new MetadataEntry(keyName, jsonArray);
+            } else if (elmtType == BlackLevelPattern.class) {
+                JSONArray jsonArray = new JSONArray();
+                for (int i = 0; i < arrayLen; i++) {
+                    jsonArray.put(serializeBlackLevelPattern(
+                            (BlackLevelPattern)Array.get(keyValue,i)));
+                }
+                return new MetadataEntry(keyName, jsonArray);
+            } else if (elmtType == Point.class) {
+                JSONArray jsonArray = new JSONArray();
+                for (int i = 0; i < arrayLen; i++) {
+                    jsonArray.put(serializePoint((Point)Array.get(keyValue,i)));
+                }
+                return new MetadataEntry(keyName, jsonArray);
+            } else {
+                Logt.w(TAG, String.format("Serializing unsupported array type: " + elmtType));
+                return null;
+            }
+        } catch (org.json.JSONException e) {
+            throw new ItsException("JSON error for key: " + keyName + ": ", e);
+        }
+    }
+
+    @SuppressWarnings("unchecked")
+    public static JSONObject serialize(CameraMetadata md)
+            throws ItsException {
+        JSONObject jsonObj = new JSONObject();
+        Field[] allFields = md.getClass().getDeclaredFields();
+        if (md.getClass() == TotalCaptureResult.class) {
+            allFields = CaptureResult.class.getDeclaredFields();
+        }
+        for (Field field : allFields) {
+            if (Modifier.isPublic(field.getModifiers()) &&
+                    Modifier.isStatic(field.getModifiers()) &&
+                    (field.getType() == CaptureRequest.Key.class
+                      || field.getType() == CaptureResult.Key.class
+                      || field.getType() == TotalCaptureResult.Key.class
+                      || field.getType() == CameraCharacteristics.Key.class) &&
+                    field.getGenericType() instanceof ParameterizedType) {
+                ParameterizedType paramType = (ParameterizedType)field.getGenericType();
+                Type[] argTypes = paramType.getActualTypeArguments();
+                if (argTypes.length > 0) {
+                    try {
+                        Type keyType = argTypes[0];
+                        Object keyObj = field.get(md);
+                        MetadataEntry entry;
+                        if (keyType instanceof GenericArrayType) {
+                            entry = serializeArrayEntry(keyType, keyObj, md);
+                        } else {
+                            entry = serializeEntry(keyType, keyObj, md);
+                        }
+
+                        // TODO: Figure this weird case out.
+                        // There is a weird case where the entry is non-null but the toString
+                        // of the entry is null, and if this happens, the null-ness spreads like
+                        // a virus and makes the whole JSON object null from the top level down.
+                        // Not sure if it's a bug in the library or I'm just not using it right.
+                        // Workaround by checking for this case explicitly and not adding the
+                        // value to the jsonObj when it is detected.
+                        if (entry != null && entry.key != null && entry.value != null
+                                          && entry.value.toString() == null) {
+                            Logt.w(TAG, "Error encountered serializing value for key: " + entry.key);
+                        } else if (entry != null) {
+                            jsonObj.put(entry.key, entry.value);
+                        } else {
+                            // Ignore.
+                        }
+                    } catch (IllegalAccessException e) {
+                        throw new ItsException(
+                                "Access error for field: " + field + ": ", e);
+                    } catch (org.json.JSONException e) {
+                        throw new ItsException(
+                                "JSON error for field: " + field + ": ", e);
+                    }
+                }
+            }
+        }
+        return jsonObj;
+    }
+
+    @SuppressWarnings("unchecked")
+    public static CaptureRequest.Builder deserialize(CaptureRequest.Builder mdDefault,
+            JSONObject jsonReq) throws ItsException {
+        try {
+            Logt.i(TAG, "Parsing JSON capture request ...");
+
+            // Iterate over the CaptureRequest reflected fields.
+            CaptureRequest.Builder md = mdDefault;
+            Field[] allFields = CaptureRequest.class.getDeclaredFields();
+            for (Field field : allFields) {
+                if (Modifier.isPublic(field.getModifiers()) &&
+                        Modifier.isStatic(field.getModifiers()) &&
+                        field.getType() == CaptureRequest.Key.class &&
+                        field.getGenericType() instanceof ParameterizedType) {
+                    ParameterizedType paramType = (ParameterizedType)field.getGenericType();
+                    Type[] argTypes = paramType.getActualTypeArguments();
+                    if (argTypes.length > 0) {
+                        CaptureRequest.Key key = (CaptureRequest.Key)field.get(md);
+                        String keyName = key.getName();
+                        Type keyType = argTypes[0];
+
+                        // For each reflected CaptureRequest entry, look inside the JSON object
+                        // to see if it is being set. If it is found, remove the key from the
+                        // JSON object. After this process, there should be no keys left in the
+                        // JSON (otherwise an invalid key was specified).
+
+                        if (jsonReq.has(keyName) && !jsonReq.isNull(keyName)) {
+                            if (keyType instanceof GenericArrayType) {
+                                Type elmtType =
+                                        ((GenericArrayType)keyType).getGenericComponentType();
+                                JSONArray ja = jsonReq.getJSONArray(keyName);
+                                Object val[] = new Object[ja.length()];
+                                for (int i = 0; i < ja.length(); i++) {
+                                    if (elmtType == int.class) {
+                                        Array.set(val, i, ja.getInt(i));
+                                    } else if (elmtType == byte.class) {
+                                        Array.set(val, i, (byte)ja.getInt(i));
+                                    } else if (elmtType == float.class) {
+                                        Array.set(val, i, (float)ja.getDouble(i));
+                                    } else if (elmtType == long.class) {
+                                        Array.set(val, i, ja.getLong(i));
+                                    } else if (elmtType == double.class) {
+                                        Array.set(val, i, ja.getDouble(i));
+                                    } else if (elmtType == boolean.class) {
+                                        Array.set(val, i, ja.getBoolean(i));
+                                    } else if (elmtType == String.class) {
+                                        Array.set(val, i, ja.getString(i));
+                                    } else if (elmtType == Size.class){
+                                        JSONObject obj = ja.getJSONObject(i);
+                                        Array.set(val, i, new Size(
+                                                obj.getInt("width"), obj.getInt("height")));
+                                    } else if (elmtType == Rect.class) {
+                                        JSONObject obj = ja.getJSONObject(i);
+                                        Array.set(val, i, new Rect(
+                                                obj.getInt("left"), obj.getInt("top"),
+                                                obj.getInt("bottom"), obj.getInt("right")));
+                                    } else if (elmtType == Rational.class) {
+                                        JSONObject obj = ja.getJSONObject(i);
+                                        Array.set(val, i, new Rational(
+                                                obj.getInt("numerator"),
+                                                obj.getInt("denominator")));
+                                    } else if (elmtType == RggbChannelVector.class) {
+                                        JSONArray arr = ja.getJSONArray(i);
+                                        Array.set(val, i, new RggbChannelVector(
+                                                (float)arr.getDouble(0),
+                                                (float)arr.getDouble(1),
+                                                (float)arr.getDouble(2),
+                                                (float)arr.getDouble(3)));
+                                    } else if (elmtType == ColorSpaceTransform.class) {
+                                        JSONArray arr = ja.getJSONArray(i);
+                                        Rational xform[] = new Rational[9];
+                                        for (int j = 0; j < 9; j++) {
+                                            xform[j] = new Rational(
+                                                    arr.getJSONObject(j).getInt("numerator"),
+                                                    arr.getJSONObject(j).getInt("denominator"));
+                                        }
+                                        Array.set(val, i, new ColorSpaceTransform(xform));
+                                    } else if (elmtType == MeteringRectangle.class) {
+                                        JSONObject obj = ja.getJSONObject(i);
+                                        Array.set(val, i, new MeteringRectangle(
+                                                obj.getInt("x"),
+                                                obj.getInt("y"),
+                                                obj.getInt("width"),
+                                                obj.getInt("height"),
+                                                obj.getInt("weight")));
+                                    } else {
+                                        throw new ItsException(
+                                                "Failed to parse key from JSON: " + keyName);
+                                    }
+                                }
+                                if (val != null) {
+                                    Logt.i(TAG, "Set: "+keyName+" -> "+Arrays.toString(val));
+                                    md.set(key, val);
+                                    jsonReq.remove(keyName);
+                                }
+                            } else {
+                                Object val = null;
+                                if (keyType == Integer.class) {
+                                    val = jsonReq.getInt(keyName);
+                                } else if (keyType == Byte.class) {
+                                    val = (byte)jsonReq.getInt(keyName);
+                                } else if (keyType == Double.class) {
+                                    val = jsonReq.getDouble(keyName);
+                                } else if (keyType == Long.class) {
+                                    val = jsonReq.getLong(keyName);
+                                } else if (keyType == Float.class) {
+                                    val = (float)jsonReq.getDouble(keyName);
+                                } else if (keyType == Boolean.class) {
+                                    val = jsonReq.getBoolean(keyName);
+                                } else if (keyType == String.class) {
+                                    val = jsonReq.getString(keyName);
+                                } else if (keyType == Size.class) {
+                                    JSONObject obj = jsonReq.getJSONObject(keyName);
+                                    val = new Size(
+                                            obj.getInt("width"), obj.getInt("height"));
+                                } else if (keyType == Rect.class) {
+                                    JSONObject obj = jsonReq.getJSONObject(keyName);
+                                    val = new Rect(
+                                            obj.getInt("left"), obj.getInt("top"),
+                                            obj.getInt("right"), obj.getInt("bottom"));
+                                } else if (keyType == Rational.class) {
+                                    JSONObject obj = jsonReq.getJSONObject(keyName);
+                                    val = new Rational(obj.getInt("numerator"),
+                                                       obj.getInt("denominator"));
+                                } else if (keyType == RggbChannelVector.class) {
+                                    JSONObject obj = jsonReq.optJSONObject(keyName);
+                                    JSONArray arr = jsonReq.optJSONArray(keyName);
+                                    if (arr != null) {
+                                        val = new RggbChannelVector(
+                                                (float)arr.getDouble(0),
+                                                (float)arr.getDouble(1),
+                                                (float)arr.getDouble(2),
+                                                (float)arr.getDouble(3));
+                                    } else if (obj != null) {
+                                        val = new RggbChannelVector(
+                                                (float)obj.getDouble("red"),
+                                                (float)obj.getDouble("greenEven"),
+                                                (float)obj.getDouble("greenOdd"),
+                                                (float)obj.getDouble("blue"));
+                                    } else {
+                                        throw new ItsException("Invalid RggbChannelVector object");
+                                    }
+                                } else if (keyType == ColorSpaceTransform.class) {
+                                    JSONArray arr = jsonReq.getJSONArray(keyName);
+                                    Rational a[] = new Rational[9];
+                                    for (int i = 0; i < 9; i++) {
+                                        a[i] = new Rational(
+                                                arr.getJSONObject(i).getInt("numerator"),
+                                                arr.getJSONObject(i).getInt("denominator"));
+                                    }
+                                    val = new ColorSpaceTransform(a);
+                                } else if (keyType instanceof ParameterizedType &&
+                                        ((ParameterizedType)keyType).getRawType() == Range.class &&
+                                        ((ParameterizedType)keyType).getActualTypeArguments().length == 1 &&
+                                        ((ParameterizedType)keyType).getActualTypeArguments()[0] == Integer.class) {
+                                    JSONArray arr = jsonReq.getJSONArray(keyName);
+                                    val = new Range<Integer>(arr.getInt(0), arr.getInt(1));
+                                } else {
+                                    throw new ItsException(
+                                            "Failed to parse key from JSON: " +
+                                            keyName + ", " + keyType);
+                                }
+                                if (val != null) {
+                                    Logt.i(TAG, "Set: " + keyName + " -> " + val);
+                                    md.set(key ,val);
+                                    jsonReq.remove(keyName);
+                                }
+                            }
+                        }
+                    }
+                }
+            }
+
+            // Ensure that there were no invalid keys in the JSON request object.
+            if (jsonReq.length() != 0) {
+                throw new ItsException("Invalid JSON key(s): " + jsonReq.toString());
+            }
+
+            Logt.i(TAG, "Parsing JSON capture request completed");
+            return md;
+        } catch (java.lang.IllegalAccessException e) {
+            throw new ItsException("Access error: ", e);
+        } catch (org.json.JSONException e) {
+            throw new ItsException("JSON error: ", e);
+        }
+    }
+
+    @SuppressWarnings("unchecked")
+    public static List<CaptureRequest.Builder> deserializeRequestList(
+            CameraDevice device, JSONObject jsonObjTop)
+            throws ItsException {
+        try {
+            List<CaptureRequest.Builder> requests = null;
+            JSONArray jsonReqs = jsonObjTop.getJSONArray("captureRequests");
+            requests = new LinkedList<CaptureRequest.Builder>();
+            for (int i = 0; i < jsonReqs.length(); i++) {
+                CaptureRequest.Builder templateReq = device.createCaptureRequest(
+                        CameraDevice.TEMPLATE_STILL_CAPTURE);
+                requests.add(
+                    deserialize(templateReq, jsonReqs.getJSONObject(i)));
+            }
+            return requests;
+        } catch (org.json.JSONException e) {
+            throw new ItsException("JSON error: ", e);
+        } catch (android.hardware.camera2.CameraAccessException e) {
+            throw new ItsException("Access error: ", e);
+        }
+    }
+}
diff --git a/apps/CtsVerifier/src/com/android/cts/verifier/camera/its/ItsService.java b/apps/CtsVerifier/src/com/android/cts/verifier/camera/its/ItsService.java
new file mode 100644
index 0000000..a305cd2
--- /dev/null
+++ b/apps/CtsVerifier/src/com/android/cts/verifier/camera/its/ItsService.java
@@ -0,0 +1,1334 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.cts.verifier.camera.its;
+
+import android.app.Service;
+import android.content.Context;
+import android.content.Intent;
+import android.graphics.ImageFormat;
+import android.hardware.camera2.CameraCaptureSession;
+import android.hardware.camera2.CameraAccessException;
+import android.hardware.camera2.CameraCharacteristics;
+import android.hardware.camera2.CameraDevice;
+import android.hardware.camera2.CameraManager;
+import android.hardware.camera2.CaptureFailure;
+import android.hardware.camera2.CaptureRequest;
+import android.hardware.camera2.CaptureResult;
+import android.hardware.camera2.DngCreator;
+import android.hardware.camera2.TotalCaptureResult;
+import android.hardware.camera2.params.MeteringRectangle;
+import android.hardware.Sensor;
+import android.hardware.SensorEvent;
+import android.hardware.SensorEventListener;
+import android.hardware.SensorManager;
+import android.media.Image;
+import android.media.ImageReader;
+import android.net.Uri;
+import android.os.ConditionVariable;
+import android.os.Handler;
+import android.os.HandlerThread;
+import android.os.IBinder;
+import android.os.Message;
+import android.os.Vibrator;
+import android.util.Log;
+import android.util.Rational;
+import android.util.Size;
+import android.view.Surface;
+
+import com.android.ex.camera2.blocking.BlockingCameraManager;
+import com.android.ex.camera2.blocking.BlockingCameraManager.BlockingOpenException;
+import com.android.ex.camera2.blocking.BlockingStateCallback;
+import com.android.ex.camera2.blocking.BlockingSessionCallback;
+
+import org.json.JSONArray;
+import org.json.JSONObject;
+
+import java.io.BufferedReader;
+import java.io.BufferedWriter;
+import java.io.ByteArrayOutputStream;
+import java.io.IOException;
+import java.io.InputStreamReader;
+import java.io.OutputStreamWriter;
+import java.io.PrintWriter;
+import java.math.BigInteger;
+import java.net.ServerSocket;
+import java.net.Socket;
+import java.net.SocketTimeoutException;
+import java.nio.ByteBuffer;
+import java.nio.charset.Charset;
+import java.security.MessageDigest;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.concurrent.BlockingQueue;
+import java.util.concurrent.CountDownLatch;
+import java.util.concurrent.LinkedBlockingDeque;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.atomic.AtomicInteger;
+
+public class ItsService extends Service implements SensorEventListener {
+    public static final String TAG = ItsService.class.getSimpleName();
+
+    // Timeouts, in seconds.
+    public static final int TIMEOUT_CALLBACK = 3;
+    public static final int TIMEOUT_3A = 10;
+
+    // State transition timeouts, in ms.
+    private static final long TIMEOUT_IDLE_MS = 2000;
+    private static final long TIMEOUT_STATE_MS = 500;
+
+    // Timeout to wait for a capture result after the capture buffer has arrived, in ms.
+    private static final long TIMEOUT_CAP_RES = 2000;
+
+    private static final int MAX_CONCURRENT_READER_BUFFERS = 10;
+
+    // Supports at most RAW+YUV+JPEG, one surface each.
+    private static final int MAX_NUM_OUTPUT_SURFACES = 3;
+
+    public static final int SERVERPORT = 6000;
+
+    public static final String REGION_KEY = "regions";
+    public static final String REGION_AE_KEY = "ae";
+    public static final String REGION_AWB_KEY = "awb";
+    public static final String REGION_AF_KEY = "af";
+    public static final String LOCK_AE_KEY = "aeLock";
+    public static final String LOCK_AWB_KEY = "awbLock";
+    public static final String TRIGGER_KEY = "triggers";
+    public static final String TRIGGER_AE_KEY = "ae";
+    public static final String TRIGGER_AF_KEY = "af";
+    public static final String VIB_PATTERN_KEY = "pattern";
+    public static final String EVCOMP_KEY = "evComp";
+
+    private CameraManager mCameraManager = null;
+    private HandlerThread mCameraThread = null;
+    private Handler mCameraHandler = null;
+    private BlockingCameraManager mBlockingCameraManager = null;
+    private BlockingStateCallback mCameraListener = null;
+    private CameraDevice mCamera = null;
+    private CameraCaptureSession mSession = null;
+    private ImageReader[] mCaptureReaders = null;
+    private CameraCharacteristics mCameraCharacteristics = null;
+
+    private Vibrator mVibrator = null;
+
+    private HandlerThread mSaveThreads[] = new HandlerThread[MAX_NUM_OUTPUT_SURFACES];
+    private Handler mSaveHandlers[] = new Handler[MAX_NUM_OUTPUT_SURFACES];
+    private HandlerThread mResultThread = null;
+    private Handler mResultHandler = null;
+
+    private volatile boolean mThreadExitFlag = false;
+
+    private volatile ServerSocket mSocket = null;
+    private volatile SocketRunnable mSocketRunnableObj = null;
+    private volatile BlockingQueue<ByteBuffer> mSocketWriteQueue =
+            new LinkedBlockingDeque<ByteBuffer>();
+    private final Object mSocketWriteEnqueueLock = new Object();
+    private final Object mSocketWriteDrainLock = new Object();
+
+    private volatile BlockingQueue<Object[]> mSerializerQueue =
+            new LinkedBlockingDeque<Object[]>();
+
+    private AtomicInteger mCountCallbacksRemaining = new AtomicInteger();
+    private AtomicInteger mCountRawOrDng = new AtomicInteger();
+    private AtomicInteger mCountRaw10 = new AtomicInteger();
+    private AtomicInteger mCountJpg = new AtomicInteger();
+    private AtomicInteger mCountYuv = new AtomicInteger();
+    private AtomicInteger mCountCapRes = new AtomicInteger();
+    private boolean mCaptureRawIsDng;
+    private CaptureResult mCaptureResults[] = null;
+
+    private volatile ConditionVariable mInterlock3A = new ConditionVariable(true);
+    private volatile boolean mIssuedRequest3A = false;
+    private volatile boolean mConvergedAE = false;
+    private volatile boolean mConvergedAF = false;
+    private volatile boolean mConvergedAWB = false;
+    private volatile boolean mLockedAE = false;
+    private volatile boolean mLockedAWB = false;
+    private volatile boolean mNeedsLockedAE = false;
+    private volatile boolean mNeedsLockedAWB = false;
+
+    class MySensorEvent {
+        public Sensor sensor;
+        public int accuracy;
+        public long timestamp;
+        public float values[];
+    }
+
+    // For capturing motion sensor traces.
+    private SensorManager mSensorManager = null;
+    private Sensor mAccelSensor = null;
+    private Sensor mMagSensor = null;
+    private Sensor mGyroSensor = null;
+    private volatile LinkedList<MySensorEvent> mEvents = null;
+    private volatile Object mEventLock = new Object();
+    private volatile boolean mEventsEnabled = false;
+
+    public interface CaptureCallback {
+        void onCaptureAvailable(Image capture);
+    }
+
+    public abstract class CaptureResultListener extends CameraCaptureSession.CaptureCallback {}
+
+    @Override
+    public IBinder onBind(Intent intent) {
+        return null;
+    }
+
+    @Override
+    public void onCreate() {
+        try {
+            mThreadExitFlag = false;
+
+            // Get handle to camera manager.
+            mCameraManager = (CameraManager) this.getSystemService(Context.CAMERA_SERVICE);
+            if (mCameraManager == null) {
+                throw new ItsException("Failed to connect to camera manager");
+            }
+            mBlockingCameraManager = new BlockingCameraManager(mCameraManager);
+            mCameraListener = new BlockingStateCallback();
+
+            // Register for motion events.
+            mEvents = new LinkedList<MySensorEvent>();
+            mSensorManager = (SensorManager)getSystemService(Context.SENSOR_SERVICE);
+            mAccelSensor = mSensorManager.getDefaultSensor(Sensor.TYPE_ACCELEROMETER);
+            mMagSensor = mSensorManager.getDefaultSensor(Sensor.TYPE_MAGNETIC_FIELD);
+            mGyroSensor = mSensorManager.getDefaultSensor(Sensor.TYPE_GYROSCOPE);
+            mSensorManager.registerListener(this, mAccelSensor, SensorManager.SENSOR_DELAY_FASTEST);
+            mSensorManager.registerListener(this, mMagSensor, SensorManager.SENSOR_DELAY_FASTEST);
+            mSensorManager.registerListener(this, mGyroSensor, SensorManager.SENSOR_DELAY_FASTEST);
+
+            // Get a handle to the system vibrator.
+            mVibrator = (Vibrator)getSystemService(Context.VIBRATOR_SERVICE);
+
+            // Create threads to receive images and save them.
+            for (int i = 0; i < MAX_NUM_OUTPUT_SURFACES; i++) {
+                mSaveThreads[i] = new HandlerThread("SaveThread" + i);
+                mSaveThreads[i].start();
+                mSaveHandlers[i] = new Handler(mSaveThreads[i].getLooper());
+            }
+
+            // Create a thread to handle object serialization.
+            (new Thread(new SerializerRunnable())).start();;
+
+            // Create a thread to receive capture results and process them.
+            mResultThread = new HandlerThread("ResultThread");
+            mResultThread.start();
+            mResultHandler = new Handler(mResultThread.getLooper());
+
+            // Create a thread for the camera device.
+            mCameraThread = new HandlerThread("ItsCameraThread");
+            mCameraThread.start();
+            mCameraHandler = new Handler(mCameraThread.getLooper());
+
+            // Create a thread to process commands, listening on a TCP socket.
+            mSocketRunnableObj = new SocketRunnable();
+            (new Thread(mSocketRunnableObj)).start();
+        } catch (ItsException e) {
+            Logt.e(TAG, "Service failed to start: ", e);
+        }
+    }
+
+    @Override
+    public int onStartCommand(Intent intent, int flags, int startId) {
+        try {
+            // Just log a message indicating that the service is running and is able to accept
+            // socket connections.
+            while (!mThreadExitFlag && mSocket==null) {
+                Thread.sleep(1);
+            }
+            if (!mThreadExitFlag){
+                Logt.i(TAG, "ItsService ready");
+            } else {
+                Logt.e(TAG, "Starting ItsService in bad state");
+            }
+        } catch (java.lang.InterruptedException e) {
+            Logt.e(TAG, "Error starting ItsService (interrupted)", e);
+        }
+        return START_STICKY;
+    }
+
+    @Override
+    public void onDestroy() {
+        mThreadExitFlag = true;
+        for (int i = 0; i < MAX_NUM_OUTPUT_SURFACES; i++) {
+            if (mSaveThreads[i] != null) {
+                mSaveThreads[i].quit();
+                mSaveThreads[i] = null;
+            }
+        }
+        if (mResultThread != null) {
+            mResultThread.quitSafely();
+            mResultThread = null;
+        }
+        if (mCameraThread != null) {
+            mCameraThread.quitSafely();
+            mCameraThread = null;
+        }
+    }
+
+    // Opens the camera at index cameraId of CameraManager.getCameraIdList()
+    // via the blocking manager (so the device is fully opened on return),
+    // caches its CameraCharacteristics, and acks the host with a
+    // "cameraOpened" response. Throws ItsException when no devices exist or
+    // the open fails.
+    // NOTE(review): an out-of-range cameraId would surface as an
+    // ArrayIndexOutOfBoundsException rather than an ItsException — confirm
+    // callers validate the index.
+    public void openCameraDevice(int cameraId) throws ItsException {
+        Logt.i(TAG, String.format("Opening camera %d", cameraId));
+
+        String[] devices;
+        try {
+            devices = mCameraManager.getCameraIdList();
+            if (devices == null || devices.length == 0) {
+                throw new ItsException("No camera devices");
+            }
+        } catch (CameraAccessException e) {
+            throw new ItsException("Failed to get device ID list", e);
+        }
+
+        try {
+            mCamera = mBlockingCameraManager.openCamera(devices[cameraId],
+                    mCameraListener, mCameraHandler);
+            mCameraCharacteristics = mCameraManager.getCameraCharacteristics(
+                    devices[cameraId]);
+        } catch (CameraAccessException e) {
+            throw new ItsException("Failed to open camera", e);
+        } catch (BlockingOpenException e) {
+            throw new ItsException("Failed to open camera (after blocking)", e);
+        }
+        mSocketRunnableObj.sendResponse("cameraOpened", "");
+    }
+
+    // Closes the currently open camera device (no-op when none is open) and
+    // acks the host with a "cameraClosed" response. Any failure during close
+    // is wrapped in an ItsException.
+    public void closeCameraDevice() throws ItsException {
+        try {
+            if (mCamera != null) {
+                Logt.i(TAG, "Closing camera");
+                mCamera.close();
+                mCamera = null;
+            }
+        } catch (Exception e) {
+            throw new ItsException("Failed to close device");
+        }
+        mSocketRunnableObj.sendResponse("cameraClosed", "");
+    }
+
+    // Worker that drains mSerializerQueue. Each queued Object[] carries
+    // exactly one String tag plus any mix of CameraCharacteristics /
+    // CaptureRequest / CaptureResult / JSONArray entries; these are folded
+    // into a single JSON object and handed to the socket thread to send under
+    // that tag. The loop terminates on the first serialization error or
+    // interrupt, or once mThreadExitFlag is set.
+    class SerializerRunnable implements Runnable {
+        // Use a separate thread to perform JSON serialization (since this can be slow due to
+        // the reflection).
+        @Override
+        public void run() {
+            Logt.i(TAG, "Serializer thread starting");
+            while (! mThreadExitFlag) {
+                try {
+                    Object objs[] = mSerializerQueue.take();
+                    JSONObject jsonObj = new JSONObject();
+                    String tag = null;
+                    for (int i = 0; i < objs.length; i++) {
+                        Object obj = objs[i];
+                        if (obj instanceof String) {
+                            // The (single) String entry is the response tag.
+                            if (tag != null) {
+                                throw new ItsException("Multiple tags for socket response");
+                            }
+                            tag = (String)obj;
+                        } else if (obj instanceof CameraCharacteristics) {
+                            jsonObj.put("cameraProperties", ItsSerializer.serialize(
+                                    (CameraCharacteristics)obj));
+                        } else if (obj instanceof CaptureRequest) {
+                            jsonObj.put("captureRequest", ItsSerializer.serialize(
+                                    (CaptureRequest)obj));
+                        } else if (obj instanceof CaptureResult) {
+                            jsonObj.put("captureResult", ItsSerializer.serialize(
+                                    (CaptureResult)obj));
+                        } else if (obj instanceof JSONArray) {
+                            jsonObj.put("outputs", (JSONArray)obj);
+                        } else {
+                            throw new ItsException("Invalid object received for serialiation");
+                        }
+                    }
+                    if (tag == null) {
+                        throw new ItsException("No tag provided for socket response");
+                    }
+                    mSocketRunnableObj.sendResponse(tag, null, jsonObj, null);
+                    Logt.i(TAG, String.format("Serialized %s", tag));
+                } catch (org.json.JSONException e) {
+                    Logt.e(TAG, "Error serializing object", e);
+                    break;
+                } catch (ItsException e) {
+                    Logt.e(TAG, "Error serializing object", e);
+                    break;
+                } catch (java.lang.InterruptedException e) {
+                    Logt.e(TAG, "Error serializing object (interrupted)", e);
+                    break;
+                }
+            }
+            Logt.i(TAG, "Serializer thread terminated");
+        }
+    }
+
+    class SocketWriteRunnable implements Runnable {
+
+        // Use a separate thread to service a queue of objects to be written to the socket,
+        // writing each sequentially in order. This is needed since different handler functions
+        // (called on different threads) will need to send data back to the host script.
+
+        // Current client socket; null when no client is connected. Buffers
+        // taken from the queue while this is null are silently dropped.
+        public Socket mOpenSocket = null;
+
+        public SocketWriteRunnable(Socket openSocket) {
+            mOpenSocket = openSocket;
+        }
+
+        // Called by the socket thread on each (re)connect/disconnect.
+        public void setOpenSocket(Socket openSocket) {
+            mOpenSocket = openSocket;
+        }
+
+        // Writer loop: blocks on mSocketWriteQueue and writes each ByteBuffer
+        // in FIFO order. Writes happen under mSocketWriteDrainLock so the
+        // socket thread can atomically clear the queue on disconnect. The
+        // loop exits permanently on an I/O error or interrupt.
+        @Override
+        public void run() {
+            Logt.i(TAG, "Socket writer thread starting");
+            while (true) {
+                try {
+                    ByteBuffer b = mSocketWriteQueue.take();
+                    synchronized(mSocketWriteDrainLock) {
+                        if (mOpenSocket == null) {
+                            continue;
+                        }
+                        if (b.hasArray()) {
+                            mOpenSocket.getOutputStream().write(b.array());
+                        } else {
+                            // Direct buffers have no backing array; copy out first.
+                            byte[] barray = new byte[b.capacity()];
+                            b.get(barray);
+                            mOpenSocket.getOutputStream().write(barray);
+                        }
+                        mOpenSocket.getOutputStream().flush();
+                        Logt.i(TAG, String.format("Wrote to socket: %d bytes", b.capacity()));
+                    }
+                } catch (IOException e) {
+                    Logt.e(TAG, "Error writing to socket", e);
+                    break;
+                } catch (java.lang.InterruptedException e) {
+                    Logt.e(TAG, "Error writing to socket (interrupted)", e);
+                    break;
+                }
+            }
+            Logt.i(TAG, "Socket writer thread terminated");
+        }
+    }
+
+    // Main host-connection loop. One thread accepts a single client at a time
+    // on SERVERPORT, reads newline-delimited JSON commands and dispatches
+    // them; a companion SocketWriteRunnable thread performs all socket writes.
+    class SocketRunnable implements Runnable {
+
+        // Format of sent messages (over the socket):
+        // * Serialized JSON object on a single line (newline-terminated)
+        // * For byte buffers, the binary data then follows
+        //
+        // Format of received messages (from the socket):
+        // * Serialized JSON object on a single line (newline-terminated)
+
+        private Socket mOpenSocket = null;
+        private SocketWriteRunnable mSocketWriteRunnable = null;
+
+        // Accept loop: serves one client connection at a time until
+        // mThreadExitFlag is set or a fatal socket error occurs, then attempts
+        // best-effort cleanup of any open sockets.
+        @Override
+        public void run() {
+            Logt.i(TAG, "Socket thread starting");
+            try {
+                mSocket = new ServerSocket(SERVERPORT);
+            } catch (IOException e) {
+                Logt.e(TAG, "Failed to create socket", e);
+            }
+
+            // Create a new thread to handle writes to this socket.
+            mSocketWriteRunnable = new SocketWriteRunnable(null);
+            (new Thread(mSocketWriteRunnable)).start();
+
+            while (!mThreadExitFlag) {
+                // Receive the socket-open request from the host.
+                try {
+                    Logt.i(TAG, "Waiting for client to connect to socket");
+                    mOpenSocket = mSocket.accept();
+                    if (mOpenSocket == null) {
+                        Logt.e(TAG, "Socket connection error");
+                        break;
+                    }
+                    // Drop any responses queued for a previous client.
+                    mSocketWriteQueue.clear();
+                    mSocketWriteRunnable.setOpenSocket(mOpenSocket);
+                    Logt.i(TAG, "Socket connected");
+                } catch (IOException e) {
+                    Logt.e(TAG, "Socket open error: ", e);
+                    break;
+                }
+
+                // Process commands over the open socket.
+                while (!mThreadExitFlag) {
+                    try {
+                        BufferedReader input = new BufferedReader(
+                                new InputStreamReader(mOpenSocket.getInputStream()));
+                        // NOTE(review): 'input' can never be null here (it is a
+                        // constructor result), so this check is dead code.
+                        if (input == null) {
+                            Logt.e(TAG, "Failed to get socket input stream");
+                            break;
+                        }
+                        String line = input.readLine();
+                        if (line == null) {
+                            Logt.i(TAG, "Socket readline retuned null (host disconnected)");
+                            break;
+                        }
+                        processSocketCommand(line);
+                    } catch (IOException e) {
+                        Logt.e(TAG, "Socket read error: ", e);
+                        break;
+                    } catch (ItsException e) {
+                        Logt.e(TAG, "Script error: ", e);
+                        break;
+                    }
+                }
+
+                // Close socket and go back to waiting for a new connection.
+                try {
+                    // Hold the drain lock so the writer thread cannot write to
+                    // a socket that is being closed out from under it.
+                    synchronized(mSocketWriteDrainLock) {
+                        mSocketWriteQueue.clear();
+                        mOpenSocket.close();
+                        mOpenSocket = null;
+                        Logt.i(TAG, "Socket disconnected");
+                    }
+                } catch (java.io.IOException e) {
+                    Logt.e(TAG, "Exception closing socket");
+                }
+            }
+
+            // It's an overall error state if the code gets here; no recovery.
+            // Try to do some cleanup, but the service probably needs to be restarted.
+            Logt.i(TAG, "Socket server loop exited");
+            mThreadExitFlag = true;
+            try {
+                if (mOpenSocket != null) {
+                    mOpenSocket.close();
+                    mOpenSocket = null;
+                }
+            } catch (java.io.IOException e) {
+                Logt.w(TAG, "Exception closing socket");
+            }
+            try {
+                if (mSocket != null) {
+                    mSocket.close();
+                    mSocket = null;
+                }
+            } catch (java.io.IOException e) {
+                Logt.w(TAG, "Exception closing socket");
+            }
+        }
+
+        // Parses one received line as a JSON command object and dispatches on
+        // its "cmdName" field. JSON parse errors are logged and swallowed;
+        // unknown command names raise ItsException.
+        public void processSocketCommand(String cmd)
+                throws ItsException {
+            // Each command is a serialized JSON object.
+            try {
+                JSONObject cmdObj = new JSONObject(cmd);
+                if ("open".equals(cmdObj.getString("cmdName"))) {
+                    int cameraId = cmdObj.getInt("cameraId");
+                    openCameraDevice(cameraId);
+                } else if ("close".equals(cmdObj.getString("cmdName"))) {
+                    closeCameraDevice();
+                } else if ("getCameraProperties".equals(cmdObj.getString("cmdName"))) {
+                    doGetProps();
+                } else if ("startSensorEvents".equals(cmdObj.getString("cmdName"))) {
+                    doStartSensorEvents();
+                } else if ("getSensorEvents".equals(cmdObj.getString("cmdName"))) {
+                    doGetSensorEvents();
+                } else if ("do3A".equals(cmdObj.getString("cmdName"))) {
+                    do3A(cmdObj);
+                } else if ("doCapture".equals(cmdObj.getString("cmdName"))) {
+                    doCapture(cmdObj);
+                } else if ("doVibrate".equals(cmdObj.getString("cmdName"))) {
+                    doVibrate(cmdObj);
+                } else {
+                    throw new ItsException("Unknown command: " + cmd);
+                }
+            } catch (org.json.JSONException e) {
+                Logt.e(TAG, "Invalid command: ", e);
+            }
+        }
+
+        // Builds the JSON response header {tag, strValue?, objValue?,
+        // bufValueSize?} and enqueues it — followed by the raw byte buffer,
+        // when present — for the writer thread. mSocketWriteEnqueueLock keeps
+        // the header and its buffer adjacent in the queue when multiple
+        // threads send concurrently.
+        public void sendResponse(String tag, String str, JSONObject obj, ByteBuffer bbuf)
+                throws ItsException {
+            try {
+                JSONObject jsonObj = new JSONObject();
+                jsonObj.put("tag", tag);
+                if (str != null) {
+                    jsonObj.put("strValue", str);
+                }
+                if (obj != null) {
+                    jsonObj.put("objValue", obj);
+                }
+                if (bbuf != null) {
+                    jsonObj.put("bufValueSize", bbuf.capacity());
+                }
+                ByteBuffer bstr = ByteBuffer.wrap(
+                        (jsonObj.toString()+"\n").getBytes(Charset.defaultCharset()));
+                synchronized(mSocketWriteEnqueueLock) {
+                    if (bstr != null) {
+                        mSocketWriteQueue.put(bstr);
+                    }
+                    if (bbuf != null) {
+                        mSocketWriteQueue.put(bbuf);
+                    }
+                }
+            } catch (org.json.JSONException e) {
+                throw new ItsException("JSON error: ", e);
+            } catch (java.lang.InterruptedException e) {
+                throw new ItsException("Socket error: ", e);
+            }
+        }
+
+        // Convenience overload: string-valued response.
+        public void sendResponse(String tag, String str)
+                throws ItsException {
+            sendResponse(tag, str, null, null);
+        }
+
+        // Convenience overload: JSON-object-valued response.
+        public void sendResponse(String tag, JSONObject obj)
+                throws ItsException {
+            sendResponse(tag, null, obj, null);
+        }
+
+        // Convenience overload: JSON header plus raw capture-buffer bytes.
+        public void sendResponseCaptureBuffer(String tag, ByteBuffer bbuf)
+                throws ItsException {
+            sendResponse(tag, null, null, bbuf);
+        }
+
+        // Packages buffered sensor events into per-type (accel/mag/gyro) JSON
+        // arrays and sends them as a single "sensorEvents" response.
+        public void sendResponse(LinkedList<MySensorEvent> events)
+                throws ItsException {
+            try {
+                JSONArray accels = new JSONArray();
+                JSONArray mags = new JSONArray();
+                JSONArray gyros = new JSONArray();
+                for (MySensorEvent event : events) {
+                    JSONObject obj = new JSONObject();
+                    obj.put("time", event.timestamp);
+                    obj.put("x", event.values[0]);
+                    obj.put("y", event.values[1]);
+                    obj.put("z", event.values[2]);
+                    if (event.sensor.getType() == Sensor.TYPE_ACCELEROMETER) {
+                        accels.put(obj);
+                    } else if (event.sensor.getType() == Sensor.TYPE_MAGNETIC_FIELD) {
+                        mags.put(obj);
+                    } else if (event.sensor.getType() == Sensor.TYPE_GYROSCOPE) {
+                        gyros.put(obj);
+                    }
+                }
+                JSONObject obj = new JSONObject();
+                obj.put("accel", accels);
+                obj.put("mag", mags);
+                obj.put("gyro", gyros);
+                sendResponse("sensorEvents", null, obj, null);
+            } catch (org.json.JSONException e) {
+                throw new ItsException("JSON error: ", e);
+            }
+        }
+
+        // Enqueues the camera properties for the serializer thread, which
+        // emits them as a "cameraProperties" response.
+        public void sendResponse(CameraCharacteristics props)
+                throws ItsException {
+            try {
+                Object objs[] = new Object[2];
+                objs[0] = "cameraProperties";
+                objs[1] = props;
+                mSerializerQueue.put(objs);
+            } catch (InterruptedException e) {
+                throw new ItsException("Interrupted: ", e);
+            }
+        }
+
+        // Describes each output surface (width/height plus a format string)
+        // and enqueues the props/request/result along with that description
+        // for the serializer thread, which emits a "captureResults" response.
+        public void sendResponseCaptureResult(CameraCharacteristics props,
+                                              CaptureRequest request,
+                                              CaptureResult result,
+                                              ImageReader[] readers)
+                throws ItsException {
+            try {
+                JSONArray jsonSurfaces = new JSONArray();
+                for (int i = 0; i < readers.length; i++) {
+                    JSONObject jsonSurface = new JSONObject();
+                    jsonSurface.put("width", readers[i].getWidth());
+                    jsonSurface.put("height", readers[i].getHeight());
+                    int format = readers[i].getImageFormat();
+                    if (format == ImageFormat.RAW_SENSOR) {
+                        jsonSurface.put("format", "raw");
+                    } else if (format == ImageFormat.RAW10) {
+                        jsonSurface.put("format", "raw10");
+                    } else if (format == ImageFormat.JPEG) {
+                        jsonSurface.put("format", "jpeg");
+                    } else if (format == ImageFormat.YUV_420_888) {
+                        jsonSurface.put("format", "yuv");
+                    } else {
+                        throw new ItsException("Invalid format");
+                    }
+                    jsonSurfaces.put(jsonSurface);
+                }
+
+                Object objs[] = new Object[5];
+                objs[0] = "captureResults";
+                objs[1] = props;
+                objs[2] = request;
+                objs[3] = result;
+                objs[4] = jsonSurfaces;
+                mSerializerQueue.put(objs);
+            } catch (org.json.JSONException e) {
+                throw new ItsException("JSON error: ", e);
+            } catch (InterruptedException e) {
+                throw new ItsException("Interrupted: ", e);
+            }
+        }
+    }
+
+    // Wraps a CaptureCallback in an OnImageAvailableListener that acquires
+    // the next image, forwards it to the callback, and always closes it
+    // (even if the callback throws) so reader buffers are recycled.
+    public ImageReader.OnImageAvailableListener
+            createAvailableListener(final CaptureCallback listener) {
+        return new ImageReader.OnImageAvailableListener() {
+            @Override
+            public void onImageAvailable(ImageReader reader) {
+                Image i = null;
+                try {
+                    i = reader.acquireNextImage();
+                    listener.onCaptureAvailable(i);
+                } finally {
+                    if (i != null) {
+                        i.close();
+                    }
+                }
+            }
+        };
+    }
+
+    // Returns a listener that immediately acquires and closes each image,
+    // recycling buffers for captures whose pixel data is not needed (e.g. the
+    // 3A frames). The 'listener' argument is not used by the returned object.
+    private ImageReader.OnImageAvailableListener
+            createAvailableListenerDropper(final CaptureCallback listener) {
+        return new ImageReader.OnImageAvailableListener() {
+            @Override
+            public void onImageAvailable(ImageReader reader) {
+                Image i = reader.acquireNextImage();
+                i.close();
+            }
+        };
+    }
+
+    // Enables accumulation of sensor events (guarded by mEventLock) and acks
+    // the host with a "sensorEventsStarted" response.
+    private void doStartSensorEvents() throws ItsException {
+        synchronized(mEventLock) {
+            mEventsEnabled = true;
+        }
+        mSocketRunnableObj.sendResponse("sensorEventsStarted", "");
+    }
+
+    // Sends all buffered sensor events to the host, then clears the buffer
+    // and disables further collection — all atomically under mEventLock.
+    private void doGetSensorEvents() throws ItsException {
+        synchronized(mEventLock) {
+            mSocketRunnableObj.sendResponse(mEvents);
+            mEvents.clear();
+            mEventsEnabled = false;
+        }
+    }
+
+    // Sends the cached CameraCharacteristics of the opened camera to the host.
+    private void doGetProps() throws ItsException {
+        mSocketRunnableObj.sendResponse(mCameraCharacteristics);
+    }
+
+    // Closes any previously allocated ImageReaders and allocates numSurfaces
+    // new ones with the given per-surface sizes and formats, each able to
+    // hold up to MAX_CONCURRENT_READER_BUFFERS images.
+    private void prepareCaptureReader(int[] widths, int[] heights, int formats[], int numSurfaces) {
+        if (mCaptureReaders != null) {
+            for (int i = 0; i < mCaptureReaders.length; i++) {
+                if (mCaptureReaders[i] != null) {
+                    mCaptureReaders[i].close();
+                }
+            }
+        }
+        mCaptureReaders = new ImageReader[numSurfaces];
+        for (int i = 0; i < numSurfaces; i++) {
+            mCaptureReaders[i] = ImageReader.newInstance(widths[i], heights[i], formats[i],
+                    MAX_CONCURRENT_READER_BUFFERS);
+        }
+    }
+
+    // Runs the 3A (AE/AF/AWB) convergence loop on a single full-resolution
+    // YUV stream. Metering regions, triggers, EV compensation, and AE/AWB
+    // lock behavior come from the JSON params; progress is reported to the
+    // host via socket responses ("afResult" for fixed-focus shortcut,
+    // "3aConverged" on success, and always "3aDone" on exit). Throws
+    // ItsException on camera/JSON errors or if convergence takes longer than
+    // TIMEOUT_3A seconds.
+    private void do3A(JSONObject params) throws ItsException {
+        try {
+            // Start a 3A action, and wait for it to converge.
+            // Get the converged values for each "A", and package into JSON result for caller.
+
+            // 3A happens on full-res frames.
+            Size sizes[] = ItsUtils.getYuvOutputSizes(mCameraCharacteristics);
+            int widths[] = new int[1];
+            int heights[] = new int[1];
+            int formats[] = new int[1];
+            widths[0] = sizes[0].getWidth();
+            heights[0] = sizes[0].getHeight();
+            formats[0] = ImageFormat.YUV_420_888;
+            int width = widths[0];
+            int height = heights[0];
+
+            prepareCaptureReader(widths, heights, formats, 1);
+            List<Surface> outputSurfaces = new ArrayList<Surface>(1);
+            outputSurfaces.add(mCaptureReaders[0].getSurface());
+            BlockingSessionCallback sessionListener = new BlockingSessionCallback();
+            mCamera.createCaptureSession(outputSurfaces, sessionListener, mCameraHandler);
+            mSession = sessionListener.waitAndGetSession(TIMEOUT_IDLE_MS);
+
+            // Add a listener that just recycles buffers; they aren't saved anywhere.
+            ImageReader.OnImageAvailableListener readerListener =
+                    createAvailableListenerDropper(mCaptureCallback);
+            mCaptureReaders[0].setOnImageAvailableListener(readerListener, mSaveHandlers[0]);
+
+            // Get the user-specified regions for AE, AWB, AF.
+            // Note that the user specifies normalized [x,y,w,h], which is converted below
+            // to an [x0,y0,x1,y1] region in sensor coords. The capture request region
+            // also has a fifth "weight" element: [x0,y0,x1,y1,w].
+            // Default: full-frame region with weight 1 for each of AE/AF/AWB.
+            MeteringRectangle[] regionAE = new MeteringRectangle[]{
+                    new MeteringRectangle(0,0,width,height,1)};
+            MeteringRectangle[] regionAF = new MeteringRectangle[]{
+                    new MeteringRectangle(0,0,width,height,1)};
+            MeteringRectangle[] regionAWB = new MeteringRectangle[]{
+                    new MeteringRectangle(0,0,width,height,1)};
+            if (params.has(REGION_KEY)) {
+                JSONObject regions = params.getJSONObject(REGION_KEY);
+                if (regions.has(REGION_AE_KEY)) {
+                    regionAE = ItsUtils.getJsonWeightedRectsFromArray(
+                            regions.getJSONArray(REGION_AE_KEY), true, width, height);
+                }
+                if (regions.has(REGION_AF_KEY)) {
+                    regionAF = ItsUtils.getJsonWeightedRectsFromArray(
+                            regions.getJSONArray(REGION_AF_KEY), true, width, height);
+                }
+                if (regions.has(REGION_AWB_KEY)) {
+                    regionAWB = ItsUtils.getJsonWeightedRectsFromArray(
+                            regions.getJSONArray(REGION_AWB_KEY), true, width, height);
+                }
+            }
+
+            // If AE or AWB lock is specified, then the 3A will converge first and then lock these
+            // values, waiting until the HAL has reported that the lock was successful.
+            mNeedsLockedAE = params.optBoolean(LOCK_AE_KEY, false);
+            mNeedsLockedAWB = params.optBoolean(LOCK_AWB_KEY, false);
+
+            // An EV compensation can be specified as part of AE convergence.
+            int evComp = params.optInt(EVCOMP_KEY, 0);
+            if (evComp != 0) {
+                Logt.i(TAG, String.format("Running 3A with AE exposure compensation value: %d", evComp));
+            }
+
+            // By default, AE and AF both get triggered, but the user can optionally override this.
+            // Also, AF won't get triggered if the lens is fixed-focus.
+            boolean doAE = true;
+            boolean doAF = true;
+            if (params.has(TRIGGER_KEY)) {
+                JSONObject triggers = params.getJSONObject(TRIGGER_KEY);
+                if (triggers.has(TRIGGER_AE_KEY)) {
+                    doAE = triggers.getBoolean(TRIGGER_AE_KEY);
+                }
+                if (triggers.has(TRIGGER_AF_KEY)) {
+                    doAF = triggers.getBoolean(TRIGGER_AF_KEY);
+                }
+            }
+            if (doAF && mCameraCharacteristics.get(
+                            CameraCharacteristics.LENS_INFO_MINIMUM_FOCUS_DISTANCE) == 0) {
+                // Send a dummy result back for the code that is waiting for this message to see
+                // that AF has converged.
+                Logt.i(TAG, "Ignoring request for AF on fixed-focus camera");
+                mSocketRunnableObj.sendResponse("afResult", "0.0");
+                doAF = false;
+            }
+
+            // Reset the shared convergence state written by the capture-result
+            // listener before starting a fresh convergence loop.
+            mInterlock3A.open();
+            mIssuedRequest3A = false;
+            mConvergedAE = false;
+            mConvergedAWB = false;
+            mConvergedAF = false;
+            mLockedAE = false;
+            mLockedAWB = false;
+            long tstart = System.currentTimeMillis();
+            boolean triggeredAE = false;
+            boolean triggeredAF = false;
+
+            Logt.i(TAG, String.format("Initiating 3A: AE:%d, AF:%d, AWB:1, AELOCK:%d, AWBLOCK:%d",
+                    doAE?1:0, doAF?1:0, mNeedsLockedAE?1:0, mNeedsLockedAWB?1:0));
+
+            // Keep issuing capture requests until 3A has converged.
+            while (true) {
+
+                // Block until can take the next 3A frame. Only want one outstanding frame
+                // at a time, to simplify the logic here.
+                if (!mInterlock3A.block(TIMEOUT_3A * 1000) ||
+                        System.currentTimeMillis() - tstart > TIMEOUT_3A * 1000) {
+                    throw new ItsException(
+                            "3A failed to converge after " + TIMEOUT_3A + " seconds.\n" +
+                            "AE converge state: " + mConvergedAE + ", \n" +
+                            "AF convergence state: " + mConvergedAF + ", \n" +
+                            "AWB convergence state: " + mConvergedAWB + ".");
+                }
+                mInterlock3A.close();
+
+                // If not converged yet, issue another capture request.
+                if (       (doAE && (!triggeredAE || !mConvergedAE))
+                        || !mConvergedAWB
+                        || (doAF && (!triggeredAF || !mConvergedAF))
+                        || (doAE && mNeedsLockedAE && !mLockedAE)
+                        || (mNeedsLockedAWB && !mLockedAWB)) {
+
+                    // Baseline capture request for 3A.
+                    CaptureRequest.Builder req = mCamera.createCaptureRequest(
+                            CameraDevice.TEMPLATE_PREVIEW);
+                    req.set(CaptureRequest.FLASH_MODE, CaptureRequest.FLASH_MODE_OFF);
+                    req.set(CaptureRequest.CONTROL_MODE, CaptureRequest.CONTROL_MODE_AUTO);
+                    req.set(CaptureRequest.CONTROL_CAPTURE_INTENT,
+                            CaptureRequest.CONTROL_CAPTURE_INTENT_PREVIEW);
+                    req.set(CaptureRequest.CONTROL_AE_MODE,
+                            CaptureRequest.CONTROL_AE_MODE_ON);
+                    req.set(CaptureRequest.CONTROL_AE_EXPOSURE_COMPENSATION, 0);
+                    req.set(CaptureRequest.CONTROL_AE_LOCK, false);
+                    req.set(CaptureRequest.CONTROL_AE_REGIONS, regionAE);
+                    req.set(CaptureRequest.CONTROL_AF_MODE,
+                            CaptureRequest.CONTROL_AF_MODE_AUTO);
+                    req.set(CaptureRequest.CONTROL_AF_REGIONS, regionAF);
+                    req.set(CaptureRequest.CONTROL_AWB_MODE,
+                            CaptureRequest.CONTROL_AWB_MODE_AUTO);
+                    req.set(CaptureRequest.CONTROL_AWB_LOCK, false);
+                    req.set(CaptureRequest.CONTROL_AWB_REGIONS, regionAWB);
+
+                    if (evComp != 0) {
+                        req.set(CaptureRequest.CONTROL_AE_EXPOSURE_COMPENSATION, evComp);
+                    }
+
+                    // Lock AE/AWB only once their convergence has been observed.
+                    if (mConvergedAE && mNeedsLockedAE) {
+                        req.set(CaptureRequest.CONTROL_AE_LOCK, true);
+                    }
+                    if (mConvergedAWB && mNeedsLockedAWB) {
+                        req.set(CaptureRequest.CONTROL_AWB_LOCK, true);
+                    }
+
+                    // Trigger AE first.
+                    if (doAE && !triggeredAE) {
+                        Logt.i(TAG, "Triggering AE");
+                        req.set(CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER,
+                                CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER_START);
+                        triggeredAE = true;
+                    }
+
+                    // After AE has converged, trigger AF.
+                    if (doAF && !triggeredAF && (!doAE || (triggeredAE && mConvergedAE))) {
+                        Logt.i(TAG, "Triggering AF");
+                        req.set(CaptureRequest.CONTROL_AF_TRIGGER,
+                                CaptureRequest.CONTROL_AF_TRIGGER_START);
+                        triggeredAF = true;
+                    }
+
+                    req.addTarget(mCaptureReaders[0].getSurface());
+
+                    mIssuedRequest3A = true;
+                    mSession.capture(req.build(), mCaptureResultListener, mResultHandler);
+                } else {
+                    mSocketRunnableObj.sendResponse("3aConverged", "");
+                    Logt.i(TAG, "3A converged");
+                    break;
+                }
+            }
+        } catch (android.hardware.camera2.CameraAccessException e) {
+            throw new ItsException("Access error: ", e);
+        } catch (org.json.JSONException e) {
+            throw new ItsException("JSON error: ", e);
+        } finally {
+            // Always tell the host the 3A attempt is finished, even on failure.
+            mSocketRunnableObj.sendResponse("3aDone", "");
+        }
+    }
+
+    // Vibrates once using the pattern (array of on/off durations, read from
+    // params[VIB_PATTERN_KEY]) — the -1 repeat index means "do not repeat" —
+    // then acks the host with a "vibrationStarted" response. Throws
+    // ItsException when no vibrator is available or the JSON is malformed.
+    private void doVibrate(JSONObject params) throws ItsException {
+        try {
+            if (mVibrator == null) {
+                throw new ItsException("Unable to start vibrator");
+            }
+            JSONArray patternArray = params.getJSONArray(VIB_PATTERN_KEY);
+            int len = patternArray.length();
+            long pattern[] = new long[len];
+            for (int i = 0; i < len; i++) {
+                pattern[i] = patternArray.getLong(i);
+            }
+            Logt.i(TAG, String.format("Starting vibrator, pattern length %d",len));
+            mVibrator.vibrate(pattern, -1);
+            mSocketRunnableObj.sendResponse("vibrationStarted", "");
+        } catch (org.json.JSONException e) {
+            throw new ItsException("JSON error: ", e);
+        }
+    }
+
+    private void doCapture(JSONObject params) throws ItsException {
+        try {
+            // Parse the JSON to get the list of capture requests.
+            List<CaptureRequest.Builder> requests = ItsSerializer.deserializeRequestList(
+                    mCamera, params);
+
+            // Set the output surface(s) and listeners.
+            int widths[] = new int[MAX_NUM_OUTPUT_SURFACES];
+            int heights[] = new int[MAX_NUM_OUTPUT_SURFACES];
+            int formats[] = new int[MAX_NUM_OUTPUT_SURFACES];
+            int numSurfaces = 0;
+            try {
+                mCountRawOrDng.set(0);
+                mCountJpg.set(0);
+                mCountYuv.set(0);
+                mCountRaw10.set(0);
+                mCountCapRes.set(0);
+                mCaptureRawIsDng = false;
+                mCaptureResults = new CaptureResult[requests.size()];
+
+                JSONArray jsonOutputSpecs = ItsUtils.getOutputSpecs(params);
+                if (jsonOutputSpecs != null) {
+                    numSurfaces = jsonOutputSpecs.length();
+                    if (numSurfaces > MAX_NUM_OUTPUT_SURFACES) {
+                        throw new ItsException("Too many output surfaces");
+                    }
+                    for (int i = 0; i < numSurfaces; i++) {
+                        // Get the specified surface.
+                        JSONObject surfaceObj = jsonOutputSpecs.getJSONObject(i);
+                        String sformat = surfaceObj.optString("format");
+                        Size sizes[];
+                        if ("yuv".equals(sformat) || "".equals(sformat)) {
+                            // Default to YUV if no format is specified.
+                            formats[i] = ImageFormat.YUV_420_888;
+                            sizes = ItsUtils.getYuvOutputSizes(mCameraCharacteristics);
+                        } else if ("jpg".equals(sformat) || "jpeg".equals(sformat)) {
+                            formats[i] = ImageFormat.JPEG;
+                            sizes = ItsUtils.getJpegOutputSizes(mCameraCharacteristics);
+                        } else if ("raw".equals(sformat)) {
+                            formats[i] = ImageFormat.RAW_SENSOR;
+                            sizes = ItsUtils.getRawOutputSizes(mCameraCharacteristics);
+                        } else if ("raw10".equals(sformat)) {
+                            formats[i] = ImageFormat.RAW10;
+                            sizes = ItsUtils.getRawOutputSizes(mCameraCharacteristics);
+                        } else if ("dng".equals(sformat)) {
+                            formats[i] = ImageFormat.RAW_SENSOR;
+                            sizes = ItsUtils.getRawOutputSizes(mCameraCharacteristics);
+                            mCaptureRawIsDng = true;
+                        } else {
+                            throw new ItsException("Unsupported format: " + sformat);
+                        }
+                        // If the size is omitted, then default to the largest allowed size for the
+                        // format.
+                        widths[i] = surfaceObj.optInt("width");
+                        heights[i] = surfaceObj.optInt("height");
+                        if (widths[i] <= 0) {
+                            if (sizes == null || sizes.length == 0) {
+                                throw new ItsException(String.format(
+                                        "Zero stream configs available for requested format: %s",
+                                        sformat));
+                            }
+                            widths[i] = sizes[0].getWidth();
+                        }
+                        if (heights[i] <= 0) {
+                            heights[i] = sizes[0].getHeight();
+                        }
+                    }
+                } else {
+                    // No surface(s) specified at all.
+                    // Default: a single output surface which is full-res YUV.
+                    Size sizes[] =
+                            ItsUtils.getYuvOutputSizes(mCameraCharacteristics);
+                    numSurfaces = 1;
+                    widths[0] = sizes[0].getWidth();
+                    heights[0] = sizes[0].getHeight();
+                    formats[0] = ImageFormat.YUV_420_888;
+                }
+
+                prepareCaptureReader(widths, heights, formats, numSurfaces);
+                List<Surface> outputSurfaces = new ArrayList<Surface>(numSurfaces);
+                for (int i = 0; i < numSurfaces; i++) {
+                    outputSurfaces.add(mCaptureReaders[i].getSurface());
+                }
+                BlockingSessionCallback sessionListener = new BlockingSessionCallback();
+                mCamera.createCaptureSession(outputSurfaces, sessionListener, mCameraHandler);
+                mSession = sessionListener.waitAndGetSession(TIMEOUT_IDLE_MS);
+
+                for (int i = 0; i < numSurfaces; i++) {
+                    ImageReader.OnImageAvailableListener readerListener =
+                            createAvailableListener(mCaptureCallback);
+                    mCaptureReaders[i].setOnImageAvailableListener(readerListener,mSaveHandlers[i]);
+                }
+
+                // Plan for how many callbacks need to be received throughout the duration of this
+                // sequence of capture requests. There is one callback per image surface, and one
+                // callback for the CaptureResult, for each capture.
+                int numCaptures = requests.size();
+                mCountCallbacksRemaining.set(numCaptures * (numSurfaces + 1));
+
+            } catch (CameraAccessException e) {
+                throw new ItsException("Error configuring outputs", e);
+            } catch (org.json.JSONException e) {
+                throw new ItsException("JSON error", e);
+            }
+
+            // Initiate the captures.
+            for (int i = 0; i < requests.size(); i++) {
+                // For DNG captures, need the LSC map to be available.
+                if (mCaptureRawIsDng) {
+                    requests.get(i).set(CaptureRequest.STATISTICS_LENS_SHADING_MAP_MODE, 1);
+                }
+
+                CaptureRequest.Builder req = requests.get(i);
+                for (int j = 0; j < numSurfaces; j++) {
+                    req.addTarget(mCaptureReaders[j].getSurface());
+                }
+                mSession.capture(req.build(), mCaptureResultListener, mResultHandler);
+            }
+
+            // Make sure all callbacks have been hit (wait until captures are done).
+            // If no timeouts are received after a timeout, then fail.
+            int currentCount = mCountCallbacksRemaining.get();
+            while (currentCount > 0) {
+                try {
+                    Thread.sleep(TIMEOUT_CALLBACK*1000);
+                } catch (InterruptedException e) {
+                    throw new ItsException("Timeout failure", e);
+                }
+                int newCount = mCountCallbacksRemaining.get();
+                if (newCount == currentCount) {
+                    throw new ItsException(
+                            "No callback received within timeout");
+                }
+                currentCount = newCount;
+            }
+        } catch (android.hardware.camera2.CameraAccessException e) {
+            throw new ItsException("Access error: ", e);
+        }
+    }
+
+    @Override
+    public final void onSensorChanged(SensorEvent event) {
+        synchronized(mEventLock) {
+            if (mEventsEnabled) {
+                MySensorEvent ev2 = new MySensorEvent();
+                ev2.sensor = event.sensor;
+                ev2.accuracy = event.accuracy;
+                ev2.timestamp = event.timestamp;
+                ev2.values = new float[event.values.length];
+                System.arraycopy(event.values, 0, ev2.values, 0, event.values.length);
+                mEvents.add(ev2);
+            }
+        }
+    }
+
+    @Override
+    public final void onAccuracyChanged(Sensor sensor, int accuracy) {
+    }
+
+    private final CaptureCallback mCaptureCallback = new CaptureCallback() {
+        @Override
+        public void onCaptureAvailable(Image capture) {
+            try {
+                int format = capture.getFormat();
+                if (format == ImageFormat.JPEG) {
+                    Logt.i(TAG, "Received JPEG capture");
+                    byte[] img = ItsUtils.getDataFromImage(capture);
+                    ByteBuffer buf = ByteBuffer.wrap(img);
+                    int count = mCountJpg.getAndIncrement();
+                    mSocketRunnableObj.sendResponseCaptureBuffer("jpegImage", buf);
+                } else if (format == ImageFormat.YUV_420_888) {
+                    Logt.i(TAG, "Received YUV capture");
+                    byte[] img = ItsUtils.getDataFromImage(capture);
+                    ByteBuffer buf = ByteBuffer.wrap(img);
+                    int count = mCountYuv.getAndIncrement();
+                    mSocketRunnableObj.sendResponseCaptureBuffer("yuvImage", buf);
+                } else if (format == ImageFormat.RAW10) {
+                    Logt.i(TAG, "Received RAW10 capture");
+                    byte[] img = ItsUtils.getDataFromImage(capture);
+                    ByteBuffer buf = ByteBuffer.wrap(img);
+                    int count = mCountRaw10.getAndIncrement();
+                    mSocketRunnableObj.sendResponseCaptureBuffer("raw10Image", buf);
+                } else if (format == ImageFormat.RAW_SENSOR) {
+                    Logt.i(TAG, "Received RAW16 capture");
+                    int count = mCountRawOrDng.getAndIncrement();
+                    if (! mCaptureRawIsDng) {
+                        byte[] img = ItsUtils.getDataFromImage(capture);
+                        ByteBuffer buf = ByteBuffer.wrap(img);
+                        mSocketRunnableObj.sendResponseCaptureBuffer("rawImage", buf);
+                    } else {
+                        // Wait until the corresponding capture result is ready, up to a timeout.
+                        long t0 = android.os.SystemClock.elapsedRealtime();
+                        while (! mThreadExitFlag
+                                && android.os.SystemClock.elapsedRealtime()-t0 < TIMEOUT_CAP_RES) {
+                            if (mCaptureResults[count] != null) {
+                                Logt.i(TAG, "Writing capture as DNG");
+                                DngCreator dngCreator = new DngCreator(
+                                        mCameraCharacteristics, mCaptureResults[count]);
+                                ByteArrayOutputStream dngStream = new ByteArrayOutputStream();
+                                dngCreator.writeImage(dngStream, capture);
+                                byte[] dngArray = dngStream.toByteArray();
+                                ByteBuffer dngBuf = ByteBuffer.wrap(dngArray);
+                                mSocketRunnableObj.sendResponseCaptureBuffer("dngImage", dngBuf);
+                                break;
+                            } else {
+                                Thread.sleep(1);
+                            }
+                        }
+                    }
+                } else {
+                    throw new ItsException("Unsupported image format: " + format);
+                }
+                mCountCallbacksRemaining.decrementAndGet();
+            } catch (IOException e) {
+                Logt.e(TAG, "Script error: ", e);
+            } catch (InterruptedException e) {
+                Logt.e(TAG, "Script error: ", e);
+            } catch (ItsException e) {
+                Logt.e(TAG, "Script error: ", e);
+            }
+        }
+    };
+
+    private static float r2f(Rational r) {
+        return (float)r.getNumerator() / (float)r.getDenominator();
+    }
+
+    private final CaptureResultListener mCaptureResultListener = new CaptureResultListener() {
+        @Override
+        public void onCaptureStarted(CameraCaptureSession session, CaptureRequest request,
+                long timestamp, long frameNumber) {
+        }
+
+        @Override
+        public void onCaptureCompleted(CameraCaptureSession session, CaptureRequest request,
+                TotalCaptureResult result) {
+            try {
+                // Currently result has all 0 values.
+                if (request == null || result == null) {
+                    throw new ItsException("Request/result is invalid");
+                }
+
+                StringBuilder logMsg = new StringBuilder();
+                logMsg.append(String.format(
+                        "Capt result: AE=%d, AF=%d, AWB=%d, sens=%d, exp=%.1fms, dur=%.1fms, ",
+                        result.get(CaptureResult.CONTROL_AE_STATE),
+                        result.get(CaptureResult.CONTROL_AF_STATE),
+                        result.get(CaptureResult.CONTROL_AWB_STATE),
+                        result.get(CaptureResult.SENSOR_SENSITIVITY),
+                        result.get(CaptureResult.SENSOR_EXPOSURE_TIME).intValue() / 1000000.0f,
+                        result.get(CaptureResult.SENSOR_FRAME_DURATION).intValue() / 1000000.0f));
+                if (result.get(CaptureResult.COLOR_CORRECTION_GAINS) != null) {
+                    logMsg.append(String.format(
+                            "gains=[%.1f, %.1f, %.1f, %.1f], ",
+                            result.get(CaptureResult.COLOR_CORRECTION_GAINS).getRed(),
+                            result.get(CaptureResult.COLOR_CORRECTION_GAINS).getGreenEven(),
+                            result.get(CaptureResult.COLOR_CORRECTION_GAINS).getGreenOdd(),
+                            result.get(CaptureResult.COLOR_CORRECTION_GAINS).getBlue()));
+                } else {
+                    logMsg.append("gains=[], ");
+                }
+                if (result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM) != null) {
+                    logMsg.append(String.format(
+                            "xform=[%.1f, %.1f, %.1f, %.1f, %.1f, %.1f, %.1f, %.1f, %.1f], ",
+                            r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM).getElement(0,0)),
+                            r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM).getElement(1,0)),
+                            r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM).getElement(2,0)),
+                            r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM).getElement(0,1)),
+                            r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM).getElement(1,1)),
+                            r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM).getElement(2,1)),
+                            r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM).getElement(0,2)),
+                            r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM).getElement(1,2)),
+                            r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM).getElement(2,2))));
+                } else {
+                    logMsg.append("xform=[], ");
+                }
+                logMsg.append(String.format(
+                        "foc=%.1f",
+                        result.get(CaptureResult.LENS_FOCUS_DISTANCE)));
+                Logt.i(TAG, logMsg.toString());
+
+                if (result.get(CaptureResult.CONTROL_AE_STATE) != null) {
+                    mConvergedAE = result.get(CaptureResult.CONTROL_AE_STATE) ==
+                                              CaptureResult.CONTROL_AE_STATE_CONVERGED ||
+                                   result.get(CaptureResult.CONTROL_AE_STATE) ==
+                                              CaptureResult.CONTROL_AE_STATE_FLASH_REQUIRED ||
+                                   result.get(CaptureResult.CONTROL_AE_STATE) ==
+                                              CaptureResult.CONTROL_AE_STATE_LOCKED;
+                    mLockedAE = result.get(CaptureResult.CONTROL_AE_STATE) ==
+                                           CaptureResult.CONTROL_AE_STATE_LOCKED;
+                }
+                if (result.get(CaptureResult.CONTROL_AF_STATE) != null) {
+                    mConvergedAF = result.get(CaptureResult.CONTROL_AF_STATE) ==
+                                              CaptureResult.CONTROL_AF_STATE_FOCUSED_LOCKED;
+                }
+                if (result.get(CaptureResult.CONTROL_AWB_STATE) != null) {
+                    mConvergedAWB = result.get(CaptureResult.CONTROL_AWB_STATE) ==
+                                               CaptureResult.CONTROL_AWB_STATE_CONVERGED ||
+                                    result.get(CaptureResult.CONTROL_AWB_STATE) ==
+                                               CaptureResult.CONTROL_AWB_STATE_LOCKED;
+                    mLockedAWB = result.get(CaptureResult.CONTROL_AWB_STATE) ==
+                                            CaptureResult.CONTROL_AWB_STATE_LOCKED;
+                }
+
+                if (mConvergedAE && (!mNeedsLockedAE || mLockedAE)) {
+                    if (result.get(CaptureResult.SENSOR_SENSITIVITY) != null
+                            && result.get(CaptureResult.SENSOR_EXPOSURE_TIME) != null) {
+                        mSocketRunnableObj.sendResponse("aeResult", String.format("%d %d",
+                                result.get(CaptureResult.SENSOR_SENSITIVITY).intValue(),
+                                result.get(CaptureResult.SENSOR_EXPOSURE_TIME).intValue()
+                                ));
+                    } else {
+                        Logt.i(TAG, String.format(
+                                "AE converged but NULL exposure values, sensitivity:%b, expTime:%b",
+                                result.get(CaptureResult.SENSOR_SENSITIVITY) == null,
+                                result.get(CaptureResult.SENSOR_EXPOSURE_TIME) == null));
+                    }
+                }
+
+                if (mConvergedAF) {
+                    if (result.get(CaptureResult.LENS_FOCUS_DISTANCE) != null) {
+                        mSocketRunnableObj.sendResponse("afResult", String.format("%f",
+                                result.get(CaptureResult.LENS_FOCUS_DISTANCE)
+                                ));
+                    } else {
+                        Logt.i(TAG, "AF converged but NULL focus distance values");
+                    }
+                }
+
+                if (mConvergedAWB && (!mNeedsLockedAWB || mLockedAWB)) {
+                    if (result.get(CaptureResult.COLOR_CORRECTION_GAINS) != null
+                            && result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM) != null) {
+                        mSocketRunnableObj.sendResponse("awbResult", String.format(
+                                "%f %f %f %f %f %f %f %f %f %f %f %f %f",
+                                result.get(CaptureResult.COLOR_CORRECTION_GAINS).getRed(),
+                                result.get(CaptureResult.COLOR_CORRECTION_GAINS).getGreenEven(),
+                                result.get(CaptureResult.COLOR_CORRECTION_GAINS).getGreenOdd(),
+                                result.get(CaptureResult.COLOR_CORRECTION_GAINS).getBlue(),
+                                r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM).getElement(0,0)),
+                                r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM).getElement(1,0)),
+                                r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM).getElement(2,0)),
+                                r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM).getElement(0,1)),
+                                r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM).getElement(1,1)),
+                                r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM).getElement(2,1)),
+                                r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM).getElement(0,2)),
+                                r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM).getElement(1,2)),
+                                r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM).getElement(2,2))
+                                ));
+                    } else {
+                        Logt.i(TAG, String.format(
+                                "AWB converged but NULL color correction values, gains:%b, ccm:%b",
+                                result.get(CaptureResult.COLOR_CORRECTION_GAINS) == null,
+                                result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM) == null));
+                    }
+                }
+
+                if (mIssuedRequest3A) {
+                    mIssuedRequest3A = false;
+                    mInterlock3A.open();
+                } else {
+                    int count = mCountCapRes.getAndIncrement();
+                    mCaptureResults[count] = result;
+                    mSocketRunnableObj.sendResponseCaptureResult(mCameraCharacteristics,
+                            request, result, mCaptureReaders);
+                    mCountCallbacksRemaining.decrementAndGet();
+                }
+            } catch (ItsException e) {
+                Logt.e(TAG, "Script error: ", e);
+            } catch (Exception e) {
+                Logt.e(TAG, "Script error: ", e);
+            }
+        }
+
+        @Override
+        public void onCaptureFailed(CameraCaptureSession session, CaptureRequest request,
+                CaptureFailure failure) {
+            Logt.e(TAG, "Script error: capture failed");
+        }
+    };
+}
diff --git a/apps/CtsVerifier/src/com/android/cts/verifier/camera/its/ItsTestActivity.java b/apps/CtsVerifier/src/com/android/cts/verifier/camera/its/ItsTestActivity.java
new file mode 100644
index 0000000..12b9bfc
--- /dev/null
+++ b/apps/CtsVerifier/src/com/android/cts/verifier/camera/its/ItsTestActivity.java
@@ -0,0 +1,142 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.cts.verifier.camera.its;
+
+import android.content.BroadcastReceiver;
+import android.content.Context;
+import android.content.Intent;
+import android.content.IntentFilter;
+import android.content.res.Configuration;
+import android.hardware.camera2.CameraAccessException;
+import android.hardware.camera2.CameraCharacteristics;
+import android.hardware.camera2.CameraManager;
+import android.os.Bundle;
+import android.util.Log;
+import android.widget.Toast;
+import java.util.HashSet;
+import java.util.Arrays;
+
+import com.android.cts.verifier.PassFailButtons;
+import com.android.cts.verifier.R;
+
+
+/**
+ * Test for Camera features that require that the camera be aimed at a specific test scene.
+ * This test activity requires a USB connection to a computer, and a corresponding host-side run of
+ * the python scripts found in the CameraITS directory.
+ */
+public class ItsTestActivity extends PassFailButtons.Activity {
+    private static final String TAG = "ItsTestActivity";
+    private static final String EXTRA_SUCCESS = "camera.its.extra.SUCCESS";
+    private static final String ACTION_ITS_RESULT =
+            "com.android.cts.verifier.camera.its.ACTION_ITS_RESULT";
+
+    class SuccessReceiver extends BroadcastReceiver {
+        @Override
+        public void onReceive(Context context, Intent intent) {
+            Log.i(TAG, "Received result for Camera ITS tests");
+            if (ACTION_ITS_RESULT.equals(intent.getAction())) {
+                String result = intent.getStringExtra(EXTRA_SUCCESS);
+                String[] parts = result.split("=");
+                if (parts.length != 2) {
+                    Toast.makeText(ItsTestActivity.this,
+                            "Received unknown ITS result string: " + result,
+                            Toast.LENGTH_SHORT).show();
+                }
+                String cameraId = parts[0];
+                boolean pass = parts[1].equals("True");
+                if(pass) {
+                    Log.i(TAG, "Received Camera " + cameraId + " ITS SUCCESS from host.");
+                    mITSPassedCameraIds.add(cameraId);
+                    if (mCameraIds != null &&
+                            mITSPassedCameraIds.containsAll(Arrays.asList(mCameraIds))) {
+                        ItsTestActivity.this.showToast(R.string.its_test_passed);
+                        ItsTestActivity.this.getPassButton().setEnabled(true);
+                    }
+                } else {
+                    Log.i(TAG, "Received Camera " + cameraId + " ITS FAILURE from host.");
+                    ItsTestActivity.this.showToast(R.string.its_test_failed);
+                }
+            }
+        }
+    }
+
+    private final SuccessReceiver mSuccessReceiver = new SuccessReceiver();
+    private final HashSet<String> mITSPassedCameraIds = new HashSet<>();
+    private String[] mCameraIds = null;
+
+    @Override
+    protected void onCreate(Bundle savedInstanceState) {
+        super.onCreate(savedInstanceState);
+        setContentView(R.layout.its_main);
+        setInfoResources(R.string.camera_its_test, R.string.camera_its_test_info, -1);
+        setPassFailButtonClickListeners();
+        getPassButton().setEnabled(false);
+    }
+
+    @Override
+    protected void onResume() {
+        super.onResume();
+        CameraManager manager = (CameraManager) this.getSystemService(Context.CAMERA_SERVICE);
+        if (manager == null) {
+            showToast(R.string.no_camera_manager);
+        } else {
+            try {
+                mCameraIds = manager.getCameraIdList();
+                boolean allCamerasAreLegacy = true;
+                for (String id : mCameraIds) {
+                    CameraCharacteristics characteristics = manager.getCameraCharacteristics(id);
+                    if (characteristics.get(CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL)
+                            != CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY) {
+                        allCamerasAreLegacy = false;
+                        break;
+                    }
+                }
+                if (allCamerasAreLegacy) {
+                    showToast(R.string.all_legacy_devices);
+                    getPassButton().setEnabled(false);
+                }
+            } catch (CameraAccessException e) {
+                Toast.makeText(ItsTestActivity.this,
+                        "Received error from camera service while checking device capabilities: "
+                                + e, Toast.LENGTH_SHORT).show();
+            }
+            IntentFilter filter = new IntentFilter(ACTION_ITS_RESULT);
+            registerReceiver(mSuccessReceiver, filter);
+        }
+    }
+
+    @Override
+    protected void onPause() {
+        super.onPause();
+        unregisterReceiver(mSuccessReceiver);
+    }
+
+    @Override
+    public void onConfigurationChanged(Configuration newConfig) {
+        super.onConfigurationChanged(newConfig);
+        setContentView(R.layout.its_main);
+        setInfoResources(R.string.camera_its_test, R.string.camera_its_test_info, -1);
+        setPassFailButtonClickListeners();
+        getPassButton().setEnabled(false);
+    }
+
+    private void showToast(int messageId) {
+        Toast.makeText(ItsTestActivity.this, messageId, Toast.LENGTH_SHORT).show();
+    }
+
+}
diff --git a/apps/CtsVerifier/src/com/android/cts/verifier/camera/its/ItsUtils.java b/apps/CtsVerifier/src/com/android/cts/verifier/camera/its/ItsUtils.java
new file mode 100644
index 0000000..2541142
--- /dev/null
+++ b/apps/CtsVerifier/src/com/android/cts/verifier/camera/its/ItsUtils.java
@@ -0,0 +1,221 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.cts.verifier.camera.its;
+
+import android.content.Context;
+import android.graphics.ImageFormat;
+import android.hardware.camera2.CameraDevice;
+import android.hardware.camera2.CameraCharacteristics;
+import android.hardware.camera2.CaptureRequest;
+import android.hardware.camera2.CaptureResult;
+import android.hardware.camera2.params.MeteringRectangle;
+import android.hardware.camera2.params.StreamConfigurationMap;
+import android.media.Image;
+import android.media.Image.Plane;
+import android.net.Uri;
+import android.os.Environment;
+import android.util.Log;
+import android.util.Size;
+
+import org.json.JSONArray;
+import org.json.JSONObject;
+
+import java.nio.ByteBuffer;
+import java.nio.charset.Charset;
+import java.util.ArrayList;
+import java.util.List;
+
+public class ItsUtils {
+    public static final String TAG = ItsUtils.class.getSimpleName();
+
+    public static ByteBuffer jsonToByteBuffer(JSONObject jsonObj) {
+        return ByteBuffer.wrap(jsonObj.toString().getBytes(Charset.defaultCharset()));
+    }
+
+    public static MeteringRectangle[] getJsonWeightedRectsFromArray(
+            JSONArray a, boolean normalized, int width, int height)
+            throws ItsException {
+        try {
+            // Returns [x0,y0,x1,y1,wgt,  x0,y0,x1,y1,wgt,  x0,y0,x1,y1,wgt,  ...]
+            assert(a.length() % 5 == 0);
+            MeteringRectangle[] ma = new MeteringRectangle[a.length() / 5];
+            for (int i = 0; i < a.length(); i += 5) {
+                int x,y,w,h;
+                if (normalized) {
+                    x = (int)Math.floor(a.getDouble(i+0) * width + 0.5f);
+                    y = (int)Math.floor(a.getDouble(i+1) * height + 0.5f);
+                    w = (int)Math.floor(a.getDouble(i+2) * width + 0.5f);
+                    h = (int)Math.floor(a.getDouble(i+3) * height + 0.5f);
+                } else {
+                    x = a.getInt(i+0);
+                    y = a.getInt(i+1);
+                    w = a.getInt(i+2);
+                    h = a.getInt(i+3);
+                }
+                x = Math.max(x, 0);
+                y = Math.max(y, 0);
+                w = Math.min(w, width-x);
+                h = Math.min(h, height-y);
+                int wgt = a.getInt(i+4);
+                ma[i/5] = new MeteringRectangle(x,y,w,h,wgt);
+            }
+            return ma;
+        } catch (org.json.JSONException e) {
+            throw new ItsException("JSON error: ", e);
+        }
+    }
+
+    public static JSONArray getOutputSpecs(JSONObject jsonObjTop)
+            throws ItsException {
+        try {
+            if (jsonObjTop.has("outputSurfaces")) {
+                return jsonObjTop.getJSONArray("outputSurfaces");
+            }
+            return null;
+        } catch (org.json.JSONException e) {
+            throw new ItsException("JSON error: ", e);
+        }
+    }
+
+    public static Size[] getRawOutputSizes(CameraCharacteristics ccs)
+            throws ItsException {
+        return getOutputSizes(ccs, ImageFormat.RAW_SENSOR);
+    }
+
+    public static Size[] getJpegOutputSizes(CameraCharacteristics ccs)
+            throws ItsException {
+        return getOutputSizes(ccs, ImageFormat.JPEG);
+    }
+
+    public static Size[] getYuvOutputSizes(CameraCharacteristics ccs)
+            throws ItsException {
+        return getOutputSizes(ccs, ImageFormat.YUV_420_888);
+    }
+
+    private static Size[] getOutputSizes(CameraCharacteristics ccs, int format)
+            throws ItsException {
+        StreamConfigurationMap configMap = ccs.get(
+                CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
+        if (configMap == null) {
+            throw new ItsException("Failed to get stream config");
+        }
+        return configMap.getOutputSizes(format);
+    }
+
+    public static byte[] getDataFromImage(Image image)
+            throws ItsException {
+        int format = image.getFormat();
+        int width = image.getWidth();
+        int height = image.getHeight();
+        byte[] data = null;
+
+        // Read image data
+        Plane[] planes = image.getPlanes();
+
+        // Check image validity
+        if (!checkAndroidImageFormat(image)) {
+            throw new ItsException(
+                    "Invalid image format passed to getDataFromImage: " + image.getFormat());
+        }
+
+        if (format == ImageFormat.JPEG) {
+            // JPEG doesn't have pixelstride and rowstride, treat it as 1D buffer.
+            ByteBuffer buffer = planes[0].getBuffer();
+            data = new byte[buffer.capacity()];
+            buffer.get(data);
+            return data;
+        } else if (format == ImageFormat.YUV_420_888 || format == ImageFormat.RAW_SENSOR
+                || format == ImageFormat.RAW10) {
+            int offset = 0;
+            data = new byte[width * height * ImageFormat.getBitsPerPixel(format) / 8];
+            int maxRowSize = planes[0].getRowStride();
+            for (int i = 0; i < planes.length; i++) {
+                if (maxRowSize < planes[i].getRowStride()) {
+                    maxRowSize = planes[i].getRowStride();
+                }
+            }
+            byte[] rowData = new byte[maxRowSize];
+            for (int i = 0; i < planes.length; i++) {
+                ByteBuffer buffer = planes[i].getBuffer();
+                int rowStride = planes[i].getRowStride();
+                int pixelStride = planes[i].getPixelStride();
+                int bytesPerPixel = ImageFormat.getBitsPerPixel(format) / 8;
+                Logt.i(TAG, String.format(
+                        "Reading image: fmt %d, plane %d, w %d, h %d, rowStride %d, pixStride %d",
+                        format, i, width, height, rowStride, pixelStride));
+                // For multi-planar yuv images, assuming yuv420 with 2x2 chroma subsampling.
+                int w = (i == 0) ? width : width / 2;
+                int h = (i == 0) ? height : height / 2;
+                for (int row = 0; row < h; row++) {
+                    if (pixelStride == bytesPerPixel) {
+                        // Special case: optimized read of the entire row
+                        int length = w * bytesPerPixel;
+                        buffer.get(data, offset, length);
+                        // Advance buffer the remainder of the row stride
+                        buffer.position(buffer.position() + rowStride - length);
+                        offset += length;
+                    } else {
+                        // Generic case: should work for any pixelStride but slower.
+                        // Use intermediate buffer to avoid read byte-by-byte from
+                        // DirectByteBuffer, which is very bad for performance.
+                        // Also need avoid access out of bound by only reading the available
+                        // bytes in the bytebuffer.
+                        int readSize = rowStride;
+                        if (buffer.remaining() < readSize) {
+                            readSize = buffer.remaining();
+                        }
+                        buffer.get(rowData, 0, readSize);
+                        if (pixelStride >= 1) {
+                            for (int col = 0; col < w; col++) {
+                                data[offset++] = rowData[col * pixelStride];
+                            }
+                        } else {
+                            // PixelStride of 0 can mean pixel isn't a multiple of 8 bits, for
+                            // example with RAW10. Just copy the buffer, dropping any padding at
+                            // the end of the row.
+                            int length = (w * ImageFormat.getBitsPerPixel(format)) / 8;
+                            System.arraycopy(rowData,0,data,offset,length);
+                            offset += length;
+                        }
+                    }
+                }
+            }
+            Logt.i(TAG, String.format("Done reading image, format %d", format));
+            return data;
+        } else {
+            throw new ItsException("Unsupported image format: " + format);
+        }
+    }
+
+    private static boolean checkAndroidImageFormat(Image image) {
+        int format = image.getFormat();
+        Plane[] planes = image.getPlanes();
+        switch (format) {
+            case ImageFormat.YUV_420_888:
+            case ImageFormat.NV21:
+            case ImageFormat.YV12:
+                return 3 == planes.length;
+            case ImageFormat.RAW_SENSOR:
+            case ImageFormat.RAW10:
+            case ImageFormat.JPEG:
+                return 1 == planes.length;
+            default:
+                return false;
+        }
+    }
+}
+
diff --git a/apps/CtsVerifier/src/com/android/cts/verifier/camera/its/Logt.java b/apps/CtsVerifier/src/com/android/cts/verifier/camera/its/Logt.java
new file mode 100644
index 0000000..852a1ce
--- /dev/null
+++ b/apps/CtsVerifier/src/com/android/cts/verifier/camera/its/Logt.java
@@ -0,0 +1,39 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.cts.verifier.camera.its;
+
+import android.util.Log;
+
+public class Logt {
+    public static void i(String tag, String msg) {
+        long t = android.os.SystemClock.elapsedRealtime();
+        Log.i(tag, String.format("[%d] %s", t, msg));
+    }
+    public static void e(String tag, String msg) {
+        long t = android.os.SystemClock.elapsedRealtime();
+        Log.e(tag, String.format("[%d] %s", t, msg));
+    }
+    public static void w(String tag, String msg) {
+        long t = android.os.SystemClock.elapsedRealtime();
+        Log.w(tag, String.format("[%d] %s", t, msg));
+    }
+    public static void e(String tag, String msg, Throwable tr) {
+        long t = android.os.SystemClock.elapsedRealtime();
+        Log.e(tag, String.format("[%d] %s", t, msg), tr);
+    }
+}
+
diff --git a/apps/CtsVerifier/src/com/android/cts/verifier/features/FeatureSummaryActivity.java b/apps/CtsVerifier/src/com/android/cts/verifier/features/FeatureSummaryActivity.java
index 74a5317..c0895d7 100644
--- a/apps/CtsVerifier/src/com/android/cts/verifier/features/FeatureSummaryActivity.java
+++ b/apps/CtsVerifier/src/com/android/cts/verifier/features/FeatureSummaryActivity.java
@@ -238,6 +238,7 @@
         // features
         boolean hasWifi = false;
         boolean hasTelephony = false;
+        boolean hasBluetooth = false;
         boolean hasIllegalFeature = false;
 
         // get list of all features device thinks it has, & store in a HashMap
@@ -304,6 +305,7 @@
                 // device reports it -- yay! set the happy icon
                 hasWifi = hasWifi || PackageManager.FEATURE_WIFI.equals(f.name);
                 hasTelephony = hasTelephony || PackageManager.FEATURE_TELEPHONY.equals(f.name);
+                hasBluetooth = hasBluetooth || PackageManager.FEATURE_BLUETOOTH.equals(f.name);
                 statusIcon = R.drawable.fs_good;
                 actualFeatures.remove(f.name);
             } else if (!present && f.required) {
@@ -388,9 +390,11 @@
         if (hasIllegalFeature) {
             sb.append(getResources().getString(R.string.fs_disallowed)).append("\n");
         }
-        if (!hasWifi && !hasTelephony) {
+
+        if (!hasWifi && !hasTelephony && !hasBluetooth) {
             sb.append(getResources().getString(R.string.fs_missing_wifi_telephony)).append("\n");
         }
+
         String warnings = sb.toString().trim();
         if (warnings == null || "".equals(warnings)) {
             ((TextView) (findViewById(R.id.fs_warnings))).setVisibility(View.GONE);
diff --git a/apps/CtsVerifier/src/com/android/cts/verifier/managedprovisioning/ByodFlowTestActivity.java b/apps/CtsVerifier/src/com/android/cts/verifier/managedprovisioning/ByodFlowTestActivity.java
index da823e8..6a9de44 100644
--- a/apps/CtsVerifier/src/com/android/cts/verifier/managedprovisioning/ByodFlowTestActivity.java
+++ b/apps/CtsVerifier/src/com/android/cts/verifier/managedprovisioning/ByodFlowTestActivity.java
@@ -69,7 +69,6 @@
     protected DevicePolicyManager mDevicePolicyManager;
 
     private TestItem mProfileOwnerInstalled;
-    private TestItem mDiskEncryptionTest;
     private TestItem mProfileVisibleTest;
     private TestItem mDeviceAdminVisibleTest;
     private TestItem mWorkAppVisibleTest;
@@ -155,13 +154,6 @@
             }
         };
 
-        mDiskEncryptionTest = new TestItem(this, R.string.provisioning_byod_diskencryption) {
-            @Override
-            public TestResult getPassFailState() {
-                return isDeviceEncrypted() ? TestResult.Passed : TestResult.Failed;
-            }
-        };
-
         mProfileVisibleTest = new TestItem(this, R.string.provisioning_byod_profile_visible,
                 R.string.provisioning_byod_profile_visible_instruction,
                 new Intent(Settings.ACTION_SETTINGS));
@@ -181,7 +173,6 @@
                 R.string.provisioning_byod_cross_profile_instruction,
                 chooser);
 
-        mTests.add(mDiskEncryptionTest);
         mTests.add(mProfileOwnerInstalled);
         mTests.add(mProfileVisibleTest);
         mTests.add(mDeviceAdminVisibleTest);
@@ -284,11 +275,6 @@
                 PackageManager.DONT_KILL_APP);
     }
 
-    private boolean isDeviceEncrypted() {
-        return mDevicePolicyManager.getStorageEncryptionStatus()
-                == DevicePolicyManager.ENCRYPTION_STATUS_ACTIVE;
-    }
-
     private void showToast(int messageId) {
         String message = getString(messageId);
         Toast.makeText(this, message, Toast.LENGTH_SHORT).show();
diff --git a/apps/CtsVerifier/src/com/android/cts/verifier/nfc/hce/HceReaderTestActivity.java b/apps/CtsVerifier/src/com/android/cts/verifier/nfc/hce/HceReaderTestActivity.java
index f628fb7..4c77871 100644
--- a/apps/CtsVerifier/src/com/android/cts/verifier/nfc/hce/HceReaderTestActivity.java
+++ b/apps/CtsVerifier/src/com/android/cts/verifier/nfc/hce/HceReaderTestActivity.java
@@ -16,6 +16,8 @@
 
 package com.android.cts.verifier.nfc.hce;
 
+import android.nfc.NfcAdapter;
+import android.nfc.cardemulation.CardEmulation;
 import com.android.cts.verifier.ArrayTestListAdapter;
 import com.android.cts.verifier.PassFailButtons;
 import com.android.cts.verifier.R;
@@ -101,21 +103,25 @@
                     SimpleReaderActivity.class.getName(),
                     DynamicAidEmulatorActivity.buildReaderIntent(this), null));
 
-            adapter.add(TestListItem.newTest(this, R.string.nfc_hce_payment_prefix_aids_reader,
-                    SimpleReaderActivity.class.getName(),
-                    PrefixPaymentEmulatorActivity.buildReaderIntent(this), null));
+            NfcAdapter nfcAdapter = NfcAdapter.getDefaultAdapter(this);
+            CardEmulation cardEmulation = CardEmulation.getInstance(nfcAdapter);
+            if (cardEmulation.supportsAidPrefixRegistration()) {
+                adapter.add(TestListItem.newTest(this, R.string.nfc_hce_payment_prefix_aids_reader,
+                        SimpleReaderActivity.class.getName(),
+                        PrefixPaymentEmulatorActivity.buildReaderIntent(this), null));
 
-            adapter.add(TestListItem.newTest(this, R.string.nfc_hce_payment_prefix_aids_reader_2,
-                    SimpleReaderActivity.class.getName(),
-                    PrefixPaymentEmulator2Activity.buildReaderIntent(this), null));
+                adapter.add(TestListItem.newTest(this, R.string.nfc_hce_payment_prefix_aids_reader_2,
+                        SimpleReaderActivity.class.getName(),
+                        PrefixPaymentEmulator2Activity.buildReaderIntent(this), null));
 
-            adapter.add(TestListItem.newTest(this, R.string.nfc_hce_other_prefix_aids_reader,
-                    SimpleReaderActivity.class.getName(),
-                    DualNonPaymentPrefixEmulatorActivity.buildReaderIntent(this), null));
+                adapter.add(TestListItem.newTest(this, R.string.nfc_hce_other_prefix_aids_reader,
+                        SimpleReaderActivity.class.getName(),
+                        DualNonPaymentPrefixEmulatorActivity.buildReaderIntent(this), null));
 
-            adapter.add(TestListItem.newTest(this, R.string.nfc_hce_other_conflicting_prefix_aids_reader,
-                    SimpleReaderActivity.class.getName(),
-                    ConflictingNonPaymentPrefixEmulatorActivity.buildReaderIntent(this), null));
+                adapter.add(TestListItem.newTest(this, R.string.nfc_hce_other_conflicting_prefix_aids_reader,
+                        SimpleReaderActivity.class.getName(),
+                        ConflictingNonPaymentPrefixEmulatorActivity.buildReaderIntent(this), null));
+            }
         }
 
         setTestListAdapter(adapter);
diff --git a/apps/CtsVerifier/src/com/android/cts/verifier/notifications/AttentionManagementVerifierActivity.java b/apps/CtsVerifier/src/com/android/cts/verifier/notifications/AttentionManagementVerifierActivity.java
new file mode 100644
index 0000000..d8f196a
--- /dev/null
+++ b/apps/CtsVerifier/src/com/android/cts/verifier/notifications/AttentionManagementVerifierActivity.java
@@ -0,0 +1,931 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.cts.verifier.notifications;
+
+import static com.android.cts.verifier.notifications.MockListener.JSON_AMBIENT;
+import static com.android.cts.verifier.notifications.MockListener.JSON_MATCHES_ZEN_FILTER;
+import static com.android.cts.verifier.notifications.MockListener.JSON_TAG;
+
+import android.app.Notification;
+import android.content.ContentProviderOperation;
+import android.content.OperationApplicationException;
+import android.database.Cursor;
+import android.net.Uri;
+import android.os.RemoteException;
+import android.provider.ContactsContract;
+import android.provider.ContactsContract.CommonDataKinds.Email;
+import android.provider.ContactsContract.CommonDataKinds.Phone;
+import android.provider.ContactsContract.CommonDataKinds.StructuredName;
+import android.service.notification.NotificationListenerService;
+import android.util.Log;
+import android.view.View;
+import android.view.ViewGroup;
+import com.android.cts.verifier.R;
+import org.json.JSONException;
+import org.json.JSONObject;
+
+import java.util.ArrayList;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Set;
+
+public class AttentionManagementVerifierActivity
+        extends InteractiveVerifierActivity {
+    private static final String TAG = "NoListenerAttentionVerifier";
+
+    private static final String ALICE = "Alice";
+    private static final String ALICE_PHONE = "+16175551212";
+    private static final String ALICE_EMAIL = "alice@_foo._bar";
+    private static final String BOB = "Bob";
+    private static final String BOB_PHONE = "+16505551212";;
+    private static final String BOB_EMAIL = "bob@_foo._bar";
+    private static final String CHARLIE = "Charlie";
+    private static final String CHARLIE_PHONE = "+13305551212";
+    private static final String CHARLIE_EMAIL = "charlie@_foo._bar";
+    private static final int MODE_NONE = 0;
+    private static final int MODE_URI = 1;
+    private static final int MODE_PHONE = 2;
+    private static final int MODE_EMAIL = 3;
+
+    private Uri mAliceUri;
+    private Uri mBobUri;
+    private Uri mCharlieUri;
+
+    @Override
+    int getTitleResource() {
+        return R.string.attention_test;
+    }
+
+    @Override
+    int getInstructionsResource() {
+        return R.string.attention_info;
+    }
+
+    // Test Setup
+
+    @Override
+    protected List<InteractiveTestCase> createTestItems() {
+        List<InteractiveTestCase> tests = new ArrayList<>(17);
+        tests.add(new IsEnabledTest());
+        tests.add(new ServiceStartedTest());
+        tests.add(new InsertContactsTest());
+        tests.add(new SetModeNoneTest());
+        tests.add(new NoneInterceptsAllTest());
+        tests.add(new SetModePriorityTest());
+        tests.add(new PriorityInterceptsSomeTest());
+        tests.add(new SetModeAllTest());
+        tests.add(new AllInterceptsNothingTest());
+        tests.add(new DefaultOrderTest());
+        tests.add(new PrioritytOrderTest());
+        tests.add(new InterruptionOrderTest());
+        tests.add(new AmbientBitsTest());
+        tests.add(new LookupUriOrderTest());
+        tests.add(new EmailOrderTest());
+        tests.add(new PhoneOrderTest());
+        tests.add(new DeleteContactsTest());
+        return tests;
+    }
+
+    // Tests
+
+    protected class InsertContactsTest extends InteractiveTestCase {
+        @Override
+        View inflate(ViewGroup parent) {
+            return createAutoItem(parent, R.string.attention_create_contacts);
+        }
+
+        @Override
+        void setUp() {
+            insertSingleContact(ALICE, ALICE_PHONE, ALICE_EMAIL, true);
+            insertSingleContact(BOB, BOB_PHONE, BOB_EMAIL, false);
+            // charlie is not in contacts
+            status = READY;
+            // wait for insertions to move through the system
+            delay();
+        }
+
+        @Override
+        void test() {
+            mAliceUri = lookupContact(ALICE_PHONE);
+            mBobUri = lookupContact(BOB_PHONE);
+            mCharlieUri = lookupContact(CHARLIE_PHONE);
+
+            status = PASS;
+            if (mAliceUri == null) { status = FAIL; }
+            if (mBobUri == null) { status = FAIL; }
+            if (mCharlieUri != null) { status = FAIL; }
+            next();
+        }
+    }
+
+    protected class DeleteContactsTest extends InteractiveTestCase {
+        @Override
+        View inflate(ViewGroup parent) {
+            return createAutoItem(parent, R.string.attention_delete_contacts);
+        }
+
+        @Override
+        void test() {
+            final ArrayList<ContentProviderOperation> operationList = new ArrayList<>();
+            operationList.add(ContentProviderOperation.newDelete(mAliceUri).build());
+            operationList.add(ContentProviderOperation.newDelete(mBobUri).build());
+            try {
+                mContext.getContentResolver().applyBatch(ContactsContract.AUTHORITY, operationList);
+                status = READY;
+            } catch (RemoteException e) {
+                Log.e(TAG, String.format("%s: %s", e.toString(), e.getMessage()));
+                status = FAIL;
+            } catch (OperationApplicationException e) {
+                Log.e(TAG, String.format("%s: %s", e.toString(), e.getMessage()));
+                status = FAIL;
+            }
+            status = PASS;
+            next();
+        }
+    }
+
+    protected class SetModeNoneTest extends InteractiveTestCase {
+        @Override
+        View inflate(ViewGroup parent) {
+            return createRetryItem(parent, R.string.attention_filter_none);
+        }
+
+        @Override
+        void test() {
+            MockListener.probeFilter(mContext,
+                    new MockListener.IntegerResultCatcher() {
+                        @Override
+                        public void accept(int mode) {
+                            if (mode == NotificationListenerService.INTERRUPTION_FILTER_NONE) {
+                                status = PASS;
+                                next();
+                            } else {
+                                Log.i("SetModeNoneTest", "waiting, current mode is: " + mode);
+                                status = WAIT_FOR_USER;
+                            }
+                        }
+                    });
+        }
+
+        @Override
+        void tearDown() {
+            mNm.cancelAll();
+            MockListener.resetListenerData(mContext);
+            delay();
+        }
+    }
+
+    protected class NoneInterceptsAllTest extends InteractiveTestCase {
+        @Override
+        View inflate(ViewGroup parent) {
+            return createAutoItem(parent, R.string.attention_all_are_filtered);
+        }
+
+        @Override
+        void setUp() {
+            sendNotifications(MODE_URI, false, false);
+            status = READY;
+            // wait for notifications to move through the system
+            delay();
+        }
+
+        @Override
+        void test() {
+            MockListener.probeListenerPayloads(mContext,
+                    new MockListener.StringListResultCatcher() {
+                        @Override
+                        public void accept(List<String> result) {
+                            Set<String> found = new HashSet<String>();
+                            if (result == null || result.size() == 0) {
+                                status = FAIL;
+                                next();
+                                return;
+                            }
+                            boolean pass = true;
+                            for (String payloadData : result) {
+                                try {
+                                    JSONObject payload = new JSONObject(payloadData);
+                                    String tag = payload.getString(JSON_TAG);
+                                    boolean zen = payload.getBoolean(JSON_MATCHES_ZEN_FILTER);
+                                    Log.e(TAG, tag + (zen ? "" : " not") + " intercepted");
+                                    if (found.contains(tag)) {
+                                        // multiple entries for same notification!
+                                        pass = false;
+                                    } else if (ALICE.equals(tag)) {
+                                        found.add(ALICE);
+                                        pass &= !zen;
+                                    } else if (BOB.equals(tag)) {
+                                        found.add(BOB);
+                                        pass &= !zen;
+                                    } else if (CHARLIE.equals(tag)) {
+                                        found.add(CHARLIE);
+                                        pass &= !zen;
+                                    }
+                                } catch (JSONException e) {
+                                    pass = false;
+                                    Log.e(TAG, "failed to unpack data from mocklistener", e);
+                                }
+                            }
+                            pass &= found.size() == 3;
+                            status = pass ? PASS : FAIL;
+                            next();
+                        }
+                    });
+            delay();  // in case the catcher never returns
+        }
+
+        @Override
+        void tearDown() {
+            mNm.cancelAll();
+            MockListener.resetListenerData(mContext);
+            delay();
+        }
+
+    }
+
+    protected class SetModeAllTest extends InteractiveTestCase {
+        @Override
+        View inflate(ViewGroup parent) {
+            return createRetryItem(parent, R.string.attention_filter_all);
+        }
+
+        @Override
+        void test() {
+            MockListener.probeFilter(mContext,
+                    new MockListener.IntegerResultCatcher() {
+                        @Override
+                        public void accept(int mode) {
+                            if (mode == NotificationListenerService.INTERRUPTION_FILTER_ALL) {
+                                status = PASS;
+                                next();
+                            } else {
+                                Log.i("SetModeAllTest", "waiting, current mode is: " + mode);
+                                status = WAIT_FOR_USER;
+                            }
+                        }
+                    });
+        }
+    }
+
+    protected class AllInterceptsNothingTest extends InteractiveTestCase {
+        @Override
+        View inflate(ViewGroup parent) {
+            return createAutoItem(parent, R.string.attention_none_are_filtered);
+        }
+
+        @Override
+        void setUp() {
+            sendNotifications(MODE_URI, false, false);
+            status = READY;
+            // wait for notifications to move through the system
+            delay();
+        }
+
+        @Override
+        void test() {
+            MockListener.probeListenerPayloads(mContext,
+                    new MockListener.StringListResultCatcher() {
+                        @Override
+                        public void accept(List<String> result) {
+                            Set<String> found = new HashSet<String>();
+                            if (result == null || result.size() == 0) {
+                                status = FAIL;
+                                return;
+                            }
+                            boolean pass = true;
+                            for (String payloadData : result) {
+                                try {
+                                    JSONObject payload = new JSONObject(payloadData);
+                                    String tag = payload.getString(JSON_TAG);
+                                    boolean zen = payload.getBoolean(JSON_MATCHES_ZEN_FILTER);
+                                    Log.e(TAG, tag + (zen ? "" : " not") + " intercepted");
+                                    if (found.contains(tag)) {
+                                        // multiple entries for same notification!
+                                        pass = false;
+                                    } else if (ALICE.equals(tag)) {
+                                        found.add(ALICE);
+                                        pass &= zen;
+                                    } else if (BOB.equals(tag)) {
+                                        found.add(BOB);
+                                        pass &= zen;
+                                    } else if (CHARLIE.equals(tag)) {
+                                        found.add(CHARLIE);
+                                        pass &= zen;
+                                    }
+                                } catch (JSONException e) {
+                                    pass = false;
+                                    Log.e(TAG, "failed to unpack data from mocklistener", e);
+                                }
+                            }
+                            pass &= found.size() == 3;
+                            status = pass ? PASS : FAIL;
+                            next();
+                        }
+                    });
+            delay();  // in case the catcher never returns
+        }
+
+        @Override
+        void tearDown() {
+            mNm.cancelAll();
+            MockListener.resetListenerData(mContext);
+            delay();
+        }
+    }
+
+    protected class SetModePriorityTest extends InteractiveTestCase {
+        @Override
+        View inflate(ViewGroup parent) {
+            return createRetryItem(parent, R.string.attention_filter_priority);
+        }
+
+        @Override
+        void test() {
+            MockListener.probeFilter(mContext,
+                    new MockListener.IntegerResultCatcher() {
+                        @Override
+                        public void accept(int mode) {
+                            if (mode == NotificationListenerService.INTERRUPTION_FILTER_PRIORITY) {
+                                status = PASS;
+                                next();
+                            } else {
+                                Log.i("SetModePriorityTest", "waiting, current mode is: " + mode);
+                                status = WAIT_FOR_USER;
+                            }
+                        }
+                    });
+        }
+    }
+
+    protected class PriorityInterceptsSomeTest extends InteractiveTestCase {
+        @Override
+        View inflate(ViewGroup parent) {
+            return createAutoItem(parent, R.string.attention_some_are_filtered);
+        }
+
+        @Override
+        void setUp() {
+            sendNotifications(MODE_URI, false, false);
+            status = READY;
+            // wait for notifications to move through the system
+            delay();
+        }
+
+        @Override
+        void test() {
+            MockListener.probeListenerPayloads(mContext,
+                    new MockListener.StringListResultCatcher() {
+                        @Override
+                        public void accept(List<String> result) {
+                            Set<String> found = new HashSet<String>();
+                            if (result == null || result.size() == 0) {
+                                status = FAIL;
+                                return;
+                            }
+                            boolean pass = true;
+                            for (String payloadData : result) {
+                                try {
+                                    JSONObject payload = new JSONObject(payloadData);
+                                    String tag = payload.getString(JSON_TAG);
+                                    boolean zen = payload.getBoolean(JSON_MATCHES_ZEN_FILTER);
+                                    Log.e(TAG, tag + (zen ? "" : " not") + " intercepted");
+                                    if (found.contains(tag)) {
+                                        // multiple entries for same notification!
+                                        pass = false;
+                                    } else if (ALICE.equals(tag)) {
+                                        found.add(ALICE);
+                                        pass &= zen;
+                                    } else if (BOB.equals(tag)) {
+                                        found.add(BOB);
+                                        pass &= !zen;
+                                    } else if (CHARLIE.equals(tag)) {
+                                        found.add(CHARLIE);
+                                        pass &= !zen;
+                                    }
+                                } catch (JSONException e) {
+                                    pass = false;
+                                    Log.e(TAG, "failed to unpack data from mocklistener", e);
+                                }
+                            }
+                            pass &= found.size() == 3;
+                            status = pass ? PASS : FAIL;
+                            next();
+                        }
+                    });
+            delay();  // in case the catcher never returns
+        }
+
+        @Override
+        void tearDown() {
+            mNm.cancelAll();
+            MockListener.resetListenerData(mContext);
+            delay();
+        }
+    }
+
+    // ordered by time: C, B, A
+    protected class DefaultOrderTest extends InteractiveTestCase {
+        @Override
+        View inflate(ViewGroup parent) {
+            return createAutoItem(parent, R.string.attention_default_order);
+        }
+
+        @Override
+        void setUp() {
+            sendNotifications(MODE_NONE, false, false);
+            status = READY;
+            // wait for notifications to move through the system
+            delay();
+        }
+
+        @Override
+        void test() {
+            MockListener.probeListenerOrder(mContext,
+                    new MockListener.StringListResultCatcher() {
+                        @Override
+                        public void accept(List<String> orderedKeys) {
+                            int rankA = findTagInKeys(ALICE, orderedKeys);
+                            int rankB = findTagInKeys(BOB, orderedKeys);
+                            int rankC = findTagInKeys(CHARLIE, orderedKeys);
+                            if (rankC < rankB && rankB < rankA) {
+                                status = PASS;
+                            } else {
+                                logFail(rankA + ", " + rankB + ", " + rankC);
+                                status = FAIL;
+                            }
+                            next();
+                        }
+                    });
+            delay();  // in case the catcher never returns
+        }
+
+        @Override
+        void tearDown() {
+            mNm.cancelAll();
+            MockListener.resetListenerData(mContext);
+            delay();
+        }
+    }
+
+    // ordered by priority: B, C, A
+    protected class PrioritytOrderTest extends InteractiveTestCase {
+        @Override
+        View inflate(ViewGroup parent) {
+            return createAutoItem(parent, R.string.attention_priority_order);
+        }
+
+        @Override
+        void setUp() {
+            sendNotifications(MODE_NONE, true, false);
+            status = READY;
+            // wait for notifications to move through the system
+            delay();
+        }
+
+        @Override
+        void test() {
+            MockListener.probeListenerOrder(mContext,
+                    new MockListener.StringListResultCatcher() {
+                        @Override
+                        public void accept(List<String> orderedKeys) {
+                            int rankA = findTagInKeys(ALICE, orderedKeys);
+                            int rankB = findTagInKeys(BOB, orderedKeys);
+                            int rankC = findTagInKeys(CHARLIE, orderedKeys);
+                            if (rankB < rankC && rankC < rankA) {
+                                status = PASS;
+                            } else {
+                                logFail(rankA + ", " + rankB + ", " + rankC);
+                                status = FAIL;
+                            }
+                            next();
+                        }
+                    });
+            delay();  // in case the catcher never returns
+        }
+
+        @Override
+        void tearDown() {
+            mNm.cancelAll();
+            MockListener.resetListenerData(mContext);
+            delay();
+        }
+    }
+
+    // A starts at the top then falls to the bottom
+    protected class InterruptionOrderTest extends InteractiveTestCase {
+        @Override
+        View inflate(ViewGroup parent) {
+            return createAutoItem(parent, R.string.attention_interruption_order);
+        }
+
+        @Override
+        void setUp() {
+            sendNotifications(MODE_NONE, false, true);
+            status = READY;
+            // wait for notifications to move through the system
+            delay();
+        }
+
+        @Override
+        void test() {
+            if (status == READY) {
+                MockListener.probeListenerOrder(mContext,
+                        new MockListener.StringListResultCatcher() {
+                            @Override
+                            public void accept(List<String> orderedKeys) {
+                                int rankA = findTagInKeys(ALICE, orderedKeys);
+                                int rankB = findTagInKeys(BOB, orderedKeys);
+                                int rankC = findTagInKeys(CHARLIE, orderedKeys);
+                                if (rankA < rankB && rankA < rankC) {
+                                    status = RETEST;
+                                    delay(12000);
+                                } else {
+                                    logFail("noisy notification did not sort to top.");
+                                    status = FAIL;
+                                    next();
+                                }
+                            }
+                        });
+                delay();  // in case the catcher never returns
+            } else {
+                MockListener.probeListenerOrder(mContext,
+                        new MockListener.StringListResultCatcher() {
+                            @Override
+                            public void accept(List<String> orderedKeys) {
+                                int rankA = findTagInKeys(ALICE, orderedKeys);
+                                int rankB = findTagInKeys(BOB, orderedKeys);
+                                int rankC = findTagInKeys(CHARLIE, orderedKeys);
+                                if (rankA > rankB && rankA > rankC) {
+                                    status = PASS;
+                                } else {
+                                    logFail("noisy notification did not fade back into the list.");
+                                    status = FAIL;
+                                }
+                                next();
+                            }
+                        });
+                delay();  // in case the catcher never returns
+            }
+        }
+
+        @Override
+        void tearDown() {
+            mNm.cancelAll();
+            MockListener.resetListenerData(mContext);
+            delay();
+        }
+    }
+
+    // B & C above the fold, A below
+    protected class AmbientBitsTest extends InteractiveTestCase {
+        @Override
+        View inflate(ViewGroup parent) {
+            return createAutoItem(parent, R.string.attention_ambient_bit);
+        }
+
+        @Override
+        void setUp() {
+            sendNotifications(MODE_NONE, true, false);
+            status = READY;
+            // wait for notifications to move through the system
+            delay();
+        }
+
+        @Override
+        void test() {
+            MockListener.probeListenerPayloads(mContext,
+                    new MockListener.StringListResultCatcher() {
+                        @Override
+                        public void accept(List<String> result) {
+                            Set<String> found = new HashSet<String>();
+                            if (result == null || result.size() == 0) {
+                                status = FAIL;
+                                return;
+                            }
+                            boolean pass = true;
+                            for (String payloadData : result) {
+                                try {
+                                    JSONObject payload = new JSONObject(payloadData);
+                                    String tag = payload.getString(JSON_TAG);
+                                    boolean ambient = payload.getBoolean(JSON_AMBIENT);
+                                    Log.e(TAG, tag + (ambient ? " is" : " isn't") + " ambient");
+                                    if (found.contains(tag)) {
+                                        // multiple entries for same notification!
+                                        pass = false;
+                                    } else if (ALICE.equals(tag)) {
+                                        found.add(ALICE);
+                                        pass &= ambient;
+                                    } else if (BOB.equals(tag)) {
+                                        found.add(BOB);
+                                        pass &= !ambient;
+                                    } else if (CHARLIE.equals(tag)) {
+                                        found.add(CHARLIE);
+                                        pass &= !ambient;
+                                    }
+                                } catch (JSONException e) {
+                                    pass = false;
+                                    Log.e(TAG, "failed to unpack data from mocklistener", e);
+                                }
+                            }
+                            pass &= found.size() == 3;
+                            status = pass ? PASS : FAIL;
+                            next();
+                        }
+                    });
+            delay();  // in case the catcher never returns
+        }
+
+        @Override
+        void tearDown() {
+            mNm.cancelAll();
+            MockListener.resetListenerData(mContext);
+            delay();
+        }
+    }
+
+    // ordered by contact affinity: A, B, C
+    protected class LookupUriOrderTest extends InteractiveTestCase {
+        @Override
+        View inflate(ViewGroup parent) {
+            return createAutoItem(parent, R.string.attention_lookup_order);
+        }
+
+        @Override
+        void setUp() {
+            sendNotifications(MODE_URI, false, false);
+            status = READY;
+            // wait for notifications to move through the system
+            delay();
+        }
+
+        @Override
+        void test() {
+            MockListener.probeListenerOrder(mContext,
+                    new MockListener.StringListResultCatcher() {
+                        @Override
+                        public void accept(List<String> orderedKeys) {
+                            int rankA = findTagInKeys(ALICE, orderedKeys);
+                            int rankB = findTagInKeys(BOB, orderedKeys);
+                            int rankC = findTagInKeys(CHARLIE, orderedKeys);
+                            if (rankA < rankB && rankB < rankC) {
+                                status = PASS;
+                            } else {
+                                logFail(rankA + ", " + rankB + ", " + rankC);
+                                status = FAIL;
+                            }
+                            next();
+                        }
+                    });
+            delay();  // in case the catcher never returns
+        }
+
+        @Override
+        void tearDown() {
+            mNm.cancelAll();
+            MockListener.resetListenerData(mContext);
+            delay();
+        }
+    }
+
+    // ordered by contact affinity: A, B, C
+    protected class EmailOrderTest extends InteractiveTestCase {
+        @Override
+        View inflate(ViewGroup parent) {
+            return createAutoItem(parent, R.string.attention_email_order);
+        }
+
+        @Override
+        void setUp() {
+            sendNotifications(MODE_EMAIL, false, false);
+            status = READY;
+            // wait for notifications to move through the system
+            delay();
+        }
+
+        @Override
+        void test() {
+            MockListener.probeListenerOrder(mContext,
+                    new MockListener.StringListResultCatcher() {
+                        @Override
+                        public void accept(List<String> orderedKeys) {
+                            int rankA = findTagInKeys(ALICE, orderedKeys);
+                            int rankB = findTagInKeys(BOB, orderedKeys);
+                            int rankC = findTagInKeys(CHARLIE, orderedKeys);
+                            if (rankA < rankB && rankB < rankC) {
+                                status = PASS;
+                            } else {
+                                logFail(rankA + ", " + rankB + ", " + rankC);
+                                status = FAIL;
+                            }
+                            next();
+                        }
+                    });
+            delay();  // in case the catcher never returns
+        }
+
+        @Override
+        void tearDown() {
+            mNm.cancelAll();
+            MockListener.resetListenerData(mContext);
+            delay();
+        }
+    }
+
+    // ordered by contact affinity: A, B, C
+    protected class PhoneOrderTest extends InteractiveTestCase {
+        @Override
+        View inflate(ViewGroup parent) {
+            return createAutoItem(parent, R.string.attention_phone_order);
+        }
+
+        @Override
+        void setUp() {
+            sendNotifications(MODE_PHONE, false, false);
+            status = READY;
+            // wait for notifications to move through the system
+            delay();
+        }
+
+        @Override
+        void test() {
+            MockListener.probeListenerOrder(mContext,
+                    new MockListener.StringListResultCatcher() {
+                        @Override
+                        public void accept(List<String> orderedKeys) {
+                            int rankA = findTagInKeys(ALICE, orderedKeys);
+                            int rankB = findTagInKeys(BOB, orderedKeys);
+                            int rankC = findTagInKeys(CHARLIE, orderedKeys);
+                            if (rankA < rankB && rankB < rankC) {
+                                status = PASS;
+                            } else {
+                                logFail(rankA + ", " + rankB + ", " + rankC);
+                                status = FAIL;
+                            }
+                            next();
+                        }
+                    });
+            delay();  // in case the catcher never returns
+        }
+
+        @Override
+        void tearDown() {
+            mNm.cancelAll();
+            MockListener.resetListenerData(mContext);
+            delay();
+        }
+    }
+
+    // Utilities
+
+    // usePriorities true: B, C, A
+    // usePriorities false:
+    //   MODE_NONE: C, B, A
+    //   otherwise: A, B, C
+    private void sendNotifications(int annotationMode, boolean usePriorities, boolean noisy) {
+        // TODO(cwren) Fixes flaky tests due to bug 17644321. Remove this line when it is fixed.
+        int baseId = NOTIFICATION_ID + (noisy ? 3 : 0);
+
+        // C, B, A when sorted by time.  Times must be in the past.
+        long whenA = System.currentTimeMillis() - 4000000L;
+        long whenB = System.currentTimeMillis() - 2000000L;
+        long whenC = System.currentTimeMillis() - 1000000L;
+
+        // B, C, A when sorted by priorities
+        int priorityA = usePriorities ? Notification.PRIORITY_MIN : Notification.PRIORITY_DEFAULT;
+        int priorityB = usePriorities ? Notification.PRIORITY_MAX : Notification.PRIORITY_DEFAULT;
+        int priorityC = usePriorities ? Notification.PRIORITY_LOW : Notification.PRIORITY_DEFAULT;
+
+        Notification.Builder alice = new Notification.Builder(mContext)
+                .setContentTitle(ALICE)
+                .setContentText(ALICE)
+                .setSmallIcon(R.drawable.ic_stat_alice)
+                .setPriority(priorityA)
+                .setCategory(Notification.CATEGORY_MESSAGE)
+                .setWhen(whenA);
+        alice.setDefaults(noisy ? Notification.DEFAULT_SOUND | Notification.DEFAULT_VIBRATE : 0);
+        addPerson(annotationMode, alice, mAliceUri, ALICE_PHONE, ALICE_EMAIL);
+        mNm.notify(ALICE, baseId + 1, alice.build());
+
+        Notification.Builder bob = new Notification.Builder(mContext)
+                .setContentTitle(BOB)
+                .setContentText(BOB)
+                .setSmallIcon(R.drawable.ic_stat_bob)
+                .setPriority(priorityB)
+                .setCategory(Notification.CATEGORY_MESSAGE)
+                .setWhen(whenB);
+        addPerson(annotationMode, bob, mBobUri, BOB_PHONE, BOB_EMAIL);
+        mNm.notify(BOB, baseId + 2, bob.build());
+
+        Notification.Builder charlie = new Notification.Builder(mContext)
+                .setContentTitle(CHARLIE)
+                .setContentText(CHARLIE)
+                .setSmallIcon(R.drawable.ic_stat_charlie)
+                .setPriority(priorityC)
+                .setCategory(Notification.CATEGORY_MESSAGE)
+                .setWhen(whenC);
+        addPerson(annotationMode, charlie, mCharlieUri, CHARLIE_PHONE, CHARLIE_EMAIL);
+        mNm.notify(CHARLIE, baseId + 3, charlie.build());
+    }
+
+    private void addPerson(int mode, Notification.Builder note,
+            Uri uri, String phone, String email) {
+        if (mode == MODE_URI && uri != null) {
+            note.addPerson(uri.toString());
+        } else if (mode == MODE_PHONE) {
+            note.addPerson(Uri.fromParts("tel", phone, null).toString());
+        } else if (mode == MODE_EMAIL) {
+            note.addPerson(Uri.fromParts("mailto", email, null).toString());
+        }
+    }
+
+    private void insertSingleContact(String name, String phone, String email, boolean starred) {
+        final ArrayList<ContentProviderOperation> operationList =
+                new ArrayList<ContentProviderOperation>();
+        ContentProviderOperation.Builder builder =
+                ContentProviderOperation.newInsert(ContactsContract.RawContacts.CONTENT_URI);
+        builder.withValue(ContactsContract.RawContacts.STARRED, starred ? 1 : 0);
+        operationList.add(builder.build());
+
+        builder = ContentProviderOperation.newInsert(ContactsContract.Data.CONTENT_URI);
+        builder.withValueBackReference(StructuredName.RAW_CONTACT_ID, 0);
+        builder.withValue(ContactsContract.Data.MIMETYPE, StructuredName.CONTENT_ITEM_TYPE);
+        builder.withValue(ContactsContract.CommonDataKinds.StructuredName.DISPLAY_NAME, name);
+        operationList.add(builder.build());
+
+        if (phone != null) {
+            builder = ContentProviderOperation.newInsert(ContactsContract.Data.CONTENT_URI);
+            builder.withValueBackReference(Phone.RAW_CONTACT_ID, 0);
+            builder.withValue(ContactsContract.Data.MIMETYPE, Phone.CONTENT_ITEM_TYPE);
+            builder.withValue(Phone.TYPE, Phone.TYPE_MOBILE);
+            builder.withValue(Phone.NUMBER, phone);
+            builder.withValue(ContactsContract.Data.IS_PRIMARY, 1);
+            operationList.add(builder.build());
+        }
+        if (email != null) {
+            builder = ContentProviderOperation.newInsert(ContactsContract.Data.CONTENT_URI);
+            builder.withValueBackReference(Email.RAW_CONTACT_ID, 0);
+            builder.withValue(ContactsContract.Data.MIMETYPE, Email.CONTENT_ITEM_TYPE);
+            builder.withValue(Email.TYPE, Email.TYPE_HOME);
+            builder.withValue(Email.DATA, email);
+            operationList.add(builder.build());
+        }
+
+        try {
+            mContext.getContentResolver().applyBatch(ContactsContract.AUTHORITY, operationList);
+        } catch (RemoteException e) {
+            Log.e(TAG, String.format("%s: %s", e.toString(), e.getMessage()));
+        } catch (OperationApplicationException e) {
+            Log.e(TAG, String.format("%s: %s", e.toString(), e.getMessage()));
+        }
+    }
+
+    private Uri lookupContact(String phone) {
+        Cursor c = null;
+        try {
+            Uri phoneUri = Uri.withAppendedPath(ContactsContract.PhoneLookup.CONTENT_FILTER_URI,
+                    Uri.encode(phone));
+            String[] projection = new String[] { ContactsContract.Contacts._ID,
+                    ContactsContract.Contacts.LOOKUP_KEY };
+            c = mContext.getContentResolver().query(phoneUri, projection, null, null, null);
+            if (c != null && c.getCount() > 0) {
+                c.moveToFirst();
+                int lookupIdx = c.getColumnIndex(ContactsContract.Contacts.LOOKUP_KEY);
+                int idIdx = c.getColumnIndex(ContactsContract.Contacts._ID);
+                String lookupKey = c.getString(lookupIdx);
+                long contactId = c.getLong(idIdx);
+                return ContactsContract.Contacts.getLookupUri(contactId, lookupKey);
+            }
+        } catch (Throwable t) {
+            Log.w(TAG, "Problem getting content resolver or performing contacts query.", t);
+        } finally {
+            if (c != null) {
+                c.close();
+            }
+        }
+        return null;
+    }
+
+    /** Search a list of notification keys for a given tag. */
+    private int findTagInKeys(String tag, List<String> orderedKeys) {
+        for (int i = 0; i < orderedKeys.size(); i++) {
+            if (orderedKeys.get(i).contains(tag)) {
+                return i;
+            }
+        }
+        return -1;
+    }
+}
diff --git a/apps/CtsVerifier/src/com/android/cts/verifier/notifications/InteractiveVerifierActivity.java b/apps/CtsVerifier/src/com/android/cts/verifier/notifications/InteractiveVerifierActivity.java
new file mode 100644
index 0000000..d65af80
--- /dev/null
+++ b/apps/CtsVerifier/src/com/android/cts/verifier/notifications/InteractiveVerifierActivity.java
@@ -0,0 +1,447 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.cts.verifier.notifications;
+
+import android.annotation.SuppressLint;
+import android.app.Activity;
+import android.app.Notification;
+import android.app.NotificationManager;
+import android.app.PendingIntent;
+import android.app.Service;
+import android.content.ComponentName;
+import android.content.Context;
+import android.content.Intent;
+import android.content.pm.PackageManager;
+import android.os.Bundle;
+import android.os.IBinder;
+import android.provider.Settings.Secure;
+import android.util.Log;
+import android.view.LayoutInflater;
+import android.view.View;
+import android.view.ViewGroup;
+import android.widget.Button;
+import android.widget.ImageView;
+import android.widget.TextView;
+import com.android.cts.verifier.PassFailButtons;
+import com.android.cts.verifier.R;
+import com.android.cts.verifier.nfc.TagVerifierActivity;
+import org.json.JSONException;
+import org.json.JSONObject;
+
+import java.util.ArrayList;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Set;
+import java.util.UUID;
+import java.util.concurrent.LinkedBlockingQueue;
+
+import static com.android.cts.verifier.notifications.MockListener.*;
+
+public abstract class InteractiveVerifierActivity extends PassFailButtons.Activity
+        implements Runnable {
+    private static final String TAG = "InteractiveVerifier";
+    private static final String STATE = "state";
+    private static final String STATUS = "status";
+    private static LinkedBlockingQueue<String> sDeletedQueue = new LinkedBlockingQueue<String>();
+    protected static final String LISTENER_PATH = "com.android.cts.verifier/" +
+            "com.android.cts.verifier.notifications.MockListener";
+    protected static final int SETUP = 0;
+    protected static final int READY = 1;
+    protected static final int RETEST = 2;
+    protected static final int PASS = 3;
+    protected static final int FAIL = 4;
+    protected static final int WAIT_FOR_USER = 5;
+
+    protected static final int NOTIFICATION_ID = 1001;
+
+    // TODO remove these once b/10023397 is fixed
+    public static final String ENABLED_NOTIFICATION_LISTENERS = "enabled_notification_listeners";
+    public static final String NOTIFICATION_LISTENER_SETTINGS =
+            "android.settings.ACTION_NOTIFICATION_LISTENER_SETTINGS";
+
+    protected InteractiveTestCase mCurrentTest;
+    protected PackageManager mPackageManager;
+    protected NotificationManager mNm;
+    protected Context mContext;
+    protected Runnable mRunner;
+    protected View mHandler;
+    protected String mPackageString;
+
+    private LayoutInflater mInflater;
+    private ViewGroup mItemList;
+    private List<InteractiveTestCase> mTestList;
+    private Iterator<InteractiveTestCase> mTestOrder;
+
+    public static class DismissService extends Service {
+        @Override
+        public IBinder onBind(Intent intent) {
+            return null;
+        }
+
+        @Override
+        public void onStart(Intent intent, int startId) {
+            if(intent != null) { sDeletedQueue.offer(intent.getAction()); }
+        }
+    }
+
+    protected abstract class InteractiveTestCase {
+        int status;
+        private View view;
+
+        abstract View inflate(ViewGroup parent);
+        View getView(ViewGroup parent) {
+            if (view == null) {
+                view = inflate(parent);
+            }
+            return view;
+        }
+
+        /** @return true if the test should re-run when the test activity starts. */
+        boolean autoStart() {
+            return false;
+        }
+
+        /** Set status to {@link #READY} to proceed, or {@link #SETUP} to try again. */
+        void setUp() { status = READY; next(); };
+
+        /** Set status to {@link #PASS} or @{link #FAIL} to proceed, or {@link #READY} to retry. */
+        void test() { status = FAIL; next(); };
+
+        /** Do not modify status. */
+        void tearDown() { next(); };
+
+        protected void logFail() {
+            logFail(null);
+        }
+
+        protected void logFail(String message) {
+            logWithStack("failed " + this.getClass().getSimpleName() +
+                    ((message == null) ? "" : ": " + message));
+        }
+    }
+
+    abstract int getTitleResource();
+    abstract int getInstructionsResource();
+
+    protected void onCreate(Bundle savedState) {
+        super.onCreate(savedState);
+        int savedStateIndex = (savedState == null) ? 0 : savedState.getInt(STATE, 0);
+        int savedStatus = (savedState == null) ? SETUP : savedState.getInt(STATUS, SETUP);
+        Log.i(TAG, "restored state(" + savedStateIndex + "}, status(" + savedStatus + ")");
+        mContext = this;
+        mRunner = this;
+        mNm = (NotificationManager) getSystemService(Context.NOTIFICATION_SERVICE);
+        mPackageManager = getPackageManager();
+        mInflater = getLayoutInflater();
+        View view = mInflater.inflate(R.layout.nls_main, null);
+        mItemList = (ViewGroup) view.findViewById(R.id.nls_test_items);
+        mHandler = mItemList;
+        mTestList = new ArrayList<>();
+        mTestList.addAll(createTestItems());
+        for (InteractiveTestCase test: mTestList) {
+            mItemList.addView(test.getView(mItemList));
+        }
+        mTestOrder = mTestList.iterator();
+        for (int i = 0; i < savedStateIndex; i++) {
+            mCurrentTest = mTestOrder.next();
+            mCurrentTest.status = PASS;
+        }
+        mCurrentTest = mTestOrder.next();
+        mCurrentTest.status = savedStatus;
+
+        setContentView(view);
+        setPassFailButtonClickListeners();
+        getPassButton().setEnabled(false);
+
+        setInfoResources(getTitleResource(), getInstructionsResource(), -1);
+    }
+
+    @Override
+    protected void onSaveInstanceState (Bundle outState) {
+        final int stateIndex = mTestList.indexOf(mCurrentTest);
+        outState.putInt(STATE, stateIndex);
+        outState.putInt(STATUS, mCurrentTest.status);
+        Log.i(TAG, "saved state(" + stateIndex + "}, status(" + (mCurrentTest.status) + ")");
+    }
+
+    @Override
+    protected void onResume() {
+        super.onResume();
+        if (mCurrentTest.autoStart()) {
+            mCurrentTest.status = READY;
+        }
+        next();
+    }
+
+    // Interface Utilities
+
+    protected void markItem(InteractiveTestCase test) {
+        if (test == null) { return; }
+        View item = test.view;
+        ImageView status = (ImageView) item.findViewById(R.id.nls_status);
+        View button = item.findViewById(R.id.nls_action_button);
+        switch (test.status) {
+            case WAIT_FOR_USER:
+                status.setImageResource(R.drawable.fs_warning);
+                break;
+
+            case SETUP:
+            case READY:
+            case RETEST:
+                status.setImageResource(R.drawable.fs_clock);
+                break;
+
+            case FAIL:
+                status.setImageResource(R.drawable.fs_error);
+                button.setClickable(false);
+                button.setEnabled(false);
+                break;
+
+            case PASS:
+                status.setImageResource(R.drawable.fs_good);
+                button.setClickable(false);
+                button.setEnabled(false);
+                break;
+
+        }
+        status.invalidate();
+    }
+
+    protected View createNlsSettingsItem(ViewGroup parent, int messageId) {
+        return createUserItem(parent, messageId, R.string.nls_start_settings);
+    }
+
+    protected View createRetryItem(ViewGroup parent, int messageId) {
+        return createUserItem(parent, messageId, R.string.attention_ready);
+    }
+
+    protected View createUserItem(ViewGroup parent, int messageId, int actionId) {
+        View item = mInflater.inflate(R.layout.nls_item, parent, false);
+        TextView instructions = (TextView) item.findViewById(R.id.nls_instructions);
+        instructions.setText(messageId);
+        Button button = (Button) item.findViewById(R.id.nls_action_button);
+        button.setText(actionId);
+        button.setTag(actionId);
+        return item;
+    }
+
+    protected View  createAutoItem(ViewGroup parent, int stringId) {
+        View item = mInflater.inflate(R.layout.nls_item, parent, false);
+        TextView instructions = (TextView) item.findViewById(R.id.nls_instructions);
+        instructions.setText(stringId);
+        View button = item.findViewById(R.id.nls_action_button);
+        button.setVisibility(View.GONE);
+        return item;
+    }
+
+    // Test management
+
+    abstract protected List<InteractiveTestCase> createTestItems();
+
+    public void run() {
+        if (mCurrentTest == null) { return; }
+        markItem(mCurrentTest);
+        switch (mCurrentTest.status) {
+            case SETUP:
+                Log.i(TAG, "running setup for: " + mCurrentTest.getClass().getSimpleName());
+                mCurrentTest.setUp();
+                break;
+
+            case WAIT_FOR_USER:
+                Log.i(TAG, "waiting for user: " + mCurrentTest.getClass().getSimpleName());
+                break;
+
+            case READY:
+            case RETEST:
+                Log.i(TAG, "running test for: " + mCurrentTest.getClass().getSimpleName());
+                mCurrentTest.test();
+                break;
+
+            case FAIL:
+                Log.i(TAG, "FAIL: " + mCurrentTest.getClass().getSimpleName());
+                mCurrentTest = null;
+                break;
+
+            case PASS:
+                Log.i(TAG, "pass for: " + mCurrentTest.getClass().getSimpleName());
+                mCurrentTest.tearDown();
+                if (mTestOrder.hasNext()) {
+                    mCurrentTest = mTestOrder.next();
+                    Log.i(TAG, "next test is: " + mCurrentTest.getClass().getSimpleName());
+                } else {
+                    Log.i(TAG, "no more tests");
+                    mCurrentTest = null;
+                    getPassButton().setEnabled(true);
+                    mNm.cancelAll();
+                }
+                break;
+        }
+        markItem(mCurrentTest);
+    }
+
+    /**
+     * Return to the state machine to progress through the tests.
+     */
+    protected void next() {
+        mHandler.removeCallbacks(mRunner);
+        mHandler.post(mRunner);
+    }
+
+    /**
+     * Wait for things to settle before returning to the state machine.
+     */
+    protected void delay() {
+        delay(3000);
+    }
+
+    /**
+     * Wait for some time.
+     */
+    protected void delay(long waitTime) {
+        mHandler.removeCallbacks(mRunner);
+        mHandler.postDelayed(mRunner, waitTime);
+    }
+
+    // UI callbacks
+
+    public void launchSettings() {
+        startActivity(new Intent(NOTIFICATION_LISTENER_SETTINGS));
+    }
+
+    public void actionPressed(View v) {
+        Object tag = v.getTag();
+        if (tag instanceof Integer) {
+            int id = ((Integer) tag).intValue();
+            if (id == R.string.nls_start_settings) {
+                launchSettings();
+            } else if (id == R.string.attention_ready) {
+                mCurrentTest.status = READY;
+                next();
+            }
+        }
+    }
+
+    // Utilities
+
+    protected PendingIntent makeIntent(int code, String tag) {
+        Intent intent = new Intent(tag);
+        intent.setComponent(new ComponentName(mContext, DismissService.class));
+        PendingIntent pi = PendingIntent.getService(mContext, code, intent,
+                PendingIntent.FLAG_UPDATE_CURRENT);
+        return pi;
+    }
+
+    protected boolean checkEquals(long expected, long actual, String message) {
+        if (expected == actual) {
+            return true;
+        }
+        logWithStack(String.format(message, expected, actual));
+        return false;
+    }
+
+    protected boolean checkEquals(String expected, String actual, String message) {
+        if (expected.equals(actual)) {
+            return true;
+        }
+        logWithStack(String.format(message, expected, actual));
+        return false;
+    }
+
+    protected boolean checkFlagSet(int expected, int actual, String message) {
+        if ((expected & actual) != 0) {
+            return true;
+        }
+        logWithStack(String.format(message, expected, actual));
+        return false;
+    };
+
+    protected void logWithStack(String message) {
+        Throwable stackTrace = new Throwable();
+        stackTrace.fillInStackTrace();
+        Log.e(TAG, message, stackTrace);
+    }
+
+    // Common Tests: useful for the side-effects they generate
+
+    protected class IsEnabledTest extends InteractiveTestCase {
+        @Override
+        View inflate(ViewGroup parent) {
+            return createNlsSettingsItem(parent, R.string.nls_enable_service);
+        }
+
+        @Override
+        boolean autoStart() {
+            return true;
+        }
+
+        @Override
+        void test() {
+            Intent settings = new Intent(NOTIFICATION_LISTENER_SETTINGS);
+            if (settings.resolveActivity(mPackageManager) == null) {
+                logFail("no settings activity");
+                status = FAIL;
+            } else {
+                String listeners = Secure.getString(getContentResolver(),
+                        ENABLED_NOTIFICATION_LISTENERS);
+                if (listeners != null && listeners.contains(LISTENER_PATH)) {
+                    status = PASS;
+                } else {
+                    status = WAIT_FOR_USER;
+                }
+                next();
+            }
+        }
+
+        void tearDown() {
+            // wait for the service to start
+            delay();
+        }
+    }
+
+    protected class ServiceStartedTest extends InteractiveTestCase {
+        @Override
+        View inflate(ViewGroup parent) {
+            return createAutoItem(parent, R.string.nls_service_started);
+        }
+
+        @Override
+        void test() {
+            MockListener.probeListenerStatus(mContext,
+                    new MockListener.StatusCatcher() {
+                        @Override
+                        public void accept(int result) {
+                            if (result == Activity.RESULT_OK) {
+                                status = PASS;
+                                next();
+                            } else {
+                                logFail();
+                                status = RETEST;
+                                delay();
+                            }
+                        }
+                    });
+            delay();  // in case the catcher never returns
+        }
+
+        @Override
+        void tearDown() {
+            MockListener.resetListenerData(mContext);
+            delay();
+        }
+    }
+}
diff --git a/apps/CtsVerifier/src/com/android/cts/verifier/notifications/MockListener.java b/apps/CtsVerifier/src/com/android/cts/verifier/notifications/MockListener.java
index b4863fa..75eaebd 100644
--- a/apps/CtsVerifier/src/com/android/cts/verifier/notifications/MockListener.java
+++ b/apps/CtsVerifier/src/com/android/cts/verifier/notifications/MockListener.java
@@ -238,6 +238,7 @@
         Log.d(TAG, "removed: " + sbn.getTag());
         mRemoved.add(sbn.getTag());
         mNotifications.remove(sbn.getKey());
+        mNotificationKeys.remove(sbn.getTag());
         onNotificationRankingUpdate(rankingMap);
         mNotificationKeys.remove(sbn.getTag());
     }
diff --git a/apps/CtsVerifier/src/com/android/cts/verifier/notifications/NotificationAttentionManagementVerifierActivity.java b/apps/CtsVerifier/src/com/android/cts/verifier/notifications/NotificationAttentionManagementVerifierActivity.java
deleted file mode 100644
index b4e348f..0000000
--- a/apps/CtsVerifier/src/com/android/cts/verifier/notifications/NotificationAttentionManagementVerifierActivity.java
+++ /dev/null
@@ -1,883 +0,0 @@
-/*
- * Copyright (C) 2014 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.android.cts.verifier.notifications;
-
-import static com.android.cts.verifier.notifications.MockListener.JSON_AMBIENT;
-import static com.android.cts.verifier.notifications.MockListener.JSON_MATCHES_ZEN_FILTER;
-import static com.android.cts.verifier.notifications.MockListener.JSON_TAG;
-
-import android.app.Activity;
-import android.app.Notification;
-import android.content.ContentProviderOperation;
-import android.content.Intent;
-import android.content.OperationApplicationException;
-import android.database.Cursor;
-import android.net.Uri;
-import android.os.Bundle;
-import android.os.RemoteException;
-import android.provider.ContactsContract;
-import android.provider.ContactsContract.CommonDataKinds.Email;
-import android.provider.ContactsContract.CommonDataKinds.Phone;
-import android.provider.ContactsContract.CommonDataKinds.StructuredName;
-import android.provider.Settings.Secure;
-import android.service.notification.NotificationListenerService;
-import android.util.Log;
-import com.android.cts.verifier.R;
-import com.android.cts.verifier.nfc.TagVerifierActivity;
-import org.json.JSONException;
-import org.json.JSONObject;
-
-import java.util.ArrayList;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Set;
-
-public class NotificationAttentionManagementVerifierActivity
-        extends NotificationListenerVerifierActivity {
-    private static final String TAG = TagVerifierActivity.class.getSimpleName();
-    private static final String ALICE = "Alice";
-    private static final String ALICE_PHONE = "+16175551212";
-    private static final String ALICE_EMAIL = "alice@_foo._bar";
-    private static final String BOB = "Bob";
-    private static final String BOB_PHONE = "+16505551212";;
-    private static final String BOB_EMAIL = "bob@_foo._bar";
-    private static final String CHARLIE = "Charlie";
-    private static final String CHARLIE_PHONE = "+13305551212";
-    private static final String CHARLIE_EMAIL = "charlie@_foo._bar";
-    private static final int MODE_NONE = 0;
-    private static final int MODE_URI = 1;
-    private static final int MODE_PHONE = 2;
-    private static final int MODE_EMAIL = 3;
-    private static final int DELAYED_SETUP = CLEARED;
-
-    private Uri mAliceUri;
-    private Uri mBobUri;
-    private Uri mCharlieUri;
-
-    @Override
-    protected void onCreate(Bundle savedInstanceState) {
-        super.onCreate(savedInstanceState, R.layout.nls_main);
-        setInfoResources(R.string.attention_test, R.string.attention_info, -1);
-    }
-
-    // Test Setup
-
-    @Override
-    protected void createTestItems() {
-        createNlsSettingsItem(R.string.nls_enable_service);
-        createAutoItem(R.string.nls_service_started);
-        createAutoItem(R.string.attention_create_contacts);
-        createRetryItem(R.string.attention_filter_none);
-        createAutoItem(R.string.attention_all_are_filtered);
-        createRetryItem(R.string.attention_filter_all);
-        createAutoItem(R.string.attention_none_are_filtered);
-        createAutoItem(R.string.attention_default_order);
-        createAutoItem(R.string.attention_interruption_order);
-        createAutoItem(R.string.attention_priority_order);
-        createAutoItem(R.string.attention_ambient_bit);
-        createAutoItem(R.string.attention_lookup_order);
-        createAutoItem(R.string.attention_email_order);
-        createAutoItem(R.string.attention_phone_order);
-        createRetryItem(R.string.attention_filter_priority);
-        createAutoItem(R.string.attention_some_are_filtered);
-        createAutoItem(R.string.attention_delete_contacts);
-    }
-
-    // Test management
-
-    @Override
-    protected void updateStateMachine() {
-        switch (mState) {
-            case 0:
-                testIsEnabled(mState);
-                break;
-            case 1:
-                testIsStarted(mState);
-                break;
-            case 2:
-                testInsertContacts(mState);
-                break;
-            case 3:
-                testModeNone(mState);
-                break;
-            case 4:
-                testNoneInterceptsAll(mState);
-                break;
-            case 5:
-                testModeAll(mState);
-                break;
-            case 6:
-                testAllInterceptsNothing(mState);
-                break;
-            case 7:
-                testDefaultOrder(mState);
-                break;
-            case 8:
-                testInterruptionOrder(mState);
-                break;
-            case 9:
-                testPrioritytOrder(mState);
-                break;
-            case 10:
-                testAmbientBits(mState);
-                break;
-            case 11:
-                testLookupUriOrder(mState);
-                break;
-            case 12:
-                testEmailOrder(mState);
-                break;
-            case 13:
-                testPhoneOrder(mState);
-                break;
-            case 14:
-                testModePriority(mState);
-                break;
-            case 15:
-                testPriorityInterceptsSome(mState);
-                break;
-            case 16:
-                testDeleteContacts(mState);
-                break;
-            case 17:
-                getPassButton().setEnabled(true);
-                mNm.cancelAll();
-                break;
-        }
-    }
-
-    // usePriorities true: B, C, A
-    // usePriorities false:
-    //   MODE_NONE: C, B, A
-    //   otherwise: A, B ,C
-    private void sendNotifications(int annotationMode, boolean usePriorities, boolean noisy) {
-        // TODO(cwren) Fixes flakey tests due to bug 17644321. Remove this line when it is fixed.
-        int baseId = NOTIFICATION_ID + (noisy ? 3 : 0);
-
-        // C, B, A when sorted by time.  Times must be in the past.
-        long whenA = System.currentTimeMillis() - 4000000L;
-        long whenB = System.currentTimeMillis() - 2000000L;
-        long whenC = System.currentTimeMillis() - 1000000L;
-
-        // B, C, A when sorted by priorities
-        int priorityA = usePriorities ? Notification.PRIORITY_MIN : Notification.PRIORITY_DEFAULT;
-        int priorityB = usePriorities ? Notification.PRIORITY_MAX : Notification.PRIORITY_DEFAULT;
-        int priorityC = usePriorities ? Notification.PRIORITY_LOW : Notification.PRIORITY_DEFAULT;
-
-        Notification.Builder alice = new Notification.Builder(mContext)
-                .setContentTitle(ALICE)
-                .setContentText(ALICE)
-                .setSmallIcon(R.drawable.fs_good)
-                .setPriority(priorityA)
-                .setCategory(Notification.CATEGORY_MESSAGE)
-                .setWhen(whenA);
-        alice.setDefaults(noisy ? Notification.DEFAULT_SOUND | Notification.DEFAULT_VIBRATE : 0);
-        addPerson(annotationMode, alice, mAliceUri, ALICE_PHONE, ALICE_EMAIL);
-        mNm.notify(ALICE, baseId + 1, alice.build());
-
-        Notification.Builder bob = new Notification.Builder(mContext)
-                .setContentTitle(BOB)
-                .setContentText(BOB)
-                .setSmallIcon(R.drawable.fs_warning)
-                .setPriority(priorityB)
-                .setCategory(Notification.CATEGORY_MESSAGE)
-                .setWhen(whenB);
-        addPerson(annotationMode, bob, mBobUri, BOB_PHONE, BOB_EMAIL);
-        mNm.notify(BOB, baseId + 2, bob.build());
-
-        Notification.Builder charlie = new Notification.Builder(mContext)
-                .setContentTitle(CHARLIE)
-                .setContentText(CHARLIE)
-                .setSmallIcon(R.drawable.fs_error)
-                .setPriority(priorityC)
-                .setCategory(Notification.CATEGORY_MESSAGE)
-                .setWhen(whenC);
-        addPerson(annotationMode, charlie, mCharlieUri, CHARLIE_PHONE, CHARLIE_EMAIL);
-        mNm.notify(CHARLIE, baseId + 3, charlie.build());
-    }
-
-    private void addPerson(int mode, Notification.Builder note,
-            Uri uri, String phone, String email) {
-        if (mode == MODE_URI && uri != null) {
-            note.addPerson(uri.toString());
-        } else if (mode == MODE_PHONE) {
-            note.addPerson(Uri.fromParts("tel", phone, null).toString());
-        } else if (mode == MODE_EMAIL) {
-            note.addPerson(Uri.fromParts("mailto", email, null).toString());
-        }
-    }
-
-    // Tests
-
-    private void testIsEnabled(int i) {
-        // no setup required
-        Intent settings = new Intent("android.settings.ACTION_NOTIFICATION_LISTENER_SETTINGS");
-        if (settings.resolveActivity(mPackageManager) == null) {
-            logWithStack("failed testIsEnabled: no settings activity");
-            mStatus[i] = FAIL;
-        } else {
-            // TODO: find out why Secure.ENABLED_NOTIFICATION_LISTENERS is hidden
-            String listeners = Secure.getString(getContentResolver(),
-                    "enabled_notification_listeners");
-            if (listeners != null && listeners.contains(LISTENER_PATH)) {
-                mStatus[i] = PASS;
-            } else {
-                mStatus[i] = WAIT_FOR_USER;
-            }
-        }
-        next();
-    }
-
-    private void testIsStarted(final int i) {
-        if (mStatus[i] == SETUP) {
-            mStatus[i] = READY;
-            // wait for the service to start
-            delay();
-        } else {
-            MockListener.probeListenerStatus(mContext,
-                    new MockListener.StatusCatcher() {
-                        @Override
-                        public void accept(int result) {
-                            if (result == Activity.RESULT_OK) {
-                                mStatus[i] = PASS;
-                            } else {
-                                logWithStack("failed testIsStarted: " + result);
-                                mStatus[i] = FAIL;
-                            }
-                            next();
-                        }
-                    });
-        }
-    }
-
-    private void testModeAll(final int i) {
-        if (mStatus[i] == READY || mStatus[i] == SETUP) {
-            MockListener.probeFilter(mContext,
-                    new MockListener.IntegerResultCatcher() {
-                        @Override
-                        public void accept(int mode) {
-                            if (mode == NotificationListenerService.INTERRUPTION_FILTER_ALL) {
-                                mStatus[i] = PASS;
-                            } else {
-                                logWithStack("waiting testModeAll: " + mode);
-                                mStatus[i] = WAIT_FOR_USER;
-                            }
-                            next();
-                        }
-                    });
-        }
-    }
-
-    private void testModePriority(final int i) {
-        if (mStatus[i] == READY || mStatus[i] == SETUP) {
-            MockListener.probeFilter(mContext,
-                    new MockListener.IntegerResultCatcher() {
-                        @Override
-                        public void accept(int mode) {
-                            if (mode == NotificationListenerService.INTERRUPTION_FILTER_PRIORITY) {
-                                mStatus[i] = PASS;
-                            } else {
-                                logWithStack("waiting testModePriority: " + mode);
-                                mStatus[i] = WAIT_FOR_USER;
-                            }
-                            next();
-                        }
-                    });
-        }
-    }
-
-    private void testModeNone(final int i) {
-        if (mStatus[i] == READY || mStatus[i] == SETUP) {
-            MockListener.probeFilter(mContext,
-                    new MockListener.IntegerResultCatcher() {
-                        @Override
-                        public void accept(int mode) {
-                            if (mode == NotificationListenerService.INTERRUPTION_FILTER_NONE) {
-                                mStatus[i] = PASS;
-                            } else {
-                                logWithStack("waiting testModeNone: " + mode);
-                                mStatus[i] = WAIT_FOR_USER;
-                            }
-                            next();
-                        }
-                    });
-        }
-    }
-
-
-    private void insertSingleContact(String name, String phone, String email, boolean starred) {
-        final ArrayList<ContentProviderOperation> operationList =
-                new ArrayList<ContentProviderOperation>();
-        ContentProviderOperation.Builder builder =
-                ContentProviderOperation.newInsert(ContactsContract.RawContacts.CONTENT_URI);
-        builder.withValue(ContactsContract.RawContacts.STARRED, starred ? 1 : 0);
-        operationList.add(builder.build());
-
-        builder = ContentProviderOperation.newInsert(ContactsContract.Data.CONTENT_URI);
-        builder.withValueBackReference(StructuredName.RAW_CONTACT_ID, 0);
-        builder.withValue(ContactsContract.Data.MIMETYPE, StructuredName.CONTENT_ITEM_TYPE);
-        builder.withValue(ContactsContract.CommonDataKinds.StructuredName.DISPLAY_NAME, name);
-        operationList.add(builder.build());
-
-        if (phone != null) {
-            builder = ContentProviderOperation.newInsert(ContactsContract.Data.CONTENT_URI);
-            builder.withValueBackReference(Phone.RAW_CONTACT_ID, 0);
-            builder.withValue(ContactsContract.Data.MIMETYPE, Phone.CONTENT_ITEM_TYPE);
-            builder.withValue(Phone.TYPE, Phone.TYPE_MOBILE);
-            builder.withValue(Phone.NUMBER, phone);
-            builder.withValue(ContactsContract.Data.IS_PRIMARY, 1);
-            operationList.add(builder.build());
-        }
-        if (email != null) {
-            builder = ContentProviderOperation.newInsert(ContactsContract.Data.CONTENT_URI);
-            builder.withValueBackReference(Email.RAW_CONTACT_ID, 0);
-            builder.withValue(ContactsContract.Data.MIMETYPE, Email.CONTENT_ITEM_TYPE);
-            builder.withValue(Email.TYPE, Email.TYPE_HOME);
-            builder.withValue(Email.DATA, email);
-            operationList.add(builder.build());
-        }
-
-        try {
-            mContext.getContentResolver().applyBatch(ContactsContract.AUTHORITY, operationList);
-        } catch (RemoteException e) {
-            Log.e(TAG, String.format("%s: %s", e.toString(), e.getMessage()));
-        } catch (OperationApplicationException e) {
-            Log.e(TAG, String.format("%s: %s", e.toString(), e.getMessage()));
-        }
-    }
-
-    private Uri lookupContact(String phone) {
-        Cursor c = null;
-        try {
-            Uri phoneUri = Uri.withAppendedPath(ContactsContract.PhoneLookup.CONTENT_FILTER_URI,
-                    Uri.encode(phone));
-            String[] projection = new String[] { ContactsContract.Contacts._ID,
-                    ContactsContract.Contacts.LOOKUP_KEY };
-            c = mContext.getContentResolver().query(phoneUri, projection, null, null, null);
-            if (c != null && c.getCount() > 0) {
-                c.moveToFirst();
-                int lookupIdx = c.getColumnIndex(ContactsContract.Contacts.LOOKUP_KEY);
-                int idIdx = c.getColumnIndex(ContactsContract.Contacts._ID);
-                String lookupKey = c.getString(lookupIdx);
-                long contactId = c.getLong(idIdx);
-                return ContactsContract.Contacts.getLookupUri(contactId, lookupKey);
-            }
-        } catch (Throwable t) {
-            Log.w(TAG, "Problem getting content resolver or performing contacts query.", t);
-        } finally {
-            if (c != null) {
-                c.close();
-            }
-        }
-        return null;
-    }
-
-    private void testInsertContacts(final int i) {
-        if (mStatus[i] == SETUP) {
-            insertSingleContact(ALICE, ALICE_PHONE, ALICE_EMAIL, true);
-            insertSingleContact(BOB, BOB_PHONE, BOB_EMAIL, false);
-            // charlie is not in contacts
-            mStatus[i] = READY;
-            // wait for insertions to move through the system
-            delay();
-        } else {
-            mAliceUri = lookupContact(ALICE_PHONE);
-            mBobUri = lookupContact(BOB_PHONE);
-            mCharlieUri = lookupContact(CHARLIE_PHONE);
-
-            mStatus[i] = PASS;
-            if (mAliceUri == null) { mStatus[i] = FAIL; }
-            if (mBobUri == null) { mStatus[i] = FAIL; }
-            if (mCharlieUri != null) { mStatus[i] = FAIL; }
-            next();
-        }
-    }
-
-    // ordered by time: C, B, A
-    private void testDefaultOrder(final int i) {
-        if (mStatus[i] == SETUP) {
-            mNm.cancelAll();
-            MockListener.resetListenerData(this);
-            mStatus[i] = CLEARED;
-            // wait for intent to move through the system
-            delay();
-        } else if (mStatus[i] == CLEARED) {
-            sendNotifications(MODE_NONE, false, false);
-            mStatus[i] = READY;
-            // wait for notifications to move through the system
-            delay();
-        } else {
-            MockListener.probeListenerOrder(mContext,
-                    new MockListener.StringListResultCatcher() {
-                        @Override
-                        public void accept(List<String> orderedKeys) {
-                            int rankA = findTagInKeys(ALICE, orderedKeys);
-                            int rankB = findTagInKeys(BOB, orderedKeys);
-                            int rankC = findTagInKeys(CHARLIE, orderedKeys);
-                            if (rankC < rankB && rankB < rankA) {
-                                mStatus[i] = PASS;
-                            } else {
-                                logWithStack("failed testDefaultOrder : "
-                                        + rankA + ", " + rankB + ", " + rankC);
-                                mStatus[i] = FAIL;
-                            }
-                            next();
-                        }
-                    });
-        }
-    }
-
-    // ordered by priority: B, C, A
-    private void testPrioritytOrder(final int i) {
-        if (mStatus[i] == SETUP) {
-            mNm.cancelAll();
-            MockListener.resetListenerData(this);
-            mStatus[i] = CLEARED;
-            // wait for intent to move through the system
-            delay();
-        } else if (mStatus[i] == CLEARED) {
-            sendNotifications(MODE_PHONE, true, false);
-            mStatus[i] = READY;
-            // wait for notifications to move through the system
-            delay();
-        } else {
-            MockListener.probeListenerOrder(mContext,
-                    new MockListener.StringListResultCatcher() {
-                        @Override
-                        public void accept(List<String> orderedKeys) {
-                            int rankA = findTagInKeys(ALICE, orderedKeys);
-                            int rankB = findTagInKeys(BOB, orderedKeys);
-                            int rankC = findTagInKeys(CHARLIE, orderedKeys);
-                            if (rankB < rankC && rankC < rankA) {
-                                mStatus[i] = PASS;
-                            } else {
-                                logWithStack("failed testPrioritytOrder : "
-                                        + rankA + ", " + rankB + ", " + rankC);
-                                mStatus[i] = FAIL;
-                            }
-                            next();
-                        }
-                    });
-        }
-    }
-
-    // B & C above the fold, A below
-    private void testAmbientBits(final int i) {
-        if (mStatus[i] == SETUP) {
-            mNm.cancelAll();
-            MockListener.resetListenerData(this);
-            mStatus[i] = CLEARED;
-            // wait for intent to move through the system
-            delay();
-        } else if (mStatus[i] == CLEARED) {
-            sendNotifications(MODE_PHONE, true, false);
-            mStatus[i] = READY;
-            // wait for notifications to move through the system
-            delay();
-        } else {
-            MockListener.probeListenerPayloads(mContext,
-                    new MockListener.StringListResultCatcher() {
-                        @Override
-                        public void accept(List<String> result) {
-                            boolean pass = false;
-                            Set<String> found = new HashSet<String>();
-                            if (result != null && result.size() > 0) {
-                                pass = true;
-                                for (String payloadData : result) {
-                                    try {
-                                        JSONObject payload = new JSONObject(payloadData);
-                                        String tag = payload.getString(JSON_TAG);
-                                        if (found.contains(tag)) {
-                                            // multiple entries for same notification!
-                                            pass = false;
-                                        } else if (ALICE.equals(tag)) {
-                                            found.add(ALICE);
-                                            pass &= payload.getBoolean(JSON_AMBIENT);
-                                        } else if (BOB.equals(tag)) {
-                                            found.add(BOB);
-                                            pass &= !payload.getBoolean(JSON_AMBIENT);
-                                        } else if (CHARLIE.equals(tag)) {
-                                            found.add(CHARLIE);
-                                            pass &= !payload.getBoolean(JSON_AMBIENT);
-                                        }
-                                    } catch (JSONException e) {
-                                        pass = false;
-                                        Log.e(TAG, "failed to unpack data from mocklistener", e);
-                                    }
-                                }
-                            }
-                            pass &= found.size() == 3;
-                            mStatus[i] = pass ? PASS : FAIL;
-                            next();
-                        }
-                    });
-        }
-    }
-
-    // ordered by contact affinity: A, B, C
-    private void testLookupUriOrder(final int i) {
-        if (mStatus[i] == SETUP) {
-            mNm.cancelAll();
-            MockListener.resetListenerData(this);
-            mStatus[i] = CLEARED;
-            // wait for intent to move through the system
-            delay();
-        } else if (mStatus[i] == CLEARED) {
-            sendNotifications(MODE_URI, false, false);
-            mStatus[i] = READY;
-            // wait for notifications to move through the system
-            delay();
-        } else {
-            MockListener.probeListenerOrder(mContext,
-                    new MockListener.StringListResultCatcher() {
-                        @Override
-                        public void accept(List<String> orderedKeys) {
-                            int rankA = findTagInKeys(ALICE, orderedKeys);
-                            int rankB = findTagInKeys(BOB, orderedKeys);
-                            int rankC = findTagInKeys(CHARLIE, orderedKeys);
-                            if (rankA < rankB && rankB < rankC) {
-                                mStatus[i] = PASS;
-                            } else {
-                                logWithStack("failed testLookupUriOrder : "
-                                        + rankA + ", " + rankB + ", " + rankC);
-                                mStatus[i] = FAIL;
-                            }
-                            next();
-                        }
-                    });
-        }
-    }
-
-    // ordered by contact affinity: A, B, C
-    private void testEmailOrder(final int i) {
-        if (mStatus[i] == SETUP) {
-            mNm.cancelAll();
-            MockListener.resetListenerData(this);
-            mStatus[i] = DELAYED_SETUP;
-            // wait for intent to move through the system
-            delay();
-        } else if (mStatus[i] == DELAYED_SETUP) {
-            sendNotifications(MODE_EMAIL, false, false);
-            mStatus[i] = READY;
-            // wait for notifications to move through the system
-            delay();
-        } else {
-            MockListener.probeListenerOrder(mContext,
-                    new MockListener.StringListResultCatcher() {
-                        @Override
-                        public void accept(List<String> orderedKeys) {
-                            int rankA = findTagInKeys(ALICE, orderedKeys);
-                            int rankB = findTagInKeys(BOB, orderedKeys);
-                            int rankC = findTagInKeys(CHARLIE, orderedKeys);
-                            if (rankA < rankB && rankB < rankC) {
-                                mStatus[i] = PASS;
-                            } else {
-                                logWithStack("failed testEmailOrder : "
-                                        + rankA + ", " + rankB + ", " + rankC);
-                                mStatus[i] = FAIL;
-                            }
-                            next();
-                        }
-                    });
-        }
-    }
-
-    // ordered by contact affinity: A, B, C
-    private void testPhoneOrder(final int i) {
-        if (mStatus[i] == SETUP) {
-            mNm.cancelAll();
-            MockListener.resetListenerData(this);
-            mStatus[i] = CLEARED;
-            // wait for intent to move through the system
-            delay();
-        } else if (mStatus[i] == CLEARED) {
-            sendNotifications(MODE_PHONE, false, false);
-            mStatus[i] = READY;
-            // wait for notifications to move through the system
-            delay();
-        } else {
-            MockListener.probeListenerOrder(mContext,
-                    new MockListener.StringListResultCatcher() {
-                        @Override
-                        public void accept(List<String> orderedKeys) {
-                            int rankA = findTagInKeys(ALICE, orderedKeys);
-                            int rankB = findTagInKeys(BOB, orderedKeys);
-                            int rankC = findTagInKeys(CHARLIE, orderedKeys);
-                            if (rankA < rankB && rankB < rankC) {
-                                mStatus[i] = PASS;
-                            } else {
-                                logWithStack("failed testPhoneOrder : "
-                                        + rankA + ", " + rankB + ", " + rankC);
-                                mStatus[i] = FAIL;
-                            }
-                            next();
-                        }
-                    });
-        }
-    }
-
-    // A starts at the top then falls to the bottom
-    private void testInterruptionOrder(final int i) {
-        if (mStatus[i] == SETUP) {
-            mNm.cancelAll();
-            MockListener.resetListenerData(this);
-            mStatus[i] = CLEARED;
-            // wait for intent to move through the system
-            delay();
-        } else if (mStatus[i] == CLEARED) {
-            sendNotifications(MODE_NONE, false, true);
-            mStatus[i] = READY;
-            // wait for notifications to move through the system
-            delay();
-        } else if (mStatus[i] == READY) {
-            MockListener.probeListenerOrder(mContext,
-                    new MockListener.StringListResultCatcher() {
-                        @Override
-                        public void accept(List<String> orderedKeys) {
-                            int rankA = findTagInKeys(ALICE, orderedKeys);
-                            int rankB = findTagInKeys(BOB, orderedKeys);
-                            int rankC = findTagInKeys(CHARLIE, orderedKeys);
-                            if (rankA < rankB && rankA < rankC) {
-                                mStatus[i] = RETRY;
-                                delay(12000);
-                            } else {
-                                logWithStack("noisy notification did not sort to top.");
-                                mStatus[i] = FAIL;
-                                next();
-                            }
-                        }
-                    });
-        } else if (mStatus[i] == RETRY) {
-            MockListener.probeListenerOrder(mContext,
-                    new MockListener.StringListResultCatcher() {
-                        @Override
-                        public void accept(List<String> orderedKeys) {
-                            int rankA = findTagInKeys(ALICE, orderedKeys);
-                            int rankB = findTagInKeys(BOB, orderedKeys);
-                            int rankC = findTagInKeys(CHARLIE, orderedKeys);
-                            if (rankA > rankB && rankA > rankC) {
-                                mStatus[i] = PASS;
-                            } else {
-                                logWithStack("noisy notification did not fade back into the list.");
-                                mStatus[i] = FAIL;
-                            }
-                            next();
-                        }
-                    });
-        }
-    }
-
-    // Nothing should be filtered when mode is ALL
-    private void testAllInterceptsNothing(final int i) {
-        if (mStatus[i] == SETUP) {
-            mNm.cancelAll();
-            MockListener.resetListenerData(this);
-            mStatus[i] = CLEARED;
-            // wait for intent to move through the system
-            delay();
-        } else if (mStatus[i] == CLEARED) {
-            sendNotifications(MODE_URI, false, false);
-            mStatus[i] = READY;
-            // wait for notifications to move through the system
-            delay();
-        } else {
-            MockListener.probeListenerPayloads(mContext,
-                    new MockListener.StringListResultCatcher() {
-                        @Override
-                        public void accept(List<String> result) {
-                            boolean pass = false;
-                            Set<String> found = new HashSet<String>();
-                            if (result != null && result.size() > 0) {
-                                pass = true;
-                                for (String payloadData : result) {
-                                    try {
-                                        JSONObject payload = new JSONObject(payloadData);
-                                        String tag = payload.getString(JSON_TAG);
-                                        if (found.contains(tag)) {
-                                            // multiple entries for same notification!
-                                            pass = false;
-                                        } else if (ALICE.equals(tag)) {
-                                            found.add(ALICE);
-                                            pass &= payload.getBoolean(JSON_MATCHES_ZEN_FILTER);
-                                        } else if (BOB.equals(tag)) {
-                                            found.add(BOB);
-                                            pass &= payload.getBoolean(JSON_MATCHES_ZEN_FILTER);
-                                        } else if (CHARLIE.equals(tag)) {
-                                            found.add(CHARLIE);
-                                            pass &= payload.getBoolean(JSON_MATCHES_ZEN_FILTER);
-                                        }
-                                    } catch (JSONException e) {
-                                        pass = false;
-                                        Log.e(TAG, "failed to unpack data from mocklistener", e);
-                                    }
-                                }
-                            }
-                            pass &= found.size() == 3;
-                            mStatus[i] = pass ? PASS : FAIL;
-                            next();
-                        }
-                    });
-        }
-    }
-
-    // A should be filtered when mode is Priority/Starred.
-    private void testPriorityInterceptsSome(final int i) {
-        if (mStatus[i] == SETUP) {
-            mNm.cancelAll();
-            MockListener.resetListenerData(this);
-            mStatus[i] = CLEARED;
-            // wait for intent to move through the system
-            delay();
-        } else if (mStatus[i] == CLEARED) {
-            sendNotifications(MODE_URI, false, false);
-            mStatus[i] = READY;
-            // wait for notifications to move through the system
-            delay();
-        } else {
-            MockListener.probeListenerPayloads(mContext,
-                    new MockListener.StringListResultCatcher() {
-                        @Override
-                        public void accept(List<String> result) {
-                            boolean pass = false;
-                            Set<String> found = new HashSet<String>();
-                            if (result != null && result.size() > 0) {
-                                pass = true;
-                                for (String payloadData : result) {
-                                    try {
-                                        JSONObject payload = new JSONObject(payloadData);
-                                        String tag = payload.getString(JSON_TAG);
-                                        if (found.contains(tag)) {
-                                            // multiple entries for same notification!
-                                            pass = false;
-                                        } else if (ALICE.equals(tag)) {
-                                            found.add(ALICE);
-                                            pass &= payload.getBoolean(JSON_MATCHES_ZEN_FILTER);
-                                        } else if (BOB.equals(tag)) {
-                                            found.add(BOB);
-                                            pass &= !payload.getBoolean(JSON_MATCHES_ZEN_FILTER);
-                                        } else if (CHARLIE.equals(tag)) {
-                                            found.add(CHARLIE);
-                                            pass &= !payload.getBoolean(JSON_MATCHES_ZEN_FILTER);
-                                        }
-                                    } catch (JSONException e) {
-                                        pass = false;
-                                        Log.e(TAG, "failed to unpack data from mocklistener", e);
-                                    }
-                                }
-                            }
-                            pass &= found.size() == 3;
-                            mStatus[i] = pass ? PASS : FAIL;
-                            next();
-                        }
-                    });
-        }
-    }
-
-    // Nothing should get through when mode is None.
-    private void testNoneInterceptsAll(final int i) {
-        if (mStatus[i] == SETUP) {
-            mNm.cancelAll();
-            MockListener.resetListenerData(this);
-            mStatus[i] = CLEARED;
-            // wait for intent to move through the system
-            delay();
-        } else if (mStatus[i] == CLEARED) {
-            sendNotifications(MODE_URI, false, false);
-            mStatus[i] = READY;
-            // wait for notifications to move through the system
-            delay();
-        } else {
-            MockListener.probeListenerPayloads(mContext,
-                    new MockListener.StringListResultCatcher() {
-                        @Override
-                        public void accept(List<String> result) {
-                            boolean pass = false;
-                            Set<String> found = new HashSet<String>();
-                            if (result != null && result.size() > 0) {
-                                pass = true;
-                                for (String payloadData : result) {
-                                    try {
-                                        JSONObject payload = new JSONObject(payloadData);
-                                        String tag = payload.getString(JSON_TAG);
-                                        if (found.contains(tag)) {
-                                            // multiple entries for same notification!
-                                            pass = false;
-                                        } else if (ALICE.equals(tag)) {
-                                            found.add(ALICE);
-                                            pass &= !payload.getBoolean(JSON_MATCHES_ZEN_FILTER);
-                                        } else if (BOB.equals(tag)) {
-                                            found.add(BOB);
-                                            pass &= !payload.getBoolean(JSON_MATCHES_ZEN_FILTER);
-                                        } else if (CHARLIE.equals(tag)) {
-                                            found.add(CHARLIE);
-                                            pass &= !payload.getBoolean(JSON_MATCHES_ZEN_FILTER);
-                                        }
-                                    } catch (JSONException e) {
-                                        pass = false;
-                                        Log.e(TAG, "failed to unpack data from mocklistener", e);
-                                    }
-                                }
-                            }
-                            pass &= found.size() == 3;
-                            mStatus[i] = pass ? PASS : FAIL;
-                            next();
-                        }
-                    });
-        }
-    }
-
-    /** Search a list of notification keys for a givcen tag. */
-    private int findTagInKeys(String tag, List<String> orderedKeys) {
-        for (int i = 0; i < orderedKeys.size(); i++) {
-            if (orderedKeys.get(i).contains(tag)) {
-                return i;
-            }
-        }
-        return -1;
-    }
-
-    private void testDeleteContacts(final int i) {
-        if (mStatus[i] == SETUP) {
-            final ArrayList<ContentProviderOperation> operationList =
-                    new ArrayList<ContentProviderOperation>();
-            operationList.add(ContentProviderOperation.newDelete(mAliceUri).build());
-            operationList.add(ContentProviderOperation.newDelete(mBobUri).build());
-            try {
-                mContext.getContentResolver().applyBatch(ContactsContract.AUTHORITY, operationList);
-                mStatus[i] = READY;
-            } catch (RemoteException e) {
-                Log.e(TAG, String.format("%s: %s", e.toString(), e.getMessage()));
-                mStatus[i] = FAIL;
-            } catch (OperationApplicationException e) {
-                Log.e(TAG, String.format("%s: %s", e.toString(), e.getMessage()));
-                mStatus[i] = FAIL;
-            }
-            // wait for deletions to move through the system
-            delay(3000);
-        } else if (mStatus[i] == READY) {
-            mAliceUri = lookupContact(ALICE_PHONE);
-            mBobUri = lookupContact(BOB_PHONE);
-            mCharlieUri = lookupContact(CHARLIE_PHONE);
-
-            mStatus[i] = PASS;
-            if (mAliceUri != null) { mStatus[i] = FAIL; }
-            if (mBobUri != null) { mStatus[i] = FAIL; }
-            if (mCharlieUri != null) { mStatus[i] = FAIL; }
-            next();
-        }
-    }
-}
diff --git a/apps/CtsVerifier/src/com/android/cts/verifier/notifications/NotificationListenerVerifierActivity.java b/apps/CtsVerifier/src/com/android/cts/verifier/notifications/NotificationListenerVerifierActivity.java
index 0ef595b..ace194c 100644
--- a/apps/CtsVerifier/src/com/android/cts/verifier/notifications/NotificationListenerVerifierActivity.java
+++ b/apps/CtsVerifier/src/com/android/cts/verifier/notifications/NotificationListenerVerifierActivity.java
@@ -16,76 +16,30 @@
 
 package com.android.cts.verifier.notifications;
 
-import static com.android.cts.verifier.notifications.MockListener.JSON_FLAGS;
-import static com.android.cts.verifier.notifications.MockListener.JSON_ICON;
-import static com.android.cts.verifier.notifications.MockListener.JSON_ID;
-import static com.android.cts.verifier.notifications.MockListener.JSON_PACKAGE;
-import static com.android.cts.verifier.notifications.MockListener.JSON_TAG;
-import static com.android.cts.verifier.notifications.MockListener.JSON_WHEN;
-
 import android.annotation.SuppressLint;
 import android.app.Activity;
 import android.app.Notification;
-import android.app.NotificationManager;
-import android.app.PendingIntent;
-import android.app.Service;
-import android.content.ComponentName;
-import android.content.Context;
-import android.content.Intent;
-import android.content.pm.PackageManager;
-import android.os.Bundle;
-import android.os.IBinder;
 import android.provider.Settings.Secure;
 import android.util.Log;
-import android.view.LayoutInflater;
 import android.view.View;
 import android.view.ViewGroup;
-import android.widget.Button;
-import android.widget.ImageView;
-import android.widget.TextView;
 
-import com.android.cts.verifier.PassFailButtons;
 import com.android.cts.verifier.R;
-import com.android.cts.verifier.nfc.TagVerifierActivity;
 
 import org.json.JSONException;
 import org.json.JSONObject;
 
+import java.util.ArrayList;
 import java.util.HashSet;
 import java.util.List;
 import java.util.Set;
 import java.util.UUID;
-import java.util.concurrent.LinkedBlockingQueue;
 
-public class NotificationListenerVerifierActivity extends PassFailButtons.Activity
-implements Runnable {
-    private static final String TAG = TagVerifierActivity.class.getSimpleName();
-    private static final String STATE = "state";
-    private static LinkedBlockingQueue<String> sDeletedQueue = new LinkedBlockingQueue<String>();
+import static com.android.cts.verifier.notifications.MockListener.*;
 
-    protected static final String LISTENER_PATH = "com.android.cts.verifier/" +
-            "com.android.cts.verifier.notifications.MockListener";
-    protected static final int SETUP = 0;
-    protected static final int PASS = 1;
-    protected static final int FAIL = 2;
-    protected static final int WAIT_FOR_USER = 3;
-    protected static final int CLEARED = 4;
-    protected static final int READY = 5;
-    protected static final int RETRY = 6;
-
-    protected static final int NOTIFICATION_ID = 1001;
-
-    protected int mState;
-    protected int[] mStatus;
-    protected PackageManager mPackageManager;
-    protected NotificationManager mNm;
-    protected Context mContext;
-    protected Runnable mRunner;
-    protected View mHandler;
-    protected String mPackageString;
-
-    private LayoutInflater mInflater;
-    private ViewGroup mItemList;
+public class NotificationListenerVerifierActivity extends InteractiveVerifierActivity
+        implements Runnable {
+    private static final String TAG = "NoListenerVerifier";
 
     private String mTag1;
     private String mTag2;
@@ -103,199 +57,31 @@
     private int mFlag2;
     private int mFlag3;
 
-    public static class DismissService extends Service {
-        @Override
-        public IBinder onBind(Intent intent) {
-            return null;
-        }
-
-        @Override
-        public void onStart(Intent intent, int startId) {
-            sDeletedQueue.offer(intent.getAction());
-        }
+    @Override
+    int getTitleResource() {
+        return R.string.nls_test;
     }
 
     @Override
-    protected void onCreate(Bundle savedInstanceState) {
-        onCreate(savedInstanceState, R.layout.nls_main);
-        setInfoResources(R.string.nls_test, R.string.nls_info, -1);
+    int getInstructionsResource() {
+        return R.string.nls_info;
     }
 
-    protected void onCreate(Bundle savedInstanceState, int layoutId) {
-        super.onCreate(savedInstanceState);
-
-        if (savedInstanceState != null) {
-            mState = savedInstanceState.getInt(STATE, 0);
-        }
-        mContext = this;
-        mRunner = this;
-        mNm = (NotificationManager) getSystemService(Context.NOTIFICATION_SERVICE);
-        mPackageManager = getPackageManager();
-        mInflater = getLayoutInflater();
-        View view = mInflater.inflate(layoutId, null);
-        mItemList = (ViewGroup) view.findViewById(R.id.nls_test_items);
-        mHandler = mItemList;
-        createTestItems();
-        mStatus = new int[mItemList.getChildCount()];
-        setContentView(view);
-
-        setPassFailButtonClickListeners();
-        getPassButton().setEnabled(false);
-    }
+    // Test Setup
 
     @Override
-    protected void onSaveInstanceState (Bundle outState) {
-        outState.putInt(STATE, mState);
-    }
-
-    @Override
-    protected void onResume() {
-        super.onResume();
-        next();
-    }
-
-    // Interface Utilities
-
-    protected void createTestItems() {
-        createNlsSettingsItem(R.string.nls_enable_service);
-        createAutoItem(R.string.nls_service_started);
-        createAutoItem(R.string.nls_note_received);
-        createAutoItem(R.string.nls_payload_intact);
-        createAutoItem(R.string.nls_clear_one);
-        createAutoItem(R.string.nls_clear_all);
-        createNlsSettingsItem(R.string.nls_disable_service);
-        createAutoItem(R.string.nls_service_stopped);
-        createAutoItem(R.string.nls_note_missed);
-    }
-
-    protected void setItemState(int index, boolean passed) {
-        ViewGroup item = (ViewGroup) mItemList.getChildAt(index);
-        ImageView status = (ImageView) item.findViewById(R.id.nls_status);
-        status.setImageResource(passed ? R.drawable.fs_good : R.drawable.fs_error);
-        View button = item.findViewById(R.id.nls_action_button);
-        button.setClickable(false);
-        button.setEnabled(false);
-        status.invalidate();
-    }
-
-    protected void markItemWaiting(int index) {
-        ViewGroup item = (ViewGroup) mItemList.getChildAt(index);
-        ImageView status = (ImageView) item.findViewById(R.id.nls_status);
-        status.setImageResource(R.drawable.fs_warning);
-        status.invalidate();
-    }
-
-    protected View createNlsSettingsItem(int messageId) {
-        return createUserItem(messageId, R.string.nls_start_settings);
-    }
-
-    protected View createRetryItem(int messageId) {
-        return createUserItem(messageId, R.string.attention_ready);
-    }
-
-    protected View createUserItem(int messageId, int actionId) {
-        View item = mInflater.inflate(R.layout.nls_item, mItemList, false);
-        TextView instructions = (TextView) item.findViewById(R.id.nls_instructions);
-        instructions.setText(messageId);
-        Button button = (Button) item.findViewById(R.id.nls_action_button);
-        button.setText(actionId);
-        mItemList.addView(item);
-        button.setTag(actionId);
-        return item;
-    }
-
-    protected View createAutoItem(int stringId) {
-        View item = mInflater.inflate(R.layout.nls_item, mItemList, false);
-        TextView instructions = (TextView) item.findViewById(R.id.nls_instructions);
-        instructions.setText(stringId);
-        View button = item.findViewById(R.id.nls_action_button);
-        button.setVisibility(View.GONE);
-        mItemList.addView(item);
-        return item;
-    }
-
-    // Test management
-
-    public void run() {
-        while (mState < mStatus.length && mStatus[mState] != WAIT_FOR_USER) {
-            if (mStatus[mState] == PASS) {
-                setItemState(mState, true);
-                mState++;
-            } else if (mStatus[mState] == FAIL) {
-                setItemState(mState, false);
-                return;
-            } else {
-                break;
-            }
-        }
-
-        if (mState < mStatus.length && mStatus[mState] == WAIT_FOR_USER) {
-            markItemWaiting(mState);
-        }
-
-        updateStateMachine();
-    }
-
-    protected void updateStateMachine() {
-        switch (mState) {
-            case 0:
-                testIsEnabled(mState);
-                break;
-            case 1:
-                testIsStarted(mState);
-                break;
-            case 2:
-                testNotificationRecieved(mState);
-                break;
-            case 3:
-                testDataIntact(mState);
-                break;
-            case 4:
-                testDismissOne(mState);
-                break;
-            case 5:
-                testDismissAll(mState);
-                break;
-            case 6:
-                testIsDisabled(mState);
-                break;
-            case 7:
-                testIsStopped(mState);
-                break;
-            case 8:
-                testNotificationNotRecieved(mState);
-                break;
-            case 9:
-                getPassButton().setEnabled(true);
-                mNm.cancelAll();
-                break;
-        }
-    }
-
-    public void launchSettings() {
-        startActivity(
-                new Intent("android.settings.ACTION_NOTIFICATION_LISTENER_SETTINGS"));
-    }
-
-    public void actionPressed(View v) {
-        Object tag = v.getTag();
-        if (tag instanceof Integer) {
-            int id = ((Integer) tag).intValue();
-            if (id == R.string.nls_start_settings) {
-                launchSettings();
-            } else if (id == R.string.attention_ready) {
-                mStatus[mState] = READY;
-                next();
-            }
-        }
-    }
-
-    protected PendingIntent makeIntent(int code, String tag) {
-        Intent intent = new Intent(tag);
-        intent.setComponent(new ComponentName(mContext, DismissService.class));
-        PendingIntent pi = PendingIntent.getService(mContext, code, intent,
-                PendingIntent.FLAG_UPDATE_CURRENT);
-        return pi;
+    protected List<InteractiveTestCase> createTestItems() {
+        List<InteractiveTestCase> tests = new ArrayList<>(9);
+        tests.add(new IsEnabledTest());
+        tests.add(new ServiceStartedTest());
+        tests.add(new NotificationRecievedTest());
+        tests.add(new DataIntactTest());
+        tests.add(new DismissOneTest());
+        tests.add(new DismissAllTest());
+        tests.add(new IsDisabledTest());
+        tests.add(new ServiceStoppedTest());
+        tests.add(new NotificationNotReceivedTest());
+        return tests;
     }
 
     @SuppressLint("NewApi")
@@ -310,9 +96,9 @@
         mWhen2 = System.currentTimeMillis() + 2;
         mWhen3 = System.currentTimeMillis() + 3;
 
-        mIcon1 = R.drawable.fs_good;
-        mIcon2 = R.drawable.fs_error;
-        mIcon3 = R.drawable.fs_warning;
+        mIcon1 = R.drawable.ic_stat_alice;
+        mIcon2 = R.drawable.ic_stat_bob;
+        mIcon3 = R.drawable.ic_stat_charlie;
 
         mId1 = NOTIFICATION_ID + 1;
         mId2 = NOTIFICATION_ID + 2;
@@ -321,356 +107,352 @@
         mPackageString = "com.android.cts.verifier";
 
         Notification n1 = new Notification.Builder(mContext)
-        .setContentTitle("ClearTest 1")
-        .setContentText(mTag1.toString())
-        .setPriority(Notification.PRIORITY_LOW)
-        .setSmallIcon(mIcon1)
-        .setWhen(mWhen1)
-        .setDeleteIntent(makeIntent(1, mTag1))
-        .setOnlyAlertOnce(true)
-        .build();
+                .setContentTitle("ClearTest 1")
+                .setContentText(mTag1.toString())
+                .setPriority(Notification.PRIORITY_LOW)
+                .setSmallIcon(mIcon1)
+                .setWhen(mWhen1)
+                .setDeleteIntent(makeIntent(1, mTag1))
+                .setOnlyAlertOnce(true)
+                .build();
         mNm.notify(mTag1, mId1, n1);
         mFlag1 = Notification.FLAG_ONLY_ALERT_ONCE;
 
         Notification n2 = new Notification.Builder(mContext)
-        .setContentTitle("ClearTest 2")
-        .setContentText(mTag2.toString())
-        .setPriority(Notification.PRIORITY_HIGH)
-        .setSmallIcon(mIcon2)
-        .setWhen(mWhen2)
-        .setDeleteIntent(makeIntent(2, mTag2))
-        .setAutoCancel(true)
-        .build();
+                .setContentTitle("ClearTest 2")
+                .setContentText(mTag2.toString())
+                .setPriority(Notification.PRIORITY_HIGH)
+                .setSmallIcon(mIcon2)
+                .setWhen(mWhen2)
+                .setDeleteIntent(makeIntent(2, mTag2))
+                .setAutoCancel(true)
+                .build();
         mNm.notify(mTag2, mId2, n2);
         mFlag2 = Notification.FLAG_AUTO_CANCEL;
 
         Notification n3 = new Notification.Builder(mContext)
-        .setContentTitle("ClearTest 3")
-        .setContentText(mTag3.toString())
-        .setPriority(Notification.PRIORITY_LOW)
-        .setSmallIcon(mIcon3)
-        .setWhen(mWhen3)
-        .setDeleteIntent(makeIntent(3, mTag3))
-        .setAutoCancel(true)
-        .setOnlyAlertOnce(true)
-        .build();
+                .setContentTitle("ClearTest 3")
+                .setContentText(mTag3.toString())
+                .setPriority(Notification.PRIORITY_LOW)
+                .setSmallIcon(mIcon3)
+                .setWhen(mWhen3)
+                .setDeleteIntent(makeIntent(3, mTag3))
+                .setAutoCancel(true)
+                .setOnlyAlertOnce(true)
+                .build();
         mNm.notify(mTag3, mId3, n3);
         mFlag3 = Notification.FLAG_ONLY_ALERT_ONCE | Notification.FLAG_AUTO_CANCEL;
     }
 
-    /**
-     * Return to the state machine to progress through the tests.
-     */
-    protected void next() {
-        mHandler.removeCallbacks(mRunner);
-        mHandler.post(mRunner);
-    }
-
-    /**
-     * Wait for things to settle before returning to the state machine.
-     */
-    protected void delay() {
-        delay(2000);
-    }
-
-    /**
-     * Wait for some time.
-     */
-    protected void delay(long waitTime) {
-        mHandler.removeCallbacks(mRunner);
-        mHandler.postDelayed(mRunner, waitTime);
-    }
-
-    protected boolean checkEquals(long expected, long actual, String message) {
-        if (expected == actual) {
-            return true;
-        }
-        logWithStack(String.format(message, expected, actual));
-        return false;
-    }
-
-    protected boolean checkEquals(String expected, String actual, String message) {
-        if (expected.equals(actual)) {
-            return true;
-        }
-        logWithStack(String.format(message, expected, actual));
-        return false;
-    }
-
-    protected boolean checkFlagSet(int expected, int actual, String message) {
-        if ((expected & actual) != 0) {
-            return true;
-        }
-        logWithStack(String.format(message, expected, actual));
-        return false;
-    };
-
-    protected void logWithStack(String message) {
-        Throwable stackTrace = new Throwable();
-        stackTrace.fillInStackTrace();
-        Log.e(TAG, message, stackTrace);
-    }
-
     // Tests
 
-    private void testIsEnabled(int i) {
-        // no setup required
-        Intent settings = new Intent("android.settings.ACTION_NOTIFICATION_LISTENER_SETTINGS");
-        if (settings.resolveActivity(mPackageManager) == null) {
-            logWithStack("failed testIsEnabled: no settings activity");
-            mStatus[i] = FAIL;
-        } else {
-            // TODO: find out why Secure.ENABLED_NOTIFICATION_LISTENERS is hidden
-            String listeners = Secure.getString(getContentResolver(),
-                    "enabled_notification_listeners");
-            if (listeners != null && listeners.contains(LISTENER_PATH)) {
-                mStatus[i] = PASS;
-            } else {
-                mStatus[i] = WAIT_FOR_USER;
-            }
-        }
-        next();
-    }
+    private class NotificationRecievedTest extends InteractiveTestCase {
+        @Override
+        View inflate(ViewGroup parent) {
+            return createAutoItem(parent, R.string.nls_note_received);
 
-    private void testIsStarted(final int i) {
-        if (mStatus[i] == SETUP) {
-            mStatus[i] = READY;
-            // wait for the service to start
-            delay();
-        } else {
-            MockListener.probeListenerStatus(mContext,
-                    new MockListener.StatusCatcher() {
-                @Override
-                public void accept(int result) {
-                    if (result == Activity.RESULT_OK) {
-                        mStatus[i] = PASS;
-                    } else {
-                        logWithStack("failed testIsStarted: " + result);
-                        mStatus[i] = FAIL;
-                    }
-                    next();
-                }
-            });
         }
-    }
 
-    private void testNotificationRecieved(final int i) {
-        if (mStatus[i] == SETUP) {
-            MockListener.resetListenerData(this);
-            mStatus[i] = CLEARED;
-            // wait for intent to move through the system
-            delay();
-        } else if (mStatus[i] == CLEARED) {
+        @Override
+        void setUp() {
             sendNotifications();
-            mStatus[i] = READY;
+            status = READY;
             // wait for notifications to move through the system
             delay();
-        } else {
+        }
+
+        @Override
+        void test() {
             MockListener.probeListenerPosted(mContext,
                     new MockListener.StringListResultCatcher() {
-                @Override
-                public void accept(List<String> result) {
-                    if (result != null && result.size() > 0 && result.contains(mTag1)) {
-                        mStatus[i] = PASS;
-                    } else {
-                        logWithStack("failed testNotificationRecieved");
-                        mStatus[i] = FAIL;
-                    }
-                    next();
-                }});
-        }
-    }
-
-    private void testDataIntact(final int i) {
-        // no setup required
-        MockListener.probeListenerPayloads(mContext,
-                new MockListener.StringListResultCatcher() {
-            @Override
-            public void accept(List<String> result) {
-                boolean pass = false;
-                Set<String> found = new HashSet<String>();
-                if (result != null && result.size() > 0) {
-                    pass = true;
-                    for(String payloadData : result) {
-                        try {
-                            JSONObject payload = new JSONObject(payloadData);
-                            pass &= checkEquals(mPackageString, payload.getString(JSON_PACKAGE),
-                                    "data integrity test fail: notification package (%s, %s)");
-                            String tag = payload.getString(JSON_TAG);
-                            if (mTag1.equals(tag)) {
-                                found.add(mTag1);
-                                pass &= checkEquals(mIcon1, payload.getInt(JSON_ICON),
-                                        "data integrity test fail: notification icon (%d, %d)");
-                                pass &= checkFlagSet(mFlag1, payload.getInt(JSON_FLAGS),
-                                        "data integrity test fail: notification flags (%d, %d)");
-                                pass &= checkEquals(mId1, payload.getInt(JSON_ID),
-                                        "data integrity test fail: notification ID (%d, %d)");
-                                pass &= checkEquals(mWhen1, payload.getLong(JSON_WHEN),
-                                        "data integrity test fail: notification when (%d, %d)");
-                            } else if (mTag2.equals(tag)) {
-                                found.add(mTag2);
-                                pass &= checkEquals(mIcon2, payload.getInt(JSON_ICON),
-                                        "data integrity test fail: notification icon (%d, %d)");
-                                pass &= checkFlagSet(mFlag2, payload.getInt(JSON_FLAGS),
-                                        "data integrity test fail: notification flags (%d, %d)");
-                                pass &= checkEquals(mId2, payload.getInt(JSON_ID),
-                                        "data integrity test fail: notification ID (%d, %d)");
-                                pass &= checkEquals(mWhen2, payload.getLong(JSON_WHEN),
-                                        "data integrity test fail: notification when (%d, %d)");
-                            } else if (mTag3.equals(tag)) {
-                                found.add(mTag3);
-                                pass &= checkEquals(mIcon3, payload.getInt(JSON_ICON),
-                                        "data integrity test fail: notification icon (%d, %d)");
-                                pass &= checkFlagSet(mFlag3, payload.getInt(JSON_FLAGS),
-                                        "data integrity test fail: notification flags (%d, %d)");
-                                pass &= checkEquals(mId3, payload.getInt(JSON_ID),
-                                        "data integrity test fail: notification ID (%d, %d)");
-                                pass &= checkEquals(mWhen3, payload.getLong(JSON_WHEN),
-                                        "data integrity test fail: notification when (%d, %d)");
+                        @Override
+                        public void accept(List<String> result) {
+                            if (result != null && result.size() > 0 && result.contains(mTag1)) {
+                                status = PASS;
                             } else {
-                                pass = false;
-                                logWithStack("failed on unexpected notification tag: " + tag);
+                                logFail();
+                                status = FAIL;
                             }
-                        } catch (JSONException e) {
-                            pass = false;
-                            Log.e(TAG, "failed to unpack data from mocklistener", e);
-                        }
-                    }
-                }
-                pass &= found.size() == 3;
-                mStatus[i] = pass ? PASS : FAIL;
-                next();
-            }});
-    }
-
-    private void testDismissOne(final int i) {
-        if (mStatus[i] == SETUP) {
-            MockListener.resetListenerData(this);
-            mStatus[i] = CLEARED;
-            // wait for intent to move through the system
-            delay();
-        } else if (mStatus[i] == CLEARED) {
-            MockListener.clearOne(mContext, mTag1, NOTIFICATION_ID + 1);
-            mStatus[i] = READY;
-            delay();
-        } else {
-            MockListener.probeListenerRemoved(mContext,
-                    new MockListener.StringListResultCatcher() {
-                @Override
-                public void accept(List<String> result) {
-                    if (result != null && result.size() > 0 && result.contains(mTag1)) {
-                        mStatus[i] = PASS;
-                        next();
-                    } else {
-                        if (mStatus[i] == RETRY) {
-                            logWithStack("failed testDismissOne");
-                            mStatus[i] = FAIL;
                             next();
-                        } else {
-                            logWithStack("failed testDismissOne, once: retrying");
-                            mStatus[i] = RETRY;
-                            delay();
                         }
-                    }
-                }});
+                    });
+            delay();  // in case the catcher never returns
         }
     }
 
-    private void testDismissAll(final int i) {
-        if (mStatus[i] == SETUP) {
-            MockListener.resetListenerData(this);
-            mStatus[i] = CLEARED;
-            // wait for intent to move through the system
-            delay();
-        } else if (mStatus[i] == CLEARED) {
-            MockListener.clearAll(mContext);
-            mStatus[i] = READY;
-            delay();
-        } else {
-            MockListener.probeListenerRemoved(mContext,
+    private class DataIntactTest extends InteractiveTestCase {
+        @Override
+        View inflate(ViewGroup parent) {
+            return createAutoItem(parent, R.string.nls_payload_intact);
+        }
+
+        @Override
+        void test() {
+            MockListener.probeListenerPayloads(mContext,
                     new MockListener.StringListResultCatcher() {
-                @Override
-                public void accept(List<String> result) {
-                    if (result != null && result.size() == 2
-                            && result.contains(mTag2) && result.contains(mTag3)) {
-                        mStatus[i] = PASS;
-                        next();
-                    } else {
-                        if (mStatus[i] == RETRY) {
-                            logWithStack("failed testDismissAll");
-                            mStatus[i] = FAIL;
+                        @Override
+                        public void accept(List<String> result) {
+                            Set<String> found = new HashSet<String>();
+                            if (result == null || result.size() == 0) {
+                                status = FAIL;
+                                return;
+                            }
+                            boolean pass = true;
+                            for (String payloadData : result) {
+                                try {
+                                    JSONObject payload = new JSONObject(payloadData);
+                                    pass &= checkEquals(mPackageString,
+                                            payload.getString(JSON_PACKAGE),
+                                            "data integrity test: notification package (%s, %s)");
+                                    String tag = payload.getString(JSON_TAG);
+                                    if (mTag1.equals(tag)) {
+                                        found.add(mTag1);
+                                        pass &= checkEquals(mIcon1, payload.getInt(JSON_ICON),
+                                                "data integrity test: notification icon (%d, %d)");
+                                        pass &= checkFlagSet(mFlag1, payload.getInt(JSON_FLAGS),
+                                                "data integrity test: notification flags (%d, %d)");
+                                        pass &= checkEquals(mId1, payload.getInt(JSON_ID),
+                                                "data integrity test: notification ID (%d, %d)");
+                                        pass &= checkEquals(mWhen1, payload.getLong(JSON_WHEN),
+                                                "data integrity test: notification when (%d, %d)");
+                                    } else if (mTag2.equals(tag)) {
+                                        found.add(mTag2);
+                                        pass &= checkEquals(mIcon2, payload.getInt(JSON_ICON),
+                                                "data integrity test: notification icon (%d, %d)");
+                                        pass &= checkFlagSet(mFlag2, payload.getInt(JSON_FLAGS),
+                                                "data integrity test: notification flags (%d, %d)");
+                                        pass &= checkEquals(mId2, payload.getInt(JSON_ID),
+                                                "data integrity test: notification ID (%d, %d)");
+                                        pass &= checkEquals(mWhen2, payload.getLong(JSON_WHEN),
+                                                "data integrity test: notification when (%d, %d)");
+                                    } else if (mTag3.equals(tag)) {
+                                        found.add(mTag3);
+                                        pass &= checkEquals(mIcon3, payload.getInt(JSON_ICON),
+                                                "data integrity test: notification icon (%d, %d)");
+                                        pass &= checkFlagSet(mFlag3, payload.getInt(JSON_FLAGS),
+                                                "data integrity test: notification flags (%d, %d)");
+                                        pass &= checkEquals(mId3, payload.getInt(JSON_ID),
+                                                "data integrity test: notification ID (%d, %d)");
+                                        pass &= checkEquals(mWhen3, payload.getLong(JSON_WHEN),
+                                                "data integrity test: notification when (%d, %d)");
+                                    } else {
+                                        pass = false;
+                                        logFail("unexpected notification tag: " + tag);
+                                    }
+                                } catch (JSONException e) {
+                                    pass = false;
+                                    Log.e(TAG, "failed to unpack data from mocklistener", e);
+                                }
+                            }
+
+                            pass &= found.size() == 3;
+                            status = pass ? PASS : FAIL;
                             next();
-                        } else {
-                            logWithStack("failed testDismissAll, once: retrying");
-                            mStatus[i] = RETRY;
-                            delay();
                         }
-                    }
-                }
-            });
+                    });
+            delay();  // in case the catcher never returns
+        }
+
+        @Override
+        void tearDown() {
+            mNm.cancelAll();
+            MockListener.resetListenerData(mContext);
+            delay();
         }
     }
 
-    private void testIsDisabled(int i) {
-        // no setup required
-        // TODO: find out why Secure.ENABLED_NOTIFICATION_LISTENERS is hidden
-        String listeners = Secure.getString(getContentResolver(),
-                "enabled_notification_listeners");
-        if (listeners == null || !listeners.contains(LISTENER_PATH)) {
-            mStatus[i] = PASS;
+    private class DismissOneTest extends InteractiveTestCase {
+        @Override
+        View inflate(ViewGroup parent) {
+            return createAutoItem(parent, R.string.nls_clear_one);
+        }
+
+        @Override
+        void setUp() {
+            sendNotifications();
+            status = READY;
+            delay();
+        }
+
+        @Override
+        void test() {
+            if (status == READY) {
+                MockListener.clearOne(mContext, mTag1, mId1);
+                status = RETEST;
+            } else {
+                MockListener.probeListenerRemoved(mContext,
+                        new MockListener.StringListResultCatcher() {
+                            @Override
+                            public void accept(List<String> result) {
+                                if (result != null && result.size() != 0
+                                        && result.contains(mTag1)
+                                        && !result.contains(mTag2)
+                                        && !result.contains(mTag3)) {
+                                    status = PASS;
+                                } else {
+                                    logFail();
+                                    status = FAIL;
+                                }
+                                next();
+                            }
+                        });
+            }
+            delay();
+        }
+
+        @Override
+        void tearDown() {
+            mNm.cancelAll();
+            MockListener.resetListenerData(mContext);
+            delay();
+        }
+    }
+
+    private class DismissAllTest extends InteractiveTestCase {
+        @Override
+        View inflate(ViewGroup parent) {
+            return createAutoItem(parent, R.string.nls_clear_all);
+        }
+
+        @Override
+        void setUp() {
+            sendNotifications();
+            status = READY;
+            delay();
+        }
+
+        @Override
+        void test() {
+            if (status == READY) {
+                MockListener.clearAll(mContext);
+                status = RETEST;
+            } else {
+                MockListener.probeListenerRemoved(mContext,
+                        new MockListener.StringListResultCatcher() {
+                            @Override
+                            public void accept(List<String> result) {
+                                if (result != null && result.size() != 0
+                                        && result.contains(mTag1)
+                                        && result.contains(mTag2)
+                                        && result.contains(mTag3)) {
+                                    status = PASS;
+                                } else {
+                                    logFail();
+                                    status = FAIL;
+                                }
+                                next();
+                            }
+                        });
+            }
+            delay();  // in case the catcher never returns
+        }
+
+        @Override
+        void tearDown() {
+            mNm.cancelAll();
+            MockListener.resetListenerData(mContext);
+            delay();
+        }
+    }
+
+    private class IsDisabledTest extends InteractiveTestCase {
+        @Override
+        View inflate(ViewGroup parent) {
+            return createNlsSettingsItem(parent, R.string.nls_disable_service);
+        }
+
+        @Override
+        boolean autoStart() {
+            return true;
+        }
+
+        @Override
+        void test() {
+            String listeners = Secure.getString(getContentResolver(),
+                    ENABLED_NOTIFICATION_LISTENERS);
+            if (listeners == null || !listeners.contains(LISTENER_PATH)) {
+                status = PASS;
+            } else {
+                status = WAIT_FOR_USER;
+            }
             next();
-        } else {
-            mStatus[i] = WAIT_FOR_USER;
+        }
+
+        @Override
+        void tearDown() {
+            MockListener.resetListenerData(mContext);
             delay();
         }
     }
 
-    private void testIsStopped(final int i) {
-        if (mStatus[i] == SETUP) {
-            mStatus[i] = READY;
-            // wait for the service to start
-            delay();
-        } else {
+    private class ServiceStoppedTest extends InteractiveTestCase {
+        @Override
+        View inflate(ViewGroup parent) {
+            return createAutoItem(parent, R.string.nls_service_stopped);
+        }
+
+        @Override
+        void test() {
             MockListener.probeListenerStatus(mContext,
                     new MockListener.StatusCatcher() {
-                @Override
-                public void accept(int result) {
-                    if (result == Activity.RESULT_OK) {
-                        logWithStack("failed testIsStopped");
-                        mStatus[i] = FAIL;
-                    } else {
-                        mStatus[i] = PASS;
-                    }
-                    next();
-                }
-            });
+                        @Override
+                        public void accept(int result) {
+                            if (result == Activity.RESULT_OK) {
+                                logFail();
+                                status = FAIL;
+                            } else {
+                                status = PASS;
+                            }
+                            next();
+                        }
+                    });
+            delay();  // in case the catcher never returns
+        }
+
+        @Override
+        void tearDown() {
+            // wait for intent to move through the system
+            delay();
         }
     }
 
-    private void testNotificationNotRecieved(final int i) {
-        if (mStatus[i] == SETUP) {
-            MockListener.resetListenerData(this);
-            mStatus[i] = CLEARED;
-            // wait for intent to move through the system
-            delay();
-        } else if (mStatus[i] == CLEARED) {
-            // setup for testNotificationRecieved
+    private class NotificationNotReceivedTest extends InteractiveTestCase {
+        @Override
+        View inflate(ViewGroup parent) {
+            return createAutoItem(parent, R.string.nls_note_missed);
+
+        }
+
+        @Override
+        void setUp() {
             sendNotifications();
-            mStatus[i] = READY;
+            status = READY;
             delay();
-        } else {
+        }
+
+        @Override
+        void test() {
             MockListener.probeListenerPosted(mContext,
                     new MockListener.StringListResultCatcher() {
-                @Override
-                public void accept(List<String> result) {
-                    if (result == null || result.size() == 0) {
-                        mStatus[i] = PASS;
-                    } else {
-                        logWithStack("failed testNotificationNotRecieved");
-                        mStatus[i] = FAIL;
-                    }
-                    next();
-                }});
+                        @Override
+                        public void accept(List<String> result) {
+                            if (result == null || result.size() == 0) {
+                                status = PASS;
+                            } else {
+                                logFail();
+                                status = FAIL;
+                            }
+                            next();
+                        }
+                    });
+            delay();  // in case the catcher never returns
+        }
+
+        @Override
+        void tearDown() {
+            mNm.cancelAll();
+            MockListener.resetListenerData(mContext);
+            delay();
         }
     }
 }
diff --git a/apps/CtsVerifier/src/com/android/cts/verifier/os/TimeoutResetActivity.java b/apps/CtsVerifier/src/com/android/cts/verifier/os/TimeoutResetActivity.java
new file mode 100644
index 0000000..be78556
--- /dev/null
+++ b/apps/CtsVerifier/src/com/android/cts/verifier/os/TimeoutResetActivity.java
@@ -0,0 +1,112 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.android.cts.verifier.os;
+
+import android.app.Activity;
+import android.app.AlarmManager;
+import android.app.PendingIntent;
+import android.content.Context;
+import android.content.Intent;
+import android.os.Bundle;
+import android.util.Log;
+import android.view.WindowManager;
+
+import java.lang.reflect.Field;
+
+/**
+ * Activity resets the screen timeout to its original timeout. Used devices without Device Admin.
+ */
+public class TimeoutResetActivity extends Activity {
+    public static final String EXTRA_OLD_TIMEOUT = "com.android.cts.verifier.extra.OLD_TIMEOUT";
+    /** Set the timeout to the default to reset the activity to if not specified. */
+    public static final long FALLBACK_TIMEOUT = -1L;
+    /**
+     * Empirically determined buffer time in milliseconds between setting short timeout time and
+     * resetting the timeout.
+     */
+    public static final long RESET_BUFFER_TIME = 2000L;
+    /** Short timeout to trigger screen off. */
+    public static final long SCREEN_OFF_TIMEOUT = 0L;
+    public static final String TAG = TimeoutResetActivity.class.getSimpleName();
+
+    private static long getUserActivityTimeout(WindowManager.LayoutParams params) {
+        try {
+            return getUserActivityTimeoutField(params).getLong(params);
+        } catch (Exception e) {
+            Log.e(TAG, "error loading the userActivityTimeout field", e);
+            return -1;
+        }
+    }
+
+    private static Field getUserActivityTimeoutField(WindowManager.LayoutParams params)
+            throws NoSuchFieldException {
+        return params.getClass().getField("userActivityTimeout");
+    }
+
+    private static void setUserActivityTimeout(WindowManager.LayoutParams params, long timeout) {
+        try {
+            getUserActivityTimeoutField(params).setLong(params, timeout);
+            Log.d(TAG, "UserActivityTimeout set to " + timeout);
+        } catch (Exception e) {
+            Log.e(TAG, "error setting the userActivityTimeout field", e);
+            throw new RuntimeException(e);
+        }
+    }
+
+    public static void turnOffScreen(final Activity activity) {
+        activity.runOnUiThread(new Runnable() {
+            @Override
+            public void run() {
+                WindowManager.LayoutParams params = activity.getWindow().getAttributes();
+
+                // to restore timeout after shutoff
+                final long oldTimeout = getUserActivityTimeout(params);
+
+                final long timeout = SCREEN_OFF_TIMEOUT;
+                setUserActivityTimeout(params, timeout);
+
+                // upon setting this, timeout will be reduced
+                activity.getWindow().setAttributes(params);
+
+                ((AlarmManager) activity.getSystemService(ALARM_SERVICE)).setExact(
+                        AlarmManager.RTC,
+                        System.currentTimeMillis() + RESET_BUFFER_TIME,
+                        PendingIntent.getActivity(
+                                activity.getApplicationContext(),
+                                0,
+                                new Intent(activity, TimeoutResetActivity.class)
+                                        .putExtra(EXTRA_OLD_TIMEOUT, oldTimeout),
+                                0));
+            }
+        });
+    }
+
+    @Override
+    protected void onCreate(Bundle savedInstanceState) {
+        super.onCreate(savedInstanceState);
+
+        long timeout = getIntent().getLongExtra(EXTRA_OLD_TIMEOUT, FALLBACK_TIMEOUT);
+        if (timeout < 1000) { // in case the old timeout was super low by accident
+            timeout = FALLBACK_TIMEOUT;
+        }
+
+        WindowManager.LayoutParams params = getWindow().getAttributes();
+        setUserActivityTimeout(params, timeout);
+        getWindow().setAttributes(params);
+
+        finish();
+    }
+}
diff --git a/apps/CtsVerifier/src/com/android/cts/verifier/sensors/SensorPowerTestActivity.java b/apps/CtsVerifier/src/com/android/cts/verifier/sensors/SensorPowerTestActivity.java
index 8370d3e..74d51e4 100644
--- a/apps/CtsVerifier/src/com/android/cts/verifier/sensors/SensorPowerTestActivity.java
+++ b/apps/CtsVerifier/src/com/android/cts/verifier/sensors/SensorPowerTestActivity.java
@@ -63,7 +63,7 @@
 
     @Override
     protected void activitySetUp() throws InterruptedException {
-        mScreenManipulator = new SensorTestScreenManipulator(getApplicationContext());
+        mScreenManipulator = new SensorTestScreenManipulator(this);
         mScreenManipulator.initialize(this);
     }
 
diff --git a/apps/CtsVerifier/src/com/android/cts/verifier/sensors/base/SensorCtsTestActivity.java b/apps/CtsVerifier/src/com/android/cts/verifier/sensors/base/SensorCtsTestActivity.java
index 16c5fcd..6512fd3 100644
--- a/apps/CtsVerifier/src/com/android/cts/verifier/sensors/base/SensorCtsTestActivity.java
+++ b/apps/CtsVerifier/src/com/android/cts/verifier/sensors/base/SensorCtsTestActivity.java
@@ -69,7 +69,7 @@
     protected void activitySetUp() throws InterruptedException {
         PowerManager powerManager = (PowerManager) getSystemService(Context.POWER_SERVICE);
         mWakeLock =  powerManager.newWakeLock(PowerManager.PARTIAL_WAKE_LOCK, "SensorCtsTests");
-        mScreenManipulator = new SensorTestScreenManipulator(getApplicationContext());
+        mScreenManipulator = new SensorTestScreenManipulator(this);
         mScreenManipulator.initialize(this);
 
         SensorTestLogger logger = getTestLogger();
@@ -80,6 +80,7 @@
         // automated CTS tests run with the USB connected, so the AP doesn't go to sleep
         // here we are not connected to USB, so we need to hold a wake-lock to avoid going to sleep
         mWakeLock.acquire();
+
         mScreenManipulator.turnScreenOff();
     }
 
diff --git a/apps/CtsVerifier/src/com/android/cts/verifier/sensors/helpers/SensorTestScreenManipulator.java b/apps/CtsVerifier/src/com/android/cts/verifier/sensors/helpers/SensorTestScreenManipulator.java
index 835ff56..2956ed7 100644
--- a/apps/CtsVerifier/src/com/android/cts/verifier/sensors/helpers/SensorTestScreenManipulator.java
+++ b/apps/CtsVerifier/src/com/android/cts/verifier/sensors/helpers/SensorTestScreenManipulator.java
@@ -16,6 +16,7 @@
 
 package com.android.cts.verifier.sensors.helpers;
 
+import com.android.cts.verifier.os.TimeoutResetActivity;
 import com.android.cts.verifier.sensors.base.BaseSensorTestActivity;
 import com.android.cts.verifier.sensors.base.ISensorTestStateContainer;
 
@@ -27,8 +28,12 @@
 import android.content.Context;
 import android.content.Intent;
 import android.content.IntentFilter;
+import android.content.pm.PackageManager;
 import android.os.PowerManager;
 import android.text.TextUtils;
+import android.util.Log;
+
+import java.util.concurrent.CountDownLatch;
 
 /**
  * A class that provides functionality to manipulate the state of the device's screen.
@@ -52,8 +57,9 @@
  * - in a single-threaded environment
  */
 public class SensorTestScreenManipulator {
+    private static final String TAG = SensorTestScreenManipulator.class.getSimpleName();
 
-    private final Context mContext;
+    private final Activity mActivity;
     private final DevicePolicyManager mDevicePolicyManager;
     private final ComponentName mComponentName;
     private final PowerManager.WakeLock mWakeUpScreenWakeLock;
@@ -62,16 +68,17 @@
     private InternalBroadcastReceiver mBroadcastReceiver;
     private boolean mTurnOffScreenOnPowerDisconnected;
 
-    public SensorTestScreenManipulator(Context context) {
-        mContext = context;
-        mComponentName = SensorDeviceAdminReceiver.getComponentName(context);
+
+    public SensorTestScreenManipulator(Activity activity) {
+        mActivity = activity;
+        mComponentName = SensorDeviceAdminReceiver.getComponentName(activity);
         mDevicePolicyManager =
-                (DevicePolicyManager) context.getSystemService(Context.DEVICE_POLICY_SERVICE);
+                (DevicePolicyManager) activity.getSystemService(Context.DEVICE_POLICY_SERVICE);
 
         int levelAndFlags = PowerManager.FULL_WAKE_LOCK
                 | PowerManager.ON_AFTER_RELEASE
                 | PowerManager.ACQUIRE_CAUSES_WAKEUP;
-        PowerManager powerManager = (PowerManager) context.getSystemService(Context.POWER_SERVICE);
+        PowerManager powerManager = (PowerManager) activity.getSystemService(Context.POWER_SERVICE);
         mWakeUpScreenWakeLock = powerManager.newWakeLock(levelAndFlags, "SensorTestWakeUpScreen");
         mWakeUpScreenWakeLock.setReferenceCounted(false);
         mKeepScreenOnWakeLock = powerManager.newWakeLock(levelAndFlags, "SensorTestKeepScreenOn");
@@ -87,7 +94,7 @@
      */
     public synchronized void initialize(ISensorTestStateContainer stateContainer)
             throws InterruptedException {
-        if (!isDeviceAdminInitialized()) {
+        if (hasDeviceAdminFeature() && !isDeviceAdminInitialized()) {
             Intent intent = new Intent(DevicePolicyManager.ACTION_ADD_DEVICE_ADMIN);
             intent.putExtra(DevicePolicyManager.EXTRA_DEVICE_ADMIN, mComponentName);
             int resultCode = stateContainer.executeActivity(intent);
@@ -101,7 +108,7 @@
             mBroadcastReceiver = new InternalBroadcastReceiver();
             IntentFilter intentFilter = new IntentFilter();
             intentFilter.addAction(Intent.ACTION_POWER_DISCONNECTED);
-            mContext.registerReceiver(mBroadcastReceiver, intentFilter);
+            mActivity.registerReceiver(mBroadcastReceiver, intentFilter);
         }
     }
 
@@ -111,7 +118,7 @@
      */
     public synchronized  void close() {
         if (mBroadcastReceiver != null) {
-            mContext.unregisterReceiver(mBroadcastReceiver);
+            mActivity.unregisterReceiver(mBroadcastReceiver);
             mBroadcastReceiver = null;
         }
     }
@@ -121,8 +128,30 @@
      */
     public synchronized void turnScreenOff() {
         ensureDeviceAdminInitialized();
+
+        final CountDownLatch screenOffSignal = new CountDownLatch(1);
+        BroadcastReceiver screenOffBroadcastReceiver = new BroadcastReceiver() {
+            @Override
+            public void onReceive(Context context, Intent intent) {
+                mActivity.unregisterReceiver(this);
+                screenOffSignal.countDown();
+            }
+        };
+        mActivity.registerReceiver(
+                screenOffBroadcastReceiver, new IntentFilter(Intent.ACTION_SCREEN_OFF));
+
         releaseScreenOn();
-        mDevicePolicyManager.lockNow();
+        if (hasDeviceAdminFeature()) {
+            mDevicePolicyManager.lockNow();
+        } else {
+            TimeoutResetActivity.turnOffScreen(mActivity);
+        }
+
+        try {
+            screenOffSignal.await();
+        } catch (InterruptedException e) {
+            Log.wtf(TAG, "error waiting for screen off signal", e);
+        }
     }
 
     /**
@@ -175,7 +204,7 @@
     }
 
     private void ensureDeviceAdminInitialized() throws IllegalStateException {
-        if (!isDeviceAdminInitialized()) {
+        if (hasDeviceAdminFeature() && !isDeviceAdminInitialized()) {
             throw new IllegalStateException("Component must be initialized before it can be used.");
         }
     }
@@ -188,6 +217,10 @@
                 .hasGrantedPolicy(mComponentName, DeviceAdminInfo.USES_POLICY_FORCE_LOCK);
     }
 
+    private boolean hasDeviceAdminFeature() {
+        return mActivity.getPackageManager().hasSystemFeature(PackageManager.FEATURE_DEVICE_ADMIN);
+    }
+
     private class InternalBroadcastReceiver extends BroadcastReceiver {
         @Override
         public void onReceive(Context context, Intent intent) {
diff --git a/hostsidetests/dumpsys/Android.mk b/hostsidetests/dumpsys/Android.mk
new file mode 100644
index 0000000..51ea31f
--- /dev/null
+++ b/hostsidetests/dumpsys/Android.mk
@@ -0,0 +1,32 @@
+# Copyright (C) 2014 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+LOCAL_PATH := $(call my-dir)
+
+include $(CLEAR_VARS)
+
+LOCAL_SRC_FILES := $(call all-java-files-under, src)
+
+LOCAL_MODULE_TAGS := optional
+
+# Must match the package name in CtsTestCaseList.mk
+LOCAL_MODULE := CtsDumpsysHostTestCases
+
+LOCAL_JAVA_LIBRARIES := cts-tradefed tradefed-prebuilt
+
+LOCAL_CTS_TEST_PACKAGE := android.host.dumpsys
+
+include $(BUILD_CTS_HOST_JAVA_LIBRARY)
+
+include $(call all-makefiles-under,$(LOCAL_PATH))
diff --git a/hostsidetests/dumpsys/src/android/dumpsys/cts/DumpsysHostTest.java b/hostsidetests/dumpsys/src/android/dumpsys/cts/DumpsysHostTest.java
new file mode 100644
index 0000000..e4f8ad5
--- /dev/null
+++ b/hostsidetests/dumpsys/src/android/dumpsys/cts/DumpsysHostTest.java
@@ -0,0 +1,346 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.dumpsys.cts;
+
+import com.android.ddmlib.Log;
+import com.android.tradefed.device.ITestDevice;
+import com.android.tradefed.testtype.DeviceTestCase;
+
+import java.io.BufferedReader;
+import java.io.StringReader;
+import java.util.HashSet;
+import java.util.Set;
+
+/**
+ * Test to check the format of the dumps of various services (currently only procstats is tested).
+ */
+public class DumpsysHostTest extends DeviceTestCase {
+    private static final String TAG = "DumpsysHostTest";
+
+    /**
+     * A reference to the device under test.
+     */
+    private ITestDevice mDevice;
+
+    @Override
+    protected void setUp() throws Exception {
+        super.setUp();
+        mDevice = getDevice();
+    }
+
+    /**
+     * Tests the output of "dumpsys procstats -c". This is a proxy for testing "dumpsys procstats
+     * --checkin", since the latter is not idempotent.
+     *
+     * @throws Exception
+     */
+    public void testProcstatsOutput() throws Exception {
+        if (mDevice.getApiLevel() < 19) {
+            Log.i(TAG, "No Procstats output before KitKat, skipping test.");
+            return;
+        }
+
+        String procstats = mDevice.executeShellCommand("dumpsys procstats -c");
+        assertNotNull(procstats);
+        assertTrue(procstats.length() > 0);
+
+        Set<String> seenTags = new HashSet<>();
+        int version = -1;
+
+        try (BufferedReader reader = new BufferedReader(
+                new StringReader(procstats))) {
+
+            String line;
+            while ((line = reader.readLine()) != null) {
+                if (line.isEmpty()) {
+                    continue;
+                }
+
+                // extra space to make sure last column shows up.
+                if (line.endsWith(",")) {
+                  line = line + " ";
+                }
+                String[] parts = line.split(",");
+                seenTags.add(parts[0]);
+
+                switch (parts[0]) {
+                    case "vers":
+                        assertEquals(2, parts.length);
+                        version = Integer.parseInt(parts[1]);
+                        break;
+                    case "period":
+                        checkPeriod(parts);
+                        break;
+                    case "pkgproc":
+                        checkPkgProc(parts, version);
+                        break;
+                    case "pkgpss":
+                        checkPkgPss(parts, version);
+                        break;
+                    case "pkgsvc-bound":
+                    case "pkgsvc-exec":
+                    case "pkgsvc-run":
+                    case "pkgsvc-start":
+                        checkPkgSvc(parts, version);
+                        break;
+                    case "pkgkills":
+                        checkPkgKills(parts, version);
+                        break;
+                    case "proc":
+                        checkProc(parts);
+                        break;
+                    case "pss":
+                        checkPss(parts);
+                        break;
+                    case "kills":
+                        checkKills(parts);
+                        break;
+                    case "total":
+                        checkTotal(parts);
+                        break;
+                    default:
+                        break;
+                }
+            }
+        }
+
+        // spot check a few tags
+        assertSeenTag(seenTags, "pkgproc");
+        assertSeenTag(seenTags, "proc");
+        assertSeenTag(seenTags, "pss");
+        assertSeenTag(seenTags, "total");
+    }
+
+    private void checkPeriod(String[] parts) {
+        assertEquals(5, parts.length);
+        assertNotNull(parts[1]); // date
+        assertInteger(parts[2]); // start time (msec)
+        assertInteger(parts[3]); // end time (msec)
+        assertNotNull(parts[4]); // status
+    }
+
+    private void checkPkgProc(String[] parts, int version) {
+        int statesStartIndex;
+
+        if (version < 4) {
+            assertTrue(parts.length >= 4);
+            assertNotNull(parts[1]); // package name
+            assertInteger(parts[2]); // uid
+            assertNotNull(parts[3]); // process
+            statesStartIndex = 4;
+        } else {
+            assertTrue(parts.length >= 5);
+            assertNotNull(parts[1]); // package name
+            assertInteger(parts[2]); // uid
+            assertInteger(parts[3]); // app version
+            assertNotNull(parts[4]); // process
+            statesStartIndex = 5;
+        }
+
+        for (int i = statesStartIndex; i < parts.length; i++) {
+            String[] subparts = parts[i].split(":");
+            assertEquals(2, subparts.length);
+            checkTag(subparts[0], true); // tag
+            assertInteger(subparts[1]); // duration (msec)
+        }
+    }
+
+    private void checkTag(String tag, boolean hasProcess) {
+        assertEquals(hasProcess ? 3 : 2, tag.length());
+
+        // screen: 0 = off, 1 = on
+        char s = tag.charAt(0);
+        if (s != '0' && s != '1') {
+            fail("malformed tag: " + tag);
+        }
+
+        // memory: n = normal, m = moderate, l = low, c = critical
+        char m = tag.charAt(1);
+        if (m != 'n' && m != 'm' && m != 'l' && m != 'c') {
+            fail("malformed tag: " + tag);
+        }
+
+        if (hasProcess) {
+            char p = tag.charAt(2);
+            assertTrue("malformed tag: " + tag, p >= 'a' && p <= 'z');
+        }
+    }
+
+    private void checkPkgPss(String[] parts, int version) {
+        int statesStartIndex;
+
+        if (version < 4) {
+            assertTrue(parts.length >= 4);
+            assertNotNull(parts[1]); // package name
+            assertInteger(parts[2]); // uid
+            assertNotNull(parts[3]); // process
+            statesStartIndex = 4;
+        } else {
+            assertTrue(parts.length >= 5);
+            assertNotNull(parts[1]); // package name
+            assertInteger(parts[2]); // uid
+            assertInteger(parts[3]); // app version
+            assertNotNull(parts[4]); // process
+            statesStartIndex = 5;
+        }
+
+        for (int i = statesStartIndex; i < parts.length; i++) {
+            String[] subparts = parts[i].split(":");
+            assertEquals(8, subparts.length);
+            checkTag(subparts[0], true); // tag
+            assertInteger(subparts[1]); // sample size
+            assertInteger(subparts[2]); // pss min
+            assertInteger(subparts[3]); // pss avg
+            assertInteger(subparts[4]); // pss max
+            assertInteger(subparts[5]); // uss min
+            assertInteger(subparts[6]); // uss avg
+            assertInteger(subparts[7]); // uss max
+        }
+    }
+
+    private void checkPkgSvc(String[] parts, int version) {
+        int statesStartIndex;
+
+        if (version < 4) {
+            assertTrue(parts.length >= 5);
+            assertNotNull(parts[1]); // package name
+            assertInteger(parts[2]); // uid
+            assertNotNull(parts[3]); // service name
+            assertInteger(parts[4]); // count
+            statesStartIndex = 5;
+        } else {
+            assertTrue(parts.length >= 6);
+            assertNotNull(parts[1]); // package name
+            assertInteger(parts[2]); // uid
+            assertInteger(parts[3]); // app version
+            assertNotNull(parts[4]); // service name
+            assertInteger(parts[5]); // count
+            statesStartIndex = 6;
+        }
+
+        for (int i = statesStartIndex; i < parts.length; i++) {
+            String[] subparts = parts[i].split(":");
+            assertEquals(2, subparts.length);
+            checkTag(subparts[0], false); // tag
+            assertInteger(subparts[1]); // duration (msec)
+        }
+    }
+
+    private void checkPkgKills(String[] parts, int version) {
+        String pssStr;
+
+        if (version < 4) {
+            assertEquals(8, parts.length);
+            assertNotNull(parts[1]); // package name
+            assertInteger(parts[2]); // uid
+            assertNotNull(parts[3]); // process
+            assertInteger(parts[4]); // wakes
+            assertInteger(parts[5]); // cpu
+            assertInteger(parts[6]); // cached
+            pssStr = parts[7];
+        } else {
+            assertEquals(9, parts.length);
+            assertNotNull(parts[1]); // package name
+            assertInteger(parts[2]); // uid
+            assertInteger(parts[3]); // app version
+            assertNotNull(parts[4]); // process
+            assertInteger(parts[5]); // wakes
+            assertInteger(parts[6]); // cpu
+            assertInteger(parts[7]); // cached
+            pssStr = parts[8];
+        }
+
+        String[] subparts = pssStr.split(":");
+        assertEquals(3, subparts.length);
+        assertInteger(subparts[0]); // pss min
+        assertInteger(subparts[1]); // pss avg
+        assertInteger(subparts[2]); // pss max
+    }
+
+    private void checkProc(String[] parts) {
+        assertTrue(parts.length >= 3);
+        assertNotNull(parts[1]); // package name
+        assertInteger(parts[2]); // uid
+
+        for (int i = 3; i < parts.length; i++) {
+            String[] subparts = parts[i].split(":");
+            assertEquals(2, subparts.length);
+            checkTag(subparts[0], true); // tag
+            assertInteger(subparts[1]); // duration (msec)
+        }
+    }
+
+    private void checkPss(String[] parts) {
+        assertTrue(parts.length >= 3);
+        assertNotNull(parts[1]); // package name
+        assertInteger(parts[2]); // uid
+
+        for (int i = 3; i < parts.length; i++) {
+            String[] subparts = parts[i].split(":");
+            assertEquals(8, subparts.length);
+            checkTag(subparts[0], true); // tag
+            assertInteger(subparts[1]); // sample size
+            assertInteger(subparts[2]); // pss min
+            assertInteger(subparts[3]); // pss avg
+            assertInteger(subparts[4]); // pss max
+            assertInteger(subparts[5]); // uss min
+            assertInteger(subparts[6]); // uss avg
+            assertInteger(subparts[7]); // uss max
+        }
+    }
+
+    private void checkKills(String[] parts) {
+        assertEquals(7, parts.length);
+        assertNotNull(parts[1]); // package name
+        assertInteger(parts[2]); // uid
+        assertInteger(parts[3]); // wakes
+        assertInteger(parts[4]); // cpu
+        assertInteger(parts[5]); // cached
+        String pssStr = parts[6];
+
+        String[] subparts = pssStr.split(":");
+        assertEquals(3, subparts.length);
+        assertInteger(subparts[0]); // pss min
+        assertInteger(subparts[1]); // pss avg
+        assertInteger(subparts[2]); // pss max
+    }
+
+    private void checkTotal(String[] parts) {
+        assertTrue(parts.length >= 2);
+        for (int i = 1; i < parts.length; i++) {
+            String[] subparts = parts[i].split(":");
+            checkTag(subparts[0], false); // tag
+
+            if (subparts[1].contains("sysmemusage")) {
+                break; // see b/18340771
+            }
+            assertInteger(subparts[1]); // duration (msec)
+        }
+    }
+
+    private static void assertInteger(String input) {
+        try {
+            Long.parseLong(input);
+        } catch (NumberFormatException e) {
+            fail("Expected an integer but found \"" + input + "\"");
+        }
+    }
+
+    private static void assertSeenTag(Set<String> seenTags, String tag) {
+        assertTrue("No line starting with \"" + tag + ",\"", seenTags.contains(tag));
+    }
+}
diff --git a/hostsidetests/monkey/src/com/android/cts/monkey/MonkeyTest.java b/hostsidetests/monkey/src/com/android/cts/monkey/MonkeyTest.java
index f141d8f..997f7c6 100644
--- a/hostsidetests/monkey/src/com/android/cts/monkey/MonkeyTest.java
+++ b/hostsidetests/monkey/src/com/android/cts/monkey/MonkeyTest.java
@@ -37,7 +37,8 @@
     }
 
     private void assertIsUserAMonkey(boolean isMonkey) throws DeviceNotAvailableException {
-        String logs = mDevice.executeAdbCommand("logcat", "-d", "MonkeyActivity:I", "*:S");
+        String logs = mDevice.executeAdbCommand(
+                "logcat", "-v", "brief", "-d", "MonkeyActivity:I", "*:S");
         boolean monkeyLogsFound = false;
         Scanner s = new Scanner(logs);
         try {
diff --git a/hostsidetests/sample/src/android/sample/cts/SampleHostTest.java b/hostsidetests/sample/src/android/sample/cts/SampleHostTest.java
index 3cc4aa9..ab7e0b0 100644
--- a/hostsidetests/sample/src/android/sample/cts/SampleHostTest.java
+++ b/hostsidetests/sample/src/android/sample/cts/SampleHostTest.java
@@ -123,7 +123,7 @@
         // Start the APK and wait for it to complete.
         mDevice.executeShellCommand(START_COMMAND);
         // Dump logcat.
-        String logs = mDevice.executeAdbCommand("logcat", "-d", CLASS + ":I", "*:S");
+        String logs = mDevice.executeAdbCommand("logcat", "-v", "brief", "-d", CLASS + ":I", "*:S");
         // Search for string.
         String testString = "";
         Scanner in = new Scanner(logs);
diff --git a/hostsidetests/theme/src/android/theme/cts/ThemeHostTest.java b/hostsidetests/theme/src/android/theme/cts/ThemeHostTest.java
index 90a0c72..da94b15 100644
--- a/hostsidetests/theme/src/android/theme/cts/ThemeHostTest.java
+++ b/hostsidetests/theme/src/android/theme/cts/ThemeHostTest.java
@@ -322,7 +322,8 @@
         boolean waiting = true;
         while (waiting) {
             // Dump logcat.
-            final String logs = mDevice.executeAdbCommand("logcat", "-d", CLASS + ":I", "*:S");
+            final String logs = mDevice.executeAdbCommand(
+                    "logcat", "-v", "brief", "-d", CLASS + ":I", "*:S");
             // Search for string.
             final Scanner in = new Scanner(logs);
             while (in.hasNextLine()) {
diff --git a/hostsidetests/usb/src/com/android/cts/usb/TestUsbTest.java b/hostsidetests/usb/src/com/android/cts/usb/TestUsbTest.java
index 4736e51..3af52c0 100644
--- a/hostsidetests/usb/src/com/android/cts/usb/TestUsbTest.java
+++ b/hostsidetests/usb/src/com/android/cts/usb/TestUsbTest.java
@@ -40,13 +40,11 @@
  */
 public class TestUsbTest extends DeviceTestCase implements IAbiReceiver, IBuildReceiver {
 
-    private static final String LOG_TAG = "TestUsbTest";
     private static final String CTS_RUNNER = "android.support.test.runner.AndroidJUnitRunner";
     private static final String PACKAGE_NAME = "com.android.cts.usb.serialtest";
     private static final String APK_NAME="CtsUsbSerialTestApp.apk";
     private ITestDevice mDevice;
     private IAbi mAbi;
-    private String mAbiBitness;
     private CtsBuildHelper mBuild;
 
     @Override
@@ -118,7 +116,8 @@
         if (runResult.isRunFailure()) {
             fail(runResult.getRunFailureMessage());
         }
-        String logs = mDevice.executeAdbCommand("logcat", "-d", "CtsUsbSerialTest:W", "*:S");
+        String logs = mDevice.executeAdbCommand(
+                "logcat", "-v", "brief", "-d", "CtsUsbSerialTest:W", "*:S");
         pattern = Pattern.compile("^.*CtsUsbSerialTest\\(.*\\):\\s+([a-zA-Z0-9]{6,20})",
                 Pattern.MULTILINE);
         matcher = pattern.matcher(logs);
diff --git a/tests/core/runner/src/com/android/cts/runner/CtsTestRunListener.java b/tests/core/runner/src/com/android/cts/runner/CtsTestRunListener.java
index 5196df1..5f67475 100644
--- a/tests/core/runner/src/com/android/cts/runner/CtsTestRunListener.java
+++ b/tests/core/runner/src/com/android/cts/runner/CtsTestRunListener.java
@@ -35,6 +35,7 @@
 import java.net.CookieHandler;
 import java.net.ResponseCache;
 import java.util.Locale;
+import java.util.Properties;
 import java.util.TimeZone;
 
 import javax.net.ssl.HostnameVerifier;
@@ -57,7 +58,7 @@
 
     @Override
     public void testRunStarted(Description description) throws Exception {
-        mEnvironment = new TestEnvironment();
+        mEnvironment = new TestEnvironment(getInstrumentation().getContext());
 
         // We might want to move this to /sdcard, if is is mounted/writable.
         File cacheDir = getInstrumentation().getTargetContext().getCacheDir();
@@ -149,21 +150,28 @@
     static class TestEnvironment {
         private final Locale mDefaultLocale;
         private final TimeZone mDefaultTimeZone;
-        private final String mJavaIoTmpDir;
         private final HostnameVerifier mHostnameVerifier;
         private final SSLSocketFactory mSslSocketFactory;
+        private final Properties mProperties = new Properties();
 
-        TestEnvironment() {
+        TestEnvironment(Context context) {
             mDefaultLocale = Locale.getDefault();
             mDefaultTimeZone = TimeZone.getDefault();
-            mJavaIoTmpDir = System.getProperty("java.io.tmpdir");
             mHostnameVerifier = HttpsURLConnection.getDefaultHostnameVerifier();
             mSslSocketFactory = HttpsURLConnection.getDefaultSSLSocketFactory();
+
+            mProperties.setProperty("java.io.tmpdir", System.getProperty("java.io.tmpdir"));
+            // The CDD mandates that only devices that support WiFi will have
+            // multicast support.
+            PackageManager pm = context.getPackageManager();
+            mProperties.setProperty("android.cts.device.multicast",
+                    Boolean.toString(pm.hasSystemFeature(PackageManager.FEATURE_WIFI)));
+
         }
 
         void reset() {
             System.setProperties(null);
-            System.setProperty("java.io.tmpdir", mJavaIoTmpDir);
+            System.setProperties(mProperties);
             Locale.setDefault(mDefaultLocale);
             TimeZone.setDefault(mDefaultTimeZone);
             Authenticator.setDefault(null);
diff --git a/tests/signature/src/android/signature/cts/JDiffClassDescription.java b/tests/signature/src/android/signature/cts/JDiffClassDescription.java
index 7e36c1c..afcaa15 100644
--- a/tests/signature/src/android/signature/cts/JDiffClassDescription.java
+++ b/tests/signature/src/android/signature/cts/JDiffClassDescription.java
@@ -731,7 +731,7 @@
                     Type type = f.getGenericType();
                     if (type != null) {
                         genericTypeName = type instanceof Class ? ((Class) type).getName() :
-                            type.toString();
+                            type.toString().replace('$', '.');
                     }
                     if (genericTypeName == null || !genericTypeName.equals(field.mFieldType)) {
                         mResultObserver.notifyFailure(
diff --git a/tests/tests/accessibilityservice/src/android/accessibilityservice/cts/AccessibilityEndToEndTest.java b/tests/tests/accessibilityservice/src/android/accessibilityservice/cts/AccessibilityEndToEndTest.java
index 39b116a..b11248a 100644
--- a/tests/tests/accessibilityservice/src/android/accessibilityservice/cts/AccessibilityEndToEndTest.java
+++ b/tests/tests/accessibilityservice/src/android/accessibilityservice/cts/AccessibilityEndToEndTest.java
@@ -342,6 +342,7 @@
                             (NotificationManager) getActivity().getSystemService(
                                     Service.NOTIFICATION_SERVICE);
                         notificationManager.notify(notificationId, notification);
+                        getActivity().finish();
                     }
                 });
             }},
diff --git a/tests/tests/app/src/android/app/cts/DialogTest.java b/tests/tests/app/src/android/app/cts/DialogTest.java
index 56e731b..6df2eee 100644
--- a/tests/tests/app/src/android/app/cts/DialogTest.java
+++ b/tests/tests/app/src/android/app/cts/DialogTest.java
@@ -393,25 +393,28 @@
         d.isOnTouchEventCalled = false;
         assertTrue(d.isShowing());
 
-        // Send a touch event outside the activity.  This time the dialog will be dismissed
-        // because closeOnTouchOutside is true.
-        d.setCanceledOnTouchOutside(true);
+        // Watch activities cover the entire screen, so there is no way to touch outside.
+        if (!mContext.getPackageManager().hasSystemFeature(PackageManager.FEATURE_WATCH)) {
+            // Send a touch event outside the activity.  This time the dialog will be dismissed
+            // because closeOnTouchOutside is true.
+            d.setCanceledOnTouchOutside(true);
 
-        touchMotionEvent = MotionEvent.obtain(now, now + 1, MotionEvent.ACTION_DOWN,
-                1, 100, 0);
-        mInstrumentation.sendPointerSync(touchMotionEvent);
+            touchMotionEvent = MotionEvent.obtain(now, now + 1, MotionEvent.ACTION_DOWN,
+                    1, 100, 0);
+            mInstrumentation.sendPointerSync(touchMotionEvent);
 
-        new PollingCheck(TEST_TIMEOUT) {
-            protected boolean check() {
-                return d.dispatchTouchEventResult;
-            }
-        }.run();
+            new PollingCheck(TEST_TIMEOUT) {
+                protected boolean check() {
+                    return d.dispatchTouchEventResult;
+                }
+            }.run();
 
-        assertMotionEventEquals(touchMotionEvent, d.touchEvent);
+            assertMotionEventEquals(touchMotionEvent, d.touchEvent);
 
-        assertTrue(d.isOnTouchEventCalled);
-        assertMotionEventEquals(touchMotionEvent, d.onTouchEvent);
-        assertFalse(d.isShowing());
+            assertTrue(d.isOnTouchEventCalled);
+            assertMotionEventEquals(touchMotionEvent, d.onTouchEvent);
+            assertFalse(d.isShowing());
+        }
     }
 
     public void testTrackballEvent() {
diff --git a/tests/tests/app/src/android/app/cts/SystemFeaturesTest.java b/tests/tests/app/src/android/app/cts/SystemFeaturesTest.java
index 165e67b..620c51f 100644
--- a/tests/tests/app/src/android/app/cts/SystemFeaturesTest.java
+++ b/tests/tests/app/src/android/app/cts/SystemFeaturesTest.java
@@ -32,6 +32,9 @@
 import android.hardware.SensorManager;
 import android.hardware.Camera.CameraInfo;
 import android.hardware.Camera.Parameters;
+import android.hardware.camera2.CameraCharacteristics;
+import android.hardware.camera2.CameraManager;
+import android.hardware.camera2.CameraMetadata;
 import android.location.LocationManager;
 import android.net.sip.SipManager;
 import android.net.wifi.WifiManager;
@@ -59,6 +62,7 @@
     private SensorManager mSensorManager;
     private TelephonyManager mTelephonyManager;
     private WifiManager mWifiManager;
+    private CameraManager mCameraManager;
 
     @Override
     protected void setUp() throws Exception {
@@ -77,6 +81,7 @@
         mSensorManager = (SensorManager) mContext.getSystemService(Context.SENSOR_SERVICE);
         mTelephonyManager = (TelephonyManager) mContext.getSystemService(Context.TELEPHONY_SERVICE);
         mWifiManager = (WifiManager) mContext.getSystemService(Context.WIFI_SERVICE);
+        mCameraManager = (CameraManager) mContext.getSystemService(Context.CAMERA_SERVICE);
     }
 
     /**
@@ -106,7 +111,7 @@
         }
     }
 
-    public void testCameraFeatures() {
+    public void testCameraFeatures() throws Exception {
         int numCameras = Camera.getNumberOfCameras();
         if (numCameras == 0) {
             assertNotAvailable(PackageManager.FEATURE_CAMERA);
@@ -114,6 +119,11 @@
             assertNotAvailable(PackageManager.FEATURE_CAMERA_FLASH);
             assertNotAvailable(PackageManager.FEATURE_CAMERA_FRONT);
             assertNotAvailable(PackageManager.FEATURE_CAMERA_ANY);
+            assertNotAvailable(PackageManager.FEATURE_CAMERA_LEVEL_FULL);
+            assertNotAvailable(PackageManager.FEATURE_CAMERA_CAPABILITY_MANUAL_SENSOR);
+            assertNotAvailable(PackageManager.FEATURE_CAMERA_CAPABILITY_MANUAL_POST_PROCESSING);
+            assertNotAvailable(PackageManager.FEATURE_CAMERA_CAPABILITY_RAW);
+
             assertFalse("Devices supporting external cameras must have a representative camera " +
                     "connected for testing",
                     mPackageManager.hasSystemFeature(PackageManager.FEATURE_CAMERA_EXTERNAL));
@@ -121,9 +131,48 @@
             assertAvailable(PackageManager.FEATURE_CAMERA_ANY);
             checkFrontCamera();
             checkRearCamera();
+            checkCamera2Features();
         }
     }
 
+    private void checkCamera2Features() throws Exception {
+        String[] cameraIds = mCameraManager.getCameraIdList();
+        boolean fullCamera = false;
+        boolean manualSensor = false;
+        boolean manualPostProcessing = false;
+        boolean raw = false;
+        CameraCharacteristics[] cameraChars = new CameraCharacteristics[cameraIds.length];
+        for (String cameraId : cameraIds) {
+            CameraCharacteristics chars = mCameraManager.getCameraCharacteristics(cameraId);
+            Integer hwLevel = chars.get(CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL);
+            int[] capabilities = chars.get(CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES);
+            if (hwLevel == CameraMetadata.INFO_SUPPORTED_HARDWARE_LEVEL_FULL) {
+                fullCamera = true;
+            }
+            for (int capability : capabilities) {
+                switch (capability) {
+                    case CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR:
+                        manualSensor = true;
+                        break;
+                    case CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_MANUAL_POST_PROCESSING:
+                        manualPostProcessing = true;
+                        break;
+                    case CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_RAW:
+                        raw = true;
+                        break;
+                    default:
+                        // Capabilities don't have a matching system feature
+                        break;
+                }
+            }
+        }
+        assertFeature(fullCamera, PackageManager.FEATURE_CAMERA_LEVEL_FULL);
+        assertFeature(manualSensor, PackageManager.FEATURE_CAMERA_CAPABILITY_MANUAL_SENSOR);
+        assertFeature(manualPostProcessing,
+                PackageManager.FEATURE_CAMERA_CAPABILITY_MANUAL_POST_PROCESSING);
+        assertFeature(raw, PackageManager.FEATURE_CAMERA_CAPABILITY_RAW);
+    }
+
     private void checkFrontCamera() {
         CameraInfo info = new CameraInfo();
         int numCameras = Camera.getNumberOfCameras();
@@ -400,4 +449,12 @@
         assertFalse("PackageManager#getSystemAvailableFeatures should NOT have " + feature,
                 mAvailableFeatures.contains(feature));
     }
+
+    private void assertFeature(boolean exist, String feature) {
+        if (exist) {
+            assertAvailable(feature);
+        } else {
+            assertNotAvailable(feature);
+        }
+    }
 }
diff --git a/tests/tests/display/src/android/display/cts/VirtualDisplayTest.java b/tests/tests/display/src/android/display/cts/VirtualDisplayTest.java
index f2f859a..872de91 100644
--- a/tests/tests/display/src/android/display/cts/VirtualDisplayTest.java
+++ b/tests/tests/display/src/android/display/cts/VirtualDisplayTest.java
@@ -57,7 +57,7 @@
     private static final int WIDTH = 720;
     private static final int HEIGHT = 480;
     private static final int DENSITY = DisplayMetrics.DENSITY_MEDIUM;
-    private static final int TIMEOUT = 10000;
+    private static final int TIMEOUT = 40000;
 
     // Colors that we use as a signal to determine whether some desired content was
     // drawn.  The colors themselves doesn't matter but we choose them to have with distinct
diff --git a/tests/tests/hardware/AndroidManifest.xml b/tests/tests/hardware/AndroidManifest.xml
index ca148f9..1a02d0a 100644
--- a/tests/tests/hardware/AndroidManifest.xml
+++ b/tests/tests/hardware/AndroidManifest.xml
@@ -23,6 +23,7 @@
     <uses-permission android:name="android.permission.RECORD_AUDIO" />
     <uses-permission android:name="android.permission.WAKE_LOCK" />
     <uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE" />
+    <uses-permission android:name="android.permission.BODY_SENSORS" />
 
     <application>
         <uses-library android:name="android.test.runner" />
diff --git a/tests/tests/hardware/src/android/hardware/camera2/cts/CaptureRequestTest.java b/tests/tests/hardware/src/android/hardware/camera2/cts/CaptureRequestTest.java
index 5346ae1..ec2f95b 100644
--- a/tests/tests/hardware/src/android/hardware/camera2/cts/CaptureRequestTest.java
+++ b/tests/tests/hardware/src/android/hardware/camera2/cts/CaptureRequestTest.java
@@ -326,7 +326,13 @@
                 flashTestByAeMode(listener, CaptureRequest.CONTROL_AE_MODE_ON);
 
                 // LEGACY won't support AE mode OFF
-                if (mStaticInfo.isHardwareLevelLimitedOrBetter()) {
+                boolean aeOffModeSupported = false;
+                for (int aeMode : mStaticInfo.getAeAvailableModesChecked()) {
+                    if (aeMode == CaptureRequest.CONTROL_AE_MODE_OFF) {
+                        aeOffModeSupported = true;
+                    }
+                }
+                if (aeOffModeSupported) {
                     flashTestByAeMode(listener, CaptureRequest.CONTROL_AE_MODE_OFF);
                 }
 
diff --git a/tests/tests/hardware/src/android/hardware/camera2/cts/RecordingTest.java b/tests/tests/hardware/src/android/hardware/camera2/cts/RecordingTest.java
index 90cb18a..669de2d 100644
--- a/tests/tests/hardware/src/android/hardware/camera2/cts/RecordingTest.java
+++ b/tests/tests/hardware/src/android/hardware/camera2/cts/RecordingTest.java
@@ -78,6 +78,7 @@
     private static final int MAX_VIDEO_SNAPSHOT_IMAGES = 5;
     private static final int BURST_VIDEO_SNAPSHOT_NUM = 3;
     private static final int SLOWMO_SLOW_FACTOR = 4;
+    private static final int MAX_NUM_FRAME_DROP_ALLOWED = 4;
     private List<Size> mSupportedVideoSizes;
     private Surface mRecordingSurface;
     private MediaRecorder mMediaRecorder;
@@ -909,6 +910,15 @@
                 // Snapshots in legacy mode pause the preview briefly.  Skip the duration
                 // requirements for legacy mode unless this is fixed.
                 if (!mStaticInfo.isHardwareLevelLegacy()) {
+                    mCollector.expectTrue(
+                            String.format(
+                                    "Video %dx%d Frame drop detected before video snapshot: " +
+                                            "duration %dms (expected %dms)",
+                                    mVideoSize.getWidth(), mVideoSize.getHeight(),
+                                    durationMs, expectedDurationMs
+                            ),
+                            durationMs <= (expectedDurationMs * MAX_NUM_FRAME_DROP_ALLOWED)
+                    );
                     // Log a warning is there is any frame drop detected.
                     if (durationMs >= expectedDurationMs * 2) {
                         Log.w(TAG, String.format(
@@ -920,6 +930,15 @@
                     }
 
                     durationMs = (int) (nextTS - currentTS) / 1000000;
+                    mCollector.expectTrue(
+                            String.format(
+                                    "Video %dx%d Frame drop detected after video snapshot: " +
+                                            "duration %dms (expected %dms)",
+                                    mVideoSize.getWidth(), mVideoSize.getHeight(),
+                                    durationMs, expectedDurationMs
+                            ),
+                            durationMs <= (expectedDurationMs * MAX_NUM_FRAME_DROP_ALLOWED)
+                    );
                     // Log a warning is there is any frame drop detected.
                     if (durationMs >= expectedDurationMs * 2) {
                         Log.w(TAG, String.format(
diff --git a/tests/tests/hardware/src/android/hardware/camera2/cts/StaticMetadataCollectionTest.java b/tests/tests/hardware/src/android/hardware/camera2/cts/StaticMetadataCollectionTest.java
new file mode 100644
index 0000000..283f09b
--- /dev/null
+++ b/tests/tests/hardware/src/android/hardware/camera2/cts/StaticMetadataCollectionTest.java
@@ -0,0 +1,181 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.hardware.camera2.cts;
+
+import android.content.pm.PackageManager;
+import android.cts.util.DeviceReportLog;
+import android.hardware.camera2.cts.testcases.Camera2SurfaceViewTestCase;
+import android.hardware.camera2.cts.helpers.CameraMetadataGetter;
+import android.util.Log;
+
+import com.android.cts.util.ResultType;
+import com.android.cts.util.ResultUnit;
+
+import org.json.JSONArray;
+import org.json.JSONObject;
+
+import java.util.Iterator;
+
+/**
+ * This test collects camera2 API static metadata and reports it to the device report log.
+ *
+ */
+public class StaticMetadataCollectionTest extends Camera2SurfaceViewTestCase {
+    private static final String TAG = "StaticMetadataCollectionTest";
+
+    private DeviceReportLog mReportLog;
+
+    @Override
+    protected void setUp() throws Exception {
+        mReportLog = new DeviceReportLog();
+        super.setUp();
+    }
+
+    @Override
+    protected void tearDown() throws Exception {
+        // Delivering the report to the host will automatically clear the report log.
+        mReportLog.deliverReportToHost(getInstrumentation());
+        super.tearDown();
+    }
+
+    public void testDataCollection() {
+        if (hasCameraFeature()) {
+            CameraMetadataGetter cameraInfoGetter = new CameraMetadataGetter(mCameraManager);
+            for (String id : mCameraIds) {
+                // Gather camera info
+                JSONObject cameraInfo = cameraInfoGetter.getCameraInfo(id);
+                dumpJsonObjectAsCtsResult(String.format("camera2_id%s_static_info", id), cameraInfo);
+                dumpDoubleAsCtsResult(String.format("camera2_id%s_static_info:", id)
+                        + cameraInfo.toString(), 0);
+
+                JSONObject[] templates = cameraInfoGetter.getCaptureRequestTemplates(id);
+                for (int i = 0; i < templates.length; i++) {
+                    dumpJsonObjectAsCtsResult(String.format("camera2_id%s_capture_template%d",
+                            id, CameraMetadataGetter.TEMPLATE_IDS[i]), templates[i]);
+                    if (templates[i] != null) {
+                        dumpDoubleAsCtsResult(String.format("camera2_id%s_capture_template%d:",
+                                id, CameraMetadataGetter.TEMPLATE_IDS[i])
+                                + templates[i].toString(), 0);
+                    }
+                }
+            }
+
+            try {
+                cameraInfoGetter.close();
+            } catch (Exception e) {
+                Log.e(TAG, "Unable to close camera info getter " + e.getMessage());
+            }
+
+            mReportLog.printSummary("Camera data collection for static info and capture request"
+                    + " templates",
+                    0.0, ResultType.NEUTRAL, ResultUnit.NONE);
+        }
+
+    }
+
+    private void dumpDoubleAsCtsResult(String name, double value) {
+        mReportLog.printValue(name, value, ResultType.NEUTRAL, ResultUnit.NONE);
+    }
+
+    public void dumpDoubleArrayAsCtsResult(String name, double[] values) {
+        mReportLog.printArray(name, values, ResultType.NEUTRAL, ResultUnit.NONE);
+    }
+
+    private double getJsonValueAsDouble(String name, Object obj) throws Exception {
+        if (obj == null) {
+            Log.e(TAG, "Null value: " + name);
+            throw new Exception();
+        } else if (obj instanceof Double) {
+            return ((Double)obj).doubleValue();
+        } else if (obj instanceof Float) {
+            return ((Float)obj).floatValue();
+        } else if (obj instanceof Long) {
+            return ((Long)obj).longValue();
+        } else if (obj instanceof Integer) {
+            return ((Integer)obj).intValue();
+        } else if (obj instanceof Byte) {
+            return ((Byte)obj).intValue();
+        } else if (obj instanceof Short) {
+            return ((Short)obj).intValue();
+        } else if (obj instanceof Boolean) {
+            return ((Boolean)obj) ? 1 : 0;
+        } else {
+            Log.e(TAG, "Unsupported value type: " + name);
+            throw new Exception();
+        }
+    }
+
+    private void dumpJsonArrayAsCtsResult(String name, JSONArray arr) throws Exception {
+        if (arr == null || arr.length() == 0) {
+            dumpDoubleAsCtsResult(name + "[]", 0);
+        } else if (arr.get(0) instanceof JSONObject) {
+            for (int i = 0; i < arr.length(); i++) {
+                dumpJsonObjectAsCtsResult(name+String.format("[%04d]",i),(JSONObject)arr.get(i));
+            }
+        } else if (arr.get(0) instanceof JSONArray) {
+            for (int i = 0; i < arr.length(); i++) {
+                dumpJsonArrayAsCtsResult(name+String.format("[%04d]",i),(JSONArray)arr.get(i));
+            }
+        } else if (!(arr.get(0) instanceof String)) {
+            double[] values = new double[arr.length()];
+            for (int i = 0; i < arr.length(); i++) {
+                values[i] = getJsonValueAsDouble(name + "[]", arr.get(i));
+            }
+            dumpDoubleArrayAsCtsResult(name + "[]", values);
+        } else if (arr.get(0) instanceof String) {
+            for (int i = 0; i < arr.length(); i++) {
+                dumpDoubleAsCtsResult(
+                        name+String.format("[%04d]",i)+" = "+(String)arr.get(i), 0);
+            }
+        } else {
+            Log.e(TAG, "Unsupported array value type: " + name);
+            throw new Exception();
+        }
+    }
+
+    private void dumpJsonObjectAsCtsResult(String name, JSONObject obj) {
+        if (obj == null) {
+            dumpDoubleAsCtsResult(name + "{}", 0);
+            return;
+        }
+        Iterator<?> keys = obj.keys();
+        while (keys.hasNext()) {
+            try {
+                String key = (String)keys.next();
+                if (obj.get(key) instanceof JSONObject) {
+                    dumpJsonObjectAsCtsResult(name+"."+key, (JSONObject)obj.get(key));
+                } else if (obj.get(key) instanceof JSONArray) {
+                    dumpJsonArrayAsCtsResult(name+"."+key, (JSONArray)obj.get(key));
+                } else if (!(obj.get(key) instanceof String)) {
+                    dumpDoubleAsCtsResult(name+"."+key,
+                            getJsonValueAsDouble(name+"."+key, obj.get(key)));
+                } else if (obj.get(key) instanceof String) {
+                    dumpDoubleAsCtsResult(name+"."+key + " = " + (String)obj.get(key), 0);
+                } else {
+                    Log.e(TAG, "Unsupported object field type: " + name + "." + key);
+                }
+            } catch (Exception e) {
+                // Swallow
+            }
+        }
+    }
+
+    private boolean hasCameraFeature() {
+        PackageManager packageManager = getActivity().getPackageManager();
+        return packageManager.hasSystemFeature(PackageManager.FEATURE_CAMERA_ANY);
+    }
+}
diff --git a/tests/tests/hardware/src/android/hardware/camera2/cts/StillCaptureTest.java b/tests/tests/hardware/src/android/hardware/camera2/cts/StillCaptureTest.java
index f18a1cf..e816659 100644
--- a/tests/tests/hardware/src/android/hardware/camera2/cts/StillCaptureTest.java
+++ b/tests/tests/hardware/src/android/hardware/camera2/cts/StillCaptureTest.java
@@ -291,6 +291,12 @@
             try {
                 Log.i(TAG, "Testing AE compensation for Camera " + id);
                 openDevice(id);
+
+                if (mStaticInfo.isHardwareLevelLegacy()) {
+                    Log.i(TAG, "Skipping test on legacy devices");
+                    continue;
+                }
+
                 aeCompensationTestByCamera();
             } finally {
                 closeDevice();
diff --git a/tests/tests/hardware/src/android/hardware/camera2/cts/helpers/CameraMetadataGetter.java b/tests/tests/hardware/src/android/hardware/camera2/cts/helpers/CameraMetadataGetter.java
new file mode 100755
index 0000000..db75cdd
--- /dev/null
+++ b/tests/tests/hardware/src/android/hardware/camera2/cts/helpers/CameraMetadataGetter.java
@@ -0,0 +1,690 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.hardware.camera2.cts.helpers;
+
+import static com.android.ex.camera2.blocking.BlockingStateCallback.*;
+
+import android.graphics.Point;
+import android.graphics.Rect;
+import android.hardware.camera2.CameraAccessException;
+import android.hardware.camera2.CameraCharacteristics;
+import android.hardware.camera2.CameraDevice;
+import android.hardware.camera2.CameraManager;
+import android.hardware.camera2.CameraMetadata;
+import android.hardware.camera2.CaptureResult;
+import android.hardware.camera2.CaptureRequest;
+import android.hardware.camera2.TotalCaptureResult;
+import android.hardware.camera2.params.BlackLevelPattern;
+import android.hardware.camera2.params.ColorSpaceTransform;
+import android.hardware.camera2.params.Face;
+import android.hardware.camera2.params.LensShadingMap;
+import android.hardware.camera2.params.MeteringRectangle;
+import android.hardware.camera2.params.RggbChannelVector;
+import android.hardware.camera2.params.StreamConfigurationMap;
+import android.hardware.camera2.params.TonemapCurve;
+import android.location.Location;
+import android.os.Handler;
+import android.os.HandlerThread;
+import android.util.Log;
+import android.util.Pair;
+import android.util.Rational;
+import android.util.Size;
+import android.util.SizeF;
+import android.util.Range;
+
+import com.android.ex.camera2.blocking.BlockingCameraManager;
+import com.android.ex.camera2.blocking.BlockingCameraManager.BlockingOpenException;
+import com.android.ex.camera2.blocking.BlockingStateCallback;
+
+import org.json.JSONArray;
+import org.json.JSONObject;
+
+import java.lang.reflect.Array;
+import java.lang.reflect.Field;
+import java.lang.reflect.GenericArrayType;
+import java.lang.reflect.Modifier;
+import java.lang.reflect.ParameterizedType;
+import java.lang.reflect.Type;
+
+/**
+ * Utility class to dump the camera metadata.
+ */
+public final class CameraMetadataGetter implements AutoCloseable {
+    private static final String TAG = CameraMetadataGetter.class.getSimpleName();
+    private static final int CAMERA_CLOSE_TIMEOUT_MS = 5000;
+    public static final int[] TEMPLATE_IDS = {
+        CameraDevice.TEMPLATE_PREVIEW,
+        CameraDevice.TEMPLATE_STILL_CAPTURE,
+        CameraDevice.TEMPLATE_RECORD,
+        CameraDevice.TEMPLATE_VIDEO_SNAPSHOT,
+        CameraDevice.TEMPLATE_ZERO_SHUTTER_LAG,
+        CameraDevice.TEMPLATE_MANUAL,
+    };
+    private CameraManager mCameraManager;
+    private BlockingStateCallback mCameraListener;
+    private HandlerThread mHandlerThread;
+    private Handler mHandler;
+
+    private static class MetadataEntry {
+        public MetadataEntry(String k, Object v) {
+            key = k;
+            value = v;
+        }
+
+        public String key;
+        public Object value;
+    }
+
+    public CameraMetadataGetter(CameraManager cameraManager) {
+        if (cameraManager == null) {
+            throw new IllegalArgumentException("can not create an CameraMetadataGetter object"
+                    + " with null CameraManager");
+        }
+
+        mCameraManager = cameraManager;
+
+        mCameraListener = new BlockingStateCallback();
+        mHandlerThread = new HandlerThread(TAG);
+        mHandlerThread.start();
+        mHandler = new Handler(mHandlerThread.getLooper());
+    }
+
+    public String getCameraInfo() {
+        StringBuffer cameraInfo = new StringBuffer("{\"CameraStaticMetadata\":{");
+        CameraCharacteristics staticMetadata;
+        String[] cameraIds;
+        try {
+            cameraIds = mCameraManager.getCameraIdList();
+        } catch (CameraAccessException e) {
+            Log.e(TAG, "Unable to get camera ids, skip this info, error: " + e.getMessage());
+            return "";
+        }
+        for (String id : cameraIds) {
+            String value = null;
+            try {
+                staticMetadata = mCameraManager.getCameraCharacteristics(id);
+                value = serialize(staticMetadata).toString();
+            } catch (CameraAccessException e) {
+                Log.e(TAG,
+                        "Unable to get camera camera static info, skip this camera, error: "
+                                + e.getMessage());
+            }
+            cameraInfo.append("\"camera" + id + "\":"); // Key
+            cameraInfo.append(value); // Value
+            // If not last, print "," // Separator
+            if (!id.equals(cameraIds[cameraIds.length - 1])) {
+                cameraInfo.append(",");
+            }
+        }
+        cameraInfo.append("}}");
+
+        return cameraInfo.toString();
+    }
+
+    public JSONObject getCameraInfo(String cameraId) {
+        JSONObject staticMetadata = null;
+        try {
+            staticMetadata = serialize(mCameraManager.getCameraCharacteristics(cameraId));
+        } catch (CameraAccessException e) {
+            Log.e(TAG,
+                    "Unable to get camera camera static info, skip this camera, error: "
+                            + e.getMessage());
+        }
+        return staticMetadata;
+    }
+
+    public JSONObject[] getCaptureRequestTemplates(String cameraId) {
+        JSONObject[] templates = new JSONObject[TEMPLATE_IDS.length];
+        CameraDevice camera = null;
+        try {
+            camera = (new BlockingCameraManager(mCameraManager)).openCamera(cameraId,
+                            mCameraListener, mHandler);
+            for (int i = 0; i < TEMPLATE_IDS.length; i++) {
+                CaptureRequest.Builder request;
+                try {
+                    request = camera.createCaptureRequest(TEMPLATE_IDS[i]);
+                    templates[i] = serialize(request.build());
+                } catch (Exception e) {
+                    Log.e(TAG, "Unable to create template " + TEMPLATE_IDS[i]
+                                    + " because of error " + e.getMessage());
+                    templates[i] = null;
+                }
+            }
+            return templates;
+        } catch (CameraAccessException | BlockingOpenException e) {
+            Log.e(TAG, "Unable to open camera " + cameraId + " because of error "
+                            + e.getMessage());
+            return new JSONObject[0];
+        } finally {
+            if (camera != null) {
+                camera.close();
+            }
+        }
+    }
+
+    public String getCaptureRequestTemplates() {
+        StringBuffer templates = new StringBuffer("{\"CameraRequestTemplates\":{");
+        String[] cameraIds;
+        try {
+            cameraIds = mCameraManager.getCameraIdList();
+        } catch (CameraAccessException e) {
+            Log.e(TAG, "Unable to get camera ids, skip this info, error: " + e.getMessage());
+            return "";
+        }
+        CameraDevice camera = null;
+        for (String id : cameraIds) {
+            try {
+                try {
+                    camera = (new BlockingCameraManager(mCameraManager)).openCamera(id,
+                                    mCameraListener, mHandler);
+                } catch (CameraAccessException | BlockingOpenException e) {
+                    Log.e(TAG, "Unable to open camera " + id + " because of error "
+                                    + e.getMessage());
+                    continue;
+                }
+
+                for (int i = 0; i < TEMPLATE_IDS.length; i++) {
+                    String value = null;
+                    CaptureRequest.Builder request;
+                    try {
+                        request = camera.createCaptureRequest(TEMPLATE_IDS[i]);
+                        value = serialize(request.build()).toString();
+                    } catch (Exception e) {
+                        Log.e(TAG, "Unable to create template " + TEMPLATE_IDS[i]
+                                        + " because of error " + e.getMessage());
+                    }
+                    templates.append("\"Camera" + id + "CaptureTemplate" +
+                                    TEMPLATE_IDS[i] + "\":");
+                    templates.append(value);
+                    if (!id.equals(cameraIds[cameraIds.length - 1]) ||
+                                    i < (TEMPLATE_IDS.length - 1)) {
+                        templates.append(",");
+                    }
+                }
+            } finally {
+                if (camera != null) {
+                    camera.close();
+                    mCameraListener.waitForState(STATE_CLOSED, CAMERA_CLOSE_TIMEOUT_MS);
+                }
+            }
+        }
+
+        templates.append("}}");
+        return templates.toString();
+    }
+
+    /*
+     * Clean up the resources.
+     */
+    @Override
+    public void close() throws Exception {
+        mHandlerThread.quitSafely();
+    }
+
+    @Override
+    protected void finalize() throws Throwable {
+        try {
+            close();
+        } finally {
+            super.finalize();
+        }
+    }
+
+    @SuppressWarnings("unchecked")
+    private static Object serializeRational(Rational rat) throws org.json.JSONException {
+        JSONObject ratObj = new JSONObject();
+        ratObj.put("numerator", rat.getNumerator());
+        ratObj.put("denominator", rat.getDenominator());
+        return ratObj;
+    }
+
+    @SuppressWarnings("unchecked")
+    private static Object serializeSize(Size size) throws org.json.JSONException {
+        JSONObject sizeObj = new JSONObject();
+        sizeObj.put("width", size.getWidth());
+        sizeObj.put("height", size.getHeight());
+        return sizeObj;
+    }
+
+    @SuppressWarnings("unchecked")
+    private static Object serializeSizeF(SizeF size) throws org.json.JSONException {
+        JSONObject sizeObj = new JSONObject();
+        sizeObj.put("width", size.getWidth());
+        sizeObj.put("height", size.getHeight());
+        return sizeObj;
+    }
+
+    @SuppressWarnings("unchecked")
+    private static Object serializeRect(Rect rect) throws org.json.JSONException {
+        JSONObject rectObj = new JSONObject();
+        rectObj.put("left", rect.left);
+        rectObj.put("right", rect.right);
+        rectObj.put("top", rect.top);
+        rectObj.put("bottom", rect.bottom);
+        return rectObj;
+    }
+
+    private static Object serializePoint(Point point) throws org.json.JSONException {
+        JSONObject pointObj = new JSONObject();
+        pointObj.put("x", point.x);
+        pointObj.put("y", point.y);
+        return pointObj;
+    }
+
+    @SuppressWarnings("unchecked")
+    private static Object serializeFace(Face face)
+                    throws org.json.JSONException {
+        JSONObject faceObj = new JSONObject();
+        faceObj.put("bounds", serializeRect(face.getBounds()));
+        faceObj.put("score", face.getScore());
+        faceObj.put("id", face.getId());
+        faceObj.put("leftEye", serializePoint(face.getLeftEyePosition()));
+        faceObj.put("rightEye", serializePoint(face.getRightEyePosition()));
+        faceObj.put("mouth", serializePoint(face.getMouthPosition()));
+        return faceObj;
+    }
+
+    @SuppressWarnings("unchecked")
+    private static Object serializeStreamConfigurationMap(
+                    StreamConfigurationMap map)
+                    throws org.json.JSONException {
+        // TODO: Serialize the rest of the StreamConfigurationMap fields.
+        JSONObject mapObj = new JSONObject();
+        JSONArray cfgArray = new JSONArray();
+        int fmts[] = map.getOutputFormats();
+        if (fmts != null) {
+            for (int fi = 0; fi < Array.getLength(fmts); fi++) {
+                Size sizes[] = map.getOutputSizes(fmts[fi]);
+                if (sizes != null) {
+                    for (int si = 0; si < Array.getLength(sizes); si++) {
+                        JSONObject obj = new JSONObject();
+                        obj.put("format", fmts[fi]);
+                        obj.put("width", sizes[si].getWidth());
+                        obj.put("height", sizes[si].getHeight());
+                        obj.put("input", false);
+                        obj.put("minFrameDuration",
+                                        map.getOutputMinFrameDuration(fmts[fi], sizes[si]));
+                        cfgArray.put(obj);
+                    }
+                }
+            }
+        }
+        mapObj.put("availableStreamConfigurations", cfgArray);
+        return mapObj;
+    }
+
+    @SuppressWarnings("unchecked")
+    private static Object serializeMeteringRectangle(MeteringRectangle rect)
+                    throws org.json.JSONException {
+        JSONObject rectObj = new JSONObject();
+        rectObj.put("x", rect.getX());
+        rectObj.put("y", rect.getY());
+        rectObj.put("width", rect.getWidth());
+        rectObj.put("height", rect.getHeight());
+        rectObj.put("weight", rect.getMeteringWeight());
+        return rectObj;
+    }
+
+    @SuppressWarnings("unchecked")
+    private static Object serializePair(Pair pair)
+                    throws org.json.JSONException {
+        JSONArray pairObj = new JSONArray();
+        pairObj.put(pair.first);
+        pairObj.put(pair.second);
+        return pairObj;
+    }
+
+    @SuppressWarnings("unchecked")
+    private static Object serializeRange(Range range)
+                    throws org.json.JSONException {
+        JSONArray rangeObj = new JSONArray();
+        rangeObj.put(range.getLower());
+        rangeObj.put(range.getUpper());
+        return rangeObj;
+    }
+
+    @SuppressWarnings("unchecked")
+    private static Object serializeColorSpaceTransform(ColorSpaceTransform xform)
+                    throws org.json.JSONException {
+        JSONArray xformObj = new JSONArray();
+        for (int row = 0; row < 3; row++) {
+            for (int col = 0; col < 3; col++) {
+                xformObj.put(serializeRational(xform.getElement(col, row)));
+            }
+        }
+        return xformObj;
+    }
+
+    @SuppressWarnings("unchecked")
+    private static Object serializeTonemapCurve(TonemapCurve curve)
+                    throws org.json.JSONException {
+        JSONObject curveObj = new JSONObject();
+        String names[] = {
+                        "red", "green", "blue" };
+        for (int ch = 0; ch < 3; ch++) {
+            JSONArray curveArr = new JSONArray();
+            int len = curve.getPointCount(ch);
+            for (int i = 0; i < len; i++) {
+                curveArr.put(curve.getPoint(ch, i).x);
+                curveArr.put(curve.getPoint(ch, i).y);
+            }
+            curveObj.put(names[ch], curveArr);
+        }
+        return curveObj;
+    }
+
+    @SuppressWarnings("unchecked")
+    private static Object serializeRggbChannelVector(RggbChannelVector vec)
+                    throws org.json.JSONException {
+        JSONArray vecObj = new JSONArray();
+        vecObj.put(vec.getRed());
+        vecObj.put(vec.getGreenEven());
+        vecObj.put(vec.getGreenOdd());
+        vecObj.put(vec.getBlue());
+        return vecObj;
+    }
+
+    @SuppressWarnings("unchecked")
+    private static Object serializeBlackLevelPattern(BlackLevelPattern pat)
+                    throws org.json.JSONException {
+        int patVals[] = new int[4];
+        pat.copyTo(patVals, 0);
+        JSONArray patObj = new JSONArray();
+        patObj.put(patVals[0]);
+        patObj.put(patVals[1]);
+        patObj.put(patVals[2]);
+        patObj.put(patVals[3]);
+        return patObj;
+    }
+
+    @SuppressWarnings("unchecked")
+    private static Object serializeLocation(Location loc)
+                    throws org.json.JSONException {
+        return loc.toString();
+    }
+
+    @SuppressWarnings("unchecked")
+    private static Object serializeLensShadingMap(LensShadingMap map)
+            throws org.json.JSONException {
+        JSONArray mapObj = new JSONArray();
+        for (int row = 0; row < map.getRowCount(); row++) {
+            for (int col = 0; col < map.getColumnCount(); col++) {
+                for (int ch = 0; ch < 4; ch++) {
+                    mapObj.put(map.getGainFactor(ch, col, row));
+                }
+            }
+        }
+        return mapObj;
+    }
+
+    private static String getKeyName(Object keyObj) {
+        if (keyObj.getClass() == CaptureResult.Key.class
+                || keyObj.getClass() == TotalCaptureResult.class) {
+            return ((CaptureResult.Key) keyObj).getName();
+        } else if (keyObj.getClass() == CaptureRequest.Key.class) {
+            return ((CaptureRequest.Key) keyObj).getName();
+        } else if (keyObj.getClass() == CameraCharacteristics.Key.class) {
+            return ((CameraCharacteristics.Key) keyObj).getName();
+        }
+
+        throw new IllegalArgumentException("Invalid key object");
+    }
+
+    private static Object getKeyValue(CameraMetadata md, Object keyObj) {
+        if (md.getClass() == CaptureResult.class || md.getClass() == TotalCaptureResult.class) {
+            return ((CaptureResult) md).get((CaptureResult.Key) keyObj);
+        } else if (md.getClass() == CaptureRequest.class) {
+            return ((CaptureRequest) md).get((CaptureRequest.Key) keyObj);
+        } else if (md.getClass() == CameraCharacteristics.class) {
+            return ((CameraCharacteristics) md).get((CameraCharacteristics.Key) keyObj);
+        }
+
+        throw new IllegalArgumentException("Invalid key object");
+    }
+
+    @SuppressWarnings("unchecked")
+    private static MetadataEntry serializeEntry(Type keyType, Object keyObj, CameraMetadata md) {
+        String keyName = getKeyName(keyObj);
+
+        try {
+            Object keyValue = getKeyValue(md, keyObj);
+            if (keyValue == null) {
+                return new MetadataEntry(keyName, JSONObject.NULL);
+            } else if (keyType == Float.class) {
+                // The JSON serializer doesn't handle floating point NaN or Inf.
+                if (((Float) keyValue).isInfinite() || ((Float) keyValue).isNaN()) {
+                    Log.w(TAG, "Inf/NaN floating point value serialized: " + keyName);
+                    return null;
+                }
+                return new MetadataEntry(keyName, keyValue);
+            } else if (keyType == Integer.class || keyType == Long.class || keyType == Byte.class ||
+                    keyType == Boolean.class || keyType == String.class) {
+                return new MetadataEntry(keyName, keyValue);
+            } else if (keyType == Rational.class) {
+                return new MetadataEntry(keyName, serializeRational((Rational) keyValue));
+            } else if (keyType == Size.class) {
+                return new MetadataEntry(keyName, serializeSize((Size) keyValue));
+            } else if (keyType == SizeF.class) {
+                return new MetadataEntry(keyName, serializeSizeF((SizeF) keyValue));
+            } else if (keyType == Rect.class) {
+                return new MetadataEntry(keyName, serializeRect((Rect) keyValue));
+            } else if (keyType == Face.class) {
+                return new MetadataEntry(keyName, serializeFace((Face) keyValue));
+            } else if (keyType == StreamConfigurationMap.class) {
+                return new MetadataEntry(keyName,
+                        serializeStreamConfigurationMap((StreamConfigurationMap) keyValue));
+            } else if (keyType instanceof ParameterizedType &&
+                    ((ParameterizedType) keyType).getRawType() == Range.class) {
+                return new MetadataEntry(keyName, serializeRange((Range) keyValue));
+            } else if (keyType == ColorSpaceTransform.class) {
+                return new MetadataEntry(keyName,
+                        serializeColorSpaceTransform((ColorSpaceTransform) keyValue));
+            } else if (keyType == MeteringRectangle.class) {
+                return new MetadataEntry(keyName,
+                        serializeMeteringRectangle((MeteringRectangle) keyValue));
+            } else if (keyType == Location.class) {
+                return new MetadataEntry(keyName,
+                        serializeLocation((Location) keyValue));
+            } else if (keyType == RggbChannelVector.class) {
+                return new MetadataEntry(keyName,
+                        serializeRggbChannelVector((RggbChannelVector) keyValue));
+            } else if (keyType == BlackLevelPattern.class) {
+                return new MetadataEntry(keyName,
+                        serializeBlackLevelPattern((BlackLevelPattern) keyValue));
+            } else if (keyType == TonemapCurve.class) {
+                return new MetadataEntry(keyName,
+                        serializeTonemapCurve((TonemapCurve) keyValue));
+            } else if (keyType == Point.class) {
+                return new MetadataEntry(keyName,
+                        serializePoint((Point) keyValue));
+            } else if (keyType == LensShadingMap.class) {
+                return new MetadataEntry(keyName,
+                        serializeLensShadingMap((LensShadingMap) keyValue));
+            } else {
+                Log.w(TAG, String.format("Serializing unsupported key type: " + keyType));
+                return null;
+            }
+        } catch (org.json.JSONException e) {
+            throw new IllegalStateException("JSON error for key: " + keyName + ": ", e);
+        }
+    }
+
+    @SuppressWarnings("unchecked")
+    private static MetadataEntry serializeArrayEntry(Type keyType, Object keyObj,
+            CameraMetadata md) {
+        String keyName = getKeyName(keyObj);
+        try {
+            Object keyValue = getKeyValue(md, keyObj);
+            if (keyValue == null) {
+                return new MetadataEntry(keyName, JSONObject.NULL);
+            }
+            int arrayLen = Array.getLength(keyValue);
+            Type elmtType = ((GenericArrayType) keyType).getGenericComponentType();
+            if (elmtType == int.class || elmtType == float.class || elmtType == byte.class ||
+                    elmtType == long.class || elmtType == double.class
+                    || elmtType == boolean.class) {
+                return new MetadataEntry(keyName, new JSONArray(keyValue));
+            } else if (elmtType == Rational.class) {
+                JSONArray jsonArray = new JSONArray();
+                for (int i = 0; i < arrayLen; i++) {
+                    jsonArray.put(serializeRational((Rational) Array.get(keyValue, i)));
+                }
+                return new MetadataEntry(keyName, jsonArray);
+            } else if (elmtType == Size.class) {
+                JSONArray jsonArray = new JSONArray();
+                for (int i = 0; i < arrayLen; i++) {
+                    jsonArray.put(serializeSize((Size) Array.get(keyValue, i)));
+                }
+                return new MetadataEntry(keyName, jsonArray);
+            } else if (elmtType == Rect.class) {
+                JSONArray jsonArray = new JSONArray();
+                for (int i = 0; i < arrayLen; i++) {
+                    jsonArray.put(serializeRect((Rect) Array.get(keyValue, i)));
+                }
+                return new MetadataEntry(keyName, jsonArray);
+            } else if (elmtType == Face.class) {
+                JSONArray jsonArray = new JSONArray();
+                for (int i = 0; i < arrayLen; i++) {
+                    jsonArray.put(serializeFace((Face) Array.get(keyValue, i)));
+                }
+                return new MetadataEntry(keyName, jsonArray);
+            } else if (elmtType == StreamConfigurationMap.class) {
+                JSONArray jsonArray = new JSONArray();
+                for (int i = 0; i < arrayLen; i++) {
+                    jsonArray.put(serializeStreamConfigurationMap(
+                            (StreamConfigurationMap) Array.get(keyValue, i)));
+                }
+                return new MetadataEntry(keyName, jsonArray);
+            } else if (elmtType instanceof ParameterizedType &&
+                    ((ParameterizedType) elmtType).getRawType() == Range.class) {
+                JSONArray jsonArray = new JSONArray();
+                for (int i = 0; i < arrayLen; i++) {
+                    jsonArray.put(serializeRange((Range) Array.get(keyValue, i)));
+                }
+                return new MetadataEntry(keyName, jsonArray);
+            } else if (elmtType instanceof ParameterizedType &&
+                    ((ParameterizedType) elmtType).getRawType() == Pair.class) {
+                JSONArray jsonArray = new JSONArray();
+                for (int i = 0; i < arrayLen; i++) {
+                    jsonArray.put(serializePair((Pair) Array.get(keyValue, i)));
+                }
+                return new MetadataEntry(keyName, jsonArray);
+            } else if (elmtType == MeteringRectangle.class) {
+                JSONArray jsonArray = new JSONArray();
+                for (int i = 0; i < arrayLen; i++) {
+                    jsonArray.put(serializeMeteringRectangle(
+                            (MeteringRectangle) Array.get(keyValue, i)));
+                }
+                return new MetadataEntry(keyName, jsonArray);
+            } else if (elmtType == Location.class) {
+                JSONArray jsonArray = new JSONArray();
+                for (int i = 0; i < arrayLen; i++) {
+                    jsonArray.put(serializeLocation((Location) Array.get(keyValue, i)));
+                }
+                return new MetadataEntry(keyName, jsonArray);
+            } else if (elmtType == RggbChannelVector.class) {
+                JSONArray jsonArray = new JSONArray();
+                for (int i = 0; i < arrayLen; i++) {
+                    jsonArray.put(serializeRggbChannelVector(
+                            (RggbChannelVector) Array.get(keyValue, i)));
+                }
+                return new MetadataEntry(keyName, jsonArray);
+            } else if (elmtType == BlackLevelPattern.class) {
+                JSONArray jsonArray = new JSONArray();
+                for (int i = 0; i < arrayLen; i++) {
+                    jsonArray.put(serializeBlackLevelPattern(
+                            (BlackLevelPattern) Array.get(keyValue, i)));
+                }
+                return new MetadataEntry(keyName, jsonArray);
+            } else if (elmtType == Point.class) {
+                JSONArray jsonArray = new JSONArray();
+                for (int i = 0; i < arrayLen; i++) {
+                    jsonArray.put(serializePoint((Point) Array.get(keyValue, i)));
+                }
+                return new MetadataEntry(keyName, jsonArray);
+            } else {
+                Log.w(TAG, String.format("Serializing unsupported array type: " + elmtType));
+                return null;
+            }
+        } catch (org.json.JSONException e) {
+            throw new IllegalStateException("JSON error for key: " + keyName + ": ", e);
+        }
+    }
+
+    @SuppressWarnings("unchecked")
+    private static JSONObject serialize(CameraMetadata md) {
+        JSONObject jsonObj = new JSONObject();
+        Field[] allFields = md.getClass().getDeclaredFields();
+        if (md.getClass() == TotalCaptureResult.class) {
+            allFields = CaptureResult.class.getDeclaredFields();
+        }
+        for (Field field : allFields) {
+            if (Modifier.isPublic(field.getModifiers()) &&
+                    Modifier.isStatic(field.getModifiers()) &&
+                            (field.getType() == CaptureRequest.Key.class
+                            || field.getType() == CaptureResult.Key.class
+                            || field.getType() == TotalCaptureResult.Key.class
+                            || field.getType() == CameraCharacteristics.Key.class)
+                    &&
+                    field.getGenericType() instanceof ParameterizedType) {
+                ParameterizedType paramType = (ParameterizedType) field.getGenericType();
+                Type[] argTypes = paramType.getActualTypeArguments();
+                if (argTypes.length > 0) {
+                    try {
+                        Type keyType = argTypes[0];
+                        Object keyObj = field.get(md);
+                        MetadataEntry entry;
+                        if (keyType instanceof GenericArrayType) {
+                            entry = serializeArrayEntry(keyType, keyObj, md);
+                        } else {
+                            entry = serializeEntry(keyType, keyObj, md);
+                        }
+
+                        // TODO: Figure this weird case out.
+                        // There is a weird case where the entry is non-null but
+                        // the toString
+                        // of the entry is null, and if this happens, the
+                        // null-ness spreads like
+                        // a virus and makes the whole JSON object null from the
+                        // top level down.
+                        // Not sure if it's a bug in the library or I'm just not
+                        // using it right.
+                        // Workaround by checking for this case explicitly and
+                        // not adding the
+                        // value to the jsonObj when it is detected.
+                        if (entry != null && entry.key != null && entry.value != null
+                                && entry.value.toString() == null) {
+                            Log.w(TAG, "Error encountered serializing value for key: "
+                                    + entry.key);
+                        } else if (entry != null) {
+                            jsonObj.put(entry.key, entry.value);
+                        } else {
+                            // Ignore.
+                        }
+                    } catch (IllegalAccessException e) {
+                        throw new IllegalStateException(
+                                "Access error for field: " + field + ": ", e);
+                    } catch (org.json.JSONException e) {
+                        throw new IllegalStateException(
+                                "JSON error for field: " + field + ": ", e);
+                    }
+                }
+            }
+        }
+        return jsonObj;
+    }
+}
diff --git a/tests/tests/media/src/android/media/cts/AdaptivePlaybackTest.java b/tests/tests/media/src/android/media/cts/AdaptivePlaybackTest.java
index 5c9f1b1..dbe2c92 100644
--- a/tests/tests/media/src/android/media/cts/AdaptivePlaybackTest.java
+++ b/tests/tests/media/src/android/media/cts/AdaptivePlaybackTest.java
@@ -19,6 +19,7 @@
 import com.android.cts.media.R;
 
 import android.content.Context;
+import android.content.pm.PackageManager;
 import android.content.res.AssetFileDescriptor;
 import android.media.MediaCodec;
 import android.media.MediaCodecInfo;
@@ -285,6 +286,11 @@
     }
 
     private void ex(Iterable<Codec> codecList, Test[] testList) {
+        if (codecList == null) {
+            Log.i(TAG, "CodecList was empty. Skipping test.");
+            return;
+        }
+
         TestList tests = new TestList();
         for (Codec c : codecList) {
             for (Test test : testList) {
@@ -1342,8 +1348,21 @@
 }
 
 class CodecFactory {
+    protected boolean hasCodec(String codecName) {
+        MediaCodecList list = new MediaCodecList(MediaCodecList.ALL_CODECS);
+        for (MediaCodecInfo info : list.getCodecInfos()) {
+            if (codecName.equals(info.getName())) {
+                return true;
+            }
+        }
+        return false;
+    }
+
     public CodecList createCodecList(
             Context context, String mime, String googleCodecName, int ...resources) {
+        if (!hasCodec(googleCodecName)) {
+            return null;
+        }
         return new CodecFamily(context, mime, googleCodecName, resources);
     }
 }
@@ -1351,6 +1370,9 @@
 class SWCodecFactory extends CodecFactory {
     public CodecList createCodecList(
             Context context, String mime, String googleCodecName, int ...resources) {
+        if (!hasCodec(googleCodecName)) {
+            return null;
+        }
         return new CodecByName(context, mime, googleCodecName, resources);
     }
 }
@@ -1358,6 +1380,9 @@
 class HWCodecFactory extends CodecFactory {
     public CodecList createCodecList(
             Context context, String mime, String googleCodecName, int ...resources) {
+        if (!hasCodec(googleCodecName)) {
+            return null;
+        }
         return new CodecFamilyExcept(context, mime, googleCodecName, resources);
     }
 }
diff --git a/tests/tests/media/src/android/media/cts/AudioTrackTest.java b/tests/tests/media/src/android/media/cts/AudioTrackTest.java
index d01ecec..a342c37 100644
--- a/tests/tests/media/src/android/media/cts/AudioTrackTest.java
+++ b/tests/tests/media/src/android/media/cts/AudioTrackTest.java
@@ -16,6 +16,7 @@
 
 package android.media.cts;
 
+import android.content.pm.PackageManager;
 import android.cts.util.CtsAndroidTestCase;
 import android.media.AudioFormat;
 import android.media.AudioManager;
@@ -1541,7 +1542,16 @@
         }
     }
 
+    private boolean hasAudioOutput() {
+        return getContext().getPackageManager()
+            .hasSystemFeature(PackageManager.FEATURE_AUDIO_OUTPUT);
+    }
+
     public void testGetTimestamp() throws Exception {
+        if (!hasAudioOutput()) {
+            return;
+        }
+        
         // constants for test
         final String TEST_NAME = "testGetTimestamp";
         final int TEST_SR = 22050;
diff --git a/tests/tests/media/src/android/media/cts/CamcorderProfileTest.java b/tests/tests/media/src/android/media/cts/CamcorderProfileTest.java
index 8130a9a..7dfb1f6 100644
--- a/tests/tests/media/src/android/media/cts/CamcorderProfileTest.java
+++ b/tests/tests/media/src/android/media/cts/CamcorderProfileTest.java
@@ -25,12 +25,47 @@
 import android.test.AndroidTestCase;
 import android.util.Log;
 
+import java.util.Arrays;
 import java.util.List;
 
 public class CamcorderProfileTest extends AndroidTestCase {
 
     private static final String TAG = "CamcorderProfileTest";
     private static final int MIN_HIGH_SPEED_FPS = 100;
+    private static final Integer[] ALL_SUPPORTED_QUALITIES = {
+        CamcorderProfile.QUALITY_LOW,
+        CamcorderProfile.QUALITY_HIGH,
+        CamcorderProfile.QUALITY_QCIF,
+        CamcorderProfile.QUALITY_CIF,
+        CamcorderProfile.QUALITY_480P,
+        CamcorderProfile.QUALITY_720P,
+        CamcorderProfile.QUALITY_1080P,
+        CamcorderProfile.QUALITY_QVGA,
+        CamcorderProfile.QUALITY_2160P,
+        CamcorderProfile.QUALITY_TIME_LAPSE_LOW,
+        CamcorderProfile.QUALITY_TIME_LAPSE_HIGH,
+        CamcorderProfile.QUALITY_TIME_LAPSE_QCIF,
+        CamcorderProfile.QUALITY_TIME_LAPSE_CIF,
+        CamcorderProfile.QUALITY_TIME_LAPSE_480P,
+        CamcorderProfile.QUALITY_TIME_LAPSE_720P,
+        CamcorderProfile.QUALITY_TIME_LAPSE_1080P,
+        CamcorderProfile.QUALITY_TIME_LAPSE_QVGA,
+        CamcorderProfile.QUALITY_TIME_LAPSE_2160P,
+        CamcorderProfile.QUALITY_HIGH_SPEED_LOW,
+        CamcorderProfile.QUALITY_HIGH_SPEED_HIGH,
+        CamcorderProfile.QUALITY_HIGH_SPEED_480P,
+        CamcorderProfile.QUALITY_HIGH_SPEED_720P,
+        CamcorderProfile.QUALITY_HIGH_SPEED_1080P,
+        CamcorderProfile.QUALITY_HIGH_SPEED_2160P
+    };
+    private static final int LAST_QUALITY = CamcorderProfile.QUALITY_2160P;
+    private static final int LAST_TIMELAPSE_QUALITY = CamcorderProfile.QUALITY_TIME_LAPSE_1080P;
+    private static final int LAST_HIGH_SPEED_QUALITY = CamcorderProfile.QUALITY_HIGH_SPEED_2160P;
+    private static final Integer[] UNKNOWN_QUALITIES = {
+        LAST_QUALITY + 1, // Unknown normal profile quality
+        LAST_TIMELAPSE_QUALITY + 1, // Unknown timelapse profile quality
+        LAST_HIGH_SPEED_QUALITY + 1 // Unknown high speed timelapse profile quality
+    };
 
     // Uses get without id if cameraId == -1 and get with id otherwise.
     private CamcorderProfile getWithOptionalId(int quality, int cameraId) {
@@ -59,27 +94,7 @@
             profile.audioSampleRate,
             profile.audioChannels));
         assertTrue(profile.duration > 0);
-        assertTrue(profile.quality == CamcorderProfile.QUALITY_LOW ||
-                   profile.quality == CamcorderProfile.QUALITY_HIGH ||
-                   profile.quality == CamcorderProfile.QUALITY_QCIF ||
-                   profile.quality == CamcorderProfile.QUALITY_CIF ||
-                   profile.quality == CamcorderProfile.QUALITY_480P ||
-                   profile.quality == CamcorderProfile.QUALITY_720P ||
-                   profile.quality == CamcorderProfile.QUALITY_1080P ||
-                   profile.quality == CamcorderProfile.QUALITY_2160P ||
-                   profile.quality == CamcorderProfile.QUALITY_TIME_LAPSE_LOW ||
-                   profile.quality == CamcorderProfile.QUALITY_TIME_LAPSE_HIGH ||
-                   profile.quality == CamcorderProfile.QUALITY_TIME_LAPSE_QCIF ||
-                   profile.quality == CamcorderProfile.QUALITY_TIME_LAPSE_CIF ||
-                   profile.quality == CamcorderProfile.QUALITY_TIME_LAPSE_480P ||
-                   profile.quality == CamcorderProfile.QUALITY_TIME_LAPSE_720P ||
-                   profile.quality == CamcorderProfile.QUALITY_TIME_LAPSE_1080P ||
-                   profile.quality == CamcorderProfile.QUALITY_TIME_LAPSE_2160P ||
-                   profile.quality == CamcorderProfile.QUALITY_HIGH_SPEED_LOW ||
-                   profile.quality == CamcorderProfile.QUALITY_HIGH_SPEED_HIGH ||
-                   profile.quality == CamcorderProfile.QUALITY_HIGH_SPEED_480P ||
-                   profile.quality == CamcorderProfile.QUALITY_HIGH_SPEED_720P ||
-                   profile.quality == CamcorderProfile.QUALITY_HIGH_SPEED_1080P);
+        assertTrue(Arrays.asList(ALL_SUPPORTED_QUALITIES).contains(profile.quality));
         assertTrue(profile.videoBitRate > 0);
         assertTrue(profile.videoFrameRate > 0);
         assertTrue(profile.videoFrameWidth > 0);
@@ -233,19 +248,30 @@
 
         final List<Size> videoSizes = getSupportedVideoSizes(cameraId);
 
-        CamcorderProfile lowProfile =
-            getWithOptionalId(CamcorderProfile.QUALITY_LOW, cameraId);
-        CamcorderProfile highProfile =
-            getWithOptionalId(CamcorderProfile.QUALITY_HIGH, cameraId);
-        checkProfile(lowProfile, videoSizes);
-        checkProfile(highProfile, videoSizes);
+        /**
+         * Check all possible supported profiles: get profile should work, and the profile
+         * should be sane. Note that, timelapse and high speed video sizes may not be listed
+         * as supported video sizes from camera, skip the size check.
+         */
+        for (Integer quality : ALL_SUPPORTED_QUALITIES) {
+            if (CamcorderProfile.hasProfile(cameraId, quality) || isProfileMandatory(quality)) {
+                List<Size> videoSizesToCheck = null;
+                if (quality >= CamcorderProfile.QUALITY_LOW &&
+                                quality <= CamcorderProfile.QUALITY_2160P) {
+                    videoSizesToCheck = videoSizes;
+                }
+                CamcorderProfile profile = getWithOptionalId(quality, cameraId);
+                checkProfile(profile, videoSizesToCheck);
+            }
+        }
 
-        CamcorderProfile lowTimeLapseProfile =
-            getWithOptionalId(CamcorderProfile.QUALITY_TIME_LAPSE_LOW, cameraId);
-        CamcorderProfile highTimeLapseProfile =
-            getWithOptionalId(CamcorderProfile.QUALITY_TIME_LAPSE_HIGH, cameraId);
-        checkProfile(lowTimeLapseProfile, null);
-        checkProfile(highTimeLapseProfile, null);
+        /**
+         * Check unknown profiles: hasProfile() should return false.
+         */
+        for (Integer quality : UNKNOWN_QUALITIES) {
+            assertFalse("Unknown profile quality " + quality + " shouldn't be supported by camera "
+                    + cameraId, CamcorderProfile.hasProfile(cameraId, quality));
+        }
 
         // High speed low and high profile are optional,
         // but they should be both present or missing.
@@ -288,8 +314,17 @@
 
         int[] specificHighSpeedProfileQualities = {CamcorderProfile.QUALITY_HIGH_SPEED_480P,
                                                    CamcorderProfile.QUALITY_HIGH_SPEED_720P,
-                                                   CamcorderProfile.QUALITY_HIGH_SPEED_1080P};
+                                                   CamcorderProfile.QUALITY_HIGH_SPEED_1080P,
+                                                   CamcorderProfile.QUALITY_HIGH_SPEED_2160P};
 
+        CamcorderProfile lowProfile =
+                getWithOptionalId(CamcorderProfile.QUALITY_LOW, cameraId);
+        CamcorderProfile highProfile =
+                getWithOptionalId(CamcorderProfile.QUALITY_HIGH, cameraId);
+        CamcorderProfile lowTimeLapseProfile =
+                getWithOptionalId(CamcorderProfile.QUALITY_TIME_LAPSE_LOW, cameraId);
+        CamcorderProfile highTimeLapseProfile =
+                getWithOptionalId(CamcorderProfile.QUALITY_TIME_LAPSE_HIGH, cameraId);
         checkSpecificProfiles(cameraId, lowProfile, highProfile,
                 specificProfileQualities, videoSizes);
         checkSpecificProfiles(cameraId, lowTimeLapseProfile, highTimeLapseProfile,
@@ -342,4 +377,11 @@
         Log.e(TAG, "Size (" + width + "x" + height + ") is not supported");
         return false;
     }
+
+    private boolean isProfileMandatory(int quality) {
+        return (quality == CamcorderProfile.QUALITY_LOW) ||
+                (quality == CamcorderProfile.QUALITY_HIGH) ||
+                (quality == CamcorderProfile.QUALITY_TIME_LAPSE_LOW) ||
+                (quality == CamcorderProfile.QUALITY_TIME_LAPSE_HIGH);
+    }
 }
diff --git a/tests/tests/media/src/android/media/cts/ClearKeySystemTest.java b/tests/tests/media/src/android/media/cts/ClearKeySystemTest.java
index c05a605..ff05246 100644
--- a/tests/tests/media/src/android/media/cts/ClearKeySystemTest.java
+++ b/tests/tests/media/src/android/media/cts/ClearKeySystemTest.java
@@ -16,6 +16,7 @@
 package android.media.cts;
 
 import android.content.Context;
+import android.content.pm.PackageManager;
 import android.media.MediaCodec;
 import android.media.MediaCodecInfo;
 import android.media.MediaCodecInfo.CodecCapabilities;
@@ -406,6 +407,10 @@
      * Tests clear key system playback.
      */
     public void testClearKeyPlayback() throws Exception {
+        if (!hasAudioOutput()) {
+            return;
+        }
+        
         MediaDrm drm = startDrm();
         if (null == drm) {
             throw new Error("Failed to create drm.");
diff --git a/tests/tests/media/src/android/media/cts/DecoderTest.java b/tests/tests/media/src/android/media/cts/DecoderTest.java
index ba70f32..d71d38a 100644
--- a/tests/tests/media/src/android/media/cts/DecoderTest.java
+++ b/tests/tests/media/src/android/media/cts/DecoderTest.java
@@ -24,6 +24,7 @@
 import android.media.Image;
 import android.media.MediaCodec;
 import android.media.MediaCodecInfo;
+import android.media.MediaCodecList;
 import android.media.MediaExtractor;
 import android.media.MediaFormat;
 import android.util.Log;
@@ -175,6 +176,10 @@
         MediaFormat format = ex.getTrackFormat(0);
         String mime = format.getString(MediaFormat.KEY_MIME);
         assertTrue("not a video track. Wrong test file?", mime.startsWith("video/"));
+        if (!hasCodecForMimeType(mime, false)) {
+            Log.i(TAG, "SKIPPING testBFrames(): Could not find a codec for mimeType: " + mime);
+            return;
+        }
         MediaCodec dec = MediaCodec.createDecoderByType(mime);
         Surface s = getActivity().getSurfaceHolder().getSurface();
         dec.configure(format, s, null, 0);
@@ -840,6 +845,10 @@
     }
 
     public void testCodecBasicH264() throws Exception {
+        if (!hasH264(false)) {
+            Log.i(TAG, "SKIPPING testCodecBasicH264(): No codec found.");
+            return;
+        }
         Surface s = getActivity().getSurfaceHolder().getSurface();
         int frames1 = countFrames(
                 R.raw.video_480x360_mp4_h264_1000kbps_25fps_aac_stereo_128kbps_44100hz,
@@ -853,6 +862,10 @@
     }
 
     public void testCodecBasicHEVC() throws Exception {
+        if (!hasHEVC(false)) {
+            Log.i(TAG, "SKIPPING testCodecBasicHEVC(): No codec found.");
+            return;
+        }
         Surface s = getActivity().getSurfaceHolder().getSurface();
         int frames1 = countFrames(
                 R.raw.video_1280x720_mp4_hevc_1150kbps_30fps_aac_stereo_128kbps_48000hz,
@@ -866,6 +879,10 @@
     }
 
     public void testCodecBasicH263() throws Exception {
+        if (!hasH263(false)) {
+            Log.i(TAG, "SKIPPING testCodecBasicH263(): No codec found.");
+            return;
+        }
         Surface s = getActivity().getSurfaceHolder().getSurface();
         int frames1 = countFrames(
                 R.raw.video_176x144_3gp_h263_300kbps_12fps_aac_stereo_128kbps_22050hz,
@@ -879,6 +896,10 @@
     }
 
     public void testCodecBasicMpeg4() throws Exception {
+        if (!hasMpeg4(false)) {
+            Log.i(TAG, "SKIPPING testCodecBasicMpeg4(): No codec found.");
+            return;
+        }
         Surface s = getActivity().getSurfaceHolder().getSurface();
         int frames1 = countFrames(
                 R.raw.video_480x360_mp4_mpeg4_860kbps_25fps_aac_stereo_128kbps_44100hz,
@@ -892,6 +913,10 @@
     }
 
     public void testCodecBasicVP8() throws Exception {
+        if (!hasVP8(false)) {
+            Log.i(TAG, "SKIPPING testCodecBasicVP8(): No codec found.");
+            return;
+        }
         Surface s = getActivity().getSurfaceHolder().getSurface();
         int frames1 = countFrames(
                 R.raw.video_480x360_webm_vp8_333kbps_25fps_vorbis_stereo_128kbps_44100hz,
@@ -905,6 +930,10 @@
     }
 
     public void testCodecBasicVP9() throws Exception {
+        if (!hasVP9(false)) {
+            Log.i(TAG, "SKIPPING testCodecBasicVP9(): No codec found.");
+            return;
+        }
         Surface s = getActivity().getSurfaceHolder().getSurface();
         int frames1 = countFrames(
                 R.raw.video_480x360_webm_vp9_333kbps_25fps_vorbis_stereo_128kbps_44100hz,
@@ -918,6 +947,10 @@
     }
 
     public void testCodecEarlyEOSH263() throws Exception {
+        if (!hasH263(false)) {
+            Log.i(TAG, "SKIPPING testCodecEarlyEOSH263(): No codec found.");
+            return;
+        }
         Surface s = getActivity().getSurfaceHolder().getSurface();
         int frames1 = countFrames(
                 R.raw.video_176x144_3gp_h263_300kbps_12fps_aac_stereo_128kbps_22050hz,
@@ -926,6 +959,10 @@
     }
 
     public void testCodecEarlyEOSH264() throws Exception {
+        if (!hasH264(false)) {
+            Log.i(TAG, "SKIPPING testCodecEarlyEOSH264(): No codec found.");
+            return;
+        }
         Surface s = getActivity().getSurfaceHolder().getSurface();
         int frames1 = countFrames(
                 R.raw.video_480x360_mp4_h264_1000kbps_25fps_aac_stereo_128kbps_44100hz,
@@ -934,6 +971,10 @@
     }
 
     public void testCodecEarlyEOSHEVC() throws Exception {
+        if (!hasHEVC(false)) {
+            Log.i(TAG, "SKIPPING testCodecEarlyEOSHEVC(): No codec found.");
+            return;
+        }
         Surface s = getActivity().getSurfaceHolder().getSurface();
         int frames1 = countFrames(
                 R.raw.video_1280x720_mp4_hevc_1150kbps_30fps_aac_stereo_128kbps_48000hz,
@@ -942,6 +983,10 @@
     }
 
     public void testCodecEarlyEOSMpeg4() throws Exception {
+        if (!hasMpeg4(false)) {
+            Log.i(TAG, "SKIPPING testCodecEarlyEOSMpeg4(): No codec found.");
+            return;
+        }
         Surface s = getActivity().getSurfaceHolder().getSurface();
         int frames1 = countFrames(
                 R.raw.video_480x360_mp4_mpeg4_860kbps_25fps_aac_stereo_128kbps_44100hz,
@@ -950,6 +995,10 @@
     }
 
     public void testCodecEarlyEOSVP8() throws Exception {
+        if (!hasVP8(false)) {
+            Log.i(TAG, "SKIPPING testCodecEarlyEOSVP8(): No codec found.");
+            return;
+        }
         Surface s = getActivity().getSurfaceHolder().getSurface();
         int frames1 = countFrames(
                 R.raw.video_480x360_webm_vp8_333kbps_25fps_vorbis_stereo_128kbps_44100hz,
@@ -958,6 +1007,10 @@
     }
 
     public void testCodecEarlyEOSVP9() throws Exception {
+        if (!hasVP9(false)) {
+            Log.i(TAG, "SKIPPING testCodecEarlyEOSVP9(): No codec found.");
+            return;
+        }
         Surface s = getActivity().getSurfaceHolder().getSurface();
         int frames1 = countFrames(
                 R.raw.video_480x360_webm_vp9_333kbps_25fps_vorbis_stereo_128kbps_44100hz,
@@ -966,66 +1019,114 @@
     }
 
     public void testCodecResetsH264WithoutSurface() throws Exception {
+        if (!hasH264(false)) {
+            Log.i(TAG, "SKIPPING testCodecResetsH264WithoutSurface(): No codec found.");
+            return;
+        }
         testCodecResets(
                 R.raw.video_480x360_mp4_h264_1000kbps_25fps_aac_stereo_128kbps_44100hz, null);
     }
 
     public void testCodecResetsH264WithSurface() throws Exception {
+        if (!hasH264(false)) {
+            Log.i(TAG, "SKIPPING testCodecResetsH264WithSurface(): No codec found.");
+            return;
+        }
         Surface s = getActivity().getSurfaceHolder().getSurface();
         testCodecResets(
                 R.raw.video_480x360_mp4_h264_1000kbps_25fps_aac_stereo_128kbps_44100hz, s);
     }
 
     public void testCodecResetsHEVCWithoutSurface() throws Exception {
+        if (!hasHEVC(false)) {
+            Log.i(TAG, "SKIPPING testCodecResetsHEVCWithoutSurface(): No codec found.");
+            return;
+        }
         testCodecResets(
                 R.raw.video_1280x720_mp4_hevc_1150kbps_30fps_aac_stereo_128kbps_48000hz, null);
     }
 
     public void testCodecResetsHEVCWithSurface() throws Exception {
+        if (!hasHEVC(false)) {
+            Log.i(TAG, "SKIPPING testCodecResetsHEVCWithSurface(): No codec found.");
+            return;
+        }
         Surface s = getActivity().getSurfaceHolder().getSurface();
         testCodecResets(
                 R.raw.video_1280x720_mp4_hevc_1150kbps_30fps_aac_stereo_128kbps_48000hz, s);
     }
 
     public void testCodecResetsH263WithoutSurface() throws Exception {
+        if (!hasH263(false)) {
+            Log.i(TAG, "SKIPPING testCodecResetsH263WithoutSurface(): No codec found.");
+            return;
+        }
         testCodecResets(
                 R.raw.video_176x144_3gp_h263_300kbps_12fps_aac_stereo_128kbps_22050hz, null);
     }
 
     public void testCodecResetsH263WithSurface() throws Exception {
+        if (!hasH263(false)) {
+            Log.i(TAG, "SKIPPING testCodecResetsH263WithSurface(): No codec found.");
+            return;
+        }
         Surface s = getActivity().getSurfaceHolder().getSurface();
         testCodecResets(
                 R.raw.video_176x144_3gp_h263_300kbps_12fps_aac_stereo_128kbps_22050hz, s);
     }
 
     public void testCodecResetsMpeg4WithoutSurface() throws Exception {
+        if (!hasMpeg4(false)) {
+            Log.i(TAG, "SKIPPING testCodecResetsMpeg4WithoutSurface(): No codec found.");
+            return;
+        }
         testCodecResets(
                 R.raw.video_480x360_mp4_mpeg4_860kbps_25fps_aac_stereo_128kbps_44100hz, null);
     }
 
     public void testCodecResetsMpeg4WithSurface() throws Exception {
+        if (!hasMpeg4(false)) {
+            Log.i(TAG, "SKIPPING testCodecResetsMpeg4WithSurface(): No codec found.");
+            return;
+        }
         Surface s = getActivity().getSurfaceHolder().getSurface();
         testCodecResets(
                 R.raw.video_480x360_mp4_mpeg4_860kbps_25fps_aac_stereo_128kbps_44100hz, s);
     }
 
     public void testCodecResetsVP8WithoutSurface() throws Exception {
+        if (!hasVP8(false)) {
+            Log.i(TAG, "SKIPPING testCodecResetsVP8WithoutSurface(): No codec found.");
+            return;
+        }
         testCodecResets(
                 R.raw.video_480x360_webm_vp8_333kbps_25fps_vorbis_stereo_128kbps_44100hz, null);
     }
 
     public void testCodecResetsVP8WithSurface() throws Exception {
+        if (!hasVP8(false)) {
+            Log.i(TAG, "SKIPPING testCodecResetsVP8WithSurface(): No codec found.");
+            return;
+        }
         Surface s = getActivity().getSurfaceHolder().getSurface();
         testCodecResets(
                 R.raw.video_480x360_webm_vp8_333kbps_25fps_vorbis_stereo_128kbps_44100hz, s);
     }
 
     public void testCodecResetsVP9WithoutSurface() throws Exception {
+        if (!hasVP9(false)) {
+            Log.i(TAG, "SKIPPING testCodecResetsVP9WithoutSurface(): No codec found.");
+            return;
+        }
         testCodecResets(
                 R.raw.video_480x360_webm_vp9_333kbps_25fps_vorbis_stereo_128kbps_44100hz, null);
     }
 
     public void testCodecResetsVP9WithSurface() throws Exception {
+        if (!hasVP9(false)) {
+            Log.i(TAG, "SKIPPING testCodecResetsVP9WithSurface(): No codec found.");
+            return;
+        }
         Surface s = getActivity().getSurfaceHolder().getSurface();
         testCodecResets(
                 R.raw.video_480x360_webm_vp9_333kbps_25fps_vorbis_stereo_128kbps_44100hz, s);
@@ -1132,6 +1233,13 @@
         extractor.setDataSource(testFd.getFileDescriptor(), testFd.getStartOffset(),
                 testFd.getLength());
         extractor.selectTrack(0); // consider variable looping on track
+        MediaFormat format = extractor.getTrackFormat(0);
+        String mimeType = format.getString(MediaFormat.KEY_MIME);
+        if (!hasCodecForMimeType(mimeType, false)) {
+            Log.i(TAG, "SKIPPING testEOSBehavior() for resid=" + movie + " No codec found for "
+                    + "mimeType = " + mimeType);
+            return;
+        }
         List<Long> outputChecksums = new ArrayList<Long>();
         List<Long> outputTimestamps = new ArrayList<Long>();
         Arrays.sort(stopAtSample);
diff --git a/tests/tests/media/src/android/media/cts/EncodeVirtualDisplayWithCompositionTest.java b/tests/tests/media/src/android/media/cts/EncodeVirtualDisplayWithCompositionTest.java
index 7b21997..9c99c2d 100644
--- a/tests/tests/media/src/android/media/cts/EncodeVirtualDisplayWithCompositionTest.java
+++ b/tests/tests/media/src/android/media/cts/EncodeVirtualDisplayWithCompositionTest.java
@@ -140,7 +140,8 @@
         Log.i(TAG, "testRendering800x480Locally");
         Pair<Integer, Integer> maxRes = checkMaxConcurrentEncodingDecodingResolution();
         if (maxRes == null) {
-            fail("codec not supported");
+            Log.i(TAG, "SKIPPING testRendering800x480Locally(): codec not supported");
+            return;
         }
         if (maxRes.first >= 800 && maxRes.second >= 480) {
             runTestRenderingInSeparateThread(800, 480, false, false);
@@ -153,7 +154,8 @@
         Log.i(TAG, "testRenderingMaxResolutionLocally");
         Pair<Integer, Integer> maxRes = checkMaxConcurrentEncodingDecodingResolution();
         if (maxRes == null) {
-            fail("codec not supported");
+            Log.i(TAG, "SKIPPING testRenderingMaxResolutionLocally(): codec not supported");
+            return;
         }
         Log.w(TAG, "Trying resolution w:" + maxRes.first + " h:" + maxRes.second);
         runTestRenderingInSeparateThread(maxRes.first, maxRes.second, false, false);
@@ -163,7 +165,8 @@
         Log.i(TAG, "testRendering800x480Remotely");
         Pair<Integer, Integer> maxRes = checkMaxConcurrentEncodingDecodingResolution();
         if (maxRes == null) {
-            fail("codec not supported");
+            Log.i(TAG, "SKIPPING testRendering800x480Remotely(): codec not supported");
+            return;
         }
         if (maxRes.first >= 800 && maxRes.second >= 480) {
             runTestRenderingInSeparateThread(800, 480, true, false);
@@ -176,7 +179,8 @@
         Log.i(TAG, "testRenderingMaxResolutionRemotely");
         Pair<Integer, Integer> maxRes = checkMaxConcurrentEncodingDecodingResolution();
         if (maxRes == null) {
-            fail("codec not supported");
+            Log.i(TAG, "SKIPPING testRenderingMaxResolutionRemotely(): codec not supported");
+            return;
         }
         Log.w(TAG, "Trying resolution w:" + maxRes.first + " h:" + maxRes.second);
         runTestRenderingInSeparateThread(maxRes.first, maxRes.second, true, false);
@@ -186,7 +190,8 @@
         Log.i(TAG, "testRendering800x480RemotelyWith3Windows");
         Pair<Integer, Integer> maxRes = checkMaxConcurrentEncodingDecodingResolution();
         if (maxRes == null) {
-            fail("codec not supported");
+            Log.i(TAG, "SKIPPING testRendering800x480RemotelyWith3Windows(): codec not supported");
+            return;
         }
         if (maxRes.first >= 800 && maxRes.second >= 480) {
             runTestRenderingInSeparateThread(800, 480, true, true);
@@ -199,7 +204,8 @@
         Log.i(TAG, "testRendering800x480LocallyWith3Windows");
         Pair<Integer, Integer> maxRes = checkMaxConcurrentEncodingDecodingResolution();
         if (maxRes == null) {
-            fail("codec not supported");
+            Log.i(TAG, "SKIPPING testRendering800x480LocallyWith3Windows(): codec not supported");
+            return;
         }
         if (maxRes.first >= 800 && maxRes.second >= 480) {
             runTestRenderingInSeparateThread(800, 480, false, true);
diff --git a/tests/tests/media/src/android/media/cts/EnvReverbTest.java b/tests/tests/media/src/android/media/cts/EnvReverbTest.java
index e2e9b6d..4cfb744 100644
--- a/tests/tests/media/src/android/media/cts/EnvReverbTest.java
+++ b/tests/tests/media/src/android/media/cts/EnvReverbTest.java
@@ -304,6 +304,9 @@
 
     //Test case 2.1: test setEnabled() throws exception after release
     public void test2_1SetEnabledAfterRelease() throws Exception {
+        if (!isEnvReverbAvailable()) {
+            return;
+        }
         getReverb(0);
         mReverb.release();
         try {
diff --git a/tests/tests/media/src/android/media/cts/ImageReaderDecoderTest.java b/tests/tests/media/src/android/media/cts/ImageReaderDecoderTest.java
index d620995..9528db9 100644
--- a/tests/tests/media/src/android/media/cts/ImageReaderDecoderTest.java
+++ b/tests/tests/media/src/android/media/cts/ImageReaderDecoderTest.java
@@ -100,6 +100,10 @@
      * to be supported by hw decoder.
      */
     public void testHwAVCDecode360pForFlexibleYuv() throws Exception {
+        if (!MediaPlayerTestBase.hasH264(false)) {
+            Log.i(TAG, "SKIPPING testHwAVCDecode360pForFlexibleYuv(): no codec found.");
+            return;
+        }
         try {
             int format = ImageFormat.YUV_420_888;
             videoDecodeToSurface(
@@ -115,6 +119,10 @@
      * to be supported by sw decoder.
      */
     public void testSwAVCDecode360pForFlexibleYuv() throws Exception {
+        if (!MediaPlayerTestBase.hasH264(false)) {
+            Log.i(TAG, "SKIPPING testSwAVCDecode360pForFlexibleYuv(): no codec found.");
+            return;
+        }
         try {
             int format = ImageFormat.YUV_420_888;
             videoDecodeToSurface(
diff --git a/tests/tests/media/src/android/media/cts/MediaCodecCapabilitiesTest.java b/tests/tests/media/src/android/media/cts/MediaCodecCapabilitiesTest.java
index 08e6212..723652f 100644
--- a/tests/tests/media/src/android/media/cts/MediaCodecCapabilitiesTest.java
+++ b/tests/tests/media/src/android/media/cts/MediaCodecCapabilitiesTest.java
@@ -15,6 +15,7 @@
  */
 package android.media.cts;
 
+import android.content.pm.PackageManager;
 import android.media.MediaCodecInfo;
 import android.media.MediaCodecInfo.CodecCapabilities;
 import android.media.MediaCodecInfo.CodecProfileLevel;
@@ -36,10 +37,7 @@
     private static final int PLAY_TIME_MS = 30000;
 
     public void testAvcBaseline1() throws Exception {
-        if (!supports(AVC_MIME, CodecProfileLevel.AVCProfileBaseline)) {
-          return;
-        }
-        if (!supports(AVC_MIME, CodecProfileLevel.AVCProfileBaseline,
+        if (hasCodec(AVC_MIME) && !supports(AVC_MIME, CodecProfileLevel.AVCProfileBaseline,
                 CodecProfileLevel.AVCLevel1)) {
             throw new RuntimeException("AVCLevel1 support is required by CDD");
         }
@@ -125,7 +123,7 @@
     }
 
     public void testHevcMain1() throws Exception {
-        if (!supports(HEVC_MIME, CodecProfileLevel.HEVCProfileMain,
+        if (hasCodec(HEVC_MIME) && !supports(HEVC_MIME, CodecProfileLevel.HEVCProfileMain,
                 CodecProfileLevel.HEVCMainTierLevel1)) {
             throw new RuntimeException("HECLevel1 support is required by CDD");
         }
@@ -238,4 +236,15 @@
         return false;
     }
 
+    private static boolean hasCodec(String mimeType) {
+        MediaCodecList list = new MediaCodecList(MediaCodecList.ALL_CODECS);
+        for (MediaCodecInfo info : list.getCodecInfos()) {
+            for (String type : info.getSupportedTypes()) {
+                if (type.equalsIgnoreCase(mimeType)) {
+                    return true;
+                }
+            }
+        }
+        return false;
+    }
 }
diff --git a/tests/tests/media/src/android/media/cts/MediaCodecTest.java b/tests/tests/media/src/android/media/cts/MediaCodecTest.java
index f72e3a0..5f8885d 100644
--- a/tests/tests/media/src/android/media/cts/MediaCodecTest.java
+++ b/tests/tests/media/src/android/media/cts/MediaCodecTest.java
@@ -79,11 +79,17 @@
      * methods when called in incorrect operational states.
      */
     public void testException() throws Exception {
+        String mimeType = "audio/amr-wb";
+        if (!supportsCodec(mimeType, false)) {
+            Log.i(TAG, "No decoder found for mimeType= " + mimeType);
+            return;
+        }
+
         MediaFormat[] formatList = new MediaFormat[2];
 
         // use audio format
         formatList[0] = new MediaFormat();
-        formatList[0].setString(MediaFormat.KEY_MIME, "audio/amr-wb");
+        formatList[0].setString(MediaFormat.KEY_MIME, mimeType);
         formatList[0].setInteger(MediaFormat.KEY_SAMPLE_RATE, 16000);
         formatList[0].setInteger(MediaFormat.KEY_CHANNEL_COUNT, 1);
         formatList[0].setInteger(MediaFormat.KEY_BIT_RATE, 19850);
@@ -272,6 +278,11 @@
      * <br> calling createInputSurface() with a non-Surface color format throws exception
      */
     public void testCreateInputSurfaceErrors() {
+        if (!supportsCodec(MIME_TYPE, true)) {
+            Log.i(TAG, "No encoder found for mimeType= " + MIME_TYPE);
+            return;
+        }
+
         MediaFormat format = createMediaFormat();
         MediaCodec encoder = null;
         Surface surface = null;
@@ -326,6 +337,11 @@
      * <br> submitting a frame after EOS throws exception [TODO]
      */
     public void testSignalSurfaceEOS() {
+        if (!supportsCodec(MIME_TYPE, true)) {
+            Log.i(TAG, "No encoder found for mimeType= " + MIME_TYPE);
+            return;
+        }
+
         MediaFormat format = createMediaFormat();
         MediaCodec encoder = null;
         InputSurface inputSurface = null;
@@ -378,6 +394,11 @@
      * <br> stopping with buffers in flight doesn't crash or hang
      */
     public void testAbruptStop() {
+        if (!supportsCodec(MIME_TYPE, true)) {
+            Log.i(TAG, "No encoder found for mimeType= " + MIME_TYPE);
+            return;
+        }
+
         // There appears to be a race, so run it several times with a short delay between runs
         // to allow any previous activity to shut down.
         for (int i = 0; i < 50; i++) {
@@ -432,6 +453,11 @@
      * <br> dequeueInputBuffer() fails when encoder configured with an input Surface
      */
     public void testDequeueSurface() {
+        if (!supportsCodec(MIME_TYPE, true)) {
+            Log.i(TAG, "No encoder found for mimeType= " + MIME_TYPE);
+            return;
+        }
+
         MediaFormat format = createMediaFormat();
         MediaCodec encoder = null;
         Surface surface = null;
@@ -470,6 +496,11 @@
      * <br> sending EOS with signalEndOfInputStream on non-Surface encoder fails
      */
     public void testReconfigureWithoutSurface() {
+        if (!supportsCodec(MIME_TYPE, true)) {
+            Log.i(TAG, "No encoder found for mimeType= " + MIME_TYPE);
+            return;
+        }
+
         MediaFormat format = createMediaFormat();
         MediaCodec encoder = null;
         Surface surface = null;
@@ -553,8 +584,13 @@
             mediaExtractor = getMediaExtractorForMimeType(inputResourceId, "video/");
             MediaFormat mediaFormat =
                     mediaExtractor.getTrackFormat(mediaExtractor.getSampleTrackIndex());
+            String mimeType = mediaFormat.getString(MediaFormat.KEY_MIME);
+            if (!supportsCodec(mimeType, false)) {
+                Log.i(TAG, "No decoder found for mimeType= " + mimeType);
+                return true;
+            }
             mediaCodec =
-                    MediaCodec.createDecoderByType(mediaFormat.getString(MediaFormat.KEY_MIME));
+                    MediaCodec.createDecoderByType(mimeType);
             mediaCodec.configure(mediaFormat, outputSurface.getSurface(), null, 0);
             mediaCodec.start();
             boolean eos = false;
@@ -669,6 +705,16 @@
      * Tests creating an encoder and decoder for {@link #MIME_TYPE_AUDIO} at the same time.
      */
     public void testCreateAudioDecoderAndEncoder() {
+        if (!supportsCodec(MIME_TYPE_AUDIO, true)) {
+            Log.i(TAG, "No encoder found for mimeType= " + MIME_TYPE_AUDIO);
+            return;
+        }
+
+        if (!supportsCodec(MIME_TYPE_AUDIO, false)) {
+            Log.i(TAG, "No decoder found for mimeType= " + MIME_TYPE_AUDIO);
+            return;
+        }
+
         final MediaFormat encoderFormat = MediaFormat.createAudioFormat(
                 MIME_TYPE_AUDIO, AUDIO_SAMPLE_RATE, AUDIO_CHANNEL_COUNT);
         encoderFormat.setInteger(MediaFormat.KEY_AAC_PROFILE, AUDIO_AAC_PROFILE);
@@ -716,6 +762,16 @@
     }
 
     public void testConcurrentAudioVideoEncodings() throws InterruptedException {
+        if (!supportsCodec(MIME_TYPE_AUDIO, true)) {
+            Log.i(TAG, "No encoder found for mimeType= " + MIME_TYPE_AUDIO);
+            return;
+        }
+
+        if (!supportsCodec(MIME_TYPE, true)) {
+            Log.i(TAG, "No encoder found for mimeType= " + MIME_TYPE);
+            return;
+        }
+
         final int VIDEO_NUM_SWAPS = 100;
         // audio only checks this and stop
         mVideoEncodingOngoing = true;
@@ -1006,4 +1062,23 @@
 
         return mediaExtractor;
     }
-}
+
+    private static boolean supportsCodec(String mimeType, boolean encoder) {
+        MediaCodecList list = new MediaCodecList(MediaCodecList.ALL_CODECS);
+        for (MediaCodecInfo info : list.getCodecInfos()) {
+            if (encoder && !info.isEncoder()) {
+                continue;
+            }
+            if (!encoder && info.isEncoder()) {
+                continue;
+            }
+            
+            for (String type : info.getSupportedTypes()) {
+                if (type.equalsIgnoreCase(mimeType)) {
+                    return true;
+                }
+            }
+        }
+        return false;
+    }
+}
\ No newline at end of file
diff --git a/tests/tests/media/src/android/media/cts/MediaPlayerFlakyNetworkTest.java b/tests/tests/media/src/android/media/cts/MediaPlayerFlakyNetworkTest.java
index 8063cbb..c5cd04e 100644
--- a/tests/tests/media/src/android/media/cts/MediaPlayerFlakyNetworkTest.java
+++ b/tests/tests/media/src/android/media/cts/MediaPlayerFlakyNetworkTest.java
@@ -92,7 +92,10 @@
         doPlayStreams(6, 0.00002f);
     }
 
-   private void doPlayStreams(int seed, float probability) throws Throwable {
+    private void doPlayStreams(int seed, float probability) throws Throwable {
+        if (!hasH264(false)) {
+            return;
+        }
         Random random = new Random(seed);
         createHttpServer(seed, probability);
         for (int i = 0; i < 10; i++) {
diff --git a/tests/tests/media/src/android/media/cts/MediaPlayerTest.java b/tests/tests/media/src/android/media/cts/MediaPlayerTest.java
index 78ba149..108aa8b 100644
--- a/tests/tests/media/src/android/media/cts/MediaPlayerTest.java
+++ b/tests/tests/media/src/android/media/cts/MediaPlayerTest.java
@@ -21,6 +21,11 @@
 import android.content.pm.PackageManager;
 import android.content.res.AssetFileDescriptor;
 import android.media.AudioManager;
+import android.media.MediaCodec;
+import android.media.MediaCodecInfo;
+import android.media.MediaCodecList;
+import android.media.MediaExtractor;
+import android.media.MediaFormat;
 import android.media.MediaPlayer;
 import android.media.MediaPlayer.OnErrorListener;
 import android.media.MediaRecorder;
@@ -304,6 +309,11 @@
     }
 
     public void testPlayAudioTwice() throws Exception {
+        if (!hasAudioOutput()) {
+            Log.i(LOG_TAG, "SKIPPING testPlayAudioTwice(). No audio output.");
+            return;
+        }
+
         final int resid = R.raw.camera_click;
 
         MediaPlayer mp = MediaPlayer.create(mContext, resid);
@@ -550,6 +560,10 @@
     }
 
     private void testGapless(int resid1, int resid2) throws Exception {
+        if (!hasAudioOutput()) {
+            Log.i(LOG_TAG, "SKIPPING testGapless(). No audio output.");
+            return;
+        }
 
         MediaPlayer mp1 = new MediaPlayer();
         mp1.setAudioStreamType(AudioManager.STREAM_MUSIC);
@@ -660,7 +674,12 @@
             }
         });
 
-        loadResource(R.raw.testvideo);
+        try {
+            loadResource(R.raw.testvideo);
+        } catch (UnsupportedCodecException e) {
+            Log.i(LOG_TAG, "SKIPPING testVideoSurfaceResetting(). Could not find codec.");
+            return;
+        }
         playLoadedVideo(352, 288, -1);
 
         Thread.sleep(SLEEP_TIME);
@@ -1011,7 +1030,12 @@
     }
 
     public void testDeselectTrack() throws Throwable {
-        loadResource(R.raw.testvideo_with_2_subtitles);
+        try {
+            loadResource(R.raw.testvideo_with_2_subtitles);
+        } catch (UnsupportedCodecException e) {
+            Log.i(LOG_TAG, "SKIPPING testDeselectTrack(). Could not find codec.");
+            return;
+        }
         runTestOnUiThread(new Runnable() {
             public void run() {
                 try {
@@ -1082,7 +1106,12 @@
     }
 
     public void testChangeSubtitleTrack() throws Throwable {
-        loadResource(R.raw.testvideo_with_2_subtitles);
+        try {
+            loadResource(R.raw.testvideo_with_2_subtitles);
+        } catch (UnsupportedCodecException e) {
+            Log.i(LOG_TAG, "SKIPPING testChangeSubtitleTrack(). Could not find codec.");
+            return;
+        }
 
         mMediaPlayer.setDisplay(getActivity().getSurfaceHolder());
         mMediaPlayer.setScreenOnWhilePlaying(true);
@@ -1170,7 +1199,12 @@
     }
 
     public void testGetTrackInfo() throws Throwable {
-        loadResource(R.raw.testvideo_with_2_subtitles);
+        try {
+            loadResource(R.raw.testvideo_with_2_subtitles);
+        } catch (UnsupportedCodecException e) {
+            Log.i(LOG_TAG, "SKIPPING testGetTrackInfo(). Could not find codec.");
+            return;
+        }
         runTestOnUiThread(new Runnable() {
             public void run() {
                 try {
@@ -1245,7 +1279,13 @@
     public void testCallback() throws Throwable {
         final int mp4Duration = 8484;
 
-        loadResource(R.raw.testvideo);
+        try {
+            loadResource(R.raw.testvideo);
+        } catch (UnsupportedCodecException e) {
+            Log.i(LOG_TAG, "SKIPPING testCallback(). Could not find codec.");
+            return;
+        }
+
         mMediaPlayer.setDisplay(getActivity().getSurfaceHolder());
         mMediaPlayer.setScreenOnWhilePlaying(true);
 
@@ -1317,6 +1357,11 @@
 
     public void testRecordAndPlay() throws Exception {
         if (!hasMicrophone()) {
+            Log.i(LOG_TAG, "SKIPPING testRecordAndPlay(). No microphone.");
+            return;
+        }
+        if (!hasH263(false)) {
+            Log.i(LOG_TAG, "SKIPPING testRecordAndPlay(). Could not find codec.");
             return;
         }
         File outputFile = new File(Environment.getExternalStorageDirectory(),
diff --git a/tests/tests/media/src/android/media/cts/MediaPlayerTestBase.java b/tests/tests/media/src/android/media/cts/MediaPlayerTestBase.java
index 61d8792..9225203 100644
--- a/tests/tests/media/src/android/media/cts/MediaPlayerTestBase.java
+++ b/tests/tests/media/src/android/media/cts/MediaPlayerTestBase.java
@@ -16,8 +16,15 @@
 package android.media.cts;
 
 import android.content.Context;
+
+import android.content.pm.PackageManager;
 import android.content.res.AssetFileDescriptor;
 import android.content.res.Resources;
+import android.media.MediaCodec;
+import android.media.MediaCodecInfo;
+import android.media.MediaCodecList;
+import android.media.MediaExtractor;
+import android.media.MediaFormat;
 import android.media.MediaPlayer;
 import android.test.ActivityInstrumentationTestCase2;
 
@@ -143,6 +150,10 @@
     }
 
     protected void loadResource(int resid) throws Exception {
+        if (!supportsPlayback(resid)) {
+            throw new UnsupportedCodecException();
+        }
+
         AssetFileDescriptor afd = mResources.openRawResourceFd(resid);
         try {
             mMediaPlayer.setDataSource(afd.getFileDescriptor(), afd.getStartOffset(),
@@ -197,6 +208,12 @@
     }
 
     protected void playVideoTest(int resid, int width, int height) throws Exception {
+        if (!supportsPlayback(resid)) {
+            LOG.info("SKIPPING playVideoTest() for resid=" + resid 
+                    + " Could not find a codec for playback.");
+            return;
+        }
+
         loadResource(resid);
         playLoadedVideo(width, height, 0);
     }
@@ -278,4 +295,69 @@
     }
 
     private static class PrepareFailedException extends Exception {}
+    public static class UnsupportedCodecException extends Exception {}
+
+    public boolean supportsPlayback(int resid) throws IOException {
+        // First determine if the device supports playback of the requested resource.
+        AssetFileDescriptor fd = mResources.openRawResourceFd(resid);
+        MediaExtractor ex = new MediaExtractor();
+        ex.setDataSource(fd.getFileDescriptor(), fd.getStartOffset(), fd.getLength());
+        MediaFormat format = ex.getTrackFormat(0);
+        String mimeType = format.getString(MediaFormat.KEY_MIME);
+        return hasCodecForMimeType(mimeType, false);
+    }
+
+    public boolean supportsPlayback(String path) throws IOException {
+        MediaExtractor ex = new MediaExtractor();
+        ex.setDataSource(path);
+        MediaFormat format = ex.getTrackFormat(0);
+        String mimeType = format.getString(MediaFormat.KEY_MIME);
+        return hasCodecForMimeType(mimeType, false);
+    }
+
+    public static boolean hasCodecForMimeType(String mimeType, boolean encoder) {
+        MediaCodecList list = new MediaCodecList(MediaCodecList.ALL_CODECS);
+        for (MediaCodecInfo info : list.getCodecInfos()) {
+            if (encoder != info.isEncoder()) {
+                continue;
+            }
+
+            for (String type : info.getSupportedTypes()) {
+                if (type.equalsIgnoreCase(mimeType)) {
+                    LOG.info("Found codec for mimeType=" + mimeType + " codec=" + info.getName());
+                    return true;
+                }
+            }
+        }
+        return false;
+    }
+
+    public static boolean hasH264(boolean encoder) {
+        return hasCodecForMimeType("video/avc", encoder);
+    }
+
+    public static boolean hasHEVC(boolean encoder) {
+        return hasCodecForMimeType("video/hevc", encoder);
+    }
+
+    public static boolean hasH263(boolean encoder) {
+        return hasCodecForMimeType("video/3gpp", encoder);
+    }
+
+    public static boolean hasMpeg4(boolean encoder) {
+        return hasCodecForMimeType("video/mp4v-es", encoder);
+    }
+
+    public static boolean hasVP8(boolean encoder) {
+        return hasCodecForMimeType("video/x-vnd.on2.vp8", encoder);
+    }
+
+    public static boolean hasVP9(boolean encoder) {
+        return hasCodecForMimeType("video/x-vnd.on2.vp9", encoder);
+    }
+
+    public boolean hasAudioOutput() {
+        return getInstrumentation().getTargetContext().getPackageManager()
+            .hasSystemFeature(PackageManager.FEATURE_AUDIO_OUTPUT);
+    }
 }
diff --git a/tests/tests/media/src/android/media/cts/NativeDecoderTest.java b/tests/tests/media/src/android/media/cts/NativeDecoderTest.java
index fc27dfa..76620c1 100644
--- a/tests/tests/media/src/android/media/cts/NativeDecoderTest.java
+++ b/tests/tests/media/src/android/media/cts/NativeDecoderTest.java
@@ -195,10 +195,14 @@
         testDecoder(R.raw.video_1280x720_webm_vp9_309kbps_25fps_vorbis_stereo_128kbps_44100hz);
         testDecoder(R.raw.video_176x144_3gp_h263_300kbps_12fps_aac_mono_24kbps_11025hz);
         testDecoder(R.raw.video_480x360_mp4_mpeg4_860kbps_25fps_aac_stereo_128kbps_44100hz);
-
     }
 
     private void testDecoder(int res) throws Exception {
+        if (!supportsPlayback(res)) {
+            Log.i(TAG, "SKIPPING testDecoder() resid=" + res + " Unsupported decoder.");
+            return;
+        }
+
         AssetFileDescriptor fd = mResources.openRawResourceFd(res);
 
         int[] jdata = getDecodedData(
@@ -382,6 +386,11 @@
     }
 
     private void testVideoPlayback(int res) throws Exception {
+        if (!supportsPlayback(res)) {
+            Log.i(TAG, "SKIPPING testVideoPlayback() resid=" + res + " Unsupported decoder.");
+            return;
+        }
+
         AssetFileDescriptor fd = mResources.openRawResourceFd(res);
 
         boolean ret = testPlaybackNative(mActivity.getSurfaceHolder().getSurface(),
diff --git a/tests/tests/media/src/android/media/cts/RingtoneManagerTest.java b/tests/tests/media/src/android/media/cts/RingtoneManagerTest.java
index dfaabb8..bf47a27 100644
--- a/tests/tests/media/src/android/media/cts/RingtoneManagerTest.java
+++ b/tests/tests/media/src/android/media/cts/RingtoneManagerTest.java
@@ -21,6 +21,7 @@
 import android.app.Activity;
 import android.app.Instrumentation;
 import android.content.Context;
+import android.content.pm.PackageManager;
 import android.database.Cursor;
 import android.media.AudioManager;
 import android.media.Ringtone;
@@ -28,11 +29,13 @@
 import android.net.Uri;
 import android.provider.Settings;
 import android.test.ActivityInstrumentationTestCase2;
+import android.util.Log;
 
 public class RingtoneManagerTest
         extends ActivityInstrumentationTestCase2<RingtonePickerActivity> {
 
     private static final String PKG = "com.android.cts.media";
+    private static final String TAG = "RingtoneManagerTest";
 
     private RingtonePickerActivity mActivity;
     private Instrumentation mInstrumentation;
@@ -74,12 +77,21 @@
         super.tearDown();
     }
 
+    private boolean hasAudioOutput() {
+        return mContext.getPackageManager().hasSystemFeature(PackageManager.FEATURE_AUDIO_OUTPUT);
+    }
+
     public void testConstructors() {
         new RingtoneManager(mActivity);
         new RingtoneManager(mContext);
     }
 
     public void testAccessMethods() {
+        if (!hasAudioOutput()) {
+            Log.i(TAG, "Skipping testAccessMethods(): device doesn't have audio output.");
+            return;
+        }
+
         Cursor c = mRingtoneManager.getCursor();
         assertTrue("Must have at least one ring tone available", c.getCount() > 0);
 
@@ -115,6 +127,11 @@
     }
 
     public void testStopPreviousRingtone() {
+        if (!hasAudioOutput()) {
+            Log.i(TAG, "Skipping testStopPreviousRingtone(): device doesn't have audio output.");
+            return;
+        }
+
         Cursor c = mRingtoneManager.getCursor();
         assertTrue("Must have at least one ring tone available", c.getCount() > 0);
 
diff --git a/tests/tests/media/src/android/media/cts/RingtoneTest.java b/tests/tests/media/src/android/media/cts/RingtoneTest.java
index 6e3a1e9..f5218e3 100644
--- a/tests/tests/media/src/android/media/cts/RingtoneTest.java
+++ b/tests/tests/media/src/android/media/cts/RingtoneTest.java
@@ -16,16 +16,18 @@
 
 package android.media.cts;
 
-
 import android.content.Context;
+import android.content.pm.PackageManager;
 import android.media.AudioManager;
 import android.media.Ringtone;
 import android.media.RingtoneManager;
 import android.net.Uri;
 import android.provider.Settings;
 import android.test.AndroidTestCase;
+import android.util.Log;
 
 public class RingtoneTest extends AndroidTestCase {
+    private static final String TAG = "RingtoneTest";
 
     private Context mContext;
     private Ringtone mRingtone;
@@ -73,7 +75,16 @@
         super.tearDown();
     }
 
+    private boolean hasAudioOutput() {
+        return getContext().getPackageManager()
+            .hasSystemFeature(PackageManager.FEATURE_AUDIO_OUTPUT);
+    }
+
     public void testRingtone() {
+        if (!hasAudioOutput()) {
+            Log.i(TAG, "Skipping testRingtone(): device doesn't have audio output.");
+            return;
+        }
 
         assertNotNull(mRingtone.getTitle(mContext));
         assertTrue(mOriginalStreamType >= 0);
diff --git a/tests/tests/media/src/android/media/cts/StreamingMediaPlayerTest.java b/tests/tests/media/src/android/media/cts/StreamingMediaPlayerTest.java
index 2b93064..6198d5f 100644
--- a/tests/tests/media/src/android/media/cts/StreamingMediaPlayerTest.java
+++ b/tests/tests/media/src/android/media/cts/StreamingMediaPlayerTest.java
@@ -28,6 +28,8 @@
  * Tests of MediaPlayer streaming capabilities.
  */
 public class StreamingMediaPlayerTest extends MediaPlayerTestBase {
+    private static final String TAG = "StreamingMediaPlayerTest";
+
     private CtsTestServer mServer;
 
 /* RTSP tests are more flaky and vulnerable to network condition.
@@ -62,6 +64,11 @@
 */
     // Streaming HTTP video from YouTube
     public void testHTTP_H263_AMR_Video1() throws Exception {
+        if (!hasH263(false)) {
+            Log.i(TAG, "Skipping testHTTP_H263_AMR_Video1(): No codec found.");
+            return;
+        }
+
         playVideoTest("http://redirector.c.youtube.com/videoplayback?id=271de9756065677e"
                 + "&itag=13&source=youtube&ip=0.0.0.0&ipbits=0&expire=19000000000"
                 + "&sparams=ip,ipbits,expire,id,itag,source"
@@ -70,6 +77,11 @@
                 + "&key=test_key1&user=android-device-test", 176, 144);
     }
     public void testHTTP_H263_AMR_Video2() throws Exception {
+        if (!hasH263(false)) {
+            Log.i(TAG, "Skipping testHTTP_H263_AMR_Video2(): No codec found.");
+            return;
+        }
+
         playVideoTest("http://redirector.c.youtube.com/videoplayback?id=c80658495af60617"
                 + "&itag=13&source=youtube&ip=0.0.0.0&ipbits=0&expire=19000000000"
                 + "&sparams=ip,ipbits,expire,id,itag,source"
@@ -79,6 +91,11 @@
     }
 
     public void testHTTP_MPEG4SP_AAC_Video1() throws Exception {
+        if (!hasH264(false)) {
+            Log.i(TAG, "Skipping testHTTP_MPEG4SP_AAC_Video1(): No codec found.");
+            return;
+        }
+
         playVideoTest("http://redirector.c.youtube.com/videoplayback?id=271de9756065677e"
                 + "&itag=17&source=youtube&ip=0.0.0.0&ipbits=0&expire=19000000000"
                 + "&sparams=ip,ipbits,expire,id,itag,source"
@@ -87,6 +104,11 @@
                 + "&key=test_key1&user=android-device-test", 176, 144);
     }
     public void testHTTP_MPEG4SP_AAC_Video2() throws Exception {
+        if (!hasH264(false)) {
+            Log.i(TAG, "Skipping testHTTP_MPEG4SP_AAC_Video2(): No codec found.");
+            return;
+        }
+
         playVideoTest("http://redirector.c.youtube.com/videoplayback?id=c80658495af60617"
                 + "&itag=17&source=youtube&ip=0.0.0.0&ipbits=0&expire=19000000000"
                 + "&sparams=ip,ipbits,expire,id,itag,source"
@@ -96,6 +118,11 @@
     }
 
     public void testHTTP_H264Base_AAC_Video1() throws Exception {
+        if (!hasH264(false)) {
+            Log.i(TAG, "Skipping testHTTP_H264Base_AAC_Video1(): No codec found.");
+            return;
+        }
+
         playVideoTest("http://redirector.c.youtube.com/videoplayback?id=271de9756065677e"
                 + "&itag=18&source=youtube&ip=0.0.0.0&ipbits=0&expire=19000000000"
                 + "&sparams=ip,ipbits,expire,id,itag,source"
@@ -104,6 +131,11 @@
                 + "&key=test_key1&user=android-device-test", 640, 360);
     }
     public void testHTTP_H264Base_AAC_Video2() throws Exception {
+        if (!hasH264(false)) {
+            Log.i(TAG, "Skipping testHTTP_H264Base_AAC_Video2(): No codec found.");
+            return;
+        }
+
         playVideoTest("http://redirector.c.youtube.com/videoplayback?id=c80658495af60617"
                 + "&itag=18&source=youtube&ip=0.0.0.0&ipbits=0&expire=19000000000"
                 + "&sparams=ip,ipbits,expire,id,itag,source"
@@ -114,6 +146,11 @@
 
     // Streaming HLS video from YouTube
     public void testHLS() throws Exception {
+        if (!hasH264(false)) {
+            Log.i(TAG, "Skipping testHLS(): No codec found.");
+            return;
+        }
+
         // Play stream for 60 seconds
         playLiveVideoTest("http://www.youtube.com/api/manifest/hls_variant/id/"
                 + "0168724d02bd9945/itag/5/source/youtube/playlist_type/DVR/ip/"
@@ -165,6 +202,11 @@
                 stream_url = stream_url + "?" + CtsTestServer.NOLENGTH_POSTFIX;
             }
 
+            if (!supportsPlayback(stream_url)) {
+                Log.i(TAG, "Failed to find codec for: '" + stream_url + "'. Skipping test.");
+                return;
+            }
+
             mMediaPlayer.setDataSource(stream_url);
 
             mMediaPlayer.setDisplay(getActivity().getSurfaceHolder());
@@ -252,14 +294,26 @@
     }
 
     public void testPlayHlsStream() throws Throwable {
+        if (!hasH264(false)) {
+            Log.i(TAG, "Skipping testPlayHlsStream(): No codec found.");
+            return;
+        }
         localHlsTest("hls.m3u8", false, false);
     }
 
     public void testPlayHlsStreamWithQueryString() throws Throwable {
+        if (!hasH264(false)) {
+            Log.i(TAG, "Skipping testPlayHlsStreamWithQueryString(): No codec found.");
+            return;
+        }
         localHlsTest("hls.m3u8", true, false);
     }
 
     public void testPlayHlsStreamWithRedirect() throws Throwable {
+        if (!hasH264(false)) {
+            Log.i(TAG, "Skipping testPlayHlsStreamWithRedirect(): No codec found.");
+            return;
+        }
         localHlsTest("hls.m3u8", false, true);
     }
 
diff --git a/tests/tests/mediastress/src/android/mediastress/cts/H263QcifLongPlayerTest.java b/tests/tests/mediastress/src/android/mediastress/cts/H263QcifLongPlayerTest.java
index 482aec9..93dbdbd 100644
--- a/tests/tests/mediastress/src/android/mediastress/cts/H263QcifLongPlayerTest.java
+++ b/tests/tests/mediastress/src/android/mediastress/cts/H263QcifLongPlayerTest.java
@@ -16,6 +16,10 @@
 
 package android.mediastress.cts;
 
+import android.media.CamcorderProfile;
+import android.media.MediaRecorder.AudioEncoder;
+import android.media.MediaRecorder.VideoEncoder;
+
 import com.android.cts.util.TimeoutReq;
 
 public class H263QcifLongPlayerTest extends MediaPlayerStressTest {
@@ -24,6 +28,10 @@
         "bbb_full.ffmpeg.176x144.3gp.h263_56kbps_12fps.libfaac_mono_24kbps_11025Hz.3gp"
     };
 
+    public H263QcifLongPlayerTest() {
+        super(CamcorderProfile.QUALITY_QCIF, VideoEncoder.H263, AudioEncoder.AAC);
+    }
+
     @TimeoutReq(minutes = 11)
     public void testPlay00() throws Exception {
         doTestVideoPlaybackLong(0);
diff --git a/tests/tests/mediastress/src/android/mediastress/cts/H263QcifShortPlayerTest.java b/tests/tests/mediastress/src/android/mediastress/cts/H263QcifShortPlayerTest.java
index 2035869..392a2c8 100644
--- a/tests/tests/mediastress/src/android/mediastress/cts/H263QcifShortPlayerTest.java
+++ b/tests/tests/mediastress/src/android/mediastress/cts/H263QcifShortPlayerTest.java
@@ -16,6 +16,10 @@
 
 package android.mediastress.cts;
 
+import android.media.CamcorderProfile;
+import android.media.MediaRecorder.AudioEncoder;
+import android.media.MediaRecorder.VideoEncoder;
+
 public class H263QcifShortPlayerTest extends MediaPlayerStressTest {
     private final static String VIDEO_PATH_MIDDLE = "bbb_short/176x144/3gp_h263_libfaac/";
     private final String[] mMedias = {
@@ -45,6 +49,10 @@
         "bbb_short.ffmpeg.176x144.3gp.h263_56kbps_25fps.libfaac_stereo_24kbps_22050Hz.3gp"
     };
 
+    public H263QcifShortPlayerTest() {
+        super(CamcorderProfile.QUALITY_QCIF, VideoEncoder.H263, AudioEncoder.AAC);
+    }
+
     public void testPlay00() throws Exception {
         doTestVideoPlaybackShort(0);
     }
diff --git a/tests/tests/mediastress/src/android/mediastress/cts/H264R480x360AacShortPlayerTest.java b/tests/tests/mediastress/src/android/mediastress/cts/H264R480x360AacShortPlayerTest.java
index 12e8f6d..6d0afea 100644
--- a/tests/tests/mediastress/src/android/mediastress/cts/H264R480x360AacShortPlayerTest.java
+++ b/tests/tests/mediastress/src/android/mediastress/cts/H264R480x360AacShortPlayerTest.java
@@ -16,6 +16,10 @@
 
 package android.mediastress.cts;
 
+import android.media.CamcorderProfile;
+import android.media.MediaRecorder.AudioEncoder;
+import android.media.MediaRecorder.VideoEncoder;
+
 public class H264R480x360AacShortPlayerTest extends MediaPlayerStressTest {
     private static final String VIDEO_PATH_MIDDLE = "bbb_short/480x360/mp4_libx264_libfaac/";
     private final String[] mMedias = {
@@ -33,6 +37,10 @@
         "bbb_short.ffmpeg.480x360.mp4.libx264_500kbps_30fps.libfaac_stereo_192kbps_44100Hz.mp4"
     };
 
+    public H264R480x360AacShortPlayerTest() {
+        super(CamcorderProfile.QUALITY_480P, VideoEncoder.H264, AudioEncoder.AAC);
+    }
+
     public void testPlay00() throws Exception {
         doTestVideoPlaybackShort(0);
     }
diff --git a/tests/tests/mediastress/src/android/mediastress/cts/HEVCR480x360AacShortPlayerTest.java b/tests/tests/mediastress/src/android/mediastress/cts/HEVCR480x360AacShortPlayerTest.java
index 78139ce..2099916 100644
--- a/tests/tests/mediastress/src/android/mediastress/cts/HEVCR480x360AacShortPlayerTest.java
+++ b/tests/tests/mediastress/src/android/mediastress/cts/HEVCR480x360AacShortPlayerTest.java
@@ -16,6 +16,10 @@
 
 package android.mediastress.cts;
 
+import android.media.CamcorderProfile;
+import android.media.MediaRecorder.AudioEncoder;
+import android.media.MediaRecorder.VideoEncoder;
+
 public class HEVCR480x360AacShortPlayerTest extends MediaPlayerStressTest {
     private static final String VIDEO_PATH_MIDDLE = "bbb_short/480x360/mp4_libx265_libfaac/";
     private final String[] mMedias = {
@@ -27,6 +31,10 @@
         "bbb_short.fmpeg.480x360.mp4.libx265_325kbps_30fps.libfaac_stereo_128kbps_48000hz.mp4"
     };
 
+    public HEVCR480x360AacShortPlayerTest() {
+        super(CamcorderProfile.QUALITY_480P, VideoEncoder.H264, AudioEncoder.AAC);
+    }
+
     public void testPlay00() throws Exception {
         doTestVideoPlaybackShort(0);
     }
diff --git a/tests/tests/mediastress/src/android/mediastress/cts/MediaPlayerStressTest.java b/tests/tests/mediastress/src/android/mediastress/cts/MediaPlayerStressTest.java
index d980e52..05bfb42 100644
--- a/tests/tests/mediastress/src/android/mediastress/cts/MediaPlayerStressTest.java
+++ b/tests/tests/mediastress/src/android/mediastress/cts/MediaPlayerStressTest.java
@@ -121,6 +121,7 @@
      */
     protected void doTestVideoPlayback(int mediaNumber, int repeatCounter) throws Exception {
         if (!mSupported) {
+            Log.i(TAG, "Not supported!");
             return;
         }
 
diff --git a/tests/tests/mediastress/src/android/mediastress/cts/Vp8R480x360LongPlayerTest.java b/tests/tests/mediastress/src/android/mediastress/cts/Vp8R480x360LongPlayerTest.java
index 372f034..6b43558 100644
--- a/tests/tests/mediastress/src/android/mediastress/cts/Vp8R480x360LongPlayerTest.java
+++ b/tests/tests/mediastress/src/android/mediastress/cts/Vp8R480x360LongPlayerTest.java
@@ -16,12 +16,20 @@
 
 package android.mediastress.cts;
 
+import android.media.CamcorderProfile;
+import android.media.MediaRecorder.AudioEncoder;
+import android.media.MediaRecorder.VideoEncoder;
+
 public class Vp8R480x360LongPlayerTest extends MediaPlayerStressTest {
     private static final String VIDEO_PATH_MIDDLE = "bbb_full/480x360/webm_libvpx_libvorbis/";
     private final String[] mMedias = {
         "bbb_full.ffmpeg.480x360.webm.libvpx_500kbps_25fps.libvorbis_stereo_128kbps_44100Hz.webm"
     };
 
+    public Vp8R480x360LongPlayerTest() {
+        super(CamcorderProfile.QUALITY_480P, VideoEncoder.VP8, AudioEncoder.VORBIS);
+    }
+
     public void testPlay00() throws Exception {
         doTestVideoPlaybackLong(0);
     }
diff --git a/tests/tests/mediastress/src/android/mediastress/cts/Vp8R480x360ShortPlayerTest.java b/tests/tests/mediastress/src/android/mediastress/cts/Vp8R480x360ShortPlayerTest.java
index 30b4d2e..737032b 100644
--- a/tests/tests/mediastress/src/android/mediastress/cts/Vp8R480x360ShortPlayerTest.java
+++ b/tests/tests/mediastress/src/android/mediastress/cts/Vp8R480x360ShortPlayerTest.java
@@ -16,6 +16,10 @@
 
 package android.mediastress.cts;
 
+import android.media.CamcorderProfile;
+import android.media.MediaRecorder.AudioEncoder;
+import android.media.MediaRecorder.VideoEncoder;
+
 public class Vp8R480x360ShortPlayerTest extends MediaPlayerStressTest {
     private static final String VIDEO_PATH_MIDDLE = "bbb_short/480x360/webm_libvpx_libvorbis/";
     private final String[] mMedias = {
@@ -33,6 +37,10 @@
         "bbb_short.ffmpeg.480x360.webm.libvpx_500kbps_30fps.libvorbis_stereo_192kbps_44100Hz.webm"
     };
 
+    public Vp8R480x360ShortPlayerTest() {
+        super(CamcorderProfile.QUALITY_480P, VideoEncoder.VP8, AudioEncoder.VORBIS);
+    }
+
     public void testPlay00() throws Exception {
         doTestVideoPlaybackShort(0);
     }
diff --git a/tests/tests/permission/src/android/permission/cts/NoReadLogsPermissionTest.java b/tests/tests/permission/src/android/permission/cts/NoReadLogsPermissionTest.java
index 8979a07..7b3799d 100644
--- a/tests/tests/permission/src/android/permission/cts/NoReadLogsPermissionTest.java
+++ b/tests/tests/permission/src/android/permission/cts/NoReadLogsPermissionTest.java
@@ -48,7 +48,7 @@
         BufferedReader reader = null;
         try {
             logcatProc = Runtime.getRuntime().exec(new String[]
-                    {"logcat", "-d", "ActivityManager:* *:S" });
+                    {"logcat", "-v", "brief", "-d", "ActivityManager:* *:S" });
 
             reader = new BufferedReader(new InputStreamReader(logcatProc.getInputStream()));
 
diff --git a/tests/tests/print/src/android/print/cts/BasePrintTest.java b/tests/tests/print/src/android/print/cts/BasePrintTest.java
index 1493bc9..c73bb64 100644
--- a/tests/tests/print/src/android/print/cts/BasePrintTest.java
+++ b/tests/tests/print/src/android/print/cts/BasePrintTest.java
@@ -25,6 +25,7 @@
 import static org.mockito.Mockito.when;
 
 import android.content.Context;
+import android.content.pm.PackageManager;
 import android.content.res.Configuration;
 import android.content.res.Resources;
 import android.graphics.pdf.PdfDocument;
@@ -458,4 +459,8 @@
             }
         }
     }
+
+    protected boolean supportsPrinting() {
+        return getActivity().getPackageManager().hasSystemFeature(PackageManager.FEATURE_PRINTING);
+    }
 }
diff --git a/tests/tests/print/src/android/print/cts/PageRangeAdjustmentTest.java b/tests/tests/print/src/android/print/cts/PageRangeAdjustmentTest.java
index 4952cbd..b9fd50a 100644
--- a/tests/tests/print/src/android/print/cts/PageRangeAdjustmentTest.java
+++ b/tests/tests/print/src/android/print/cts/PageRangeAdjustmentTest.java
@@ -62,6 +62,10 @@
     private static final String FIRST_PRINTER = "First printer";
 
     public void testAllPagesWantedAndAllPagesWritten() throws Exception {
+        if (!supportsPrinting()) {
+            return;
+        }
+
         // Create a callback for the target print service.
         PrintServiceCallbacks firstServiceCallbacks = createMockPrintServiceCallbacks(
             new Answer<PrinterDiscoverySessionCallbacks>() {
@@ -161,6 +165,10 @@
     }
 
     public void testSomePagesWantedAndAllPagesWritten() throws Exception {
+        if (!supportsPrinting()) {
+            return;
+        }
+
         // Create a callback for the target print service.
         PrintServiceCallbacks firstServiceCallbacks = createMockPrintServiceCallbacks(
             new Answer<PrinterDiscoverySessionCallbacks>() {
@@ -269,6 +277,10 @@
     }
 
     public void testSomePagesWantedAndSomeMorePagesWritten() throws Exception {
+        if (!supportsPrinting()) {
+            return;
+        }
+
         // Create a callback for the target print service.
         PrintServiceCallbacks firstServiceCallbacks = createMockPrintServiceCallbacks(
             new Answer<PrinterDiscoverySessionCallbacks>() {
@@ -393,6 +405,10 @@
     }
 
     public void testSomePagesWantedAndNotWritten() throws Exception {
+        if (!supportsPrinting()) {
+            return;
+        }
+
         // Create a callback for the target print service.
         PrintServiceCallbacks firstServiceCallbacks = createMockPrintServiceCallbacks(
             new Answer<PrinterDiscoverySessionCallbacks>() {
@@ -481,6 +497,10 @@
     }
 
     public void testWantedPagesAlreadyWrittenForPreview() throws Exception {
+        if (!supportsPrinting()) {
+            return;
+        }
+
         // Create a callback for the target print service.
         PrintServiceCallbacks firstServiceCallbacks = createMockPrintServiceCallbacks(
             new Answer<PrinterDiscoverySessionCallbacks>() {
diff --git a/tests/tests/security/jni/android_security_cts_NetlinkSocket.cpp b/tests/tests/security/jni/android_security_cts_NetlinkSocket.cpp
index 2411f74..de315ea 100644
--- a/tests/tests/security/jni/android_security_cts_NetlinkSocket.cpp
+++ b/tests/tests/security/jni/android_security_cts_NetlinkSocket.cpp
@@ -32,7 +32,7 @@
     int sock = socket(PF_NETLINK, SOCK_DGRAM, NETLINK_KOBJECT_UEVENT);
     if (sock == -1) {
         ALOGE("Can't create socket %s", strerror(errno));
-        jclass SocketException = env->FindClass("java/security/SocketException");
+        jclass SocketException = env->FindClass("java/net/SocketException");
         env->ThrowNew(SocketException, "Can't create socket");
         return;
     }
diff --git a/tests/tests/security/src/android/security/cts/NetlinkSocket.java b/tests/tests/security/src/android/security/cts/NetlinkSocket.java
index 1ea6d26..5ea80ca 100644
--- a/tests/tests/security/src/android/security/cts/NetlinkSocket.java
+++ b/tests/tests/security/src/android/security/cts/NetlinkSocket.java
@@ -18,6 +18,7 @@
 
 import java.io.FileDescriptor;
 import java.io.IOException;
+import java.net.SocketException;
 
 public class NetlinkSocket {
 
@@ -25,7 +26,7 @@
         System.loadLibrary("ctssecurity_jni");
     }
 
-    private static native void create_native(FileDescriptor fd);
+    private static native void create_native(FileDescriptor fd) throws SocketException;
     private static native int sendmsg(FileDescriptor fd, int pid, byte[] bytes);
 
     private FileDescriptor fd = new FileDescriptor();
@@ -33,7 +34,7 @@
     /** no public constructors */
     private NetlinkSocket() { }
 
-    public static NetlinkSocket create() {
+    public static NetlinkSocket create() throws SocketException {
         NetlinkSocket retval = new NetlinkSocket();
         create_native(retval.fd);
         return retval;
diff --git a/tests/tests/security/src/android/security/cts/SELinuxDomainTest.java b/tests/tests/security/src/android/security/cts/SELinuxDomainTest.java
index ee1b027..66054f9 100644
--- a/tests/tests/security/src/android/security/cts/SELinuxDomainTest.java
+++ b/tests/tests/security/src/android/security/cts/SELinuxDomainTest.java
@@ -199,7 +199,7 @@
 
     /* drm server is always present */
     public void testDrmServerDomain() throws FileNotFoundException {
-        assertDomainOne("u:r:drmserver:s0", "/system/bin/drmserver");
+        assertDomainZeroOrOne("u:r:drmserver:s0", "/system/bin/drmserver");
     }
 
     /* Media server is always running */
diff --git a/tests/tests/security/src/android/security/cts/VoldExploitTest.java b/tests/tests/security/src/android/security/cts/VoldExploitTest.java
index edaf82a..103158f 100644
--- a/tests/tests/security/src/android/security/cts/VoldExploitTest.java
+++ b/tests/tests/security/src/android/security/cts/VoldExploitTest.java
@@ -26,6 +26,7 @@
 import java.io.FileReader;
 import java.io.IOException;
 import java.io.UnsupportedEncodingException;
+import java.net.SocketException;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.HashSet;
@@ -103,7 +104,13 @@
           return;
         }
 
-        NetlinkSocket ns = NetlinkSocket.create();
+        NetlinkSocket ns;
+        try {
+            ns = NetlinkSocket.create();
+        } catch (SocketException e) {
+            // Can't create netlink socket. Not vulnerable.
+            return;
+        }
         for (int i : pids) {
             for (String j : devices) {
                 doAttack(ns, i, j);
diff --git a/tests/tests/speech/src/android/speech/tts/cts/TextToSpeechTest.java b/tests/tests/speech/src/android/speech/tts/cts/TextToSpeechTest.java
index 799fd8d..69acdd0 100644
--- a/tests/tests/speech/src/android/speech/tts/cts/TextToSpeechTest.java
+++ b/tests/tests/speech/src/android/speech/tts/cts/TextToSpeechTest.java
@@ -15,6 +15,7 @@
  */
 package android.speech.tts.cts;
 
+import android.content.pm.PackageManager;
 import android.os.Environment;
 import android.speech.tts.TextToSpeech;
 import android.test.AndroidTestCase;
@@ -39,6 +40,15 @@
     protected void setUp() throws Exception {
         super.setUp();
         mTts = TextToSpeechWrapper.createTextToSpeechWrapper(getContext());
+        if (mTts == null) {
+            PackageManager pm = getContext().getPackageManager();
+            if (!pm.hasSystemFeature(PackageManager.FEATURE_AUDIO_OUTPUT)) {
+                // It is OK to have no TTS, when audio-out is not supported.
+                return;
+            } else {
+                fail("FEATURE_AUDIO_OUTPUT is set, but there is no TTS engine");
+            }
+        }
         assertNotNull(mTts);
         assertTrue(checkAndSetLanguageAvailable());
     }
@@ -46,7 +56,9 @@
     @Override
     protected void tearDown() throws Exception {
         super.tearDown();
-        mTts.shutdown();
+        if (mTts != null) {
+            mTts.shutdown();
+        }
     }
 
     private TextToSpeech getTts() {
@@ -83,6 +95,9 @@
     }
 
     public void testSynthesizeToFile() throws Exception {
+        if (mTts == null) {
+            return;
+        }
         File sampleFile = new File(Environment.getExternalStorageDirectory(), SAMPLE_FILE_NAME);
         try {
             assertFalse(sampleFile.exists());
@@ -101,18 +116,27 @@
     }
 
     public void testSpeak() throws Exception {
+        if (mTts == null) {
+            return;
+        }
         int result = getTts().speak(SAMPLE_TEXT, TextToSpeech.QUEUE_FLUSH, createParams());
         assertEquals("speak() failed", TextToSpeech.SUCCESS, result);
         assertTrue("speak() completion timeout", waitForUtterance());
     }
 
     public void testGetEnginesIncludesDefault() throws Exception {
+        if (mTts == null) {
+            return;
+        }
         List<TextToSpeech.EngineInfo> engines = getTts().getEngines();
         assertNotNull("getEngines() returned null", engines);
         assertContainsEngine(getTts().getDefaultEngine(), engines);
     }
 
     public void testGetEnginesIncludesMock() throws Exception {
+        if (mTts == null) {
+            return;
+        }
         List<TextToSpeech.EngineInfo> engines = getTts().getEngines();
         assertNotNull("getEngines() returned null", engines);
         assertContainsEngine(TextToSpeechWrapper.MOCK_TTS_ENGINE, engines);
diff --git a/tests/tests/telephony/AndroidManifest.xml b/tests/tests/telephony/AndroidManifest.xml
index b3ae1a3..31abf12 100644
--- a/tests/tests/telephony/AndroidManifest.xml
+++ b/tests/tests/telephony/AndroidManifest.xml
@@ -28,6 +28,7 @@
     <uses-permission android:name="android.permission.ACCESS_NETWORK_STATE" />
     <uses-permission android:name="android.permission.ACCESS_WIFI_STATE" />
     <uses-permission android:name="android.permission.CHANGE_WIFI_STATE" />
+    <uses-permission android:name="android.permission.BLUETOOTH" />
 
     <application>
         <uses-library android:name="android.test.runner" />
diff --git a/tests/tests/uirendering/res/layout/simple_rect_layout.xml b/tests/tests/uirendering/res/layout/simple_rect_layout.xml
index 24c9b6b..e64c4e9 100644
--- a/tests/tests/uirendering/res/layout/simple_rect_layout.xml
+++ b/tests/tests/uirendering/res/layout/simple_rect_layout.xml
@@ -17,11 +17,10 @@
 <LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
     android:orientation="vertical"
     android:layout_width="match_parent"
-    android:layout_height="match_parent"
-    android:background="#f00">
+    android:layout_height="match_parent">
 
-    <View android:layout_width="180px"
-        android:layout_height="120px"
-        android:background="#0f0" />
+    <View android:layout_width="100px"
+        android:layout_height="100px"
+        android:background="#00f" />
 
 </LinearLayout>
diff --git a/tests/tests/uirendering/src/android/uirendering/cts/testclasses/ExactCanvasTests.java b/tests/tests/uirendering/src/android/uirendering/cts/testclasses/ExactCanvasTests.java
index 3088142..afbad65 100644
--- a/tests/tests/uirendering/src/android/uirendering/cts/testclasses/ExactCanvasTests.java
+++ b/tests/tests/uirendering/src/android/uirendering/cts/testclasses/ExactCanvasTests.java
@@ -16,8 +16,6 @@
 
 package android.uirendering.cts.testclasses;
 
-import com.android.cts.uirendering.R;
-
 import android.graphics.Canvas;
 import android.graphics.Color;
 import android.graphics.Paint;
@@ -31,8 +29,7 @@
 import android.uirendering.cts.bitmapverifiers.RectVerifier;
 import android.uirendering.cts.testinfrastructure.ActivityTestBase;
 import android.uirendering.cts.testinfrastructure.CanvasClient;
-import android.uirendering.cts.testinfrastructure.ViewInitializer;
-import android.view.View;
+import com.android.cts.uirendering.R;
 
 public class ExactCanvasTests extends ActivityTestBase {
     private final BitmapComparer mExactComparer = new ExactComparer();
@@ -212,14 +209,4 @@
                 .addLayout(R.layout.blue_padded_square, null)
                 .runWithVerifier(verifier);
     }
-
-    @SmallTest
-    public void testClipping() {
-        createTest().addLayout(R.layout.simple_red_layout, new ViewInitializer() {
-            @Override
-            public void intializeView(View view) {
-                view.setClipBounds(new Rect(0, 0, 50, 50));
-            }
-        }).runWithComparer(mExactComparer);
-    }
 }
diff --git a/tests/tests/uirendering/src/android/uirendering/cts/testclasses/PathClippingTests.java b/tests/tests/uirendering/src/android/uirendering/cts/testclasses/PathClippingTests.java
new file mode 100644
index 0000000..8df8057
--- /dev/null
+++ b/tests/tests/uirendering/src/android/uirendering/cts/testclasses/PathClippingTests.java
@@ -0,0 +1,146 @@
+package android.uirendering.cts.testclasses;
+
+import android.graphics.Canvas;
+import android.graphics.Color;
+import android.graphics.Paint;
+import android.graphics.Path;
+import android.graphics.Point;
+import android.graphics.Typeface;
+import android.test.suitebuilder.annotation.SmallTest;
+import android.uirendering.cts.bitmapcomparers.MSSIMComparer;
+import android.uirendering.cts.bitmapverifiers.SamplePointVerifier;
+import android.uirendering.cts.testinfrastructure.ActivityTestBase;
+import android.uirendering.cts.testinfrastructure.CanvasClient;
+import android.uirendering.cts.testinfrastructure.ViewInitializer;
+import android.view.View;
+import android.view.ViewGroup;
+import com.android.cts.uirendering.R;
+
+public class PathClippingTests extends ActivityTestBase {
+    // draw circle with whole in it, with stroked circle
+    static final CanvasClient sCircleDrawCanvasClient = new CanvasClient() {
+        @Override
+        public String getDebugString() {
+            return "StrokedCircleDraw";
+        }
+
+        @Override
+        public void draw(Canvas canvas, int width, int height) {
+            Paint paint = new Paint();
+            paint.setAntiAlias(false);
+            paint.setColor(Color.BLUE);
+            paint.setStyle(Paint.Style.STROKE);
+            paint.setStrokeWidth(20);
+            canvas.drawCircle(50, 50, 40, paint);
+        }
+    };
+
+    // draw circle with whole in it, by path operations + path clipping
+    static final CanvasClient sCircleClipCanvasClient = new CanvasClient() {
+        @Override
+        public String getDebugString() {
+            return "CircleClipDraw";
+        }
+
+        @Override
+        public void draw(Canvas canvas, int width, int height) {
+            canvas.save();
+
+            Path path = new Path();
+            path.addCircle(50, 50, 50, Path.Direction.CW);
+            path.addCircle(50, 50, 30, Path.Direction.CCW);
+
+            canvas.clipPath(path);
+            canvas.drawColor(Color.BLUE);
+
+            canvas.restore();
+        }
+    };
+
+    @SmallTest
+    public void testCircleWithCircle() {
+        createTest()
+                .addCanvasClient(sCircleDrawCanvasClient, false)
+                .addCanvasClient(sCircleClipCanvasClient)
+                .runWithComparer(new MSSIMComparer(0.90));
+    }
+
+    @SmallTest
+    public void testCircleWithPoints() {
+        createTest()
+                .addCanvasClient(sCircleClipCanvasClient)
+                .runWithVerifier(new SamplePointVerifier(
+                        new Point[] {
+                                // inside of circle
+                                new Point(50, 50),
+                                // on circle
+                                new Point(50 + 32, 50 + 32),
+                                // outside of circle
+                                new Point(50 + 38, 50 + 38),
+                                new Point(100, 100)
+                        },
+                        new int[] {
+                                Color.WHITE,
+                                Color.BLUE,
+                                Color.WHITE,
+                                Color.WHITE,
+                        }));
+    }
+
+    @SmallTest
+    public void testViewRotate() {
+        createTest()
+                .addLayout(R.layout.blue_padded_layout, new ViewInitializer() {
+                    @Override
+                    public void intializeView(View view) {
+                        ViewGroup rootView = (ViewGroup) view;
+                        rootView.setClipChildren(true);
+                        View childView = rootView.getChildAt(0);
+                        childView.setPivotX(50);
+                        childView.setPivotY(50);
+                        childView.setRotation(45f);
+
+                    }
+                })
+                .runWithVerifier(new SamplePointVerifier(
+                        new Point[] {
+                                // inside of rotated rect
+                                new Point(50, 50),
+                                new Point(50 + 32, 50 + 32),
+                                // outside of rotated rect
+                                new Point(50 + 38, 50 + 38),
+                                new Point(100, 100)
+                        },
+                        new int[] {
+                                Color.BLUE,
+                                Color.BLUE,
+                                Color.WHITE,
+                                Color.WHITE,
+                        }));
+    }
+
+    @SmallTest
+    public void testTextClip() {
+        createTest()
+                .addCanvasClient(new CanvasClient() {
+                    @Override
+                    public void draw(Canvas canvas, int width, int height) {
+                        canvas.save();
+
+                        Path path = new Path();
+                        path.addCircle(0, 50, 50, Path.Direction.CW);
+                        path.addCircle(100, 50, 50, Path.Direction.CW);
+                        canvas.clipPath(path);
+
+                        Paint paint = new Paint();
+                        paint.setAntiAlias(true);
+                        paint.setTextSize(100);
+                        paint.setTypeface(Typeface.defaultFromStyle(Typeface.BOLD));
+                        canvas.drawText("STRING", 0, 100, paint);
+
+                        canvas.restore();
+                    }
+                })
+                .runWithComparer(new MSSIMComparer(0.90));
+    }
+}
diff --git a/tests/tests/uirendering/src/android/uirendering/cts/testclasses/view/UnclippedBlueView.java b/tests/tests/uirendering/src/android/uirendering/cts/testclasses/view/UnclippedBlueView.java
index e2037f7..7a16e3c 100644
--- a/tests/tests/uirendering/src/android/uirendering/cts/testclasses/view/UnclippedBlueView.java
+++ b/tests/tests/uirendering/src/android/uirendering/cts/testclasses/view/UnclippedBlueView.java
@@ -18,12 +18,12 @@
 
     public UnclippedBlueView(Context context, AttributeSet attrs, int defStyleAttr) {
         this(context, attrs, defStyleAttr, 0);
-        setWillNotDraw(false);
     }
 
     public UnclippedBlueView(Context context, AttributeSet attrs,
             int defStyleAttr, int defStyleRes) {
         super(context, attrs, defStyleAttr, defStyleRes);
+        setWillNotDraw(false);
     }
 
     @Override
diff --git a/tests/tests/view/src/android/view/cts/WindowTest.java b/tests/tests/view/src/android/view/cts/WindowTest.java
index ead4d5b..3c5386d 100644
--- a/tests/tests/view/src/android/view/cts/WindowTest.java
+++ b/tests/tests/view/src/android/view/cts/WindowTest.java
@@ -370,7 +370,9 @@
     public void testSetBackgroundDrawable() throws Throwable {
         // DecorView holds the background
         View decor = mWindow.getDecorView();
-        assertEquals(PixelFormat.OPAQUE, decor.getBackground().getOpacity());
+        if (!mWindow.hasFeature(Window.FEATURE_SWIPE_TO_DISMISS)) {
+            assertEquals(PixelFormat.OPAQUE, decor.getBackground().getOpacity());
+        }
         runTestOnUiThread(new Runnable() {
             public void run() {
                 // setBackgroundDrawableResource(int resId) has the same
diff --git a/tests/tests/webkit/src/android/webkit/cts/WebViewSslTest.java b/tests/tests/webkit/src/android/webkit/cts/WebViewSslTest.java
index 378bf6e..dcdeead 100644
--- a/tests/tests/webkit/src/android/webkit/cts/WebViewSslTest.java
+++ b/tests/tests/webkit/src/android/webkit/cts/WebViewSslTest.java
@@ -703,6 +703,9 @@
     }
 
     public void testSecureServerRequestingClientCertDoesNotCancelRequest() throws Throwable {
+        if (!NullWebViewUtils.isWebViewAvailable()) {
+            return;
+        }
         mWebServer = new CtsTestServer(getActivity(), CtsTestServer.SslMode.WANTS_CLIENT_AUTH);
         final String url = mWebServer.getAssetUrl(TestHtmlConstants.HELLO_WORLD_URL);
         final SslErrorWebViewClient webViewClient = new SslErrorWebViewClient(mOnUiThread);
@@ -716,6 +719,9 @@
     }
 
     public void testSecureServerRequiringClientCertDoesCancelRequest() throws Throwable {
+        if (!NullWebViewUtils.isWebViewAvailable()) {
+            return;
+        }
         mWebServer = new CtsTestServer(getActivity(), CtsTestServer.SslMode.NEEDS_CLIENT_AUTH);
         final String url = mWebServer.getAssetUrl(TestHtmlConstants.HELLO_WORLD_URL);
         final SslErrorWebViewClient webViewClient = new SslErrorWebViewClient(mOnUiThread);
@@ -732,6 +738,9 @@
     }
 
     public void testProceedClientCertRequest() throws Throwable {
+        if (!NullWebViewUtils.isWebViewAvailable()) {
+            return;
+        }
         mWebServer = new CtsTestServer(getActivity(), CtsTestServer.SslMode.NEEDS_CLIENT_AUTH);
         String url = mWebServer.getAssetUrl(TestHtmlConstants.HELLO_WORLD_URL);
         final ClientCertWebViewClient webViewClient = new ClientCertWebViewClient(mOnUiThread);
@@ -756,6 +765,9 @@
     }
 
     public void testIgnoreClientCertRequest() throws Throwable {
+        if (!NullWebViewUtils.isWebViewAvailable()) {
+            return;
+        }
         mWebServer = new CtsTestServer(getActivity(), CtsTestServer.SslMode.NEEDS_CLIENT_AUTH);
         String url = mWebServer.getAssetUrl(TestHtmlConstants.HELLO_WORLD_URL);
         final ClientCertWebViewClient webViewClient = new ClientCertWebViewClient(mOnUiThread);
@@ -784,6 +796,9 @@
     }
 
     public void testCancelClientCertRequest() throws Throwable {
+        if (!NullWebViewUtils.isWebViewAvailable()) {
+            return;
+        }
         mWebServer = new CtsTestServer(getActivity(), CtsTestServer.SslMode.NEEDS_CLIENT_AUTH);
         final String url = mWebServer.getAssetUrl(TestHtmlConstants.HELLO_WORLD_URL);
         final ClientCertWebViewClient webViewClient = new ClientCertWebViewClient(mOnUiThread);
@@ -830,6 +845,9 @@
     }
 
     public void testClientCertIssuersReceivedCorrectly() throws Throwable {
+        if (!NullWebViewUtils.isWebViewAvailable()) {
+            return;
+        }
         mWebServer = new CtsTestServer(getActivity(), CtsTestServer.SslMode.NEEDS_CLIENT_AUTH,
                 new TrustManager());
         final String url = mWebServer.getAssetUrl(TestHtmlConstants.HELLO_WORLD_URL);
diff --git a/tests/tests/webkit/src/android/webkit/cts/WebViewTest.java b/tests/tests/webkit/src/android/webkit/cts/WebViewTest.java
index ef64f4d..1e22acc 100755
--- a/tests/tests/webkit/src/android/webkit/cts/WebViewTest.java
+++ b/tests/tests/webkit/src/android/webkit/cts/WebViewTest.java
@@ -474,6 +474,9 @@
 
     @UiThreadTest
     public void testPostUrlWithNonNetworkUrl() throws Exception {
+        if (!NullWebViewUtils.isWebViewAvailable()) {
+            return;
+        }
         final String nonNetworkUrl = "file:///android_asset/" + TestHtmlConstants.HELLO_WORLD_URL;
 
         mOnUiThread.postUrlAndWaitForCompletion(nonNetworkUrl, new byte[1]);
@@ -484,6 +487,9 @@
 
     @UiThreadTest
     public void testPostUrlWithNetworkUrl() throws Exception {
+        if (!NullWebViewUtils.isWebViewAvailable()) {
+            return;
+        }
         startWebServer(false);
         final String networkUrl = mWebServer.getAssetUrl(TestHtmlConstants.HELLO_WORLD_URL);
         final String postDataString = "username=my_username&password=my_password";
@@ -565,6 +571,10 @@
     }
 
     public void testCanInjectHeaders() throws Exception {
+        if (!NullWebViewUtils.isWebViewAvailable()) {
+            return;
+        }
+
         final String X_FOO = "X-foo";
         final String X_FOO_VALUE = "test";
 
@@ -899,6 +909,9 @@
     }
 
     public void testAddJavascriptInterfaceExceptions() throws Exception {
+        if (!NullWebViewUtils.isWebViewAvailable()) {
+            return;
+        }
         WebSettings settings = mOnUiThread.getSettings();
         settings.setJavaScriptEnabled(true);
         settings.setJavaScriptCanOpenWindowsAutomatically(true);
diff --git a/tests/tests/widget/res/layout/textview_layout.xml b/tests/tests/widget/res/layout/textview_layout.xml
index 419bbf9..bf7f757 100644
--- a/tests/tests/widget/res/layout/textview_layout.xml
+++ b/tests/tests/widget/res/layout/textview_layout.xml
@@ -27,6 +27,7 @@
                 android:layout_height="match_parent">
 
             <TextView android:id="@+id/textview_textAttr"
+                    android:fontFamily="@null"
                     android:text="@string/text_view_hello"
                     android:textColor="@drawable/black"
                     android:textColorHighlight="@drawable/yellow"
diff --git a/tools/device-setup/TestDeviceSetup/src/android/tests/getinfo/RootProcessScanner.java b/tools/device-setup/TestDeviceSetup/src/android/tests/getinfo/RootProcessScanner.java
index d8018a1..01ca21b 100644
--- a/tools/device-setup/TestDeviceSetup/src/android/tests/getinfo/RootProcessScanner.java
+++ b/tools/device-setup/TestDeviceSetup/src/android/tests/getinfo/RootProcessScanner.java
@@ -29,12 +29,16 @@
     /** Processes that are allowed to run as root. */
     private static final Pattern ROOT_PROCESS_WHITELIST_PATTERN = getRootProcessWhitelistPattern(
             "debuggerd",
+            "debuggerd64",
+            "healthd",
             "init",
             "installd",
+            "lmkd",
             "netd",
             "servicemanager",
             "ueventd",
             "vold",
+            "watchdogd",
             "zygote"
     );
 
diff --git a/tools/tradefed-host/src/com/android/cts/tradefed/build/CtsBuildProvider.java b/tools/tradefed-host/src/com/android/cts/tradefed/build/CtsBuildProvider.java
index e0cfee1..2ee649d 100644
--- a/tools/tradefed-host/src/com/android/cts/tradefed/build/CtsBuildProvider.java
+++ b/tools/tradefed-host/src/com/android/cts/tradefed/build/CtsBuildProvider.java
@@ -31,7 +31,7 @@
     @Option(name="cts-install-path", description="the path to the cts installation to use")
     private String mCtsRootDirPath = System.getProperty("CTS_ROOT");
 
-    public static final String CTS_BUILD_VERSION = "5.0_r1";
+    public static final String CTS_BUILD_VERSION = "5.0_r1.91";
 
     /**
      * {@inheritDoc}