Merge "Add CTS test for silent package install" into mnc-dev
diff --git a/CtsTestCaseList.mk b/CtsTestCaseList.mk
index 61704a3..c66341d 100644
--- a/CtsTestCaseList.mk
+++ b/CtsTestCaseList.mk
@@ -132,6 +132,7 @@
CtsJobSchedulerDeviceTestCases \
CtsJniTestCases \
CtsKeystoreTestCases \
+ CtsLibcoreLegacy22TestCases \
CtsLocationTestCases \
CtsLocation2TestCases \
CtsMediaStressTestCases \
@@ -166,7 +167,6 @@
CtsUtilTestCases \
CtsViewTestCases \
CtsWebkitTestCases \
- CtsWebGLTestCases \
CtsWidgetTestCases
# All APKs that need to be scanned by the coverage utilities.
diff --git a/apps/CameraITS/pymodules/its/caps.py b/apps/CameraITS/pymodules/its/caps.py
index b97091b..e57ff88 100644
--- a/apps/CameraITS/pymodules/its/caps.py
+++ b/apps/CameraITS/pymodules/its/caps.py
@@ -133,6 +133,17 @@
"""
return len(its.objects.get_available_output_sizes("raw10", props)) > 0
+def raw12(props):
+ """Returns whether a device supports RAW12 output.
+
+ Args:
+ props: Camera properties object.
+
+ Returns:
+ Boolean.
+ """
+ return len(its.objects.get_available_output_sizes("raw12", props)) > 0
+
def sensor_fusion(props):
"""Returns whether the camera and motion sensor timestamps for the device
are in the same time domain and can be compared directly.
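Tests gate on the new RAW12 capability the same way they already gate on raw10; a minimal sketch, assuming an open ItsSession as in the existing scene1 tests:

    import its.caps
    import its.device

    with its.device.ItsSession() as cam:
        props = cam.get_camera_properties()
        # Skip unless the device advertises at least one RAW12 output size.
        its.caps.skip_unless(its.caps.raw12(props))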
diff --git a/apps/CameraITS/pymodules/its/device.py b/apps/CameraITS/pymodules/its/device.py
index 035e70b..e396483 100644
--- a/apps/CameraITS/pymodules/its/device.py
+++ b/apps/CameraITS/pymodules/its/device.py
@@ -368,7 +368,7 @@
The out_surfaces field can specify the width(s), height(s), and
format(s) of the captured image. The formats may be "yuv", "jpeg",
- "dng", "raw", or "raw10". The default is a YUV420 frame ("yuv")
+ "dng", "raw", "raw10", or "raw12". The default is a YUV420 frame ("yuv")
corresponding to a full sensor frame.
Note that one or more surfaces can be specified, allowing a capture to
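For illustration, the new format is requested through out_surfaces exactly like "raw10"; a hedged sketch, assuming a prepared request req and an open session cam:

    # Capture one RAW12 buffer and one YUV frame from the same request.
    cap_raw, cap_yuv = cam.do_capture(req,
            [{"format": "raw12"}, {"format": "yuv"}])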
diff --git a/apps/CameraITS/pymodules/its/image.py b/apps/CameraITS/pymodules/its/image.py
index b3bdb65..03f8ff9 100644
--- a/apps/CameraITS/pymodules/its/image.py
+++ b/apps/CameraITS/pymodules/its/image.py
@@ -64,6 +64,9 @@
if cap["format"] == "raw10":
assert(props is not None)
cap = unpack_raw10_capture(cap, props)
+ if cap["format"] == "raw12":
+ assert(props is not None)
+ cap = unpack_raw12_capture(cap, props)
if cap["format"] == "yuv":
y = cap["data"][0:w*h]
u = cap["data"][w*h:w*h*5/4]
@@ -114,12 +117,12 @@
raise its.error.Error('Invalid raw-10 buffer width')
w = img.shape[1]*4/5
h = img.shape[0]
- # Cut out the 4x8b MSBs and shift to bits [10:2] in 16b words.
+ # Cut out the 4x8b MSBs and shift to bits [9:2] in 16b words.
msbs = numpy.delete(img, numpy.s_[4::5], 1)
msbs = msbs.astype(numpy.uint16)
msbs = numpy.left_shift(msbs, 2)
msbs = msbs.reshape(h,w)
- # Cut out the 4x2b LSBs and put each in bits [2:0] of their own 8b words.
+ # Cut out the 4x2b LSBs and put each in bits [1:0] of their own 8b words.
lsbs = img[::, 4::5].reshape(h,w/4)
lsbs = numpy.right_shift(
numpy.packbits(numpy.unpackbits(lsbs).reshape(h,w/4,4,2),3), 6)
@@ -128,6 +131,56 @@
img16 = numpy.bitwise_or(msbs, lsbs).reshape(h,w)
return img16
+def unpack_raw12_capture(cap, props):
+ """Unpack a raw-12 capture to a raw-16 capture.
+
+ Args:
+ cap: A raw-12 capture object.
+ props: Camera properties object.
+
+ Returns:
+ New capture object with raw-16 data.
+ """
+ # Data is packed as 2x12b pixels in 3 bytes, with the first 2 bytes holding
+ # the MSBs of the pixels, and the 3rd byte holding 2x4b LSBs.
+ w,h = cap["width"], cap["height"]
+ if w % 2 != 0:
+ raise its.error.Error('Invalid raw-12 buffer width')
+ cap = copy.deepcopy(cap)
+ cap["data"] = unpack_raw12_image(cap["data"].reshape(h,w*3/2))
+ cap["format"] = "raw"
+ return cap
+
+def unpack_raw12_image(img):
+ """Unpack a raw-12 image to a raw-16 image.
+
+ Output image will have the 12 LSBs filled in each 16b word, and the 4 MSBs
+ will be set to zero.
+
+ Args:
+ img: A raw-12 image, as a uint8 numpy array.
+
+ Returns:
+ Image as a uint16 numpy array, with all row padding stripped.
+ """
+ if img.shape[1] % 3 != 0:
+ raise its.error.Error('Invalid raw-12 buffer width')
+ w = img.shape[1]*2/3
+ h = img.shape[0]
+ # Cut out the 2x8b MSBs and shift to bits [11:4] in 16b words.
+ msbs = numpy.delete(img, numpy.s_[2::3], 1)
+ msbs = msbs.astype(numpy.uint16)
+ msbs = numpy.left_shift(msbs, 4)
+ msbs = msbs.reshape(h,w)
+ # Cut out the 2x4b LSBs and put each in bits [3:0] of their own 8b words.
+ lsbs = img[::, 2::3].reshape(h,w/2)
+ lsbs = numpy.right_shift(
+ numpy.packbits(numpy.unpackbits(lsbs).reshape(h,w/2,2,4),3), 4)
+ lsbs = lsbs.reshape(h,w)
+ # Fuse the MSBs and LSBs back together
+ img16 = numpy.bitwise_or(msbs, lsbs).reshape(h,w)
+ return img16
+
def convert_capture_to_planes(cap, props=None):
"""Convert a captured image object to separate image planes.
diff --git a/apps/CameraITS/pymodules/its/objects.py b/apps/CameraITS/pymodules/its/objects.py
index 22540b8..bc77a62 100644
--- a/apps/CameraITS/pymodules/its/objects.py
+++ b/apps/CameraITS/pymodules/its/objects.py
@@ -70,7 +70,8 @@
else:
return float(r["numerator"]) / float(r["denominator"])
-def manual_capture_request(sensitivity, exp_time, linear_tonemap=False):
+def manual_capture_request(
+ sensitivity, exp_time, linear_tonemap=False, props=None):
"""Return a capture request with everything set to manual.
Uses identity/unit color correction, and the default tonemap curve.
@@ -82,6 +83,9 @@
with.
linear_tonemap: [Optional] whether a linear tonemap should be used
in this request.
+ props: [Optional] the object returned from
+ its.device.get_camera_properties(). Must be present when
+ linear_tonemap is True.
Returns:
The default manual capture request, ready to be passed to the
@@ -105,10 +109,20 @@
"android.shading.mode": 1
}
if linear_tonemap:
- req["android.tonemap.mode"] = 0
- req["android.tonemap.curveRed"] = [0.0,0.0, 1.0,1.0]
- req["android.tonemap.curveGreen"] = [0.0,0.0, 1.0,1.0]
- req["android.tonemap.curveBlue"] = [0.0,0.0, 1.0,1.0]
+ assert(props is not None)
+ #CONTRAST_CURVE mode
+ if 0 in props["android.tonemap.availableToneMapModes"]:
+ req["android.tonemap.mode"] = 0
+ req["android.tonemap.curveRed"] = [0.0,0.0, 1.0,1.0]
+ req["android.tonemap.curveGreen"] = [0.0,0.0, 1.0,1.0]
+ req["android.tonemap.curveBlue"] = [0.0,0.0, 1.0,1.0]
+ #GAMMA_VALUE mode
+ elif 3 in props["android.tonemap.availableToneMapModes"]:
+ req["android.tonemap.mode"] = 3
+ req["android.tonemap.gamma"] = 1.0
+ else:
+ print "Linear tonemap is not supported"
+ assert(False)
return req
def auto_capture_request():
@@ -142,13 +156,15 @@
"""Return a sorted list of available output sizes for a given format.
Args:
- fmt: the output format, as a string in ["jpg", "yuv", "raw"].
+ fmt: the output format, as a string in
+ ["jpg", "yuv", "raw", "raw10", "raw12"].
props: the object returned from its.device.get_camera_properties().
Returns:
A sorted list of (w,h) tuples (sorted large-to-small).
"""
- fmt_codes = {"raw":0x20, "raw10":0x25, "yuv":0x23, "jpg":0x100, "jpeg":0x100}
+ fmt_codes = {"raw":0x20, "raw10":0x25, "raw12":0x26, "yuv":0x23,
+ "jpg":0x100, "jpeg":0x100}
configs = props['android.scaler.streamConfigurationMap']\
['availableStreamConfigurations']
fmt_configs = [cfg for cfg in configs if cfg['format'] == fmt_codes[fmt]]
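Since linear_tonemap now needs the tonemap capabilities, every call site passes props; a minimal end-to-end sketch mirroring the scene1 tests:

    import its.device
    import its.objects
    import its.target

    with its.device.ItsSession() as cam:
        props = cam.get_camera_properties()
        e, s = its.target.get_target_exposure_combos(cam)["midExposureTime"]
        # props is required whenever linear_tonemap is True; the helper picks
        # CONTRAST_CURVE (mode 0) if available, otherwise GAMMA_VALUE (mode 3).
        req = its.objects.manual_capture_request(s, e, True, props)
        cap = cam.do_capture(req)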
diff --git a/apps/CameraITS/tests/scene1/test_crop_region_raw.py b/apps/CameraITS/tests/scene1/test_crop_region_raw.py
index 189e987..7973755 100644
--- a/apps/CameraITS/tests/scene1/test_crop_region_raw.py
+++ b/apps/CameraITS/tests/scene1/test_crop_region_raw.py
@@ -64,7 +64,7 @@
# Use a manual request with a linear tonemap so that the YUV and RAW
# should look the same (once converted by the its.image module).
e, s = its.target.get_target_exposure_combos(cam)["minSensitivity"]
- req = its.objects.manual_capture_request(s,e, True)
+ req = its.objects.manual_capture_request(s,e, True, props)
cap1_raw, cap1_yuv = cam.do_capture(req, cam.CAP_RAW_YUV)
# Capture with a crop region.
diff --git a/apps/CameraITS/tests/scene1/test_jpeg.py b/apps/CameraITS/tests/scene1/test_jpeg.py
index 25c2038..7bc038d 100644
--- a/apps/CameraITS/tests/scene1/test_jpeg.py
+++ b/apps/CameraITS/tests/scene1/test_jpeg.py
@@ -33,7 +33,7 @@
its.caps.per_frame_control(props))
e, s = its.target.get_target_exposure_combos(cam)["midExposureTime"]
- req = its.objects.manual_capture_request(s, e, True)
+ req = its.objects.manual_capture_request(s, e, True, props)
# YUV
size = its.objects.get_available_output_sizes("yuv", props)[0]
diff --git a/apps/CameraITS/tests/scene1/test_latching.py b/apps/CameraITS/tests/scene1/test_latching.py
index 3bc4356..176f01b 100644
--- a/apps/CameraITS/tests/scene1/test_latching.py
+++ b/apps/CameraITS/tests/scene1/test_latching.py
@@ -45,20 +45,20 @@
b_means = []
reqs = [
- its.objects.manual_capture_request(s, e, True),
- its.objects.manual_capture_request(s, e, True),
- its.objects.manual_capture_request(s*2,e, True),
- its.objects.manual_capture_request(s*2,e, True),
- its.objects.manual_capture_request(s, e, True),
- its.objects.manual_capture_request(s, e, True),
- its.objects.manual_capture_request(s, e*2, True),
- its.objects.manual_capture_request(s, e, True),
- its.objects.manual_capture_request(s*2,e, True),
- its.objects.manual_capture_request(s, e, True),
- its.objects.manual_capture_request(s, e*2, True),
- its.objects.manual_capture_request(s, e, True),
- its.objects.manual_capture_request(s, e*2, True),
- its.objects.manual_capture_request(s, e*2, True),
+ its.objects.manual_capture_request(s, e, True, props),
+ its.objects.manual_capture_request(s, e, True, props),
+ its.objects.manual_capture_request(s*2,e, True, props),
+ its.objects.manual_capture_request(s*2,e, True, props),
+ its.objects.manual_capture_request(s, e, True, props),
+ its.objects.manual_capture_request(s, e, True, props),
+ its.objects.manual_capture_request(s, e*2, True, props),
+ its.objects.manual_capture_request(s, e, True, props),
+ its.objects.manual_capture_request(s*2,e, True, props),
+ its.objects.manual_capture_request(s, e, True, props),
+ its.objects.manual_capture_request(s, e*2, True, props),
+ its.objects.manual_capture_request(s, e, True, props),
+ its.objects.manual_capture_request(s, e*2, True, props),
+ its.objects.manual_capture_request(s, e*2, True, props),
]
caps = cam.do_capture(reqs, fmt)
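The literal request list above could equally be generated from a table of (gain, exposure) multipliers, which makes the latching pattern easier to scan; an optional, behaviour-preserving sketch (not part of this change):

    mults = [(1,1), (1,1), (2,1), (2,1), (1,1), (1,1), (1,2),
             (1,1), (2,1), (1,1), (1,2), (1,1), (1,2), (1,2)]
    reqs = [its.objects.manual_capture_request(s*m_s, e*m_e, True, props)
            for (m_s, m_e) in mults]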
diff --git a/apps/CameraITS/tests/scene1/test_locked_burst.py b/apps/CameraITS/tests/scene1/test_locked_burst.py
index 958fc72..6552c73 100644
--- a/apps/CameraITS/tests/scene1/test_locked_burst.py
+++ b/apps/CameraITS/tests/scene1/test_locked_burst.py
@@ -32,7 +32,8 @@
NAME = os.path.basename(__file__).split(".")[0]
BURST_LEN = 8
- SPREAD_THRESH = 0.005
+ SPREAD_THRESH_MANUAL_SENSOR = 0.005
+ SPREAD_THRESH = 0.03
FPS_MAX_DIFF = 2.0
with its.device.ItsSession() as cam:
@@ -67,8 +68,10 @@
for means in [r_means, g_means, b_means]:
spread = max(means) - min(means)
print "Patch mean spread", spread, \
- " (min/max: ", min(means), "/", max(means), ")"
- assert(spread < SPREAD_THRESH)
+ " (min/max: ", min(means), "/", max(means), ")"
+ threshold = SPREAD_THRESH_MANUAL_SENSOR \
+ if its.caps.manual_sensor(props) else SPREAD_THRESH
+ assert(spread < threshold)
if __name__ == '__main__':
main()
diff --git a/apps/CameraITS/tests/scene1/test_param_color_correction.py b/apps/CameraITS/tests/scene1/test_param_color_correction.py
index b7fdc7b..09b3707 100644
--- a/apps/CameraITS/tests/scene1/test_param_color_correction.py
+++ b/apps/CameraITS/tests/scene1/test_param_color_correction.py
@@ -42,7 +42,7 @@
# Baseline request
e, s = its.target.get_target_exposure_combos(cam)["midSensitivity"]
- req = its.objects.manual_capture_request(s, e, True)
+ req = its.objects.manual_capture_request(s, e, True, props)
req["android.colorCorrection.mode"] = 0
# Transforms:
diff --git a/apps/CameraITS/tests/scene1/test_param_exposure_time.py b/apps/CameraITS/tests/scene1/test_param_exposure_time.py
index e6078d9..0c0aab1 100644
--- a/apps/CameraITS/tests/scene1/test_param_exposure_time.py
+++ b/apps/CameraITS/tests/scene1/test_param_exposure_time.py
@@ -39,7 +39,7 @@
e,s = its.target.get_target_exposure_combos(cam)["midExposureTime"]
for i,e_mult in enumerate([0.8, 0.9, 1.0, 1.1, 1.2]):
- req = its.objects.manual_capture_request(s, e * e_mult, True)
+ req = its.objects.manual_capture_request(s, e * e_mult, True, props)
cap = cam.do_capture(req)
img = its.image.convert_capture_to_rgb_image(cap)
its.image.write_image(
diff --git a/apps/CameraITS/tests/scene1/test_param_flash_mode.py b/apps/CameraITS/tests/scene1/test_param_flash_mode.py
index aae56aa..38f864f 100644
--- a/apps/CameraITS/tests/scene1/test_param_flash_mode.py
+++ b/apps/CameraITS/tests/scene1/test_param_flash_mode.py
@@ -39,7 +39,7 @@
# linear tonemap.
e, s = its.target.get_target_exposure_combos(cam)["midExposureTime"]
e /= 4
- req = its.objects.manual_capture_request(s, e, True)
+ req = its.objects.manual_capture_request(s, e, True, props)
for f in [0,1,2]:
req["android.flash.mode"] = f
diff --git a/apps/CameraITS/tests/scene1/test_param_shading_mode.py b/apps/CameraITS/tests/scene1/test_param_shading_mode.py
new file mode 100644
index 0000000..65b7e97
--- /dev/null
+++ b/apps/CameraITS/tests/scene1/test_param_shading_mode.py
@@ -0,0 +1,109 @@
+# Copyright 2015 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import its.caps
+import its.device
+import its.objects
+import matplotlib
+import numpy
+import os
+import os.path
+import pylab
+
+def main():
+ """Test that the android.shading.mode param is applied.
+
+ Switches shading modes and checks that the lens shading maps are
+ modified as expected.
+ """
+ NAME = os.path.basename(__file__).split(".")[0]
+
+ NUM_SHADING_MODE_SWITCH_LOOPS = 3
+ THRESHOLD_DIFF_RATIO = 0.15
+
+ with its.device.ItsSession() as cam:
+ props = cam.get_camera_properties()
+
+ its.caps.skip_unless(its.caps.per_frame_control(props))
+
+ assert(props.has_key("android.lens.info.shadingMapSize") and
+ props["android.lens.info.shadingMapSize"] != None)
+
+ num_map_gains = props["android.lens.info.shadingMapSize"]["width"] * \
+ props["android.lens.info.shadingMapSize"]["height"] * 4
+
+ # Test 1: Switch shading modes several times and verify:
+ # 1. Lens shading maps with mode OFF are all 1.0
+ # 2. Lens shading maps with mode FAST are similar after switching
+ # shading modes.
+ # 3. Lens shading maps with mode HIGH_QUALITY are similar after
+ # switching shading modes.
+ cam.do_3a()
+
+ # Get the reference lens shading maps for OFF, FAST, and HIGH_QUALITY
+ # in different sessions.
+ # reference_maps[mode]
+ reference_maps = [[] for mode in range(3)]
+ reference_maps[0] = [1.0] * num_map_gains
+ for mode in range(1, 3):
+ req = its.objects.auto_capture_request()
+ req["android.statistics.lensShadingMapMode"] = 1
+ req["android.shading.mode"] = mode
+ reference_maps[mode] = cam.do_capture(req)["metadata"] \
+ ["android.statistics.lensShadingMap"]
+
+ # Get the lens shading maps while switching modes in one session.
+ reqs = []
+ for i in range(NUM_SHADING_MODE_SWITCH_LOOPS):
+ for mode in range(3):
+ req = its.objects.auto_capture_request()
+ req["android.statistics.lensShadingMapMode"] = 1
+ req["android.shading.mode"] = mode
+ reqs.append(req)
+
+ caps = cam.do_capture(reqs)
+
+ # shading_maps[mode][loop]
+ shading_maps = [[[] for loop in range(NUM_SHADING_MODE_SWITCH_LOOPS)]
+ for mode in range(3)]
+
+ # Get the shading maps out of capture results
+ for i in range(len(caps)):
+ shading_maps[i % 3][i / 3] = \
+ caps[i]["metadata"]["android.statistics.lensShadingMap"]
+
+ # Draw the maps
+ for mode in range(3):
+ for i in range(NUM_SHADING_MODE_SWITCH_LOOPS):
+ pylab.clf()
+ pylab.plot(range(num_map_gains), shading_maps[mode][i], 'r')
+ pylab.plot(range(num_map_gains), reference_maps[mode], 'g')
+ pylab.xlim([0, num_map_gains])
+ pylab.ylim([0.9, 4.0])
+ matplotlib.pyplot.savefig("%s_ls_maps_mode_%d_loop_%d.png" %
+ (NAME, mode, i))
+
+ print "Verifying lens shading maps with mode OFF are all 1.0"
+ for i in range(NUM_SHADING_MODE_SWITCH_LOOPS):
+ assert(numpy.allclose(shading_maps[0][i], reference_maps[0]))
+
+ for mode in range(1, 3):
+ print "Verifying lens shading maps with mode", mode, "are similar"
+ for i in range(NUM_SHADING_MODE_SWITCH_LOOPS):
+ assert(numpy.allclose(shading_maps[mode][i],
+ reference_maps[mode],
+ THRESHOLD_DIFF_RATIO))
+
+if __name__ == '__main__':
+ main()
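One subtlety in the comparisons above: the third positional argument of numpy.allclose is rtol, so THRESHOLD_DIFF_RATIO acts as a relative tolerance per gain (numpy's default atol of 1e-08 still applies). A tiny illustration with made-up values:

    import numpy
    numpy.allclose([1.10], [1.00], 0.15)   # True:  |1.10 - 1.00| <= 0.15 * 1.00
    numpy.allclose([1.20], [1.00], 0.15)   # False: |1.20 - 1.00| >  0.15 * 1.00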
diff --git a/apps/CameraITS/tests/scene1/test_tonemap_sequence.py b/apps/CameraITS/tests/scene1/test_tonemap_sequence.py
index 18ca506..7c87ca2 100644
--- a/apps/CameraITS/tests/scene1/test_tonemap_sequence.py
+++ b/apps/CameraITS/tests/scene1/test_tonemap_sequence.py
@@ -40,7 +40,7 @@
means = []
# Capture 3 manual shots with a linear tonemap.
- req = its.objects.manual_capture_request(sens, exp_time, True)
+ req = its.objects.manual_capture_request(sens, exp_time, True, props)
for i in [0,1,2]:
cap = cam.do_capture(req)
img = its.image.convert_capture_to_rgb_image(cap)
diff --git a/apps/CameraITS/tests/scene1/test_yuv_jpeg_all.py b/apps/CameraITS/tests/scene1/test_yuv_jpeg_all.py
index 1b278ef..0c428fc 100644
--- a/apps/CameraITS/tests/scene1/test_yuv_jpeg_all.py
+++ b/apps/CameraITS/tests/scene1/test_yuv_jpeg_all.py
@@ -35,7 +35,7 @@
# Use a manual request with a linear tonemap so that the YUV and JPEG
# should look the same (once converted by the its.image module).
e, s = its.target.get_target_exposure_combos(cam)["midExposureTime"]
- req = its.objects.manual_capture_request(s, e, True)
+ req = its.objects.manual_capture_request(s, e, True, props)
rgbs = []
diff --git a/apps/CameraITS/tests/scene1/test_yuv_plus_jpeg.py b/apps/CameraITS/tests/scene1/test_yuv_plus_jpeg.py
index 6daa243..9ce8d76 100644
--- a/apps/CameraITS/tests/scene1/test_yuv_plus_jpeg.py
+++ b/apps/CameraITS/tests/scene1/test_yuv_plus_jpeg.py
@@ -37,7 +37,7 @@
# Use a manual request with a linear tonemap so that the YUV and JPEG
# should look the same (once converted by the its.image module).
e, s = its.target.get_target_exposure_combos(cam)["midExposureTime"]
- req = its.objects.manual_capture_request(s, e, True)
+ req = its.objects.manual_capture_request(s, e, True, props)
cap_yuv, cap_jpeg = cam.do_capture(req, [fmt_yuv, fmt_jpeg])
diff --git a/apps/CameraITS/tests/scene1/test_yuv_plus_raw.py b/apps/CameraITS/tests/scene1/test_yuv_plus_raw.py
index eb01c1a..f13801b 100644
--- a/apps/CameraITS/tests/scene1/test_yuv_plus_raw.py
+++ b/apps/CameraITS/tests/scene1/test_yuv_plus_raw.py
@@ -36,7 +36,7 @@
# Use a manual request with a linear tonemap so that the YUV and RAW
# should look the same (once converted by the its.image module).
e, s = its.target.get_target_exposure_combos(cam)["midExposureTime"]
- req = its.objects.manual_capture_request(s, e, True)
+ req = its.objects.manual_capture_request(s, e, True, props)
cap_raw, cap_yuv = cam.do_capture(req, cam.CAP_RAW_YUV)
diff --git a/apps/CameraITS/tests/scene1/test_yuv_plus_raw10.py b/apps/CameraITS/tests/scene1/test_yuv_plus_raw10.py
index 910a8ea..e52946d 100644
--- a/apps/CameraITS/tests/scene1/test_yuv_plus_raw10.py
+++ b/apps/CameraITS/tests/scene1/test_yuv_plus_raw10.py
@@ -36,7 +36,7 @@
# Use a manual request with a linear tonemap so that the YUV and RAW
# should look the same (once converted by the its.image module).
e, s = its.target.get_target_exposure_combos(cam)["midExposureTime"]
- req = its.objects.manual_capture_request(s, e, True)
+ req = its.objects.manual_capture_request(s, e, True, props)
cap_raw, cap_yuv = cam.do_capture(req,
[{"format":"raw10"}, {"format":"yuv"}])
diff --git a/apps/CameraITS/tests/scene1/test_yuv_plus_raw12.py b/apps/CameraITS/tests/scene1/test_yuv_plus_raw12.py
new file mode 100644
index 0000000..c5c3c73
--- /dev/null
+++ b/apps/CameraITS/tests/scene1/test_yuv_plus_raw12.py
@@ -0,0 +1,63 @@
+# Copyright 2015 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import its.image
+import its.caps
+import its.device
+import its.objects
+import its.target
+import os.path
+import math
+
+def main():
+ """Test capturing a single frame as both RAW12 and YUV outputs.
+ """
+ NAME = os.path.basename(__file__).split(".")[0]
+
+ THRESHOLD_MAX_RMS_DIFF = 0.035
+
+ with its.device.ItsSession() as cam:
+ props = cam.get_camera_properties()
+ its.caps.skip_unless(its.caps.compute_target_exposure(props) and
+ its.caps.raw12(props) and
+ its.caps.per_frame_control(props))
+
+ # Use a manual request with a linear tonemap so that the YUV and RAW
+ # should look the same (once converted by the its.image module).
+ e, s = its.target.get_target_exposure_combos(cam)["midExposureTime"]
+ req = its.objects.manual_capture_request(s, e, True, props)
+
+ cap_raw, cap_yuv = cam.do_capture(req,
+ [{"format":"raw12"}, {"format":"yuv"}])
+
+ img = its.image.convert_capture_to_rgb_image(cap_yuv)
+ its.image.write_image(img, "%s_yuv.jpg" % (NAME), True)
+ tile = its.image.get_image_patch(img, 0.45, 0.45, 0.1, 0.1)
+ rgb0 = its.image.compute_image_means(tile)
+
+ # Raw shots are 1/2 x 1/2 smaller after conversion to RGB, so scale the
+ # tile appropriately.
+ img = its.image.convert_capture_to_rgb_image(cap_raw, props=props)
+ its.image.write_image(img, "%s_raw.jpg" % (NAME), True)
+ tile = its.image.get_image_patch(img, 0.475, 0.475, 0.05, 0.05)
+ rgb1 = its.image.compute_image_means(tile)
+
+ rms_diff = math.sqrt(
+ sum([pow(rgb0[i] - rgb1[i], 2.0) for i in range(3)]) / 3.0)
+ print "RMS difference:", rms_diff
+ assert(rms_diff < THRESHOLD_MAX_RMS_DIFF)
+
+if __name__ == '__main__':
+ main()
+
diff --git a/apps/CtsVerifier/Android.mk b/apps/CtsVerifier/Android.mk
index 227c6cb..2800fd1 100644
--- a/apps/CtsVerifier/Android.mk
+++ b/apps/CtsVerifier/Android.mk
@@ -26,11 +26,14 @@
LOCAL_SRC_FILES := $(call all-java-files-under, src) $(call all-Iaidl-files-under, src)
LOCAL_STATIC_JAVA_LIBRARIES := android-ex-camera2 \
+ android-support-v4 \
compatibility-common-util-devicesidelib_v2 \
cts-sensors-tests \
+ cts-verifier-facilities \
ctstestrunner \
apache-commons-math \
androidplot \
+ ctsverifier-opencv \
LOCAL_PACKAGE_NAME := CtsVerifier
@@ -44,6 +47,16 @@
include $(BUILD_PACKAGE)
+
+# opencv library
+include $(CLEAR_VARS)
+
+LOCAL_PREBUILT_STATIC_JAVA_LIBRARIES := \
+ ctsverifier-opencv:libs/opencv-android.jar
+
+include $(BUILD_MULTI_PREBUILT)
+
+
notification-bot := $(call intermediates-dir-for,APPS,NotificationBot)/package.apk
# Builds and launches CTS Verifier on a device.
@@ -102,3 +115,18 @@
endif
include $(call all-makefiles-under,$(LOCAL_PATH))
+
+# Build CTS verifier facilities as a library.
+
+include $(CLEAR_VARS)
+
+LOCAL_MODULE_TAGS := optional
+
+LOCAL_MODULE := cts-verifier-facilities
+
+LOCAL_SRC_FILES := $(call all-java-files-under, src) $(call all-Iaidl-files-under, src)
+
+LOCAL_STATIC_JAVA_LIBRARIES := compatibility-common-util-devicesidelib_v2
+
+include $(BUILD_STATIC_JAVA_LIBRARY)
+
diff --git a/apps/CtsVerifier/AndroidManifest.xml b/apps/CtsVerifier/AndroidManifest.xml
index 96b5676..4226bf2 100644
--- a/apps/CtsVerifier/AndroidManifest.xml
+++ b/apps/CtsVerifier/AndroidManifest.xml
@@ -720,6 +720,33 @@
android:value="android.hardware.sensor.compass" />
</activity>
+ <activity
+ android:name=".sensors.RVCVXCheckTestActivity"
+ android:keepScreenOn="true"
+ android:label="@string/snsr_rvcvxchk_test"
+ android:screenOrientation="locked" >
+ <intent-filter>
+ <action android:name="android.intent.action.MAIN" />
+ <category android:name="android.cts.intent.category.MANUAL_TEST"/>
+ </intent-filter>
+
+ <meta-data
+ android:name="test_category"
+ android:value="@string/test_category_sensors" />
+ <meta-data
+ android:name="test_required_features"
+ android:value="android.hardware.sensor.accelerometer:android.hardware.sensor.gyroscope:android.hardware.sensor.compass:android.hardware.camera.any" />
+ <meta-data android:name="test_excluded_features"
+ android:value="android.hardware.type.television" />
+ </activity>
+ <activity
+ android:name=".sensors.RVCVRecordActivity"
+ android:keepScreenOn="true"
+ android:label="@string/snsr_rvcvxchk_test_rec"
+ android:screenOrientation="locked" >
+ </activity>
+
+
<!-- TODO: enable when a full set of verifications can be implemented -->
<!--activity android:name=".sensors.RotationVectorTestActivity"
android:label="@string/snsr_rot_vec_test"
@@ -1279,10 +1306,23 @@
<action android:name="com.android.cts.verifier.managedprovisioning.BYOD_REMOVE" />
<action android:name="com.android.cts.verifier.managedprovisioning.BYOD_INSTALL_APK" />
<action android:name="com.android.cts.verifier.managedprovisioning.action.CHECK_INTENT_FILTERS" />
+ <action android:name="com.android.cts.verifier.managedprovisioning.BYOD_CAPTURE_AND_CHECK_IMAGE" />
+ <action android:name="com.android.cts.verifier.managedprovisioning.BYOD_CAPTURE_AND_CHECK_VIDEO" />
+ <action android:name="com.android.cts.verifier.managedprovisioning.BYOD_CAPTURE_AND_CHECK_AUDIO" />
<category android:name="android.intent.category.DEFAULT"></category>
</intent-filter>
</activity>
+ <provider
+ android:name="android.support.v4.content.FileProvider"
+ android:authorities="com.android.cts.verifier.managedprovisioning.fileprovider"
+ android:grantUriPermissions="true"
+ android:exported="false">
+ <meta-data
+ android:name="android.support.FILE_PROVIDER_PATHS"
+ android:resource="@xml/filepaths" />
+ </provider>
+
<activity android:name=".managedprovisioning.ByodIconSamplerActivity">
<intent-filter>
<action android:name="com.android.cts.verifier.managedprovisioning.BYOD_SAMPLE_ICON" />
@@ -1425,6 +1465,17 @@
<meta-data android:name="test_required_features" android:value="android.hardware.microphone" />
</activity>
+ <activity android:name=".audio.HifiUltrasoundSpeakerTestActivity"
+ android:label="@string/hifi_ultrasound_speaker_test"
+ android:screenOrientation="locked">
+ <intent-filter>
+ <action android:name="android.intent.action.MAIN" />
+ <category android:name="android.cts.intent.category.MANUAL_TEST" />
+ </intent-filter>
+ <meta-data android:name="test_category" android:value="@string/test_category_audio" />
+ <meta-data android:name="test_required_features" android:value="android.hardware.microphone" />
+ </activity>
+
<service android:name=".tv.MockTvInputService"
android:permission="android.permission.BIND_TV_INPUT">
<intent-filter>
diff --git a/apps/CtsVerifier/libs/opencv-android.jar b/apps/CtsVerifier/libs/opencv-android.jar
new file mode 100644
index 0000000..1c13eee
--- /dev/null
+++ b/apps/CtsVerifier/libs/opencv-android.jar
Binary files differ
diff --git a/apps/CtsVerifier/libs/opencv-android_LICENSE b/apps/CtsVerifier/libs/opencv-android_LICENSE
new file mode 100644
index 0000000..5e32d88
--- /dev/null
+++ b/apps/CtsVerifier/libs/opencv-android_LICENSE
@@ -0,0 +1,33 @@
+By downloading, copying, installing or using the software you agree to this license.
+If you do not agree to this license, do not download, install,
+copy or use the software.
+
+
+ License Agreement
+ For Open Source Computer Vision Library
+ (3-clause BSD License)
+
+Redistribution and use in source and binary forms, with or without modification,
+are permitted provided that the following conditions are met:
+
+ * Redistributions of source code must retain the above copyright notice,
+ this list of conditions and the following disclaimer.
+
+ * Redistributions in binary form must reproduce the above copyright notice,
+ this list of conditions and the following disclaimer in the documentation
+ and/or other materials provided with the distribution.
+
+ * Neither the names of the copyright holders nor the names of the contributors
+ may be used to endorse or promote products derived from this software
+ without specific prior written permission.
+
+This software is provided by the copyright holders and contributors "as is" and
+any express or implied warranties, including, but not limited to, the implied
+warranties of merchantability and fitness for a particular purpose are disclaimed.
+In no event shall copyright holders or contributors be liable for any direct,
+indirect, incidental, special, exemplary, or consequential damages
+(including, but not limited to, procurement of substitute goods or services;
+loss of use, data, or profits; or business interruption) however caused
+and on any theory of liability, whether in contract, strict liability,
+or tort (including negligence or otherwise) arising in any way out of
+the use of this software, even if advised of the possibility of such damage.
diff --git a/apps/CtsVerifier/res/drawable/prompt_x.png b/apps/CtsVerifier/res/drawable/prompt_x.png
new file mode 100644
index 0000000..64302dc
--- /dev/null
+++ b/apps/CtsVerifier/res/drawable/prompt_x.png
Binary files differ
diff --git a/apps/CtsVerifier/res/drawable/prompt_y.png b/apps/CtsVerifier/res/drawable/prompt_y.png
new file mode 100644
index 0000000..01926b5
--- /dev/null
+++ b/apps/CtsVerifier/res/drawable/prompt_y.png
Binary files differ
diff --git a/apps/CtsVerifier/res/drawable/prompt_z.png b/apps/CtsVerifier/res/drawable/prompt_z.png
new file mode 100644
index 0000000..f4d86d6
--- /dev/null
+++ b/apps/CtsVerifier/res/drawable/prompt_z.png
Binary files differ
diff --git a/apps/CtsVerifier/res/layout/byod_present_media.xml b/apps/CtsVerifier/res/layout/byod_present_media.xml
new file mode 100644
index 0000000..f6c7eb3
--- /dev/null
+++ b/apps/CtsVerifier/res/layout/byod_present_media.xml
@@ -0,0 +1,52 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright (C) 2015 The Android Open Source Project
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
+ android:layout_width="match_parent"
+ android:layout_height="match_parent"
+ android:orientation="vertical">
+
+ <ImageView android:id="@+id/imageView"
+ android:layout_width="match_parent"
+ android:layout_height="match_parent"
+ android:layout_weight="1"
+ android:scaleType="fitCenter"
+ android:minHeight="300dp"
+ android:minWidth="300dp"
+ android:visibility="gone"/>
+
+ <VideoView android:id="@+id/videoView"
+ android:layout_width="match_parent"
+ android:layout_height="match_parent"
+ android:layout_weight="1"
+ android:minHeight="300dp"
+ android:minWidth="300dp"
+ android:layout_gravity="center"
+ android:visibility="gone"/>
+
+ <Button android:id="@+id/playButton"
+ android:layout_width="match_parent"
+ android:layout_height="wrap_content"
+ android:layout_weight="0"
+ android:text="@string/provisioning_byod_play"
+ android:visibility="gone"/>
+
+ <Button android:id="@+id/dismissButton"
+ android:layout_width="match_parent"
+ android:layout_height="wrap_content"
+ android:layout_weight="0"
+ android:text="@string/provisioning_byod_dismiss_result_dialog"/>
+
+</LinearLayout>
diff --git a/apps/CtsVerifier/res/layout/cam_preview_overlay.xml b/apps/CtsVerifier/res/layout/cam_preview_overlay.xml
new file mode 100644
index 0000000..41bbeb1
--- /dev/null
+++ b/apps/CtsVerifier/res/layout/cam_preview_overlay.xml
@@ -0,0 +1,39 @@
+<?xml version="1.0" encoding="utf-8"?>
+<RelativeLayout xmlns:android="http://schemas.android.com/apk/res/android"
+ android:orientation="vertical" android:layout_width="match_parent"
+ android:layout_height="match_parent"
+ android:keepScreenOn="true">
+ <view
+ android:layout_width="match_parent"
+ android:layout_height="match_parent"
+ class="com.android.cts.verifier.sensors.RVCVCameraPreview"
+ android:id="@+id/cam_preview"
+ android:layout_centerVertical="true"
+ android:layout_centerHorizontal="true" />
+
+ <!--
+ <ImageView
+ android:layout_width="match_parent"
+ android:layout_height="match_parent"
+ android:layout_centerVertical="true"
+ android:id="@+id/cam_overlay"
+ android:src="@drawable/icon"
+ android:scaleType="fitStart"
+ />
+ -->
+ <view
+ android:layout_width="match_parent"
+ android:layout_height="match_parent"
+ class="com.android.cts.verifier.sensors.MotionIndicatorView"
+ android:id="@+id/cam_indicator"
+ android:layout_centerVertical="true"
+ android:layout_centerHorizontal="true" />
+
+ <ImageView
+ android:layout_width="wrap_content"
+ android:layout_height="wrap_content"
+ android:layout_centerInParent="true"
+ android:id="@+id/cam_overlay"
+ android:scaleType="fitStart"
+ />
+</RelativeLayout>
\ No newline at end of file
diff --git a/apps/CtsVerifier/res/layout/test_list_footer.xml b/apps/CtsVerifier/res/layout/test_list_footer.xml
index fdb8e43..cb73ed1 100644
--- a/apps/CtsVerifier/res/layout/test_list_footer.xml
+++ b/apps/CtsVerifier/res/layout/test_list_footer.xml
@@ -17,22 +17,26 @@
-->
<GridLayout xmlns:android="http://schemas.android.com/apk/res/android"
android:orientation="horizontal"
+ android:columnCount="@integer/test_list_footer_button_count"
android:layout_width="match_parent"
android:layout_height="wrap_content">
<Button
android:id="@+id/clear"
android:text="@string/clear"
+ android:layout_gravity="center"
android:layout_width="wrap_content"
android:layout_height="wrap_content" />
<Button
android:id="@+id/view"
android:text="@string/view"
+ android:layout_gravity="center"
android:layout_width="wrap_content"
android:layout_height="wrap_content" />
<Button
android:id="@+id/export"
android:text="@string/export"
+ android:layout_gravity="center"
android:layout_width="wrap_content"
android:layout_height="wrap_content" />
</GridLayout>
diff --git a/apps/CtsVerifier/res/raw/next_axis.mp3 b/apps/CtsVerifier/res/raw/next_axis.mp3
new file mode 100644
index 0000000..0a3174d
--- /dev/null
+++ b/apps/CtsVerifier/res/raw/next_axis.mp3
Binary files differ
diff --git a/apps/CtsVerifier/res/values-small/integers.xml b/apps/CtsVerifier/res/values-small/integers.xml
new file mode 100644
index 0000000..274db44
--- /dev/null
+++ b/apps/CtsVerifier/res/values-small/integers.xml
@@ -0,0 +1,18 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright (C) 2015 The Android Open Source Project
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<resources>
+ <integer name="test_list_footer_button_count">1</integer>
+</resources>
diff --git a/apps/CtsVerifier/res/values/integers.xml b/apps/CtsVerifier/res/values/integers.xml
new file mode 100644
index 0000000..2ced54b
--- /dev/null
+++ b/apps/CtsVerifier/res/values/integers.xml
@@ -0,0 +1,18 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright (C) 2015 The Android Open Source Project
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<resources>
+ <integer name="test_list_footer_button_count">3</integer>
+</resources>
diff --git a/apps/CtsVerifier/res/values/strings.xml b/apps/CtsVerifier/res/values/strings.xml
index e70fa6d..9f8f4c1 100644
--- a/apps/CtsVerifier/res/values/strings.xml
+++ b/apps/CtsVerifier/res/values/strings.xml
@@ -287,20 +287,46 @@
<string name="empty"></string>
<!-- Strings for HifiUltrasoundTestActivity -->
- <string name="hifi_ultrasound_test">Hifi Ultrasound Test</string>
- <string name="hifi_ultrasound_test_info">This is a test for near-ultrasound response.\n
- This test requires two devices, one as recording device, one as playback device.\n</string>
- <string name="hifi_ultrasound_test_play">GENERATE</string>
+ <string name="hifi_ultrasound_test">Hifi Ultrasound Test (microphone)</string>
+ <string name="hifi_ultrasound_test_info">
+ This is a test for microphone near-ultrasound (18500Hz - 20000Hz) response.\n
+ This test requires two devices.\n</string>
+ <string name="hifi_ultrasound_test_play">PLAY</string>
<string name="hifi_ultrasound_test_record">RECORD</string>
- <string name="hifi_ultrasound_test_playback">PLAY</string>
<string name="hifi_ultrasound_test_plot">PLOT</string>
<string name="hifi_ultrasound_test_dismiss">DISMISS</string>
<string name="hifi_ultrasound_test_instruction1">
- Set the volume of the playback device at 70% and hold it with one hand.\n
- Hold the recording device with the other hand\n
- Press the RECORD button on the recording device, the GENERATE button on the playback device simultaneously.\n</string>
+ Set the volume of the reference device at 70% and hold it with one hand.\n
+ Hold the testing device with the other hand.\n
+ Press the RECORD button on the testing device, then the PLAY button on the reference device within one second.\n
+ After the test, report result on the testing device.\n</string>
<string name="hifi_ultrasound_test_pass">PASS</string>
<string name="hifi_ultrasound_test_fail">FAIL</string>
+ <string name="hifi_ultrasound_test_default_false_string">false</string>
+ <string name="hifi_ultrasound_test_mic_prop">persist.audio.mic.ultrasound</string>
+ <string name="hifi_ultrasound_test_spkr_prop">persist.audio.spkr.ultrasound</string>
+ <string name="hifi_ultrasound_test_mic_no_support">
+ Device does not support near-ultrasound recording.\n
+ Please click pass if this is the testing device.\n</string>
+ <string name="hifi_ultrasound_test_spkr_no_support">
+ Device does not support near-ultrasound playback.\n
+ If this is your reference device, please use a different reference device.\n</string>
+
+ <string name="hifi_ultrasound_speaker_test">Hifi Ultrasound Test (speaker)</string>
+ <string name="hifi_ultrasound_speaker_test_info">
+ This is a test for speaker near-ultrasound (18500Hz - 20000Hz) response.\n
+ This test requires two devices.\n</string>
+ <string name="hifi_ultrasound_speaker_test_instruction1">
+ Set the volume of the testing device at 70% and hold it with one hand.\n
+ Hold the reference device with the other hand.\n
+ Press the RECORD button on the reference device, then the PLAY button on the testing device within one second.\n
+ After the test, report result on the testing device.\n</string>
+ <string name="hifi_ultrasound_speaker_test_mic_no_support">
+ Device does not support near-ultrasound recording.\n
+ If this is your reference device, please use a different reference device.\n</string>
+ <string name="hifi_ultrasound_speaker_test_spkr_no_support">
+ Device does not support near-ultrasound playback.\n
+ Please click pass if this is the testing device.\n</string>
<!-- Strings for Location tests -->
<string name="location_gps_test">GPS Test</string>
@@ -500,6 +526,8 @@
<string name="snsr_test_skipped">SKIPPED</string>
<string name="snsr_test_fail">FAIL</string>
<string name="snsr_execution_time">Test execution time %1$s sec</string>
+ <string name="snsr_rvcvxchk_test">Rotation Vector CV XCheck</string>
+ <string name="snsr_rvcvxchk_test_rec">Rotation Vector CV XCheck Recording</string>
<!-- Strings to interact with users in Sensor Tests -->
<string name="snsr_test_play_sound">A sound will be played once the verification is complete...</string>
@@ -1124,6 +1152,39 @@
2. Verify that the installation of the package is refused.
</string>
+ <string name="provisioning_byod_capture_image_support">Camera support cross profile image capture</string>
+ <string name="provisioning_byod_capture_image_support_info">
+ This test verifies that images can be captured from the managed profile using the primary profile camera.\n
+ 1. Capture a picture using the camera.\n
+ 2. Verify that the captured picture is shown.\n
+ 3. Click on the close button.
+ </string>
+ <string name="provisioning_byod_capture_video_support">Camera support cross profile video capture</string>
+ <string name="provisioning_byod_capture_video_support_info">
+ This test verifies that videos can be captured from the managed profile using the primary profile camera.\n
+ 1. Capture a video using the camera.\n
+ 2. Click on the play button.\n
+ 3. Verify that the captured video is played.\n
+ 4. Click on the close button.
+ </string>
+ <string name="provisioning_byod_capture_audio_support">Sound recorder support cross profile audio capture</string>
+ <string name="provisioning_byod_capture_audio_support_info">
+ This test verifies that audio can be captured from the managed profile using the primary profile sound recorder.\n
+ 1. Capture audio.\n
+ 2. Click on the play button.\n
+ 3. Verify that the captured audio is played.\n
+ 4. Click on the close button.\n
+ </string>
+ <string name="provisioning_byod_dismiss_result_dialog">Close</string>
+ <string name="provisioning_byod_play">Play</string>
+ <string name="provisioning_byod_verify_image_title">Verify captured image</string>
+ <string name="provisioning_byod_verify_video_title">Verify captured video</string>
+ <string name="provisioning_byod_verify_audio_title">Verify captured audio</string>
+ <string name="provisioning_byod_no_image_capture_resolver">No image capture app present. Skip test.</string>
+ <string name="provisioning_byod_no_video_capture_resolver">No video capture app present. Skip test.</string>
+ <string name="provisioning_byod_no_audio_capture_resolver">No audio capture app present. Skip test.</string>
+ <string name="provisioning_byod_capture_media_error">Error while capturing media from managed profile.</string>
+
<!-- Strings for DeskClock -->
<string name="deskclock_tests">Alarms and Timers Tests</string>
<string name="deskclock_tests_info">
@@ -1436,6 +1497,14 @@
Overlay view must be shown. Verify that there is a text view displaying \"Overlay View Dummy Text\"
when you tune to the \"Dummy\" channel.
</string>
+ <string name="tv_input_discover_test_go_to_epg">
+ Press the \"Launch EPG\" button, and locate the channel named \"Dummy\".
+ </string>
+ <string name="tv_input_discover_test_verify_epg">
+ Do you see the program named \"Dummy Program\" and its description
+ \"Dummy Program Description\" in the EPG?
+ </string>
+ <string name="tv_input_discover_test_yes">Yes</string>
<string name="tv_parental_control_test">Live Channels app parental control test</string>
<string name="tv_parental_control_test_info">
@@ -1463,14 +1532,15 @@
</string>
<string name="tv_launch_tv_app">Launch Live Channels</string>
+ <string name="tv_launch_epg">Launch EPG</string>
<string name="tv_channel_not_found">
CtsVerifier channel is not set up. Please set up before proceeding.
</string>
- <string name="tv_multiple_tracks_test">Live Channels app multiple tracks / subtitle test</string>
+ <string name="tv_multiple_tracks_test">Live Channels app closed captions and multi-audio test</string>
<string name="tv_multiple_tracks_test_info">
- This test verifies that the default Live Channels app invokes proper mulitple tracks / subtitle
- APIs in the framework.
+ This test verifies that the default Live Channels app invokes proper multiple tracks APIs in the
+ framework.
</string>
<string name="tv_multiple_tracks_test_select_subtitle">
Press the \"Launch Live Channels\" button. Verify that the closed caption is off by default.
diff --git a/apps/CtsVerifier/res/xml/filepaths.xml b/apps/CtsVerifier/res/xml/filepaths.xml
new file mode 100644
index 0000000..2d555a2
--- /dev/null
+++ b/apps/CtsVerifier/res/xml/filepaths.xml
@@ -0,0 +1,3 @@
+<paths xmlns:android="http://schemas.android.com/apk/res/android">
+ <files-path path="images/" name="images" />
+</paths>
diff --git a/apps/CtsVerifier/src/com/android/cts/verifier/audio/AudioRecordHelper.java b/apps/CtsVerifier/src/com/android/cts/verifier/audio/AudioRecordHelper.java
index dafa117..80dd250 100644
--- a/apps/CtsVerifier/src/com/android/cts/verifier/audio/AudioRecordHelper.java
+++ b/apps/CtsVerifier/src/com/android/cts/verifier/audio/AudioRecordHelper.java
@@ -1,14 +1,13 @@
package com.android.cts.verifier.audio;
import android.media.AudioFormat;
+import android.media.AudioManager;
import android.media.AudioRecord;
+import android.media.AudioTrack;
import android.media.MediaRecorder;
import android.util.Log;
import java.io.ByteArrayOutputStream;
-import java.io.File;
-import java.io.FileNotFoundException;
-import java.io.FileOutputStream;
import java.io.IOException;
/**
@@ -16,15 +15,10 @@
*/
public class AudioRecordHelper {
- // order of preference
- // MIC 48000Hz
- // MIC 44100Hz
- // VOICE_RECOGNITION 48000Hz
- // VOICE_RECOGNITION 44100Hz
- // if all these 4 settings failed, it logs an error
private static final int[] SOURCE = {
MediaRecorder.AudioSource.MIC, MediaRecorder.AudioSource.VOICE_RECOGNITION};
- private static final int[] SAMPLE_RATES_HZ = {48000, 44100};
+ private static final int[] SAMPLE_RATES_HZ = {
+ AudioTrack.getNativeOutputSampleRate(AudioManager.STREAM_MUSIC), 48000, 44100};
private static final int CHANNEL = AudioFormat.CHANNEL_CONFIGURATION_MONO;
private static final int ENCODING = AudioFormat.ENCODING_PCM_16BIT;
@@ -147,18 +141,4 @@
public byte[] getByte() {
return os.toByteArray();
}
-
- /**
- * Writes data to file
- */
- public void writeToFile() {
- try {
- FileOutputStream fos = new FileOutputStream(new File(Common.PCM_FILE));
- fos.write(os.toByteArray());
- } catch (FileNotFoundException e) {
- e.printStackTrace();
- } catch (IOException e) {
- e.printStackTrace();
- }
- }
}
diff --git a/apps/CtsVerifier/src/com/android/cts/verifier/audio/Common.java b/apps/CtsVerifier/src/com/android/cts/verifier/audio/Common.java
index f30b990..df7460a 100644
--- a/apps/CtsVerifier/src/com/android/cts/verifier/audio/Common.java
+++ b/apps/CtsVerifier/src/com/android/cts/verifier/audio/Common.java
@@ -1,5 +1,8 @@
package com.android.cts.verifier.audio;
+import android.media.AudioManager;
+import android.media.AudioTrack;
+
import java.util.ArrayList;
import java.util.Random;
@@ -8,51 +11,39 @@
*/
public class Common {
+ public static final int RECORDING_SAMPLE_RATE_HZ
+ = AudioRecordHelper.getInstance().getSampleRate();
+ public static final int PLAYING_SAMPLE_RATE_HZ
+ = AudioTrack.getNativeOutputSampleRate(AudioManager.STREAM_MUSIC);
+
// Default constants.
+ public static final double PASSING_THRESHOLD_DB = -40.0;
public static final double PIP_DURATION_S = 0.004;
public static final double PAUSE_DURATION_S = 0.016;
public static final int PREFIX_NUM_CHIPS = 1023;
public static final int PREFIX_SAMPLES_PER_CHIP = 4;
- public static final int PREFIX_LENGTH =
- PREFIX_NUM_CHIPS * PREFIX_SAMPLES_PER_CHIP;
- public static final double PAUSE_BEFORE_PREFIX_DURATION_S = 1.0;
- public static final double PAUSE_AFTER_PREFIX_DURATION_S = 0.5;
+ public static final double PREFIX_LENGTH_S = 0.1;
+ public static final double PAUSE_BEFORE_PREFIX_DURATION_S = 0.5;
+ public static final double PAUSE_AFTER_PREFIX_DURATION_S = 0.4;
public static final double MIN_FREQUENCY_HZ = 500;
public static final double MAX_FREQUENCY_HZ = 21000;
public static final double FREQUENCY_STEP_HZ = 100;
- public static final int AUDIBLE_SIGNAL_MIN_STRENGTH_DB = 10;
- public static final int ULTRASOUND_SIGNAL_MIN_STRENGTH_RATIO = 2;
- // Variables defined for convenience.
+ public static final int SIGNAL_MIN_STRENGTH_DB_ABOVE_NOISE = 10;
public static final int REPETITIONS = 5;
- public static final double[] PREFIX = prefix();
+ public static final int NOISE_SAMPLES = 3;
+
public static final double[] FREQUENCIES_ORIGINAL = originalFrequencies();
public static final int PIP_NUM = FREQUENCIES_ORIGINAL.length;
public static final int[] ORDER = order();
public static final double[] FREQUENCIES = frequencies();
- // A PCM file is just raw monaural sample data. Samples are serialized as little endian signed
- // 16-bit integers. The sample rate is determined by AudioRecordHelper.getSampleRate().
- public static final String PCM_FILE = "/sdcard/sound_self_tester.pcm";
- private static int recordingSampleRateHz = -1;
- private static double[] window;
- private static double[] generateWindow;
- public static int getSampleRate() {
- return recordingSampleRateHz;
- }
+ public static final double[] WINDOW_FOR_RECORDER =
+ hann(Util.toLength(PIP_DURATION_S, RECORDING_SAMPLE_RATE_HZ));
+ public static final double[] WINDOW_FOR_PLAYER =
+ hann(Util.toLength(PIP_DURATION_S, PLAYING_SAMPLE_RATE_HZ));
- public static void setSampleRate(int sampleRate) {
- recordingSampleRateHz = sampleRate;
- window = hann(Util.toLength(PIP_DURATION_S, recordingSampleRateHz));
- generateWindow = hann(Util.toLength(PIP_DURATION_S, recordingSampleRateHz));
- }
-
- public static double[] window() {
- return window;
- }
-
- public static double[] generateWindow() {
- return generateWindow;
- }
+ public static final double[] PREFIX_FOR_RECORDER = prefix(RECORDING_SAMPLE_RATE_HZ);
+ public static final double[] PREFIX_FOR_PLAYER = prefix(PLAYING_SAMPLE_RATE_HZ);
/**
* Get a Hann window.
@@ -69,7 +60,7 @@
/**
* Get a maximum length sequence, used as prefix to indicate start of signal.
*/
- private static double[] prefix() {
+ private static double[] prefix(int rate) {
double[] codeSequence = new double[PREFIX_NUM_CHIPS];
for (int i = 0; i < PREFIX_NUM_CHIPS; i++) {
if (i < 10) {
@@ -79,7 +70,7 @@
* codeSequence[i - 9] * codeSequence[i - 10];
}
}
- double[] prefixArray = new double[PREFIX_LENGTH];
+ double[] prefixArray = new double[PREFIX_NUM_CHIPS * PREFIX_SAMPLES_PER_CHIP];
int offset = 0;
for (int i = 0; i < PREFIX_NUM_CHIPS; i++) {
double value = codeSequence[i];
@@ -88,7 +79,14 @@
}
offset += PREFIX_SAMPLES_PER_CHIP;
}
- return prefixArray;
+ int prefixLength = (int) Math.round(PREFIX_LENGTH_S * rate);
+ double[] samplePrefixArray = new double[prefixLength];
+ for (int i = 0; i < prefixLength; i++) {
+ // Linear interpolation between the two nearest chip samples.
+ double index = (double) i / prefixLength * (prefixArray.length - 1);
+ int lo = (int) Math.floor(index);
+ int hi = (int) Math.ceil(index);
+ double frac = index - lo;
+ samplePrefixArray[i] = (1 - frac) * prefixArray[lo] + frac * prefixArray[hi];
+ }
+ return samplePrefixArray;
}
/**
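The loop above stretches the chip sequence to PREFIX_LENGTH_S seconds at the caller's sample rate by linear interpolation; the same logic in Python, purely as an illustrative sketch (the function name and structure are mine, not part of the patch):

    import math

    def resample_prefix(prefix_array, prefix_length_s, rate):
        """Linearly interpolate prefix_array onto round(prefix_length_s * rate) samples."""
        n = int(round(prefix_length_s * rate))
        out = []
        for i in range(n):
            index = float(i) / n * (len(prefix_array) - 1)
            lo, hi = int(math.floor(index)), int(math.ceil(index))
            frac = index - lo
            out.append((1 - frac) * prefix_array[lo] + frac * prefix_array[hi])
        return out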
diff --git a/apps/CtsVerifier/src/com/android/cts/verifier/audio/HifiUltrasoundSpeakerTestActivity.java b/apps/CtsVerifier/src/com/android/cts/verifier/audio/HifiUltrasoundSpeakerTestActivity.java
new file mode 100644
index 0000000..fa5ad81
--- /dev/null
+++ b/apps/CtsVerifier/src/com/android/cts/verifier/audio/HifiUltrasoundSpeakerTestActivity.java
@@ -0,0 +1,372 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.cts.verifier.audio;
+
+import com.android.cts.verifier.PassFailButtons;
+import com.android.cts.verifier.R;
+
+import android.content.Context;
+import android.media.AudioFormat;
+import android.media.AudioManager;
+import android.media.AudioTrack;
+import android.os.AsyncTask;
+import android.os.Bundle;
+import android.text.method.ScrollingMovementMethod;
+import android.util.Log;
+import android.view.Gravity;
+import android.view.LayoutInflater;
+import android.view.View;
+import android.widget.Button;
+import android.widget.LinearLayout;
+import android.widget.LinearLayout.LayoutParams;
+import android.widget.PopupWindow;
+import android.widget.TextView;
+import java.util.Arrays;
+
+import com.androidplot.xy.SimpleXYSeries;
+import com.androidplot.xy.XYSeries;
+import com.androidplot.xy.*;
+
+public class HifiUltrasoundSpeakerTestActivity extends PassFailButtons.Activity {
+
+ public enum Status {
+ START, RECORDING, DONE, PLAYER
+ }
+
+ private static final String TAG = "HifiUltrasoundSpeakerTestActivity";
+
+ private Status status = Status.START;
+ private boolean onPlotScreen = false;
+ private TextView info;
+ private Button playerButton;
+ private Button recorderButton;
+ private AudioTrack audioTrack;
+ private LayoutInflater layoutInflater;
+ private View popupView;
+ private PopupWindow popupWindow;
+ private boolean micSupport = true;
+ private boolean spkrSupport = true;
+
+ @Override
+ public void onBackPressed () {
+ if (onPlotScreen) {
+ popupWindow.dismiss();
+ onPlotScreen = false;
+ recorderButton.setEnabled(true);
+ } else {
+ super.onBackPressed();
+ }
+ }
+
+ @Override
+ protected void onCreate(Bundle savedInstanceState) {
+ super.onCreate(savedInstanceState);
+ setContentView(R.layout.hifi_ultrasound);
+ setInfoResources(R.string.hifi_ultrasound_speaker_test,
+ R.string.hifi_ultrasound_speaker_test_info, -1);
+ setPassFailButtonClickListeners();
+ getPassButton().setEnabled(false);
+
+ info = (TextView) findViewById(R.id.info_text);
+ info.setMovementMethod(new ScrollingMovementMethod());
+ info.setText(R.string.hifi_ultrasound_speaker_test_instruction1);
+
+ AudioManager audioManager = (AudioManager) getSystemService(Context.AUDIO_SERVICE);
+ String micSupportString = audioManager.getProperty(
+ getResources().getString(R.string.hifi_ultrasound_test_mic_prop));
+ String spkrSupportString = audioManager.getProperty(
+ getResources().getString(R.string.hifi_ultrasound_test_spkr_prop));
+
+ if (micSupportString == null) {
+ micSupportString = "null";
+ }
+ if (spkrSupportString == null) {
+ spkrSupportString = "null";
+ }
+ if (micSupportString.equalsIgnoreCase(getResources().getString(
+ R.string.hifi_ultrasound_test_default_false_string))) {
+ micSupport = false;
+ getPassButton().setEnabled(true);
+ info.append(getResources().getString(R.string.hifi_ultrasound_speaker_test_mic_no_support));
+ }
+ if (spkrSupportString.equalsIgnoreCase(getResources().getString(
+ R.string.hifi_ultrasound_test_default_false_string))) {
+ spkrSupport = false;
+ info.append(getResources().getString(R.string.hifi_ultrasound_speaker_test_spkr_no_support));
+ }
+
+ layoutInflater = (LayoutInflater) getBaseContext().getSystemService(
+ LAYOUT_INFLATER_SERVICE);
+ popupView = layoutInflater.inflate(R.layout.hifi_ultrasound_popup, null);
+ popupWindow = new PopupWindow(
+ popupView, LayoutParams.WRAP_CONTENT, LayoutParams.WRAP_CONTENT);
+
+ final AudioRecordHelper audioRecorder = AudioRecordHelper.getInstance();
+ final int recordRate = audioRecorder.getSampleRate();
+
+ recorderButton = (Button) findViewById(R.id.recorder_button);
+ recorderButton.setEnabled(micSupport);
+ recorderButton.setOnClickListener(new View.OnClickListener() {
+ private WavAnalyzerTask wavAnalyzerTask = null;
+ private void stopRecording() {
+ audioRecorder.stop();
+ wavAnalyzerTask = new WavAnalyzerTask(audioRecorder.getByte());
+ wavAnalyzerTask.execute();
+ status = Status.DONE;
+ }
+ @Override
+ public void onClick(View v) {
+ switch (status) {
+ case START:
+ info.append("Recording at " + recordRate + "Hz using ");
+ final int source = audioRecorder.getAudioSource();
+ switch (source) {
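+            // Source values from MediaRecorder.AudioSource: 1 == MIC, 6 == VOICE_RECOGNITION.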
+ case 1:
+ info.append("MIC");
+ break;
+ case 6:
+ info.append("VOICE_RECOGNITION");
+ break;
+ default:
+ info.append("UNEXPECTED " + source);
+ break;
+ }
+ info.append("\n");
+ status = Status.RECORDING;
+ playerButton.setEnabled(false);
+ recorderButton.setEnabled(false);
+ audioRecorder.start();
+
+ final View finalV = v;
+ new Thread() {
+ @Override
+ public void run() {
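+                // Record long enough to cover the prefix, the pauses around it, and every
+                // pip repetition, plus a 2.5 s safety margin.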
+ Double recordingDuration_millis = new Double(1000 * (2.5
+ + Common.PREFIX_LENGTH_S
+ + Common.PAUSE_BEFORE_PREFIX_DURATION_S
+ + Common.PAUSE_AFTER_PREFIX_DURATION_S
+ + Common.PIP_NUM * (Common.PIP_DURATION_S + Common.PAUSE_DURATION_S)
+ * Common.REPETITIONS));
+ Log.d(TAG, "Recording for " + recordingDuration_millis + "ms");
+ try {
+ Thread.sleep(recordingDuration_millis.intValue());
+ } catch (InterruptedException e) {
+ throw new RuntimeException(e);
+ }
+ runOnUiThread(new Runnable() {
+ @Override
+ public void run() {
+ stopRecording();
+ }
+ });
+ }
+ }.start();
+
+ break;
+
+ case DONE:
+ plotResponse(wavAnalyzerTask);
+ break;
+
+ default: break;
+ }
+ }
+ });
+
+ playerButton = (Button) findViewById(R.id.player_button);
+ playerButton.setEnabled(spkrSupport);
+ playerButton.setOnClickListener(new View.OnClickListener() {
+ @Override
+ public void onClick(View v) {
+ recorderButton.setEnabled(false);
+ status = Status.PLAYER;
+ play();
+ getPassButton().setEnabled(true);
+ }
+ });
+ }
+
+ private void plotResponse(WavAnalyzerTask wavAnalyzerTask) {
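+    // Shows the measured frequency response in a popup: one series per repetition, plus the
+    // median response, the background noise floor, and the passing-threshold line.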
+ Button dismissButton = (Button)popupView.findViewById(R.id.dismiss);
+ dismissButton.setOnClickListener(new Button.OnClickListener(){
+ @Override
+ public void onClick(View v) {
+ popupWindow.dismiss();
+ onPlotScreen = false;
+ recorderButton.setEnabled(true);
+ }});
+ popupWindow.showAtLocation(info, Gravity.CENTER, 0, 0);
+ onPlotScreen = true;
+
+ recorderButton.setEnabled(false);
+
+ XYPlot plot = (XYPlot) popupView.findViewById(R.id.responseChart);
+ plot.setDomainStep(XYStepMode.INCREMENT_BY_VAL, 2000);
+
+ Double[] frequencies = new Double[Common.PIP_NUM];
+ for (int i = 0; i < Common.PIP_NUM; i++) {
+ frequencies[i] = new Double(Common.FREQUENCIES_ORIGINAL[i]);
+ }
+
+ if (wavAnalyzerTask != null) {
+
+ double[][] power = wavAnalyzerTask.getPower();
+ for(int i = 0; i < Common.REPETITIONS; i++) {
+ Double[] powerWrap = new Double[Common.PIP_NUM];
+ for (int j = 0; j < Common.PIP_NUM; j++) {
+ powerWrap[j] = new Double(10 * Math.log10(power[j][i]));
+ }
+ XYSeries series = new SimpleXYSeries(
+ Arrays.asList(frequencies),
+ Arrays.asList(powerWrap),
+ "");
+ LineAndPointFormatter seriesFormat = new LineAndPointFormatter();
+ seriesFormat.configure(getApplicationContext(),
+ R.xml.ultrasound_line_formatter_trials);
+ seriesFormat.setPointLabelFormatter(null);
+ plot.addSeries(series, seriesFormat);
+ }
+
+ double[] noiseDB = wavAnalyzerTask.getNoiseDB();
+ Double[] noiseDBWrap = new Double[Common.PIP_NUM];
+ for (int i = 0; i < Common.PIP_NUM; i++) {
+ noiseDBWrap[i] = new Double(noiseDB[i]);
+ }
+
+ XYSeries noiseSeries = new SimpleXYSeries(
+ Arrays.asList(frequencies),
+ Arrays.asList(noiseDBWrap),
+ "background noise");
+ LineAndPointFormatter noiseSeriesFormat = new LineAndPointFormatter();
+ noiseSeriesFormat.configure(getApplicationContext(),
+ R.xml.ultrasound_line_formatter_noise);
+ noiseSeriesFormat.setPointLabelFormatter(null);
+ plot.addSeries(noiseSeries, noiseSeriesFormat);
+
+ double[] dB = wavAnalyzerTask.getDB();
+ Double[] dBWrap = new Double[Common.PIP_NUM];
+ for (int i = 0; i < Common.PIP_NUM; i++) {
+ dBWrap[i] = new Double(dB[i]);
+ }
+
+ XYSeries series = new SimpleXYSeries(
+ Arrays.asList(frequencies),
+ Arrays.asList(dBWrap),
+ "median");
+ LineAndPointFormatter seriesFormat = new LineAndPointFormatter();
+ seriesFormat.configure(getApplicationContext(),
+ R.xml.ultrasound_line_formatter_median);
+ seriesFormat.setPointLabelFormatter(null);
+ plot.addSeries(series, seriesFormat);
+
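+      // Horizontal line marking the passing threshold across the tested frequency range.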
+ Double[] passX = new Double[] {Common.MIN_FREQUENCY_HZ, Common.MAX_FREQUENCY_HZ};
+ Double[] passY = new Double[] {wavAnalyzerTask.getThreshold(), wavAnalyzerTask.getThreshold()};
+ XYSeries passSeries = new SimpleXYSeries(
+ Arrays.asList(passX), Arrays.asList(passY), "passing");
+ LineAndPointFormatter passSeriesFormat = new LineAndPointFormatter();
+ passSeriesFormat.configure(getApplicationContext(),
+ R.xml.ultrasound_line_formatter_pass);
+ passSeriesFormat.setPointLabelFormatter(null);
+ plot.addSeries(passSeries, passSeriesFormat);
+ }
+ }
+
+ /**
+ * Plays the generated pips.
+ */
+ private void play() {
+ play(SoundGenerator.getInstance().getByte(), Common.PLAYING_SAMPLE_RATE_HZ);
+ }
+
+ /**
+ * Plays the sound data.
+ */
+ private void play(byte[] data, int sampleRate) {
+ if (audioTrack != null) {
+ audioTrack.stop();
+ audioTrack.release();
+ }
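+    // MODE_STATIC track: the whole clip is written up front and the buffer is sized to hold
+    // it, but never smaller than the framework's minimum buffer size.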
+ audioTrack = new AudioTrack(AudioManager.STREAM_MUSIC,
+ sampleRate, AudioFormat.CHANNEL_OUT_MONO,
+ AudioFormat.ENCODING_PCM_16BIT, Math.max(data.length, AudioTrack.getMinBufferSize(
+ sampleRate, AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT)),
+ AudioTrack.MODE_STATIC);
+ audioTrack.write(data, 0, data.length);
+ audioTrack.play();
+ }
+
+ /**
+   * AsyncTask that runs the WAV analysis off the UI thread and reports progress to the UI.
+ */
+ private class WavAnalyzerTask extends AsyncTask<Void, String, String>
+ implements WavAnalyzer.Listener {
+
+ private static final String TAG = "WavAnalyzerTask";
+ WavAnalyzer wavAnalyzer;
+
+ public WavAnalyzerTask(byte[] recording) {
+ wavAnalyzer = new WavAnalyzer(recording, Common.RECORDING_SAMPLE_RATE_HZ,
+ WavAnalyzerTask.this);
+ }
+
+ double[] getDB() {
+ return wavAnalyzer.getDB();
+ }
+
+ double[][] getPower() {
+ return wavAnalyzer.getPower();
+ }
+
+ double[] getNoiseDB() {
+ return wavAnalyzer.getNoiseDB();
+ }
+
+ double getThreshold() {
+ return wavAnalyzer.getThreshold();
+ }
+
+ @Override
+ protected String doInBackground(Void... params) {
+ boolean result = wavAnalyzer.doWork();
+ if (result) {
+ return getString(R.string.hifi_ultrasound_test_pass);
+ }
+ return getString(R.string.hifi_ultrasound_test_fail);
+ }
+
+ @Override
+ protected void onPostExecute(String result) {
+ info.append(result);
+ recorderButton.setEnabled(true);
+ recorderButton.setText(R.string.hifi_ultrasound_test_plot);
+ }
+
+ @Override
+ protected void onProgressUpdate(String... values) {
+ for (String message : values) {
+ info.append(message);
+ Log.d(TAG, message);
+ }
+ }
+
+ @Override
+ public void sendMessage(String message) {
+ publishProgress(message);
+ }
+ }
+}
diff --git a/apps/CtsVerifier/src/com/android/cts/verifier/audio/HifiUltrasoundTestActivity.java b/apps/CtsVerifier/src/com/android/cts/verifier/audio/HifiUltrasoundTestActivity.java
index 01eb4b0..690e109 100644
--- a/apps/CtsVerifier/src/com/android/cts/verifier/audio/HifiUltrasoundTestActivity.java
+++ b/apps/CtsVerifier/src/com/android/cts/verifier/audio/HifiUltrasoundTestActivity.java
@@ -50,10 +50,27 @@
private static final String TAG = "HifiUltrasoundTestActivity";
private Status status = Status.START;
+ private boolean onPlotScreen = false;
private TextView info;
private Button playerButton;
private Button recorderButton;
private AudioTrack audioTrack;
+ private LayoutInflater layoutInflater;
+ private View popupView;
+ private PopupWindow popupWindow;
+ private boolean micSupport = true;
+ private boolean spkrSupport = true;
+
+ @Override
+ public void onBackPressed () {
+ if (onPlotScreen) {
+ popupWindow.dismiss();
+ onPlotScreen = false;
+ recorderButton.setEnabled(true);
+ } else {
+ super.onBackPressed();
+ }
+ }
@Override
protected void onCreate(Bundle savedInstanceState) {
@@ -63,22 +80,49 @@
setPassFailButtonClickListeners();
getPassButton().setEnabled(false);
- Common.setSampleRate(AudioTrack.getNativeOutputSampleRate(AudioManager.STREAM_MUSIC));
-
info = (TextView) findViewById(R.id.info_text);
info.setMovementMethod(new ScrollingMovementMethod());
info.setText(R.string.hifi_ultrasound_test_instruction1);
+ AudioManager audioManager = (AudioManager) getSystemService(Context.AUDIO_SERVICE);
+ String micSupportString = audioManager.getProperty(
+ getResources().getString(R.string.hifi_ultrasound_test_mic_prop));
+ String spkrSupportString = audioManager.getProperty(
+ getResources().getString(R.string.hifi_ultrasound_test_spkr_prop));
+
+ if (micSupportString == null) {
+ micSupportString = "null";
+ }
+ if (spkrSupportString == null) {
+ spkrSupportString = "null";
+ }
+ if (micSupportString.equalsIgnoreCase(getResources().getString(
+ R.string.hifi_ultrasound_test_default_false_string))) {
+ micSupport = false;
+ getPassButton().setEnabled(true);
+ info.append(getResources().getString(R.string.hifi_ultrasound_test_mic_no_support));
+ }
+ if (spkrSupportString.equalsIgnoreCase(getResources().getString(
+ R.string.hifi_ultrasound_test_default_false_string))) {
+ spkrSupport = false;
+ info.append(getResources().getString(R.string.hifi_ultrasound_test_spkr_no_support));
+ }
+
+ layoutInflater = (LayoutInflater) getBaseContext().getSystemService(
+ LAYOUT_INFLATER_SERVICE);
+ popupView = layoutInflater.inflate(R.layout.hifi_ultrasound_popup, null);
+ popupWindow = new PopupWindow(
+ popupView, LayoutParams.WRAP_CONTENT, LayoutParams.WRAP_CONTENT);
+
final AudioRecordHelper audioRecorder = AudioRecordHelper.getInstance();
final int recordRate = audioRecorder.getSampleRate();
recorderButton = (Button) findViewById(R.id.recorder_button);
+ recorderButton.setEnabled(micSupport);
recorderButton.setOnClickListener(new View.OnClickListener() {
private WavAnalyzerTask wavAnalyzerTask = null;
private void stopRecording() {
audioRecorder.stop();
- playerButton.setText(R.string.hifi_ultrasound_test_playback);
- playerButton.setEnabled(true);
wavAnalyzerTask = new WavAnalyzerTask(audioRecorder.getByte());
wavAnalyzerTask.execute();
status = Status.DONE;
@@ -86,7 +130,6 @@
@Override
public void onClick(View v) {
switch (status) {
-
case START:
info.append("Recording at " + recordRate + "Hz using ");
final int source = audioRecorder.getAudioSource();
@@ -111,12 +154,12 @@
new Thread() {
@Override
public void run() {
- Double recordingDuration_millis = new Double(1000 * (2.5 +
- Common.PREFIX_LENGTH / Common.getSampleRate() +
- Common.PAUSE_BEFORE_PREFIX_DURATION_S +
- Common.PAUSE_AFTER_PREFIX_DURATION_S +
- Common.PIP_NUM * (Common.PIP_DURATION_S + Common.PAUSE_DURATION_S)
- * Common.REPETITIONS));
+ Double recordingDuration_millis = new Double(1000 * (2.5
+ + Common.PREFIX_LENGTH_S
+ + Common.PAUSE_BEFORE_PREFIX_DURATION_S
+ + Common.PAUSE_AFTER_PREFIX_DURATION_S
+ + Common.PIP_NUM * (Common.PIP_DURATION_S + Common.PAUSE_DURATION_S)
+ * Common.REPETITIONS));
Log.d(TAG, "Recording for " + recordingDuration_millis + "ms");
try {
Thread.sleep(recordingDuration_millis.intValue());
@@ -144,40 +187,29 @@
});
playerButton = (Button) findViewById(R.id.player_button);
+ playerButton.setEnabled(spkrSupport);
playerButton.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
- switch (status) {
- case START:
- playerButton.setEnabled(false);
- recorderButton.setEnabled(false);
- status = Status.PLAYER;
- play();
- break;
- default:
- play(audioRecorder.getByte(), recordRate);
- break;
- }
+ recorderButton.setEnabled(false);
+ status = Status.PLAYER;
+ play();
}
});
}
private void plotResponse(WavAnalyzerTask wavAnalyzerTask) {
- LayoutInflater layoutInflater
- = (LayoutInflater) getBaseContext().getSystemService(LAYOUT_INFLATER_SERVICE);
- View popupView = layoutInflater.inflate(R.layout.hifi_ultrasound_popup, null);
- final PopupWindow popupWindow = new PopupWindow(
- popupView, LayoutParams.WRAP_CONTENT, LayoutParams.WRAP_CONTENT);
-
Button dismissButton = (Button)popupView.findViewById(R.id.dismiss);
dismissButton.setOnClickListener(new Button.OnClickListener(){
@Override
public void onClick(View v) {
popupWindow.dismiss();
+ onPlotScreen = false;
recorderButton.setEnabled(true);
}});
popupWindow.showAtLocation(info, Gravity.CENTER, 0, 0);
+ onPlotScreen = true;
recorderButton.setEnabled(false);
@@ -256,7 +288,7 @@
* Plays the generated pips.
*/
private void play() {
- play(SoundGenerator.getInstance().getByte(), Common.getSampleRate());
+ play(SoundGenerator.getInstance().getByte(), Common.PLAYING_SAMPLE_RATE_HZ);
}
/**
@@ -286,7 +318,7 @@
WavAnalyzer wavAnalyzer;
public WavAnalyzerTask(byte[] recording) {
- wavAnalyzer = new WavAnalyzer(recording, AudioRecordHelper.getInstance().getSampleRate(),
+ wavAnalyzer = new WavAnalyzer(recording, Common.RECORDING_SAMPLE_RATE_HZ,
WavAnalyzerTask.this);
}
diff --git a/apps/CtsVerifier/src/com/android/cts/verifier/audio/SoundGenerator.java b/apps/CtsVerifier/src/com/android/cts/verifier/audio/SoundGenerator.java
index f7318ed..0ad9371 100644
--- a/apps/CtsVerifier/src/com/android/cts/verifier/audio/SoundGenerator.java
+++ b/apps/CtsVerifier/src/com/android/cts/verifier/audio/SoundGenerator.java
@@ -13,25 +13,25 @@
private SoundGenerator() {
// Initialize sample.
int pipNum = Common.PIP_NUM;
- int prefixTotalLength = Common.PREFIX.length
- + Util.toLength(Common.PAUSE_BEFORE_PREFIX_DURATION_S, Common.getSampleRate())
- + Util.toLength(Common.PAUSE_AFTER_PREFIX_DURATION_S, Common.getSampleRate());
+ int prefixTotalLength = Util.toLength(Common.PREFIX_LENGTH_S, Common.PLAYING_SAMPLE_RATE_HZ)
+ + Util.toLength(Common.PAUSE_BEFORE_PREFIX_DURATION_S, Common.PLAYING_SAMPLE_RATE_HZ)
+ + Util.toLength(Common.PAUSE_AFTER_PREFIX_DURATION_S, Common.PLAYING_SAMPLE_RATE_HZ);
int repetitionLength = pipNum * Util.toLength(
- Common.PIP_DURATION_S + Common.PAUSE_DURATION_S, Common.getSampleRate());
+ Common.PIP_DURATION_S + Common.PAUSE_DURATION_S, Common.PLAYING_SAMPLE_RATE_HZ);
int sampleLength = prefixTotalLength + Common.REPETITIONS * repetitionLength;
sample = new double[sampleLength];
// Fill sample with prefix.
- System.arraycopy(Common.PREFIX, 0, sample,
- Util.toLength(Common.PAUSE_BEFORE_PREFIX_DURATION_S, Common.getSampleRate()),
- Common.PREFIX.length);
+ System.arraycopy(Common.PREFIX_FOR_PLAYER, 0, sample,
+ Util.toLength(Common.PAUSE_BEFORE_PREFIX_DURATION_S, Common.PLAYING_SAMPLE_RATE_HZ),
+ Common.PREFIX_FOR_PLAYER.length);
// Fill the sample.
for (int i = 0; i < pipNum * Common.REPETITIONS; i++) {
- double[] pip = getPip(Common.generateWindow(), Common.FREQUENCIES[i]);
+ double[] pip = getPip(Common.WINDOW_FOR_PLAYER, Common.FREQUENCIES[i]);
System.arraycopy(pip, 0, sample,
prefixTotalLength + i * Util.toLength(
- Common.PIP_DURATION_S + Common.PAUSE_DURATION_S, Common.getSampleRate()),
+ Common.PIP_DURATION_S + Common.PAUSE_DURATION_S, Common.PLAYING_SAMPLE_RATE_HZ),
pip.length);
}
@@ -58,7 +58,7 @@
private static double[] getPip(double[] window, double frequency) {
int pipArrayLength = window.length;
double[] pipArray = new double[pipArrayLength];
- double radPerSample = 2 * Math.PI / (Common.getSampleRate() / frequency);
+ double radPerSample = 2 * Math.PI / (Common.PLAYING_SAMPLE_RATE_HZ / frequency);
for (int i = 0; i < pipArrayLength; i++) {
pipArray[i] = window[i] * Math.sin(i * radPerSample);
}
diff --git a/apps/CtsVerifier/src/com/android/cts/verifier/audio/WavAnalyzer.java b/apps/CtsVerifier/src/com/android/cts/verifier/audio/WavAnalyzer.java
index 84d59d6..b75c40b 100644
--- a/apps/CtsVerifier/src/com/android/cts/verifier/audio/WavAnalyzer.java
+++ b/apps/CtsVerifier/src/com/android/cts/verifier/audio/WavAnalyzer.java
@@ -15,7 +15,8 @@
private double[] dB; // Average response
private double[][] power; // power of each trial
private double[] noiseDB; // background noise
- private double threshold; // threshold of passing
+  private double[][] noisePower; // background noise power per pip frequency, per noise sample
+  private double threshold; // passing threshold: allowed drop-off relative to the response at 2 kHz
private boolean result = false; // result of the test
/**
@@ -41,12 +42,12 @@
return false;
}
// Calculating the pip strength.
- listener.sendMessage("Calculating...\n");
+ listener.sendMessage("Calculating... Please wait...\n");
try {
dB = measurePipStrength();
} catch (IndexOutOfBoundsException e) {
listener.sendMessage("WARNING: May have missed the prefix."
- + " Turn up the volume or move to a quieter location.\n");
+ + " Turn up the volume of the playback device or move to a quieter location.\n");
return false;
}
if (!isConsistent()) {
@@ -63,7 +64,7 @@
for (int i = 1; i < data.length; i++) {
if ((Math.abs(data[i]) >= Short.MAX_VALUE) && (Math.abs(data[i - 1]) >= Short.MAX_VALUE)) {
listener.sendMessage("WARNING: Data is clipped."
- + " Turn the volume down and redo the procedure.\n");
+ + " Turn down the volume of the playback device and redo the procedure.\n");
return true;
}
}
@@ -84,7 +85,7 @@
}
if (Util.mean(coeffOfVar) > 1.0) {
listener.sendMessage("WARNING: Inconsistent result across trials."
- + " Turn up the volume or move to a quieter location.\n");
+ + " Turn up the volume of the playback device or move to a quieter location.\n");
return false;
}
return true;
@@ -103,80 +104,80 @@
}
}
- int indexOf4kHz = Util.findClosest(Common.FREQUENCIES_ORIGINAL, 4000.0);
- double[] responseBelow4kHz = new double[indexOf4kHz];
- System.arraycopy(dB, 0, responseBelow4kHz, 0, indexOf4kHz);
- double medianResponseBelow4kHz = Util.median(responseBelow4kHz);
- double[] noiseBelow4kHz = new double[indexOf4kHz];
- System.arraycopy(noiseDB, 0, noiseBelow4kHz, 0, indexOf4kHz);
- double medianNoiseBelow4kHz = Util.median(noiseBelow4kHz);
- if ((medianResponseBelow4kHz - medianNoiseBelow4kHz) < Common.AUDIBLE_SIGNAL_MIN_STRENGTH_DB) {
+ if (Util.mean(dB) - Util.mean(noiseDB) < Common.SIGNAL_MIN_STRENGTH_DB_ABOVE_NOISE) {
listener.sendMessage("WARNING: Signal is too weak or background noise is too strong."
- + " Turn up the volume or move to a quieter location.\n");
+ + " Turn up the volume of the playback device or move to a quieter location.\n");
return false;
}
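+    // The passing threshold is the measured response at 2 kHz offset by
+    // Common.PASSING_THRESHOLD_DB; the mean response in the 18.5-20 kHz band must not fall
+    // below it.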
+ int indexOf2000Hz = Util.findClosest(Common.FREQUENCIES_ORIGINAL, 2000.0);
+ threshold = dB[indexOf2000Hz] + Common.PASSING_THRESHOLD_DB;
int indexOf18500Hz = Util.findClosest(Common.FREQUENCIES_ORIGINAL, 18500.0);
int indexOf20000Hz = Util.findClosest(Common.FREQUENCIES_ORIGINAL, 20000.0);
double[] responseInRange = new double[indexOf20000Hz - indexOf18500Hz];
System.arraycopy(dB, indexOf18500Hz, responseInRange, 0, responseInRange.length);
- if (Util.mean(responseInRange) > threshold) {
- return true;
+ if (Util.mean(responseInRange) < threshold) {
+ listener.sendMessage(
+          "WARNING: Failed. Retry with different orientations or report the test as failed.\n");
+ return false;
}
- return false;
+ return true;
}
/**
* Calculate the Fourier Coefficient at the pip frequency to calculate the frequency response.
- * dB relative to background noise.
* Package visible for unit testing.
*/
double[] measurePipStrength() {
- listener.sendMessage("Aligning data\n");
+ listener.sendMessage("Aligning data... Please wait...\n");
final int dataStartI = alignData();
- final int prefixTotalLength = dataStartI + Common.PREFIX.length
- + Util.toLength(Common.PAUSE_AFTER_PREFIX_DURATION_S, sampleRate);
+ final int prefixTotalLength = dataStartI
+ + Util.toLength(Common.PREFIX_LENGTH_S + Common.PAUSE_AFTER_PREFIX_DURATION_S, sampleRate);
listener.sendMessage("Done.\n");
listener.sendMessage("Prefix starts at " + (double) dataStartI / sampleRate + " s \n");
- if (dataStartI > Math.round(sampleRate
- * (Common.PAUSE_BEFORE_PREFIX_DURATION_S + Common.PAUSE_AFTER_PREFIX_DURATION_S))
- + Common.PREFIX_LENGTH) {
- listener.sendMessage("WARNING: Unexpected prefix start time. May have missed the prefix."
- + " Turn up the volume or move to a quieter location.\n");
+ if (dataStartI > Math.round(sampleRate * (Common.PREFIX_LENGTH_S
+ + Common.PAUSE_BEFORE_PREFIX_DURATION_S + Common.PAUSE_AFTER_PREFIX_DURATION_S))) {
+ listener.sendMessage("WARNING: Unexpected prefix start time. May have missed the prefix.\n"
+ + "PLAY button should be pressed on the playback device within one second"
+ + " after RECORD is pressed on the recording device.\n"
+ + "If this happens repeatedly,"
+ + " turn up the volume of the playback device or move to a quieter location.\n");
}
- double[] noisePoints = new double[Common.window().length];
- System.arraycopy(data, dataStartI - noisePoints.length - 1, noisePoints, 0, noisePoints.length);
- for (int j = 0; j < noisePoints.length; j++) {
- noisePoints[j] = noisePoints[j] * Common.window()[j];
- }
-
+ listener.sendMessage("Analyzing noise strength... Please wait...\n");
+ noisePower = new double[Common.PIP_NUM][Common.NOISE_SAMPLES];
noiseDB = new double[Common.PIP_NUM];
- listener.sendMessage("Analyzing noise strength...\n");
- for (int i = 0; i < Common.PIP_NUM; i++) {
- double freq = Common.FREQUENCIES_ORIGINAL[i];
- Complex fourierCoeff = new Complex(0, 0);
- final Complex rotator = new Complex(0,
- -2.0 * Math.PI * freq / sampleRate).exp();
- Complex phasor = new Complex(1, 0);
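+    // Background noise estimate: take NOISE_SAMPLES windows immediately before the detected
+    // prefix and measure, for each pip frequency, the power of a single-bin DFT
+    // (phasor accumulation at that frequency).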
+ for (int s = 0; s < Common.NOISE_SAMPLES; s++) {
+ double[] noisePoints = new double[Common.WINDOW_FOR_RECORDER.length];
+ System.arraycopy(data, dataStartI - (s + 1) * noisePoints.length - 1,
+ noisePoints, 0, noisePoints.length);
for (int j = 0; j < noisePoints.length; j++) {
- fourierCoeff = fourierCoeff.add(phasor.multiply(noisePoints[j]));
- phasor = phasor.multiply(rotator);
+ noisePoints[j] = noisePoints[j] * Common.WINDOW_FOR_RECORDER[j];
}
- fourierCoeff = fourierCoeff.multiply(1.0 / noisePoints.length);
- double noisePower = fourierCoeff.multiply(fourierCoeff.conjugate()).abs();
- noiseDB[i] = 10 * Math.log10(noisePower);
+ for (int i = 0; i < Common.PIP_NUM; i++) {
+ double freq = Common.FREQUENCIES_ORIGINAL[i];
+ Complex fourierCoeff = new Complex(0, 0);
+ final Complex rotator = new Complex(0,
+ -2.0 * Math.PI * freq / sampleRate).exp();
+ Complex phasor = new Complex(1, 0);
+ for (int j = 0; j < noisePoints.length; j++) {
+ fourierCoeff = fourierCoeff.add(phasor.multiply(noisePoints[j]));
+ phasor = phasor.multiply(rotator);
+ }
+ fourierCoeff = fourierCoeff.multiply(1.0 / noisePoints.length);
+ noisePower[i][s] = fourierCoeff.multiply(fourierCoeff.conjugate()).abs();
+ }
+ }
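+    // Average the sampled noise power for each pip frequency and convert to dB.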
+ for (int i = 0; i < Common.PIP_NUM; i++) {
+ double meanNoisePower = 0;
+ for (int j = 0; j < Common.NOISE_SAMPLES; j++) {
+ meanNoisePower += noisePower[i][j];
+ }
+ meanNoisePower /= Common.NOISE_SAMPLES;
+ noiseDB[i] = 10 * Math.log10(meanNoisePower);
}
- int indexOf18500Hz = Util.findClosest(Common.FREQUENCIES_ORIGINAL, 18500.0);
- int indexOf20000Hz = Util.findClosest(Common.FREQUENCIES_ORIGINAL, 20000.0);
- double[] noiseInRange = new double[indexOf20000Hz - indexOf18500Hz + 1];
- System.arraycopy(noiseDB, indexOf18500Hz, noiseInRange, 0, indexOf20000Hz - indexOf18500Hz + 1);
- double medianNoiseInRange = Util.median(noiseInRange);
- double stdNoiseInRange = Util.std(noiseInRange);
- threshold = medianNoiseInRange + Common.ULTRASOUND_SIGNAL_MIN_STRENGTH_RATIO * stdNoiseInRange;
-
- listener.sendMessage("Analyzing pips...\n");
+ listener.sendMessage("Analyzing pips... Please wait...\n");
power = new double[Common.PIP_NUM][Common.REPETITIONS];
for (int i = 0; i < Common.PIP_NUM * Common.REPETITIONS; i++) {
if (i % Common.PIP_NUM == 0) {
@@ -187,10 +188,10 @@
pipExpectedStartI = prefixTotalLength
+ Util.toLength(i * (Common.PIP_DURATION_S + Common.PAUSE_DURATION_S), sampleRate);
// Cut out the data points for the current pip.
- double[] pipPoints = new double[Common.window().length];
+ double[] pipPoints = new double[Common.WINDOW_FOR_RECORDER.length];
System.arraycopy(data, pipExpectedStartI, pipPoints, 0, pipPoints.length);
- for (int j = 0; j < Common.window().length; j++) {
- pipPoints[j] = pipPoints[j] * Common.window()[j];
+ for (int j = 0; j < Common.WINDOW_FOR_RECORDER.length; j++) {
+ pipPoints[j] = pipPoints[j] * Common.WINDOW_FOR_RECORDER[j];
}
Complex fourierCoeff = new Complex(0, 0);
final Complex rotator = new Complex(0,
@@ -219,18 +220,16 @@
*/
int alignData() {
// Zeropadding samples to add in the correlation to avoid FFT wraparound.
- final int zeroPad = Common.PREFIX_LENGTH - 1;
- int fftSize = Util.nextPowerOfTwo(
- (int) Math.round(sampleRate
- * (Common.PAUSE_BEFORE_PREFIX_DURATION_S + Common.PAUSE_AFTER_PREFIX_DURATION_S))
- + Common.PREFIX_LENGTH
+ final int zeroPad = Util.toLength(Common.PREFIX_LENGTH_S, Common.RECORDING_SAMPLE_RATE_HZ) - 1;
+ int fftSize = Util.nextPowerOfTwo((int) Math.round(sampleRate * (Common.PREFIX_LENGTH_S
+ + Common.PAUSE_BEFORE_PREFIX_DURATION_S + Common.PAUSE_AFTER_PREFIX_DURATION_S + 0.5))
+ zeroPad);
double[] dataCut = new double[fftSize - zeroPad];
System.arraycopy(data, 0, dataCut, 0, fftSize - zeroPad);
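+    // Alignment: cross-correlate the recording with the known prefix waveform and take the peak.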
double[] xCorrDataPrefix = Util.computeCrossCorrelation(
Util.padZeros(Util.toComplex(dataCut), fftSize),
- Util.padZeros(Util.toComplex(Common.PREFIX), fftSize));
+ Util.padZeros(Util.toComplex(Common.PREFIX_FOR_RECORDER), fftSize));
return Util.findMaxIndex(xCorrDataPrefix);
}
diff --git a/apps/CtsVerifier/src/com/android/cts/verifier/camera/its/ItsService.java b/apps/CtsVerifier/src/com/android/cts/verifier/camera/its/ItsService.java
index e3d0b6d..58b51a5 100644
--- a/apps/CtsVerifier/src/com/android/cts/verifier/camera/its/ItsService.java
+++ b/apps/CtsVerifier/src/com/android/cts/verifier/camera/its/ItsService.java
@@ -146,6 +146,7 @@
private AtomicInteger mCountCallbacksRemaining = new AtomicInteger();
private AtomicInteger mCountRawOrDng = new AtomicInteger();
private AtomicInteger mCountRaw10 = new AtomicInteger();
+ private AtomicInteger mCountRaw12 = new AtomicInteger();
private AtomicInteger mCountJpg = new AtomicInteger();
private AtomicInteger mCountYuv = new AtomicInteger();
private AtomicInteger mCountCapRes = new AtomicInteger();
@@ -658,6 +659,8 @@
jsonSurface.put("format", "raw");
} else if (format == ImageFormat.RAW10) {
jsonSurface.put("format", "raw10");
+ } else if (format == ImageFormat.RAW12) {
+ jsonSurface.put("format", "raw12");
} else if (format == ImageFormat.JPEG) {
jsonSurface.put("format", "jpeg");
} else if (format == ImageFormat.YUV_420_888) {
@@ -1004,6 +1007,7 @@
mCountJpg.set(0);
mCountYuv.set(0);
mCountRaw10.set(0);
+ mCountRaw12.set(0);
mCountCapRes.set(0);
mCaptureRawIsDng = false;
mCaptureResults = new CaptureResult[requests.size()];
@@ -1028,13 +1032,16 @@
sizes = ItsUtils.getJpegOutputSizes(mCameraCharacteristics);
} else if ("raw".equals(sformat)) {
formats[i] = ImageFormat.RAW_SENSOR;
- sizes = ItsUtils.getRawOutputSizes(mCameraCharacteristics);
+ sizes = ItsUtils.getRaw16OutputSizes(mCameraCharacteristics);
} else if ("raw10".equals(sformat)) {
formats[i] = ImageFormat.RAW10;
- sizes = ItsUtils.getRawOutputSizes(mCameraCharacteristics);
+ sizes = ItsUtils.getRaw10OutputSizes(mCameraCharacteristics);
+ } else if ("raw12".equals(sformat)) {
+ formats[i] = ImageFormat.RAW12;
+ sizes = ItsUtils.getRaw12OutputSizes(mCameraCharacteristics);
} else if ("dng".equals(sformat)) {
formats[i] = ImageFormat.RAW_SENSOR;
- sizes = ItsUtils.getRawOutputSizes(mCameraCharacteristics);
+ sizes = ItsUtils.getRaw16OutputSizes(mCameraCharacteristics);
mCaptureRawIsDng = true;
} else {
throw new ItsException("Unsupported format: " + sformat);
@@ -1170,6 +1177,12 @@
ByteBuffer buf = ByteBuffer.wrap(img);
int count = mCountRaw10.getAndIncrement();
mSocketRunnableObj.sendResponseCaptureBuffer("raw10Image", buf);
+ } else if (format == ImageFormat.RAW12) {
+ Logt.i(TAG, "Received RAW12 capture");
+ byte[] img = ItsUtils.getDataFromImage(capture);
+ ByteBuffer buf = ByteBuffer.wrap(img);
+ int count = mCountRaw12.getAndIncrement();
+ mSocketRunnableObj.sendResponseCaptureBuffer("raw12Image", buf);
} else if (format == ImageFormat.RAW_SENSOR) {
Logt.i(TAG, "Received RAW16 capture");
int count = mCountRawOrDng.getAndIncrement();
diff --git a/apps/CtsVerifier/src/com/android/cts/verifier/camera/its/ItsUtils.java b/apps/CtsVerifier/src/com/android/cts/verifier/camera/its/ItsUtils.java
index 2011314..b09b90c 100644
--- a/apps/CtsVerifier/src/com/android/cts/verifier/camera/its/ItsUtils.java
+++ b/apps/CtsVerifier/src/com/android/cts/verifier/camera/its/ItsUtils.java
@@ -91,11 +91,21 @@
}
}
- public static Size[] getRawOutputSizes(CameraCharacteristics ccs)
+ public static Size[] getRaw16OutputSizes(CameraCharacteristics ccs)
throws ItsException {
return getOutputSizes(ccs, ImageFormat.RAW_SENSOR);
}
+ public static Size[] getRaw10OutputSizes(CameraCharacteristics ccs)
+ throws ItsException {
+ return getOutputSizes(ccs, ImageFormat.RAW10);
+ }
+
+ public static Size[] getRaw12OutputSizes(CameraCharacteristics ccs)
+ throws ItsException {
+ return getOutputSizes(ccs, ImageFormat.RAW12);
+ }
+
public static Size[] getJpegOutputSizes(CameraCharacteristics ccs)
throws ItsException {
return getOutputSizes(ccs, ImageFormat.JPEG);
@@ -139,7 +149,7 @@
buffer.get(data);
return data;
} else if (format == ImageFormat.YUV_420_888 || format == ImageFormat.RAW_SENSOR
- || format == ImageFormat.RAW10) {
+ || format == ImageFormat.RAW10 || format == ImageFormat.RAW12) {
int offset = 0;
data = new byte[width * height * ImageFormat.getBitsPerPixel(format) / 8];
int maxRowSize = planes[0].getRowStride();
@@ -213,6 +223,7 @@
return 3 == planes.length;
case ImageFormat.RAW_SENSOR:
case ImageFormat.RAW10:
+ case ImageFormat.RAW12:
case ImageFormat.JPEG:
return 1 == planes.length;
default:
diff --git a/apps/CtsVerifier/src/com/android/cts/verifier/managedprovisioning/ByodFlowTestActivity.java b/apps/CtsVerifier/src/com/android/cts/verifier/managedprovisioning/ByodFlowTestActivity.java
index 628ff3e..e41c6d0 100644
--- a/apps/CtsVerifier/src/com/android/cts/verifier/managedprovisioning/ByodFlowTestActivity.java
+++ b/apps/CtsVerifier/src/com/android/cts/verifier/managedprovisioning/ByodFlowTestActivity.java
@@ -82,6 +82,9 @@
private TestItem mCredSettingsVisibleTest;
private TestItem mPrintSettingsVisibleTest;
private TestItem mIntentFiltersTest;
+ private TestItem mCrossProfileImageCaptureSupportTest;
+ private TestItem mCrossProfileVideoCaptureSupportTest;
+ private TestItem mCrossProfileAudioCaptureSupportTest;
private int mCurrentTestPosition;
@@ -256,6 +259,50 @@
mTests.add(mDisableNonMarketTest);
mTests.add(mEnableNonMarketTest);
mTests.add(mIntentFiltersTest);
+
+ if (canResolveIntent(ByodHelperActivity.getCaptureImageIntent())) {
+ // Capture image intent can be resolved in primary profile, so test.
+ mCrossProfileImageCaptureSupportTest = new TestItem(this,
+ R.string.provisioning_byod_capture_image_support,
+ R.string.provisioning_byod_capture_image_support_info,
+ new Intent(ByodHelperActivity.ACTION_CAPTURE_AND_CHECK_IMAGE));
+ mTests.add(mCrossProfileImageCaptureSupportTest);
+ } else {
+ // Capture image intent cannot be resolved in primary profile, so skip test.
+ Toast.makeText(ByodFlowTestActivity.this,
+ R.string.provisioning_byod_no_image_capture_resolver, Toast.LENGTH_SHORT)
+ .show();
+ }
+
+ if (canResolveIntent(ByodHelperActivity.getCaptureVideoIntent())) {
+ // Capture video intent can be resolved in primary profile, so test.
+ mCrossProfileVideoCaptureSupportTest = new TestItem(this,
+ R.string.provisioning_byod_capture_video_support,
+ R.string.provisioning_byod_capture_video_support_info,
+ new Intent(ByodHelperActivity.ACTION_CAPTURE_AND_CHECK_VIDEO));
+ mTests.add(mCrossProfileVideoCaptureSupportTest);
+ } else {
+ // Capture video intent cannot be resolved in primary profile, so skip test.
+ Toast.makeText(ByodFlowTestActivity.this,
+ R.string.provisioning_byod_no_video_capture_resolver, Toast.LENGTH_SHORT)
+ .show();
+ }
+
+ /* TODO: reinstate when bug b/20131958 is fixed
+ if (canResolveIntent(ByodHelperActivity.getCaptureAudioIntent())) {
+ // Capture audio intent can be resolved in primary profile, so test.
+ mCrossProfileAudioCaptureSupportTest = new TestItem(this,
+ R.string.provisioning_byod_capture_audio_support,
+ R.string.provisioning_byod_capture_audio_support_info,
+ new Intent(ByodHelperActivity.ACTION_CAPTURE_AND_CHECK_AUDIO));
+ mTests.add(mCrossProfileAudioCaptureSupportTest);
+ } else {
+ // Capture audio intent cannot be resolved in primary profile, so skip test.
+ Toast.makeText(ByodFlowTestActivity.this,
+ R.string.provisioning_byod_no_audio_capture_resolver, Toast.LENGTH_SHORT)
+ .show();
+ }
+ */
}
@Override
@@ -266,6 +313,11 @@
test.performTest(this);
}
+ // Return whether the intent can be resolved in the current profile
+ private boolean canResolveIntent(Intent intent) {
+ return intent.resolveActivity(getPackageManager()) != null;
+ }
+
private void showManualTestDialog(final TestItem test) {
AlertDialog.Builder dialogBuilder = new AlertDialog.Builder(this)
.setIcon(android.R.drawable.ic_dialog_info)
diff --git a/apps/CtsVerifier/src/com/android/cts/verifier/managedprovisioning/ByodHelperActivity.java b/apps/CtsVerifier/src/com/android/cts/verifier/managedprovisioning/ByodHelperActivity.java
index 13af890..d8a3387 100644
--- a/apps/CtsVerifier/src/com/android/cts/verifier/managedprovisioning/ByodHelperActivity.java
+++ b/apps/CtsVerifier/src/com/android/cts/verifier/managedprovisioning/ByodHelperActivity.java
@@ -18,6 +18,7 @@
import android.app.Activity;
import android.app.admin.DevicePolicyManager;
+import android.app.Dialog;
import android.content.BroadcastReceiver;
import android.content.ComponentName;
import android.content.Context;
@@ -26,24 +27,31 @@
import android.content.pm.PackageManager;
import android.net.Uri;
import android.os.Bundle;
+import android.provider.MediaStore;
import android.provider.Settings;
+import android.support.v4.content.FileProvider;
import android.util.Log;
import android.widget.Toast;
import static android.provider.Settings.Secure.INSTALL_NON_MARKET_APPS;
+import java.io.File;
+import java.util.ArrayList;
+
import com.android.cts.verifier.R;
import com.android.cts.verifier.managedprovisioning.ByodFlowTestActivity.TestResult;
+import com.android.cts.verifier.managedprovisioning.ByodPresentMediaDialog.DialogCallback;
/**
* A helper activity from the managed profile side that responds to requests from CTS verifier in
* primary user. Profile owner APIs are accessible inside this activity (given this activity is
* started within the work profile). Its current functionalities include making sure the profile
- * owner is setup correctly, and removing the work profile upon request.
+ * owner is set up correctly, removing the work profile upon request, and verifying the image and
+ * video capture functionality.
*
* Note: We have to use a dummy activity because cross-profile intents only work for activities.
*/
-public class ByodHelperActivity extends Activity {
+public class ByodHelperActivity extends Activity implements DialogCallback {
static final String TAG = "ByodHelperActivity";
// Primary -> managed intent: query if the profile owner has been set up.
@@ -54,6 +62,12 @@
public static final String ACTION_REMOVE_PROFILE_OWNER = "com.android.cts.verifier.managedprovisioning.BYOD_REMOVE";
// Managed -> managed intent: provisioning completed successfully
public static final String ACTION_PROFILE_PROVISIONED = "com.android.cts.verifier.managedprovisioning.BYOD_PROVISIONED";
+    // Primary -> managed intent: request to capture and check an image
+ public static final String ACTION_CAPTURE_AND_CHECK_IMAGE = "com.android.cts.verifier.managedprovisioning.BYOD_CAPTURE_AND_CHECK_IMAGE";
+    // Primary -> managed intent: request to capture and check a video
+ public static final String ACTION_CAPTURE_AND_CHECK_VIDEO = "com.android.cts.verifier.managedprovisioning.BYOD_CAPTURE_AND_CHECK_VIDEO";
+    // Primary -> managed intent: request to capture and check an audio recording
+ public static final String ACTION_CAPTURE_AND_CHECK_AUDIO = "com.android.cts.verifier.managedprovisioning.BYOD_CAPTURE_AND_CHECK_AUDIO";
public static final String EXTRA_PROVISIONED = "extra_provisioned";
@@ -68,6 +82,9 @@
public static final int RESULT_FAILED = RESULT_FIRST_USER;
private static final int REQUEST_INSTALL_PACKAGE = 1;
+ private static final int REQUEST_IMAGE_CAPTURE = 2;
+ private static final int REQUEST_VIDEO_CAPTURE = 3;
+ private static final int REQUEST_AUDIO_CAPTURE = 4;
private static final String ORIGINAL_SETTINGS_NAME = "original settings";
private Bundle mOriginalSettings;
@@ -75,6 +92,11 @@
private ComponentName mAdminReceiverComponent;
private DevicePolicyManager mDevicePolicyManager;
+ private Uri mImageUri;
+ private Uri mVideoUri;
+
+ private ArrayList<File> mTempFiles = new ArrayList<File>();
+
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
@@ -132,6 +154,40 @@
new IntentFiltersTestHelper(this).checkCrossProfileIntentFilters(
IntentFiltersTestHelper.FLAG_INTENTS_FROM_MANAGED);
setResult(intentFiltersSetForManagedIntents? RESULT_OK : RESULT_FAILED, null);
+ } else if (action.equals(ACTION_CAPTURE_AND_CHECK_IMAGE)) {
+ Intent captureImageIntent = getCaptureImageIntent();
+ mImageUri = getTempUri("image.jpg");
+ captureImageIntent.putExtra(MediaStore.EXTRA_OUTPUT, mImageUri);
+ if (captureImageIntent.resolveActivity(getPackageManager()) != null) {
+ startActivityForResult(captureImageIntent, REQUEST_IMAGE_CAPTURE);
+ } else {
+ Log.e(TAG, "Capture image intent could not be resolved in managed profile.");
+ showToast(R.string.provisioning_byod_capture_media_error);
+ finish();
+ }
+ return;
+ } else if (action.equals(ACTION_CAPTURE_AND_CHECK_VIDEO)) {
+ Intent captureVideoIntent = getCaptureVideoIntent();
+ mVideoUri = getTempUri("video.mp4");
+ captureVideoIntent.putExtra(MediaStore.EXTRA_OUTPUT, mVideoUri);
+ if (captureVideoIntent.resolveActivity(getPackageManager()) != null) {
+ startActivityForResult(captureVideoIntent, REQUEST_VIDEO_CAPTURE);
+ } else {
+ Log.e(TAG, "Capture video intent could not be resolved in managed profile.");
+ showToast(R.string.provisioning_byod_capture_media_error);
+ finish();
+ }
+ return;
+ } else if (action.equals(ACTION_CAPTURE_AND_CHECK_AUDIO)) {
+ Intent captureAudioIntent = getCaptureAudioIntent();
+ if (captureAudioIntent.resolveActivity(getPackageManager()) != null) {
+ startActivityForResult(captureAudioIntent, REQUEST_AUDIO_CAPTURE);
+ } else {
+ Log.e(TAG, "Capture audio intent could not be resolved in managed profile.");
+ showToast(R.string.provisioning_byod_capture_media_error);
+ finish();
+ }
+ return;
}
// This activity has no UI and is only used to respond to CtsVerifier in the primary side.
finish();
@@ -157,6 +213,36 @@
finish();
break;
}
+ case REQUEST_IMAGE_CAPTURE: {
+ if (resultCode == RESULT_OK) {
+ ByodPresentMediaDialog.newImageInstance(mImageUri)
+ .show(getFragmentManager(), "ViewImageDialogFragment");
+ } else {
+ // Failed capturing image.
+ finish();
+ }
+ break;
+ }
+ case REQUEST_VIDEO_CAPTURE: {
+ if (resultCode == RESULT_OK) {
+ ByodPresentMediaDialog.newVideoInstance(mVideoUri)
+ .show(getFragmentManager(), "PlayVideoDialogFragment");
+ } else {
+ // Failed capturing video.
+ finish();
+ }
+ break;
+ }
+ case REQUEST_AUDIO_CAPTURE: {
+ if (resultCode == RESULT_OK) {
+ ByodPresentMediaDialog.newAudioInstance(data.getData())
+ .show(getFragmentManager(), "PlayAudioDialogFragment");
+ } else {
+ // Failed capturing audio.
+ finish();
+ }
+ break;
+ }
default: {
Log.wtf(TAG, "Unknown requestCode " + requestCode + "; data = " + data);
break;
@@ -164,6 +250,39 @@
}
}
+ @Override
+ protected void onDestroy() {
+ cleanUpTempUris();
+ super.onDestroy();
+ }
+
+ public static Intent getCaptureImageIntent() {
+ return new Intent(MediaStore.ACTION_IMAGE_CAPTURE);
+ }
+
+ public static Intent getCaptureVideoIntent() {
+ return new Intent(MediaStore.ACTION_VIDEO_CAPTURE);
+ }
+
+ public static Intent getCaptureAudioIntent() {
+ return new Intent(MediaStore.Audio.Media.RECORD_SOUND_ACTION);
+ }
+
+ private Uri getTempUri(String fileName) {
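+        // The file is created under the app's private files/ directory and exposed to the
+        // camera app through a content URI from FileProvider (the authority below is assumed
+        // to be declared as a <provider> in the CtsVerifier manifest).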
+ final File file = new File(getFilesDir() + File.separator + "images"
+ + File.separator + fileName);
+        file.getParentFile().mkdirs(); // create the parent folder if it doesn't exist
+ mTempFiles.add(file);
+ return FileProvider.getUriForFile(this,
+ "com.android.cts.verifier.managedprovisioning.fileprovider", file);
+ }
+
+ private void cleanUpTempUris() {
+ for (File file : mTempFiles) {
+ file.delete();
+ }
+ }
+
private boolean isProfileOwner() {
return mDevicePolicyManager.isAdminActive(mAdminReceiverComponent) &&
mDevicePolicyManager.isProfileOwnerApp(mAdminReceiverComponent.getPackageName());
@@ -193,4 +312,9 @@
String message = getString(messageId);
Toast.makeText(this, message, Toast.LENGTH_SHORT).show();
}
+
+ @Override
+ public void onDialogClose() {
+ finish();
+ }
}
diff --git a/apps/CtsVerifier/src/com/android/cts/verifier/managedprovisioning/ByodPresentMediaDialog.java b/apps/CtsVerifier/src/com/android/cts/verifier/managedprovisioning/ByodPresentMediaDialog.java
new file mode 100644
index 0000000..b3f126b
--- /dev/null
+++ b/apps/CtsVerifier/src/com/android/cts/verifier/managedprovisioning/ByodPresentMediaDialog.java
@@ -0,0 +1,167 @@
+/*
+ * Copyright 2015, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.cts.verifier.managedprovisioning;
+
+import android.app.Dialog;
+import android.app.DialogFragment;
+import android.content.DialogInterface;
+import android.graphics.Bitmap;
+import android.net.Uri;
+import android.media.MediaPlayer;
+import android.media.MediaPlayer.OnPreparedListener;
+import android.os.Bundle;
+import android.util.Log;
+import android.view.View;
+import android.view.View.OnClickListener;
+import android.widget.Button;
+import android.widget.ImageView;
+import android.widget.Toast;
+import android.widget.VideoView;
+
+import com.android.cts.verifier.R;
+
+import java.io.IOException;
+
+/**
+ * This dialog shows/plays an image, video or audio uri.
+ */
+public class ByodPresentMediaDialog extends DialogFragment {
+ static final String TAG = "ByodPresentMediaDialog";
+
+ private static final String KEY_VIDEO_URI = "video";
+ private static final String KEY_IMAGE_URI = "image";
+ private static final String KEY_AUDIO_URI = "audio";
+
+ /**
+     * Get a DialogFragment showing an image.
+ */
+ public static ByodPresentMediaDialog newImageInstance(Uri uri) {
+ ByodPresentMediaDialog dialog = new ByodPresentMediaDialog();
+ Bundle args = new Bundle();
+ args.putParcelable(KEY_IMAGE_URI, uri);
+ dialog.setArguments(args);
+ return dialog;
+ }
+
+ /**
+     * Get a DialogFragment playing a video.
+ */
+ public static ByodPresentMediaDialog newVideoInstance(Uri uri) {
+ ByodPresentMediaDialog dialog = new ByodPresentMediaDialog();
+ Bundle args = new Bundle();
+ args.putParcelable(KEY_VIDEO_URI, uri);
+ dialog.setArguments(args);
+ return dialog;
+ }
+
+ /**
+     * Get a DialogFragment playing audio.
+ */
+ public static ByodPresentMediaDialog newAudioInstance(Uri uri) {
+ ByodPresentMediaDialog dialog = new ByodPresentMediaDialog();
+ Bundle args = new Bundle();
+ args.putParcelable(KEY_AUDIO_URI, uri);
+ dialog.setArguments(args);
+ return dialog;
+ }
+
+ @Override
+ public Dialog onCreateDialog(Bundle savedInstanceState) {
+ final Dialog dialog = new Dialog(getActivity());
+ dialog.setContentView(R.layout.byod_present_media);
+
+ Button dismissButton = (Button) dialog.findViewById(R.id.dismissButton);
+ dismissButton.setOnClickListener(new View.OnClickListener() {
+ public void onClick(View v) {
+ dismiss();
+ ((DialogCallback) getActivity()).onDialogClose();
+ }
+ });
+
+ Bundle arguments = getArguments();
+
+ // Initially all video and image specific UI is invisible.
+ if (arguments.containsKey(KEY_VIDEO_URI)) {
+ // Show video UI.
+ dialog.setTitle(getString(R.string.provisioning_byod_verify_video_title));
+
+ Uri uri = (Uri) getArguments().getParcelable(KEY_VIDEO_URI);
+ final VideoView videoView = (VideoView) dialog.findViewById(R.id.videoView);
+ videoView.setVisibility(View.VISIBLE);
+ videoView.setVideoURI(uri);
+
+ Button playButton = (Button) dialog.findViewById(R.id.playButton);
+ playButton.setVisibility(View.VISIBLE);
+ playButton.setOnClickListener(new View.OnClickListener() {
+ public void onClick(View v) {
+ videoView.start();
+ }
+ });
+ } else if (arguments.containsKey(KEY_IMAGE_URI)) {
+ // Show image UI.
+ dialog.setTitle(getString(R.string.provisioning_byod_verify_image_title));
+
+ Uri uri = (Uri) getArguments().getParcelable(KEY_IMAGE_URI);
+ ImageView imageView = (ImageView) dialog.findViewById(R.id.imageView);
+ imageView.setVisibility(View.VISIBLE);
+ imageView.setImageURI(uri);
+ } else if (arguments.containsKey(KEY_AUDIO_URI)) {
+ // Show audio playback UI.
+ dialog.setTitle(getString(R.string.provisioning_byod_verify_audio_title));
+
+ Uri uri = (Uri) getArguments().getParcelable(KEY_AUDIO_URI);
+ final MediaPlayer mediaPlayer = new MediaPlayer();
+ final Button playButton = (Button) dialog.findViewById(R.id.playButton);
+ playButton.setVisibility(View.VISIBLE);
+ playButton.setEnabled(false);
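+            // Playback is enabled only after MediaPlayer reports the data source as prepared
+            // (see the OnPreparedListener below).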
+
+ try {
+ mediaPlayer.setDataSource(getActivity(), uri);
+ mediaPlayer.prepare();
+ } catch (IllegalArgumentException|SecurityException|IllegalStateException
+ |IOException e) {
+ Log.e(TAG, "Cannot play given audio with media player.", e);
+ Toast.makeText(getActivity(), R.string.provisioning_byod_capture_media_error,
+ Toast.LENGTH_SHORT).show();
+ getActivity().finish();
+ }
+
+ mediaPlayer.setOnPreparedListener(new OnPreparedListener() {
+ @Override
+ public void onPrepared(MediaPlayer mp) {
+ playButton.setEnabled(true);
+ playButton.setOnClickListener(new View.OnClickListener() {
+ public void onClick(View v) {
+ mediaPlayer.start();
+ }
+ });
+ }
+ });
+ }
+
+ return dialog;
+ }
+
+ @Override
+ public void onCancel(DialogInterface dialog) {
+ ((DialogCallback) getActivity()).onDialogClose();
+ }
+
+ public interface DialogCallback {
+ public abstract void onDialogClose();
+ }
+}
diff --git a/apps/CtsVerifier/src/com/android/cts/verifier/managedprovisioning/DeviceAdminTestReceiver.java b/apps/CtsVerifier/src/com/android/cts/verifier/managedprovisioning/DeviceAdminTestReceiver.java
index 58c068f..e95752e 100644
--- a/apps/CtsVerifier/src/com/android/cts/verifier/managedprovisioning/DeviceAdminTestReceiver.java
+++ b/apps/CtsVerifier/src/com/android/cts/verifier/managedprovisioning/DeviceAdminTestReceiver.java
@@ -50,6 +50,9 @@
filter.addAction(ByodHelperActivity.ACTION_REMOVE_PROFILE_OWNER);
filter.addAction(ByodHelperActivity.ACTION_INSTALL_APK);
filter.addAction(ByodHelperActivity.ACTION_CHECK_INTENT_FILTERS);
+ filter.addAction(ByodHelperActivity.ACTION_CAPTURE_AND_CHECK_IMAGE);
+ filter.addAction(ByodHelperActivity.ACTION_CAPTURE_AND_CHECK_VIDEO);
+ filter.addAction(ByodHelperActivity.ACTION_CAPTURE_AND_CHECK_AUDIO);
filter.addAction(CrossProfileTestActivity.ACTION_CROSS_PROFILE);
filter.addAction(WorkNotificationTestActivity.ACTION_WORK_NOTIFICATION);
filter.addAction(WorkNotificationTestActivity.ACTION_CLEAR_WORK_NOTIFICATION);
diff --git a/apps/CtsVerifier/src/com/android/cts/verifier/sensors/CtsMediaOutputSurface.java b/apps/CtsVerifier/src/com/android/cts/verifier/sensors/CtsMediaOutputSurface.java
new file mode 100644
index 0000000..b28e06b
--- /dev/null
+++ b/apps/CtsVerifier/src/com/android/cts/verifier/sensors/CtsMediaOutputSurface.java
@@ -0,0 +1,322 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.cts.verifier.sensors;
+
+import android.graphics.SurfaceTexture;
+import android.opengl.EGL14;
+import android.opengl.EGLConfig;
+import android.opengl.EGLContext;
+import android.opengl.EGLDisplay;
+import android.opengl.EGLSurface;
+import android.util.Log;
+import android.view.Surface;
+
+
+//
+// This file is copied from android.hardware.cts.media
+//
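+// Typical decoder usage (sketch only, not part of this test; "decoder", "format", "width" and
+// "height" are placeholders):
+//
+//   CtsMediaOutputSurface outputSurface = new CtsMediaOutputSurface(width, height);
+//   decoder.configure(format, outputSurface.getSurface(), null, 0);
+//   // ...after releasing an output buffer with render == true...
+//   outputSurface.awaitNewImage(); // latch the frame into the texture
+//   outputSurface.drawImage();     // render it to the pbuffer for glReadPixels checks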
+
+/**
+ * Holds state associated with a Surface used for MediaCodec decoder output.
+ * <p>
+ * The (width,height) constructor for this class will prepare GL, create a SurfaceTexture,
+ * and then create a Surface for that SurfaceTexture. The Surface can be passed to
+ * MediaCodec.configure() to receive decoder output. When a frame arrives, we latch the
+ * texture with updateTexImage, then render the texture with GL to a pbuffer.
+ * <p>
+ * The no-arg constructor skips the GL preparation step and doesn't allocate a pbuffer.
+ * Instead, it just creates the Surface and SurfaceTexture, and when a frame arrives
+ * we just draw it on whatever surface is current.
+ * <p>
+ * By default, the Surface will be using a BufferQueue in asynchronous mode, so we
+ * can potentially drop frames.
+ */
+class CtsMediaOutputSurface implements SurfaceTexture.OnFrameAvailableListener {
+ private static final String TAG = "OutputSurface";
+ private static final boolean VERBOSE = false;
+
+ private EGLDisplay mEGLDisplay = EGL14.EGL_NO_DISPLAY;
+ private EGLContext mEGLContext = EGL14.EGL_NO_CONTEXT;
+ private EGLSurface mEGLSurface = EGL14.EGL_NO_SURFACE;
+
+ private SurfaceTexture mSurfaceTexture;
+ private Surface mSurface;
+
+ private Object mFrameSyncObject = new Object(); // guards mFrameAvailable
+ private boolean mFrameAvailable;
+
+ private CtsMediaTextureRender mTextureRender;
+
+ /**
+     * Creates an OutputSurface backed by a pbuffer with the specified dimensions. The new
+ * EGL context and surface will be made current. Creates a Surface that can be passed
+ * to MediaCodec.configure().
+ */
+ public CtsMediaOutputSurface(int width, int height) {
+ if (width <= 0 || height <= 0) {
+ throw new IllegalArgumentException();
+ }
+
+ eglSetup(width, height);
+ makeCurrent();
+
+ setup(this);
+ }
+
+ /**
+ * Creates an OutputSurface using the current EGL context (rather than establishing a
+ * new one). Creates a Surface that can be passed to MediaCodec.configure().
+ */
+ public CtsMediaOutputSurface() {
+ setup(this);
+ }
+
+ public CtsMediaOutputSurface(final SurfaceTexture.OnFrameAvailableListener listener) {
+ setup(listener);
+ }
+
+ /**
+ * Creates instances of TextureRender and SurfaceTexture, and a Surface associated
+ * with the SurfaceTexture.
+ */
+ private void setup(SurfaceTexture.OnFrameAvailableListener listener) {
+ mTextureRender = new CtsMediaTextureRender();
+ mTextureRender.surfaceCreated();
+
+ // Even if we don't access the SurfaceTexture after the constructor returns, we
+ // still need to keep a reference to it. The Surface doesn't retain a reference
+ // at the Java level, so if we don't either then the object can get GCed, which
+ // causes the native finalizer to run.
+ if (VERBOSE) Log.d(TAG, "textureID=" + mTextureRender.getTextureId());
+ mSurfaceTexture = new SurfaceTexture(mTextureRender.getTextureId());
+
+ // This doesn't work if OutputSurface is created on the thread that CTS started for
+ // these test cases.
+ //
+ // The CTS-created thread has a Looper, and the SurfaceTexture constructor will
+ // create a Handler that uses it. The "frame available" message is delivered
+ // there, but since we're not a Looper-based thread we'll never see it. For
+ // this to do anything useful, OutputSurface must be created on a thread without
+ // a Looper, so that SurfaceTexture uses the main application Looper instead.
+ //
+ // Java language note: passing "this" out of a constructor is generally unwise,
+ // but we should be able to get away with it here.
+ mSurfaceTexture.setOnFrameAvailableListener(listener);
+
+ mSurface = new Surface(mSurfaceTexture);
+ }
+
+ /**
+ * Prepares EGL. We want a GLES 2.0 context and a surface that supports pbuffer.
+ */
+ private void eglSetup(int width, int height) {
+ mEGLDisplay = EGL14.eglGetDisplay(EGL14.EGL_DEFAULT_DISPLAY);
+ if (mEGLDisplay == EGL14.EGL_NO_DISPLAY) {
+ throw new RuntimeException("unable to get EGL14 display");
+ }
+ int[] version = new int[2];
+ if (!EGL14.eglInitialize(mEGLDisplay, version, 0, version, 1)) {
+ mEGLDisplay = null;
+ throw new RuntimeException("unable to initialize EGL14");
+ }
+
+ // Configure EGL for pbuffer and OpenGL ES 2.0. We want enough RGB bits
+ // to be able to tell if the frame is reasonable.
+ int[] attribList = {
+ EGL14.EGL_RED_SIZE, 8,
+ EGL14.EGL_GREEN_SIZE, 8,
+ EGL14.EGL_BLUE_SIZE, 8,
+ EGL14.EGL_RENDERABLE_TYPE, EGL14.EGL_OPENGL_ES2_BIT,
+ EGL14.EGL_SURFACE_TYPE, EGL14.EGL_PBUFFER_BIT,
+ EGL14.EGL_NONE
+ };
+ EGLConfig[] configs = new EGLConfig[1];
+ int[] numConfigs = new int[1];
+ if (!EGL14.eglChooseConfig(mEGLDisplay, attribList, 0, configs, 0, configs.length,
+ numConfigs, 0)) {
+ throw new RuntimeException("unable to find RGB888+recordable ES2 EGL config");
+ }
+
+ // Configure context for OpenGL ES 2.0.
+ int[] attrib_list = {
+ EGL14.EGL_CONTEXT_CLIENT_VERSION, 2,
+ EGL14.EGL_NONE
+ };
+ mEGLContext = EGL14.eglCreateContext(mEGLDisplay, configs[0], EGL14.EGL_NO_CONTEXT,
+ attrib_list, 0);
+ checkEglError("eglCreateContext");
+ if (mEGLContext == null) {
+ throw new RuntimeException("null context");
+ }
+
+ // Create a pbuffer surface. By using this for output, we can use glReadPixels
+ // to test values in the output.
+ int[] surfaceAttribs = {
+ EGL14.EGL_WIDTH, width,
+ EGL14.EGL_HEIGHT, height,
+ EGL14.EGL_NONE
+ };
+ mEGLSurface = EGL14.eglCreatePbufferSurface(mEGLDisplay, configs[0], surfaceAttribs, 0);
+ checkEglError("eglCreatePbufferSurface");
+ if (mEGLSurface == null) {
+ throw new RuntimeException("surface was null");
+ }
+ }
+
+ /**
+ * Discard all resources held by this class, notably the EGL context.
+ */
+ public void release() {
+ if (mEGLDisplay != EGL14.EGL_NO_DISPLAY) {
+ EGL14.eglDestroySurface(mEGLDisplay, mEGLSurface);
+ EGL14.eglDestroyContext(mEGLDisplay, mEGLContext);
+ EGL14.eglReleaseThread();
+ EGL14.eglTerminate(mEGLDisplay);
+ }
+
+ mSurface.release();
+
+ // this causes a bunch of warnings that appear harmless but might confuse someone:
+ // W BufferQueue: [unnamed-3997-2] cancelBuffer: BufferQueue has been abandoned!
+ //mSurfaceTexture.release();
+
+ mEGLDisplay = EGL14.EGL_NO_DISPLAY;
+ mEGLContext = EGL14.EGL_NO_CONTEXT;
+ mEGLSurface = EGL14.EGL_NO_SURFACE;
+
+ mTextureRender = null;
+ mSurface = null;
+ mSurfaceTexture = null;
+ }
+
+ /**
+ * Makes our EGL context and surface current.
+ */
+ public void makeCurrent() {
+ if (!EGL14.eglMakeCurrent(mEGLDisplay, mEGLSurface, mEGLSurface, mEGLContext)) {
+ throw new RuntimeException("eglMakeCurrent failed");
+ }
+ }
+
+ /**
+ * Returns the Surface that we draw onto.
+ */
+ public Surface getSurface() {
+ return mSurface;
+ }
+
+ /**
+ * Replaces the fragment shader.
+ */
+ public void changeFragmentShader(String fragmentShader) {
+ mTextureRender.changeFragmentShader(fragmentShader);
+ }
+
+ /**
+ * Latches the next buffer into the texture. Must be called from the thread that created
+ * the OutputSurface object, after the onFrameAvailable callback has signaled that new
+ * data is available.
+ */
+ public void awaitNewImage() {
+ final int TIMEOUT_MS = 500;
+
+ synchronized (mFrameSyncObject) {
+ while (!mFrameAvailable) {
+ try {
+ // Wait for onFrameAvailable() to signal us. Use a timeout to avoid
+ // stalling the test if it doesn't arrive.
+ mFrameSyncObject.wait(TIMEOUT_MS);
+ if (!mFrameAvailable) {
+ // TODO: if "spurious wakeup", continue while loop
+ throw new RuntimeException("Surface frame wait timed out");
+ }
+ } catch (InterruptedException ie) {
+ // shouldn't happen
+ throw new RuntimeException(ie);
+ }
+ }
+ mFrameAvailable = false;
+ }
+
+ // Latch the data.
+ mTextureRender.checkGlError("before updateTexImage");
+ mSurfaceTexture.updateTexImage();
+ }
+
+ /**
+     * Waits up to the given timeout for a new image to become available.
+     * @param timeoutMs maximum time to wait, in milliseconds
+     * @return true if a new image is available, false if no new image arrived before the timeout.
+ */
+ public boolean checkForNewImage(int timeoutMs) {
+ synchronized (mFrameSyncObject) {
+ while (!mFrameAvailable) {
+ try {
+ // Wait for onFrameAvailable() to signal us. Use a timeout to avoid
+ // stalling the test if it doesn't arrive.
+ mFrameSyncObject.wait(timeoutMs);
+ if (!mFrameAvailable) {
+ return false;
+ }
+ } catch (InterruptedException ie) {
+ // shouldn't happen
+ throw new RuntimeException(ie);
+ }
+ }
+ mFrameAvailable = false;
+ }
+
+ // Latch the data.
+ mTextureRender.checkGlError("before updateTexImage");
+ mSurfaceTexture.updateTexImage();
+ return true;
+ }
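+    // Typical use (illustrative, not prescribed by this class): a decoding loop calls
+    // checkForNewImage() after queueing a frame; when it returns true, the caller invokes
+    // drawImage() and reads the rendered pixels back from the pbuffer with glReadPixels().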
+
+ /**
+ * Draws the data from SurfaceTexture onto the current EGL surface.
+ */
+ public void drawImage() {
+ mTextureRender.drawFrame(mSurfaceTexture);
+ }
+
+ public void latchImage() {
+ mTextureRender.checkGlError("before updateTexImage");
+ mSurfaceTexture.updateTexImage();
+ }
+
+ @Override
+ public void onFrameAvailable(SurfaceTexture st) {
+ if (VERBOSE) Log.d(TAG, "new frame available");
+ synchronized (mFrameSyncObject) {
+ if (mFrameAvailable) {
+ throw new RuntimeException("mFrameAvailable already set, frame could be dropped");
+ }
+ mFrameAvailable = true;
+ mFrameSyncObject.notifyAll();
+ }
+ }
+
+ /**
+ * Checks for EGL errors.
+ */
+ private void checkEglError(String msg) {
+ int error;
+ if ((error = EGL14.eglGetError()) != EGL14.EGL_SUCCESS) {
+ throw new RuntimeException(msg + ": EGL error: 0x" + Integer.toHexString(error));
+ }
+ }
+}
diff --git a/apps/CtsVerifier/src/com/android/cts/verifier/sensors/CtsMediaTextureRender.java b/apps/CtsVerifier/src/com/android/cts/verifier/sensors/CtsMediaTextureRender.java
new file mode 100644
index 0000000..a96033d
--- /dev/null
+++ b/apps/CtsVerifier/src/com/android/cts/verifier/sensors/CtsMediaTextureRender.java
@@ -0,0 +1,306 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.cts.verifier.sensors;
+
+import java.io.File;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.nio.ByteOrder;
+import java.nio.FloatBuffer;
+
+import android.graphics.Bitmap;
+import android.graphics.SurfaceTexture;
+import android.opengl.GLES11Ext;
+import android.opengl.GLES20;
+import android.opengl.Matrix;
+import android.util.Log;
+
+
+//
+// This file is copied from android.hardware.cts.media
+//
+
+/**
+ * Code for rendering a texture onto a surface using OpenGL ES 2.0.
+ */
+class CtsMediaTextureRender {
+ private static final String TAG = "TextureRender";
+
+ private static final int FLOAT_SIZE_BYTES = 4;
+ private static final int TRIANGLE_VERTICES_DATA_STRIDE_BYTES = 5 * FLOAT_SIZE_BYTES;
+ private static final int TRIANGLE_VERTICES_DATA_POS_OFFSET = 0;
+ private static final int TRIANGLE_VERTICES_DATA_UV_OFFSET = 3;
+ private final float[] mTriangleVerticesData = {
+ // X, Y, Z, U, V
+ -1.0f, -1.0f, 0, 0.f, 0.f,
+ 1.0f, -1.0f, 0, 1.f, 0.f,
+ -1.0f, 1.0f, 0, 0.f, 1.f,
+ 1.0f, 1.0f, 0, 1.f, 1.f,
+ };
+
+ private FloatBuffer mTriangleVertices;
+
+ private static final String VERTEX_SHADER =
+ "uniform mat4 uMVPMatrix;\n" +
+ "uniform mat4 uSTMatrix;\n" +
+ "attribute vec4 aPosition;\n" +
+ "attribute vec4 aTextureCoord;\n" +
+ "varying vec2 vTextureCoord;\n" +
+ "void main() {\n" +
+ " gl_Position = uMVPMatrix * aPosition;\n" +
+ " vTextureCoord = (uSTMatrix * aTextureCoord).xy;\n" +
+ "}\n";
+
+ private static final String FRAGMENT_SHADER =
+ "#extension GL_OES_EGL_image_external : require\n" +
+ "precision mediump float;\n" + // highp here doesn't seem to matter
+ "varying vec2 vTextureCoord;\n" +
+ "uniform samplerExternalOES sTexture;\n" +
+ "void main() {\n" +
+ " gl_FragColor = texture2D(sTexture, vTextureCoord);\n" +
+ "}\n";
+
+ private float[] mMVPMatrix = new float[16];
+ private float[] mSTMatrix = new float[16];
+
+ private int mProgram;
+ private int mTextureID = -12345;
+ private int muMVPMatrixHandle;
+ private int muSTMatrixHandle;
+ private int maPositionHandle;
+ private int maTextureHandle;
+
+ public CtsMediaTextureRender() {
+ mTriangleVertices = ByteBuffer.allocateDirect(
+ mTriangleVerticesData.length * FLOAT_SIZE_BYTES)
+ .order(ByteOrder.nativeOrder()).asFloatBuffer();
+ mTriangleVertices.put(mTriangleVerticesData).position(0);
+
+ Matrix.setIdentityM(mSTMatrix, 0);
+ }
+
+ public int getTextureId() {
+ return mTextureID;
+ }
+
+ public void drawFrame(SurfaceTexture st) {
+ checkGlError("onDrawFrame start");
+ st.getTransformMatrix(mSTMatrix);
+
+ GLES20.glClearColor(0.0f, 1.0f, 0.0f, 1.0f);
+ GLES20.glClear(GLES20.GL_DEPTH_BUFFER_BIT | GLES20.GL_COLOR_BUFFER_BIT);
+
+ GLES20.glUseProgram(mProgram);
+ checkGlError("glUseProgram");
+
+ GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
+ GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, mTextureID);
+
+ mTriangleVertices.position(TRIANGLE_VERTICES_DATA_POS_OFFSET);
+ GLES20.glVertexAttribPointer(maPositionHandle, 3, GLES20.GL_FLOAT, false,
+ TRIANGLE_VERTICES_DATA_STRIDE_BYTES, mTriangleVertices);
+ checkGlError("glVertexAttribPointer maPosition");
+ GLES20.glEnableVertexAttribArray(maPositionHandle);
+ checkGlError("glEnableVertexAttribArray maPositionHandle");
+
+ mTriangleVertices.position(TRIANGLE_VERTICES_DATA_UV_OFFSET);
+ GLES20.glVertexAttribPointer(maTextureHandle, 2, GLES20.GL_FLOAT, false,
+ TRIANGLE_VERTICES_DATA_STRIDE_BYTES, mTriangleVertices);
+ checkGlError("glVertexAttribPointer maTextureHandle");
+ GLES20.glEnableVertexAttribArray(maTextureHandle);
+ checkGlError("glEnableVertexAttribArray maTextureHandle");
+
+ Matrix.setIdentityM(mMVPMatrix, 0);
+ GLES20.glUniformMatrix4fv(muMVPMatrixHandle, 1, false, mMVPMatrix, 0);
+ GLES20.glUniformMatrix4fv(muSTMatrixHandle, 1, false, mSTMatrix, 0);
+
+ GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
+ checkGlError("glDrawArrays");
+ GLES20.glFinish();
+ }
+
+ /**
+ * Initializes GL state. Call this after the EGL surface has been created and made current.
+ */
+ public void surfaceCreated() {
+ mProgram = createProgram(VERTEX_SHADER, FRAGMENT_SHADER);
+ if (mProgram == 0) {
+ throw new RuntimeException("failed creating program");
+ }
+ maPositionHandle = GLES20.glGetAttribLocation(mProgram, "aPosition");
+ checkGlError("glGetAttribLocation aPosition");
+ if (maPositionHandle == -1) {
+ throw new RuntimeException("Could not get attrib location for aPosition");
+ }
+ maTextureHandle = GLES20.glGetAttribLocation(mProgram, "aTextureCoord");
+ checkGlError("glGetAttribLocation aTextureCoord");
+ if (maTextureHandle == -1) {
+ throw new RuntimeException("Could not get attrib location for aTextureCoord");
+ }
+
+ muMVPMatrixHandle = GLES20.glGetUniformLocation(mProgram, "uMVPMatrix");
+ checkGlError("glGetUniformLocation uMVPMatrix");
+ if (muMVPMatrixHandle == -1) {
+ throw new RuntimeException("Could not get attrib location for uMVPMatrix");
+ }
+
+ muSTMatrixHandle = GLES20.glGetUniformLocation(mProgram, "uSTMatrix");
+ checkGlError("glGetUniformLocation uSTMatrix");
+ if (muSTMatrixHandle == -1) {
+ throw new RuntimeException("Could not get attrib location for uSTMatrix");
+ }
+
+
+ int[] textures = new int[1];
+ GLES20.glGenTextures(1, textures, 0);
+
+ mTextureID = textures[0];
+ GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, mTextureID);
+ checkGlError("glBindTexture mTextureID");
+
+ GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MIN_FILTER,
+ GLES20.GL_NEAREST);
+ GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MAG_FILTER,
+ GLES20.GL_LINEAR);
+ GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_S,
+ GLES20.GL_CLAMP_TO_EDGE);
+ GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_T,
+ GLES20.GL_CLAMP_TO_EDGE);
+ checkGlError("glTexParameter");
+ }
+
+ /**
+ * Replaces the fragment shader.
+ */
+ public void changeFragmentShader(String fragmentShader) {
+ GLES20.glDeleteProgram(mProgram);
+ mProgram = createProgram(VERTEX_SHADER, fragmentShader);
+ if (mProgram == 0) {
+ throw new RuntimeException("failed creating program");
+ }
+ }
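+    // Illustrative only: a caller might pass a hypothetical shader such as
+    //   "#extension GL_OES_EGL_image_external : require\n" +
+    //   "precision mediump float;\n" +
+    //   "varying vec2 vTextureCoord;\n" +
+    //   "uniform samplerExternalOES sTexture;\n" +
+    //   "void main() { gl_FragColor = texture2D(sTexture, vTextureCoord).bgra; }\n"
+    // to swap the red and blue channels and verify that the replacement path works.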
+
+ private int loadShader(int shaderType, String source) {
+ int shader = GLES20.glCreateShader(shaderType);
+ checkGlError("glCreateShader type=" + shaderType);
+ GLES20.glShaderSource(shader, source);
+ GLES20.glCompileShader(shader);
+ int[] compiled = new int[1];
+ GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compiled, 0);
+ if (compiled[0] == 0) {
+ Log.e(TAG, "Could not compile shader " + shaderType + ":");
+ Log.e(TAG, " " + GLES20.glGetShaderInfoLog(shader));
+ GLES20.glDeleteShader(shader);
+ shader = 0;
+ }
+ return shader;
+ }
+
+ private int createProgram(String vertexSource, String fragmentSource) {
+ int vertexShader = loadShader(GLES20.GL_VERTEX_SHADER, vertexSource);
+ if (vertexShader == 0) {
+ return 0;
+ }
+ int pixelShader = loadShader(GLES20.GL_FRAGMENT_SHADER, fragmentSource);
+ if (pixelShader == 0) {
+ return 0;
+ }
+
+ int program = GLES20.glCreateProgram();
+ checkGlError("glCreateProgram");
+ if (program == 0) {
+ Log.e(TAG, "Could not create program");
+ }
+ GLES20.glAttachShader(program, vertexShader);
+ checkGlError("glAttachShader");
+ GLES20.glAttachShader(program, pixelShader);
+ checkGlError("glAttachShader");
+ GLES20.glLinkProgram(program);
+ int[] linkStatus = new int[1];
+ GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, linkStatus, 0);
+ if (linkStatus[0] != GLES20.GL_TRUE) {
+ Log.e(TAG, "Could not link program: ");
+ Log.e(TAG, GLES20.glGetProgramInfoLog(program));
+ GLES20.glDeleteProgram(program);
+ program = 0;
+ }
+ return program;
+ }
+
+ public void checkGlError(String op) {
+ int error;
+ while ((error = GLES20.glGetError()) != GLES20.GL_NO_ERROR) {
+ Log.e(TAG, op + ": glError " + error);
+ throw new RuntimeException(op + ": glError " + error);
+ }
+ }
+
+ /**
+ * Saves the current frame to disk as a PNG image. Frame starts from (0,0).
+ * <p>
+ * Useful for debugging.
+ */
+ public static void saveFrame(String filename, int width, int height) {
+ // glReadPixels gives us a ByteBuffer filled with what is essentially big-endian RGBA
+ // data (i.e. a byte of red, followed by a byte of green...). We need an int[] filled
+ // with native-order ARGB data to feed to Bitmap.
+ //
+ // If we implement this as a series of buf.get() calls, we can spend 2.5 seconds just
+ // copying data around for a 720p frame. It's better to do a bulk get() and then
+ // rearrange the data in memory. (For comparison, the PNG compress takes about 500ms
+ // for a trivial frame.)
+ //
+ // So... we set the ByteBuffer to little-endian, which should turn the bulk IntBuffer
+ // get() into a straight memcpy on most Android devices. Our ints will hold ABGR data.
+ // Swapping B and R gives us ARGB. We need about 30ms for the bulk get(), and another
+ // 270ms for the color swap.
+ //
+ // Making this even more interesting is the upside-down nature of GL, which means we
+ // may want to flip the image vertically here.
+
+ ByteBuffer buf = ByteBuffer.allocateDirect(width * height * 4);
+ buf.order(ByteOrder.LITTLE_ENDIAN);
+ GLES20.glReadPixels(0, 0, width, height, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, buf);
+ buf.rewind();
+
+ int pixelCount = width * height;
+ int[] colors = new int[pixelCount];
+ buf.asIntBuffer().get(colors);
+ for (int i = 0; i < pixelCount; i++) {
+ int c = colors[i];
+ colors[i] = (c & 0xff00ff00) | ((c & 0x00ff0000) >> 16) | ((c & 0x000000ff) << 16);
+ }
+
+ FileOutputStream fos = null;
+ try {
+ fos = new FileOutputStream(filename);
+ Bitmap bmp = Bitmap.createBitmap(colors, width, height, Bitmap.Config.ARGB_8888);
+ bmp.compress(Bitmap.CompressFormat.PNG, 90, fos);
+ bmp.recycle();
+ } catch (IOException ioe) {
+ throw new RuntimeException("Failed to write file " + filename, ioe);
+ } finally {
+ try {
+ if (fos != null) fos.close();
+ } catch (IOException ioe2) {
+ throw new RuntimeException("Failed to close file " + filename, ioe2);
+ }
+ }
+ Log.d(TAG, "Saved " + width + "x" + height + " frame as '" + filename + "'");
+ }
+}
diff --git a/apps/CtsVerifier/src/com/android/cts/verifier/sensors/MotionIndicatorView.java b/apps/CtsVerifier/src/com/android/cts/verifier/sensors/MotionIndicatorView.java
new file mode 100644
index 0000000..12d4582
--- /dev/null
+++ b/apps/CtsVerifier/src/com/android/cts/verifier/sensors/MotionIndicatorView.java
@@ -0,0 +1,409 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.cts.verifier.sensors;
+
+import android.content.Context;
+import android.graphics.Canvas;
+import android.graphics.Color;
+import android.graphics.Paint;
+import android.graphics.PorterDuff;
+import android.graphics.PorterDuffXfermode;
+import android.graphics.RectF;
+import android.hardware.SensorManager;
+import android.util.AttributeSet;
+import android.util.Log;
+import android.view.View;
+
+/**
+ * A view class that draws the user prompt.
+ *
+ * The following piece of code shows how to use this view:
+ *
+ * public void testUI() {
+ * final int MAX_TILT_ANGLE = 70; // +/- 70
+ *
+ * final int TILT_ANGLE_STEP = 5; // 5 degree(s) per step
+ * final int YAW_ANGLE_STEP = 10; // 10 degree(s) per step
+ *
+ * RangeCoveredRegister xCovered, yCovered, zCovered;
+ * xCovered = new RangeCoveredRegister(-MAX_TILT_ANGLE, +MAX_TILT_ANGLE, TILT_ANGLE_STEP);
+ *
+ * yCovered = new RangeCoveredRegister(-MAX_TILT_ANGLE, +MAX_TILT_ANGLE, TILT_ANGLE_STEP);
+ * zCovered = new RangeCoveredRegister(YAW_ANGLE_STEP);
+ *
+ * xCovered.update(40);
+ * xCovered.update(-40);
+ * xCovered.update(12);
+ *
+ * yCovered.update(50);
+ * yCovered.update(-51);
+ *
+ * zCovered.update(150);
+ * zCovered.update(42);
+ *
+ * setDataProvider(xCovered, yCovered, zCovered);
+ * enableAxis(RVCVRecordActivity.AXIS_ALL); //debug mode, show all three axis
+ * }
+ */
+public class MotionIndicatorView extends View {
+ private final String TAG = "MotionIndicatorView";
+ private final boolean LOCAL_LOGV = false;
+
+ private Paint mCursorPaint;
+ private Paint mLimitPaint;
+ private Paint mCoveredPaint;
+ private Paint mRangePaint;
+ private Paint mEraserPaint;
+
+ // UI settings
+ private final int XBAR_WIDTH = 50;
+ private final int XBAR_MARGIN = 50;
+ private final int XBAR_CURSOR_ADD = 20;
+
+ private final int YBAR_WIDTH = 50;
+ private final int YBAR_MARGIN = 50;
+ private final int YBAR_CURSOR_ADD = 20;
+
+ private final int ZRING_WIDTH = 50;
+ private final int ZRING_CURSOR_ADD = 30;
+
+
+ private int mXSize, mYSize;
+ private RectF mZBoundOut, mZBoundOut2, mZBoundIn, mZBoundIn2;
+
+ private RangeCoveredRegister mXCovered, mYCovered, mZCovered;
+
+ private boolean mXEnabled, mYEnabled, mZEnabled;
+
+ /**
+ * Constructor
+ * @param context
+ */
+ public MotionIndicatorView(Context context) {
+ super(context);
+ init();
+ }
+
+ /**
+ * Constructor
+ * @param context Application context
+ * @param attrs
+ */
+ public MotionIndicatorView(Context context, AttributeSet attrs) {
+ super(context, attrs);
+ init();
+ }
+
+ /**
+ * Initialize the Paint objects
+ */
+ private void init() {
+
+ mCursorPaint = new Paint();
+ mCursorPaint.setColor(Color.BLUE);
+
+ mLimitPaint = new Paint();
+ mLimitPaint.setColor(Color.YELLOW);
+
+ mCoveredPaint = new Paint();
+ mCoveredPaint.setColor(Color.CYAN);
+
+ mRangePaint = new Paint();
+ mRangePaint.setColor(Color.DKGRAY);
+
+ mEraserPaint = new Paint();
+ mEraserPaint.setColor(Color.TRANSPARENT);
+ // ensure the erasing effect
+ mEraserPaint.setXfermode(new PorterDuffXfermode(PorterDuff.Mode.SRC));
+ }
+
+ /**
+ * Connect the view to certain data provider objects
+ * @param x Data provider for x direction tilt angle
+ * @param y Data provider for y direction tilt angle
+ * @param z Data provider for z rotation
+ */
+ public void setDataProvider(RangeCoveredRegister x,
+ RangeCoveredRegister y,
+ RangeCoveredRegister z) {
+ mXCovered = x;
+ mYCovered = y;
+ mZCovered = z;
+ }
+
+ /**
+ * Set the active axis for display
+ *
+ * @param axis AXIS_X, AXIS_Y, AXIS_Z for x, y, z axis indicators, or AXIS_ALL for all three.
+ */
+ public void enableAxis(int axis) {
+ mXEnabled = mYEnabled = mZEnabled = false;
+
+ switch(axis)
+ {
+ case SensorManager.AXIS_X:
+ mXEnabled = true;
+ break;
+ case SensorManager.AXIS_Y:
+ mYEnabled = true;
+ break;
+ case SensorManager.AXIS_Z:
+ mZEnabled = true;
+ break;
+ case RVCVRecordActivity.AXIS_ALL:
+ mXEnabled = mYEnabled = mZEnabled = true;
+ }
+ }
+
+ /**
+     * Pre-calculates values that only change when the view dimensions change.
+     * @param w new width
+     * @param h new height
+     * @param oldw previous width
+     * @param oldh previous height
+ */
+ @Override
+ protected void onSizeChanged (int w, int h, int oldw, int oldh) {
+ mXSize = w;
+ mYSize = h;
+
+ mZBoundOut = new RectF(w/2-w/2.5f, h/2-w/2.5f, w/2+w/2.5f, h/2+w/2.5f);
+ mZBoundOut2 = new RectF(
+ w/2-w/2.5f-ZRING_CURSOR_ADD, h/2-w/2.5f-ZRING_CURSOR_ADD,
+ w/2+w/2.5f+ZRING_CURSOR_ADD, h/2+w/2.5f+ZRING_CURSOR_ADD);
+ mZBoundIn = new RectF(
+ w/2-w/2.5f+ZRING_WIDTH, h/2-w/2.5f+ZRING_WIDTH,
+ w/2+w/2.5f-ZRING_WIDTH, h/2+w/2.5f-ZRING_WIDTH);
+ mZBoundIn2 = new RectF(
+ w/2-w/2.5f+ZRING_WIDTH+ZRING_CURSOR_ADD, h/2-w/2.5f+ZRING_WIDTH+ZRING_CURSOR_ADD,
+ w/2+w/2.5f-ZRING_WIDTH-ZRING_CURSOR_ADD, h/2+w/2.5f-ZRING_WIDTH-ZRING_CURSOR_ADD);
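+        // The Z indicator is a ring centered in the view with outer radius w/2.5 and
+        // thickness ZRING_WIDTH; the "*2" rects are grown/shrunk by ZRING_CURSOR_ADD so the
+        // cursor arc extends past the outer edge and is notched inside the inner edge.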
+
+ if (LOCAL_LOGV) Log.v(TAG, "New view size = ("+w+", "+h+")");
+ }
+
+ /**
+     * Draws the UI depending on the selected axis and the registered values
+ *
+ * @param canvas the canvas to draw on
+ */
+ @Override
+ protected void onDraw(Canvas canvas) {
+ super.onDraw(canvas);
+ int i,t;
+
+ Paint p = new Paint();
+ p.setColor(Color.YELLOW);
+ canvas.drawRect(10,10, 50, 50, p);
+
+ if (mXEnabled && mXCovered != null) {
+ int xNStep = mXCovered.getNSteps() + 4; // two on each side as a buffer
+ int xStepSize = mXSize * 3/4 / xNStep;
+ int xLeft = mXSize * 1/8 + (mXSize * 3/4 % xNStep)/2;
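+            // The bar occupies the middle 3/4 of the view width; the division remainder is
+            // split between the two sides so the bar stays centered.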
+
+ // base bar
+ canvas.drawRect(xLeft, XBAR_MARGIN,
+ xLeft+xStepSize*xNStep-1, XBAR_WIDTH+XBAR_MARGIN, mRangePaint);
+
+ // covered range
+ for (i=0; i<mXCovered.getNSteps(); ++i) {
+ if (mXCovered.isCovered(i)) {
+ canvas.drawRect(
+ xLeft+xStepSize*(i+2), XBAR_MARGIN,
+ xLeft+xStepSize*(i+3)-1, XBAR_WIDTH + XBAR_MARGIN,
+ mCoveredPaint);
+ }
+ }
+
+ // limit
+ canvas.drawRect(xLeft+xStepSize*2-4, XBAR_MARGIN,
+ xLeft+xStepSize*2+3, XBAR_WIDTH+XBAR_MARGIN, mLimitPaint);
+ canvas.drawRect(xLeft+xStepSize*(xNStep-2)-4, XBAR_MARGIN,
+ xLeft+xStepSize*(xNStep-2)+3, XBAR_WIDTH+XBAR_MARGIN, mLimitPaint);
+
+ // cursor
+ t = (int)(xLeft+xStepSize*(mXCovered.getLastValue()+2));
+ canvas.drawRect(t-4, XBAR_MARGIN-XBAR_CURSOR_ADD, t+3,
+ XBAR_WIDTH+XBAR_MARGIN+XBAR_CURSOR_ADD, mCursorPaint);
+ }
+ if (mYEnabled && mYCovered != null) {
+ int yNStep = mYCovered.getNSteps() + 4; // two on each side as a buffer
+ int yStepSize = mYSize * 3/4 / yNStep;
+ int yLeft = mYSize * 1/8 + (mYSize * 3/4 % yNStep)/2;
+
+ // base bar
+ canvas.drawRect(YBAR_MARGIN, yLeft,
+ YBAR_WIDTH+YBAR_MARGIN, yLeft+yStepSize*yNStep-1, mRangePaint);
+
+ // covered range
+ for (i=0; i<mYCovered.getNSteps(); ++i) {
+ if (mYCovered.isCovered(i)) {
+ canvas.drawRect(
+ YBAR_MARGIN, yLeft+yStepSize*(i+2),
+ YBAR_WIDTH + YBAR_MARGIN, yLeft+yStepSize*(i+3)-1,
+ mCoveredPaint);
+ }
+ }
+
+ // limit
+ canvas.drawRect(YBAR_MARGIN, yLeft + yStepSize * 2 - 4,
+ YBAR_WIDTH + YBAR_MARGIN, yLeft + yStepSize * 2 + 3, mLimitPaint);
+ canvas.drawRect(YBAR_MARGIN, yLeft + yStepSize * (yNStep - 2) - 4,
+ YBAR_WIDTH + YBAR_MARGIN, yLeft + yStepSize * (yNStep - 2) + 3, mLimitPaint);
+
+ // cursor
+ t = (int)(yLeft+yStepSize*(mYCovered.getLastValue()+2));
+ canvas.drawRect( YBAR_MARGIN-YBAR_CURSOR_ADD, t-4,
+ YBAR_WIDTH+YBAR_MARGIN+YBAR_CURSOR_ADD, t+3, mCursorPaint);
+ }
+
+ if (mZEnabled && mZCovered != null) {
+ float stepSize = 360.0f/mZCovered.getNSteps();
+
+ // base bar
+ canvas.drawArc(mZBoundOut,0, 360, true, mRangePaint);
+
+ // covered range
+ for (i=0; i<mZCovered.getNSteps(); ++i) {
+ if (mZCovered.isCovered(i)) {
+ canvas.drawArc(mZBoundOut,i*stepSize-0.2f, stepSize+0.4f,
+ true, mCoveredPaint);
+ }
+ }
+ // clear center
+ canvas.drawArc(mZBoundIn, 0, 360, true, mEraserPaint);
+ // cursor
+ canvas.drawArc(mZBoundOut2, mZCovered.getLastValue()*stepSize- 1, 2,
+ true, mCursorPaint);
+ canvas.drawArc(mZBoundIn2, mZCovered.getLastValue()*stepSize-1.5f, 3,
+ true, mEraserPaint);
+ }
+ }
+}
+
+/**
+ * A range register class for the RVCVRecord Activity
+ */
+class RangeCoveredRegister {
+ enum MODE {
+ LINEAR,
+ ROTATE2D
+ }
+
+ private boolean[] mCovered;
+ private MODE mMode;
+ private int mStep;
+ private int mLow, mHigh;
+ private int mLastData;
+
+ // high is not inclusive
+ RangeCoveredRegister(int low, int high, int step) {
+ mMode = MODE.LINEAR;
+ mStep = step;
+ mLow = low;
+ mHigh = high;
+ init();
+ }
+
+ RangeCoveredRegister(int step) {
+ mMode = MODE.ROTATE2D;
+ mStep = step;
+ mLow = 0;
+ mHigh = 360;
+ init();
+ }
+
+ private void init() {
+ if (mMode == MODE.LINEAR) {
+ mCovered = new boolean[(mHigh-mLow)/mStep];
+ }else {
+ mCovered = new boolean[360/mStep];
+ }
+ }
+
+ /**
+ * Test if the range defined is fully covered.
+ *
+ * @return if the range is fully covered, return true; otherwise false.
+ */
+ public boolean isFullyCovered() {
+ for (boolean i:mCovered) {
+ if (!i) return false;
+ }
+ return true;
+ }
+
+ /**
+ * Test if a specific step is covered.
+ *
+ * @param i the step number
+ * @return if the step specified is covered, return true; otherwise false.
+ */
+ public boolean isCovered(int i) {
+ return mCovered[i];
+ }
+
+ /**
+     * Update the register with a new data point.
+     *
+     * @param data the value to record
+     * @return true if this update covers a previously uncovered step; false otherwise.
+ */
+ public boolean update(int data) {
+ mLastData = data;
+
+ if (mMode == MODE.ROTATE2D) {
+ data %= 360;
+ }
+
+ int iStep = (data - mLow)/mStep;
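+        // e.g. for a linear register with low = -70, high = +70, step = 5, a reading of
+        // 12 degrees maps to step (12 - (-70)) / 5 = 16 out of 28 steps.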
+
+ if (iStep>=0 && iStep<getNSteps()) {
+ // only record valid data
+ mLastData = data;
+
+ if (mCovered[iStep]) {
+ return false;
+ } else {
+ mCovered[iStep] = true;
+ return true;
+ }
+ }
+ return false;
+ }
+
+ /**
+ * Get the number of steps in this register
+ *
+ * @return The number of steps in this register
+ */
+ public int getNSteps() {
+ //if (mCovered == null) {
+ //return 0;
+ //}
+ return mCovered.length;
+ }
+
+ /**
+ * Get the last value updated
+ *
+ * @return The last value updated
+ */
+ public float getLastValue() {
+ // ensure float division
+ return ((float)(mLastData - mLow))/mStep;
+ }
+}
diff --git a/apps/CtsVerifier/src/com/android/cts/verifier/sensors/RVCVCameraPreview.java b/apps/CtsVerifier/src/com/android/cts/verifier/sensors/RVCVCameraPreview.java
new file mode 100644
index 0000000..a5b58f6
--- /dev/null
+++ b/apps/CtsVerifier/src/com/android/cts/verifier/sensors/RVCVCameraPreview.java
@@ -0,0 +1,120 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.android.cts.verifier.sensors;
+
+// ----------------------------------------------------------------------
+
+import android.content.Context;
+import android.hardware.Camera;
+import android.util.AttributeSet;
+import android.util.Log;
+import android.view.SurfaceHolder;
+import android.view.SurfaceView;
+
+import java.io.IOException;
+import java.util.List;
+
+/** Camera preview class */
+public class RVCVCameraPreview extends SurfaceView implements SurfaceHolder.Callback {
+ private static final String TAG = "RVCVCameraPreview";
+ private static final boolean LOCAL_LOGD = true;
+
+ private SurfaceHolder mHolder;
+ private Camera mCamera;
+
+ /**
+ * Constructor
+ * @param context Activity context
+ * @param camera Camera object to be previewed
+ */
+ public RVCVCameraPreview(Context context, Camera camera) {
+ super(context);
+ mCamera = camera;
+ initSurface();
+ }
+
+ /**
+ * Constructor
+ * @param context Activity context
+ * @param attrs
+ */
+ public RVCVCameraPreview(Context context, AttributeSet attrs) {
+ super(context, attrs);
+ }
+
+ public void init(Camera camera) {
+ this.mCamera = camera;
+ initSurface();
+ }
+
+ private void initSurface() {
+ // Install a SurfaceHolder.Callback so we get notified when the
+ // underlying surface is created and destroyed.
+ mHolder = getHolder();
+ mHolder.addCallback(this);
+
+ // deprecated
+ // TODO: update this code to match new API level.
+ mHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
+ }
+
+ /**
+ * SurfaceHolder.Callback
+     * The surface has been created; it is OK to start the camera preview now.
+ */
+ public void surfaceCreated(SurfaceHolder holder) {
+ // The Surface has been created, now tell the camera where to draw the preview.
+
+ if (mCamera == null) {
+ // preview camera does not exist
+ return;
+ }
+
+ try {
+ mCamera.setPreviewDisplay(holder);
+ mCamera.startPreview();
+ } catch (IOException e) {
+ if (LOCAL_LOGD) Log.d(TAG, "Error when starting camera preview: " + e.getMessage());
+ }
+ }
+ /**
+ * SurfaceHolder.Callback
+ */
+ public void surfaceDestroyed(SurfaceHolder holder) {
+ // empty. Take care of releasing the Camera preview in your activity.
+ }
+
+ /**
+ * SurfaceHolder.Callback
+     * Restart the camera preview when the surface changes
+ */
+ public void surfaceChanged(SurfaceHolder holder, int format, int w, int h) {
+
+ if (mHolder.getSurface() == null || mCamera == null){
+ // preview surface or camera does not exist
+ return;
+ }
+
+ // stop preview before making changes
+ mCamera.stopPreview();
+
+ // the activity using this view is locked to this orientation, so hard code is fine
+ mCamera.setDisplayOrientation(90);
+
+ //do the same as if it is created again
+ surfaceCreated(holder);
+ }
+}
diff --git a/apps/CtsVerifier/src/com/android/cts/verifier/sensors/RVCVRecordActivity.java b/apps/CtsVerifier/src/com/android/cts/verifier/sensors/RVCVRecordActivity.java
new file mode 100644
index 0000000..9011619
--- /dev/null
+++ b/apps/CtsVerifier/src/com/android/cts/verifier/sensors/RVCVRecordActivity.java
@@ -0,0 +1,903 @@
+/*
+ * Copyright (C) 2007 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.cts.verifier.sensors;
+
+import android.app.Activity;
+import android.content.Context;
+import android.content.Intent;
+import android.hardware.Camera;
+import android.hardware.Sensor;
+import android.hardware.SensorEvent;
+import android.hardware.SensorEventListener;
+import android.hardware.SensorManager;
+import android.media.AudioManager;
+import android.media.CamcorderProfile;
+import android.media.MediaRecorder;
+import android.media.SoundPool;
+import android.net.Uri;
+import android.os.Bundle;
+import android.os.Environment;
+import android.util.JsonWriter;
+import android.util.Log;
+import android.view.Window;
+import android.widget.ImageView;
+import android.widget.Toast;
+
+import com.android.cts.verifier.R;
+
+import java.io.File;
+import java.io.FileNotFoundException;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.OutputStreamWriter;
+import java.text.SimpleDateFormat;
+import java.util.Date;
+
+
+// ----------------------------------------------------------------------
+
+/**
+ * An activity that records camera video and rotation vector data at the same time.
+ */
+public class RVCVRecordActivity extends Activity {
+ private static final String TAG = "RVCVRecordActivity";
+ //private static final boolean LOCAL_LOGD = true;
+ private static final boolean LOCAL_LOGV = false;
+
+ private MotionIndicatorView mIndicatorView;
+
+ private SoundPool mSoundPool;
+ private int [] mSoundPoolLookup;
+
+ private File mRecordDir;
+ private RecordProcedureController mController;
+ private VideoRecorder mVideoRecorder;
+ private RVSensorLogger mRVSensorLogger;
+ private CoverageManager mCoverManager;
+ private CameraPreviewer mPreviewer;
+
+ public static final int AXIS_NONE = 0;
+ public static final int AXIS_ALL = SensorManager.AXIS_X +
+ SensorManager.AXIS_Y +
+ SensorManager.AXIS_Z;
+
+ // For Rotation Vector algorithm research use
+ private final static boolean LOG_RAW_SENSORS = false;
+ private RawSensorLogger mRawSensorLogger;
+
+ @Override
+ public void onCreate(Bundle savedInstanceState) {
+ super.onCreate(savedInstanceState);
+
+ // Hide the window title.
+ requestWindowFeature(Window.FEATURE_NO_TITLE);
+
+ // inflate xml
+ setContentView(R.layout.cam_preview_overlay);
+
+ // locate views
+ mIndicatorView = (MotionIndicatorView) findViewById(R.id.cam_indicator);
+
+ initStoragePath();
+ }
+
+ @Override
+ protected void onPause() {
+ super.onPause();
+ mController.quit();
+
+ mPreviewer.end();
+ endSoundPool();
+ }
+
+ @Override
+ protected void onResume() {
+ super.onResume();
+ // delay the initialization as much as possible
+ init();
+ }
+
+    /** Display a toast message
+ *
+ * @param msg Message content
+ */
+ private void message(String msg) {
+
+ Context context = getApplicationContext();
+ int duration = Toast.LENGTH_SHORT;
+
+ Toast toast = Toast.makeText(context, msg, duration);
+ toast.show();
+ }
+
+ /**
+ * Initialize components
+ *
+ */
+ private void init() {
+ mPreviewer = new CameraPreviewer();
+ mPreviewer.init();
+
+ mCoverManager = new CoverageManager();
+ mIndicatorView.setDataProvider(
+ mCoverManager.getAxis(SensorManager.AXIS_X),
+ mCoverManager.getAxis(SensorManager.AXIS_Y),
+ mCoverManager.getAxis(SensorManager.AXIS_Z) );
+
+ initSoundPool();
+ mRVSensorLogger = new RVSensorLogger(this);
+
+ mVideoRecorder = new VideoRecorder(mPreviewer.getCamera());
+
+ if (LOG_RAW_SENSORS) {
+ mRawSensorLogger = new RawSensorLogger(mRecordDir);
+ }
+
+ mController = new RecordProcedureController(this);
+ }
+
+ /**
+     * Notify that recording is complete. This is the successful exit path.
+ */
+ public void notifyComplete() {
+ message("Capture completed!");
+
+ Uri resultUri = Uri.fromFile(mRecordDir);
+ Intent result = new Intent();
+ result.setData(resultUri);
+ setResult(Activity.RESULT_OK, result);
+
+ finish();
+ }
+
+ /**
+ * Notify the user what to do next in text
+ *
+ * @param axis SensorManager.AXIS_X or SensorManager.AXIS_Y or SensorManager.AXIS_Z
+ */
+ private void notifyPrompt(int axis) {
+        // The order is YXZ rather than XYZ because an earlier design used a different
+        // definition of the X and Y axes.
+ final String axisName = "YXZ";
+
+ message("Manipulate the device in " + axisName.charAt(axis-1) + " axis (as illustrated) about the pattern.");
+ }
+
+ /**
+ * Ask indicator view to redraw
+ */
+ private void redrawIndicator() {
+ mIndicatorView.invalidate();
+ }
+
+ /**
+ * Switch to a different axis for display and logging
+     * @param axis SensorManager.AXIS_X, SensorManager.AXIS_Y or SensorManager.AXIS_Z; any other value hides the prompt
+ */
+ private void switchAxis(int axis) {
+ ImageView imageView = (ImageView) findViewById(R.id.cam_overlay);
+
+ final int [] prompts = {R.drawable.prompt_x, R.drawable.prompt_y, R.drawable.prompt_z};
+
+ if (axis >=SensorManager.AXIS_X && axis <=SensorManager.AXIS_Z) {
+ imageView.setImageResource(prompts[axis-1]);
+ mIndicatorView.enableAxis(axis);
+ mRVSensorLogger.updateRegister(mCoverManager.getAxis(axis), axis);
+ notifyPrompt(axis);
+ } else {
+ imageView.setImageDrawable(null);
+ mIndicatorView.enableAxis(AXIS_NONE);
+ }
+ redrawIndicator();
+ }
+
+ /**
+     * Asynchronous way to call switchAxis. Use this if the caller is not on the UI thread.
+     * @param axis SensorManager.AXIS_X, SensorManager.AXIS_Y or SensorManager.AXIS_Z
+ */
+ public void switchAxisAsync(int axis) {
+ // intended to be called from a non-UI thread
+ final int fAxis = axis;
+ runOnUiThread(new Runnable() {
+ public void run() {
+ // UI code goes here
+ switchAxis(fAxis);
+ }
+ });
+ }
+
+ /**
+ * Initialize sound pool for user notification
+ */
+ private void initSoundPool() {
+ final int MAX_STREAM = 10;
+ int i=0;
+ mSoundPool = new SoundPool(MAX_STREAM, AudioManager.STREAM_MUSIC, 0);
+ mSoundPoolLookup = new int[MAX_STREAM];
+
+ // TODO: add different sound into this
+ mSoundPoolLookup[i++] = mSoundPool.load(this, R.raw.next_axis, 1);
+ mSoundPoolLookup[i++] = mSoundPool.load(this, R.raw.next_axis, 1);
+ mSoundPoolLookup[i++] = mSoundPool.load(this, R.raw.next_axis, 1);
+
+ }
+ private void endSoundPool() {
+ mSoundPool.release();
+ }
+
+ /**
+ * Play notify sound to user
+ * @param id ID of the sound to be played
+ */
+ public void playNotifySound(int id) {
+ mSoundPool.play(mSoundPoolLookup[id], 1, 1, 0, 0, 1);
+ }
+
+ /**
+ * Start the sensor recording
+ */
+ public void startRecordSensor() {
+ mRVSensorLogger.init();
+ if (LOG_RAW_SENSORS) {
+ mRawSensorLogger.init();
+ }
+ }
+
+ /**
+ * Stop the sensor recording
+ */
+ public void stopRecordSensor() {
+ mRVSensorLogger.end();
+ if (LOG_RAW_SENSORS) {
+ mRawSensorLogger.end();
+ }
+ }
+
+ /**
+ * Start video recording
+ */
+ public void startRecordVideo() {
+ mVideoRecorder.init();
+ }
+
+ /**
+ * Stop video recording
+ */
+ public void stopRecordVideo() {
+ mVideoRecorder.end();
+ }
+
+ /**
+ * Wait until a sensor recording for a certain axis is fully covered
+     * @param axis SensorManager.AXIS_X, SensorManager.AXIS_Y or SensorManager.AXIS_Z
+ */
+ public void waitUntilCovered(int axis) {
+ mCoverManager.waitUntilCovered(axis);
+ }
+
+
+ /**
+     * Initialize the storage path used to hold the recorded data set.
+ */
+ private void initStoragePath() {
+ File rxcvRecDataDir = new File(Environment.getExternalStorageDirectory(),"RVCVRecData");
+
+ // Create the storage directory if it does not exist
+ if (! rxcvRecDataDir.exists()) {
+ if (! rxcvRecDataDir.mkdirs()) {
+ Log.e(TAG, "failed to create main data directory");
+ }
+ }
+
+ mRecordDir = new File(rxcvRecDataDir, new SimpleDateFormat("yyMMdd-hhmmss").format(new Date()));
+
+ if (! mRecordDir.mkdirs()) {
+ Log.e(TAG, "failed to create rec data directory");
+ }
+ }
+
+ /**
+ * Get the sensor log file path
+ * @return Path of the sensor log file
+ */
+ public String getSensorLogFilePath() {
+ return new File(mRecordDir, "sensor.log").getPath();
+ }
+
+ /**
+ * Get the video recording file path
+ * @return Path of the video recording file
+ */
+ public String getVideoRecFilePath() {
+ return new File(mRecordDir, "video.mp4").getPath();
+ }
+
+ /**
+ * Write out important camera/video information to a JSON file
+ * @param width width of frame
+ * @param height height of frame
+ * @param frameRate frame rate in fps
+ * @param fovW field of view in width direction
+ * @param fovH field of view in height direction
+ */
+ public void writeVideoMetaInfo(int width, int height, float frameRate, float fovW, float fovH) {
+ try {
+ JsonWriter writer =
+ new JsonWriter(
+ new OutputStreamWriter(
+ new FileOutputStream(
+ new File(mRecordDir, "videometa.json").getPath()
+ )
+ )
+ );
+ writer.beginObject();
+ writer.name("fovW").value(fovW);
+ writer.name("fovH").value(fovH);
+ writer.name("width").value(width);
+ writer.name("height").value(height);
+ writer.name("frameRate").value(frameRate);
+ writer.endObject();
+
+ writer.close();
+ }catch (FileNotFoundException e) {
+ // Not very likely to happen
+ e.printStackTrace();
+ }catch (IOException e) {
+ // do nothing
+ e.printStackTrace();
+ Log.e(TAG, "Writing video meta data failed.");
+ }
+ }
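+    // For illustration only (all numbers hypothetical), the resulting videometa.json looks
+    // like: {"fovW":62.7,"fovH":48.4,"width":1280,"height":720,"frameRate":30.0}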
+
+ /**
+ * Camera preview control class
+ */
+ class CameraPreviewer {
+ private Camera mCamera;
+
+ CameraPreviewer() {
+ try {
+ mCamera = Camera.open(); // attempt to get a default Camera instance
+ }
+ catch (Exception e) {
+ // Camera is not available (in use or does not exist)
+ Log.e(TAG, "Cannot obtain Camera!");
+ }
+ }
+
+ /**
+ * Get the camera to be previewed
+ * @return Reference to Camera used
+ */
+ public Camera getCamera() {
+ return mCamera;
+ }
+
+ /**
+ * Setup the camera
+ */
+ public void init() {
+ if (mCamera != null) {
+ double alpha = mCamera.getParameters().getHorizontalViewAngle()*Math.PI/180.0;
+ int width = 1920;
+ double fx = width/2/Math.tan(alpha/2.0);
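+                // Pinhole-camera estimate: fx = (width/2) / tan(hFOV/2). For example, a
+                // 60-degree horizontal FOV at width 1920 gives fx of roughly 1663 pixels.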
+
+ if (LOCAL_LOGV) Log.v(TAG, "View angle="
+ + mCamera.getParameters().getHorizontalViewAngle() +" Estimated fx = "+fx);
+
+ RVCVCameraPreview cameraPreview =
+ (RVCVCameraPreview) findViewById(R.id.cam_preview);
+ cameraPreview.init(mCamera);
+ } else {
+ message("Cannot open camera!");
+ finish();
+ }
+ }
+
+ /**
+ * End the camera preview
+ */
+ public void end() {
+ if (mCamera != null) {
+ mCamera.release(); // release the camera for other applications
+ mCamera = null;
+ }
+ }
+ }
+
+ /**
+ * Manage a set of RangeCoveredRegister objects
+ */
+ class CoverageManager {
+ // settings
+ private final int MAX_TILT_ANGLE = 60; // +/- 60
+ //private final int REQUIRED_TILT_ANGLE = 50; // +/- 50
+ private final int TILT_ANGLE_STEP = 5; // 5 degree(s) per step
+ private final int YAW_ANGLE_STEP = 10; // 10 degree(s) per step
+
+ RangeCoveredRegister[] mAxisCovered;
+
+ CoverageManager() {
+ mAxisCovered = new RangeCoveredRegister[3];
+ // X AXIS
+ mAxisCovered[0] = new RangeCoveredRegister(-MAX_TILT_ANGLE, +MAX_TILT_ANGLE, TILT_ANGLE_STEP);
+ // Y AXIS
+ mAxisCovered[1] = new RangeCoveredRegister(-MAX_TILT_ANGLE, +MAX_TILT_ANGLE, TILT_ANGLE_STEP);
+ // Z AXIS
+ mAxisCovered[2] = new RangeCoveredRegister(YAW_ANGLE_STEP);
+ }
+
+ public RangeCoveredRegister getAxis(int axis) {
+ // SensorManager.AXIS_X = 1, need offset -1 for mAxisCovered array
+ return mAxisCovered[axis-1];
+ }
+
+ public void waitUntilCovered(int axis) {
+ // SensorManager.AXIS_X = 1, need offset -1 for mAxisCovered array
+ while(!mAxisCovered[axis-1].isFullyCovered()) {
+ try {
+ Thread.sleep(500);
+ } catch (InterruptedException e) {
+ if (LOCAL_LOGV) {
+ Log.v(TAG, "waitUntilCovered axis = "+ axis + " is interrupted");
+ }
+ }
+ }
+ }
+ }
+ ////////////////////////////////////////////////////////////////////////////////////////////////
+
+ /**
+     * A class that controls the video recording
+ */
+ class VideoRecorder
+ {
+ private MediaRecorder mRecorder;
+ private Camera mCamera;
+ private boolean mRunning = false;
+
+ private int [] mPreferredProfiles = { CamcorderProfile.QUALITY_480P, // smaller -> faster
+ CamcorderProfile.QUALITY_720P,
+ CamcorderProfile.QUALITY_1080P,
+ CamcorderProfile.QUALITY_HIGH // existence guaranteed
+ };
+
+
+ VideoRecorder(Camera camera) {
+ mCamera = camera;
+ }
+
+ /**
+ * Initialize and start recording
+ */
+ public void init() {
+ float fovW = mCamera.getParameters().getHorizontalViewAngle();
+ float fovH = mCamera.getParameters().getVerticalViewAngle();
+
+ mRecorder = new MediaRecorder();
+
+ mCamera.unlock();
+
+ mRecorder.setCamera(mCamera);
+
+ mRecorder.setVideoSource(MediaRecorder.VideoSource.CAMERA);
+ mRecorder.setAudioSource(MediaRecorder.AudioSource.DEFAULT);
+
+ CamcorderProfile profile = null;
+ for (int i: mPreferredProfiles) {
+ if (CamcorderProfile.hasProfile(i)) {
+ profile = CamcorderProfile.get(i);
+ mRecorder.setProfile(profile);
+ break;
+ }
+ }
+
+ writeVideoMetaInfo(profile.videoFrameWidth, profile.videoFrameHeight,
+ profile.videoFrameRate, fovW, fovH);
+
+ try {
+ mRecorder.setOutputFile(getVideoRecFilePath());
+ mRecorder.prepare();
+ } catch (IOException e) {
+ Log.e(TAG, "Preparation for recording failed.");
+ }
+
+ try {
+ mRecorder.start();
+ } catch (RuntimeException e) {
+ Log.e(TAG, "Starting recording failed.");
+ mRecorder.reset();
+ mRecorder.release();
+ mCamera.lock();
+ }
+ mRunning = true;
+ }
+
+ /**
+ * Stop recording
+ */
+ public void end() {
+ if (mRunning) {
+ try {
+ mRecorder.stop();
+ mRecorder.reset();
+ mRecorder.release();
+ mCamera.lock();
+ } catch (RuntimeException e) {
+ e.printStackTrace();
+ Log.e(TAG, "Runtime error in stopping recording.");
+ }
+ }
+ mRecorder = null;
+ }
+
+ }
+
+ ////////////////////////////////////////////////////////////////////////////////////////////////
+
+ /**
+     * Log all raw sensor readings, for Rotation Vector sensor algorithm research
+ */
+ class RawSensorLogger implements SensorEventListener {
+ private final String TAG = "RawSensorLogger";
+
+ private final static int SENSOR_RATE = SensorManager.SENSOR_DELAY_FASTEST;
+ private File mRecPath;
+
+ SensorManager mSensorManager;
+ Sensor mAccSensor, mGyroSensor, mMagSensor;
+ OutputStreamWriter mAccLogWriter, mGyroLogWriter, mMagLogWriter;
+
+ private float[] mRTemp = new float[16];
+
+ RawSensorLogger(File recPath) {
+ mRecPath = recPath;
+ }
+
+ /**
+ * Initialize and start recording
+ */
+ public void init() {
+ mSensorManager = (SensorManager)getSystemService(SENSOR_SERVICE);
+
+ mAccSensor = mSensorManager.getDefaultSensor(Sensor.TYPE_ACCELEROMETER);
+ mGyroSensor = mSensorManager.getDefaultSensor(Sensor.TYPE_GYROSCOPE_UNCALIBRATED);
+ mMagSensor = mSensorManager.getDefaultSensor(Sensor.TYPE_MAGNETIC_FIELD_UNCALIBRATED);
+
+ mSensorManager.registerListener(this, mAccSensor, SENSOR_RATE);
+ mSensorManager.registerListener(this, mGyroSensor, SENSOR_RATE);
+ mSensorManager.registerListener(this, mMagSensor, SENSOR_RATE);
+
+ try {
+ mAccLogWriter= new OutputStreamWriter(
+ new FileOutputStream(new File(mRecPath, "raw_acc.log")));
+ mGyroLogWriter= new OutputStreamWriter(
+ new FileOutputStream(new File(mRecPath, "raw_uncal_gyro.log")));
+ mMagLogWriter= new OutputStreamWriter(
+ new FileOutputStream(new File(mRecPath, "raw_uncal_mag.log")));
+
+ } catch (FileNotFoundException e) {
+ Log.e(TAG, "Sensor log file open failed: " + e.toString());
+ }
+ }
+
+ /**
+ * Stop recording and clean up
+ */
+ public void end() {
+ mSensorManager.flush(this);
+ mSensorManager.unregisterListener(this);
+
+ try {
+ if (mAccLogWriter != null) {
+ OutputStreamWriter writer = mAccLogWriter;
+ mAccLogWriter = null;
+ writer.close();
+ }
+ if (mGyroLogWriter != null) {
+ OutputStreamWriter writer = mGyroLogWriter;
+ mGyroLogWriter = null;
+ writer.close();
+ }
+ if (mMagLogWriter != null) {
+ OutputStreamWriter writer = mMagLogWriter;
+ mMagLogWriter = null;
+ writer.close();
+ }
+
+ } catch (IOException e) {
+ Log.e(TAG, "Sensor log file close failed: " + e.toString());
+ }
+ }
+
+ @Override
+ public void onAccuracyChanged(Sensor sensor, int i) {
+ // do not care
+ }
+
+ @Override
+ public void onSensorChanged(SensorEvent event) {
+ OutputStreamWriter writer=null;
+ switch(event.sensor.getType()) {
+ case Sensor.TYPE_ACCELEROMETER:
+ writer = mAccLogWriter;
+ break;
+ case Sensor.TYPE_GYROSCOPE_UNCALIBRATED:
+ writer = mGyroLogWriter;
+ break;
+ case Sensor.TYPE_MAGNETIC_FIELD_UNCALIBRATED:
+ writer = mMagLogWriter;
+ break;
+
+ }
+ if (writer!=null) {
+ float[] data = event.values;
+ try {
+ if (event.sensor.getType() == Sensor.TYPE_ACCELEROMETER) {
+ writer.write(String.format("%d %f %f %f\r\n",
+ event.timestamp, data[0], data[1], data[2]));
+ }else // TYPE_GYROSCOPE_UNCALIBRATED and TYPE_MAGNETIC_FIELD_UNCALIBRATED
+ {
+ writer.write(String.format("%d %f %f %f %f %f %f\r\n", event.timestamp,
+ data[0], data[1], data[2], data[3], data[4], data[5]));
+ }
+ }catch (IOException e)
+ {
+ Log.e(TAG, "Write to raw sensor log file failed.");
+ }
+
+ }
+ }
+ }
+
+ /**
+ * Rotation sensor logger class
+ */
+ class RVSensorLogger implements SensorEventListener {
+ private final String TAG = "RVSensorLogger";
+
+ private final static int SENSOR_RATE = 100;
+ RangeCoveredRegister mRegister;
+ int mAxis;
+ RVCVRecordActivity mActivity;
+
+ SensorManager mSensorManager;
+ Sensor mRVSensor;
+ OutputStreamWriter mLogWriter;
+
+ private float[] mRTemp = new float[16];
+
+ RVSensorLogger(RVCVRecordActivity activity) {
+ mActivity = activity;
+ }
+
+ /**
+ * Initialize and start recording
+ */
+ public void init() {
+ mSensorManager = (SensorManager)getSystemService(SENSOR_SERVICE);
+ mRVSensor = mSensorManager.getDefaultSensor(Sensor.TYPE_ROTATION_VECTOR);
+ mSensorManager.registerListener(this, mRVSensor, SENSOR_RATE);
+
+ try {
+ mLogWriter= new OutputStreamWriter(
+ new FileOutputStream(mActivity.getSensorLogFilePath()));
+ } catch (FileNotFoundException e) {
+ Log.e(TAG, "Sensor log file open failed: " + e.toString());
+ }
+ }
+
+ /**
+ * Stop recording and clean up
+ */
+ public void end() {
+ mSensorManager.flush(this);
+ mSensorManager.unregisterListener(this);
+
+ try {
+ if (mLogWriter != null) {
+ OutputStreamWriter writer = mLogWriter;
+ mLogWriter = null;
+ writer.close();
+ }
+ } catch (IOException e) {
+ Log.e(TAG, "Sensor log file close failed: " + e.toString());
+ }
+
+ updateRegister(null, AXIS_NONE);
+ }
+
+ private void onNewData(float[] data, long timestamp) {
+ // LOG
+ try {
+ if (mLogWriter != null) {
+ mLogWriter.write(String.format("%d %f %f %f %f\r\n", timestamp,
+ data[3], data[0], data[1], data[2]));
+ }
+ } catch (IOException e) {
+ Log.e(TAG, "Sensor log file write failed: " + e.toString());
+ }
+
+ // Update UI
+ if (mRegister != null) {
+ int d = 0;
+ int dx, dy, dz;
+ boolean valid = false;
+ SensorManager.getRotationMatrixFromVector(mRTemp, data);
+
+ dx = (int)(Math.asin(mRTemp[8])*(180.0/Math.PI));
+ dy = (int)(Math.asin(mRTemp[9])*(180.0/Math.PI));
+ dz = (int)((Math.atan2(mRTemp[4], mRTemp[0])+Math.PI)*(180.0/Math.PI));
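+                // mRTemp holds the 4x4 row-major rotation matrix; elements 8 and 9 are the
+                // world-z components of the device X and Y axes, so asin() of them yields the
+                // tilt angles, and atan2 of elements 4 and 0 yields the yaw, shifted to [0, 360).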
+
+ switch(mAxis) {
+ case SensorManager.AXIS_X:
+ d = dx;
+ valid = (Math.abs(dy) < 30);
+ break;
+ case SensorManager.AXIS_Y:
+ d = dy;
+ valid = (Math.abs(dx) < 30);
+ break;
+ case SensorManager.AXIS_Z:
+ d = dz;
+ valid = (Math.abs(dx) < 20 && Math.abs(dy) < 20);
+ break;
+ }
+
+ if (valid) {
+ mRegister.update(d);
+ mActivity.redrawIndicator();
+ }
+ }
+
+ }
+
+ public void updateRegister(RangeCoveredRegister reg, int axis) {
+ mRegister = reg;
+ mAxis = axis;
+ }
+
+
+ @Override
+ public void onAccuracyChanged(Sensor sensor, int i) {
+ // do not care
+ }
+
+ @Override
+ public void onSensorChanged(SensorEvent event) {
+ if (event.sensor.getType() == Sensor.TYPE_ROTATION_VECTOR) {
+ onNewData(event.values, event.timestamp);
+ }
+ }
+ }
+
+
+ ////////////////////////////////////////////////////////////////////////////////////////////////
+
+ /**
+     * Controls the overall logic of the record procedure: first the x-direction, then the
+     * y-direction and then the z-direction.
+ */
+ class RecordProcedureController implements Runnable {
+ private static final boolean LOCAL_LOGV = false;
+
+ private final RVCVRecordActivity mActivity;
+ private Thread mThread = null;
+
+ RecordProcedureController(RVCVRecordActivity activity) {
+ mActivity = activity;
+ mThread = new Thread(this);
+ mThread.start();
+ }
+
+ /**
+ * Run the record procedure
+ */
+ public void run() {
+ if (LOCAL_LOGV) Log.v(TAG, "Controller Thread Started.");
+ //start recording & logging
+ delay(2000);
+
+ init();
+ if (LOCAL_LOGV) Log.v(TAG, "Controller Thread init() finished.");
+
+ // test 3 axis
+        // The order is YXZ because the UI element design uses the opposite definition of the
+        // X and Y axes. The order is flipped here so that the user sees X, Y, Z.
+ recordAxis(SensorManager.AXIS_Y);
+ if (LOCAL_LOGV) Log.v(TAG, "Controller Thread axis 0 finished.");
+
+ recordAxis(SensorManager.AXIS_X);
+ if (LOCAL_LOGV) Log.v(TAG, "Controller Thread axis 1 finished.");
+
+ recordAxis(SensorManager.AXIS_Z);
+ if (LOCAL_LOGV) Log.v(TAG, "Controller Thread axis 2 finished.");
+
+ delay(1000);
+ end();
+ if (LOCAL_LOGV) Log.v(TAG, "Controller Thread End.");
+ }
+
+ private void delay(int milli) {
+ try{
+ Thread.sleep(milli);
+ } catch(InterruptedException e) {
+ if (LOCAL_LOGV) Log.v(TAG, "Controller Thread Interrupted.");
+ }
+ }
+ private void init() {
+ // start video recording
+ mActivity.startRecordVideo();
+
+ // start sensor logging & listening
+ mActivity.startRecordSensor();
+ }
+
+ private void end() {
+ // stop video recording
+ mActivity.stopRecordVideo();
+
+ // stop sensor logging
+ mActivity.stopRecordSensor();
+
+ // notify ui complete
+ runOnUiThread(new Runnable(){
+ public void run() {
+ mActivity.notifyComplete();
+ }
+ });
+ }
+
+ private void recordAxis(int axis) {
+            // delay 1 second before switching to the next axis
+ delay(1000);
+
+ // change ui
+ mActivity.switchAxisAsync(axis);
+
+ // play start sound
+ mActivity.playNotifySound(0);
+
+ // wait until axis covered
+ mActivity.waitUntilCovered(axis);
+
+ // play stop sound
+ mActivity.playNotifySound(1);
+ }
+
+ /**
+ * Force quit
+ */
+ public void quit() {
+ mThread.interrupt();
+ try {
+ if (LOCAL_LOGV) Log.v(TAG, "Wait for controller to end");
+
+ // stop video recording
+ mActivity.stopRecordVideo();
+
+ // stop sensor logging
+ mActivity.stopRecordSensor();
+
+ } catch (Exception e)
+ {
+ e.printStackTrace();
+ }
+ }
+ }
+
+}
diff --git a/apps/CtsVerifier/src/com/android/cts/verifier/sensors/RVCVXCheckAnalyzer.java b/apps/CtsVerifier/src/com/android/cts/verifier/sensors/RVCVXCheckAnalyzer.java
new file mode 100644
index 0000000..128aaa3
--- /dev/null
+++ b/apps/CtsVerifier/src/com/android/cts/verifier/sensors/RVCVXCheckAnalyzer.java
@@ -0,0 +1,1290 @@
+package com.android.cts.verifier.sensors;
+
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import android.media.MediaCodec;
+import android.media.MediaExtractor;
+import android.media.MediaFormat;
+import android.os.Debug;
+import android.os.Environment;
+import android.util.JsonWriter;
+import android.util.Log;
+
+import org.opencv.core.Mat;
+import org.opencv.core.CvType;
+import org.opencv.core.MatOfDouble;
+import org.opencv.core.MatOfFloat;
+import org.opencv.core.MatOfPoint2f;
+import org.opencv.core.MatOfPoint3f;
+import org.opencv.core.Size;
+import org.opencv.highgui.Highgui;
+import org.opencv.imgproc.Imgproc;
+import org.opencv.calib3d.Calib3d;
+import org.opencv.core.Core;
+
+import org.json.JSONObject;
+import org.json.JSONException;
+
+import java.io.BufferedReader;
+import java.io.ByteArrayOutputStream;
+import java.io.File;
+import java.io.FileNotFoundException;
+import java.io.FileOutputStream;
+import java.io.FileReader;
+import java.io.IOException;
+import java.io.OutputStream;
+import java.io.OutputStreamWriter;
+import java.nio.ByteBuffer;
+import java.util.ArrayList;
+
+import android.opengl.GLES20;
+import javax.microedition.khronos.opengles.GL10;
+
+/**
+ * This class does analysis on the recorded RVCVXCheck data sets.
+ */
+public class RVCVXCheckAnalyzer {
+ private static final String TAG = "RVCXAnalysis";
+ private static final boolean LOCAL_LOGV = false;
+ private static final boolean LOCAL_LOGD = true;
+ private final String mPath;
+
+ private static final boolean OUTPUT_DEBUG_IMAGE = false;
+ private static final double VALID_FRAME_THRESHOLD = 0.8;
+ private static final double REPROJECTION_THREASHOLD = 4.0;
+ private static final boolean FORCE_CV_ANALYSIS = false;
+ private static final boolean TRACE_VIDEO_ANALYSIS = false;
+ private static final double DECIMATION_FPS_TARGET = 15.0;
+
+ RVCVXCheckAnalyzer(String path)
+ {
+ mPath = path;
+ }
+
+ /**
+ * A class that contains the analysis results
+ *
+ */
+ class AnalyzeReport {
+ public boolean error=true;
+ public String reason = "incomplete";
+
+ // roll pitch yaw RMS error ( \sqrt{\frac{1}{n} \sum e_i^2 })
+ // unit in rad
+ public double roll_rms_error;
+ public double pitch_rms_error;
+ public double yaw_rms_error;
+
+ // roll pitch yaw max error
+ // unit in rad
+ public double roll_max_error;
+ public double pitch_max_error;
+ public double yaw_max_error;
+
+ // optimal t delta between sensor and camera data set to make best match
+ public double optimal_delta_t;
+ // the associate yaw offset based on initial values
+ public double yaw_offset;
+
+ public int n_of_frame;
+ public int n_of_valid_frame;
+
+ // both data below are in [sec]
+ public double sensor_period_avg;
+ public double sensor_period_stdev;
+
+ /**
+ * write Json format serialization to a file in case future processing need the data
+ */
+ public void writeToFile(File file) {
+ try {
+ writeJSONToStream(new FileOutputStream(file));
+ } catch (FileNotFoundException e) {
+ e.printStackTrace();
+ Log.e(TAG, "Cannot create analyze report file.");
+ }
+ }
+
+ /**
+ * Get the JSON format serialization
+ *@return Json format serialization as String
+ */
+ @Override
+ public String toString() {
+ ByteArrayOutputStream s = new ByteArrayOutputStream();
+ writeJSONToStream(s);
+ return new String(s.toByteArray(), java.nio.charset.StandardCharsets.UTF_8);
+ }
+
+ private void writeJSONToStream(OutputStream s) {
+ try{
+ JsonWriter writer =
+ new JsonWriter(
+ new OutputStreamWriter( s )
+ );
+ writer.beginObject();
+ writer.setLenient(true);
+
+ writer.name("roll_rms_error").value(roll_rms_error);
+ writer.name("pitch_rms_error").value(pitch_rms_error);
+ writer.name("yaw_rms_error").value(yaw_rms_error);
+ writer.name("roll_max_error").value(roll_max_error);
+ writer.name("pitch_max_error").value(pitch_max_error);
+ writer.name("yaw_max_error").value(yaw_max_error);
+ writer.name("optimal_delta_t").value(optimal_delta_t);
+ writer.name("yaw_offset").value(yaw_offset);
+ writer.name("n_of_frame").value(n_of_frame);
+ writer.name("n_of_valid_frame").value(n_of_valid_frame);
+ writer.name("sensor_period_avg").value(sensor_period_avg);
+ writer.name("sensor_period_stdev").value(sensor_period_stdev);
+
+ writer.endObject();
+
+ writer.close();
+ } catch (IOException e) {
+ // do nothing
+ Log.e(TAG, "Error in serialize analyze report to JSON");
+ } catch (IllegalArgumentException e) {
+ e.printStackTrace();
+ Log.e(TAG, "Invalid parameter to write into JSON format");
+ }
+ }
+ }
+
+ /**
+     * Process the data set stored in the path specified in the constructor
+     * and return an analysis report to the caller
+ *
+ * @return An AnalyzeReport that contains detailed information about analysis
+ */
+ public AnalyzeReport processDataSet() {
+        int nframe;// number of frames in the video
+        int nslog; // number of sensor log records
+        int nvlog; // number of video-derived log records
+
+
+ AnalyzeReport report = new AnalyzeReport();
+
+ ArrayList<AttitudeRec> srecs = new ArrayList<>();
+ ArrayList<AttitudeRec> vrecs = new ArrayList<>();
+ ArrayList<AttitudeRec> srecs2 = new ArrayList<>();
+
+
+ final boolean use_solved = new File(mPath, "vision_rpy.log").exists() && !FORCE_CV_ANALYSIS;
+
+ if (use_solved) {
+ nframe = nvlog = loadAttitudeRecs(new File(mPath, "vision_rpy.log"), vrecs);
+ nslog = loadAttitudeRecs(new File(mPath, "sensor_rpy.log"),srecs);
+ }else {
+ nframe = analyzeVideo(vrecs);
+ nvlog = vrecs.size();
+
+ if (LOCAL_LOGV) {
+ Log.v(TAG, "Post video analysis nvlog = " + nvlog + " nframe=" + nframe);
+ }
+ if (nvlog <= 0 || nframe <= 0) {
+ // invalid results
+                report.reason = "Unable to load recorded video.";
+ return report;
+ }
+ if ((double) nvlog / nframe < VALID_FRAME_THRESHOLD) {
+                // too many invalid frames
+                report.reason = "Too many invalid frames.";
+                return report;
+ }
+
+ fixFlippedAxis(vrecs);
+
+ nslog = loadSensorLog(srecs);
+ }
+
+        // Gradient descent would be faster than this simple search, but overall runtime is
+        // dominated by the vision part, so the optimization is not really necessary.
+ double delta_t;
+ double min_rms = Double.MAX_VALUE;
+ double min_delta_t =0.;
+ double min_yaw_offset =0.;
+
+ // pre-allocation
+ for (AttitudeRec i: vrecs) {
+ srecs2.add(new AttitudeRec(0,0,0,0));
+ }
+
+ // find optimal offset
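+        // Roll and pitch alone drive the fit below; yaw enters only through the offset taken
+        // from the first sample pair, since its zero reference is arbitrary.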
+ for (delta_t = -2.0; delta_t<2.0; delta_t +=0.01) {
+ double rms;
+ resampleSensorLog(srecs, vrecs, delta_t, 0.0, srecs2);
+ rms = Math.sqrt(calcSqrErr(vrecs, srecs2, 0)+ calcSqrErr(vrecs, srecs2, 1));
+ if (rms < min_rms) {
+ min_rms = rms;
+ min_delta_t = delta_t;
+ min_yaw_offset = vrecs.get(0).yaw - srecs2.get(0).yaw;
+ }
+ }
+ // sample at optimal offset
+ resampleSensorLog(srecs, vrecs, min_delta_t, min_yaw_offset, srecs2);
+
+ if (!use_solved) {
+ dumpAttitudeRecs(new File(mPath, "vision_rpy.log"), vrecs);
+ dumpAttitudeRecs(new File(mPath, "sensor_rpy.log"), srecs);
+ }
+ dumpAttitudeRecs(new File(mPath, "sensor_rpy_resampled.log"), srecs2);
+ dumpAttitudeError(new File(mPath, "attitude_error.log"), vrecs, srecs2);
+
+ // fill report fields
+ report.roll_rms_error = Math.sqrt(calcSqrErr(vrecs, srecs2, 0));
+ report.pitch_rms_error = Math.sqrt(calcSqrErr(vrecs, srecs2, 1));
+ report.yaw_rms_error = Math.sqrt(calcSqrErr(vrecs, srecs2, 2));
+
+ report.roll_max_error = calcMaxErr(vrecs, srecs2, 0);
+ report.pitch_max_error = calcMaxErr(vrecs, srecs2, 1);
+ report.yaw_max_error = calcMaxErr(vrecs, srecs2, 2);
+
+ report.optimal_delta_t = min_delta_t;
+ report.yaw_offset = (min_yaw_offset);
+
+ report.n_of_frame = nframe;
+ report.n_of_valid_frame = nvlog;
+
+ double [] sensor_period_stat = calcSensorPeriodStat(srecs);
+ report.sensor_period_avg = sensor_period_stat[0];
+ report.sensor_period_stdev = sensor_period_stat[1];
+
+ // output report to file and log in JSON format as well
+ report.writeToFile(new File(mPath, "report.json"));
+ if (LOCAL_LOGV) Log.v(TAG, "Report in JSON:" + report.toString());
+
+ report.reason = "Completed";
+ report.error = false;
+ return report;
+ }
+
+ /**
+ * Generate pattern geometry like this one
+ * http://docs.opencv.org/trunk/_downloads/acircles_pattern.png
+ *
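+     * Circles on odd rows are staggered by half a period: x = (2*col + row%2)*unit,
+     * y = row*unit, z = 0, where unit is an arbitrary spacing of 0.02.
+     *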
+ * @return Array of 3D points
+ */
+ private MatOfPoint3f asymmetricalCircleGrid(Size size) {
+ final int cn = 3;
+
+ int n = (int)(size.width * size.height);
+ float positions[] = new float[n * cn];
+ float unit=0.02f;
+ MatOfPoint3f grid = new MatOfPoint3f();
+
+ for (int i = 0; i < size.height; i++) {
+ for (int j = 0; j < size.width * cn; j += cn) {
+ positions[(int) (i * size.width * cn + j + 0)] =
+ (2 * (j / cn) + i % 2) * (float) unit;
+ positions[(int) (i * size.width * cn + j + 1)] =
+ i * unit;
+ positions[(int) (i * size.width * cn + j + 2)] = 0;
+ }
+ }
+ grid.create(n, 1, CvType.CV_32FC3);
+ grid.put(0, 0, positions);
+ return grid;
+ }
+
+ /**
+ * Create a camera intrinsic matrix using input parameters
+ *
+ * The camera intrinsic matrix will be like:
+ *
+ * +- -+
+ * | f 0 center.width |
+ * A = | 0 f center.height |
+ * | 0 0 1 |
+ * +- -+
+ *
+ * @return An approximated (not actually calibrated) camera matrix
+ */
+ private static Mat cameraMatrix(float f, Size center) {
+ final double [] data = {f, 0, center.width, 0, f, center.height, 0, 0, 1f};
+ Mat m = new Mat(3,3, CvType.CV_64F);
+ m.put(0, 0, data);
+ return m;
+ }
+
+ /**
+ * Attitude record in time roll pitch yaw format.
+ *
+ */
+ private class AttitudeRec {
+ public double time;
+ public double roll;
+ public double pitch;
+ public double yaw;
+
+ // ctor
+ AttitudeRec(double atime, double aroll, double apitch, double ayaw) {
+ time = atime;
+ roll = aroll;
+ pitch = apitch;
+ yaw = ayaw;
+ }
+
+ // ctor
+ AttitudeRec(double atime, double [] rpy) {
+ time = atime;
+ roll = rpy[0];
+ pitch = rpy[1];
+ yaw = rpy[2];
+ }
+
+ // copy value of another to this
+ void assign(AttitudeRec rec) {
+ time = rec.time;
+            roll = rec.roll;
+ pitch = rec.pitch;
+ yaw = rec.yaw;
+ }
+
+ // copy roll-pitch-yaw value but leave the time specified by atime
+ void assign(AttitudeRec rec, double atime) {
+ time = atime;
+            roll = rec.roll;
+ pitch = rec.pitch;
+ yaw = rec.yaw;
+ }
+
+ // set each field separately
+ void set(double atime, double aroll, double apitch, double ayaw) {
+ time = atime;
+ roll = aroll;
+ pitch = apitch;
+ yaw = ayaw;
+ }
+ }
+
+
+ /**
+     * Load the sensor log ("sensor.log", time + quaternion per line) into an
+     * ArrayList<AttitudeRec> of (time, roll, pitch, yaw) records.
+ *
+ * @return the number of sensor log items
+ */
+ private int loadSensorLog(ArrayList<AttitudeRec> recs) {
+ //ArrayList<AttitudeRec> recs = new ArrayList<AttitudeRec>();
+ File csvFile = new File(mPath, "sensor.log");
+ BufferedReader br=null;
+ String line;
+
+ // preallocate and reuse
+ double [] quat = new double[4];
+ double [] rpy = new double[3];
+
+ double t0 = -1;
+
+ try {
+ br = new BufferedReader(new FileReader(csvFile));
+ while ((line = br.readLine()) != null) {
+ //space separator
+ String[] items = line.split(" ");
+
+ if (items.length != 5) {
+ recs.clear();
+ return -1;
+ }
+
+ quat[0] = Double.parseDouble(items[1]);
+ quat[1] = Double.parseDouble(items[2]);
+ quat[2] = Double.parseDouble(items[3]);
+ quat[3] = Double.parseDouble(items[4]);
+
+ //
+ quat2rpy(quat, rpy);
+
+ if (t0 < 0) {
+ t0 = Long.parseLong(items[0])/1e9;
+ }
+ recs.add(new AttitudeRec(Long.parseLong(items[0])/1e9-t0, rpy));
+ }
+
+ } catch (FileNotFoundException e) {
+ e.printStackTrace();
+ Log.e(TAG, "Cannot find sensor logging data");
+ } catch (IOException e) {
+ e.printStackTrace();
+ Log.e(TAG, "Cannot read sensor logging data");
+ } finally {
+ if (br != null) {
+ try {
+ br.close();
+ } catch (IOException e) {
+ e.printStackTrace();
+ }
+ }
+ }
+
+ return recs.size();
+ }
+
+ /**
+ * Read video meta info
+ */
+ private class VideoMetaInfo {
+ public double fps;
+ public int frameWidth;
+ public int frameHeight;
+ public double fovWidth;
+ public double fovHeight;
+ public boolean valid = false;
+
+ VideoMetaInfo(File file) {
+
+ BufferedReader br=null;
+ String line;
+ String content="";
+ try {
+ br = new BufferedReader(new FileReader(file));
+ while ((line = br.readLine()) != null) {
+ content = content +line;
+ }
+
+ } catch (FileNotFoundException e) {
+ e.printStackTrace();
+ Log.e(TAG, "Cannot find video meta info file");
+ } catch (IOException e) {
+ e.printStackTrace();
+ Log.e(TAG, "Cannot read video meta info file");
+ } finally {
+ if (br != null) {
+ try {
+ br.close();
+ } catch (IOException e) {
+ e.printStackTrace();
+ }
+ }
+ }
+
+ if (content.isEmpty()) {
+ return;
+ }
+
+ try {
+ JSONObject json = new JSONObject(content);
+ frameWidth = json.getInt("width");
+ frameHeight = json.getInt("height");
+ fps = json.getDouble("frameRate");
+ fovWidth = json.getDouble("fovW")*Math.PI/180.0;
+ fovHeight = json.getDouble("fovH")*Math.PI/180.0;
+ } catch (JSONException e) {
+ return;
+ }
+
+ valid = true;
+
+ }
+ }
+
+
+
+ /**
+     * Debugging helper function: load an ArrayList<AttitudeRec> from a file dumped out by
+     * dumpAttitudeRecs.
+ */
+ private int loadAttitudeRecs(File file, ArrayList<AttitudeRec> recs) {
+ BufferedReader br=null;
+ String line;
+ double time;
+ double [] rpy = new double[3];
+
+ try {
+ br = new BufferedReader(new FileReader(file));
+ while ((line = br.readLine()) != null) {
+ //space separator
+ String[] items = line.split(" ");
+
+ if (items.length != 4) {
+ recs.clear();
+ return -1;
+ }
+
+ time = Double.parseDouble(items[0]);
+ rpy[0] = Double.parseDouble(items[1]);
+ rpy[1] = Double.parseDouble(items[2]);
+ rpy[2] = Double.parseDouble(items[3]);
+
+ recs.add(new AttitudeRec(time, rpy));
+ }
+
+ } catch (FileNotFoundException e) {
+ e.printStackTrace();
+ Log.e(TAG, "Cannot find AttitudeRecs file specified.");
+ } catch (IOException e) {
+ e.printStackTrace();
+ Log.e(TAG, "Read AttitudeRecs file failure");
+ } finally {
+ if (br != null) {
+ try {
+ br.close();
+ } catch (IOException e) {
+ e.printStackTrace();
+ }
+ }
+ }
+
+ return recs.size();
+ }
+ /**
+     * Debugging helper function: dump an ArrayList<AttitudeRec> to a file.
+ */
+ private void dumpAttitudeRecs(File file, ArrayList<AttitudeRec> recs) {
+ OutputStreamWriter w=null;
+ try {
+ w = new OutputStreamWriter(new FileOutputStream(file));
+
+ for (AttitudeRec r : recs) {
+ w.write(String.format("%f %f %f %f\r\n", r.time, r.roll, r.pitch, r.yaw));
+ }
+ w.close();
+ } catch(FileNotFoundException e) {
+ e.printStackTrace();
+ Log.e(TAG, "Cannot create AttitudeRecs file.");
+ } catch (IOException e) {
+ Log.e(TAG, "Write AttitudeRecs file failure");
+ } finally {
+ if (w!=null) {
+ try {
+ w.close();
+ } catch (IOException e) {
+ e.printStackTrace();
+ }
+ }
+ }
+ }
+
+ /**
+     * Compute the sensor sample period statistics (mean and standard deviation) from a sensor
+     * log stored as an ArrayList<AttitudeRec>.
+     *
+     * @return A double array with exactly 2 items: [0] is the mean and [1] is the standard
+     *         deviation of the sample period.
+ *
+ */
+ private double [] calcSensorPeriodStat(ArrayList<AttitudeRec> srec) {
+ double tp = srec.get(0).time;
+ int i;
+ double sum = 0.0;
+ double sumsq = 0.0;
+ for(i=1; i<srec.size(); ++i) {
+ double dt;
+ dt = srec.get(i).time - tp;
+ sum += dt;
+ sumsq += dt*dt;
+ tp += dt;
+ }
+ double [] ret = new double[2];
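+        // mean = E[dt]; stdev = sqrt(E[dt^2] - E[dt]^2) over successive sample time differences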
+ ret[0] = sum/srec.size();
+ ret[1] = Math.sqrt(sumsq/srec.size() - ret[0]*ret[0]);
+ return ret;
+ }
+
+ /**
+     * Flip the axis (yaw is negated), as the images are flipped upside down in the OpenGL frames.
+ */
+ private void fixFlippedAxis(ArrayList<AttitudeRec> vrecs) {
+ for (AttitudeRec i: vrecs) {
+ i.yaw = -i.yaw;
+ }
+ }
+
+ /**
+     * Calculate the maximum error on the specified axis between two time-aligned (resampled)
+     * ArrayList<AttitudeRec>s. The yaw axis needs special treatment, as errors of 0 and 2*pi
+     * are the same thing.
+ *
+ * @param ra one ArrayList of AttitudeRec
+ * @param rb the other ArrayList of AttitudeRec
+ * @param axis axis id for the comparison (0 = roll, 1 = pitch, 2 = yaw)
+ * @return Maximum error
+ */
+ private double calcMaxErr(ArrayList<AttitudeRec> ra, ArrayList<AttitudeRec> rb, int axis) {
+ // check if they are valid and comparable data
+ if (ra.size() != rb.size()) {
+            throw new ArrayIndexOutOfBoundsException("The two arrays have to be the same size");
+ }
+ // check input parameter validity
+ if (axis<0 || axis > 2) {
+ throw new IllegalArgumentException("Invalid data axis.");
+ }
+
+ int i;
+ double max = 0.0;
+ double diff = 0.0;
+ for(i=0; i<ra.size(); ++i) {
+ // make sure they are aligned data
+ if (ra.get(i).time != rb.get(i).time) {
+                throw new IllegalArgumentException("Element "+i+
+                        " of the two inputs has a different time.");
+ }
+ switch(axis) {
+ case 0:
+                    diff = ra.get(i).roll - rb.get(i).roll; // they are always opposite of each other
+ break;
+ case 1:
+ diff = ra.get(i).pitch - rb.get(i).pitch;
+ break;
+ case 2:
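+                    // map the yaw difference onto the shortest angular distance, so that
+                    // differences near 0 and near 2*pi count as the same error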
+ diff = Math.abs(((4*Math.PI + ra.get(i).yaw - rb.get(i).yaw)%(2*Math.PI))
+ -Math.PI)-Math.PI;
+ break;
+ }
+ diff = Math.abs(diff);
+ if (diff>max) {
+ max = diff;
+ }
+ }
+ return max;
+ }
+
+ /**
+     * Calculate the mean square error on the specified axis between two time-aligned (resampled)
+     * ArrayList<AttitudeRec>s. The yaw axis needs special treatment, as errors of 0 and 2*pi
+     * are the same thing.
+ *
+ * @param ra one ArrayList of AttitudeRec
+ * @param rb the other ArrayList of AttitudeRec
+ * @param axis axis id for the comparison (0 = roll, 1 = pitch, 2 = yaw)
+ * @return Mean square error
+ */
+ private double calcSqrErr(ArrayList<AttitudeRec> ra, ArrayList<AttitudeRec> rb, int axis) {
+ // check if they are valid and comparable data
+ if (ra.size() != rb.size()) {
+            throw new ArrayIndexOutOfBoundsException("The two arrays have to be the same size");
+ }
+ // check input parameter validity
+ if (axis<0 || axis > 2) {
+ throw new IllegalArgumentException("Invalid data axis.");
+ }
+
+ int i;
+ double sum = 0.0;
+ double diff = 0.0;
+ for(i=0; i<ra.size(); ++i) {
+ // check input data validity
+ if (ra.get(i).time != rb.get(i).time) {
+                throw new IllegalArgumentException("Element "+i+
+                        " of the two inputs has a different time.");
+ }
+
+ switch(axis) {
+ case 0:
+ diff = ra.get(i).roll - rb.get(i).roll;
+ break;
+ case 1:
+ diff = ra.get(i).pitch - rb.get(i).pitch;
+ break;
+ case 2:
+ diff = Math.abs(((4*Math.PI + ra.get(i).yaw - rb.get(i).yaw)%(2*Math.PI))-
+ Math.PI)-Math.PI;
+ break;
+ }
+
+ sum += diff*diff;
+ }
+ return sum/ra.size();
+ }
+
+ /**
+ * Debugging helper function. Dump the error between two time aligned ArrayList<AttitudeRec>'s
+ *
+ * @param file File to write to
+ * @param ra one ArrayList of AttitudeRec
+ * @param rb the other ArrayList of AttitudeRec
+ */
+ private void dumpAttitudeError(File file, ArrayList<AttitudeRec> ra, ArrayList<AttitudeRec> rb){
+ if (ra.size() != rb.size()) {
+            throw new ArrayIndexOutOfBoundsException("The two arrays have to be the same size");
+ }
+
+ int i;
+
+ ArrayList<AttitudeRec> rerr = new ArrayList<>();
+ for(i=0; i<ra.size(); ++i) {
+ if (ra.get(i).time != rb.get(i).time) {
+                throw new IllegalArgumentException("Element "+ i
+                        + " of the two inputs has a different time.");
+ }
+
+ rerr.add(new AttitudeRec(ra.get(i).time, ra.get(i).roll - rb.get(i).roll,
+ ra.get(i).pitch - rb.get(i).pitch,
+ (Math.abs(((4*Math.PI + ra.get(i).yaw - rb.get(i).yaw)%(2*Math.PI))
+ -Math.PI)-Math.PI)));
+
+ }
+ dumpAttitudeRecs(file, rerr);
+ }
+
+ /**
+ * Resample one ArrayList<AttitudeRec> with respect to another ArrayList<AttitudeRec>
+ *
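+     * Linear interpolation is used between neighboring records; timebase samples that fall
+     * outside the time range of rec are clamped to the first or last record.
+     *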
+ * @param rec the ArrayList of AttitudeRec to be sampled
+ * @param timebase the other ArrayList of AttitudeRec that serves as time base
+ * @param delta_t offset in time before resample
+ * @param yaw_offset offset in yaw axis
+ * @param resampled output ArrayList of AttitudeRec
+ */
+
+ private void resampleSensorLog(ArrayList<AttitudeRec> rec, ArrayList<AttitudeRec> timebase,
+ double delta_t, double yaw_offset, ArrayList<AttitudeRec> resampled) {
+ int i;
+ int j = -1;
+ for(i=0; i<timebase.size(); i++) {
+ double time = timebase.get(i).time + delta_t;
+
+ while(j<rec.size()-1 && rec.get(j+1).time < time) j++;
+
+ if (j == -1) {
+ //use first
+ resampled.get(i).assign(rec.get(0), timebase.get(i).time);
+ } else if (j == rec.size()-1) {
+ // use last
+ resampled.get(i).assign(rec.get(j), timebase.get(i).time);
+ } else {
+ // do linear resample
+ double alpha = (time - rec.get(j).time)/((rec.get(j+1).time - rec.get(j).time));
+ double roll = (1-alpha) * rec.get(j).roll + alpha * rec.get(j+1).roll;
+ double pitch = (1-alpha) * rec.get(j).pitch + alpha * rec.get(j+1).pitch;
+ double yaw = (1-alpha) * rec.get(j).yaw + alpha * rec.get(j+1).yaw + yaw_offset;
+ resampled.get(i).set(timebase.get(i).time, roll, pitch, yaw);
+ }
+ }
+ }
+
+ /**
+     * Analyze video frames using a computer vision approach and generate an ArrayList<AttitudeRec>
+ *
+ * @param recs output ArrayList of AttitudeRec
+     * @return total number of frames in the video
+ */
+ private int analyzeVideo(ArrayList<AttitudeRec> recs) {
+ VideoMetaInfo meta = new VideoMetaInfo(new File(mPath, "videometa.json"));
+
+ int decimation = 1;
+
+ if (meta.fps > DECIMATION_FPS_TARGET) {
+ decimation = (int)(meta.fps / DECIMATION_FPS_TARGET);
+ meta.fps /=decimation;
+ }
+
+ VideoDecoderForOpenCV videoDecoder = new VideoDecoderForOpenCV(
+                new File(mPath, "video.mp4"), decimation); // process one frame out of every "decimation" frames
+
+
+ Mat frame;
+ Mat gray = new Mat();
+ int i = -1;
+
+ Size frameSize = videoDecoder.getSize();
+
+ if (frameSize.width != meta.frameWidth || frameSize.height != meta.frameHeight) {
+ // this is very unlikely
+ return -1;
+ }
+
+ if (TRACE_VIDEO_ANALYSIS) {
+ Debug.startMethodTracing("cvprocess");
+ }
+
+ Size patternSize = new Size(4,11);
+
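+        // Approximate pinhole focal length in pixels, derived from the horizontal field of view:
+        // f = (imageWidth / 2) / tan(fovWidth / 2).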
+ float fc = (float)(meta.frameWidth/2.0/Math.tan(meta.fovWidth/2.0));
+ Mat camMat = cameraMatrix(fc, new Size(frameSize.width/2, frameSize.height/2));
+ MatOfDouble coeff = new MatOfDouble(); // dummy
+
+ MatOfPoint2f centers = new MatOfPoint2f();
+ MatOfPoint3f grid = asymmetricalCircleGrid(patternSize);
+ Mat rvec = new MatOfFloat();
+ Mat tvec = new MatOfFloat();
+
+ MatOfPoint2f reprojCenters = new MatOfPoint2f();
+
+ if (LOCAL_LOGV) {
+ Log.v(TAG, "Camera Mat = \n" + camMat.dump());
+ }
+
+ long startTime = System.nanoTime();
+
+ while ((frame = videoDecoder.getFrame()) !=null) {
+ if (LOCAL_LOGV) {
+ Log.v(TAG, "got a frame " + i);
+ }
+
+            // this has to come first, as some cases below skip the rest of
+            // this loop iteration
+ i++;
+
+ // convert to gray manually as by default findCirclesGridDefault uses COLOR_BGR2GRAY
+ Imgproc.cvtColor(frame, gray, Imgproc.COLOR_RGB2GRAY);
+
+ boolean foundPattern = Calib3d.findCirclesGridDefault(
+ gray, patternSize, centers, Calib3d.CALIB_CB_ASYMMETRIC_GRID);
+
+ if (!foundPattern) {
+ // skip to next frame
+ continue;
+ }
+
+ if (OUTPUT_DEBUG_IMAGE) {
+ Calib3d.drawChessboardCorners(frame, patternSize, centers, true);
+ }
+
+            // Figure out the extrinsic parameters using the ground-truth 3D points and the pixel
+            // positions of the blobs found by findCirclesGrid; an estimated camera matrix and
+            // zero distortion are assumed.
+ boolean foundSolution =
+ Calib3d.solvePnP(grid, centers, camMat, coeff, rvec, tvec,
+ false, Calib3d.CV_ITERATIVE);
+
+ if (!foundSolution) {
+ // skip to next frame
+ continue;
+ }
+
+            // reproject the points to evaluate the accuracy of the solvePnP result
+ Calib3d.projectPoints(grid, rvec, tvec, camMat, coeff, reprojCenters);
+
+ // error is evaluated in norm2, which is real error in pixel distance / sqrt(2)
+ double error = Core.norm(centers, reprojCenters, Core.NORM_L2);
+
+ if (LOCAL_LOGV) {
+ Log.v(TAG, "Found attitude, re-projection error = " + error);
+ }
+
+ // if error is reasonable, add it into the results
+ if (error < REPROJECTION_THREASHOLD) {
+ double [] rv = new double[3];
+ rvec.get(0,0, rv);
+ recs.add(new AttitudeRec((double) i / meta.fps, rodr2rpy(rv)));
+ }
+
+ if (OUTPUT_DEBUG_IMAGE) {
+ Calib3d.drawChessboardCorners(frame, patternSize, reprojCenters, true);
+ Highgui.imwrite(Environment.getExternalStorageDirectory().getPath()
+ + "/RVCVRecData/DebugCV/img" + i + ".png", frame);
+ }
+ }
+
+ if (LOCAL_LOGV) {
+ Log.v(TAG, "Finished decoding");
+ }
+
+ if (TRACE_VIDEO_ANALYSIS) {
+ Debug.stopMethodTracing();
+ }
+
+ if (LOCAL_LOGV) {
+ // time analysis
+ double totalTime = (System.nanoTime()-startTime)/1e9;
+ Log.i(TAG, "Total time: "+totalTime +"s, Per frame time: "+totalTime/i );
+ }
+ return i;
+ }
+
+ /**
+     * OpenCV for Android does not yet support VideoCapture from a file.
+     * This is a makeshift solution until it is supported.
+     * One issue right now is that glReadPixels is quite slow: around 6.5 ms for a 720p frame.
+ */
+ private class VideoDecoderForOpenCV implements Runnable {
+ private MediaExtractor extractor=null;
+ private MediaCodec decoder=null;
+ private CtsMediaOutputSurface surface=null;
+
+ private MatBuffer mMatBuffer;
+
+ private final File mVideoFile;
+
+ private boolean valid;
+ private Object setupSignal;
+
+ private Thread mThread;
+ private int mDecimation;
+
+ /**
+ * Constructor
+ * @param file video file
+ * @param decimation process every "decimation" number of frame
+ */
+ VideoDecoderForOpenCV(File file, int decimation) {
+ mVideoFile = file;
+ mDecimation = decimation;
+ valid = false;
+
+ start();
+ }
+
+ /**
+ * Constructor
+ * @param file video file
+ */
+ VideoDecoderForOpenCV(File file) {
+ this(file, 1);
+ }
+
+ /**
+         * Test if the video decoder is in a valid state and ready to output video.
+         * @return true if the decoder set up successfully, false otherwise.
+ */
+ public boolean isValid() {
+ return valid;
+ }
+
+ private void start() {
+ setupSignal = new Object();
+ mThread = new Thread(this);
+ mThread.start();
+
+ synchronized (setupSignal) {
+ try {
+ setupSignal.wait();
+ } catch (InterruptedException e) {
+ Log.e(TAG, "Interrupted when waiting for video decoder setup ready");
+ }
+ }
+ }
+ private void stop() {
+ if (mThread != null) {
+ mThread.interrupt();
+ try {
+ mThread.join();
+ } catch (InterruptedException e) {
+ Log.e(TAG, "Interrupted when waiting for video decoder thread to stop");
+ }
+ try {
+ decoder.stop();
+ }catch (IllegalStateException e) {
+ Log.e(TAG, "Video decoder is not in a state that can be stopped");
+ }
+ }
+ mThread = null;
+ }
+
+ void teardown() {
+ if (decoder!=null) {
+ decoder.release();
+ decoder = null;
+ }
+ if (surface!=null) {
+ surface.release();
+ surface = null;
+ }
+ if (extractor!=null) {
+ extractor.release();
+ extractor = null;
+ }
+ }
+
+ void setup() {
+ int width=0, height=0;
+
+ extractor = new MediaExtractor();
+
+ try {
+ extractor.setDataSource(mVideoFile.getPath());
+ } catch (IOException e) {
+ return;
+ }
+
+ for (int i = 0; i < extractor.getTrackCount(); i++) {
+ MediaFormat format = extractor.getTrackFormat(i);
+ String mime = format.getString(MediaFormat.KEY_MIME);
+ width = format.getInteger(MediaFormat.KEY_WIDTH);
+ height = format.getInteger(MediaFormat.KEY_HEIGHT);
+
+ if (mime.startsWith("video/")) {
+ extractor.selectTrack(i);
+ try {
+ decoder = MediaCodec.createDecoderByType(mime);
+ }catch (IOException e) {
+ continue;
+ }
+ // Decode to surface
+ //decoder.configure(format, surface, null, 0);
+
+ // Decode to offscreen surface
+ surface = new CtsMediaOutputSurface(width, height);
+ mMatBuffer = new MatBuffer(width, height);
+
+ decoder.configure(format, surface.getSurface(), null, 0);
+ break;
+ }
+ }
+
+ if (decoder == null) {
+ Log.e("VideoDecoderForOpenCV", "Can't find video info!");
+ return;
+ }
+ valid = true;
+ }
+
+ @Override
+ public void run() {
+ setup();
+
+ synchronized (setupSignal) {
+ setupSignal.notify();
+ }
+
+ if (!valid) {
+ return;
+ }
+
+ decoder.start();
+
+ ByteBuffer[] inputBuffers = decoder.getInputBuffers();
+ ByteBuffer[] outputBuffers = decoder.getOutputBuffers();
+ MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
+
+ boolean isEOS = false;
+ long startMs = System.currentTimeMillis();
+ long timeoutUs = 10000;
+
+ int iframe = 0;
+
+ while (!Thread.interrupted()) {
+ if (!isEOS) {
+ int inIndex = decoder.dequeueInputBuffer(10000);
+ if (inIndex >= 0) {
+ ByteBuffer buffer = inputBuffers[inIndex];
+ int sampleSize = extractor.readSampleData(buffer, 0);
+ if (sampleSize < 0) {
+ if (LOCAL_LOGD) {
+ Log.d("VideoDecoderForOpenCV",
+ "InputBuffer BUFFER_FLAG_END_OF_STREAM");
+ }
+ decoder.queueInputBuffer(inIndex, 0, 0, 0,
+ MediaCodec.BUFFER_FLAG_END_OF_STREAM);
+ isEOS = true;
+ } else {
+ decoder.queueInputBuffer(inIndex, 0, sampleSize,
+ extractor.getSampleTime(), 0);
+ extractor.advance();
+ }
+ }
+ }
+
+ int outIndex = decoder.dequeueOutputBuffer(info, 10000);
+ MediaFormat outFormat;
+ switch (outIndex) {
+ case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED:
+ if (LOCAL_LOGD) {
+ Log.d("VideoDecoderForOpenCV", "INFO_OUTPUT_BUFFERS_CHANGED");
+ }
+ outputBuffers = decoder.getOutputBuffers();
+ break;
+ case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:
+ outFormat = decoder.getOutputFormat();
+ if (LOCAL_LOGD) {
+ Log.d("VideoDecoderForOpenCV", "New format " + outFormat);
+ }
+ break;
+ case MediaCodec.INFO_TRY_AGAIN_LATER:
+ if (LOCAL_LOGD) {
+ Log.d("VideoDecoderForOpenCV", "dequeueOutputBuffer timed out!");
+ }
+ break;
+ default:
+
+ ByteBuffer buffer = outputBuffers[outIndex];
+ boolean doRender = (info.size != 0);
+
+ // As soon as we call releaseOutputBuffer, the buffer will be forwarded
+ // to SurfaceTexture to convert to a texture. The API doesn't
+ // guarantee that the texture will be available before the call
+ // returns, so we need to wait for the onFrameAvailable callback to
+ // fire. If we don't wait, we risk rendering from the previous frame.
+ decoder.releaseOutputBuffer(outIndex, doRender);
+
+ if (doRender) {
+ surface.awaitNewImage();
+ surface.drawImage();
+ if (LOCAL_LOGD) {
+ Log.d("VideoDecoderForOpenCV", "Finish drawing a frame!");
+ }
+ if ((iframe++ % mDecimation) == 0) {
+ //Send the frame for processing
+ mMatBuffer.put();
+ }
+ }
+ break;
+ }
+
+ if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
+ if (LOCAL_LOGD) {
+ Log.d("VideoDecoderForOpenCV", "OutputBuffer BUFFER_FLAG_END_OF_STREAM");
+ }
+ break;
+ }
+ }
+ mMatBuffer.invalidate();
+
+ decoder.stop();
+
+ teardown();
+ mThread = null;
+ }
+
+
+ /**
+ * Get next valid frame
+ * @return Frame in OpenCV mat
+ */
+ public Mat getFrame() {
+ return mMatBuffer.get();
+ }
+
+ /**
+ * Get the size of the frame
+ * @return size of the frame
+ */
+ Size getSize() {
+ return mMatBuffer.getSize();
+ }
+
+ /**
+ * A synchronized buffer
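+         *
+         * A single-slot handoff between the decoder thread and the analysis thread: put()
+         * reads the rendered frame back with glReadPixels and blocks while the slot is full;
+         * get() blocks until a frame is available and returns it as an OpenCV Mat.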
+ */
+ class MatBuffer {
+ private Mat mat;
+ private byte[] bytes;
+ private ByteBuffer buf;
+ private boolean full;
+
+ private int mWidth, mHeight;
+ private boolean mValid = false;
+
+ MatBuffer(int width, int height) {
+ mWidth = width;
+ mHeight = height;
+
+ mat = new Mat(height, width, CvType.CV_8UC4); //RGBA
+ buf = ByteBuffer.allocateDirect(width*height*4);
+ bytes = new byte[width*height*4];
+
+ mValid = true;
+ full = false;
+ }
+
+ public synchronized void invalidate() {
+ mValid = false;
+ notifyAll();
+ }
+
+ public synchronized Mat get() {
+
+ if (!mValid) return null;
+                while (!full) {
+ try {
+ wait();
+ if (!mValid) return null;
+ } catch (InterruptedException e) {
+ return null;
+ }
+ }
+ mat.put(0,0, bytes);
+ full = false;
+ notifyAll();
+ return mat;
+ }
+ public synchronized void put() {
+ while (full) {
+ try {
+ wait();
+ } catch (InterruptedException e) {
+ Log.e(TAG, "Interrupted when waiting for space in buffer");
+ }
+ }
+ GLES20.glReadPixels(0, 0, mWidth, mHeight, GL10.GL_RGBA,
+ GL10.GL_UNSIGNED_BYTE, buf);
+ buf.get(bytes);
+ buf.rewind();
+
+ full = true;
+ notifyAll();
+ }
+
+ public Size getSize() {
+ if (valid) {
+ return mat.size();
+ }
+ return new Size();
+ }
+ }
+ }
+
+
+ /* a small set of math functions */
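+    // The quat2rpy conversions assume quaternions in [w, x, y, z] order and return standard
+    // Tait-Bryan roll-pitch-yaw angles in radians.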
+ private static double [] quat2rpy( double [] q) {
+ double [] rpy = {Math.atan2(2*(q[0]*q[1]+q[2]*q[3]), 1-2*(q[1]*q[1]+q[2]*q[2])),
+ Math.asin(2*(q[0]*q[2] - q[3]*q[1])),
+ Math.atan2(2*(q[0]*q[3]+q[1]*q[2]), 1-2*(q[2]*q[2]+q[3]*q[3]))};
+ return rpy;
+ }
+
+ private static void quat2rpy( double [] q, double[] rpy) {
+ rpy[0] = Math.atan2(2*(q[0]*q[1]+q[2]*q[3]), 1-2*(q[1]*q[1]+q[2]*q[2]));
+ rpy[1] = Math.asin(2*(q[0]*q[2] - q[3]*q[1]));
+ rpy[2] = Math.atan2(2*(q[0]*q[3]+q[1]*q[2]), 1-2*(q[2]*q[2]+q[3]*q[3]));
+ }
+
+ private static Mat quat2rpy(Mat quat) {
+ double [] q = new double[4];
+ quat.get(0,0,q);
+
+ double [] rpy = {Math.atan2(2*(q[0]*q[1]+q[2]*q[3]), 1-2*(q[1]*q[1]+q[2]*q[2])),
+ Math.asin(2*(q[0]*q[2] - q[3]*q[1])),
+ Math.atan2(2*(q[0]*q[3]+q[1]*q[2]), 1-2*(q[2]*q[2]+q[3]*q[3]))};
+
+ Mat rpym = new Mat(3,1, CvType.CV_64F);
+ rpym.put(0,0, rpy);
+ return rpym;
+ }
+
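+    // A Rodrigues rotation vector encodes a rotation of angle t = |r| about the axis r/|r|;
+    // the corresponding unit quaternion is [cos(t/2), sin(t/2)*r/|r|].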
+ private static double [] rodr2quat( double [] r) {
+ double t = Math.sqrt(r[0]*r[0]+r[1]*r[1]+r[2]*r[2]);
+ double [] quat = {Math.cos(t/2), Math.sin(t/2)*r[0]/t,Math.sin(t/2)*r[1]/t,
+ Math.sin(t/2)*r[2]/t};
+ return quat;
+ }
+
+ private static void rodr2quat( double [] r, double [] quat) {
+ double t = Math.sqrt(r[0]*r[0]+r[1]*r[1]+r[2]*r[2]);
+ quat[0] = Math.cos(t/2);
+ quat[1] = Math.sin(t/2)*r[0]/t;
+ quat[2] = Math.sin(t/2)*r[1]/t;
+ quat[3] = Math.sin(t/2)*r[2]/t;
+ }
+
+ private static Mat rodr2quat(Mat rodr) {
+ double t = Core.norm(rodr);
+ double [] r = new double[3];
+ rodr.get(0,0,r);
+
+ double [] quat = {Math.cos(t/2), Math.sin(t/2)*r[0]/t,Math.sin(t/2)*r[1]/t,
+ Math.sin(t/2)*r[2]/t};
+ Mat quatm = new Mat(4,1, CvType.CV_64F);
+ quatm.put(0, 0, quat);
+ return quatm;
+ }
+
+ private static double [] rodr2rpy( double [] r) {
+ return quat2rpy(rodr2quat(r));
+ }
+ //////////////////
+
+}
diff --git a/apps/CtsVerifier/src/com/android/cts/verifier/sensors/RVCVXCheckTestActivity.java b/apps/CtsVerifier/src/com/android/cts/verifier/sensors/RVCVXCheckTestActivity.java
new file mode 100644
index 0000000..ffb0d85
--- /dev/null
+++ b/apps/CtsVerifier/src/com/android/cts/verifier/sensors/RVCVXCheckTestActivity.java
@@ -0,0 +1,318 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.cts.verifier.sensors;
+
+
+import android.hardware.cts.helpers.SensorTestStateNotSupportedException;
+import android.os.Bundle;
+
+import com.android.cts.verifier.sensors.base.SensorCtsVerifierTestActivity;
+import com.android.cts.verifier.sensors.helpers.OpenCVLibrary;
+
+import junit.framework.Assert;
+
+import android.content.Intent;
+
+import java.util.concurrent.CountDownLatch;
+
+/**
+ * This test (Rotation Vector - Computer Vision Cross Check, or RVCVXCheck for short) verifies
+ * that a mobile device can detect its own orientation in a relatively accurate manner.
+ *
+ * Currently only the ROTATION_VECTOR sensor is used.
+ *
+ */
+public class RVCVXCheckTestActivity
+ extends SensorCtsVerifierTestActivity {
+ public RVCVXCheckTestActivity() {
+ super(RVCVXCheckTestActivity.class);
+ }
+
+ CountDownLatch mRecordActivityFinishedSignal = null;
+
+ private static final int REQ_CODE_TXCVRECORD = 0x012345678;
+ private static final boolean TEST_USING_DEBUGGING_DATA = false;
+ private static final String PATH_DEBUGGING_DATA = "/sdcard/RXCVRecData/150313-014443/";
+
+ private String mRecPath;
+
+ RVCVXCheckAnalyzer.AnalyzeReport mReport = null;
+
+ private boolean mRecordSuccessful = false;
+ private boolean mOpenCVLoadSuccessful = false;
+
+
+ /**
+     * The activity setup collects all the required data for the test cases.
+     * This approach allows all sensors to be tested at once.
+ */
+ @Override
+ protected void activitySetUp() throws InterruptedException {
+
+ mRecPath = "";
+
+ showUserMessage("Loading OpenCV Library...");
+ int retry = 10;
+
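+        // Poll for up to ~1 second (10 x 100 ms) for the asynchronous OpenCV load started in
+        // onCreate to finish.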
+ while(retry-->0) {
+ try {
+ Thread.sleep(100);
+ } catch (InterruptedException e) {
+ //
+ }
+ if (OpenCVLibrary.isLoaded()) {
+ break;
+ }
+ }
+ if (!OpenCVLibrary.isLoaded()) {
+            // OpenCV could not be loaded; bail out and let test00OpenCV report the failure
+ clearText();
+ return;
+ }
+ showUserMessage("OpenCV Library Successfully Loaded");
+
+ mOpenCVLoadSuccessful = true;
+
+ if (TEST_USING_DEBUGGING_DATA) {
+ mRecPath = PATH_DEBUGGING_DATA;
+
+ // assume the data is there already
+ mRecordSuccessful = true;
+ } else {
+ showUserMessage("Take the test as instructed below:\n" +
+ "1. Print out the test pattern and place it on a "+
+ "horizontal surface.\n" +
+                    "2. Start the test and align the yellow square on the screen "+
+                    "roughly with the yellow square on the printed pattern.\n" +
+                    "3. Follow the prompt to rotate the phone while keeping the "+
+                    "entire test pattern inside the camera's view. This requires " +
+ "orbiting the phone around and aiming the "+
+ "camera at the test pattern at the same time.\n" +
+ "4. Wait patiently for the analysis to finish.\n");
+
+ waitForUserToContinue();
+
+ // prepare sync signal
+ mRecordActivityFinishedSignal = new CountDownLatch(1);
+
+ // record both sensor and camera
+ Intent intent = new Intent(this, RVCVRecordActivity.class);
+ startActivityForResult(intent, REQ_CODE_TXCVRECORD);
+
+ // wait for record finish
+ mRecordActivityFinishedSignal.await();
+
+ if ("".equals(mRecPath)) {
+ showUserMessage("Recording failed or exited prematurely.");
+ waitForUserToContinue();
+ } else {
+ showUserMessage("Recording is done!");
+                showUserMessage("Results are in path: " + mRecPath);
+ mRecordSuccessful = true;
+ }
+ }
+
+
+ if (mRecordSuccessful) {
+            showUserMessage("Please wait for the analysis ... \n"+
+                    "It may take a few minutes. You will be notified by sound and vibration "+
+                    "when it is finished.");
+
+ // Analysis of recorded video and sensor data using RVCXAnalyzer
+ RVCVXCheckAnalyzer analyzer = new RVCVXCheckAnalyzer(mRecPath);
+ mReport = analyzer.processDataSet();
+
+ playSound();
+ vibrate(500);
+
+ if (mReport == null) {
+ showUserMessage("Analysis failed due to unknown reason!");
+ } else {
+ if (mReport.error) {
+ showUserMessage("Analysis failed: " + mReport.reason);
+ } else {
+ showUserMessage(String.format("Analysis finished!\n" +
+ "Roll error (Rms, max) = %4.3f, %4.3f rad\n" +
+ "Pitch error (Rms, max) = %4.3f, %4.3f rad\n" +
+ "Yaw error (Rms, max) = %4.3f, %4.3f rad\n" +
+ "N of Frame (valid, total) = %d, %d\n" +
+ "Sensor period (mean, stdev) = %4.3f, %4.3f ms\n" +
+ "Time offset: %4.3f s \n" +
+ "Yaw offset: %4.3f rad \n\n",
+ mReport.roll_rms_error, mReport.roll_max_error,
+ mReport.pitch_rms_error, mReport.pitch_max_error,
+ mReport.yaw_rms_error, mReport.yaw_max_error,
+ mReport.n_of_valid_frame, mReport.n_of_frame,
+ mReport.sensor_period_avg * 1000.0, mReport.sensor_period_stdev*1000.0,
+ mReport.optimal_delta_t, mReport.yaw_offset));
+                    showUserMessage("Please click Next after reviewing the details.");
+ waitForUserToContinue();
+ }
+ }
+ }
+ clearText();
+ }
+
+ /**
+     Receive the result from RVCVRecordActivity, which is a path where the recorded
+     video and sensor data are stored.
+ */
+ @Override
+ protected void onActivityResult(int requestCode, int resultCode, Intent data) {
+ // Check which request we're responding to
+ if (requestCode == REQ_CODE_TXCVRECORD) {
+ // Make sure the request was successful
+
+ if (resultCode == RESULT_OK) {
+ mRecPath = data.getData().getPath();
+ }
+
+ // notify it is finished
+ mRecordActivityFinishedSignal.countDown();
+ }
+ super.onActivityResult(requestCode, resultCode, data);
+ }
+
+ /**
+ * Test cases.
+ */
+
+ public String test00OpenCV() throws Throwable {
+
+ String message = "OpenCV is loaded";
+ Assert.assertTrue("OpenCV library cannot be loaded.", mOpenCVLoadSuccessful);
+ return message;
+ }
+
+
+ public String test01Recording() throws Throwable {
+
+ loadOpenCVSuccessfulOrSkip();
+
+ String message = "Record is successful.";
+ Assert.assertTrue("Record is not successful.", mRecordSuccessful);
+ return message;
+ }
+
+ public String test02Analysis() throws Throwable {
+
+ loadOpenCVSuccessfulOrSkip();
+ recordSuccessfulOrSkip();
+
+        String message = "Analysis result: " + (mReport == null ? "no report" : mReport.reason);
+        Assert.assertTrue(message, (mReport != null && !mReport.error));
+ return message;
+ }
+
+ public String test1RollAxis() throws Throwable {
+
+ loadOpenCVSuccessfulOrSkip();
+ recordSuccessfulOrSkip();
+ analyzeSuccessfulOrSkip();
+
+ String message = "Test Roll Axis Accuracy";
+
+ Assert.assertEquals("Roll RMS error", 0.0, mReport.roll_rms_error, 0.15);
+ Assert.assertEquals("Roll max error", 0.0, mReport.roll_max_error, 0.35);
+ return message;
+ }
+
+ public String test2PitchAxis() throws Throwable {
+
+ loadOpenCVSuccessfulOrSkip();
+ recordSuccessfulOrSkip();
+ analyzeSuccessfulOrSkip();
+
+ String message = "Test Pitch Axis Accuracy";
+
+ Assert.assertEquals("Pitch RMS error", 0.0, mReport.pitch_rms_error, 0.15);
+ Assert.assertEquals("Pitch max error", 0.0, mReport.pitch_max_error, 0.35);
+ return message;
+ }
+
+ public String test3YawAxis() throws Throwable {
+
+ loadOpenCVSuccessfulOrSkip();
+ recordSuccessfulOrSkip();
+ analyzeSuccessfulOrSkip();
+
+ String message = "Test Yaw Axis Accuracy";
+
+ Assert.assertEquals("Yaw RMS error", 0.0, mReport.yaw_rms_error, 0.2);
+ Assert.assertEquals("Yaw max error", 0.0, mReport.yaw_max_error, 0.4);
+ return message;
+ }
+
+ public String test4SensorPeriod() throws Throwable {
+
+ loadOpenCVSuccessfulOrSkip();
+ recordSuccessfulOrSkip();
+ analyzeSuccessfulOrSkip();
+
+ String message = "Test Sensor Period";
+
+ Assert.assertEquals("Sensor Period Mean", 5e-3, mReport.sensor_period_avg, 0.2e-3);
+ Assert.assertEquals("Sensor Period Stdev", 0.0, mReport.sensor_period_stdev, 0.5e-3);
+ return message;
+ }
+
+ private void loadOpenCVSuccessfulOrSkip() throws SensorTestStateNotSupportedException {
+ if (!mOpenCVLoadSuccessful)
+            throw new SensorTestStateNotSupportedException("Skipped because OpenCV could not be loaded");
+ }
+
+ private void recordSuccessfulOrSkip() throws SensorTestStateNotSupportedException {
+ if (!mRecordSuccessful)
+ throw new SensorTestStateNotSupportedException("Skipped due to record failure.");
+ }
+
+ private void analyzeSuccessfulOrSkip() throws SensorTestStateNotSupportedException {
+ if (mReport == null || mReport.error)
+ throw new SensorTestStateNotSupportedException("Skipped due to CV Analysis failure.");
+ }
+
+ /*
+     *  This function serves as a proxy because appendText is marked as deprecated.
+     *  When appendText is removed, this function will have a different implementation.
+ *
+ */
+ void showUserMessage(String s) {
+ appendText(s);
+ }
+
+ @Override
+ protected void onCreate(Bundle savedInstanceState) {
+
+ super.onCreate(savedInstanceState);
+
+ // GlSurfaceView is not necessary for this test
+ closeGlSurfaceView();
+
+ OpenCVLibrary.loadAsync(this);
+ }
+
+ @Override
+ protected void onPause() {
+ super.onPause();
+ }
+
+ @Override
+ protected void onResume() {
+ super.onResume();
+
+ }
+}
diff --git a/apps/CtsVerifier/src/com/android/cts/verifier/sensors/helpers/OpenCVLibrary.java b/apps/CtsVerifier/src/com/android/cts/verifier/sensors/helpers/OpenCVLibrary.java
new file mode 100644
index 0000000..2f5c873
--- /dev/null
+++ b/apps/CtsVerifier/src/com/android/cts/verifier/sensors/helpers/OpenCVLibrary.java
@@ -0,0 +1,71 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.android.cts.verifier.sensors.helpers;
+
+import android.content.Context;
+import android.os.Looper;
+import android.util.Log;
+
+import org.opencv.android.BaseLoaderCallback;
+import org.opencv.android.LoaderCallbackInterface;
+import org.opencv.android.OpenCVLoader;
+
+import java.util.concurrent.CountDownLatch;
+
+/**
+ * OpenCV library loader class
+ */
+public class OpenCVLibrary {
+
+ private static String TAG = "OpenCVLibraryProbe";
+ private static boolean mLoaded = false;
+
+ /**
+ * Load OpenCV Library in async mode
+ * @param context Activity context
+ */
+ public static void loadAsync(Context context) {
+ // only need to load once
+ if (isLoaded()) return;
+
+ // Load the library through loader
+ OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_2_4_9, context,
+ new BaseLoaderCallback(context) {
+ @Override
+ public void onManagerConnected(int status) {
+ Log.v(TAG, "New Loading status: "+status);
+ switch (status) {
+ case LoaderCallbackInterface.SUCCESS: {
+ mLoaded = true;
+ }
+ break;
+ default: {
+ super.onManagerConnected(status);
+ }
+ break;
+ }
+ }
+ });
+ }
+
+ /**
+ * Test if the library is loaded
+     * @return a boolean indicating whether the OpenCV library is loaded.
+ */
+ public static boolean isLoaded() {
+ return mLoaded;
+ }
+}
diff --git a/apps/CtsVerifier/src/com/android/cts/verifier/tv/MockTvInputService.java b/apps/CtsVerifier/src/com/android/cts/verifier/tv/MockTvInputService.java
index f4460de..e7d1d79 100644
--- a/apps/CtsVerifier/src/com/android/cts/verifier/tv/MockTvInputService.java
+++ b/apps/CtsVerifier/src/com/android/cts/verifier/tv/MockTvInputService.java
@@ -33,15 +33,11 @@
import android.media.tv.TvTrackInfo;
import android.net.Uri;
import android.os.Bundle;
-import android.os.Handler;
-import android.os.Looper;
import android.view.Surface;
import android.view.LayoutInflater;
import android.view.View;
import android.widget.TextView;
-import com.android.cts.verifier.R;
-
import java.util.ArrayList;
import java.util.List;
@@ -78,9 +74,9 @@
new TvTrackInfo.Builder(TvTrackInfo.TYPE_SUBTITLE, "subtitle_eng")
.setLanguage("eng")
.build();
- static final TvTrackInfo sSpaSubtitleTrack =
- new TvTrackInfo.Builder(TvTrackInfo.TYPE_SUBTITLE, "subtitle_spa")
- .setLanguage("spa")
+ static final TvTrackInfo sKorSubtitleTrack =
+ new TvTrackInfo.Builder(TvTrackInfo.TYPE_SUBTITLE, "subtitle_kor")
+ .setLanguage("kor")
.build();
private final BroadcastReceiver mBroadcastReceiver = new BroadcastReceiver() {
@@ -179,7 +175,7 @@
mTracks.add(sEngAudioTrack);
mTracks.add(sSpaAudioTrack);
mTracks.add(sEngSubtitleTrack);
- mTracks.add(sSpaSubtitleTrack);
+ mTracks.add(sKorSubtitleTrack);
}
@Override
diff --git a/apps/CtsVerifier/src/com/android/cts/verifier/tv/MockTvInputSetupActivity.java b/apps/CtsVerifier/src/com/android/cts/verifier/tv/MockTvInputSetupActivity.java
index 81a8edc..1d3fd40 100644
--- a/apps/CtsVerifier/src/com/android/cts/verifier/tv/MockTvInputSetupActivity.java
+++ b/apps/CtsVerifier/src/com/android/cts/verifier/tv/MockTvInputSetupActivity.java
@@ -21,18 +21,26 @@
import android.content.ContentValues;
import android.database.Cursor;
import android.media.tv.TvContract;
+import android.media.tv.TvContract.Programs;
import android.media.tv.TvInputInfo;
import android.net.Uri;
import android.os.Bundle;
import android.util.Pair;
import android.view.View;
+import java.util.ArrayList;
+
public class MockTvInputSetupActivity extends Activity {
private static final String TAG = "MockTvInputSetupActivity";
private static final String CHANNEL_NUMBER = "999-0";
private static final String CHANNEL_NAME = "Dummy";
+ private static final String PROGRAM_TITLE = "Dummy Program";
+ private static final String PROGRAM_DESCRIPTION = "Dummy Program Description";
+ private static final long PROGRAM_LENGTH_MILLIS = 60 * 60 * 1000;
+ private static final int PROGRAM_COUNT = 24;
+
private static Object sLock = new Object();
private static Pair<View, Runnable> sLaunchCallback = null;
@@ -55,6 +63,8 @@
return;
}
}
+
+ // Add a channel.
ContentValues values = new ContentValues();
values.put(TvContract.Channels.COLUMN_INPUT_ID, inputId);
values.put(TvContract.Channels.COLUMN_DISPLAY_NUMBER, CHANNEL_NUMBER);
@@ -62,9 +72,27 @@
Uri channelUri = getContentResolver().insert(uri, values);
// If the channel's ID happens to be zero, we add another and delete the one.
if (ContentUris.parseId(channelUri) == 0) {
- getContentResolver().insert(uri, values);
getContentResolver().delete(channelUri, null, null);
+ channelUri = getContentResolver().insert(uri, values);
}
+
+ // Add Programs.
+ values = new ContentValues();
+ values.put(Programs.COLUMN_CHANNEL_ID, ContentUris.parseId(channelUri));
+ values.put(Programs.COLUMN_TITLE, PROGRAM_TITLE);
+ values.put(Programs.COLUMN_SHORT_DESCRIPTION, PROGRAM_DESCRIPTION);
+ long nowMs = System.currentTimeMillis();
+ long startTimeMs = nowMs - nowMs % PROGRAM_LENGTH_MILLIS;
+ ArrayList<ContentValues> list = new ArrayList<>();
+ for (int i = 0; i < PROGRAM_COUNT; ++i) {
+ values.put(Programs.COLUMN_START_TIME_UTC_MILLIS, startTimeMs);
+ values.put(Programs.COLUMN_END_TIME_UTC_MILLIS,
+ startTimeMs + PROGRAM_LENGTH_MILLIS);
+ startTimeMs += PROGRAM_LENGTH_MILLIS;
+ list.add(new ContentValues(values));
+ }
+ getContentResolver().bulkInsert(Programs.CONTENT_URI, list.toArray(
+ new ContentValues[0]));
} finally {
Pair<View, Runnable> launchCallback = null;
synchronized (sLock) {
diff --git a/apps/CtsVerifier/src/com/android/cts/verifier/tv/TvInputDiscoveryTestActivity.java b/apps/CtsVerifier/src/com/android/cts/verifier/tv/TvInputDiscoveryTestActivity.java
index 3d17a1a..4d12d52 100644
--- a/apps/CtsVerifier/src/com/android/cts/verifier/tv/TvInputDiscoveryTestActivity.java
+++ b/apps/CtsVerifier/src/com/android/cts/verifier/tv/TvInputDiscoveryTestActivity.java
@@ -30,7 +30,9 @@
private static final String TAG = "TvInputDiscoveryTestActivity";
private static final Intent TV_APP_INTENT = new Intent(Intent.ACTION_VIEW,
- TvContract.buildChannelUri(0));
+ TvContract.Channels.CONTENT_URI);
+ private static final Intent EPG_INTENT = new Intent(Intent.ACTION_VIEW,
+ TvContract.Programs.CONTENT_URI);
private static final long TIMEOUT_MS = 5l * 60l * 1000l; // 5 mins.
@@ -39,6 +41,8 @@
private View mTuneToChannelItem;
private View mVerifyTuneItem;
private View mVerifyOverlayViewItem;
+ private View mGoToEpgItem;
+ private View mVerifyEpgItem;
private boolean mTuneVerified;
private boolean mOverlayViewVerified;
@@ -63,6 +67,7 @@
setButtonEnabled(mTuneToChannelItem, true);
}
});
+ startActivity(TV_APP_INTENT);
} else if (containsButton(mTuneToChannelItem, v)) {
final Runnable failCallback = new Runnable() {
@Override
@@ -78,7 +83,7 @@
setPassState(mVerifyTuneItem, true);
mTuneVerified = true;
- updatePassState(postTarget, failCallback);
+ goToNextState(postTarget, failCallback);
}
});
MockTvInputService.expectOverlayView(postTarget, new Runnable() {
@@ -88,11 +93,18 @@
setPassState(mVerifyOverlayViewItem, true);
mOverlayViewVerified = true;
- updatePassState(postTarget, failCallback);
+ goToNextState(postTarget, failCallback);
}
});
+ startActivity(TV_APP_INTENT);
+ } else if (containsButton(mGoToEpgItem, v)) {
+ startActivity(EPG_INTENT);
+ setPassState(mGoToEpgItem, true);
+ setButtonEnabled(mVerifyEpgItem, true);
+ } else if (containsButton(mVerifyEpgItem, v)) {
+ setPassState(mVerifyEpgItem, true);
+ getPassButton().setEnabled(true);
}
- startActivity(TV_APP_INTENT);
}
@Override
@@ -106,12 +118,16 @@
mVerifyTuneItem = createAutoItem(R.string.tv_input_discover_test_verify_tune);
mVerifyOverlayViewItem = createAutoItem(
R.string.tv_input_discover_test_verify_overlay_view);
+ mGoToEpgItem = createUserItem(R.string.tv_input_discover_test_go_to_epg,
+ R.string.tv_launch_epg, this);
+ mVerifyEpgItem = createUserItem(R.string.tv_input_discover_test_verify_epg,
+ R.string.tv_input_discover_test_yes, this);
}
- private void updatePassState(View postTarget, Runnable failCallback) {
+ private void goToNextState(View postTarget, Runnable failCallback) {
if (mTuneVerified && mOverlayViewVerified) {
postTarget.removeCallbacks(failCallback);
- getPassButton().setEnabled(true);
+ setButtonEnabled(mGoToEpgItem, true);
}
}
diff --git a/build/module_test_config.mk b/build/module_test_config.mk
index 1a397ac..6584ef2 100644
--- a/build/module_test_config.mk
+++ b/build/module_test_config.mk
@@ -14,7 +14,7 @@
cts_module_test_config := $(if $(wildcard \
$(LOCAL_PATH)/$(CTS_MODULE_TEST_CONFIG)), \
- $(CTS_TESTCASES_OUT)/$(LOCAL_PACKAGE_NAME).config)
+ $(CTS_TESTCASES_OUT)/$(LOCAL_MODULE).config)
ifneq ($(cts_module_test_config),)
$(cts_module_test_config): $(LOCAL_PATH)/$(CTS_MODULE_TEST_CONFIG) | $(ACP)
$(call copy-file-to-target)
diff --git a/build/test_executable.mk b/build/test_executable.mk
index 3a2cb8e..979f59e 100644
--- a/build/test_executable.mk
+++ b/build/test_executable.mk
@@ -25,6 +25,7 @@
LOCAL_CXX_STL := libc++
include $(BUILD_EXECUTABLE)
+include $(BUILD_CTS_MODULE_TEST_CONFIG)
cts_executable_bin :=
$(foreach fp, $(ALL_MODULES.$(LOCAL_MODULE).BUILT) $(ALL_MODULES.$(LOCAL_MODULE)$(TARGET_2ND_ARCH_MODULE_SUFFIX).BUILT),\
@@ -38,6 +39,7 @@
$(cts_executable_xml): PRIVATE_LIST_EXECUTABLE := $(HOST_OUT_EXECUTABLES)/$(LOCAL_MODULE)_list
$(cts_executable_xml): $(HOST_OUT_EXECUTABLES)/$(LOCAL_MODULE)_list
$(cts_executable_xml): $(cts_executable_bin)
+$(cts_executable_xml): $(cts_module_test_config)
$(cts_executable_xml): $(addprefix $(LOCAL_PATH)/,$(LOCAL_SRC_FILES)) $(CTS_EXPECTATIONS) $(CTS_UNSUPPORTED_ABIS) $(CTS_NATIVE_TEST_SCANNER) $(CTS_XML_GENERATOR)
$(hide) echo Generating test description for native package $(PRIVATE_TEST_PACKAGE)
$(hide) mkdir -p $(CTS_TESTCASES_OUT)
@@ -52,4 +54,4 @@
-o $@
# Have the module name depend on the cts files; so the cts files get generated when you run mm/mmm/mma/mmma.
-$(my_register_name) : $(cts_executable_bin) $(cts_executable_xml)
+$(my_register_name) : $(cts_executable_bin) $(cts_executable_xml) $(cts_module_test_config)
diff --git a/build/test_gtest_package.mk b/build/test_gtest_package.mk
index fc468d0..6f71830 100644
--- a/build/test_gtest_package.mk
+++ b/build/test_gtest_package.mk
@@ -24,6 +24,7 @@
LOCAL_PROGUARD_ENABLED := disabled
include $(BUILD_CTS_SUPPORT_PACKAGE)
+include $(BUILD_CTS_MODULE_TEST_CONFIG)
cts_package_xml := $(CTS_TESTCASES_OUT)/$(LOCAL_PACKAGE_NAME).xml
$(cts_package_xml): PRIVATE_PATH := $(LOCAL_PATH)
@@ -33,6 +34,7 @@
$(cts_package_xml): PRIVATE_TEST_LIST := $(LOCAL_PATH)/$(LOCAL_MODULE)_list.txt
$(cts_package_xml): $(LOCAL_PATH)/$(LOCAL_MODULE)_list.txt
$(cts_package_xml): $(cts_support_apks)
+$(cts_package_xml): $(cts_module_test_config)
$(cts_package_xml): $(addprefix $(LOCAL_PATH)/,$(LOCAL_SRC_FILES)) $(CTS_NATIVE_TEST_SCANNER) $(CTS_XML_GENERATOR)
$(hide) echo Generating test description for wrapped native package $(PRIVATE_EXECUTABLE)
$(hide) mkdir -p $(CTS_TESTCASES_OUT)
@@ -48,4 +50,4 @@
-o $@
# Have the module name depend on the cts files; so the cts files get generated when you run mm/mmm/mma/mmma.
-$(my_register_name) : $(cts_package_xml)
+$(my_register_name) : $(cts_package_xml) $(cts_module_test_config)
diff --git a/build/test_host_java_library.mk b/build/test_host_java_library.mk
index 7e86ac9..7fdefb5 100644
--- a/build/test_host_java_library.mk
+++ b/build/test_host_java_library.mk
@@ -18,6 +18,7 @@
#
include $(BUILD_HOST_JAVA_LIBRARY)
+include $(BUILD_CTS_MODULE_TEST_CONFIG)
cts_library_jar := $(CTS_TESTCASES_OUT)/$(LOCAL_MODULE).jar
$(cts_library_jar): $(LOCAL_BUILT_MODULE)
@@ -33,6 +34,7 @@
$(cts_library_xml): PRIVATE_LIBRARY := $(LOCAL_MODULE)
$(cts_library_xml): PRIVATE_JAR_PATH := $(LOCAL_MODULE).jar
$(cts_library_xml): $(cts_library_jar)
+$(cts_library_xml): $(cts_module_test_config)
$(cts_library_xml): $(CTS_EXPECTATIONS) $(CTS_UNSUPPORTED_ABIS) $(CTS_JAVA_TEST_SCANNER_DOCLET) $(CTS_JAVA_TEST_SCANNER) $(CTS_XML_GENERATOR)
$(hide) echo Generating test description for host library $(PRIVATE_LIBRARY)
$(hide) mkdir -p $(CTS_TESTCASES_OUT)
@@ -48,4 +50,4 @@
-o $@
# Have the module name depend on the cts files; so the cts files get generated when you run mm/mmm/mma/mmma.
-$(my_register_name) : $(cts_library_jar) $(cts_library_xml)
+$(my_register_name) : $(cts_library_jar) $(cts_library_xml) $(cts_module_test_config)
diff --git a/build/test_target_java_library.mk b/build/test_target_java_library.mk
index 2d3abfb..fe1000a 100644
--- a/build/test_target_java_library.mk
+++ b/build/test_target_java_library.mk
@@ -19,6 +19,7 @@
# Disable by default so "m cts" will work in emulator builds
LOCAL_DEX_PREOPT := false
include $(BUILD_JAVA_LIBRARY)
+include $(BUILD_CTS_MODULE_TEST_CONFIG)
cts_library_jar := $(CTS_TESTCASES_OUT)/$(LOCAL_MODULE).jar
$(cts_library_jar): $(LOCAL_BUILT_MODULE)
@@ -32,6 +33,7 @@
$(cts_library_xml): PRIVATE_JAR_PATH := $(LOCAL_MODULE).jar
$(cts_library_xml): PRIVATE_RUNTIME_ARGS := $(LOCAL_CTS_TARGET_RUNTIME_ARGS)
$(cts_library_xml): $(cts_library_jar)
+$(cts_library_xml): $(cts_module_test_config)
$(cts_library_xml): $(CTS_EXPECTATIONS) $(CTS_UNSUPPORTED_ABIS) $(CTS_JAVA_TEST_SCANNER_DOCLET) $(CTS_JAVA_TEST_SCANNER) $(CTS_XML_GENERATOR)
$(hide) echo Generating test description for target library $(PRIVATE_LIBRARY)
$(hide) mkdir -p $(CTS_TESTCASES_OUT)
@@ -48,4 +50,4 @@
-o $@
# Have the module name depend on the cts files; so the cts files get generated when you run mm/mmm/mma/mmma.
-$(my_register_name) : $(cts_library_jar) $(cts_library_xml)
+$(my_register_name) : $(cts_library_jar) $(cts_library_xml) $(cts_module_test_config)
diff --git a/build/test_uiautomator.mk b/build/test_uiautomator.mk
index b573d25..a191d72 100644
--- a/build/test_uiautomator.mk
+++ b/build/test_uiautomator.mk
@@ -20,6 +20,7 @@
LOCAL_DEX_PREOPT := false
include $(BUILD_JAVA_LIBRARY)
+include $(BUILD_CTS_MODULE_TEST_CONFIG)
cts_library_jar := $(CTS_TESTCASES_OUT)/$(LOCAL_MODULE).jar
$(cts_library_jar): $(LOCAL_BUILT_MODULE)
@@ -37,6 +38,7 @@
$(cts_library_xml): PRIVATE_LIBRARY := $(LOCAL_MODULE)
$(cts_library_xml): PRIVATE_JAR_PATH := $(LOCAL_MODULE).jar
$(cts_library_xml): $(cts_library_jar)
+$(cts_library_xml): $(cts_module_test_config)
$(cts_library_xml): $(CTS_EXPECTATIONS) $(CTS_UNSUPPORTED_ABIS) $(CTS_JAVA_TEST_SCANNER_DOCLET) $(CTS_JAVA_TEST_SCANNER) $(CTS_XML_GENERATOR)
$(hide) echo Generating test description for uiautomator library $(PRIVATE_LIBRARY)
$(hide) mkdir -p $(CTS_TESTCASES_OUT)
@@ -55,4 +57,4 @@
-o $@
# Have the module name depend on the cts files; so the cts files get generated when you run mm/mmm/mma/mmma.
-$(my_register_name) : $(cts_library_jar) $(cts_library_xml)
+$(my_register_name) : $(cts_library_jar) $(cts_library_xml) $(cts_module_test_config)
diff --git a/hostsidetests/devicepolicy/app/ManagedProfile/src/com/android/cts/managedprofile/ContactsTest.java b/hostsidetests/devicepolicy/app/ManagedProfile/src/com/android/cts/managedprofile/ContactsTest.java
index 093a402..b76d9e4 100644
--- a/hostsidetests/devicepolicy/app/ManagedProfile/src/com/android/cts/managedprofile/ContactsTest.java
+++ b/hostsidetests/devicepolicy/app/ManagedProfile/src/com/android/cts/managedprofile/ContactsTest.java
@@ -28,6 +28,7 @@
import android.os.Build;
import android.os.RemoteException;
import android.provider.ContactsContract;
+import android.provider.ContactsContract.CommonDataKinds.Email;
import android.provider.ContactsContract.CommonDataKinds.Phone;
import android.provider.ContactsContract.CommonDataKinds.Photo;
import android.provider.ContactsContract.PhoneLookup;
@@ -44,10 +45,19 @@
private static final String TEST_ACCOUNT_NAME = "CTS";
private static final String TEST_ACCOUNT_TYPE = "com.android.cts.test";
+ // details of a sample primary contact
private static final String PRIMARY_CONTACT_DISPLAY_NAME = "Primary";
private static final String PRIMARY_CONTACT_PHONE = "00000001";
+ private static final String PRIMARY_CONTACT_EMAIL = "one@primary.com";
+ // details of a sample managed contact
private static final String MANAGED_CONTACT_DISPLAY_NAME = "Managed";
private static final String MANAGED_CONTACT_PHONE = "6891999";
+ private static final String MANAGED_CONTACT_EMAIL = "one@managed.com";
+ // details of a sample primary and a sample managed contact, with the same phone & email
+ private static final String PRIMARY_CONTACT_DISPLAY_NAME_2 = "PrimaryShared";
+ private static final String MANAGED_CONTACT_DISPLAY_NAME_2 = "ManagedShared";
+ private static final String SHARED_CONTACT_PHONE = "00000002";
+ private static final String SHARED_CONTACT_EMAIL = "shared@shared.com";
private DevicePolicyManager mDevicePolicyManager;
private ContentResolver mResolver;
@@ -85,11 +95,12 @@
.getSystemService(Context.DEVICE_POLICY_SERVICE);
}
- public void testPrimaryProfilePhoneLookup_insertedAndfound() throws RemoteException,
+ public void testPrimaryProfilePhoneAndEmailLookup_insertedAndfound() throws RemoteException,
OperationApplicationException, NotFoundException, IOException {
assertFalse(isManagedProfile());
// Do not insert to primary contact
- insertContact(PRIMARY_CONTACT_DISPLAY_NAME, PRIMARY_CONTACT_PHONE, 0);
+ insertContact(PRIMARY_CONTACT_DISPLAY_NAME, PRIMARY_CONTACT_PHONE,
+ PRIMARY_CONTACT_EMAIL, 0);
ContactInfo contactInfo = getContactInfo(PRIMARY_CONTACT_PHONE);
assertNotNull(contactInfo);
@@ -97,13 +108,23 @@
assertFalse(contactInfo.hasPhotoUri());
assertFalse(contactInfo.hasPhotoId());
assertFalse(isEnterpriseContactId(contactInfo.contactId));
+
+ contactInfo = getContactInfoFromEmail(PRIMARY_CONTACT_EMAIL);
+ assertNotNull(contactInfo);
+ assertEquals(PRIMARY_CONTACT_DISPLAY_NAME, contactInfo.displayName);
+ assertFalse(contactInfo.hasPhotoUri());
+ assertFalse(contactInfo.hasPhotoId());
+ assertFalse(isEnterpriseContactId(contactInfo.contactId));
+
}
- public void testManagedProfilePhoneLookup_insertedAndfound() throws RemoteException,
+ public void testManagedProfilePhoneAndEmailLookup_insertedAndfound() throws RemoteException,
OperationApplicationException, NotFoundException, IOException {
assertTrue(isManagedProfile());
// Insert ic_contact_picture as photo in managed contact
- insertContact(MANAGED_CONTACT_DISPLAY_NAME, MANAGED_CONTACT_PHONE,
+ insertContact(MANAGED_CONTACT_DISPLAY_NAME,
+ MANAGED_CONTACT_PHONE,
+ MANAGED_CONTACT_EMAIL,
com.android.cts.managedprofile.R.raw.ic_contact_picture);
ContactInfo contactInfo = getContactInfo(MANAGED_CONTACT_PHONE);
@@ -112,6 +133,56 @@
assertTrue(contactInfo.hasPhotoUri());
assertTrue(contactInfo.hasPhotoId());
assertFalse(isEnterpriseContactId(contactInfo.contactId));
+
+ contactInfo = getContactInfoFromEmail(MANAGED_CONTACT_EMAIL);
+ assertNotNull(contactInfo);
+ assertEquals(MANAGED_CONTACT_DISPLAY_NAME, contactInfo.displayName);
+ assertTrue(contactInfo.hasPhotoUri());
+ assertTrue(contactInfo.hasPhotoId());
+ assertFalse(isEnterpriseContactId(contactInfo.contactId));
+ }
+
+ public void testPrimaryProfileDuplicatedPhoneEmailContact_insertedAndfound() throws
+ RemoteException, OperationApplicationException, NotFoundException, IOException {
+ assertFalse(isManagedProfile());
+ insertContact(PRIMARY_CONTACT_DISPLAY_NAME_2, SHARED_CONTACT_PHONE,
+ SHARED_CONTACT_EMAIL,
+ com.android.cts.managedprofile.R.raw.ic_contact_picture);
+
+ ContactInfo contactInfo = getContactInfo(SHARED_CONTACT_PHONE);
+ assertNotNull(contactInfo);
+ assertEquals(PRIMARY_CONTACT_DISPLAY_NAME_2, contactInfo.displayName);
+ assertTrue(contactInfo.hasPhotoUri());
+ assertTrue(contactInfo.hasPhotoId());
+ assertFalse(isEnterpriseContactId(contactInfo.contactId));
+
+ contactInfo = getContactInfoFromEmail(SHARED_CONTACT_EMAIL);
+ assertNotNull(contactInfo);
+ assertEquals(PRIMARY_CONTACT_DISPLAY_NAME_2, contactInfo.displayName);
+ assertTrue(contactInfo.hasPhotoUri());
+ assertTrue(contactInfo.hasPhotoId());
+ assertFalse(isEnterpriseContactId(contactInfo.contactId));
+ }
+
+ public void testManagedProfileDuplicatedPhoneEmailContact_insertedAndfound() throws
+ RemoteException, OperationApplicationException, NotFoundException, IOException {
+ assertTrue(isManagedProfile());
+ insertContact(MANAGED_CONTACT_DISPLAY_NAME_2, SHARED_CONTACT_PHONE,
+ SHARED_CONTACT_EMAIL, 0);
+
+ ContactInfo contactInfo = getContactInfo(SHARED_CONTACT_PHONE);
+ assertNotNull(contactInfo);
+ assertEquals(MANAGED_CONTACT_DISPLAY_NAME_2, contactInfo.displayName);
+ assertFalse(contactInfo.hasPhotoUri());
+ assertFalse(contactInfo.hasPhotoId());
+ assertFalse(isEnterpriseContactId(contactInfo.contactId));
+
+ contactInfo = getContactInfoFromEmail(SHARED_CONTACT_EMAIL);
+ assertNotNull(contactInfo);
+ assertEquals(MANAGED_CONTACT_DISPLAY_NAME_2, contactInfo.displayName);
+ assertFalse(contactInfo.hasPhotoUri());
+ assertFalse(contactInfo.hasPhotoId());
+ assertFalse(isEnterpriseContactId(contactInfo.contactId));
}
public void testPrimaryProfileEnterprisePhoneLookup_canAccessEnterpriseContact() {
@@ -124,7 +195,66 @@
assertTrue(isEnterpriseContactId(contactInfo.contactId));
}
- public void testPrimaryProfilePhoneLookup_canAccessPrimaryContact() {
+ public void testPrimaryProfileEnterpriseEmailLookup_canAccessEnterpriseContact() {
+ assertFalse(isManagedProfile());
+ ContactInfo contactInfo = getEnterpriseContactInfoFromEmail(MANAGED_CONTACT_EMAIL);
+ assertEquals(MANAGED_CONTACT_DISPLAY_NAME, contactInfo.displayName);
+ assertTrue(contactInfo.hasPhotoUri());
+ // Cannot get photo id in ENTERPRISE_CONTENT_FILTER_URI
+ assertFalse(contactInfo.hasPhotoId());
+ assertTrue(isEnterpriseContactId(contactInfo.contactId));
+ }
+
+ public void testPrimaryProfileEnterprisePhoneLookupDuplicated_canAccessPrimaryContact() {
+ assertFalse(isManagedProfile());
+ ContactInfo contactInfo = getEnterpriseContactInfo(SHARED_CONTACT_PHONE);
+ assertEquals(PRIMARY_CONTACT_DISPLAY_NAME_2, contactInfo.displayName);
+ assertTrue(contactInfo.hasPhotoUri());
+ assertTrue(contactInfo.hasPhotoId());
+ assertFalse(isEnterpriseContactId(contactInfo.contactId));
+ }
+
+ public void testPrimaryProfileEnterpriseEmailLookupDuplicated_canAccessPrimaryContact() {
+ assertFalse(isManagedProfile());
+ ContactInfo contactInfo = getEnterpriseContactInfoFromEmail(SHARED_CONTACT_EMAIL);
+ assertEquals(PRIMARY_CONTACT_DISPLAY_NAME_2, contactInfo.displayName);
+ assertTrue(contactInfo.hasPhotoUri());
+ assertTrue(contactInfo.hasPhotoId());
+ assertFalse(isEnterpriseContactId(contactInfo.contactId));
+ }
+
+ public void testManagedProfileEnterprisePhoneLookupDuplicated_canAccessEnterpriseContact() {
+ assertTrue(isManagedProfile());
+ ContactInfo contactInfo = getEnterpriseContactInfo(SHARED_CONTACT_PHONE);
+ assertEquals(MANAGED_CONTACT_DISPLAY_NAME_2, contactInfo.displayName);
+ assertFalse(contactInfo.hasPhotoUri());
+ assertFalse(contactInfo.hasPhotoId());
+ assertFalse(isEnterpriseContactId(contactInfo.contactId));
+ }
+
+ public void testManagedProfileEnterpriseEmailLookupDuplicated_canAccessEnterpriseContact() {
+ assertTrue(isManagedProfile());
+ ContactInfo contactInfo = getEnterpriseContactInfoFromEmail(SHARED_CONTACT_EMAIL);
+ assertEquals(MANAGED_CONTACT_DISPLAY_NAME_2, contactInfo.displayName);
+ assertFalse(contactInfo.hasPhotoUri());
+ assertFalse(contactInfo.hasPhotoId());
+ assertFalse(isEnterpriseContactId(contactInfo.contactId));
+ }
+
+
+ public void testPrimaryProfilePhoneLookup_canNotAccessEnterpriseContact() {
+ assertFalse(isManagedProfile());
+ ContactInfo contactInfo = getContactInfo(MANAGED_CONTACT_PHONE);
+ assertNull(contactInfo);
+ }
+
+ public void testPrimaryProfileEmailLookup_canNotAccessEnterpriseContact() {
+ assertFalse(isManagedProfile());
+ ContactInfo contactInfo = getContactInfoFromEmail(MANAGED_CONTACT_EMAIL);
+ assertNull(contactInfo);
+ }
+
+ public void testPrimaryProfileEnterprisePhoneLookup_canAccessPrimaryContact() {
assertFalse(isManagedProfile());
ContactInfo contactInfo = getEnterpriseContactInfo(PRIMARY_CONTACT_PHONE);
assertEquals(PRIMARY_CONTACT_DISPLAY_NAME, contactInfo.displayName);
@@ -133,7 +263,16 @@
assertFalse(isEnterpriseContactId(contactInfo.contactId));
}
- public void testManagedProfilePhoneLookup_canAccessEnterpriseContact() {
+ public void testPrimaryProfileEnterpriseEmailLookup_canAccessPrimaryContact() {
+ assertFalse(isManagedProfile());
+ ContactInfo contactInfo = getEnterpriseContactInfoFromEmail(PRIMARY_CONTACT_EMAIL);
+ assertEquals(PRIMARY_CONTACT_DISPLAY_NAME, contactInfo.displayName);
+ assertFalse(contactInfo.hasPhotoUri());
+ assertFalse(contactInfo.hasPhotoId());
+ assertFalse(isEnterpriseContactId(contactInfo.contactId));
+ }
+
+ public void testManagedProfileEnterprisePhoneLookup_canAccessEnterpriseContact() {
assertTrue(isManagedProfile());
ContactInfo contactInfo = getEnterpriseContactInfo(MANAGED_CONTACT_PHONE);
assertEquals(MANAGED_CONTACT_DISPLAY_NAME, contactInfo.displayName);
@@ -142,15 +281,48 @@
assertFalse(isEnterpriseContactId(contactInfo.contactId));
}
- public void testPrimaryProfilePhoneLookup_canNotAccessEnterpriseContact() {
- assertFalse(isManagedProfile());
- ContactInfo contactInfo = getEnterpriseContactInfo(MANAGED_CONTACT_PHONE);
+ public void testManagedProfileEnterpriseEmailLookup_canAccessEnterpriseContact() {
+ assertTrue(isManagedProfile());
+ ContactInfo contactInfo = getEnterpriseContactInfoFromEmail(MANAGED_CONTACT_EMAIL);
+ assertEquals(MANAGED_CONTACT_DISPLAY_NAME, contactInfo.displayName);
+ assertTrue(contactInfo.hasPhotoUri());
+ assertTrue(contactInfo.hasPhotoId());
+ assertFalse(isEnterpriseContactId(contactInfo.contactId));
+ }
+
+ public void testManagedProfileEnterprisePhoneLookup_canNotAccessPrimaryContact() {
+ assertTrue(isManagedProfile());
+ ContactInfo contactInfo = getEnterpriseContactInfo(PRIMARY_CONTACT_PHONE);
+ assertNull(contactInfo);
+ }
+
+ public void testManagedProfileEnterpriseEmailLookup_canNotAccessPrimaryContact() {
+ assertTrue(isManagedProfile());
+ ContactInfo contactInfo = getEnterpriseContactInfoFromEmail(PRIMARY_CONTACT_EMAIL);
assertNull(contactInfo);
}
public void testManagedProfilePhoneLookup_canNotAccessPrimaryContact() {
assertTrue(isManagedProfile());
- ContactInfo contactInfo = getEnterpriseContactInfo(PRIMARY_CONTACT_PHONE);
+ ContactInfo contactInfo = getContactInfo(PRIMARY_CONTACT_PHONE);
+ assertNull(contactInfo);
+ }
+
+ public void testManagedProfileEmailLookup_canNotAccessPrimaryContact() {
+ assertTrue(isManagedProfile());
+ ContactInfo contactInfo = getContactInfoFromEmail(PRIMARY_CONTACT_EMAIL);
+ assertNull(contactInfo);
+ }
+
+ public void testPrimaryProfileEnterpriseEmailLookup_canNotAccessEnterpriseContact() {
+ assertFalse(isManagedProfile());
+ ContactInfo contactInfo = getEnterpriseContactInfoFromEmail(MANAGED_CONTACT_EMAIL);
+ assertNull(contactInfo);
+ }
+
+ public void testPrimaryProfileEnterprisePhoneLookup_canNotAccessEnterpriseContact() {
+ assertFalse(isManagedProfile());
+ ContactInfo contactInfo = getEnterpriseContactInfo(MANAGED_CONTACT_PHONE);
assertNull(contactInfo);
}
@@ -186,9 +358,13 @@
return mDevicePolicyManager.isProfileOwnerApp(adminPackage);
}
- private void insertContact(String displayName, String phoneNumber, int photoResId)
- throws RemoteException,
- OperationApplicationException, NotFoundException, IOException {
+ private void insertContact(String displayName, String phoneNumber, int photoResId) throws
+ RemoteException, OperationApplicationException, NotFoundException, IOException {
+ insertContact(displayName, phoneNumber, null, photoResId);
+ }
+
+ private void insertContact(String displayName, String phoneNumber, String email, int photoResId)
+ throws RemoteException, OperationApplicationException, NotFoundException, IOException {
ArrayList<ContentProviderOperation> ops = new ArrayList<ContentProviderOperation>();
ops.add(ContentProviderOperation
.newInsert(ContactsContract.RawContacts.CONTENT_URI)
@@ -216,6 +392,18 @@
.withValue(ContactsContract.CommonDataKinds.Phone.TYPE,
Phone.TYPE_MOBILE)
.build());
+ ops.add(ContentProviderOperation
+ .newInsert(ContactsContract.Data.CONTENT_URI)
+ .withValueBackReference(ContactsContract.Data.RAW_CONTACT_ID, 0)
+ .withValue(
+ ContactsContract.Data.MIMETYPE,
+ ContactsContract.CommonDataKinds.Email.CONTENT_ITEM_TYPE)
+ .withValue(ContactsContract.CommonDataKinds.Email.ADDRESS,
+ email)
+ .withValue(ContactsContract.CommonDataKinds.Email.TYPE,
+ Email.TYPE_WORK)
+ .build());
+
if (photoResId != 0) {
InputStream phoneInputStream = mContext.getResources().openRawResource(photoResId);
byte[] rawPhoto = getByteFromStream(phoneInputStream);
@@ -256,17 +444,56 @@
return result;
}
+ private ContactInfo getContactInfoFromEmailUri(Uri emailLookupUri, String email) {
+ Uri uri = Uri.withAppendedPath(emailLookupUri, Uri.encode(email));
+ Cursor cursor = mResolver.query(uri,
+ new String[] {
+ Email.CONTACT_ID,
+ Email.DISPLAY_NAME_PRIMARY,
+ Email.PHOTO_URI,
+ Email.PHOTO_ID,
+ Email.PHOTO_THUMBNAIL_URI,
+ }, null, null, null);
+ if (cursor == null) {
+ return null;
+ }
+ ContactInfo result = null;
+ if (cursor.moveToFirst()) {
+ result = new ContactInfo(
+ cursor.getString(cursor.getColumnIndexOrThrow(
+ Email.CONTACT_ID)),
+ cursor.getString(cursor.getColumnIndexOrThrow(
+ Email.DISPLAY_NAME_PRIMARY)),
+ cursor.getString(cursor.getColumnIndexOrThrow(
+ Email.PHOTO_URI)),
+ cursor.getString(cursor.getColumnIndexOrThrow(
+ Email.PHOTO_THUMBNAIL_URI)),
+ cursor.getString(cursor.getColumnIndexOrThrow(
+ Email.PHOTO_ID)));
+ }
+ cursor.close();
+ return result;
+ }
+
private ContactInfo getContactInfo(String phoneNumber) {
return getContactInfoFromUri(PhoneLookup.CONTENT_FILTER_URI,
phoneNumber);
}
+ private ContactInfo getContactInfoFromEmail(String email) {
+ return getContactInfoFromEmailUri(Email.CONTENT_LOOKUP_URI, email);
+ }
+
private ContactInfo getEnterpriseContactInfo(String phoneNumber) {
return getContactInfoFromUri(
PhoneLookup.ENTERPRISE_CONTENT_FILTER_URI,
phoneNumber);
}
+ private ContactInfo getEnterpriseContactInfoFromEmail(String email) {
+ return getContactInfoFromEmailUri(Email.ENTERPRISE_CONTENT_LOOKUP_URI, email);
+ }
+
private void removeAllTestContactsInProfile() {
ArrayList<ContentProviderOperation> ops = new ArrayList<ContentProviderOperation>();
ops.add(ContentProviderOperation.newDelete(RawContacts.CONTENT_URI)
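The new email-lookup helpers above build on the ContactsContract email lookup URIs. As a rough standalone sketch (the class and method names below are invented for illustration), the same lookup outside the test harness could look like this; Email.CONTENT_LOOKUP_URI resolves against the caller's own profile, while Email.ENTERPRISE_CONTENT_LOOKUP_URI may also consult the work profile when cross-profile caller id is enabled.

// Sketch: resolve a display name from an email address (illustrative names).
import android.content.ContentResolver;
import android.database.Cursor;
import android.net.Uri;
import android.provider.ContactsContract.CommonDataKinds.Email;

public final class EmailLookupSample {
    public static String lookupDisplayName(ContentResolver resolver, String email,
            boolean includeWorkProfile) {
        // The enterprise URI can also return a row from the managed profile.
        Uri base = includeWorkProfile
                ? Email.ENTERPRISE_CONTENT_LOOKUP_URI
                : Email.CONTENT_LOOKUP_URI;
        Uri uri = Uri.withAppendedPath(base, Uri.encode(email));
        Cursor cursor = resolver.query(uri,
                new String[] { Email.DISPLAY_NAME_PRIMARY }, null, null, null);
        if (cursor == null) {
            return null;
        }
        try {
            return cursor.moveToFirst()
                    ? cursor.getString(
                            cursor.getColumnIndexOrThrow(Email.DISPLAY_NAME_PRIMARY))
                    : null;
        } finally {
            cursor.close();
        }
    }
}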
diff --git a/hostsidetests/devicepolicy/src/com/android/cts/devicepolicy/LauncherAppsProfileTest.java b/hostsidetests/devicepolicy/src/com/android/cts/devicepolicy/LauncherAppsProfileTest.java
index f8c2e7d..43f1f5a 100644
--- a/hostsidetests/devicepolicy/src/com/android/cts/devicepolicy/LauncherAppsProfileTest.java
+++ b/hostsidetests/devicepolicy/src/com/android/cts/devicepolicy/LauncherAppsProfileTest.java
@@ -32,10 +32,7 @@
@Override
protected void setUp() throws Exception {
super.setUp();
-
- // We need multi user to be supported in order to create a profile of the user owner.
- mHasFeature = mHasFeature && (getMaxNumberOfUsersSupported() > 1);
-
+ mHasFeature = mHasFeature && hasDeviceFeature("android.software.managed_users");
if (mHasFeature) {
removeTestUsers();
installTestApps();
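The setUp change above gates the whole suite on the android.software.managed_users system feature rather than the maximum user count. For reference, a device-side check for the same feature is a one-liner through PackageManager; the wrapper class here is illustrative only.

// Sketch: device-side equivalent of the host-side feature gate above.
import android.content.Context;
import android.content.pm.PackageManager;

final class ManagedUsersFeatureCheck {
    static boolean supportsManagedUsers(Context context) {
        // FEATURE_MANAGED_USERS is the "android.software.managed_users" string.
        return context.getPackageManager()
                .hasSystemFeature(PackageManager.FEATURE_MANAGED_USERS);
    }
}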
diff --git a/hostsidetests/devicepolicy/src/com/android/cts/devicepolicy/ManagedProfileTest.java b/hostsidetests/devicepolicy/src/com/android/cts/devicepolicy/ManagedProfileTest.java
index 5c0126d..24ba691 100644
--- a/hostsidetests/devicepolicy/src/com/android/cts/devicepolicy/ManagedProfileTest.java
+++ b/hostsidetests/devicepolicy/src/com/android/cts/devicepolicy/ManagedProfileTest.java
@@ -45,7 +45,7 @@
super.setUp();
// We need multi user to be supported in order to create a profile of the user owner.
- mHasFeature = mHasFeature && (getMaxNumberOfUsersSupported() > 1) && hasDeviceFeature(
+ mHasFeature = mHasFeature && hasDeviceFeature(
"android.software.managed_users");
if (mHasFeature) {
@@ -91,14 +91,6 @@
assertFalse(listUsers().contains(mUserId));
}
- public void testMaxUsersStrictlyMoreThanOne() throws Exception {
- if (hasDeviceFeature("android.software.managed_users")) {
- assertTrue("Device must support more than 1 user "
- + "if android.software.managed_users feature is available",
- getMaxNumberOfUsersSupported() > 1);
- }
- }
-
public void testCrossProfileIntentFilters() throws Exception {
if (!mHasFeature) {
return;
@@ -247,39 +239,111 @@
try {
// Insert Primary profile Contacts
assertTrue(runDeviceTestsAsUser(MANAGED_PROFILE_PKG, ".ContactsTest",
- "testPrimaryProfilePhoneLookup_insertedAndfound", 0));
+ "testPrimaryProfilePhoneAndEmailLookup_insertedAndfound", 0));
// Insert Managed profile Contacts
assertTrue(runDeviceTestsAsUser(MANAGED_PROFILE_PKG, ".ContactsTest",
- "testManagedProfilePhoneLookup_insertedAndfound", mUserId));
+ "testManagedProfilePhoneAndEmailLookup_insertedAndfound", mUserId));
+ // Insert a primary contact that shares its phone & email with an enterprise contact
+ assertTrue(runDeviceTestsAsUser(MANAGED_PROFILE_PKG, ".ContactsTest",
+ "testPrimaryProfileDuplicatedPhoneEmailContact_insertedAndfound", 0));
+ // Insert an enterprise contact that shares its phone & email with a primary contact
+ assertTrue(runDeviceTestsAsUser(MANAGED_PROFILE_PKG, ".ContactsTest",
+ "testManagedProfileDuplicatedPhoneEmailContact_insertedAndfound", mUserId));
+
// Set cross profile caller id to enabled
assertTrue(runDeviceTestsAsUser(MANAGED_PROFILE_PKG, ".ContactsTest",
"testSetCrossProfileCallerIdDisabled_false", mUserId));
- // Managed user can use ENTERPRISE_CONTENT_FILTER_URI
- // To access managed contacts but not primary contacts
+ // Primary user cannot use the ordinary phone/email lookup API to access managed contacts
assertTrue(runDeviceTestsAsUser(MANAGED_PROFILE_PKG, ".ContactsTest",
- "testManagedProfilePhoneLookup_canAccessEnterpriseContact", mUserId));
+ "testPrimaryProfilePhoneLookup_canNotAccessEnterpriseContact", 0));
assertTrue(runDeviceTestsAsUser(MANAGED_PROFILE_PKG, ".ContactsTest",
- "testManagedProfilePhoneLookup_canNotAccessPrimaryContact", mUserId));
-
- // Primary user can use ENTERPRISE_CONTENT_FILTER_URI
- // To access both primary and managed contacts
+ "testPrimaryProfileEmailLookup_canNotAccessEnterpriseContact", 0));
+ // Primary user can use ENTERPRISE_CONTENT_FILTER_URI to access primary contacts
+ assertTrue(runDeviceTestsAsUser(MANAGED_PROFILE_PKG, ".ContactsTest",
+ "testPrimaryProfileEnterprisePhoneLookup_canAccessPrimaryContact", 0));
+ assertTrue(runDeviceTestsAsUser(MANAGED_PROFILE_PKG, ".ContactsTest",
+ "testPrimaryProfileEnterpriseEmailLookup_canAccessPrimaryContact", 0));
+ // Primary user can use ENTERPRISE_CONTENT_FILTER_URI to access managed profile contacts
assertTrue(runDeviceTestsAsUser(MANAGED_PROFILE_PKG, ".ContactsTest",
"testPrimaryProfileEnterprisePhoneLookup_canAccessEnterpriseContact", 0));
assertTrue(runDeviceTestsAsUser(MANAGED_PROFILE_PKG, ".ContactsTest",
- "testPrimaryProfilePhoneLookup_canAccessPrimaryContact", 0));
+ "testPrimaryProfileEnterpriseEmailLookup_canAccessEnterpriseContact", 0));
+ // When there exist contacts with the same phone/email in primary & enterprise,
+ // primary user can use ENTERPRISE_CONTENT_FILTER_URI to access the primary contact.
+ assertTrue(runDeviceTestsAsUser(MANAGED_PROFILE_PKG, ".ContactsTest",
+ "testPrimaryProfileEnterpriseEmailLookupDuplicated_canAccessPrimaryContact",
+ 0));
+ assertTrue(runDeviceTestsAsUser(MANAGED_PROFILE_PKG, ".ContactsTest",
+ "testPrimaryProfileEnterprisePhoneLookupDuplicated_canAccessPrimaryContact",
+ 0));
+
+ // Managed user cannot use the ordinary phone/email lookup API to access primary contacts
+ assertTrue(runDeviceTestsAsUser(MANAGED_PROFILE_PKG, ".ContactsTest",
+ "testManagedProfilePhoneLookup_canNotAccessPrimaryContact", mUserId));
+ assertTrue(runDeviceTestsAsUser(MANAGED_PROFILE_PKG, ".ContactsTest",
+ "testManagedProfileEmailLookup_canNotAccessPrimaryContact", mUserId));
+ // Managed user can use ENTERPRISE_CONTENT_FILTER_URI to access enterprise contacts
+ assertTrue(runDeviceTestsAsUser(MANAGED_PROFILE_PKG, ".ContactsTest",
+ "testManagedProfileEnterprisePhoneLookup_canAccessEnterpriseContact", mUserId));
+ assertTrue(runDeviceTestsAsUser(MANAGED_PROFILE_PKG, ".ContactsTest",
+ "testManagedProfileEnterpriseEmailLookup_canAccessEnterpriseContact", mUserId));
+ // Managed user cannot use ENTERPRISE_CONTENT_FILTER_URI to access primary contacts
+ assertTrue(runDeviceTestsAsUser(MANAGED_PROFILE_PKG, ".ContactsTest",
+ "testManagedProfileEnterprisePhoneLookup_canNotAccessPrimaryContact", mUserId));
+ assertTrue(runDeviceTestsAsUser(MANAGED_PROFILE_PKG, ".ContactsTest",
+ "testManagedProfileEnterpriseEmailLookup_canNotAccessPrimaryContact", mUserId));
+ // When there exist contacts with the same phone/email in primary & enterprise,
+ // managed user can use ENTERPRISE_CONTENT_FILTER_URI to access the enterprise contact.
+ assertTrue(runDeviceTestsAsUser(MANAGED_PROFILE_PKG, ".ContactsTest",
+ "testManagedProfileEnterpriseEmailLookupDuplicated_canAccessEnterpriseContact",
+ mUserId));
+ assertTrue(runDeviceTestsAsUser(MANAGED_PROFILE_PKG, ".ContactsTest",
+ "testManagedProfileEnterprisePhoneLookupDuplicated_canAccessEnterpriseContact",
+ mUserId));
// Set cross profile caller id to disabled
assertTrue(runDeviceTestsAsUser(MANAGED_PROFILE_PKG, ".ContactsTest",
"testSetCrossProfileCallerIdDisabled_true", mUserId));
- // Primary user cannot use ENTERPRISE_CONTENT_FILTER_URI to access managed contacts
+ // Primary user cannot use the ordinary phone/email lookup API to access managed contacts
assertTrue(runDeviceTestsAsUser(MANAGED_PROFILE_PKG, ".ContactsTest",
"testPrimaryProfilePhoneLookup_canNotAccessEnterpriseContact", 0));
- // Managed user cannot use ENTERPRISE_CONTENT_FILTER_URI to access primary contacts
+ assertTrue(runDeviceTestsAsUser(MANAGED_PROFILE_PKG, ".ContactsTest",
+ "testPrimaryProfileEmailLookup_canNotAccessEnterpriseContact", 0));
+ // Primary user cannot use ENTERPRISE_CONTENT_FILTER_URI to access managed contacts
+ assertTrue(runDeviceTestsAsUser(MANAGED_PROFILE_PKG, ".ContactsTest",
+ "testPrimaryProfileEnterprisePhoneLookup_canNotAccessEnterpriseContact", 0));
+ assertTrue(runDeviceTestsAsUser(MANAGED_PROFILE_PKG, ".ContactsTest",
+ "testPrimaryProfileEnterpriseEmailLookup_canNotAccessEnterpriseContact", 0));
+ // When there exist contacts with the same phone/email in primary & enterprise,
+ // primary user can use ENTERPRISE_CONTENT_FILTER_URI to access primary contacts
+ assertTrue(runDeviceTestsAsUser(MANAGED_PROFILE_PKG, ".ContactsTest",
+ "testPrimaryProfileEnterpriseEmailLookupDuplicated_canAccessPrimaryContact",
+ 0));
+ assertTrue(runDeviceTestsAsUser(MANAGED_PROFILE_PKG, ".ContactsTest",
+ "testPrimaryProfileEnterprisePhoneLookupDuplicated_canAccessPrimaryContact",
+ 0));
+
+ // Managed user cannot use the ordinary phone/email lookup API to access primary contacts
assertTrue(runDeviceTestsAsUser(MANAGED_PROFILE_PKG, ".ContactsTest",
"testManagedProfilePhoneLookup_canNotAccessPrimaryContact", mUserId));
+ assertTrue(runDeviceTestsAsUser(MANAGED_PROFILE_PKG, ".ContactsTest",
+ "testManagedProfileEmailLookup_canNotAccessPrimaryContact", mUserId));
+ // Managed user cannot use ENTERPRISE_CONTENT_FILTER_URI to access primary contacts
+ assertTrue(runDeviceTestsAsUser(MANAGED_PROFILE_PKG, ".ContactsTest",
+ "testManagedProfileEnterprisePhoneLookup_canNotAccessPrimaryContact", mUserId));
+ assertTrue(runDeviceTestsAsUser(MANAGED_PROFILE_PKG, ".ContactsTest",
+ "testManagedProfileEnterpriseEmailLookup_canNotAccessPrimaryContact", mUserId));
+ // When there exist contacts with the same phone/email in primary & enterprise,
+ // managed user can use ENTERPRISE_CONTENT_FILTER_URI to access enterprise contacts
+ assertTrue(runDeviceTestsAsUser(MANAGED_PROFILE_PKG, ".ContactsTest",
+ "testManagedProfileEnterpriseEmailLookupDuplicated_canAccessEnterpriseContact",
+ mUserId));
+ assertTrue(runDeviceTestsAsUser(MANAGED_PROFILE_PKG, ".ContactsTest",
+ "testManagedProfileEnterprisePhoneLookupDuplicated_canAccessEnterpriseContact",
+ mUserId));
} finally {
// Clean up in managed profile and primary profile
runDeviceTestsAsUser(MANAGED_PROFILE_PKG, ".ContactsTest",
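The comment lines above spell out the lookup policy under test: ordinary lookups never cross the profile boundary, the enterprise filter URIs do (while cross-profile caller id is enabled), and on a phone or email collision the caller's own profile wins. A hedged sketch of what that means for a dialer running in the personal profile; the class and method names are illustrative, not part of the CTS sources.

// Sketch: caller-id resolution from the personal profile via the enterprise
// phone-lookup URI (illustrative names).
import android.content.ContentResolver;
import android.database.Cursor;
import android.net.Uri;
import android.provider.ContactsContract.PhoneLookup;

public final class CallerIdSample {
    public static String resolveCallerName(ContentResolver resolver, String phoneNumber) {
        Uri uri = Uri.withAppendedPath(
                PhoneLookup.ENTERPRISE_CONTENT_FILTER_URI, Uri.encode(phoneNumber));
        Cursor cursor = resolver.query(uri,
                new String[] { PhoneLookup.DISPLAY_NAME }, null, null, null);
        if (cursor == null) {
            return null;
        }
        try {
            // If both profiles hold the number, the caller's own profile is
            // preferred, matching the LookupDuplicated tests above.
            return cursor.moveToFirst()
                    ? cursor.getString(
                            cursor.getColumnIndexOrThrow(PhoneLookup.DISPLAY_NAME))
                    : null;
        } finally {
            cursor.close();
        }
    }
}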
diff --git a/hostsidetests/security/src/android/cts/security/SELinuxHostTest.java b/hostsidetests/security/src/android/cts/security/SELinuxHostTest.java
index 6e2c90e..a0d3167 100644
--- a/hostsidetests/security/src/android/cts/security/SELinuxHostTest.java
+++ b/hostsidetests/security/src/android/cts/security/SELinuxHostTest.java
@@ -106,32 +106,27 @@
/* obtain sepolicy file from running device */
devicePolicyFile = File.createTempFile("sepolicy", ".tmp");
devicePolicyFile.deleteOnExit();
- mDevice.executeAdbCommand("pull", "/sys/fs/selinux/policy",
- devicePolicyFile.getAbsolutePath());
+ mDevice.pullFile("/sys/fs/selinux/policy", devicePolicyFile);
/* obtain seapp_contexts file from running device */
deviceSeappFile = File.createTempFile("seapp_contexts", ".tmp");
deviceSeappFile.deleteOnExit();
- mDevice.executeAdbCommand("pull", "/seapp_contexts",
- deviceSeappFile.getAbsolutePath());
+ mDevice.pullFile("/seapp_contexts", deviceSeappFile);
/* obtain file_contexts file from running device */
deviceFcFile = File.createTempFile("file_contexts", ".tmp");
deviceFcFile.deleteOnExit();
- mDevice.executeAdbCommand("pull", "/file_contexts",
- deviceFcFile.getAbsolutePath());
+ mDevice.pullFile("/file_contexts", deviceFcFile);
/* obtain property_contexts file from running device */
devicePcFile = File.createTempFile("property_contexts", ".tmp");
devicePcFile.deleteOnExit();
- mDevice.executeAdbCommand("pull", "/property_contexts",
- devicePcFile.getAbsolutePath());
+ mDevice.pullFile("/property_contexts", devicePcFile);
/* obtain service_contexts file from running device */
deviceSvcFile = File.createTempFile("service_contexts", ".tmp");
deviceSvcFile.deleteOnExit();
- mDevice.executeAdbCommand("pull", "/service_contexts",
- deviceSvcFile.getAbsolutePath());
+ mDevice.pullFile("/service_contexts", deviceSvcFile);
/* retrieve the AOSP *_contexts files from jar */
aospSeappFile = copyResourceToTempFile("/general_seapp_contexts");
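The hunk above replaces five hand-rolled "adb pull" invocations with ITestDevice#pullFile. The repeated pull-to-temp-file pattern could be factored into a helper along these lines; this is only a sketch against the tradefed API as used here, and the helper name is illustrative.

// Sketch: pull a device file into a self-deleting temp file (illustrative helper).
import com.android.tradefed.device.DeviceNotAvailableException;
import com.android.tradefed.device.ITestDevice;
import java.io.File;
import java.io.IOException;

final class DeviceFilePuller {
    static File pullToTempFile(ITestDevice device, String devicePath, String prefix)
            throws IOException, DeviceNotAvailableException {
        File local = File.createTempFile(prefix, ".tmp");
        local.deleteOnExit();
        if (!device.pullFile(devicePath, local)) {
            throw new IOException("Failed to pull " + devicePath);
        }
        return local;
    }
}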
diff --git a/hostsidetests/theme/app/Android.mk b/hostsidetests/theme/app/Android.mk
index 1be2983..70623cb 100644
--- a/hostsidetests/theme/app/Android.mk
+++ b/hostsidetests/theme/app/Android.mk
@@ -26,8 +26,6 @@
LOCAL_PROGUARD_ENABLED := disabled
-LOCAL_STATIC_JAVA_LIBRARIES := android-support-test
-
LOCAL_SRC_FILES := $(call all-java-files-under, src)
#Flags to tell the Android Asset Packaging Tool not to strip for some densities
diff --git a/hostsidetests/theme/app/AndroidManifest.xml b/hostsidetests/theme/app/AndroidManifest.xml
index 2f8fb3b..81a4d9d 100755
--- a/hostsidetests/theme/app/AndroidManifest.xml
+++ b/hostsidetests/theme/app/AndroidManifest.xml
@@ -24,7 +24,7 @@
<uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE" />
<application>
<uses-library android:name="android.test.runner" />
- <activity android:name=".HoloDeviceActivity" >
+ <activity android:name=".HoloDeviceActivity">
<intent-filter>
<action android:name="android.intent.action.MAIN" />
<category android:name="android.intent.category.LAUNCHER" />
@@ -37,13 +37,6 @@
<category android:name="android.intent.category.LAUNCHER" />
</intent-filter>
</activity>
- <activity android:name=".CaptureActivity" />
</application>
- <!-- self-instrumenting test package. -->
- <instrumentation android:name="android.support.test.runner.AndroidJUnitRunner"
- android:targetPackage="android.theme.app"
- android:label="Generates Theme reference images"/>
-
</manifest>
-
diff --git a/hostsidetests/theme/app/res/layout/holo_test.xml b/hostsidetests/theme/app/res/layout/holo_test.xml
index 0aef953..3eed4ba 100644
--- a/hostsidetests/theme/app/res/layout/holo_test.xml
+++ b/hostsidetests/theme/app/res/layout/holo_test.xml
@@ -15,6 +15,8 @@
-->
<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
android:orientation="vertical"
+ android:focusable="true"
+ android:keepScreenOn="true"
android:layout_width="match_parent"
android:layout_height="match_parent">
<android.theme.app.ReferenceViewGroup
diff --git a/hostsidetests/theme/app/src/android/theme/app/CaptureActivity.java b/hostsidetests/theme/app/src/android/theme/app/CaptureActivity.java
deleted file mode 100644
index d241ff6..0000000
--- a/hostsidetests/theme/app/src/android/theme/app/CaptureActivity.java
+++ /dev/null
@@ -1,87 +0,0 @@
-/*
- * Copyright (C) 2014 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package android.theme.app;
-
-import android.app.Activity;
-import android.content.Intent;
-import android.os.Bundle;
-
-import java.util.concurrent.CountDownLatch;
-
-/**
- * Iterates through all themes and all layouts, starting the Activity to capture the images.
- */
-public class CaptureActivity extends Activity {
-
- private static final int REQUEST_CODE = 1;
-
- private static final int NUM_THEMES = 24;
-
- private static final int NUM_LAYOUTS = 47;
-
- private final CountDownLatch mLatch = new CountDownLatch(1);
-
- private int mCurrentTheme = 0;
-
- private int mCurrentLayout = 0;
-
- @Override
- protected void onCreate(Bundle savedInstanceState) {
- super.onCreate(savedInstanceState);
- generateNextImage();
- }
-
- /**
- * Starts the activity to generate the next image.
- */
- private void generateNextImage() {
- Intent intent = new Intent(this, HoloDeviceActivity.class);
- intent.setFlags(Intent.FLAG_ACTIVITY_SINGLE_TOP);
- intent.putExtra(HoloDeviceActivity.EXTRA_THEME, mCurrentTheme);
- intent.putExtra(HoloDeviceActivity.EXTRA_LAYOUT, mCurrentLayout);
- startActivityForResult(intent, REQUEST_CODE);
- }
-
- @Override
- protected void onActivityResult(int requestCode, int resultCode, Intent data) {
- if (requestCode == REQUEST_CODE) {
- if (resultCode == RESULT_OK) {
- mCurrentLayout++;
- if (mCurrentLayout >= NUM_LAYOUTS) {
- mCurrentLayout = 0;
- mCurrentTheme++;
- }
- if (mCurrentTheme < NUM_THEMES) {
- generateNextImage();
- } else {
- finish();
- }
- } else {
- finish();
- }
- }
- }
-
- public void finish() {
- mLatch.countDown();
- super.finish();
- }
-
- public void waitForCompletion() throws InterruptedException {
- mLatch.await();
- }
-}
diff --git a/hostsidetests/theme/app/src/android/theme/app/CaptureHolo.java b/hostsidetests/theme/app/src/android/theme/app/CaptureHolo.java
deleted file mode 100644
index 7e2b2c9..0000000
--- a/hostsidetests/theme/app/src/android/theme/app/CaptureHolo.java
+++ /dev/null
@@ -1,37 +0,0 @@
-/*
- * Copyright (C) 2014 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package android.theme.app;
-
-import android.app.KeyguardManager;
-import android.content.Context;
-import android.test.ActivityInstrumentationTestCase2;
-
-public class CaptureHolo extends ActivityInstrumentationTestCase2<CaptureActivity> {
-
- public CaptureHolo() {
- super(CaptureActivity.class);
- }
-
- public void testCaptureHolo() throws Exception {
- setActivityInitialTouchMode(true);
- CaptureActivity activity = getActivity();
- KeyguardManager keyguardManager =
- (KeyguardManager) activity.getSystemService(Context.KEYGUARD_SERVICE);
- keyguardManager.newKeyguardLock("holo_capture").disableKeyguard();
- activity.waitForCompletion();
- }
-}
diff --git a/hostsidetests/theme/app/src/android/theme/app/HoloDeviceActivity.java b/hostsidetests/theme/app/src/android/theme/app/HoloDeviceActivity.java
index 3939979..8ae9fc8 100644
--- a/hostsidetests/theme/app/src/android/theme/app/HoloDeviceActivity.java
+++ b/hostsidetests/theme/app/src/android/theme/app/HoloDeviceActivity.java
@@ -36,6 +36,7 @@
import android.util.Log;
import android.view.View;
import android.widget.CheckBox;
+import android.widget.DatePicker;
import android.widget.LinearLayout;
import java.io.File;
@@ -50,65 +51,88 @@
public static final String EXTRA_THEME = "holo_theme_extra";
- public static final String EXTRA_LAYOUT = "holo_layout_extra";
-
- public static final String EXTRA_TIMEOUT = "holo_timeout_extra";
-
private static final String TAG = HoloDeviceActivity.class.getSimpleName();
- private static final int TIMEOUT = 1 * 1000;//1 sec
+ /**
+ * The time the CalendarView's adjustment animation needs to settle into its final position.
+ */
+ private static final long CALENDAR_VIEW_ADJUSTMENT_DURATION = 540;
- private View mView;
-
- private String mName;
-
- private Bitmap mBitmap;
+ private Theme mTheme;
private ReferenceViewGroup mViewGroup;
+ private int mLayoutIndex;
+
@Override
- public void onCreate(Bundle icicle) {
+ protected void onCreate(Bundle icicle) {
super.onCreate(icicle);
- setUpUi(getIntent());
+
+ mTheme = THEMES[getIntent().getIntExtra(EXTRA_THEME, 0)];
+ setTheme(mTheme.mId);
+ setContentView(R.layout.holo_test);
+ mViewGroup = (ReferenceViewGroup) findViewById(R.id.reference_view_group);
}
@Override
- public void onNewIntent(Intent intent) {
- super.onNewIntent(intent);
- setUpUi(intent);
+ protected void onResume() {
+ super.onResume();
+ setNextLayout();
+ }
+
+ @Override
+ protected void onPause() {
+ if (!isFinishing()) {
+ // The Activity got paused for some reason, so finish it; otherwise the host
+ // won't move on to the next theme.
+ Log.w(TAG, "onPause called without a call to finish().");
+ finish();
+ }
+ super.onPause();
+ }
+
+ @Override
+ protected void onDestroy() {
+ if (mLayoutIndex != LAYOUTS.length) {
+ Log.w(TAG, "Not all layouts got rendered: " + mLayoutIndex);
+ }
+ Log.i(TAG, "OKAY:" + mTheme.mName);
+ super.onDestroy();
}
/**
- * Configures the UI with the given intent
+ * Sets the next layout in the UI.
*/
- private void setUpUi(Intent intent) {
- final Theme theme = themes[intent.getIntExtra(EXTRA_THEME, 0)];
- final Layout layout = layouts[intent.getIntExtra(EXTRA_LAYOUT, 0)];
- final int timeout = intent.getIntExtra(EXTRA_TIMEOUT, TIMEOUT);
-
- setTheme(theme.mId);
- setContentView(R.layout.holo_test);
-
- mViewGroup = (ReferenceViewGroup) findViewById(R.id.reference_view_group);
-
- mView = getLayoutInflater().inflate(layout.mId, mViewGroup, false);
- mViewGroup.addView(mView);
- if (layout.mModifier != null) {
- layout.mModifier.modifyView(mView);
+ private void setNextLayout() {
+ if (mLayoutIndex >= LAYOUTS.length) {
+ finish();
+ return;
}
- mViewGroup.measure(0, 0);
- mViewGroup.layout(0, 0, mViewGroup.getMeasuredWidth(), mViewGroup.getMeasuredHeight());
- mView.setFocusable(false);
- mName = String.format("%s_%s", theme.mName, layout.mName);
+ final Layout layout = LAYOUTS[mLayoutIndex++];
+ final String layoutName = String.format("%s_%s", mTheme.mName, layout.mName);
- final Handler handler = new Handler();
- handler.postDelayed(new Runnable() {
+ mViewGroup.removeAllViews();
+ final View view = getLayoutInflater().inflate(layout.mId, mViewGroup, false);
+ if (layout.mModifier != null) {
+ layout.mModifier.modifyView(view);
+ }
+ mViewGroup.addView(view);
+ view.setFocusable(false);
+
+ final Runnable generateBitmapRunnable = new Runnable() {
@Override
public void run() {
- new GenerateBitmapTask().execute();
+ new GenerateBitmapTask(view, layoutName).execute();
}
- }, timeout);
- setResult(RESULT_CANCELED);//On success will be changed to OK
+ };
+
+ if (view instanceof DatePicker) {
+ // DatePicker uses a CalendarView that has a non-configurable adjustment duration of
+ // 540ms
+ view.postDelayed(generateBitmapRunnable, CALENDAR_VIEW_ADJUSTMENT_DURATION);
+ } else {
+ view.post(generateBitmapRunnable);
+ }
}
/**
@@ -117,12 +141,14 @@
*/
private class GenerateBitmapTask extends AsyncTask<Void, Void, Boolean> {
- @Override
- protected void onPreExecute() {
- final View v = mView;
- mBitmap = Bitmap.createBitmap(v.getWidth(), v.getHeight(), Bitmap.Config.ARGB_8888);
- final Canvas canvas = new Canvas(mBitmap);
- v.draw(canvas);
+ private final View mView;
+
+ private final String mName;
+
+ public GenerateBitmapTask(final View view, final String name) {
+ super();
+ mView = view;
+ mName = name;
}
@Override
@@ -131,6 +157,16 @@
Log.i(TAG, "External storage for saving bitmaps is not mounted");
return false;
}
+ if (mView.getWidth() == 0 || mView.getHeight() == 0) {
+ Log.w(TAG, "Unable to draw View due to incorrect size: " + mName);
+ return false;
+ }
+
+ final Bitmap bitmap = Bitmap.createBitmap(
+ mView.getWidth(), mView.getHeight(), Bitmap.Config.ARGB_8888);
+ final Canvas canvas = new Canvas(bitmap);
+
+ mView.draw(canvas);
final File dir = new File(Environment.getExternalStorageDirectory(), "cts-holo-assets");
dir.mkdirs();
boolean success = false;
@@ -139,27 +175,23 @@
FileOutputStream stream = null;
try {
stream = new FileOutputStream(file);
- mBitmap.compress(CompressFormat.PNG, 100, stream);
+ success = bitmap.compress(CompressFormat.PNG, 100, stream);
} finally {
if (stream != null) {
stream.close();
}
}
- success = true;
} catch (Exception e) {
Log.e(TAG, e.getMessage());
} finally {
- mBitmap.recycle();
- mBitmap = null;
+ bitmap.recycle();
}
return success;
}
@Override
protected void onPostExecute(Boolean success) {
- Log.i(TAG, (success ? "OKAY" : "ERROR") + ":" + mName);
- setResult(RESULT_OK);
- finish();
+ setNextLayout();
}
}
@@ -178,7 +210,7 @@
}
}
- private static final Theme[] themes = {
+ private static final Theme[] THEMES = {
new Theme(android.R.style.Theme_Holo,
"holo"),
new Theme(android.R.style.Theme_Holo_Dialog,
@@ -247,7 +279,7 @@
}
}
- private static final Layout[] layouts = {
+ private static final Layout[] LAYOUTS = {
new Layout(R.layout.button, "button", null),
new Layout(R.layout.button, "button_pressed", new ViewPressedModifier()),
new Layout(R.layout.checkbox, "checkbox", null),
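The reworked activity renders one layout per pass and schedules the bitmap capture once the view is attached, waiting out the CalendarView animation for DatePicker layouts. The scheduling decision in isolation, as a small sketch with illustrative names that mirrors the logic above:

// Sketch: post the capture immediately, or after the CalendarView settle
// time for DatePicker layouts (illustrative helper).
import android.view.View;
import android.widget.DatePicker;

final class CaptureScheduler {
    // Matches the non-configurable CalendarView settle duration noted above.
    private static final long CALENDAR_VIEW_ADJUSTMENT_DURATION_MS = 540;

    static void scheduleCapture(View view, Runnable captureRunnable) {
        if (view instanceof DatePicker) {
            view.postDelayed(captureRunnable, CALENDAR_VIEW_ADJUSTMENT_DURATION_MS);
        } else {
            view.post(captureRunnable);
        }
    }
}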
diff --git a/hostsidetests/theme/app/src/android/theme/app/ReferenceViewGroup.java b/hostsidetests/theme/app/src/android/theme/app/ReferenceViewGroup.java
index 077d8d7..8d2461b 100644
--- a/hostsidetests/theme/app/src/android/theme/app/ReferenceViewGroup.java
+++ b/hostsidetests/theme/app/src/android/theme/app/ReferenceViewGroup.java
@@ -72,10 +72,6 @@
@Override
protected void onLayout(boolean changed, int l, int t, int r, int b) {
- if (!changed) {
- return;
- }
-
int childCount = getChildCount();
for (int i = 0; i < childCount; i++) {
View child = getChildAt(i);
diff --git a/hostsidetests/theme/src/android/theme/cts/ThemeHostTest.java b/hostsidetests/theme/src/android/theme/cts/ThemeHostTest.java
index da94b15..8326b1f 100644
--- a/hostsidetests/theme/src/android/theme/cts/ThemeHostTest.java
+++ b/hostsidetests/theme/src/android/theme/cts/ThemeHostTest.java
@@ -52,8 +52,6 @@
private static final String TAG = ThemeHostTest.class.getSimpleName();
- private static final int CAPTURE_TIMEOUT = 500;//0.5sec in ms
-
private static final int ADB_TIMEOUT = 60 * 60 * 1000;//60mins in ms
/** The package name of the APK. */
@@ -69,6 +67,8 @@
private static final String START_CMD = String.format(
"am start -W -a android.intent.action.MAIN -n %s/%s.%s", PACKAGE, PACKAGE, CLASS);
+ private static final String CLEAR_GENERATED_CMD = "rm -rf /sdcard/cts-holo-assets/*.png";
+
private static final String STOP_CMD = String.format("am force-stop %s", PACKAGE);
private static final String HARDWARE_TYPE_CMD = "dumpsys | grep android.hardware.type";
@@ -87,10 +87,6 @@
// Intent extra keys
private static final String EXTRA_THEME = "holo_theme_extra";
- private static final String EXTRA_LAYOUT = "holo_layout_extra";
-
- private static final String EXTRA_TIMEOUT = "holo_timeout_extra";
-
private static final String[] THEMES = {
"holo",
"holo_dialog",
@@ -211,7 +207,8 @@
String[] options = {AbiUtils.createAbiFlag(mAbi.getName())};
// Install the APK on the device.
mDevice.installPackage(app, false, options);
-
+ // Remove previously generated images.
+ mDevice.executeShellCommand(CLEAR_GENERATED_CMD);
final String densityProp;
if (mDevice.getSerialNumber().startsWith("emulator-")) {
@@ -261,6 +258,8 @@
mExecutionService.shutdown();
// Remove the APK.
mDevice.uninstallPackage(PACKAGE);
+ // Remove generated images.
+ mDevice.executeShellCommand(CLEAR_GENERATED_CMD);
super.tearDown();
}
@@ -272,7 +271,6 @@
return;
}
-
if (mReferences.isEmpty()) {
Log.logAndDisplay(LogLevel.INFO, TAG,
"Skipped HoloThemes test due to no reference images");
@@ -282,20 +280,18 @@
int numTasks = 0;
for (int i = 0; i < NUM_THEMES; i++) {
final String themeName = THEMES[i];
+ runCapture(i, themeName);
for (int j = 0; j < NUM_LAYOUTS; j++) {
final String name = String.format("%s_%s", themeName, LAYOUTS[j]);
- if (runCapture(i, j, name)) {
- final File ref = mReferences.get(name + ".png");
- if (!ref.exists()) {
- Log.logAndDisplay(LogLevel.INFO, TAG,
- "Skipping theme test due to missing reference for reference image " + name);
- continue;
- }
- mCompletionService.submit(new ComparisonTask(mDevice, ref, name));
- numTasks++;
- } else {
- Log.logAndDisplay(LogLevel.ERROR, TAG, "Capture failed: " + name);
+ final File ref = mReferences.get(name + ".png");
+ if (!ref.exists()) {
+ Log.logAndDisplay(LogLevel.INFO, TAG,
+ "Skipping theme test due to missing reference for reference image " +
+ name);
+ continue;
}
+ mCompletionService.submit(new ComparisonTask(mDevice, ref, name));
+ numTasks++;
}
}
int failures = 0;
@@ -305,11 +301,9 @@
assertTrue(failures + " failures in theme test", failures == 0);
}
- private boolean runCapture(int themeId, int layoutId, String imageName) throws Exception {
+ private void runCapture(int themeId, String themeName) throws Exception {
final StringBuilder sb = new StringBuilder(START_CMD);
sb.append(String.format(INTENT_INTEGER_EXTRA, EXTRA_THEME, themeId));
- sb.append(String.format(INTENT_INTEGER_EXTRA, EXTRA_LAYOUT, layoutId));
- sb.append(String.format(INTENT_INTEGER_EXTRA, EXTRA_TIMEOUT, CAPTURE_TIMEOUT));
final String startCommand = sb.toString();
// Clear logcat
mDevice.executeAdbCommand("logcat", "-c");
@@ -318,9 +312,8 @@
// Start activity
mDevice.executeShellCommand(startCommand);
- boolean success = false;
boolean waiting = true;
- while (waiting) {
+ do {
// Dump logcat.
final String logs = mDevice.executeAdbCommand(
"logcat", "-v", "brief", "-d", CLASS + ":I", "*:S");
@@ -331,20 +324,14 @@
if (line.startsWith("I/" + CLASS)) {
final String[] lineSplit = line.split(":");
final String s = lineSplit[1].trim();
- final String imageNameGenerated = lineSplit[2].trim();
- if (s.equals("OKAY") && imageNameGenerated.equals(imageName)) {
- success = true;
- waiting = false;
- } else if (s.equals("ERROR") && imageNameGenerated.equals(imageName)) {
- success = false;
+ final String themeNameGenerated = lineSplit[2].trim();
+ if (s.equals("OKAY") && themeNameGenerated.equals(themeName)) {
waiting = false;
}
}
}
in.close();
- }
-
- return success;
+ } while (waiting);
}
private static String getDensityBucket(int density) {
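runCapture now launches the activity once per theme and polls logcat until the OKAY:<theme> marker logged from onDestroy() appears. A standalone sketch of that wait loop, assuming tradefed's ITestDevice and using illustrative names; the real test additionally bounds the run with an adb timeout.

// Sketch: block until the activity reports a theme as finished (illustrative).
import com.android.tradefed.device.DeviceNotAvailableException;
import com.android.tradefed.device.ITestDevice;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.StringReader;

final class ThemeCaptureWaiter {
    static void waitForTheme(ITestDevice device, String activityClass, String themeName)
            throws DeviceNotAvailableException, IOException {
        boolean waiting = true;
        do {
            // Dump only the activity's Info-level lines, e.g. "I/<class>: OKAY:<theme>".
            String logs = device.executeAdbCommand(
                    "logcat", "-v", "brief", "-d", activityClass + ":I", "*:S");
            BufferedReader in = new BufferedReader(new StringReader(logs));
            String line;
            while ((line = in.readLine()) != null) {
                if (line.startsWith("I/" + activityClass)
                        && line.contains("OKAY:" + themeName)) {
                    waiting = false;
                }
            }
            in.close();
        } while (waiting);
    }
}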
diff --git a/libs/deviceutillegacy/src/android/webkit/cts/WebViewOnUiThread.java b/libs/deviceutillegacy/src/android/webkit/cts/WebViewOnUiThread.java
index 5cd6f30..2933b0b 100644
--- a/libs/deviceutillegacy/src/android/webkit/cts/WebViewOnUiThread.java
+++ b/libs/deviceutillegacy/src/android/webkit/cts/WebViewOnUiThread.java
@@ -21,6 +21,7 @@
import android.graphics.Bitmap;
import android.graphics.Picture;
import android.graphics.Rect;
+import android.net.Uri;
import android.os.Bundle;
import android.os.Looper;
import android.os.Message;
@@ -36,6 +37,8 @@
import android.webkit.ValueCallback;
import android.webkit.WebBackForwardList;
import android.webkit.WebChromeClient;
+import android.webkit.WebMessage;
+import android.webkit.WebMessagePort;
import android.webkit.WebSettings;
import android.webkit.WebView.HitTestResult;
import android.webkit.WebView.PictureListener;
@@ -307,6 +310,24 @@
});
}
+ public WebMessagePort[] createWebMessageChannel() {
+ return getValue(new ValueGetter<WebMessagePort[]>() {
+ @Override
+ public WebMessagePort[] capture() {
+ return mWebView.createWebMessageChannel();
+ }
+ });
+ }
+
+ public void postWebMessage(final WebMessage message, final Uri targetOrigin) {
+ runOnUiThread(new Runnable() {
+ @Override
+ public void run() {
+ mWebView.postWebMessage(message, targetOrigin);
+ }
+ });
+ }
+
public void addJavascriptInterface(final Object object, final String name) {
runOnUiThread(new Runnable() {
@Override
@@ -629,11 +650,11 @@
});
}
- public void insertVisualStateCallback(final long requestId, final VisualStateCallback callback) {
+ public void postVisualStateCallback(final long requestId, final VisualStateCallback callback) {
runOnUiThread(new Runnable() {
@Override
public void run() {
- mWebView.insertVisualStateCallback(requestId, callback);
+ mWebView.postVisualStateCallback(requestId, callback);
}
});
}
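The two new wrappers surface the WebView message-channel API added in this release. A hedged usage sketch of the underlying calls, assuming the UI thread and a loaded page that listens for the message; the class and variable names are illustrative.

// Sketch: open a message channel, keep one port for replies, transfer the
// other to the page together with a greeting (illustrative names).
import android.net.Uri;
import android.webkit.WebMessage;
import android.webkit.WebMessagePort;
import android.webkit.WebView;

final class WebMessageSample {
    static void sendHello(WebView webView) {
        WebMessagePort[] channel = webView.createWebMessageChannel();
        channel[0].setWebMessageCallback(new WebMessagePort.WebMessageCallback() {
            @Override
            public void onMessage(WebMessagePort port, WebMessage message) {
                // Replies posted by the page arrive here.
            }
        });
        webView.postWebMessage(
                new WebMessage("hello", new WebMessagePort[] { channel[1] }),
                Uri.parse("*"));
    }
}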
diff --git a/suite/cts/deviceTests/tvproviderperf/src/com/android/cts/tvproviderperf/TvProviderPerfTest.java b/suite/cts/deviceTests/tvproviderperf/src/com/android/cts/tvproviderperf/TvProviderPerfTest.java
index df89cae..286d4fd 100644
--- a/suite/cts/deviceTests/tvproviderperf/src/com/android/cts/tvproviderperf/TvProviderPerfTest.java
+++ b/suite/cts/deviceTests/tvproviderperf/src/com/android/cts/tvproviderperf/TvProviderPerfTest.java
@@ -50,6 +50,7 @@
*/
public class TvProviderPerfTest extends CtsAndroidTestCase {
private static final int TRANSACTION_RUNS = 100;
+ private static final int QUERY_RUNS = 10;
private ContentResolver mContentResolver;
private String mInputId;
@@ -76,7 +77,7 @@
}
}
- @TimeoutReq(minutes = 10)
+ @TimeoutReq(minutes = 8)
public void testChannels() throws Exception {
if (!mHasTvInputFramework) return;
double[] averages = new double[4];
@@ -138,11 +139,11 @@
averages[1] = Stat.getAverage(applyBatchTimes);
// Query
- applyBatchTimes = MeasureTime.measure(TRANSACTION_RUNS, new MeasureRun() {
+ applyBatchTimes = MeasureTime.measure(QUERY_RUNS, new MeasureRun() {
@Override
public void run(int i) {
int j = 0;
- try (final Cursor cursor = mContentResolver.query(Channels.CONTENT_URI, null, null,
+ try (Cursor cursor = mContentResolver.query(Channels.CONTENT_URI, null, null,
null, null)) {
while (cursor.moveToNext()) {
++j;
@@ -169,7 +170,7 @@
averages, ResultType.LOWER_BETTER, ResultUnit.MS);
}
- @TimeoutReq(minutes = 15)
+ @TimeoutReq(minutes = 12)
public void testPrograms() throws Exception {
if (!mHasTvInputFramework) return;
double[] averages = new double[6];
@@ -234,7 +235,7 @@
public void run(int i) {
Uri channelUri = channelUris.get(i);
operations.clear();
- try (final Cursor cursor = mContentResolver.query(
+ try (Cursor cursor = mContentResolver.query(
TvContract.buildProgramsUriForChannel(channelUri),
projection, null, null, null)) {
long startTimeMs = 0;
@@ -262,11 +263,11 @@
averages[1] = Stat.getAverage(applyBatchTimes);
// Query
- applyBatchTimes = MeasureTime.measure(TRANSACTION_RUNS, new MeasureRun() {
+ applyBatchTimes = MeasureTime.measure(QUERY_RUNS, new MeasureRun() {
@Override
public void run(int i) {
int j = 0;
- try (final Cursor cursor = mContentResolver.query(Programs.CONTENT_URI, null, null,
+ try (Cursor cursor = mContentResolver.query(Programs.CONTENT_URI, null, null,
null, null)) {
while (cursor.moveToNext()) {
++j;
@@ -279,12 +280,12 @@
averages[2] = Stat.getAverage(applyBatchTimes);
// Query programs with selection
- applyBatchTimes = MeasureTime.measure(NUM_CHANNELS, new MeasureRun() {
+ applyBatchTimes = MeasureTime.measure(QUERY_RUNS, new MeasureRun() {
@Override
public void run(int i) {
Uri channelUri = channelUris.get(i);
int j = 0;
- try (final Cursor cursor = mContentResolver.query(
+ try (Cursor cursor = mContentResolver.query(
TvContract.buildProgramsUriForChannel(
channelUri, 0,
PROGRAM_DURATION_MS * TRANSACTION_SIZE / 2),
diff --git a/suite/cts/deviceTests/videoperf/src/com/android/cts/videoperf/CodecInfo.java b/suite/cts/deviceTests/videoperf/src/com/android/cts/videoperf/CodecInfo.java
index 6459c86..88b005a 100644
--- a/suite/cts/deviceTests/videoperf/src/com/android/cts/videoperf/CodecInfo.java
+++ b/suite/cts/deviceTests/videoperf/src/com/android/cts/videoperf/CodecInfo.java
@@ -16,6 +16,7 @@
package com.android.cts.videoperf;
+import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaCodecInfo.CodecCapabilities;
import android.media.MediaCodecInfo.CodecProfileLevel;
@@ -23,7 +24,9 @@
import android.media.MediaCodecList;
import android.media.MediaFormat;
import android.util.Log;
+import android.util.Range;
+import java.io.IOException;
/**
* Utility class for getting codec information like bit rate, fps, and etc.
@@ -43,32 +46,25 @@
private static final String VIDEO_AVC = MediaFormat.MIMETYPE_VIDEO_AVC;
/**
* Check if given codec with given (w,h) is supported.
+ * @param codecName codec name
* @param mimeType codec type in mime format like MediaFormat.MIMETYPE_VIDEO_AVC
* @param w video width
* @param h video height
- * @param isEncoder whether the codec is encoder or decoder
* @return null if the configuration is not supported.
*/
public static CodecInfo getSupportedFormatInfo(
- String mimeType, int w, int h, boolean isEncoder) {
- MediaCodecList mcl = new MediaCodecList(MediaCodecList.REGULAR_CODECS);
- MediaFormat format = MediaFormat.createVideoFormat(mimeType, w, h);
- String codec = isEncoder
- ? mcl.findEncoderForFormat(format)
- : mcl.findDecoderForFormat(format);
- if (codec == null) { // not supported
+ String codecName, String mimeType, int w, int h) {
+ MediaCodec codec;
+ try {
+ codec = MediaCodec.createByCodecName(codecName);
+ } catch (IOException e) {
return null;
}
- CodecCapabilities cap = null;
- for (MediaCodecInfo info : mcl.getCodecInfos()) {
- if (info.getName().equals(codec)) {
- cap = info.getCapabilitiesForType(mimeType);
- break;
- }
- }
+ CodecCapabilities cap = codec.getCodecInfo().getCapabilitiesForType(mimeType);
if (cap.colorFormats.length == 0) {
Log.w(TAG, "no supported color format");
+ codec.release();
return null;
}
@@ -84,14 +80,35 @@
printIntArray("supported colors", cap.colorFormats);
VideoCapabilities vidCap = cap.getVideoCapabilities();
- if (mimeType.equals(VIDEO_AVC)) {
+ try {
info.mFps = vidCap.getSupportedFrameRatesFor(w, h).getUpper().intValue();
- info.mBitRate = vidCap.getBitrateRange().getUpper();
- Log.i(TAG, "AVC bit rate " + info.mBitRate + " fps " + info.mFps);
+ } catch (IllegalArgumentException e) {
+ Log.w(TAG, "unsupported size");
+ codec.release();
+ return null;
}
+ info.mBitRate = vidCap.getBitrateRange().getUpper();
+ Log.i(TAG, "test bit rate " + info.mBitRate + " fps " + info.mFps);
+ codec.release();
return info;
}
+ public static Range<Double> getAchievableFrameRatesFor(
+ String codecName, String mimeType, int width, int height) {
+ MediaCodec codec;
+ try {
+ codec = MediaCodec.createByCodecName(codecName);
+ } catch (IOException e) {
+ return null;
+ }
+
+ VideoCapabilities cap =
+ codec.getCodecInfo().getCapabilitiesForType(mimeType).getVideoCapabilities();
+ Range<Double> results = cap.getAchievableFrameRatesFor(width, height);
+ codec.release();
+ return results;
+ }
+
// for debugging
private static void printIntArray(String msg, int[] data) {
StringBuilder builder = new StringBuilder();
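The new getAchievableFrameRatesFor helper exposes the codec's reported achievable frame-rate range so the test can sanity-check measured throughput. A usage sketch, assuming it sits alongside CodecInfo in the same package; the wrapper class and method names are illustrative, and the returned range can legitimately be null.

// Sketch: compare a measured fps against the codec's reported achievable range.
package com.android.cts.videoperf;

import android.util.Range;

final class AchievableRateCheck {
    static boolean isWithinReportedRange(String codecName, String mime,
            int width, int height, double measuredFps) {
        Range<Double> reported =
                CodecInfo.getAchievableFrameRatesFor(codecName, mime, width, height);
        if (reported == null) {
            // Codec creation failed or no data reported; nothing to compare against.
            return true;
        }
        return measuredFps >= reported.getLower() && measuredFps <= reported.getUpper();
    }
}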
diff --git a/suite/cts/deviceTests/videoperf/src/com/android/cts/videoperf/VideoEncoderDecoderTest.java b/suite/cts/deviceTests/videoperf/src/com/android/cts/videoperf/VideoEncoderDecoderTest.java
index bf02d9c..28b4feb 100644
--- a/suite/cts/deviceTests/videoperf/src/com/android/cts/videoperf/VideoEncoderDecoderTest.java
+++ b/suite/cts/deviceTests/videoperf/src/com/android/cts/videoperf/VideoEncoderDecoderTest.java
@@ -23,10 +23,13 @@
import android.media.Image;
import android.media.Image.Plane;
import android.media.MediaCodec;
-import android.media.MediaCodecList;
+import android.media.MediaCodecInfo;
import android.media.MediaCodecInfo.CodecCapabilities;
+import android.media.MediaCodecList;
import android.media.MediaFormat;
import android.util.Log;
+import android.util.Range;
+import android.util.Size;
import android.cts.util.CtsAndroidTestCase;
import com.android.cts.util.ResultType;
@@ -36,6 +39,7 @@
import java.io.IOException;
import java.nio.ByteBuffer;
import java.lang.System;
+import java.util.ArrayList;
import java.util.Random;
import java.util.Vector;
@@ -57,6 +61,9 @@
private static final long VIDEO_CODEC_WAIT_TIME_US = 5000;
private static final boolean VERBOSE = false;
private static final String VIDEO_AVC = MediaFormat.MIMETYPE_VIDEO_AVC;
+ private static final String VIDEO_VP8 = MediaFormat.MIMETYPE_VIDEO_VP8;
+ private static final String VIDEO_H263 = MediaFormat.MIMETYPE_VIDEO_H263;
+ private static final String VIDEO_MPEG4 = MediaFormat.MIMETYPE_VIDEO_MPEG4;
private static final int TOTAL_FRAMES = 300;
private static final int NUMBER_OF_REPEAT = 10;
// i frame interval for encoder
@@ -81,14 +88,23 @@
private static final int PIXEL_CHECK_PER_FRAME = 1000;
// RMS error in pixel values above this will be treated as error.
private static final double PIXEL_RMS_ERROR_MARGAIN = 20.0;
+ private double mRmsErrorMargain = PIXEL_RMS_ERROR_MARGAIN;
private Random mRandom;
+ private class TestConfig {
+ public boolean mTestPixels = true;
+ public boolean mTestResult = true;
+ }
+
+ private TestConfig mTestConfig;
+
@Override
protected void setUp() throws Exception {
mEncodedOutputBuffer = new Vector<ByteBuffer>(TOTAL_FRAMES * 2);
// Use time as a seed, hoping to prevent checking pixels in the same pattern
long now = System.currentTimeMillis();
mRandom = new Random(now);
+ mTestConfig = new TestConfig();
super.setUp();
}
@@ -101,23 +117,76 @@
mYDirectBuffer = null;
mUVDirectBuffer = null;
mRandom = null;
+ mTestConfig = null;
super.tearDown();
}
+ private String getEncoderName(String mime) {
+ return getCodecName(mime, true /* isEncoder */);
+ }
+
+ private String getDecoderName(String mime) {
+ return getCodecName(mime, false /* isEncoder */);
+ }
+
+ private String getCodecName(String mime, boolean isEncoder) {
+ MediaCodecList mcl = new MediaCodecList(MediaCodecList.REGULAR_CODECS);
+ for (MediaCodecInfo info : mcl.getCodecInfos()) {
+ if (info.isEncoder() != isEncoder) {
+ continue;
+ }
+ CodecCapabilities caps = null;
+ try {
+ caps = info.getCapabilitiesForType(mime);
+ } catch (IllegalArgumentException e) { // mime is not supported
+ continue;
+ }
+ return info.getName();
+ }
+ return null;
+ }
+
+ private String[] getEncoderName(String mime, boolean isGoog) {
+ return getCodecName(mime, isGoog, true /* isEncoder */);
+ }
+
+ private String[] getDecoderName(String mime, boolean isGoog) {
+ return getCodecName(mime, isGoog, false /* isEncoder */);
+ }
+
+ private String[] getCodecName(String mime, boolean isGoog, boolean isEncoder) {
+ MediaCodecList mcl = new MediaCodecList(MediaCodecList.REGULAR_CODECS);
+ ArrayList<String> result = new ArrayList<String>();
+ for (MediaCodecInfo info : mcl.getCodecInfos()) {
+ if (info.isEncoder() != isEncoder
+ || info.getName().toLowerCase().startsWith("omx.google.") != isGoog) {
+ continue;
+ }
+ CodecCapabilities caps = null;
+ try {
+ caps = info.getCapabilitiesForType(mime);
+ } catch (IllegalArgumentException e) { // mime is not supported
+ continue;
+ }
+ result.add(info.getName());
+ }
+ return result.toArray(new String[result.size()]);
+ }
+
public void testAvc0176x0144() throws Exception {
- doTest(VIDEO_AVC, 176, 144, NUMBER_OF_REPEAT);
+ doTestDefault(VIDEO_AVC, 176, 144);
}
public void testAvc0352x0288() throws Exception {
- doTest(VIDEO_AVC, 352, 288, NUMBER_OF_REPEAT);
+ doTestDefault(VIDEO_AVC, 352, 288);
}
public void testAvc0720x0480() throws Exception {
- doTest(VIDEO_AVC, 720, 480, NUMBER_OF_REPEAT);
+ doTestDefault(VIDEO_AVC, 720, 480);
}
public void testAvc1280x0720() throws Exception {
- doTest(VIDEO_AVC, 1280, 720, NUMBER_OF_REPEAT);
+ doTestDefault(VIDEO_AVC, 1280, 720);
}
/**
@@ -126,7 +195,123 @@
* which is not specified in API documentation.
*/
public void testAvc1920x1072() throws Exception {
- doTest(VIDEO_AVC, 1920, 1072, NUMBER_OF_REPEAT);
+ doTestDefault(VIDEO_AVC, 1920, 1072);
+ }
+
+ // Avc tests
+ public void testAvc0320x0240Other() throws Exception {
+ doTestOther(VIDEO_AVC, 320, 240);
+ }
+
+ public void testAvc0320x0240Goog() throws Exception {
+ doTestGoog(VIDEO_AVC, 320, 240);
+ }
+
+ public void testAvc0720x0480Other() throws Exception {
+ doTestOther(VIDEO_AVC, 720, 480);
+ }
+
+ public void testAvc0720x0480Goog() throws Exception {
+ doTestGoog(VIDEO_AVC, 720, 480);
+ }
+
+ public void testAvc1280x0720Other() throws Exception {
+ doTestOther(VIDEO_AVC, 1280, 720);
+ }
+
+ public void testAvc1280x0720Goog() throws Exception {
+ doTestGoog(VIDEO_AVC, 1280, 720);
+ }
+
+ public void testAvc1920x1080Other() throws Exception {
+ doTestOther(VIDEO_AVC, 1920, 1080);
+ }
+
+ public void testAvc1920x1080Goog() throws Exception {
+ doTestGoog(VIDEO_AVC, 1920, 1080);
+ }
+
+ // Vp8 tests
+ public void testVp80320x0180Other() throws Exception {
+ doTestOther(VIDEO_VP8, 320, 180);
+ }
+
+ public void testVp80320x0180Goog() throws Exception {
+ doTestGoog(VIDEO_VP8, 320, 180);
+ }
+
+ public void testVp80640x0360Other() throws Exception {
+ doTestOther(VIDEO_VP8, 640, 360);
+ }
+
+ public void testVp80640x0360Goog() throws Exception {
+ doTestGoog(VIDEO_VP8, 640, 360);
+ }
+
+ public void testVp81280x0720Other() throws Exception {
+ doTestOther(VIDEO_VP8, 1280, 720);
+ }
+
+ public void testVp81280x0720Goog() throws Exception {
+ doTestGoog(VIDEO_VP8, 1280, 720);
+ }
+
+ public void testVp81920x1080Other() throws Exception {
+ doTestOther(VIDEO_VP8, 1920, 1080);
+ }
+
+ public void testVp81920x1080Goog() throws Exception {
+ doTestGoog(VIDEO_VP8, 1920, 1080);
+ }
+
+ // H263 tests
+ public void testH2630176x0144Other() throws Exception {
+ doTestOther(VIDEO_H263, 176, 144);
+ }
+
+ public void testH2630176x0144Goog() throws Exception {
+ doTestGoog(VIDEO_H263, 176, 144);
+ }
+
+ public void testH2630352x0288Other() throws Exception {
+ doTestOther(VIDEO_H263, 352, 288);
+ }
+
+ public void testH2630352x0288Goog() throws Exception {
+ doTestGoog(VIDEO_H263, 352, 288);
+ }
+
+ // Mpeg4 tests
+ public void testMpeg40176x0144Other() throws Exception {
+ doTestOther(VIDEO_MPEG4, 176, 144);
+ }
+
+ public void testMpeg40176x0144Goog() throws Exception {
+ doTestGoog(VIDEO_MPEG4, 176, 144);
+ }
+
+ public void testMpeg40352x0288Other() throws Exception {
+ doTestOther(VIDEO_MPEG4, 352, 288);
+ }
+
+ public void testMpeg40352x0288Goog() throws Exception {
+ doTestGoog(VIDEO_MPEG4, 352, 288);
+ }
+
+ public void testMpeg40640x0480Other() throws Exception {
+ doTestOther(VIDEO_MPEG4, 640, 480);
+ }
+
+ public void testMpeg40640x0480Goog() throws Exception {
+ doTestGoog(VIDEO_MPEG4, 640, 480);
+ }
+
+ public void testMpeg41280x0720Other() throws Exception {
+ doTestOther(VIDEO_MPEG4, 1280, 720);
+ }
+
+ public void testMpeg41280x0720Goog() throws Exception {
+ doTestGoog(VIDEO_MPEG4, 1280, 720);
}
private boolean isSrcSemiPlanar() {
@@ -156,20 +341,75 @@
}
}
+ private void doTestGoog(String mimeType, int w, int h) throws Exception {
+ mTestConfig.mTestPixels = false;
+ mTestConfig.mTestResult = false;
+ doTest(true /* isGoog */, mimeType, w, h, NUMBER_OF_REPEAT);
+ }
+
+ private void doTestOther(String mimeType, int w, int h) throws Exception {
+ mTestConfig.mTestPixels = false;
+ doTest(false /* isGoog */, mimeType, w, h, NUMBER_OF_REPEAT);
+ }
+
+ private void doTestDefault(String mimeType, int w, int h) throws Exception {
+ mTestConfig.mTestResult = false;
+
+ String encoderName = getEncoderName(mimeType);
+ if (encoderName == null) {
+ Log.i(TAG, "Encoder for " + mimeType + " not found");
+ return;
+ }
+
+ String decoderName = getDecoderName(mimeType);
+ if (decoderName == null) {
+ Log.i(TAG, "Encoder for " + mimeType + " not found");
+ return;
+ }
+
+ doTestByName(encoderName, decoderName, mimeType, w, h, NUMBER_OF_REPEAT);
+ }
+
/**
* Run encoding / decoding test for given mimeType of codec
+ * @param isGoog test google or non-google codec.
* @param mimeType like video/avc
* @param w video width
* @param h video height
* @param numberRepeat how many times to repeat the encoding / decoding process
*/
- private void doTest(String mimeType, int w, int h, int numberRepeat) throws Exception {
- CodecInfo infoEnc = CodecInfo.getSupportedFormatInfo(mimeType, w, h, true /* encoder */);
+ private void doTest(boolean isGoog, String mimeType, int w, int h, int numberRepeat)
+ throws Exception {
+ String[] encoderNames = getEncoderName(mimeType, isGoog);
+ if (encoderNames.length == 0) {
+ Log.i(TAG, isGoog ? "Google " : "Non-google "
+ + "encoder for " + mimeType + " not found");
+ return;
+ }
+
+ String[] decoderNames = getDecoderName(mimeType, isGoog);
+ if (decoderNames.length == 0) {
+ Log.i(TAG, isGoog ? "Google " : "Non-google "
+ + "decoder for " + mimeType + " not found");
+ return;
+ }
+
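+ // Exercise every encoder/decoder pairing that matched the mime type and the
+ // Google/non-Google selection.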
+ for (String encoderName: encoderNames) {
+ for (String decoderName: decoderNames) {
+ doTestByName(encoderName, decoderName, mimeType, w, h, numberRepeat);
+ }
+ }
+ }
+
+ private void doTestByName(
+ String encoderName, String decoderName, String mimeType, int w, int h, int numberRepeat)
+ throws Exception {
+ CodecInfo infoEnc = CodecInfo.getSupportedFormatInfo(encoderName, mimeType, w, h);
if (infoEnc == null) {
Log.i(TAG, "Encoder " + mimeType + " with " + w + "," + h + " not supported");
return;
}
- CodecInfo infoDec = CodecInfo.getSupportedFormatInfo(mimeType, w, h, false /* encoder */);
+ CodecInfo infoDec = CodecInfo.getSupportedFormatInfo(decoderName, mimeType, w, h);
assertNotNull(infoDec);
mVideoWidth = w;
mVideoHeight = h;
@@ -196,14 +436,15 @@
format.setInteger(MediaFormat.KEY_FRAME_RATE, infoEnc.mFps);
mFrameRate = infoEnc.mFps;
format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, KEY_I_FRAME_INTERVAL);
- double encodingTime = runEncoder(VIDEO_AVC, format, TOTAL_FRAMES);
+
+ double encodingTime = runEncoder(encoderName, format, TOTAL_FRAMES);
// re-initialize format for decoder
format = new MediaFormat();
format.setString(MediaFormat.KEY_MIME, mimeType);
format.setInteger(MediaFormat.KEY_WIDTH, w);
format.setInteger(MediaFormat.KEY_HEIGHT, h);
format.setInteger(MediaFormat.KEY_COLOR_FORMAT, mDstColorFormat);
- double[] decoderResult = runDecoder(VIDEO_AVC, format);
+ double[] decoderResult = runDecoder(decoderName, format);
if (decoderResult == null) {
success = false;
} else {
@@ -227,26 +468,52 @@
ResultUnit.FPS);
getReportLog().printArray("encoder decoder", totalFpsResults, ResultType.HIGHER_BETTER,
ResultUnit.FPS);
+ getReportLog().printValue(mimeType + " encoder average fps for " + w + "x" + h,
+ Stat.getAverage(encoderFpsResults), ResultType.HIGHER_BETTER, ResultUnit.FPS);
+ getReportLog().printValue(mimeType + " decoder average fps for " + w + "x" + h,
+ Stat.getAverage(decoderFpsResults), ResultType.HIGHER_BETTER, ResultUnit.FPS);
getReportLog().printSummary("encoder decoder", Stat.getAverage(totalFpsResults),
ResultType.HIGHER_BETTER, ResultUnit.FPS);
// make sure that rms error is not too big.
for (int i = 0; i < numberRepeat; i++) {
- assertTrue(decoderRmsErrorResults[i] < PIXEL_RMS_ERROR_MARGAIN);
+ if (decoderRmsErrorResults[i] >= mRmsErrorMargain) {
+ fail("rms error is bigger than the limit "
+ + decoderRmsErrorResults[i] + " vs " + mRmsErrorMargain);
+ }
+ }
+
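+ // When result checking is enabled, also verify that the measured average fps
+ // falls inside the range the codec reports as achievable (presumably backed by
+ // MediaCodecInfo.VideoCapabilities#getAchievableFrameRatesFor, given the
+ // helper's name).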
+ if (mTestConfig.mTestResult) {
+ Range<Double> reportedEncoderResults =
+ CodecInfo.getAchievableFrameRatesFor(encoderName, mimeType, w, h);
+ Range<Double> reportedDecoderResults =
+ CodecInfo.getAchievableFrameRatesFor(decoderName, mimeType, w, h);
+ if (reportedEncoderResults == null) {
+ fail("Failed to getAchievableFrameRatesFor "
+ + encoderName + " " + mimeType + " " + w + "x" + h);
+ }
+ if (reportedDecoderResults == null) {
+ fail("Failed to getAchievableFrameRatesFor "
+ + decoderName + " " + mimeType + " " + w + "x" + h);
+ }
+ if (!reportedEncoderResults.contains(Stat.getAverage(encoderFpsResults))) {
+ fail("Expecting achievable frame rate in the rang of " + reportedEncoderResults);
+ }
+ if (!reportedDecoderResults.contains(Stat.getAverage(decoderFpsResults))) {
+ fail("Expecting achievable frame rate in the rang of " + reportedDecoderResults);
+ }
}
}
/**
* run encoder benchmarking
- * @param mimeType encoder type like video/avc
+ * @param encoderName encoder name
* @param format format of media to encode
* @param totalFrames total number of frames to encode
* @return time taken in ms to encode the frames. This does not include initialization time.
*/
- private double runEncoder(String mimeType, MediaFormat format, int totalFrames) {
+ private double runEncoder(String encoderName, MediaFormat format, int totalFrames) {
MediaCodec codec = null;
try {
- MediaCodecList mcl = new MediaCodecList(MediaCodecList.REGULAR_CODECS);
- String encoderName = mcl.findEncoderForFormat(format);
codec = MediaCodec.createByCodecName(encoderName);
codec.configure(
format,
@@ -254,9 +521,9 @@
null /* crypto */,
MediaCodec.CONFIGURE_FLAG_ENCODE);
} catch (IllegalStateException e) {
- Log.e(TAG, "codec '" + mimeType + "' failed configuration.");
+ Log.e(TAG, "codec '" + encoderName + "' failed configuration.");
codec.release();
- assertTrue("codec '" + mimeType + "' failed configuration.", false);
+ assertTrue("codec '" + encoderName + "' failed configuration.", false);
} catch (IOException | NullPointerException e) {
Log.i(TAG, "could not find codec for " + format);
return Double.NaN;
@@ -542,18 +809,16 @@
/**
* run encoder benchmarking with encoded stream stored from encoding phase
- * @param mimeType encoder type like video/avc
+ * @param decoderName decoder name
* @param format format of media to decode
* @return returns length-2 array with 0: time for decoding, 1 : rms error of pixels
*/
- private double[] runDecoder(String mimeType, MediaFormat format) {
- MediaCodecList mcl = new MediaCodecList(MediaCodecList.REGULAR_CODECS);
- String decoderName = mcl.findDecoderForFormat(format);
+ private double[] runDecoder(String decoderName, MediaFormat format) {
MediaCodec codec = null;
try {
codec = MediaCodec.createByCodecName(decoderName);
} catch (IOException | NullPointerException e) {
- Log.i(TAG, "could not find codec for " + format);
+ Log.i(TAG, "could not find decoder for " + format);
return null;
}
codec.configure(format, null /* surface */, null /* crypto */, 0 /* flags */);
@@ -580,6 +845,7 @@
ByteBuffer src = mEncodedOutputBuffer.get(inputBufferCount);
int writeSize = src.capacity();
dstBuf.put(src.array(), 0, writeSize);
+
codec.queueInputBuffer(
inputBufIndex,
0 /* offset */,
@@ -597,29 +863,48 @@
// only do YUV compare on EOS frame if the buffer size is none-zero
if (info.size > 0) {
- Point origin = getOrigin(outFrameCount);
- int i;
+ if (mTestConfig.mTestPixels) {
+ Point origin = getOrigin(outFrameCount);
+ int i;
- // if decoder supports planar or semiplanar, check output with
- // ByteBuffer & Image each on half of the points
- int pixelCheckPerFrame = PIXEL_CHECK_PER_FRAME;
- if (!isDstFlexYUV()) {
- pixelCheckPerFrame /= 2;
- ByteBuffer buf = codec.getOutputBuffer(outputBufIndex);
- if (VERBOSE && (outFrameCount == 0)) {
- printByteBuffer("Y ", buf, 0, 20);
- printByteBuffer("UV ", buf, mVideoWidth * mVideoHeight, 20);
- printByteBuffer("UV ", buf,
- mVideoWidth * mVideoHeight + mVideoWidth * 60, 20);
+ // if decoder supports planar or semiplanar, check output with
+ // ByteBuffer & Image each on half of the points
+ int pixelCheckPerFrame = PIXEL_CHECK_PER_FRAME;
+ if (!isDstFlexYUV()) {
+ pixelCheckPerFrame /= 2;
+ ByteBuffer buf = codec.getOutputBuffer(outputBufIndex);
+ if (VERBOSE && (outFrameCount == 0)) {
+ printByteBuffer("Y ", buf, 0, 20);
+ printByteBuffer("UV ", buf, mVideoWidth * mVideoHeight, 20);
+ printByteBuffer("UV ", buf,
+ mVideoWidth * mVideoHeight + mVideoWidth * 60, 20);
+ }
+ for (i = 0; i < pixelCheckPerFrame; i++) {
+ int w = mRandom.nextInt(mVideoWidth);
+ int h = mRandom.nextInt(mVideoHeight);
+ getPixelValuesFromYUVBuffers(origin.x, origin.y, w, h, expected);
+ getPixelValuesFromOutputBuffer(buf, w, h, decoded);
+ if (VERBOSE) {
+ Log.i(TAG, outFrameCount + "-" + i + "- th round: ByteBuffer:"
+ + " expected "
+ + expected.mY + "," + expected.mU + "," + expected.mV
+ + " decoded "
+ + decoded.mY + "," + decoded.mU + "," + decoded.mV);
+ }
+ totalErrorSquared += expected.calcErrorSquared(decoded);
+ }
}
+
+ Image image = codec.getOutputImage(outputBufIndex);
+ assertTrue(image != null);
for (i = 0; i < pixelCheckPerFrame; i++) {
int w = mRandom.nextInt(mVideoWidth);
int h = mRandom.nextInt(mVideoHeight);
getPixelValuesFromYUVBuffers(origin.x, origin.y, w, h, expected);
- getPixelValuesFromOutputBuffer(buf, w, h, decoded);
+ getPixelValuesFromImage(image, w, h, decoded);
if (VERBOSE) {
- Log.i(TAG, outFrameCount + "-" + i + "- th round: ByteBuffer:"
- + " expected "
+ Log.i(TAG, outFrameCount + "-" + i + "- th round: FlexYUV:"
+ + " expcted "
+ expected.mY + "," + expected.mU + "," + expected.mV
+ " decoded "
+ decoded.mY + "," + decoded.mU + "," + decoded.mV);
@@ -627,23 +912,6 @@
totalErrorSquared += expected.calcErrorSquared(decoded);
}
}
-
- Image image = codec.getOutputImage(outputBufIndex);
- assertTrue(image != null);
- for (i = 0; i < pixelCheckPerFrame; i++) {
- int w = mRandom.nextInt(mVideoWidth);
- int h = mRandom.nextInt(mVideoHeight);
- getPixelValuesFromYUVBuffers(origin.x, origin.y, w, h, expected);
- getPixelValuesFromImage(image, w, h, decoded);
- if (VERBOSE) {
- Log.i(TAG, outFrameCount + "-" + i + "- th round: FlexYUV:"
- + " expcted "
- + expected.mY + "," + expected.mU + "," + expected.mV
- + " decoded "
- + decoded.mY + "," + decoded.mU + "," + decoded.mV);
- }
- totalErrorSquared += expected.calcErrorSquared(decoded);
- }
outFrameCount++;
}
codec.releaseOutputBuffer(outputBufIndex, false /* render */);
@@ -669,7 +937,9 @@
codec.stop();
codec.release();
codec = null;
- assertTrue(outFrameCount >= TOTAL_FRAMES);
+ if (outFrameCount < TOTAL_FRAMES) {
+ fail("Expecting " + TOTAL_FRAMES + " frames but get " + outFrameCount + " instead.");
+ }
// divide by 3 as sum is done for Y, U, V.
double errorRms = Math.sqrt(totalErrorSquared / PIXEL_CHECK_PER_FRAME / outFrameCount / 3);
double[] result = { (double) finish - start, errorRms };
diff --git a/tests/JobScheduler/src/android/jobscheduler/MockJobService.java b/tests/JobScheduler/src/android/jobscheduler/MockJobService.java
index 38a753d..4f549f8 100644
--- a/tests/JobScheduler/src/android/jobscheduler/MockJobService.java
+++ b/tests/JobScheduler/src/android/jobscheduler/MockJobService.java
@@ -46,7 +46,7 @@
public boolean onStartJob(JobParameters params) {
Log.i(TAG, "Test job executing: " + params.getJobId());
- TestEnvironment.getTestEnvironment().notifyExecution(params.getJobId());
+ TestEnvironment.getTestEnvironment().notifyExecution(params);
return false; // No work to do.
}
@@ -63,10 +63,10 @@
public static final class TestEnvironment {
private static TestEnvironment kTestEnvironment;
- public static final int INVALID_JOB_ID = -1;
+ //public static final int INVALID_JOB_ID = -1;
private CountDownLatch mLatch;
- private int mExecutedJobId;
+ private JobParameters mExecutedJobParameters;
public static TestEnvironment getTestEnvironment() {
if (kTestEnvironment == null) {
@@ -75,6 +75,10 @@
return kTestEnvironment;
}
+ public JobParameters getLastJobParameters() {
+ return mExecutedJobParameters;
+ }
+
/**
* Block the test thread, waiting on the JobScheduler to execute some previously scheduled
* job on this service.
@@ -93,9 +97,9 @@
return !mLatch.await(DEFAULT_TIMEOUT_MILLIS, TimeUnit.MILLISECONDS);
}
- private void notifyExecution(int jobId) {
- Log.d(TAG, "Job executed:" + jobId);
- mExecutedJobId = jobId;
+ private void notifyExecution(JobParameters params) {
+ Log.d(TAG, "Job executed:" + params.getJobId());
+ mExecutedJobParameters = params;
mLatch.countDown();
}
@@ -111,7 +115,7 @@
/** Called in each testCase#setup */
public void setUp() {
mLatch = null;
- mExecutedJobId = INVALID_JOB_ID;
+ mExecutedJobParameters = null;
}
}
diff --git a/tests/JobScheduler/src/android/jobscheduler/cts/ConnectivityConstraintTest.java b/tests/JobScheduler/src/android/jobscheduler/cts/ConnectivityConstraintTest.java
index a83f7a9..547b205 100644
--- a/tests/JobScheduler/src/android/jobscheduler/cts/ConnectivityConstraintTest.java
+++ b/tests/JobScheduler/src/android/jobscheduler/cts/ConnectivityConstraintTest.java
@@ -51,6 +51,8 @@
private boolean mHasWifi;
/** Whether the device running these tests supports telephony. */
private boolean mHasTelephony;
+ /** Track whether WiFi was enabled in case we turn it off. */
+ private boolean mInitialWiFiState;
private JobInfo.Builder mBuilder;
@@ -67,6 +69,14 @@
mHasTelephony = packageManager.hasSystemFeature(PackageManager.FEATURE_TELEPHONY);
mBuilder =
new JobInfo.Builder(CONNECTIVITY_JOB_ID, kJobServiceComponent);
+
+ mInitialWiFiState = mWifiManager.isWifiEnabled();
+ }
+
+ @Override
+ public void tearDown() throws Exception {
+ // Ensure that we leave WiFi in its previous state.
+ mWifiManager.setWifiEnabled(mInitialWiFiState);
+ super.tearDown();
}
// --------------------------------------------------------------------------------------------
@@ -202,6 +212,14 @@
}
}
+ /**
+ * Disconnect from WiFi in an attempt to connect to cellular data. Worth noting that this is
+ * best effort - there are no public APIs to force connecting to cell data. We disable WiFi
+ * and wait for a broadcast that we're connected to cell.
+ * We will not call into this function if the device doesn't support telephony.
+ * @see #mHasTelephony
+ * @see #checkDeviceSupportsMobileData()
+ */
private void disconnectWifiToConnectToMobile() throws InterruptedException {
if (mHasWifi && mWifiManager.isWifiEnabled()) {
ConnectivityActionReceiver connectMobileReceiver =
diff --git a/tests/JobScheduler/src/android/jobscheduler/cts/TimingConstraintsTest.java b/tests/JobScheduler/src/android/jobscheduler/cts/TimingConstraintsTest.java
index ed9cadd..40b67c8 100644
--- a/tests/JobScheduler/src/android/jobscheduler/cts/TimingConstraintsTest.java
+++ b/tests/JobScheduler/src/android/jobscheduler/cts/TimingConstraintsTest.java
@@ -17,6 +17,7 @@
import android.annotation.TargetApi;
import android.app.job.JobInfo;
+import android.app.job.JobParameters;
/**
* Schedules jobs with various timing constraints and ensures that they are executed when
@@ -26,6 +27,8 @@
public class TimingConstraintsTest extends ConstraintTest {
private static final int TIMING_JOB_ID = TimingConstraintsTest.class.hashCode() + 0;
private static final int CANCEL_JOB_ID = TimingConstraintsTest.class.hashCode() + 1;
+ private static final int EXPIRED_JOB_ID = TimingConstraintsTest.class.hashCode() + 2;
+ private static final int UNEXPIRED_JOB_ID = TimingConstraintsTest.class.hashCode() + 3;
public void testScheduleOnce() throws Exception {
JobInfo oneTimeJob = new JobInfo.Builder(TIMING_JOB_ID, kJobServiceComponent)
@@ -63,4 +66,44 @@
assertTrue("Cancel failed: job executed when it shouldn't have.",
kTestEnvironment.awaitTimeout());
}
+
+ /**
+ * Ensure that when a job is executed because its deadline has expired, that
+ * {@link JobParameters#isOverrideDeadlineExpired()} returns the correct value.
+ */
+ public void testJobParameters_expiredDeadline() throws Exception {
+
+ JobInfo deadlineJob =
+ new JobInfo.Builder(EXPIRED_JOB_ID, kJobServiceComponent)
+ .setOverrideDeadline(2000L)
+ .build();
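+ // No other constraints are set, so the job can only run because its 2 second
+ // override deadline expires, which should be reflected in the delivered
+ // JobParameters.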
+ kTestEnvironment.setExpectedExecutions(1);
+ mJobScheduler.schedule(deadlineJob);
+ assertTrue("Failed to execute deadline job", kTestEnvironment.awaitExecution());
+ assertTrue("Job that had its deadline expire didn't have" +
+ " JobParameters#isOverrideDeadlineExpired=true",
+ kTestEnvironment.getLastJobParameters().isOverrideDeadlineExpired());
+ }
+
+
+ /**
+ * Ensure that when a job is executed and its deadline hasn't expired, that
+ * {@link JobParameters#isOverrideDeadlineExpired()} returns the correct value.
+ */
+ public void testJobParameters_unexpiredDeadline() throws Exception {
+
+ JobInfo deadlineJob =
+ new JobInfo.Builder(UNEXPIRED_JOB_ID, kJobServiceComponent)
+ .setMinimumLatency(500L)
+ .setRequiresCharging(true)
+ .build();
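+ // No override deadline is set and the charging constraint is satisfied below,
+ // so the job runs "early" and isOverrideDeadlineExpired() must report false.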
+ kTestEnvironment.setExpectedExecutions(1);
+ mJobScheduler.schedule(deadlineJob);
+ // Run everything by pretending the device was just plugged in.
+ sendExpediteStableChargingBroadcast();
+ assertTrue("Failed to execute non-deadline job", kTestEnvironment.awaitExecution());
+ assertFalse("Job that ran early (unexpired) didn't have" +
+ " JobParameters#isOverrideDeadlineExpired=false",
+ kTestEnvironment.getLastJobParameters().isOverrideDeadlineExpired());
+ }
}
\ No newline at end of file
diff --git a/tests/expectations/knownfailures.txt b/tests/expectations/knownfailures.txt
index 6e571a7..ecb7050 100644
--- a/tests/expectations/knownfailures.txt
+++ b/tests/expectations/knownfailures.txt
@@ -89,31 +89,6 @@
bug: 17993121
},
{
- description: "A few WebGL tests are known to fail in WebView",
- names: [
- "android.webgl.cts.WebGLTest#test_conformance_extensions_oes_texture_float_with_video_html",
- "android.webgl.cts.WebGLTest#test_conformance_renderbuffers_framebuffer_object_attachment_html",
- "android.webgl.cts.WebGLTest#test_conformance_rendering_multisample_corruption_html",
- "android.webgl.cts.WebGLTest#test_conformance_textures_tex_image_and_sub_image_2d_with_video_html",
- "android.webgl.cts.WebGLTest#test_conformance_textures_tex_image_and_sub_image_2d_with_video_rgb565_html",
- "android.webgl.cts.WebGLTest#test_conformance_textures_tex_image_and_sub_image_2d_with_video_rgba4444_html",
- "android.webgl.cts.WebGLTest#test_conformance_textures_tex_image_and_sub_image_2d_with_video_rgba5551_html",
- "android.webgl.cts.WebGLTest#test_conformance_textures_texture_npot_html",
- "android.webgl.cts.WebGLTest#test_conformance_textures_texture_npot_video_html",
- "android.webgl.cts.WebGLTest#test_conformance_glsl_misc_empty_main_vert_html",
- "android.webgl.cts.WebGLTest#test_conformance_glsl_misc_gl_position_unset_vert_html",
- "android.webgl.cts.WebGLTest#test_conformance_misc_webgl_specific_html"
- ],
- bug: 17748398
-},
-{
- description: "WebGL test uniformMatrixBadArgs is too strict. Disabled until it's fixed upstream.",
- names: [
- "android.webgl.cts.WebGLTest#test_conformance_more_functions_uniformMatrixBadArgs_html"
- ],
- bug: 18638404
-},
-{
description: "permissions for the API previously used in the test has changed, making it impossible to pass",
names: [
"android.openglperf.cts.GlAppSwitchTest#testGlActivitySwitchingFast",
@@ -169,179 +144,6 @@
bug: 17605875
},
{
- description: "Failures on these tests are known on several devices.",
- names: [
- "android.hardware.cts.SensorBatchingTests#testAccelerometer_fastest_batching",
- "android.hardware.cts.SensorBatchingTests#testAccelerometer_50hz_batching",
- "android.hardware.cts.SensorBatchingTests#testAccelerometer_fastest_flush",
- "android.hardware.cts.SensorBatchingTests#testAccelerometer_50hz_flush",
- "android.hardware.cts.SensorBatchingTests#testMagneticField_fastest_batching",
- "android.hardware.cts.SensorBatchingTests#testMagneticField_50hz_batching",
- "android.hardware.cts.SensorBatchingTests#testMagneticField_fastest_flush",
- "android.hardware.cts.SensorBatchingTests#testMagneticField_50hz_flush",
- "android.hardware.cts.SensorBatchingTests#testOrientation_fastest_batching",
- "android.hardware.cts.SensorBatchingTests#testOrientation_50hz_batching",
- "android.hardware.cts.SensorBatchingTests#testOrientation_fastest_flush",
- "android.hardware.cts.SensorBatchingTests#testOrientation_50hz_flush",
- "android.hardware.cts.SensorBatchingTests#testGyroscope_fastest_batching",
- "android.hardware.cts.SensorBatchingTests#testGyroscope_50hz_batching",
- "android.hardware.cts.SensorBatchingTests#testGyroscope_fastest_flush",
- "android.hardware.cts.SensorBatchingTests#testGyroscope_50hz_flush",
- "android.hardware.cts.SensorBatchingTests#testPressure_fastest_batching",
- "android.hardware.cts.SensorBatchingTests#testPressure_50hz_batching",
- "android.hardware.cts.SensorBatchingTests#testPressure_fastest_flush",
- "android.hardware.cts.SensorBatchingTests#testPressure_50hz_flush",
- "android.hardware.cts.SensorBatchingTests#testGravity_fastest_batching",
- "android.hardware.cts.SensorBatchingTests#testGravity_50hz_batching",
- "android.hardware.cts.SensorBatchingTests#testGravity_fastest_flush",
- "android.hardware.cts.SensorBatchingTests#testGravity_50hz_flush",
- "android.hardware.cts.SensorBatchingTests#testRotationVector_fastest_batching",
- "android.hardware.cts.SensorBatchingTests#testRotationVector_50hz_batching",
- "android.hardware.cts.SensorBatchingTests#testRotationVector_fastest_flush",
- "android.hardware.cts.SensorBatchingTests#testRotationVector_50hz_flush",
- "android.hardware.cts.SensorBatchingTests#testMagneticFieldUncalibrated_fastest_batching",
- "android.hardware.cts.SensorBatchingTests#testMagneticFieldUncalibrated_50hz_batching",
- "android.hardware.cts.SensorBatchingTests#testMagneticFieldUncalibrated_fastest_flush",
- "android.hardware.cts.SensorBatchingTests#testMagneticFieldUncalibrated_50hz_flush",
- "android.hardware.cts.SensorBatchingTests#testGameRotationVector_fastest_batching",
- "android.hardware.cts.SensorBatchingTests#testGameRotationVector_50hz_batching",
- "android.hardware.cts.SensorBatchingTests#testGameRotationVector_fastest_flush",
- "android.hardware.cts.SensorBatchingTests#testGameRotationVector_50hz_flush",
- "android.hardware.cts.SensorBatchingTests#testGyroscopeUncalibrated_fastest_batching",
- "android.hardware.cts.SensorBatchingTests#testGyroscopeUncalibrated_50hz_batching",
- "android.hardware.cts.SensorBatchingTests#testGyroscopeUncalibrated_fastest_flush",
- "android.hardware.cts.SensorBatchingTests#testGyroscopeUncalibrated_50hz_flush",
- "android.hardware.cts.SensorBatchingTests#testLinearAcceleration_fastest_batching",
- "android.hardware.cts.SensorBatchingTests#testLinearAcceleration_50hz_batching",
- "android.hardware.cts.SensorBatchingTests#testLinearAcceleration_fastest_flush",
- "android.hardware.cts.SensorBatchingTests#testLinearAcceleration_50hz_flush",
- "android.hardware.cts.SensorBatchingTests#testGeomagneticRotationVector_fastest_batching",
- "android.hardware.cts.SensorBatchingTests#testGeomagneticRotationVector_50hz_batching",
- "android.hardware.cts.SensorBatchingTests#testGeomagneticRotationVector_fastest_flush",
- "android.hardware.cts.SensorBatchingTests#testGeomagneticRotationVector_50hz_flush",
- "android.hardware.cts.SensorIntegrationTests#testSensorsWithSeveralClients",
- "android.hardware.cts.SensorIntegrationTests#testSensorsMovingRates",
- "android.hardware.cts.SensorIntegrationTests#testAccelerometerAccelerometerStopping",
- "android.hardware.cts.SensorIntegrationTests#testAccelerometerGyroscopeStopping",
- "android.hardware.cts.SensorIntegrationTests#testAccelerometerMagneticFieldStopping",
- "android.hardware.cts.SensorIntegrationTests#testGyroscopeAccelerometerStopping",
- "android.hardware.cts.SensorIntegrationTests#testGyroscopeGyroscopeStopping",
- "android.hardware.cts.SensorIntegrationTests#testGyroscopeMagneticFieldStopping",
- "android.hardware.cts.SensorIntegrationTests#testMagneticFieldAccelerometerStopping",
- "android.hardware.cts.SensorIntegrationTests#testMagneticFieldGyroscopeStopping",
- "android.hardware.cts.SensorIntegrationTests#testMagneticFieldMagneticFieldStopping",
- "android.hardware.cts.SingleSensorTests#testSensorProperties",
- "android.hardware.cts.SingleSensorTests#testAccelerometer_fastest",
- "android.hardware.cts.SingleSensorTests#testAccelerometer_100hz",
- "android.hardware.cts.SingleSensorTests#testAccelerometer_200hz",
- "android.hardware.cts.SingleSensorTests#testAccelerometer_50hz",
- "android.hardware.cts.SingleSensorTests#testAccelerometer_25hz",
- "android.hardware.cts.SingleSensorTests#testAccelerometer_15hz",
- "android.hardware.cts.SingleSensorTests#testAccelerometer_10hz",
- "android.hardware.cts.SingleSensorTests#testAccelerometer_5hz",
- "android.hardware.cts.SingleSensorTests#testAccelerometer_1hz",
- "android.hardware.cts.SingleSensorTests#testMagneticField_fastest",
- "android.hardware.cts.SingleSensorTests#testMagneticField_200hz",
- "android.hardware.cts.SingleSensorTests#testMagneticField_100hz",
- "android.hardware.cts.SingleSensorTests#testMagneticField_50hz",
- "android.hardware.cts.SingleSensorTests#testMagneticField_25hz",
- "android.hardware.cts.SingleSensorTests#testMagneticField_15hz",
- "android.hardware.cts.SingleSensorTests#testMagneticField_10hz",
- "android.hardware.cts.SingleSensorTests#testMagneticField_5hz",
- "android.hardware.cts.SingleSensorTests#testMagneticField_1hz",
- "android.hardware.cts.SingleSensorTests#testOrientation_fastest",
- "android.hardware.cts.SingleSensorTests#testOrientation_200hz",
- "android.hardware.cts.SingleSensorTests#testOrientation_100hz",
- "android.hardware.cts.SingleSensorTests#testOrientation_50hz",
- "android.hardware.cts.SingleSensorTests#testOrientation_25hz",
- "android.hardware.cts.SingleSensorTests#testOrientation_15hz",
- "android.hardware.cts.SingleSensorTests#testOrientation_10hz",
- "android.hardware.cts.SingleSensorTests#testOrientation_5hz",
- "android.hardware.cts.SingleSensorTests#testOrientation_1hz",
- "android.hardware.cts.SingleSensorTests#testGyroscope_fastest",
- "android.hardware.cts.SingleSensorTests#testGyroscope_200hz",
- "android.hardware.cts.SingleSensorTests#testGyroscope_100hz",
- "android.hardware.cts.SingleSensorTests#testGyroscope_50hz",
- "android.hardware.cts.SingleSensorTests#testGyroscope_25hz",
- "android.hardware.cts.SingleSensorTests#testGyroscope_15hz",
- "android.hardware.cts.SingleSensorTests#testGyroscope_10hz",
- "android.hardware.cts.SingleSensorTests#testGyroscope_5hz",
- "android.hardware.cts.SingleSensorTests#testGyroscope_1hz",
- "android.hardware.cts.SingleSensorTests#testPressure_fastest",
- "android.hardware.cts.SingleSensorTests#testPressure_200hz",
- "android.hardware.cts.SingleSensorTests#testPressure_100hz",
- "android.hardware.cts.SingleSensorTests#testPressure_50hz",
- "android.hardware.cts.SingleSensorTests#testPressure_25hz",
- "android.hardware.cts.SingleSensorTests#testPressure_15hz",
- "android.hardware.cts.SingleSensorTests#testPressure_10hz",
- "android.hardware.cts.SingleSensorTests#testPressure_5hz",
- "android.hardware.cts.SingleSensorTests#testPressure_1hz",
- "android.hardware.cts.SingleSensorTests#testGravity_fastest",
- "android.hardware.cts.SingleSensorTests#testGravity_200hz",
- "android.hardware.cts.SingleSensorTests#testGravity_100hz",
- "android.hardware.cts.SingleSensorTests#testGravity_50hz",
- "android.hardware.cts.SingleSensorTests#testGravity_25hz",
- "android.hardware.cts.SingleSensorTests#testGravity_15hz",
- "android.hardware.cts.SingleSensorTests#testGravity_10hz",
- "android.hardware.cts.SingleSensorTests#testGravity_5hz",
- "android.hardware.cts.SingleSensorTests#testGravity_1hz",
- "android.hardware.cts.SingleSensorTests#testRotationVector_fastest",
- "android.hardware.cts.SingleSensorTests#testRotationVector_200hz",
- "android.hardware.cts.SingleSensorTests#testRotationVector_100hz",
- "android.hardware.cts.SingleSensorTests#testRotationVector_50hz",
- "android.hardware.cts.SingleSensorTests#testRotationVector_25hz",
- "android.hardware.cts.SingleSensorTests#testRotationVector_15hz",
- "android.hardware.cts.SingleSensorTests#testRotationVector_10hz",
- "android.hardware.cts.SingleSensorTests#testRotationVector_5hz",
- "android.hardware.cts.SingleSensorTests#testRotationVector_1hz",
- "android.hardware.cts.SingleSensorTests#testMagneticFieldUncalibrated_fastest",
- "android.hardware.cts.SingleSensorTests#testMagneticFieldUncalibrated_200hz",
- "android.hardware.cts.SingleSensorTests#testMagneticFieldUncalibrated_100hz",
- "android.hardware.cts.SingleSensorTests#testMagneticFieldUncalibrated_50hz",
- "android.hardware.cts.SingleSensorTests#testMagneticFieldUncalibrated_25hz",
- "android.hardware.cts.SingleSensorTests#testMagneticFieldUncalibrated_15hz",
- "android.hardware.cts.SingleSensorTests#testMagneticFieldUncalibrated_10hz",
- "android.hardware.cts.SingleSensorTests#testMagneticFieldUncalibrated_5hz",
- "android.hardware.cts.SingleSensorTests#testMagneticFieldUncalibrated_1hz",
- "android.hardware.cts.SingleSensorTests#testGameRotationVector_fastest",
- "android.hardware.cts.SingleSensorTests#testGameRotationVector_200hz",
- "android.hardware.cts.SingleSensorTests#testGameRotationVector_100hz",
- "android.hardware.cts.SingleSensorTests#testGameRotationVector_50hz",
- "android.hardware.cts.SingleSensorTests#testGameRotationVector_25hz",
- "android.hardware.cts.SingleSensorTests#testGameRotationVector_15hz",
- "android.hardware.cts.SingleSensorTests#testGameRotationVector_10hz",
- "android.hardware.cts.SingleSensorTests#testGameRotationVector_5hz",
- "android.hardware.cts.SingleSensorTests#testGameRotationVector_1hz",
- "android.hardware.cts.SingleSensorTests#testGyroscopeUncalibrated_fastest",
- "android.hardware.cts.SingleSensorTests#testGyroscopeUncalibrated_200hz",
- "android.hardware.cts.SingleSensorTests#testGyroscopeUncalibrated_100hz",
- "android.hardware.cts.SingleSensorTests#testGyroscopeUncalibrated_50hz",
- "android.hardware.cts.SingleSensorTests#testGyroscopeUncalibrated_25hz",
- "android.hardware.cts.SingleSensorTests#testGyroscopeUncalibrated_15hz",
- "android.hardware.cts.SingleSensorTests#testGyroscopeUncalibrated_10hz",
- "android.hardware.cts.SingleSensorTests#testGyroscopeUncalibrated_5hz",
- "android.hardware.cts.SingleSensorTests#testGyroscopeUncalibrated_1hz",
- "android.hardware.cts.SingleSensorTests#testGeomagneticRotationVector_fastest",
- "android.hardware.cts.SingleSensorTests#testLinearAcceleration_200hz",
- "android.hardware.cts.SingleSensorTests#testLinearAcceleration_100hz",
- "android.hardware.cts.SingleSensorTests#testLinearAcceleration_50hz",
- "android.hardware.cts.SingleSensorTests#testLinearAcceleration_25hz",
- "android.hardware.cts.SingleSensorTests#testLinearAcceleration_15hz",
- "android.hardware.cts.SingleSensorTests#testLinearAcceleration_10hz",
- "android.hardware.cts.SingleSensorTests#testLinearAcceleration_5hz",
- "android.hardware.cts.SingleSensorTests#testLinearAcceleration_1hz",
- "android.hardware.cts.SensorTest#testSensorTimeStamps"
- ],
- bug: 17675466
-},
-{
- description: "tests will soon become mandatory",
- names: [
- "android.hardware.cts.SensorTest#testBatchAndFlush"
- ],
- bug: 18958411
-},
-{
description: "This test failed on hw decoder that doesn't output frame with the configured format.",
names: [
"android.media.cts.ImageReaderDecoderTest#testHwAVCDecode360pForFlexibleYuv"
diff --git a/tests/tests/app/src/android/app/cts/SearchDialogTest.java b/tests/tests/app/src/android/app/cts/SearchDialogTest.java
new file mode 100644
index 0000000..1cf1ebd
--- /dev/null
+++ b/tests/tests/app/src/android/app/cts/SearchDialogTest.java
@@ -0,0 +1,95 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.app.cts;
+
+import android.app.SearchDialog;
+import android.content.Context;
+import android.test.ActivityInstrumentationTestCase2;
+import android.test.InstrumentationTestCase;
+import android.view.ActionMode;
+import android.view.View;
+import android.view.ViewGroup;
+
+/**
+ * Test {@link SearchDialog}.
+ */
+public class SearchDialogTest extends InstrumentationTestCase {
+
+ private Context mContext;
+
+ @Override
+ protected void setUp() throws Exception {
+ super.setUp();
+ mContext = getInstrumentation().getTargetContext();
+ }
+
+ public void testPrimaryActionModesAreStopped() {
+ SearchDialog.SearchBar searchBar = new SearchDialog.SearchBar(mContext);
+ MockViewGroup viewGroup = new MockViewGroup(mContext);
+ viewGroup.addView(searchBar);
+
+ ActionMode mode = searchBar.startActionModeForChild(null, null, ActionMode.TYPE_PRIMARY);
+
+ assertNull(mode);
+ // Should not bubble up.
+ assertFalse(viewGroup.isStartActionModeForChildTypedCalled);
+ assertFalse(viewGroup.isStartActionModeForChildTypelessCalled);
+
+ mode = searchBar.startActionModeForChild(null, null);
+
+ assertNull(mode);
+ // Should not bubble up.
+ assertFalse(viewGroup.isStartActionModeForChildTypedCalled);
+ assertFalse(viewGroup.isStartActionModeForChildTypelessCalled);
+ }
+
+ public void testFloatingActionModesAreBubbledUp() {
+ SearchDialog.SearchBar searchBar = new SearchDialog.SearchBar(mContext);
+ MockViewGroup viewGroup = new MockViewGroup(mContext);
+ viewGroup.addView(searchBar);
+
+ searchBar.startActionModeForChild(null, null, ActionMode.TYPE_FLOATING);
+
+ // Should bubble up.
+ assertTrue(viewGroup.isStartActionModeForChildTypedCalled);
+ }
+
+ private static class MockViewGroup extends ViewGroup {
+ boolean isStartActionModeForChildTypedCalled = false;
+ boolean isStartActionModeForChildTypelessCalled = false;
+
+ public MockViewGroup(Context context) {
+ super(context);
+ }
+
+ @Override
+ public ActionMode startActionModeForChild(View originalView, ActionMode.Callback callback) {
+ isStartActionModeForChildTypelessCalled = true;
+ return super.startActionModeForChild(originalView, callback);
+ }
+
+ @Override
+ public ActionMode startActionModeForChild(
+ View originalView, ActionMode.Callback callback, int type) {
+ isStartActionModeForChildTypedCalled = true;
+ return super.startActionModeForChild(originalView, callback, type);
+ }
+
+ @Override
+ protected void onLayout(boolean changed, int l, int t, int r, int b) {}
+ }
+}
diff --git a/tests/tests/bluetooth/src/android/bluetooth/cts/AdvertiseCallbackTest.java b/tests/tests/bluetooth/src/android/bluetooth/cts/AdvertiseCallbackTest.java
new file mode 100644
index 0000000..1c68022
--- /dev/null
+++ b/tests/tests/bluetooth/src/android/bluetooth/cts/AdvertiseCallbackTest.java
@@ -0,0 +1,87 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.bluetooth.cts;
+
+import android.bluetooth.le.AdvertiseCallback;
+import android.bluetooth.le.AdvertiseSettings;
+import android.test.AndroidTestCase;
+import android.test.suitebuilder.annotation.SmallTest;
+
+import java.util.HashSet;
+import java.util.Set;
+
+/**
+ * Test of {@link AdvertiseCallback}.
+ */
+public class AdvertiseCallbackTest extends AndroidTestCase {
+
+ private final static int ADVERTISE_TYPE_SUCCESS = 0;
+ private final static int ADVERTISE_TYPE_FAIL = 1;
+
+ private final MockAdvertiser mMockAdvertiser = new MockAdvertiser();
+ private final BleAdvertiseCallback mAdvertiseCallback = new BleAdvertiseCallback();
+
+ @SmallTest
+ public void testAdvertiseSuccess() {
+ mAdvertiseCallback.mAdvertiseType = ADVERTISE_TYPE_SUCCESS;
+ mMockAdvertiser.startAdvertise(mAdvertiseCallback);
+ }
+
+ @SmallTest
+ public void testAdvertiseFailure() {
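+ // The first advertise with this callback is expected to succeed.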
+ mAdvertiseCallback.mAdvertiseType = ADVERTISE_TYPE_SUCCESS;
+ mMockAdvertiser.startAdvertise(mAdvertiseCallback);
+
+ // Second advertise with the same callback should fail.
+ mAdvertiseCallback.mAdvertiseType = ADVERTISE_TYPE_FAIL;
+ mMockAdvertiser.startAdvertise(mAdvertiseCallback);
+ }
+
+ // A mock advertiser which emulate BluetoothLeAdvertiser behavior.
+ private static class MockAdvertiser {
+ private Set<AdvertiseCallback> mCallbacks = new HashSet<>();
+
+ void startAdvertise(AdvertiseCallback callback) {
+ synchronized (mCallbacks) {
+ if (mCallbacks.contains(callback)) {
+ callback.onStartFailure(AdvertiseCallback.ADVERTISE_FAILED_ALREADY_STARTED);
+ } else {
+ callback.onStartSuccess(null);
+ mCallbacks.add(callback);
+ }
+ }
+ }
+ }
+
+ private static class BleAdvertiseCallback extends AdvertiseCallback {
+ int mAdvertiseType = ADVERTISE_TYPE_SUCCESS;
+
+ @Override
+ public void onStartSuccess(AdvertiseSettings settings) {
+ if (mAdvertiseType == ADVERTISE_TYPE_FAIL) {
+ fail("advertise should fail");
+ }
+ }
+
+ @Override
+ public void onStartFailure(int error) {
+ if (mAdvertiseType == ADVERTISE_TYPE_SUCCESS) {
+ assertEquals(AdvertiseCallback.ADVERTISE_FAILED_ALREADY_STARTED, error);
+ }
+ }
+ }
+}
diff --git a/tests/tests/bluetooth/src/android/bluetooth/cts/AdvertiseDataTest.java b/tests/tests/bluetooth/src/android/bluetooth/cts/AdvertiseDataTest.java
new file mode 100644
index 0000000..3f2bf52
--- /dev/null
+++ b/tests/tests/bluetooth/src/android/bluetooth/cts/AdvertiseDataTest.java
@@ -0,0 +1,175 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.bluetooth.cts;
+
+import android.bluetooth.le.AdvertiseData;
+import android.os.Parcel;
+import android.os.ParcelUuid;
+import android.test.AndroidTestCase;
+import android.test.suitebuilder.annotation.SmallTest;
+
+/**
+ * Unit test cases for {@link AdvertiseData}.
+ * <p>
+ * To run the test, use adb shell am instrument -e class 'android.bluetooth.cts.AdvertiseDataTest' -w
+ * 'com.android.bluetooth.tests/android.bluetooth.BluetoothTestRunner'
+ */
+public class AdvertiseDataTest extends AndroidTestCase {
+
+ private AdvertiseData.Builder mAdvertiseDataBuilder;
+
+ @Override
+ protected void setUp() {
+ mAdvertiseDataBuilder = new AdvertiseData.Builder();
+ }
+
+ @SmallTest
+ public void testEmptyData() {
+ Parcel parcel = Parcel.obtain();
+ AdvertiseData data = mAdvertiseDataBuilder.build();
+ data.writeToParcel(parcel, 0);
+ parcel.setDataPosition(0);
+ AdvertiseData dataFromParcel =
+ AdvertiseData.CREATOR.createFromParcel(parcel);
+ assertEquals(data, dataFromParcel);
+ assertFalse(dataFromParcel.getIncludeDeviceName());
+ assertFalse(dataFromParcel.getIncludeTxPowerLevel());
+ assertEquals(0, dataFromParcel.getManufacturerSpecificData().size());
+ assertTrue(dataFromParcel.getServiceData().isEmpty());
+ assertTrue(dataFromParcel.getServiceUuids().isEmpty());
+ }
+
+ @SmallTest
+ public void testEmptyServiceUuid() {
+ Parcel parcel = Parcel.obtain();
+ AdvertiseData data = mAdvertiseDataBuilder.setIncludeDeviceName(true).build();
+ data.writeToParcel(parcel, 0);
+ parcel.setDataPosition(0);
+ AdvertiseData dataFromParcel =
+ AdvertiseData.CREATOR.createFromParcel(parcel);
+ assertEquals(data, dataFromParcel);
+ assertTrue(dataFromParcel.getIncludeDeviceName());
+ assertTrue(dataFromParcel.getServiceUuids().isEmpty());
+ }
+
+ @SmallTest
+ public void testEmptyManufacturerData() {
+ Parcel parcel = Parcel.obtain();
+ int manufacturerId = 50;
+ byte[] manufacturerData = new byte[0];
+ AdvertiseData data =
+ mAdvertiseDataBuilder.setIncludeDeviceName(true)
+ .addManufacturerData(manufacturerId, manufacturerData).build();
+ data.writeToParcel(parcel, 0);
+ parcel.setDataPosition(0);
+ AdvertiseData dataFromParcel =
+ AdvertiseData.CREATOR.createFromParcel(parcel);
+ assertEquals(data, dataFromParcel);
+ TestUtils.assertArrayEquals(new byte[0], dataFromParcel.getManufacturerSpecificData().get(manufacturerId));
+ }
+
+ @SmallTest
+ public void testEmptyServiceData() {
+ Parcel parcel = Parcel.obtain();
+ ParcelUuid uuid = ParcelUuid.fromString("0000110A-0000-1000-8000-00805F9B34FB");
+ byte[] serviceData = new byte[0];
+ AdvertiseData data =
+ mAdvertiseDataBuilder.setIncludeDeviceName(true)
+ .addServiceData(uuid, serviceData).build();
+ data.writeToParcel(parcel, 0);
+ parcel.setDataPosition(0);
+ AdvertiseData dataFromParcel =
+ AdvertiseData.CREATOR.createFromParcel(parcel);
+ assertEquals(data, dataFromParcel);
+ TestUtils.assertArrayEquals(new byte[0], dataFromParcel.getServiceData().get(uuid));
+ }
+
+ @SmallTest
+ public void testServiceUuid() {
+ Parcel parcel = Parcel.obtain();
+ ParcelUuid uuid = ParcelUuid.fromString("0000110A-0000-1000-8000-00805F9B34FB");
+ ParcelUuid uuid2 = ParcelUuid.fromString("0000110B-0000-1000-8000-00805F9B34FB");
+
+ AdvertiseData data =
+ mAdvertiseDataBuilder.setIncludeDeviceName(true)
+ .addServiceUuid(uuid).addServiceUuid(uuid2).build();
+ data.writeToParcel(parcel, 0);
+ parcel.setDataPosition(0);
+ AdvertiseData dataFromParcel =
+ AdvertiseData.CREATOR.createFromParcel(parcel);
+ assertEquals(data, dataFromParcel);
+ assertTrue(dataFromParcel.getServiceUuids().contains(uuid));
+ assertTrue(dataFromParcel.getServiceUuids().contains(uuid2));
+ }
+
+ @SmallTest
+ public void testManufacturerData() {
+ Parcel parcel = Parcel.obtain();
+ ParcelUuid uuid = ParcelUuid.fromString("0000110A-0000-1000-8000-00805F9B34FB");
+ ParcelUuid uuid2 = ParcelUuid.fromString("0000110B-0000-1000-8000-00805F9B34FB");
+
+ int manufacturerId = 50;
+ byte[] manufacturerData = new byte[] {
+ (byte) 0xF0, 0x00, 0x02, 0x15 };
+ AdvertiseData data =
+ mAdvertiseDataBuilder.setIncludeDeviceName(true)
+ .addServiceUuid(uuid).addServiceUuid(uuid2)
+ .addManufacturerData(manufacturerId, manufacturerData).build();
+
+ data.writeToParcel(parcel, 0);
+ parcel.setDataPosition(0);
+ AdvertiseData dataFromParcel =
+ AdvertiseData.CREATOR.createFromParcel(parcel);
+ assertEquals(data, dataFromParcel);
+ TestUtils.assertArrayEquals(manufacturerData,
+ dataFromParcel.getManufacturerSpecificData().get(manufacturerId));
+ }
+
+ @SmallTest
+ public void testServiceData() {
+ Parcel parcel = Parcel.obtain();
+ ParcelUuid uuid = ParcelUuid.fromString("0000110A-0000-1000-8000-00805F9B34FB");
+ byte[] serviceData = new byte[] {
+ (byte) 0xF0, 0x00, 0x02, 0x15 };
+ AdvertiseData data =
+ mAdvertiseDataBuilder.setIncludeDeviceName(true)
+ .addServiceData(uuid, serviceData).build();
+ data.writeToParcel(parcel, 0);
+ parcel.setDataPosition(0);
+ AdvertiseData dataFromParcel =
+ AdvertiseData.CREATOR.createFromParcel(parcel);
+ assertEquals(data, dataFromParcel);
+ TestUtils.assertArrayEquals(serviceData, dataFromParcel.getServiceData().get(uuid));
+ }
+
+ @SmallTest
+ public void testIncludeTxPower() {
+ Parcel parcel = Parcel.obtain();
+ AdvertiseData data = mAdvertiseDataBuilder.setIncludeTxPowerLevel(true).build();
+ data.writeToParcel(parcel, 0);
+ parcel.setDataPosition(0);
+ AdvertiseData dataFromParcel =
+ AdvertiseData.CREATOR.createFromParcel(parcel);
+ assertEquals(dataFromParcel.getIncludeTxPowerLevel(), true);
+ }
+
+ @SmallTest
+ public void testDescribeContents() {
+ AdvertiseData data = new AdvertiseData.Builder().build();
+ assertEquals(0, data.describeContents());
+ }
+}
diff --git a/tests/tests/bluetooth/src/android/bluetooth/cts/AdvertiseSettingsTest.java b/tests/tests/bluetooth/src/android/bluetooth/cts/AdvertiseSettingsTest.java
new file mode 100644
index 0000000..19b7c29
--- /dev/null
+++ b/tests/tests/bluetooth/src/android/bluetooth/cts/AdvertiseSettingsTest.java
@@ -0,0 +1,85 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.bluetooth.cts;
+
+import android.bluetooth.le.AdvertiseSettings;
+import android.os.Parcel;
+import android.test.AndroidTestCase;
+import android.test.suitebuilder.annotation.SmallTest;
+
+/**
+ * Test for {@link AdvertiseSettings}.
+ */
+public class AdvertiseSettingsTest extends AndroidTestCase {
+
+ @SmallTest
+ public void testDefaultSettings() {
+ AdvertiseSettings settings = new AdvertiseSettings.Builder().build();
+ assertEquals(AdvertiseSettings.ADVERTISE_MODE_LOW_POWER, settings.getMode());
+ assertEquals(AdvertiseSettings.ADVERTISE_TX_POWER_MEDIUM, settings.getTxPowerLevel());
+ assertEquals(0, settings.getTimeout());
+ assertTrue(settings.isConnectable());
+ }
+
+ @SmallTest
+ public void testDescribeContents() {
+ AdvertiseSettings settings = new AdvertiseSettings.Builder().build();
+ assertEquals(0, settings.describeContents());
+ }
+
+ @SmallTest
+ public void testReadWriteParcel() {
+ final int timeoutMillis = 60 * 1000;
+ Parcel parcel = Parcel.obtain();
+ AdvertiseSettings settings = new AdvertiseSettings.Builder()
+ .setAdvertiseMode(AdvertiseSettings.ADVERTISE_MODE_LOW_LATENCY)
+ .setConnectable(false)
+ .setTimeout(timeoutMillis)
+ .setTxPowerLevel(AdvertiseSettings.ADVERTISE_TX_POWER_MEDIUM)
+ .build();
+ settings.writeToParcel(parcel, 0);
+ parcel.setDataPosition(0);
+ AdvertiseSettings settingsFromParcel = AdvertiseSettings.CREATOR.createFromParcel(parcel);
+ assertEquals(AdvertiseSettings.ADVERTISE_MODE_LOW_LATENCY, settingsFromParcel.getMode());
+ assertEquals(AdvertiseSettings.ADVERTISE_TX_POWER_MEDIUM,
+ settingsFromParcel.getTxPowerLevel());
+ assertEquals(timeoutMillis, settingsFromParcel.getTimeout());
+ assertFalse(settingsFromParcel.isConnectable());
+ }
+
+ @SmallTest
+ public void testIllegalTimeout() {
+ AdvertiseSettings.Builder builder = new AdvertiseSettings.Builder();
+ builder.setTimeout(0).build();
+ builder.setTimeout(180 * 1000).build();
+ // Maximum timeout is 3 minutes.
+ try {
+ builder.setTimeout(180 * 1000 + 1).build();
+ fail("should not allow setting timeout to more than 3 minutes");
+ } catch (IllegalArgumentException e) {
+ // nothing to do.
+ }
+ // A negative timeout is not allowed.
+ try {
+ builder.setTimeout(-1).build();
+ fail("should not allow a negative timeout");
+ } catch (IllegalArgumentException e) {
+ // nothing to do.
+ }
+
+ }
+}
diff --git a/tests/tests/bluetooth/src/android/bluetooth/cts/ScanCallbackTest.java b/tests/tests/bluetooth/src/android/bluetooth/cts/ScanCallbackTest.java
new file mode 100644
index 0000000..f447f10
--- /dev/null
+++ b/tests/tests/bluetooth/src/android/bluetooth/cts/ScanCallbackTest.java
@@ -0,0 +1,111 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.bluetooth.cts;
+
+import android.bluetooth.le.ScanCallback;
+import android.bluetooth.le.ScanResult;
+import android.bluetooth.le.ScanSettings;
+import android.test.AndroidTestCase;
+import android.test.suitebuilder.annotation.SmallTest;
+
+import java.util.HashSet;
+import java.util.List;
+import java.util.Set;
+
+/**
+ * Test cases for {@link ScanCallback}.
+ */
+public class ScanCallbackTest extends AndroidTestCase {
+
+ // Scan types are used to determine which callback method is expected.
+ private final static int SCAN_TYPE_SUCCESS = 0;
+ private final static int SCAN_TYPE_FAIL = 1;
+ private final static int SCAN_TYPE_BATCH = 2;
+
+ private MockScanner mMockScanner = new MockScanner();
+ private BleScanCallback mMockScanCallback = new BleScanCallback();
+
+ @SmallTest
+ public void testScanSuccess() {
+ mMockScanCallback.mScanType = SCAN_TYPE_SUCCESS;
+ mMockScanner.startScan(new ScanSettings.Builder().build(), mMockScanCallback);
+ }
+
+ @SmallTest
+ public void testBatchScans() {
+ ScanSettings settings = new ScanSettings.Builder().setReportDelay(1000).build();
+ mMockScanCallback.mScanType = SCAN_TYPE_BATCH;
+ mMockScanner.startScan(settings, mMockScanCallback);
+ }
+
+ @SmallTest
+ public void testScanFail() {
+ ScanSettings settings = new ScanSettings.Builder().build();
+ // The first scan should succeed.
+ mMockScanCallback.mScanType = SCAN_TYPE_SUCCESS;
+ mMockScanner.startScan(settings, mMockScanCallback);
+ // A second scan with the same callback should fail.
+ mMockScanCallback.mScanType = SCAN_TYPE_FAIL;
+ mMockScanner.startScan(settings, mMockScanCallback);
+ }
+
+ // A mock scanner for mocking BLE scanner functionalities.
+ private static class MockScanner {
+ private Set<ScanCallback> mCallbacks = new HashSet<>();
+
+ void startScan(ScanSettings settings, ScanCallback callback) {
+ synchronized (mCallbacks) {
+ if (mCallbacks.contains(callback)) {
+ callback.onScanFailed(ScanCallback.SCAN_FAILED_ALREADY_STARTED);
+ return;
+ }
+ mCallbacks.add(callback);
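+ // Mirror BluetoothLeScanner behavior: a zero report delay delivers
+ // individual onScanResult callbacks, a non-zero delay delivers batched
+ // results.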
+ if (settings.getReportDelayMillis() == 0) {
+ callback.onScanResult(0, null);
+ } else {
+ callback.onBatchScanResults(null);
+ }
+ }
+ }
+ }
+
+ private static class BleScanCallback extends ScanCallback {
+ int mScanType = SCAN_TYPE_SUCCESS;
+
+ @Override
+ public void onScanResult(int callbackType, ScanResult result) {
+ if (mScanType != SCAN_TYPE_SUCCESS) {
+ fail("scan should fail");
+ }
+ }
+
+ @Override
+ public void onBatchScanResults(List<ScanResult> results) {
+ if (mScanType != SCAN_TYPE_BATCH) {
+ fail("not a batch scan");
+ }
+ }
+
+ @Override
+ public void onScanFailed(int errorCode) {
+ if (mScanType != SCAN_TYPE_FAIL) {
+ fail("scan should not fail");
+ }
+ }
+
+ }
+}
diff --git a/tests/tests/bluetooth/src/android/bluetooth/cts/ScanFilterTest.java b/tests/tests/bluetooth/src/android/bluetooth/cts/ScanFilterTest.java
new file mode 100644
index 0000000..16e1413
--- /dev/null
+++ b/tests/tests/bluetooth/src/android/bluetooth/cts/ScanFilterTest.java
@@ -0,0 +1,238 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.bluetooth.cts;
+
+import android.bluetooth.BluetoothAdapter;
+import android.bluetooth.BluetoothDevice;
+import android.bluetooth.le.ScanFilter;
+import android.bluetooth.le.ScanResult;
+import android.os.Parcel;
+import android.os.ParcelUuid;
+import android.test.AndroidTestCase;
+import android.test.suitebuilder.annotation.SmallTest;
+
+/**
+ * Unit test cases for Bluetooth LE scan filters.
+ * <p>
+ * To run this test, use adb shell am instrument -e class 'android.bluetooth.cts.ScanFilterTest' -w
+ * 'com.android.bluetooth.tests/android.bluetooth.BluetoothTestRunner'
+ */
+public class ScanFilterTest extends AndroidTestCase {
+
+ private static final String LOCAL_NAME = "Ped";
+ private static final String DEVICE_MAC = "01:02:03:04:05:AB";
+ private static final String UUID1 = "0000110a-0000-1000-8000-00805f9b34fb";
+ private static final String UUID2 = "0000110b-0000-1000-8000-00805f9b34fb";
+ private static final String UUID3 = "0000110c-0000-1000-8000-00805f9b34fb";
+
+ private ScanResult mScanResult;
+ private ScanFilter.Builder mFilterBuilder;
+
+ @Override
+ protected void setUp() {
+ byte[] scanRecord = new byte[] {
+ 0x02, 0x01, 0x1a, // advertising flags
+ 0x05, 0x02, 0x0b, 0x11, 0x0a, 0x11, // 16 bit service uuids
+ 0x04, 0x09, 0x50, 0x65, 0x64, // local name "Ped"
+ 0x02, 0x0A, (byte) 0xec, // tx power level
+ 0x05, 0x16, 0x0b, 0x11, 0x50, 0x64, // service data
+ 0x05, (byte) 0xff, (byte) 0xe0, 0x00, 0x02, 0x15, // manufacturer specific data
+ 0x03, 0x50, 0x01, 0x02, // an unknown data type won't cause trouble
+ };
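+ // Each entry above is a BLE AD structure: [length][AD type][payload]. For
+ // example 0x04 0x09 0x50 0x65 0x64 is length 4, type 0x09 (complete local
+ // name) and payload "Ped", which LOCAL_NAME is matched against below.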
+
+ BluetoothAdapter adapter = BluetoothAdapter.getDefaultAdapter();
+ BluetoothDevice device = adapter.getRemoteDevice(DEVICE_MAC);
+ mScanResult = new ScanResult(device, TestUtils.parseScanRecord(scanRecord),
+ -10, 1397545200000000L);
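+ // ScanResult(device, scanRecord, rssi, timestampNanos); the -10 dBm rssi and
+ // fixed timestamp are arbitrary values for these matching tests.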
+ mFilterBuilder = new ScanFilter.Builder();
+ }
+
+ @SmallTest
+ public void testDeviceNameFilter() {
+ ScanFilter filter = mFilterBuilder.setDeviceName(LOCAL_NAME).build();
+ assertEquals(LOCAL_NAME, filter.getDeviceName());
+ assertTrue("device name filter fails", filter.matches(mScanResult));
+
+ filter = mFilterBuilder.setDeviceName("Pem").build();
+ assertFalse("device name filter fails", filter.matches(mScanResult));
+ }
+
+ @SmallTest
+ public void testDeviceAddressFilter() {
+ ScanFilter filter = mFilterBuilder.setDeviceAddress(DEVICE_MAC).build();
+ assertEquals(DEVICE_MAC, filter.getDeviceAddress());
+ assertTrue("device filter fails", filter.matches(mScanResult));
+
+ filter = mFilterBuilder.setDeviceAddress("11:22:33:44:55:66").build();
+ assertFalse("device filter fails", filter.matches(mScanResult));
+ }
+
+ @SmallTest
+ public void testServiceUuidFilter() {
+ ScanFilter filter = mFilterBuilder.setServiceUuid(
+ ParcelUuid.fromString(UUID1)).build();
+ assertEquals(UUID1, filter.getServiceUuid().toString());
+ assertTrue("uuid filter fails", filter.matches(mScanResult));
+
+ filter = mFilterBuilder.setServiceUuid(
+ ParcelUuid.fromString(UUID3)).build();
+ assertEquals(UUID3, filter.getServiceUuid().toString());
+ assertFalse("uuid filter fails", filter.matches(mScanResult));
+
+ ParcelUuid mask = ParcelUuid.fromString("FFFFFFF0-FFFF-FFFF-FFFF-FFFFFFFFFFFF");
+ filter = mFilterBuilder
+ .setServiceUuid(ParcelUuid.fromString(UUID3),
+ mask)
+ .build();
+ assertEquals(mask.toString(), filter.getServiceUuidMask().toString());
+ assertTrue("uuid filter fails", filter.matches(mScanResult));
+ }
+
+ @SmallTest
+ public void testServiceDataFilter() {
+ byte[] serviceData = new byte[] {
+ 0x50, 0x64 };
+ ParcelUuid serviceDataUuid = ParcelUuid.fromString(UUID2);
+ ScanFilter filter = mFilterBuilder.setServiceData(serviceDataUuid, serviceData).build();
+ assertEquals(serviceDataUuid, filter.getServiceDataUuid());
+ assertTrue("service data filter fails", filter.matches(mScanResult));
+
+ byte[] emptyData = new byte[0];
+ filter = mFilterBuilder.setServiceData(serviceDataUuid, emptyData).build();
+ assertTrue("service data filter fails", filter.matches(mScanResult));
+
+ byte[] prefixData = new byte[] {
+ 0x50 };
+ filter = mFilterBuilder.setServiceData(serviceDataUuid, prefixData).build();
+ assertTrue("service data filter fails", filter.matches(mScanResult));
+
+ byte[] nonMatchData = new byte[] {
+ 0x51, 0x64 };
+ byte[] mask = new byte[] {
+ (byte) 0x00, (byte) 0xFF };
+ filter = mFilterBuilder.setServiceData(serviceDataUuid, nonMatchData, mask).build();
+ assertEquals(nonMatchData, filter.getServiceData());
+ assertEquals(mask, filter.getServiceDataMask());
+ assertTrue("partial service data filter fails", filter.matches(mScanResult));
+
+ filter = mFilterBuilder.setServiceData(serviceDataUuid, nonMatchData).build();
+ assertFalse("service data filter fails", filter.matches(mScanResult));
+ }
+
+ @SmallTest
+ public void testManufacturerSpecificData() {
+ byte[] manufacturerData = new byte[] {
+ 0x02, 0x15 };
+ int manufacturerId = 0xE0;
+ ScanFilter filter =
+ mFilterBuilder.setManufacturerData(manufacturerId, manufacturerData).build();
+ assertEquals(manufacturerId, filter.getManufacturerId());
+ assertEquals(manufacturerData, filter.getManufacturerData());
+ assertTrue("manufacturer data filter fails", filter.matches(mScanResult));
+
+ byte[] emptyData = new byte[0];
+ filter = mFilterBuilder.setManufacturerData(manufacturerId, emptyData).build();
+ assertTrue("manufacturer data filter fails", filter.matches(mScanResult));
+
+ byte[] prefixData = new byte[] {
+ 0x02 };
+ filter = mFilterBuilder.setManufacturerData(manufacturerId, prefixData).build();
+ assertTrue("manufacturer data filter fails", filter.matches(mScanResult));
+
+ // Test data mask
+ byte[] nonMatchData = new byte[] {
+ 0x02, 0x14 };
+ filter = mFilterBuilder.setManufacturerData(manufacturerId, nonMatchData).build();
+ assertFalse("manufacturer data filter fails", filter.matches(mScanResult));
+ byte[] mask = new byte[] {
+ (byte) 0xFF, (byte) 0x00
+ };
+ filter = mFilterBuilder.setManufacturerData(manufacturerId, nonMatchData, mask).build();
+ assertEquals(manufacturerId, filter.getManufacturerId());
+ assertEquals(nonMatchData, filter.getManufacturerData());
+ assertEquals(mask, filter.getManufacturerDataMask());
+ assertTrue("partial setManufacturerData filter fails", filter.matches(mScanResult));
+ }
+
+ @SmallTest
+ public void testReadWriteParcel() {
+ ScanFilter filter = mFilterBuilder.build();
+ testReadWriteParcelForFilter(filter);
+
+ filter = mFilterBuilder.setDeviceName(LOCAL_NAME).build();
+ testReadWriteParcelForFilter(filter);
+
+ filter = mFilterBuilder.setDeviceAddress("11:22:33:44:55:66").build();
+ testReadWriteParcelForFilter(filter);
+
+ filter = mFilterBuilder.setServiceUuid(
+ ParcelUuid.fromString(UUID3)).build();
+ testReadWriteParcelForFilter(filter);
+
+ filter = mFilterBuilder.setServiceUuid(
+ ParcelUuid.fromString(UUID3),
+ ParcelUuid.fromString("FFFFFFF0-FFFF-FFFF-FFFF-FFFFFFFFFFFF")).build();
+ testReadWriteParcelForFilter(filter);
+
+ byte[] serviceData = new byte[] {
+ 0x50, 0x64 };
+
+ ParcelUuid serviceDataUuid = ParcelUuid.fromString(UUID2);
+ filter = mFilterBuilder.setServiceData(serviceDataUuid, serviceData).build();
+ testReadWriteParcelForFilter(filter);
+
+ filter = mFilterBuilder.setServiceData(serviceDataUuid, new byte[0]).build();
+ testReadWriteParcelForFilter(filter);
+
+ byte[] serviceDataMask = new byte[] {
+ (byte) 0xFF, (byte) 0xFF };
+ filter = mFilterBuilder.setServiceData(serviceDataUuid, serviceData, serviceDataMask)
+ .build();
+ testReadWriteParcelForFilter(filter);
+
+ byte[] manufacturerData = new byte[] {
+ 0x02, 0x15 };
+ int manufacturerId = 0xE0;
+ filter = mFilterBuilder.setManufacturerData(manufacturerId, manufacturerData).build();
+ testReadWriteParcelForFilter(filter);
+
+ filter = mFilterBuilder.setServiceData(serviceDataUuid, new byte[0]).build();
+ testReadWriteParcelForFilter(filter);
+
+ byte[] manufacturerDataMask = new byte[] {
+ (byte) 0xFF, (byte) 0xFF
+ };
+ filter = mFilterBuilder.setManufacturerData(manufacturerId, manufacturerData,
+ manufacturerDataMask).build();
+ testReadWriteParcelForFilter(filter);
+ }
+
+ @SmallTest
+ public void testDescribeContents() {
+ final int expected = 0;
+ assertEquals(expected, new ScanFilter.Builder().build().describeContents());
+ }
+
+ private void testReadWriteParcelForFilter(ScanFilter filter) {
+ Parcel parcel = Parcel.obtain();
+ filter.writeToParcel(parcel, 0);
+ parcel.setDataPosition(0);
+ ScanFilter filterFromParcel =
+ ScanFilter.CREATOR.createFromParcel(parcel);
+ assertEquals(filter, filterFromParcel);
+ }
+}
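For context, the ScanFilter cases above exercise the public android.bluetooth.le scanning API. The following is an illustrative sketch, not part of this change, of how an application would combine a ScanFilter with ScanSettings to start a filtered BLE scan; the class name and service UUID are placeholders, and a real caller also needs the Bluetooth/location permissions and an enabled adapter.

    // Illustrative sketch only; shows the API surface ScanFilterTest verifies.
    import android.bluetooth.BluetoothAdapter;
    import android.bluetooth.le.BluetoothLeScanner;
    import android.bluetooth.le.ScanCallback;
    import android.bluetooth.le.ScanFilter;
    import android.bluetooth.le.ScanResult;
    import android.bluetooth.le.ScanSettings;
    import android.os.ParcelUuid;

    import java.util.Collections;

    public class FilteredScanExample {
        // Placeholder UUID; any service UUID advertised by the target device works.
        private static final ParcelUuid SERVICE_UUID =
                ParcelUuid.fromString("0000110B-0000-1000-8000-00805F9B34FB");

        private final ScanCallback mCallback = new ScanCallback() {
            @Override
            public void onScanResult(int callbackType, ScanResult result) {
                // Only advertisements matching the filter arrive here;
                // result.getScanRecord() carries the parsed advertisement data.
            }
        };

        public void startFilteredScan() {
            // May be null if Bluetooth is disabled; error handling omitted for brevity.
            BluetoothLeScanner scanner =
                    BluetoothAdapter.getDefaultAdapter().getBluetoothLeScanner();
            ScanFilter filter = new ScanFilter.Builder()
                    .setServiceUuid(SERVICE_UUID)
                    .build();
            ScanSettings settings = new ScanSettings.Builder()
                    .setScanMode(ScanSettings.SCAN_MODE_LOW_LATENCY)
                    .build();
            scanner.startScan(Collections.singletonList(filter), settings, mCallback);
        }
    }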
diff --git a/tests/tests/bluetooth/src/android/bluetooth/cts/ScanRecordTest.java b/tests/tests/bluetooth/src/android/bluetooth/cts/ScanRecordTest.java
new file mode 100644
index 0000000..30ad06f
--- /dev/null
+++ b/tests/tests/bluetooth/src/android/bluetooth/cts/ScanRecordTest.java
@@ -0,0 +1,67 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.bluetooth.cts;
+
+import android.bluetooth.le.ScanRecord;
+import android.os.ParcelUuid;
+import android.test.AndroidTestCase;
+import android.test.suitebuilder.annotation.SmallTest;
+
+/**
+ * Unit test cases for {@link ScanRecord}.
+ * <p>
+ * To run this test, use adb shell am instrument -e class 'android.bluetooth.cts.ScanRecordTest' -w
+ * 'com.android.bluetooth.tests/android.bluetooth.BluetoothTestRunner'
+ */
+public class ScanRecordTest extends AndroidTestCase {
+
+ @SmallTest
+ public void testParser() {
+ byte[] scanRecord = new byte[] {
+ 0x02, 0x01, 0x1a, // advertising flags
+ 0x05, 0x02, 0x0b, 0x11, 0x0a, 0x11, // 16 bit service uuids
+ 0x04, 0x09, 0x50, 0x65, 0x64, // name
+ 0x02, 0x0A, (byte) 0xec, // tx power level
+ 0x05, 0x16, 0x0b, 0x11, 0x50, 0x64, // service data
+ 0x05, (byte) 0xff, (byte) 0xe0, 0x00, 0x02, 0x15, // manufacturer specific data
+ 0x03, 0x50, 0x01, 0x02, // an unknown data type won't cause trouble
+ };
+ ScanRecord data = TestUtils.parseScanRecord(scanRecord);
+ assertEquals(0x1a, data.getAdvertiseFlags());
+ ParcelUuid uuid1 = ParcelUuid.fromString("0000110A-0000-1000-8000-00805F9B34FB");
+ ParcelUuid uuid2 = ParcelUuid.fromString("0000110B-0000-1000-8000-00805F9B34FB");
+ assertTrue(data.getServiceUuids().contains(uuid1));
+ assertTrue(data.getServiceUuids().contains(uuid2));
+
+ assertEquals("Ped", data.getDeviceName());
+ assertEquals(-20, data.getTxPowerLevel());
+
+ assertNotNull(data.getManufacturerSpecificData().get(0x00E0));
+
+ final byte[] manufacturerData = new byte[] {
+ 0x02, 0x15 };
+ TestUtils.assertArrayEquals(manufacturerData,
+ data.getManufacturerSpecificData().get(0x00E0));
+ TestUtils.assertArrayEquals(manufacturerData, data.getManufacturerSpecificData(0x00E0));
+
+ assertTrue(data.getServiceData().containsKey(uuid2));
+ final byte[] serviceData = new byte[] {
+ 0x50, 0x64 };
+ TestUtils.assertArrayEquals(serviceData, data.getServiceData().get(uuid2));
+ TestUtils.assertArrayEquals(serviceData, data.getServiceData(uuid2));
+ }
+}
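The scanRecord arrays used in these tests follow the standard BLE advertising data layout: a sequence of AD structures, each encoded as [length][type][payload], where the length byte counts the type byte plus the payload. A minimal, illustrative walk over that layout (not the framework parser reached via TestUtils.parseScanRecord) shows how the bytes above segment into flags, service UUIDs, local name, TX power, service data and manufacturer data.

    // Illustrative sketch of the [length][type][payload] AD-structure layout.
    static void walkAdStructures(byte[] scanRecord) {
        int pos = 0;
        while (pos < scanRecord.length) {
            int length = scanRecord[pos++] & 0xFF;
            if (length == 0 || pos + length > scanRecord.length) {
                break;  // zero length (or a truncated entry) ends the significant part
            }
            int type = scanRecord[pos] & 0xFF;  // e.g. 0x01 flags, 0x09 complete local name,
                                                // 0x16 service data, 0xFF manufacturer data
            byte[] payload = new byte[length - 1];
            System.arraycopy(scanRecord, pos + 1, payload, 0, length - 1);
            pos += length;
            // A real parser dispatches on 'type'; unknown types are skipped, which is
            // why the trailing 0x03, 0x50, 0x01, 0x02 entry "won't cause trouble".
        }
    }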
diff --git a/tests/tests/bluetooth/src/android/bluetooth/cts/ScanResultTest.java b/tests/tests/bluetooth/src/android/bluetooth/cts/ScanResultTest.java
new file mode 100644
index 0000000..91da8c3
--- /dev/null
+++ b/tests/tests/bluetooth/src/android/bluetooth/cts/ScanResultTest.java
@@ -0,0 +1,68 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.bluetooth.cts;
+
+import android.bluetooth.BluetoothAdapter;
+import android.bluetooth.BluetoothDevice;
+import android.bluetooth.le.ScanResult;
+import android.os.Parcel;
+import android.test.AndroidTestCase;
+import android.test.suitebuilder.annotation.SmallTest;
+
+/**
+ * Unit test cases for Bluetooth LE {@link ScanResult}.
+ * <p>
+ * To run this test, use adb shell am instrument -e class 'android.bluetooth.cts.ScanResultTest' -w
+ * 'com.android.bluetooth.tests/android.bluetooth.BluetoothTestRunner'
+ */
+public class ScanResultTest extends AndroidTestCase {
+ private static final String DEVICE_ADDRESS = "01:02:03:04:05:06";
+ private static final byte[] SCAN_RECORD = new byte[] {
+ 1, 2, 3 };
+ private static final int RSSI = -10;
+ private static final long TIMESTAMP_NANOS = 10000L;
+
+ /**
+ * Test writing a ScanResult to a Parcel and reading it back.
+ */
+ @SmallTest
+ public void testScanResultParceling() {
+ BluetoothDevice device =
+ BluetoothAdapter.getDefaultAdapter().getRemoteDevice(DEVICE_ADDRESS);
+ ScanResult result = new ScanResult(device, TestUtils.parseScanRecord(SCAN_RECORD), RSSI,
+ TIMESTAMP_NANOS);
+ Parcel parcel = Parcel.obtain();
+ result.writeToParcel(parcel, 0);
+ // Need to reset parcel data position to the beginning.
+ parcel.setDataPosition(0);
+ ScanResult resultFromParcel = ScanResult.CREATOR.createFromParcel(parcel);
+
+ assertEquals(RSSI, resultFromParcel.getRssi());
+ assertEquals(TIMESTAMP_NANOS, resultFromParcel.getTimestampNanos());
+ assertEquals(device, resultFromParcel.getDevice());
+ TestUtils.assertArrayEquals(SCAN_RECORD, resultFromParcel.getScanRecord().getBytes());
+ }
+
+ @SmallTest
+ public void testDescribeContents() {
+ BluetoothDevice device =
+ BluetoothAdapter.getDefaultAdapter().getRemoteDevice(DEVICE_ADDRESS);
+ ScanResult result = new ScanResult(device, TestUtils.parseScanRecord(SCAN_RECORD), RSSI,
+ TIMESTAMP_NANOS);
+ assertEquals(0, result.describeContents());
+ }
+}
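Both parceling tests here follow the same write, rewind, read-back pattern. A generic helper along these lines (hypothetical; not part of the change) captures it for any Parcelable:

    import android.os.Parcel;
    import android.os.Parcelable;

    final class ParcelRoundTrip {
        // Serialize 'in' to a Parcel and reconstruct it through the class's CREATOR.
        static <T extends Parcelable> T roundTrip(T in, Parcelable.Creator<T> creator) {
            Parcel parcel = Parcel.obtain();
            try {
                in.writeToParcel(parcel, 0);
                parcel.setDataPosition(0);  // rewind before reading back
                return creator.createFromParcel(parcel);
            } finally {
                parcel.recycle();           // return the Parcel to the pool
            }
        }
    }

For example, ScanResult copy = ParcelRoundTrip.roundTrip(result, ScanResult.CREATOR); would stand in for the explicit obtain/write/rewind/read sequence used in testScanResultParceling().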
diff --git a/tests/tests/bluetooth/src/android/bluetooth/cts/ScanSettingsTest.java b/tests/tests/bluetooth/src/android/bluetooth/cts/ScanSettingsTest.java
new file mode 100644
index 0000000..7033c3c
--- /dev/null
+++ b/tests/tests/bluetooth/src/android/bluetooth/cts/ScanSettingsTest.java
@@ -0,0 +1,60 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.bluetooth.cts;
+
+import android.bluetooth.le.ScanSettings;
+import android.os.Parcel;
+import android.test.AndroidTestCase;
+import android.test.suitebuilder.annotation.SmallTest;
+
+/**
+ * Test for Bluetooth LE {@link ScanSettings}.
+ */
+public class ScanSettingsTest extends AndroidTestCase {
+
+ @SmallTest
+ public void testDefaultSettings() {
+ ScanSettings settings = new ScanSettings.Builder().build();
+ assertEquals(ScanSettings.CALLBACK_TYPE_ALL_MATCHES, settings.getCallbackType());
+ assertEquals(ScanSettings.SCAN_MODE_LOW_POWER, settings.getScanMode());
+ assertEquals(0, settings.getScanResultType());
+ assertEquals(0, settings.getReportDelayMillis());
+ }
+
+ @SmallTest
+ public void testDescribeContents() {
+ ScanSettings settings = new ScanSettings.Builder().build();
+ assertEquals(0, settings.describeContents());
+ }
+
+ @SmallTest
+ public void testReadWriteParcel() {
+ final long reportDelayMillis = 60 * 1000;
+ Parcel parcel = Parcel.obtain();
+ ScanSettings settings = new ScanSettings.Builder()
+ .setReportDelay(reportDelayMillis)
+ .setScanMode(ScanSettings.SCAN_MODE_LOW_LATENCY)
+ .setMatchMode(ScanSettings.MATCH_MODE_AGGRESSIVE)
+ .setNumOfMatches(ScanSettings.MATCH_NUM_MAX_ADVERTISEMENT)
+ .build();
+ settings.writeToParcel(parcel, 0);
+ parcel.setDataPosition(0);
+ ScanSettings settingsFromParcel = ScanSettings.CREATOR.createFromParcel(parcel);
+ assertEquals(reportDelayMillis, settingsFromParcel.getReportDelayMillis());
+ assertEquals(ScanSettings.SCAN_MODE_LOW_LATENCY, settingsFromParcel.getScanMode());
+ }
+}
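setReportDelay() in the test above configures batched scanning: with a non-zero delay the stack buffers results and delivers them through ScanCallback.onBatchScanResults() instead of per-advertisement callbacks. A hedged sketch of that configuration (the one-minute delay is arbitrary):

    import android.bluetooth.le.ScanSettings;

    final class BatchedScanSettings {
        static ScanSettings build() {
            // With a non-zero report delay, results arrive in batches via
            // ScanCallback.onBatchScanResults(List<ScanResult>); an app can call
            // BluetoothLeScanner.flushPendingScanResults(callback) to request
            // immediate delivery of whatever has been buffered so far.
            return new ScanSettings.Builder()
                    .setScanMode(ScanSettings.SCAN_MODE_LOW_POWER)
                    .setReportDelay(60 * 1000)
                    .build();
        }
    }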
diff --git a/tests/tests/bluetooth/src/android/bluetooth/cts/TestUtils.java b/tests/tests/bluetooth/src/android/bluetooth/cts/TestUtils.java
new file mode 100644
index 0000000..7c5db9e
--- /dev/null
+++ b/tests/tests/bluetooth/src/android/bluetooth/cts/TestUtils.java
@@ -0,0 +1,53 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.bluetooth.cts;
+
+import android.bluetooth.le.ScanRecord;
+
+import junit.framework.Assert;
+
+import java.lang.reflect.InvocationTargetException;
+import java.lang.reflect.Method;
+import java.util.Arrays;
+
+/**
+ * Utility class for Bluetooth CTS tests.
+ */
+class TestUtils {
+
+ /**
+ * Utility method to call hidden ScanRecord.parseFromBytes method.
+ */
+ static ScanRecord parseScanRecord(byte[] bytes) {
+ Class<?> scanRecordClass = ScanRecord.class;
+ try {
+ Method method = scanRecordClass.getDeclaredMethod("parseFromBytes", byte[].class);
+ return (ScanRecord)method.invoke(null, bytes);
+ } catch (NoSuchMethodException | IllegalAccessException | IllegalArgumentException
+ | InvocationTargetException e) {
+ return null;
+ }
+ }
+
+ // Assert two byte arrays are equal.
+ static void assertArrayEquals(byte[] expected, byte[] actual) {
+ if (!Arrays.equals(expected, actual)) {
+ Assert.fail("expected:<" + Arrays.toString(expected) +
+ "> but was:<" + Arrays.toString(actual) + ">");
+ }
+ }
+}
diff --git a/tests/tests/content/res/values-land/styles.xml b/tests/tests/content/res/values-land/styles.xml
new file mode 100644
index 0000000..7717f10
--- /dev/null
+++ b/tests/tests/content/res/values-land/styles.xml
@@ -0,0 +1,21 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright (C) 2015 The Android Open Source Project
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+
+<resources>
+ <style name="Theme_OrientationDependent">
+ <item name="themeDimension">111px</item>
+ </style>
+</resources>
diff --git a/tests/tests/content/res/values-ldltr/styles.xml b/tests/tests/content/res/values-ldltr/styles.xml
new file mode 100644
index 0000000..063fc4f
--- /dev/null
+++ b/tests/tests/content/res/values-ldltr/styles.xml
@@ -0,0 +1,24 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright (C) 2015 The Android Open Source Project
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+
+<resources>
+ <style name="Theme_LayoutDirectionDependent">
+ <item name="themeInteger">111</item>
+ </style>
+ <style name="Theme_LayoutIsRTL">
+ <item name="themeBoolean">false</item>
+ </style>
+</resources>
diff --git a/tests/tests/content/res/values-ldrtl/styles.xml b/tests/tests/content/res/values-ldrtl/styles.xml
new file mode 100644
index 0000000..c586192
--- /dev/null
+++ b/tests/tests/content/res/values-ldrtl/styles.xml
@@ -0,0 +1,21 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright (C) 2015 The Android Open Source Project
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+
+<resources>
+ <style name="Theme_LayoutIsRTL">
+ <item name="themeBoolean">true</item>
+ </style>
+</resources>
diff --git a/tests/tests/content/res/values/styles.xml b/tests/tests/content/res/values/styles.xml
index 20c80f8..72d4047 100644
--- a/tests/tests/content/res/values/styles.xml
+++ b/tests/tests/content/res/values/styles.xml
@@ -169,4 +169,11 @@
<item name="android:windowSwipeToDismiss">false</item>
</style>
+ <style name="Theme_LayoutDirectionDependent">
+ <item name="themeInteger">999</item>
+ </style>
+
+ <style name="Theme_OrientationDependent">
+ <item name="themeDimension">999px</item>
+ </style>
</resources>
diff --git a/tests/tests/content/src/android/content/res/cts/Resources_ThemeTest.java b/tests/tests/content/src/android/content/res/cts/Resources_ThemeTest.java
index 349cb47..6d1c2e4 100644
--- a/tests/tests/content/src/android/content/res/cts/Resources_ThemeTest.java
+++ b/tests/tests/content/src/android/content/res/cts/Resources_ThemeTest.java
@@ -18,6 +18,8 @@
import org.xmlpull.v1.XmlPullParser;
+import android.content.pm.ActivityInfo;
+import android.content.res.Configuration;
import android.content.res.Resources;
import android.content.res.TypedArray;
import android.content.res.Resources.Theme;
@@ -25,9 +27,12 @@
import android.util.AttributeSet;
import android.util.TypedValue;
import android.util.Xml;
+import android.view.View;
import com.android.cts.content.R;
+import java.util.Locale;
+
public class Resources_ThemeTest extends AndroidTestCase {
@@ -78,4 +83,51 @@
assertFalse(mResTheme.resolveAttribute(R.raw.testmp3, value, false));
}
+ public void testGetChangingConfigurations() {
+ Resources.Theme theme = getContext().getResources().newTheme();
+ assertEquals("Initial changing configuration mask is empty",
+ 0, theme.getChangingConfigurations());
+
+ theme.applyStyle(R.style.Theme_OrientationDependent, true);
+ assertEquals("First call to Theme.applyStyle() sets changing configuration",
+ ActivityInfo.CONFIG_ORIENTATION, theme.getChangingConfigurations());
+
+ theme.applyStyle(R.style.Theme_LayoutDirectionDependent, true);
+ assertEquals("Multiple calls to Theme.applyStyle() update changing configuration",
+ ActivityInfo.CONFIG_ORIENTATION | ActivityInfo.CONFIG_LAYOUT_DIRECTION,
+ theme.getChangingConfigurations());
+
+ Resources.Theme other = getContext().getResources().newTheme();
+ other.setTo(theme);
+ assertEquals("Theme.setTheme() copies changing confguration",
+ ActivityInfo.CONFIG_ORIENTATION | ActivityInfo.CONFIG_LAYOUT_DIRECTION,
+ theme.getChangingConfigurations());
+ }
+
+ public void testRebase() {
+ Resources res = getContext().getResources();
+ Configuration config = res.getConfiguration();
+ config.setLocale(Locale.ENGLISH);
+ assertEquals("Theme will be created in LTR config",
+ View.LAYOUT_DIRECTION_LTR, config.getLayoutDirection());
+
+ Resources.Theme theme = res.newTheme();
+ theme.applyStyle(R.style.Theme_LayoutIsRTL, true);
+
+ TypedArray t = theme.obtainStyledAttributes(new int[] { R.attr.themeBoolean });
+ assertEquals("Theme was created in LTR config", false, t.getBoolean(0, true));
+ t.recycle();
+
+ config.setLocale(new Locale("iw"));
+ res.updateConfiguration(config, null);
+
+ assertEquals("Theme will be rebased in RTL config",
+ View.LAYOUT_DIRECTION_RTL, config.getLayoutDirection());
+
+ theme.rebase();
+
+ t = theme.obtainStyledAttributes(new int[] { R.attr.themeBoolean });
+ assertEquals("Theme was rebased in RTL config", true, t.getBoolean(0, false));
+ t.recycle();
+ }
}
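testGetChangingConfigurations() and testRebase() above cover the two new Theme APIs together: getChangingConfigurations() reports the union of the CONFIG_* bits of every style applied so far, and rebase() re-resolves those styles after the configuration changes. An illustrative sketch, not part of the change, of how an app could combine them:

    import android.content.pm.ActivityInfo;
    import android.content.res.Resources;

    final class ThemeRebaseExample {
        // Call after the Resources object has been updated for a new configuration.
        static void onConfigurationUpdated(Resources.Theme theme) {
            int deps = theme.getChangingConfigurations();
            // Only re-resolve when an applied style actually depends on layout direction.
            if ((deps & ActivityInfo.CONFIG_LAYOUT_DIRECTION) != 0) {
                theme.rebase();  // re-apply all previously applied styles
            }
        }
    }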
diff --git a/tests/tests/deqp/egl-temporary-failures.txt b/tests/tests/deqp/egl-temporary-failures.txt
new file mode 100644
index 0000000..1a3bb94
--- /dev/null
+++ b/tests/tests/deqp/egl-temporary-failures.txt
@@ -0,0 +1,780 @@
+dEQP-EGL.functional.sharing.gles2.multithread.simple.textures#gen_delete
+dEQP-EGL.functional.sharing.gles2.multithread.simple.textures#teximage2d
+dEQP-EGL.functional.sharing.gles2.multithread.simple.textures#teximage2d_texsubimage2d
+dEQP-EGL.functional.sharing.gles2.multithread.simple.textures#teximage2d_render
+dEQP-EGL.functional.sharing.gles2.multithread.simple.textures#teximage2d_texsubimage2d_render
+dEQP-EGL.functional.sharing.gles2.multithread.simple.images.texture_source#create_destroy
+dEQP-EGL.functional.sharing.gles2.multithread.simple.images.texture_source#create_texture
+dEQP-EGL.functional.sharing.gles2.multithread.simple.images.texture_source#texsubimage2d
+dEQP-EGL.functional.sharing.gles2.multithread.simple.images.texture_source#teximage2d
+dEQP-EGL.functional.sharing.gles2.multithread.simple.images.texture_source#create_texture_render
+dEQP-EGL.functional.sharing.gles2.multithread.simple.images.texture_source#texsubimage2d_render
+dEQP-EGL.functional.sharing.gles2.multithread.simple.images.texture_source#teximage2d_render
+dEQP-EGL.functional.sharing.gles2.multithread.random.textures.gen_delete#0
+dEQP-EGL.functional.sharing.gles2.multithread.random.textures.gen_delete#1
+dEQP-EGL.functional.sharing.gles2.multithread.random.textures.gen_delete#2
+dEQP-EGL.functional.sharing.gles2.multithread.random.textures.gen_delete#3
+dEQP-EGL.functional.sharing.gles2.multithread.random.textures.gen_delete#5
+dEQP-EGL.functional.sharing.gles2.multithread.random.textures.gen_delete#6
+dEQP-EGL.functional.sharing.gles2.multithread.random.textures.gen_delete#7
+dEQP-EGL.functional.sharing.gles2.multithread.random.textures.gen_delete#9
+dEQP-EGL.functional.sharing.gles2.multithread.random.textures.gen_delete#10
+dEQP-EGL.functional.sharing.gles2.multithread.random.textures.gen_delete#11
+dEQP-EGL.functional.sharing.gles2.multithread.random.textures.gen_delete#12
+dEQP-EGL.functional.sharing.gles2.multithread.random.textures.gen_delete#13
+dEQP-EGL.functional.sharing.gles2.multithread.random.textures.gen_delete#14
+dEQP-EGL.functional.sharing.gles2.multithread.random.textures.gen_delete#15
+dEQP-EGL.functional.sharing.gles2.multithread.random.textures.gen_delete#16
+dEQP-EGL.functional.sharing.gles2.multithread.random.textures.gen_delete#17
+dEQP-EGL.functional.sharing.gles2.multithread.random.textures.gen_delete#18
+dEQP-EGL.functional.sharing.gles2.multithread.random.textures.teximage2d#0
+dEQP-EGL.functional.sharing.gles2.multithread.random.textures.teximage2d#1
+dEQP-EGL.functional.sharing.gles2.multithread.random.textures.teximage2d#3
+dEQP-EGL.functional.sharing.gles2.multithread.random.textures.teximage2d#4
+dEQP-EGL.functional.sharing.gles2.multithread.random.textures.teximage2d#5
+dEQP-EGL.functional.sharing.gles2.multithread.random.textures.teximage2d#6
+dEQP-EGL.functional.sharing.gles2.multithread.random.textures.teximage2d#7
+dEQP-EGL.functional.sharing.gles2.multithread.random.textures.teximage2d#8
+dEQP-EGL.functional.sharing.gles2.multithread.random.textures.teximage2d#9
+dEQP-EGL.functional.sharing.gles2.multithread.random.textures.teximage2d#10
+dEQP-EGL.functional.sharing.gles2.multithread.random.textures.teximage2d#11
+dEQP-EGL.functional.sharing.gles2.multithread.random.textures.teximage2d#13
+dEQP-EGL.functional.sharing.gles2.multithread.random.textures.teximage2d#14
+dEQP-EGL.functional.sharing.gles2.multithread.random.textures.teximage2d#15
+dEQP-EGL.functional.sharing.gles2.multithread.random.textures.teximage2d#16
+dEQP-EGL.functional.sharing.gles2.multithread.random.textures.teximage2d#17
+dEQP-EGL.functional.sharing.gles2.multithread.random.textures.teximage2d#18
+dEQP-EGL.functional.sharing.gles2.multithread.random.textures.teximage2d#19
+dEQP-EGL.functional.sharing.gles2.multithread.random.textures.texsubimage2d#0
+dEQP-EGL.functional.sharing.gles2.multithread.random.textures.texsubimage2d#1
+dEQP-EGL.functional.sharing.gles2.multithread.random.textures.texsubimage2d#2
+dEQP-EGL.functional.sharing.gles2.multithread.random.textures.texsubimage2d#3
+dEQP-EGL.functional.sharing.gles2.multithread.random.textures.texsubimage2d#4
+dEQP-EGL.functional.sharing.gles2.multithread.random.textures.texsubimage2d#5
+dEQP-EGL.functional.sharing.gles2.multithread.random.textures.texsubimage2d#6
+dEQP-EGL.functional.sharing.gles2.multithread.random.textures.texsubimage2d#8
+dEQP-EGL.functional.sharing.gles2.multithread.random.textures.texsubimage2d#9
+dEQP-EGL.functional.sharing.gles2.multithread.random.textures.texsubimage2d#10
+dEQP-EGL.functional.sharing.gles2.multithread.random.textures.texsubimage2d#11
+dEQP-EGL.functional.sharing.gles2.multithread.random.textures.texsubimage2d#12
+dEQP-EGL.functional.sharing.gles2.multithread.random.textures.texsubimage2d#13
+dEQP-EGL.functional.sharing.gles2.multithread.random.textures.texsubimage2d#14
+dEQP-EGL.functional.sharing.gles2.multithread.random.textures.texsubimage2d#15
+dEQP-EGL.functional.sharing.gles2.multithread.random.textures.texsubimage2d#16
+dEQP-EGL.functional.sharing.gles2.multithread.random.textures.texsubimage2d#17
+dEQP-EGL.functional.sharing.gles2.multithread.random.textures.texsubimage2d#18
+dEQP-EGL.functional.sharing.gles2.multithread.random.textures.texsubimage2d#19
+dEQP-EGL.functional.sharing.gles2.multithread.random.buffers.gen_delete#1
+dEQP-EGL.functional.sharing.gles2.multithread.random.buffers.gen_delete#2
+dEQP-EGL.functional.sharing.gles2.multithread.random.buffers.gen_delete#3
+dEQP-EGL.functional.sharing.gles2.multithread.random.buffers.gen_delete#4
+dEQP-EGL.functional.sharing.gles2.multithread.random.buffers.gen_delete#5
+dEQP-EGL.functional.sharing.gles2.multithread.random.buffers.gen_delete#6
+dEQP-EGL.functional.sharing.gles2.multithread.random.buffers.gen_delete#7
+dEQP-EGL.functional.sharing.gles2.multithread.random.buffers.gen_delete#8
+dEQP-EGL.functional.sharing.gles2.multithread.random.buffers.gen_delete#9
+dEQP-EGL.functional.sharing.gles2.multithread.random.buffers.gen_delete#10
+dEQP-EGL.functional.sharing.gles2.multithread.random.buffers.gen_delete#11
+dEQP-EGL.functional.sharing.gles2.multithread.random.buffers.gen_delete#12
+dEQP-EGL.functional.sharing.gles2.multithread.random.buffers.gen_delete#13
+dEQP-EGL.functional.sharing.gles2.multithread.random.buffers.gen_delete#16
+dEQP-EGL.functional.sharing.gles2.multithread.random.buffers.gen_delete#17
+dEQP-EGL.functional.sharing.gles2.multithread.random.buffers.gen_delete#18
+dEQP-EGL.functional.sharing.gles2.multithread.random.buffers.gen_delete#19
+dEQP-EGL.functional.sharing.gles2.multithread.random.buffers.bufferdata#0
+dEQP-EGL.functional.sharing.gles2.multithread.random.buffers.bufferdata#1
+dEQP-EGL.functional.sharing.gles2.multithread.random.buffers.bufferdata#2
+dEQP-EGL.functional.sharing.gles2.multithread.random.buffers.bufferdata#3
+dEQP-EGL.functional.sharing.gles2.multithread.random.buffers.bufferdata#4
+dEQP-EGL.functional.sharing.gles2.multithread.random.buffers.bufferdata#5
+dEQP-EGL.functional.sharing.gles2.multithread.random.buffers.bufferdata#6
+dEQP-EGL.functional.sharing.gles2.multithread.random.buffers.bufferdata#7
+dEQP-EGL.functional.sharing.gles2.multithread.random.buffers.bufferdata#8
+dEQP-EGL.functional.sharing.gles2.multithread.random.buffers.bufferdata#9
+dEQP-EGL.functional.sharing.gles2.multithread.random.buffers.bufferdata#10
+dEQP-EGL.functional.sharing.gles2.multithread.random.buffers.bufferdata#11
+dEQP-EGL.functional.sharing.gles2.multithread.random.buffers.bufferdata#13
+dEQP-EGL.functional.sharing.gles2.multithread.random.buffers.bufferdata#14
+dEQP-EGL.functional.sharing.gles2.multithread.random.buffers.bufferdata#15
+dEQP-EGL.functional.sharing.gles2.multithread.random.buffers.bufferdata#16
+dEQP-EGL.functional.sharing.gles2.multithread.random.buffers.bufferdata#17
+dEQP-EGL.functional.sharing.gles2.multithread.random.buffers.bufferdata#18
+dEQP-EGL.functional.sharing.gles2.multithread.random.buffers.bufferdata#19
+dEQP-EGL.functional.sharing.gles2.multithread.random.buffers.buffersubdata#0
+dEQP-EGL.functional.sharing.gles2.multithread.random.buffers.buffersubdata#1
+dEQP-EGL.functional.sharing.gles2.multithread.random.buffers.buffersubdata#2
+dEQP-EGL.functional.sharing.gles2.multithread.random.buffers.buffersubdata#3
+dEQP-EGL.functional.sharing.gles2.multithread.random.buffers.buffersubdata#4
+dEQP-EGL.functional.sharing.gles2.multithread.random.buffers.buffersubdata#5
+dEQP-EGL.functional.sharing.gles2.multithread.random.buffers.buffersubdata#6
+dEQP-EGL.functional.sharing.gles2.multithread.random.buffers.buffersubdata#7
+dEQP-EGL.functional.sharing.gles2.multithread.random.buffers.buffersubdata#8
+dEQP-EGL.functional.sharing.gles2.multithread.random.buffers.buffersubdata#9
+dEQP-EGL.functional.sharing.gles2.multithread.random.buffers.buffersubdata#10
+dEQP-EGL.functional.sharing.gles2.multithread.random.buffers.buffersubdata#11
+dEQP-EGL.functional.sharing.gles2.multithread.random.buffers.buffersubdata#12
+dEQP-EGL.functional.sharing.gles2.multithread.random.buffers.buffersubdata#13
+dEQP-EGL.functional.sharing.gles2.multithread.random.buffers.buffersubdata#14
+dEQP-EGL.functional.sharing.gles2.multithread.random.buffers.buffersubdata#15
+dEQP-EGL.functional.sharing.gles2.multithread.random.buffers.buffersubdata#16
+dEQP-EGL.functional.sharing.gles2.multithread.random.buffers.buffersubdata#17
+dEQP-EGL.functional.sharing.gles2.multithread.random.buffers.buffersubdata#18
+dEQP-EGL.functional.sharing.gles2.multithread.random.buffers.buffersubdata#19
+dEQP-EGL.functional.sharing.gles2.multithread.random.shaders.create_destroy#0
+dEQP-EGL.functional.sharing.gles2.multithread.random.shaders.create_destroy#5
+dEQP-EGL.functional.sharing.gles2.multithread.random.shaders.create_destroy#6
+dEQP-EGL.functional.sharing.gles2.multithread.random.shaders.create_destroy#7
+dEQP-EGL.functional.sharing.gles2.multithread.random.shaders.create_destroy#8
+dEQP-EGL.functional.sharing.gles2.multithread.random.shaders.create_destroy#10
+dEQP-EGL.functional.sharing.gles2.multithread.random.shaders.create_destroy#11
+dEQP-EGL.functional.sharing.gles2.multithread.random.shaders.create_destroy#13
+dEQP-EGL.functional.sharing.gles2.multithread.random.shaders.create_destroy#14
+dEQP-EGL.functional.sharing.gles2.multithread.random.shaders.create_destroy#16
+dEQP-EGL.functional.sharing.gles2.multithread.random.shaders.create_destroy#18
+dEQP-EGL.functional.sharing.gles2.multithread.random.shaders.create_destroy#19
+dEQP-EGL.functional.sharing.gles2.multithread.random.shaders.source#0
+dEQP-EGL.functional.sharing.gles2.multithread.random.shaders.source#1
+dEQP-EGL.functional.sharing.gles2.multithread.random.shaders.source#2
+dEQP-EGL.functional.sharing.gles2.multithread.random.shaders.source#4
+dEQP-EGL.functional.sharing.gles2.multithread.random.shaders.source#6
+dEQP-EGL.functional.sharing.gles2.multithread.random.shaders.source#7
+dEQP-EGL.functional.sharing.gles2.multithread.random.shaders.source#8
+dEQP-EGL.functional.sharing.gles2.multithread.random.shaders.source#9
+dEQP-EGL.functional.sharing.gles2.multithread.random.shaders.source#11
+dEQP-EGL.functional.sharing.gles2.multithread.random.shaders.source#12
+dEQP-EGL.functional.sharing.gles2.multithread.random.shaders.source#14
+dEQP-EGL.functional.sharing.gles2.multithread.random.shaders.source#15
+dEQP-EGL.functional.sharing.gles2.multithread.random.shaders.source#17
+dEQP-EGL.functional.sharing.gles2.multithread.random.shaders.source#18
+dEQP-EGL.functional.sharing.gles2.multithread.random.shaders.source#19
+dEQP-EGL.functional.sharing.gles2.multithread.random.shaders.compile#0
+dEQP-EGL.functional.sharing.gles2.multithread.random.shaders.compile#1
+dEQP-EGL.functional.sharing.gles2.multithread.random.shaders.compile#2
+dEQP-EGL.functional.sharing.gles2.multithread.random.shaders.compile#3
+dEQP-EGL.functional.sharing.gles2.multithread.random.shaders.compile#4
+dEQP-EGL.functional.sharing.gles2.multithread.random.shaders.compile#5
+dEQP-EGL.functional.sharing.gles2.multithread.random.shaders.compile#6
+dEQP-EGL.functional.sharing.gles2.multithread.random.shaders.compile#7
+dEQP-EGL.functional.sharing.gles2.multithread.random.shaders.compile#8
+dEQP-EGL.functional.sharing.gles2.multithread.random.shaders.compile#9
+dEQP-EGL.functional.sharing.gles2.multithread.random.shaders.compile#10
+dEQP-EGL.functional.sharing.gles2.multithread.random.shaders.compile#11
+dEQP-EGL.functional.sharing.gles2.multithread.random.shaders.compile#13
+dEQP-EGL.functional.sharing.gles2.multithread.random.shaders.compile#14
+dEQP-EGL.functional.sharing.gles2.multithread.random.shaders.compile#15
+dEQP-EGL.functional.sharing.gles2.multithread.random.shaders.compile#16
+dEQP-EGL.functional.sharing.gles2.multithread.random.shaders.compile#18
+dEQP-EGL.functional.sharing.gles2.multithread.random.shaders.compile#19
+dEQP-EGL.functional.sharing.gles2.multithread.random.programs.create_destroy#0
+dEQP-EGL.functional.sharing.gles2.multithread.random.programs.create_destroy#1
+dEQP-EGL.functional.sharing.gles2.multithread.random.programs.create_destroy#2
+dEQP-EGL.functional.sharing.gles2.multithread.random.programs.create_destroy#3
+dEQP-EGL.functional.sharing.gles2.multithread.random.programs.create_destroy#4
+dEQP-EGL.functional.sharing.gles2.multithread.random.programs.create_destroy#5
+dEQP-EGL.functional.sharing.gles2.multithread.random.programs.create_destroy#6
+dEQP-EGL.functional.sharing.gles2.multithread.random.programs.create_destroy#7
+dEQP-EGL.functional.sharing.gles2.multithread.random.programs.create_destroy#8
+dEQP-EGL.functional.sharing.gles2.multithread.random.programs.create_destroy#9
+dEQP-EGL.functional.sharing.gles2.multithread.random.programs.create_destroy#10
+dEQP-EGL.functional.sharing.gles2.multithread.random.programs.create_destroy#11
+dEQP-EGL.functional.sharing.gles2.multithread.random.programs.create_destroy#13
+dEQP-EGL.functional.sharing.gles2.multithread.random.programs.create_destroy#14
+dEQP-EGL.functional.sharing.gles2.multithread.random.programs.create_destroy#15
+dEQP-EGL.functional.sharing.gles2.multithread.random.programs.create_destroy#17
+dEQP-EGL.functional.sharing.gles2.multithread.random.programs.create_destroy#18
+dEQP-EGL.functional.sharing.gles2.multithread.random.programs.attach_detach#0
+dEQP-EGL.functional.sharing.gles2.multithread.random.programs.attach_detach#1
+dEQP-EGL.functional.sharing.gles2.multithread.random.programs.attach_detach#3
+dEQP-EGL.functional.sharing.gles2.multithread.random.programs.attach_detach#4
+dEQP-EGL.functional.sharing.gles2.multithread.random.programs.attach_detach#6
+dEQP-EGL.functional.sharing.gles2.multithread.random.programs.attach_detach#7
+dEQP-EGL.functional.sharing.gles2.multithread.random.programs.attach_detach#8
+dEQP-EGL.functional.sharing.gles2.multithread.random.programs.attach_detach#9
+dEQP-EGL.functional.sharing.gles2.multithread.random.programs.attach_detach#10
+dEQP-EGL.functional.sharing.gles2.multithread.random.programs.attach_detach#12
+dEQP-EGL.functional.sharing.gles2.multithread.random.programs.attach_detach#13
+dEQP-EGL.functional.sharing.gles2.multithread.random.programs.attach_detach#15
+dEQP-EGL.functional.sharing.gles2.multithread.random.programs.attach_detach#16
+dEQP-EGL.functional.sharing.gles2.multithread.random.programs.attach_detach#17
+dEQP-EGL.functional.sharing.gles2.multithread.random.programs.attach_detach#19
+dEQP-EGL.functional.sharing.gles2.multithread.random.programs.link#1
+dEQP-EGL.functional.sharing.gles2.multithread.random.programs.link#3
+dEQP-EGL.functional.sharing.gles2.multithread.random.programs.link#4
+dEQP-EGL.functional.sharing.gles2.multithread.random.programs.link#5
+dEQP-EGL.functional.sharing.gles2.multithread.random.programs.link#7
+dEQP-EGL.functional.sharing.gles2.multithread.random.programs.link#9
+dEQP-EGL.functional.sharing.gles2.multithread.random.programs.link#12
+dEQP-EGL.functional.sharing.gles2.multithread.random.programs.link#13
+dEQP-EGL.functional.sharing.gles2.multithread.random.programs.link#15
+dEQP-EGL.functional.sharing.gles2.multithread.random.programs.link#17
+dEQP-EGL.functional.sharing.gles2.multithread.random.programs.link#18
+dEQP-EGL.functional.sharing.gles2.multithread.random.images.create_destroy#0
+dEQP-EGL.functional.sharing.gles2.multithread.random.images.create_destroy#2
+dEQP-EGL.functional.sharing.gles2.multithread.random.images.create_destroy#3
+dEQP-EGL.functional.sharing.gles2.multithread.random.images.create_destroy#5
+dEQP-EGL.functional.sharing.gles2.multithread.random.images.create_destroy#7
+dEQP-EGL.functional.sharing.gles2.multithread.random.images.create_destroy#8
+dEQP-EGL.functional.sharing.gles2.multithread.random.images.create_destroy#10
+dEQP-EGL.functional.sharing.gles2.multithread.random.images.create_destroy#12
+dEQP-EGL.functional.sharing.gles2.multithread.random.images.create_destroy#14
+dEQP-EGL.functional.sharing.gles2.multithread.random.images.create_destroy#16
+dEQP-EGL.functional.sharing.gles2.multithread.random.images.create_destroy#18
+dEQP-EGL.functional.sharing.gles2.multithread.random.images.create_destroy#19
+dEQP-EGL.functional.sharing.gles2.multithread.random.images.teximage2d#0
+dEQP-EGL.functional.sharing.gles2.multithread.random.images.teximage2d#1
+dEQP-EGL.functional.sharing.gles2.multithread.random.images.teximage2d#2
+dEQP-EGL.functional.sharing.gles2.multithread.random.images.teximage2d#4
+dEQP-EGL.functional.sharing.gles2.multithread.random.images.teximage2d#6
+dEQP-EGL.functional.sharing.gles2.multithread.random.images.teximage2d#7
+dEQP-EGL.functional.sharing.gles2.multithread.random.images.teximage2d#9
+dEQP-EGL.functional.sharing.gles2.multithread.random.images.teximage2d#11
+dEQP-EGL.functional.sharing.gles2.multithread.random.images.teximage2d#12
+dEQP-EGL.functional.sharing.gles2.multithread.random.images.teximage2d#13
+dEQP-EGL.functional.sharing.gles2.multithread.random.images.teximage2d#14
+dEQP-EGL.functional.sharing.gles2.multithread.random.images.teximage2d#16
+dEQP-EGL.functional.sharing.gles2.multithread.random.images.teximage2d#18
+dEQP-EGL.functional.sharing.gles2.multithread.random.images.texsubimage2d#2
+dEQP-EGL.functional.sharing.gles2.multithread.random.images.texsubimage2d#3
+dEQP-EGL.functional.sharing.gles2.multithread.random.images.texsubimage2d#7
+dEQP-EGL.functional.sharing.gles2.multithread.random.images.texsubimage2d#11
+dEQP-EGL.functional.sharing.gles2.multithread.random.images.texsubimage2d#12
+dEQP-EGL.functional.sharing.gles2.multithread.random.images.texsubimage2d#13
+dEQP-EGL.functional.sharing.gles2.multithread.simple_egl_sync.buffers#gen_delete
+dEQP-EGL.functional.sharing.gles2.multithread.simple_egl_sync.buffers#bufferdata
+dEQP-EGL.functional.sharing.gles2.multithread.simple_egl_sync.buffers#buffersubdata
+dEQP-EGL.functional.sharing.gles2.multithread.simple_egl_sync.buffers#bufferdata_render
+dEQP-EGL.functional.sharing.gles2.multithread.simple_egl_sync.textures#gen_delete
+dEQP-EGL.functional.sharing.gles2.multithread.simple_egl_sync.textures#teximage2d
+dEQP-EGL.functional.sharing.gles2.multithread.simple_egl_sync.textures#teximage2d_texsubimage2d
+dEQP-EGL.functional.sharing.gles2.multithread.simple_egl_sync.textures#teximage2d_texsubimage2d_render
+dEQP-EGL.functional.sharing.gles2.multithread.simple_egl_sync.images.texture_source#texsubimage2d
+dEQP-EGL.functional.sharing.gles2.multithread.simple_egl_sync.images.texture_source#teximage2d
+dEQP-EGL.functional.sharing.gles2.multithread.simple_egl_sync.images.texture_source#create_texture_render
+dEQP-EGL.functional.sharing.gles2.multithread.simple_egl_sync.images.texture_source#teximage2d_render
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.textures.gen_delete#0
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.textures.gen_delete#1
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.textures.gen_delete#2
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.textures.gen_delete#3
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.textures.gen_delete#4
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.textures.gen_delete#5
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.textures.gen_delete#6
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.textures.gen_delete#7
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.textures.gen_delete#8
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.textures.gen_delete#9
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.textures.gen_delete#10
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.textures.gen_delete#11
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.textures.gen_delete#12
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.textures.gen_delete#13
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.textures.gen_delete#14
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.textures.gen_delete#15
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.textures.gen_delete#16
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.textures.gen_delete#17
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.textures.gen_delete#18
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.textures.gen_delete#19
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.textures.teximage2d#0
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.textures.teximage2d#1
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.textures.teximage2d#2
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.textures.teximage2d#3
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.textures.teximage2d#4
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.textures.teximage2d#5
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.textures.teximage2d#6
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.textures.teximage2d#7
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.textures.teximage2d#8
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.textures.teximage2d#9
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.textures.teximage2d#10
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.textures.teximage2d#11
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.textures.teximage2d#12
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.textures.teximage2d#13
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.textures.teximage2d#14
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.textures.teximage2d#15
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.textures.teximage2d#16
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.textures.teximage2d#17
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.textures.teximage2d#18
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.textures.teximage2d#19
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.textures.texsubimage2d#0
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.textures.texsubimage2d#1
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.textures.texsubimage2d#2
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.textures.texsubimage2d#3
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.textures.texsubimage2d#5
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.textures.texsubimage2d#7
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.textures.texsubimage2d#8
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.textures.texsubimage2d#9
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.textures.texsubimage2d#10
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.textures.texsubimage2d#11
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.textures.texsubimage2d#12
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.textures.texsubimage2d#13
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.textures.texsubimage2d#14
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.textures.texsubimage2d#15
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.textures.texsubimage2d#16
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.textures.texsubimage2d#17
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.textures.texsubimage2d#18
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.textures.texsubimage2d#19
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.buffers.gen_delete#0
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.buffers.gen_delete#1
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.buffers.gen_delete#2
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.buffers.gen_delete#3
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.buffers.gen_delete#4
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.buffers.gen_delete#5
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.buffers.gen_delete#6
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.buffers.gen_delete#7
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.buffers.gen_delete#8
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.buffers.gen_delete#10
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.buffers.gen_delete#11
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.buffers.gen_delete#12
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.buffers.gen_delete#13
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.buffers.gen_delete#14
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.buffers.gen_delete#15
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.buffers.gen_delete#16
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.buffers.gen_delete#17
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.buffers.gen_delete#18
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.buffers.gen_delete#19
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.buffers.bufferdata#0
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.buffers.bufferdata#1
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.buffers.bufferdata#2
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.buffers.bufferdata#3
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.buffers.bufferdata#4
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.buffers.bufferdata#5
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.buffers.bufferdata#6
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.buffers.bufferdata#7
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.buffers.bufferdata#8
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.buffers.bufferdata#9
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.buffers.bufferdata#10
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.buffers.bufferdata#11
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.buffers.bufferdata#12
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.buffers.bufferdata#13
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.buffers.bufferdata#14
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.buffers.bufferdata#15
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.buffers.bufferdata#16
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.buffers.bufferdata#17
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.buffers.bufferdata#18
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.buffers.bufferdata#19
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.buffers.buffersubdata#0
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.buffers.buffersubdata#1
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.buffers.buffersubdata#2
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.buffers.buffersubdata#3
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.buffers.buffersubdata#4
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.buffers.buffersubdata#5
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.buffers.buffersubdata#6
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.buffers.buffersubdata#7
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.buffers.buffersubdata#8
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.buffers.buffersubdata#10
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.buffers.buffersubdata#11
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.buffers.buffersubdata#12
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.buffers.buffersubdata#13
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.buffers.buffersubdata#14
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.buffers.buffersubdata#15
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.buffers.buffersubdata#16
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.buffers.buffersubdata#17
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.buffers.buffersubdata#18
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.buffers.buffersubdata#19
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.shaders.create_destroy#0
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.shaders.create_destroy#1
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.shaders.create_destroy#2
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.shaders.create_destroy#3
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.shaders.create_destroy#4
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.shaders.create_destroy#5
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.shaders.create_destroy#6
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.shaders.create_destroy#7
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.shaders.create_destroy#8
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.shaders.create_destroy#9
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.shaders.create_destroy#10
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.shaders.create_destroy#11
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.shaders.create_destroy#13
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.shaders.create_destroy#14
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.shaders.create_destroy#15
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.shaders.create_destroy#16
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.shaders.create_destroy#17
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.shaders.create_destroy#18
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.shaders.create_destroy#19
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.shaders.source#0
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.shaders.source#1
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.shaders.source#2
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.shaders.source#3
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.shaders.source#4
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.shaders.source#5
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.shaders.source#6
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.shaders.source#7
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.shaders.source#8
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.shaders.source#9
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.shaders.source#10
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.shaders.source#11
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.shaders.source#12
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.shaders.source#13
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.shaders.source#14
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.shaders.source#15
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.shaders.source#16
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.shaders.source#17
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.shaders.source#18
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.shaders.source#19
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.shaders.compile#0
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.shaders.compile#1
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.shaders.compile#2
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.shaders.compile#3
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.shaders.compile#4
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.shaders.compile#5
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.shaders.compile#6
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.shaders.compile#7
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.shaders.compile#8
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.shaders.compile#9
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.shaders.compile#10
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.shaders.compile#11
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.shaders.compile#12
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.shaders.compile#13
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.shaders.compile#14
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.shaders.compile#15
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.shaders.compile#16
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.shaders.compile#17
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.shaders.compile#18
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.shaders.compile#19
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.programs.create_destroy#0
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.programs.create_destroy#2
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.programs.create_destroy#3
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.programs.create_destroy#4
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.programs.create_destroy#5
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.programs.create_destroy#6
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.programs.create_destroy#7
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.programs.create_destroy#8
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.programs.create_destroy#9
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.programs.create_destroy#10
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.programs.create_destroy#11
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.programs.create_destroy#12
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.programs.create_destroy#13
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.programs.create_destroy#15
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.programs.create_destroy#16
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.programs.create_destroy#18
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.programs.create_destroy#19
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.programs.attach_detach#0
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.programs.attach_detach#1
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.programs.attach_detach#2
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.programs.attach_detach#5
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.programs.attach_detach#6
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.programs.attach_detach#7
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.programs.attach_detach#8
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.programs.attach_detach#9
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.programs.attach_detach#10
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.programs.attach_detach#11
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.programs.attach_detach#12
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.programs.attach_detach#13
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.programs.attach_detach#14
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.programs.attach_detach#15
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.programs.attach_detach#16
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.programs.attach_detach#17
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.programs.link#0
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.programs.link#2
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.programs.link#3
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.programs.link#4
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.programs.link#5
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.programs.link#7
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.programs.link#8
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.programs.link#9
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.programs.link#11
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.programs.link#12
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.programs.link#13
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.programs.link#15
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.programs.link#18
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.images.create_destroy#5
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.images.create_destroy#8
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.images.create_destroy#10
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.images.create_destroy#12
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.images.teximage2d#0
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.images.teximage2d#5
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.images.teximage2d#15
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.images.teximage2d#18
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_sync.images.texsubimage2d#11
+dEQP-EGL.functional.sharing.gles2.multithread.simple_egl_server_sync.buffers#gen_delete
+dEQP-EGL.functional.sharing.gles2.multithread.simple_egl_server_sync.buffers#bufferdata
+dEQP-EGL.functional.sharing.gles2.multithread.simple_egl_server_sync.buffers#buffersubdata
+dEQP-EGL.functional.sharing.gles2.multithread.simple_egl_server_sync.textures#gen_delete
+dEQP-EGL.functional.sharing.gles2.multithread.simple_egl_server_sync.textures#teximage2d
+dEQP-EGL.functional.sharing.gles2.multithread.simple_egl_server_sync.textures#teximage2d_texsubimage2d
+dEQP-EGL.functional.sharing.gles2.multithread.simple_egl_server_sync.shaders#vtx_create_destroy
+dEQP-EGL.functional.sharing.gles2.multithread.simple_egl_server_sync.shaders#vtx_shadersource
+dEQP-EGL.functional.sharing.gles2.multithread.simple_egl_server_sync.shaders#vtx_compile
+dEQP-EGL.functional.sharing.gles2.multithread.simple_egl_server_sync.shaders#frag_create_destroy
+dEQP-EGL.functional.sharing.gles2.multithread.simple_egl_server_sync.shaders#frag_shadersource
+dEQP-EGL.functional.sharing.gles2.multithread.simple_egl_server_sync.shaders#frag_compile
+dEQP-EGL.functional.sharing.gles2.multithread.simple_egl_server_sync.images.texture_source#create_destroy
+dEQP-EGL.functional.sharing.gles2.multithread.simple_egl_server_sync.images.texture_source#create_texture
+dEQP-EGL.functional.sharing.gles2.multithread.simple_egl_server_sync.images.texture_source#texsubimage2d
+dEQP-EGL.functional.sharing.gles2.multithread.simple_egl_server_sync.images.texture_source#teximage2d
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.textures.gen_delete#0
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.textures.gen_delete#1
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.textures.gen_delete#2
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.textures.gen_delete#3
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.textures.gen_delete#4
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.textures.gen_delete#5
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.textures.gen_delete#6
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.textures.gen_delete#7
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.textures.gen_delete#8
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.textures.gen_delete#9
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.textures.gen_delete#10
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.textures.gen_delete#11
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.textures.gen_delete#12
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.textures.gen_delete#13
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.textures.gen_delete#14
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.textures.gen_delete#15
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.textures.gen_delete#16
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.textures.gen_delete#17
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.textures.gen_delete#18
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.textures.gen_delete#19
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.textures.teximage2d#0
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.textures.teximage2d#1
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.textures.teximage2d#2
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.textures.teximage2d#4
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.textures.teximage2d#5
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.textures.teximage2d#6
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.textures.teximage2d#7
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.textures.teximage2d#8
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.textures.teximage2d#9
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.textures.teximage2d#10
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.textures.teximage2d#11
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.textures.teximage2d#12
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.textures.teximage2d#13
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.textures.teximage2d#14
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.textures.teximage2d#15
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.textures.teximage2d#16
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.textures.teximage2d#17
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.textures.teximage2d#18
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.textures.teximage2d#19
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.textures.texsubimage2d#0
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.textures.texsubimage2d#1
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.textures.texsubimage2d#2
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.textures.texsubimage2d#3
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.textures.texsubimage2d#4
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.textures.texsubimage2d#5
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.textures.texsubimage2d#6
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.textures.texsubimage2d#7
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.textures.texsubimage2d#8
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.textures.texsubimage2d#9
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.textures.texsubimage2d#10
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.textures.texsubimage2d#11
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.textures.texsubimage2d#12
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.textures.texsubimage2d#13
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.textures.texsubimage2d#14
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.textures.texsubimage2d#15
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.textures.texsubimage2d#16
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.textures.texsubimage2d#17
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.textures.texsubimage2d#18
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.textures.texsubimage2d#19
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.buffers.gen_delete#0
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.buffers.gen_delete#1
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.buffers.gen_delete#2
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.buffers.gen_delete#3
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.buffers.gen_delete#4
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.buffers.gen_delete#5
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.buffers.gen_delete#6
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.buffers.gen_delete#7
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.buffers.gen_delete#8
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.buffers.gen_delete#9
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.buffers.gen_delete#10
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.buffers.gen_delete#11
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.buffers.gen_delete#12
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.buffers.gen_delete#13
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.buffers.gen_delete#14
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.buffers.gen_delete#15
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.buffers.gen_delete#16
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.buffers.gen_delete#17
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.buffers.gen_delete#18
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.buffers.gen_delete#19
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.buffers.bufferdata#0
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.buffers.bufferdata#1
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.buffers.bufferdata#2
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.buffers.bufferdata#3
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.buffers.bufferdata#4
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.buffers.bufferdata#5
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.buffers.bufferdata#6
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.buffers.bufferdata#7
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.buffers.bufferdata#8
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.buffers.bufferdata#9
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.buffers.bufferdata#10
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.buffers.bufferdata#11
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.buffers.bufferdata#13
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.buffers.bufferdata#14
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.buffers.bufferdata#15
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.buffers.bufferdata#16
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.buffers.bufferdata#17
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.buffers.bufferdata#18
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.buffers.bufferdata#19
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.buffers.buffersubdata#0
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.buffers.buffersubdata#1
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.buffers.buffersubdata#2
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.buffers.buffersubdata#3
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.buffers.buffersubdata#4
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.buffers.buffersubdata#5
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.buffers.buffersubdata#6
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.buffers.buffersubdata#7
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.buffers.buffersubdata#8
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.buffers.buffersubdata#9
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.buffers.buffersubdata#10
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.buffers.buffersubdata#11
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.buffers.buffersubdata#12
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.buffers.buffersubdata#13
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.buffers.buffersubdata#14
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.buffers.buffersubdata#15
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.buffers.buffersubdata#16
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.buffers.buffersubdata#17
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.buffers.buffersubdata#18
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.buffers.buffersubdata#19
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.shaders.create_destroy#0
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.shaders.create_destroy#1
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.shaders.create_destroy#2
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.shaders.create_destroy#3
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.shaders.create_destroy#4
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.shaders.create_destroy#5
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.shaders.create_destroy#6
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.shaders.create_destroy#7
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.shaders.create_destroy#8
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.shaders.create_destroy#9
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.shaders.create_destroy#10
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.shaders.create_destroy#11
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.shaders.create_destroy#12
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.shaders.create_destroy#13
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.shaders.create_destroy#14
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.shaders.create_destroy#15
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.shaders.create_destroy#16
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.shaders.create_destroy#17
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.shaders.create_destroy#18
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.shaders.create_destroy#19
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.shaders.source#0
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.shaders.source#1
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.shaders.source#2
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.shaders.source#3
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.shaders.source#4
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.shaders.source#5
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.shaders.source#6
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.shaders.source#7
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.shaders.source#8
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.shaders.source#9
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.shaders.source#10
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.shaders.source#11
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.shaders.source#12
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.shaders.source#13
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.shaders.source#14
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.shaders.source#15
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.shaders.source#16
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.shaders.source#17
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.shaders.source#18
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.shaders.source#19
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.shaders.compile#0
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.shaders.compile#1
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.shaders.compile#2
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.shaders.compile#3
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.shaders.compile#4
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.shaders.compile#5
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.shaders.compile#6
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.shaders.compile#7
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.shaders.compile#8
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.shaders.compile#9
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.shaders.compile#10
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.shaders.compile#11
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.shaders.compile#12
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.shaders.compile#13
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.shaders.compile#14
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.shaders.compile#15
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.shaders.compile#16
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.shaders.compile#17
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.shaders.compile#18
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.shaders.compile#19
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.programs.create_destroy#0
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.programs.create_destroy#1
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.programs.create_destroy#2
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.programs.create_destroy#3
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.programs.create_destroy#4
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.programs.create_destroy#5
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.programs.create_destroy#6
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.programs.create_destroy#7
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.programs.create_destroy#8
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.programs.create_destroy#9
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.programs.create_destroy#10
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.programs.create_destroy#11
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.programs.create_destroy#12
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.programs.create_destroy#13
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.programs.create_destroy#14
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.programs.create_destroy#15
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.programs.create_destroy#16
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.programs.create_destroy#18
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.programs.create_destroy#19
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.programs.attach_detach#0
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.programs.attach_detach#1
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.programs.attach_detach#2
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.programs.attach_detach#3
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.programs.attach_detach#4
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.programs.attach_detach#5
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.programs.attach_detach#6
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.programs.attach_detach#7
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.programs.attach_detach#8
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.programs.attach_detach#9
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.programs.attach_detach#10
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.programs.attach_detach#11
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.programs.attach_detach#12
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.programs.attach_detach#13
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.programs.attach_detach#14
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.programs.attach_detach#15
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.programs.attach_detach#16
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.programs.attach_detach#17
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.programs.attach_detach#18
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.programs.attach_detach#19
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.programs.link#0
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.programs.link#2
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.programs.link#4
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.programs.link#5
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.programs.link#6
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.programs.link#7
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.programs.link#8
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.programs.link#9
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.programs.link#11
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.programs.link#12
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.programs.link#13
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.programs.link#14
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.programs.link#15
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.programs.link#17
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.programs.link#18
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.images.create_destroy#1
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.images.create_destroy#2
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.images.create_destroy#3
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.images.create_destroy#5
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.images.create_destroy#7
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.images.create_destroy#9
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.images.create_destroy#10
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.images.create_destroy#11
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.images.create_destroy#12
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.images.create_destroy#14
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.images.create_destroy#17
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.images.create_destroy#19
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.images.teximage2d#0
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.images.teximage2d#1
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.images.teximage2d#3
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.images.teximage2d#4
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.images.teximage2d#5
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.images.teximage2d#6
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.images.teximage2d#7
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.images.teximage2d#8
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.images.teximage2d#9
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.images.teximage2d#10
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.images.teximage2d#14
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.images.teximage2d#15
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.images.teximage2d#17
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.images.teximage2d#19
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.images.texsubimage2d#0
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.images.texsubimage2d#2
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.images.texsubimage2d#3
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.images.texsubimage2d#5
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.images.texsubimage2d#7
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.images.texsubimage2d#8
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.images.texsubimage2d#10
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.images.texsubimage2d#12
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.images.texsubimage2d#13
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.images.texsubimage2d#14
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.images.texsubimage2d#16
+dEQP-EGL.functional.sharing.gles2.multithread.random_egl_server_sync.images.texsubimage2d#19
+dEQP-EGL.functional.multithread#config
+dEQP-EGL.functional.multithread#pbuffer
+dEQP-EGL.functional.multithread#pixmap
+dEQP-EGL.functional.multithread#single_window
+dEQP-EGL.functional.multithread#context
+dEQP-EGL.functional.multithread#pbuffer_pixmap
+dEQP-EGL.functional.multithread#pixmap_window
+dEQP-EGL.functional.multithread#pixmap_single_window
+dEQP-EGL.functional.multithread#pixmap_context
+dEQP-EGL.functional.multithread#pbuffer_pixmap_context
+dEQP-EGL.functional.multithread#pixmap_single_window_context
diff --git a/tests/tests/deqp/gles3-temporary-failures.txt b/tests/tests/deqp/gles3-temporary-failures.txt
index d8b3208..c2127a6 100644
--- a/tests/tests/deqp/gles3-temporary-failures.txt
+++ b/tests/tests/deqp/gles3-temporary-failures.txt
@@ -624,6 +624,14 @@
dEQP-GLES3.functional.shaders.builtin_functions.precision.acosh.highp_vertex#vec2
dEQP-GLES3.functional.shaders.builtin_functions.precision.acosh.highp_vertex#vec3
dEQP-GLES3.functional.shaders.builtin_functions.precision.acosh.highp_vertex#vec4
+dEQP-GLES3.functional.shaders.builtin_functions.precision.atanh.highp_vertex#scalar
+dEQP-GLES3.functional.shaders.builtin_functions.precision.atanh.highp_vertex#vec2
+dEQP-GLES3.functional.shaders.builtin_functions.precision.atanh.highp_vertex#vec3
+dEQP-GLES3.functional.shaders.builtin_functions.precision.atanh.highp_vertex#vec4
+dEQP-GLES3.functional.shaders.builtin_functions.precision.atanh.highp_fragment#scalar
+dEQP-GLES3.functional.shaders.builtin_functions.precision.atanh.highp_fragment#vec2
+dEQP-GLES3.functional.shaders.builtin_functions.precision.atanh.highp_fragment#vec3
+dEQP-GLES3.functional.shaders.builtin_functions.precision.atanh.highp_fragment#vec4
dEQP-GLES3.functional.shaders.builtin_functions.precision.asin.highp_fragment#scalar
dEQP-GLES3.functional.shaders.builtin_functions.precision.asin.highp_fragment#vec2
dEQP-GLES3.functional.shaders.builtin_functions.precision.asin.highp_fragment#vec3
diff --git a/tests/tests/deqp/gles31-temporary-failures.txt b/tests/tests/deqp/gles31-temporary-failures.txt
index b46ab1e..c3986eb 100644
--- a/tests/tests/deqp/gles31-temporary-failures.txt
+++ b/tests/tests/deqp/gles31-temporary-failures.txt
@@ -387,6 +387,38 @@
dEQP-GLES31.functional.copy_image.compressed.viewclass_etc2_rgb.srgb8_etc2_srgb8_etc2#cubemap_to_cubemap
dEQP-GLES31.functional.copy_image.compressed.viewclass_etc2_rgb.srgb8_etc2_srgb8_etc2#cubemap_to_texture2d
dEQP-GLES31.functional.copy_image.compressed.viewclass_etc2_rgb.srgb8_etc2_srgb8_etc2#texture2d_to_cubemap
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.srgb8_alpha8_astc_10x10_khr_rgba32i#texture2d_to_renderbuffer
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.srgb8_alpha8_astc_10x10_khr_rgba32ui#texture2d_to_renderbuffer
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.srgb8_alpha8_astc_10x5_khr_rgba32i#texture2d_to_renderbuffer
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.srgb8_alpha8_astc_10x5_khr_rgba32ui#texture2d_to_renderbuffer
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.srgb8_alpha8_astc_10x6_khr_rgba32i#texture2d_to_renderbuffer
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.srgb8_alpha8_astc_10x6_khr_rgba32ui#texture2d_to_renderbuffer
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.srgb8_alpha8_astc_10x8_khr_rgba32i#texture2d_to_renderbuffer
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.srgb8_alpha8_astc_10x8_khr_rgba32ui#texture2d_to_renderbuffer
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.srgb8_alpha8_astc_12x10_khr_rgba32i#texture2d_to_renderbuffer
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.srgb8_alpha8_astc_12x10_khr_rgba32ui#texture2d_to_renderbuffer
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.srgb8_alpha8_astc_12x12_khr_rgba32i#texture2d_to_renderbuffer
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.srgb8_alpha8_astc_12x12_khr_rgba32ui#texture2d_to_renderbuffer
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.srgb8_alpha8_astc_4x4_khr_rgba32i#texture2d_to_renderbuffer
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.srgb8_alpha8_astc_4x4_khr_rgba32ui#texture2d_to_renderbuffer
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.srgb8_alpha8_astc_5x4_khr_rgba32i#texture2d_to_renderbuffer
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.srgb8_alpha8_astc_5x4_khr_rgba32ui#texture2d_to_renderbuffer
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.srgb8_alpha8_astc_5x5_khr_rgba32i#texture2d_to_renderbuffer
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.srgb8_alpha8_astc_5x5_khr_rgba32ui#texture2d_to_renderbuffer
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.srgb8_alpha8_astc_6x5_khr_rgba32i#texture2d_to_renderbuffer
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.srgb8_alpha8_astc_6x5_khr_rgba32ui#texture2d_to_renderbuffer
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.srgb8_alpha8_astc_6x6_khr_rgba32i#texture2d_to_renderbuffer
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.srgb8_alpha8_astc_6x6_khr_rgba32ui#texture2d_to_renderbuffer
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.srgb8_alpha8_astc_8x5_khr_rgba32i#texture2d_to_renderbuffer
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.srgb8_alpha8_astc_8x5_khr_rgba32ui#texture2d_to_renderbuffer
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.srgb8_alpha8_astc_8x6_khr_rgba32i#texture2d_to_renderbuffer
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.srgb8_alpha8_astc_8x6_khr_rgba32ui#texture2d_to_renderbuffer
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.srgb8_alpha8_astc_8x8_khr_rgba32i#texture2d_to_renderbuffer
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.srgb8_alpha8_astc_8x8_khr_rgba32ui#texture2d_to_renderbuffer
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.srgb8_alpha8_etc2_eac_rgba32i#cubemap_to_renderbuffer
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.srgb8_alpha8_etc2_eac_rgba32i#texture2d_to_renderbuffer
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.srgb8_alpha8_etc2_eac_rgba32ui#cubemap_to_renderbuffer
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.srgb8_alpha8_etc2_eac_rgba32ui#texture2d_to_renderbuffer
dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rg11_eac_rgba32f#cubemap_to_cubemap
dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rg11_eac_rgba32f#cubemap_to_texture2d
dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rg11_eac_rgba32f#cubemap_to_texture2d_array
@@ -3240,6 +3272,30 @@
dEQP-GLES31.functional.copy_image.mixed.viewclass_64_bits_mixed.signed_r11_eac_rgba16ui#texture2d_to_texture2d
dEQP-GLES31.functional.copy_image.mixed.viewclass_64_bits_mixed.signed_r11_eac_rgba16ui#texture2d_to_texture2d_array
dEQP-GLES31.functional.copy_image.mixed.viewclass_64_bits_mixed.signed_r11_eac_rgba16ui#texture2d_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.srgb8_rgb8#cubemap_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r11f_g11f_b10f_srgb8_alpha8#cubemap_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32f_srgb8_alpha8#cubemap_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32i_srgb8_alpha8#cubemap_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32ui_srgb8_alpha8#cubemap_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16f_srgb8_alpha8#cubemap_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16i_srgb8_alpha8#cubemap_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16ui_srgb8_alpha8#cubemap_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2_srgb8_alpha8#cubemap_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2ui_srgb8_alpha8#cubemap_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb9_e5_srgb8_alpha8#cubemap_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_srgb8_alpha8#cubemap_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8i_srgb8_alpha8#cubemap_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8ui_srgb8_alpha8#cubemap_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.srgb8_alpha8_r32i#cubemap_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.srgb8_alpha8_r32ui#cubemap_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.srgb8_alpha8_rg16i#cubemap_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.srgb8_alpha8_rg16ui#cubemap_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.srgb8_alpha8_rgb10_a2#cubemap_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.srgb8_alpha8_rgb10_a2ui#cubemap_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.srgb8_alpha8_rgba8#cubemap_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.srgb8_alpha8_rgba8i#cubemap_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.srgb8_alpha8_rgba8ui#cubemap_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.srgb8_alpha8_srgb8_alpha8#cubemap_to_renderbuffer
dEQP-GLES31.functional.copy_image.non_compressed.viewclass_128_bits.rgba32f_rgba32f#cubemap_to_cubemap
dEQP-GLES31.functional.copy_image.non_compressed.viewclass_128_bits.rgba32f_rgba32f#cubemap_to_texture2d
dEQP-GLES31.functional.copy_image.non_compressed.viewclass_128_bits.rgba32f_rgba32f#cubemap_to_texture2d_array
@@ -6697,20 +6753,24 @@
dEQP-GLES31.functional.draw_buffers_indexed.overwrite_common#common_color_mask_buffer_color_mask
dEQP-GLES31.functional.draw_buffers_indexed.overwrite_common#common_separate_blend_func_buffer_blend_func
dEQP-GLES31.functional.draw_buffers_indexed.overwrite_common#common_separate_blend_func_buffer_separate_blend_func
-dEQP-GLES31.functional.draw_buffers_indexed.overwrite_indexed#common_blend_eq_buffer_advanced_blend_eq
+dEQP-GLES31.functional.draw_buffers_indexed.overwrite_indexed#common_enable_buffer_enable
+dEQP-GLES31.functional.draw_buffers_indexed.overwrite_indexed#common_disable_buffer_disable
+dEQP-GLES31.functional.draw_buffers_indexed.overwrite_indexed#common_disable_buffer_enable
+dEQP-GLES31.functional.draw_buffers_indexed.overwrite_indexed#common_enable_buffer_disable
dEQP-GLES31.functional.draw_buffers_indexed.overwrite_indexed#common_blend_eq_buffer_blend_eq
dEQP-GLES31.functional.draw_buffers_indexed.overwrite_indexed#common_blend_eq_buffer_separate_blend_eq
-dEQP-GLES31.functional.draw_buffers_indexed.overwrite_indexed#common_blend_func_buffer_blend_func
-dEQP-GLES31.functional.draw_buffers_indexed.overwrite_indexed#common_blend_func_buffer_separate_blend_func
-dEQP-GLES31.functional.draw_buffers_indexed.overwrite_indexed#common_color_mask_buffer_color_mask
-dEQP-GLES31.functional.draw_buffers_indexed.overwrite_indexed#common_disable_buffer_disable
-dEQP-GLES31.functional.draw_buffers_indexed.overwrite_indexed#common_enable_buffer_disable
-dEQP-GLES31.functional.draw_buffers_indexed.overwrite_indexed#common_enable_buffer_enable
-dEQP-GLES31.functional.draw_buffers_indexed.overwrite_indexed#common_separate_blend_eq_buffer_advanced_blend_eq
+dEQP-GLES31.functional.draw_buffers_indexed.overwrite_indexed#common_blend_eq_buffer_advanced_blend_eq
dEQP-GLES31.functional.draw_buffers_indexed.overwrite_indexed#common_separate_blend_eq_buffer_blend_eq
dEQP-GLES31.functional.draw_buffers_indexed.overwrite_indexed#common_separate_blend_eq_buffer_separate_blend_eq
+dEQP-GLES31.functional.draw_buffers_indexed.overwrite_indexed#common_separate_blend_eq_buffer_advanced_blend_eq
+dEQP-GLES31.functional.draw_buffers_indexed.overwrite_indexed#common_advanced_blend_eq_buffer_blend_eq
+dEQP-GLES31.functional.draw_buffers_indexed.overwrite_indexed#common_advanced_blend_eq_buffer_separate_blend_eq
+dEQP-GLES31.functional.draw_buffers_indexed.overwrite_indexed#common_advanced_blend_eq_buffer_advanced_blend_eq
+dEQP-GLES31.functional.draw_buffers_indexed.overwrite_indexed#common_blend_func_buffer_blend_func
+dEQP-GLES31.functional.draw_buffers_indexed.overwrite_indexed#common_blend_func_buffer_separate_blend_func
dEQP-GLES31.functional.draw_buffers_indexed.overwrite_indexed#common_separate_blend_func_buffer_blend_func
dEQP-GLES31.functional.draw_buffers_indexed.overwrite_indexed#common_separate_blend_func_buffer_separate_blend_func
+dEQP-GLES31.functional.draw_buffers_indexed.overwrite_indexed#common_color_mask_buffer_color_mask
dEQP-GLES31.functional.draw_buffers_indexed.random.max_implementation_draw_buffers#0
dEQP-GLES31.functional.draw_buffers_indexed.random.max_implementation_draw_buffers#1
dEQP-GLES31.functional.draw_buffers_indexed.random.max_implementation_draw_buffers#14
@@ -8418,6 +8478,10 @@
dEQP-GLES31.functional.shaders.builtin_functions.precision.acosh.highp_compute#vec2
dEQP-GLES31.functional.shaders.builtin_functions.precision.acosh.highp_compute#vec3
dEQP-GLES31.functional.shaders.builtin_functions.precision.acosh.highp_compute#vec4
+dEQP-GLES31.functional.shaders.builtin_functions.precision.atanh.highp_compute#scalar
+dEQP-GLES31.functional.shaders.builtin_functions.precision.atanh.highp_compute#vec2
+dEQP-GLES31.functional.shaders.builtin_functions.precision.atanh.highp_compute#vec3
+dEQP-GLES31.functional.shaders.builtin_functions.precision.atanh.highp_compute#vec4
dEQP-GLES31.functional.shaders.builtin_functions.precision.asin.highp_compute#scalar
dEQP-GLES31.functional.shaders.builtin_functions.precision.asin.highp_compute#vec2
dEQP-GLES31.functional.shaders.builtin_functions.precision.asin.highp_compute#vec3
@@ -8683,6 +8747,10 @@
dEQP-GLES31.functional.shaders.uniform_block.invalid#repeated_block_vertex
dEQP-GLES31.functional.shaders.uniform_block.invalid#too_long_block_name_fragment
dEQP-GLES31.functional.shaders.uniform_block.invalid#too_long_block_name_vertex
+dEQP-GLES31.functional.shaders.uniform_block.invalid#global_layout_std430_fragment
+dEQP-GLES31.functional.shaders.uniform_block.invalid#global_layout_std430_vertex
+dEQP-GLES31.functional.shaders.uniform_block.invalid#structure_definition_fragment
+dEQP-GLES31.functional.shaders.uniform_block.invalid#structure_definition_vertex
dEQP-GLES31.functional.shaders.uniform_block.valid#member_layout_all_8_times_fragment
dEQP-GLES31.functional.shaders.uniform_block.valid#member_layout_all_8_times_vertex
dEQP-GLES31.functional.shaders.uniform_block.valid#member_layout_all_fragment
diff --git a/tests/tests/display/AndroidManifest.xml b/tests/tests/display/AndroidManifest.xml
index 0b24754..bf84219 100644
--- a/tests/tests/display/AndroidManifest.xml
+++ b/tests/tests/display/AndroidManifest.xml
@@ -19,6 +19,8 @@
package="com.android.cts.display">
<uses-permission android:name="android.permission.DISABLE_KEYGUARD" />
+ <!-- For special presentation windows when testing mode switches. -->
+ <uses-permission android:name="android.permission.SYSTEM_ALERT_WINDOW" />
<application>
<uses-library android:name="android.test.runner" />
diff --git a/tests/tests/display/AndroidTest.xml b/tests/tests/display/AndroidTest.xml
index 80d33b0..dd42984 100644
--- a/tests/tests/display/AndroidTest.xml
+++ b/tests/tests/display/AndroidTest.xml
@@ -16,6 +16,6 @@
<configuration description="Base config for CTS package preparer">
<include name="common-config" />
<!-- Use a non-standard pattern, must match values in tests/tests/display/.../DisplayTest.java -->
- <option name="run-command:run-command" value="settings put global overlay_display_devices 181x161/214" />
+ <option name="run-command:run-command" value="settings put global overlay_display_devices '181x161/214|181x161/214'" />
<option name="run-command:teardown-command" value="settings put global overlay_display_devices """ />
</configuration>
diff --git a/tests/tests/display/src/android/display/cts/DisplayTest.java b/tests/tests/display/src/android/display/cts/DisplayTest.java
index bea99ed..112710e 100644
--- a/tests/tests/display/src/android/display/cts/DisplayTest.java
+++ b/tests/tests/display/src/android/display/cts/DisplayTest.java
@@ -16,18 +16,29 @@
package android.display.cts;
+import android.app.Presentation;
import android.content.Context;
+import android.graphics.Color;
import android.graphics.PixelFormat;
import android.graphics.Point;
import android.hardware.display.DisplayManager;
+import android.hardware.display.DisplayManager.DisplayListener;
+import android.os.Bundle;
+import android.os.Handler;
+import android.os.Looper;
import android.test.AndroidTestCase;
import android.util.DisplayMetrics;
import android.view.Display;
+import android.view.View;
+import android.view.ViewGroup;
import android.view.WindowManager;
+import java.util.concurrent.CountDownLatch;
+import java.util.concurrent.TimeUnit;
+
public class DisplayTest extends AndroidTestCase {
- // This test is called from DisplayTestRunner which brings up an overlay display on the target
- // device. The overlay display parameters must match the ones defined there which are
+ // The CTS package brings up an overlay display on the target device (see AndroidTest.xml).
+ // The overlay display parameters must match the ones defined there which are
// 181x161/214 (wxh/dpi). It only matters that these values are different from any real
// display.
@@ -44,6 +55,9 @@
private DisplayManager mDisplayManager;
private WindowManager mWindowManager;
+ // To test display mode switches.
+ private TestPresentation mPresentation;
+
@Override
protected void setUp() throws Exception {
super.setUp();
@@ -177,4 +191,105 @@
assertEquals(Display.FLAG_PRESENTATION, display.getFlags());
}
+
+ /**
+ * Tests that the mode-related attributes and methods work as expected.
+ */
+ public void testMode() {
+ Display display = getSecondaryDisplay(mDisplayManager.getDisplays());
+ assertEquals(2, display.getSupportedModes().length);
+ Display.Mode mode = display.getMode();
+ assertEquals(display.getSupportedModes()[0], mode);
+ assertEquals(SECONDARY_DISPLAY_WIDTH, mode.getPhysicalWidth());
+ assertEquals(SECONDARY_DISPLAY_HEIGHT, mode.getPhysicalHeight());
+ assertEquals(display.getRefreshRate(), mode.getRefreshRate());
+ }
+
+ /**
+ * Tests that mode switch requests are correctly executed.
+ */
+ public void testModeSwitch() throws Exception {
+ final Display display = getSecondaryDisplay(mDisplayManager.getDisplays());
+ Display.Mode[] modes = display.getSupportedModes();
+ assertEquals(2, modes.length);
+ Display.Mode mode = display.getMode();
+ assertEquals(modes[0], mode);
+ final Display.Mode newMode = modes[1];
+
+ Handler handler = new Handler(Looper.getMainLooper());
+
+ // Register for display events.
+ final CountDownLatch changeSignal = new CountDownLatch(1);
+ mDisplayManager.registerDisplayListener(new DisplayListener() {
+ @Override
+ public void onDisplayAdded(int displayId) {}
+
+ @Override
+ public void onDisplayChanged(int displayId) {
+ if (displayId == display.getDisplayId()) {
+ changeSignal.countDown();
+ }
+ }
+
+ @Override
+ public void onDisplayRemoved(int displayId) {}
+ }, handler);
+
+ // Show the presentation.
+ final CountDownLatch presentationSignal = new CountDownLatch(1);
+ handler.post(new Runnable() {
+ @Override
+ public void run() {
+ mPresentation = new TestPresentation(
+ getContext(), display, newMode.getModeId());
+ mPresentation.show();
+ presentationSignal.countDown();
+ }
+ });
+ assertTrue(presentationSignal.await(5, TimeUnit.SECONDS));
+
+ // Wait until the display change is effective.
+ assertTrue(changeSignal.await(5, TimeUnit.SECONDS));
+
+ assertEquals(newMode, display.getMode());
+ handler.post(new Runnable() {
+ @Override
+ public void run() {
+ mPresentation.dismiss();
+ }
+ });
+ }
+
+ /**
+ * Used to force mode changes on a display.
+ * <p>
+ * Note that due to limitations of the Presentation class, the modes must have the same size,
+ * otherwise the presentation will be automatically dismissed.
+ */
+ private static final class TestPresentation extends Presentation {
+
+ private final int mModeId;
+
+ public TestPresentation(Context context, Display display, int modeId) {
+ super(context, display);
+ mModeId = modeId;
+ }
+
+ @Override
+ protected void onCreate(Bundle savedInstanceState) {
+ super.onCreate(savedInstanceState);
+
+ View content = new View(getContext());
+ content.setLayoutParams(new ViewGroup.LayoutParams(
+ ViewGroup.LayoutParams.MATCH_PARENT, ViewGroup.LayoutParams.MATCH_PARENT));
+ content.setBackgroundColor(Color.RED);
+ setContentView(content);
+
+ WindowManager.LayoutParams params = getWindow().getAttributes();
+ params.preferredDisplayModeId = mModeId;
+ params.type = WindowManager.LayoutParams.TYPE_SYSTEM_ALERT;
+ params.setTitle("CtsTestPresentation");
+ getWindow().setAttributes(params);
+ }
+ }
}
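Distilled from the testModeSwitch() code added above, the pattern an application uses to request a mode switch is small: pick an entry from Display.getSupportedModes(), set its id as preferredDisplayModeId on a window attached to that display, and wait for the display-changed callback. A minimal standalone sketch follows (the window can be any window on the target display; the Presentation is just the vehicle the test uses):

    import android.view.Display;
    import android.view.Window;
    import android.view.WindowManager;

    final class ModeSwitchSketch {
        /**
         * Sketch only: ask the system to drive the window's display with the mode at
         * modeIndex. The switch is asynchronous; callers should wait for
         * DisplayManager.DisplayListener.onDisplayChanged() before reading display.getMode(),
         * exactly as the test does with its CountDownLatch.
         */
        static void requestMode(Display display, Window window, int modeIndex) {
            Display.Mode target = display.getSupportedModes()[modeIndex];

            WindowManager.LayoutParams params = window.getAttributes();
            params.preferredDisplayModeId = target.getModeId();   // 0 would mean "no preference"
            window.setAttributes(params);                          // honored while the window is visible
        }
    }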
diff --git a/tests/tests/graphics/res/values/styles.xml b/tests/tests/graphics/res/values/styles.xml
index 20c80f8..31ed175 100644
--- a/tests/tests/graphics/res/values/styles.xml
+++ b/tests/tests/graphics/res/values/styles.xml
@@ -163,6 +163,7 @@
<item name="themeNinePatch">@drawable/ninepatch_0</item>
<item name="themeGravity">48</item>
<item name="themeTileMode">2</item>
+ <item name="themeType">0</item>
</style>
<style name="Theme_NoSwipeDismiss">
diff --git a/tests/tests/graphics/src/android/graphics/drawable/cts/GradientDrawableTest.java b/tests/tests/graphics/src/android/graphics/drawable/cts/GradientDrawableTest.java
index e7a38c5..eeda22c 100644
--- a/tests/tests/graphics/src/android/graphics/drawable/cts/GradientDrawableTest.java
+++ b/tests/tests/graphics/src/android/graphics/drawable/cts/GradientDrawableTest.java
@@ -22,10 +22,8 @@
import org.xmlpull.v1.XmlPullParserException;
import android.content.res.Resources;
-import android.content.res.Resources.Theme;
import android.graphics.Canvas;
import android.graphics.ColorFilter;
-import android.graphics.PixelFormat;
import android.graphics.Rect;
import android.graphics.drawable.GradientDrawable;
import android.graphics.drawable.Drawable.ConstantState;
diff --git a/tests/tests/graphics/src/android/graphics/drawable/cts/ThemedDrawableTest.java b/tests/tests/graphics/src/android/graphics/drawable/cts/ThemedDrawableTest.java
index a967f95..d7becc6 100644
--- a/tests/tests/graphics/src/android/graphics/drawable/cts/ThemedDrawableTest.java
+++ b/tests/tests/graphics/src/android/graphics/drawable/cts/ThemedDrawableTest.java
@@ -17,6 +17,8 @@
package android.graphics.drawable.cts;
import android.annotation.TargetApi;
+import android.content.res.Resources.Theme;
+import android.content.res.TypedArray;
import android.graphics.Color;
import android.graphics.Rect;
import android.graphics.Shader.TileMode;
@@ -26,6 +28,7 @@
import android.graphics.drawable.LayerDrawable;
import android.graphics.drawable.NinePatchDrawable;
import android.graphics.drawable.RippleDrawable;
+import android.os.Debug;
import android.test.AndroidTestCase;
import android.view.Gravity;
@@ -38,7 +41,17 @@
protected void setUp() throws Exception {
super.setUp();
- mContext.setTheme(R.style.Theme_ThemedDrawableTest);
+ // Workaround for ContextImpl.setTheme() being broken.
+ final Theme theme = mContext.getResources().newTheme();
+ theme.applyStyle(R.style.Theme_ThemedDrawableTest, true);
+ final Theme ctxTheme = mContext.getTheme();
+ ctxTheme.setTo(theme);
+ }
+
+ @Override
+ public void testAndroidTestCaseSetupProperly() {
+ final TypedArray t = mContext.obtainStyledAttributes(new int[]{R.attr.themeType});
+ assertTrue("Theme was applied correctly", t.getInt(0, -1) == 0);
}
public void testBitmapDrawable() {
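The setUp() change above sidesteps ContextImpl.setTheme() by rebuilding the theme manually, and the overridden testAndroidTestCaseSetupProperly() probes a styled attribute to prove the theme actually took effect. The same workaround, written as a standalone sketch independent of the test harness (the style and attribute ids stand in for the test's own resources):

    import android.content.Context;
    import android.content.res.Resources.Theme;
    import android.content.res.TypedArray;

    final class ThemeWorkaroundSketch {
        /** Sketch only: force styleRes onto the context's theme, then read back one int attribute. */
        static int applyAndProbe(Context context, int styleRes, int probeAttr) {
            Theme fresh = context.getResources().newTheme();
            fresh.applyStyle(styleRes, true /* force */);
            context.getTheme().setTo(fresh);             // copy the rebuilt theme into the context

            TypedArray a = context.obtainStyledAttributes(new int[] { probeAttr });
            try {
                return a.getInt(0, -1);                  // -1 means the attribute did not resolve
            } finally {
                a.recycle();
            }
        }
    }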
diff --git a/tests/tests/hardware/Android.mk b/tests/tests/hardware/Android.mk
index 153445d..1c144ff 100644
--- a/tests/tests/hardware/Android.mk
+++ b/tests/tests/hardware/Android.mk
@@ -56,4 +56,4 @@
LOCAL_JAVA_LIBRARIES := android.test.runner
-include $(BUILD_CTS_PACKAGE)
+include $(BUILD_CTS_PACKAGE)
\ No newline at end of file
diff --git a/tests/tests/hardware/AndroidManifest.xml b/tests/tests/hardware/AndroidManifest.xml
index ab81162..7b15b61 100644
--- a/tests/tests/hardware/AndroidManifest.xml
+++ b/tests/tests/hardware/AndroidManifest.xml
@@ -25,6 +25,7 @@
<uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE" />
<uses-permission android:name="android.permission.BODY_SENSORS" />
<uses-permission android:name="android.permission.TRANSMIT_IR" />
+ <uses-permission android:name="android.permission.REORDER_TASKS" />
<application>
<uses-library android:name="android.test.runner" />
@@ -50,6 +51,26 @@
<activity android:name="android.hardware.cts.GLSurfaceViewCtsActivity"
android:label="GLSurfaceViewCtsActivity"/>
+ <service android:name="android.hardware.multiprocess.ErrorLoggingService"
+ android:label="ErrorLoggingService"
+ android:process=":errorLoggingServiceProcess"
+ android:exported="false">
+ </service>
+
+ <activity android:name="android.hardware.multiprocess.camera.cts.Camera1Activity"
+ android:label="RemoteCamera1Activity"
+ android:screenOrientation="landscape"
+ android:configChanges="keyboardHidden|orientation|screenSize"
+ android:process=":camera1ActivityProcess">
+ </activity>
+
+ <activity android:name="android.hardware.multiprocess.camera.cts.Camera2Activity"
+ android:label="RemoteCamera2Activity"
+ android:screenOrientation="landscape"
+ android:configChanges="keyboardHidden|orientation|screenSize"
+ android:process=":camera2ActivityProcess">
+ </activity>
+
</application>
<instrumentation android:name="android.support.test.runner.AndroidJUnitRunner"
diff --git a/tests/tests/hardware/AndroidTest.xml b/tests/tests/hardware/AndroidTest.xml
new file mode 100644
index 0000000..783eafe
--- /dev/null
+++ b/tests/tests/hardware/AndroidTest.xml
@@ -0,0 +1,22 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright (C) 2015 The Android Open Source Project
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<configuration description="Base config for Sensor CTS tests. Put SensorService in restricted mode">
+ <include name="common-config" />
+ <!-- Put SensorService in restricted mode so that only CTS tests will be able to get access to
+ sensors -->
+ <option name="run-command:run-command" value="dumpsys sensorservice restrict" />
+ <option name="run-command:teardown-command" value="dumpsys sensorservice enable" />
+</configuration>
diff --git a/tests/tests/hardware/src/android/hardware/camera2/cts/BurstCaptureTest.java b/tests/tests/hardware/src/android/hardware/camera2/cts/BurstCaptureTest.java
index da8ea65..2da8cdb 100644
--- a/tests/tests/hardware/src/android/hardware/camera2/cts/BurstCaptureTest.java
+++ b/tests/tests/hardware/src/android/hardware/camera2/cts/BurstCaptureTest.java
@@ -89,24 +89,13 @@
config.getOutputMinFrameDuration(ImageFormat.YUV_420_888, stillSize);
// Find suitable target FPS range - as high as possible
- Range<Integer>[] fpsRanges = mStaticInfo.getAeAvailableTargetFpsRangesChecked();
+ Range<Integer> targetRange = mStaticInfo.getAeMaxTargetFpsRange();
int minBurstFps = (int) Math.floor(1e9 / minStillFrameDuration);
- Range<Integer> targetRange = null;
- for (Range<Integer> candidateRange : fpsRanges) {
- if (candidateRange.getLower() >= minBurstFps) {
- if (targetRange == null) {
- targetRange = candidateRange;
- } else if (candidateRange.getLower() > targetRange.getLower()) {
- targetRange = candidateRange;
- } else if (candidateRange.getUpper() > targetRange.getUpper()) {
- targetRange = candidateRange;
- }
- }
- }
+
assertTrue(String.format("Cam %s: No target FPS range found with minimum FPS above " +
" 1/minFrameDuration (%d fps, duration %d ns) for full-resolution YUV",
- cameraId, minBurstFps, minStillFrameDuration),
- targetRange != null);
+ cameraId, minBurstFps, minStillFrameDuration),
+ targetRange.getLower() >= minBurstFps);
Log.i(TAG, String.format("Selected frame rate range %d - %d for YUV burst",
targetRange.getLower(), targetRange.getUpper()));
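
The inline range-selection loop removed above is folded into the StaticMetadata helper getAeMaxTargetFpsRange(). As a minimal illustrative sketch (the exact ordering used by the helper is assumed, not taken from this change), equivalent selection over CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES could look like this:

    import android.hardware.camera2.CameraCharacteristics;
    import android.util.Range;

    final class FpsRangeSketch {
        // Pick the "fastest" AE target FPS range: highest lower bound, ties broken
        // by the higher upper bound. Assumes at least one range is advertised.
        static Range<Integer> maxTargetFpsRange(CameraCharacteristics characteristics) {
            Range<Integer>[] ranges = characteristics.get(
                    CameraCharacteristics.CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES);
            Range<Integer> best = ranges[0];
            for (Range<Integer> candidate : ranges) {
                if (candidate.getLower() > best.getLower()
                        || (candidate.getLower().equals(best.getLower())
                                && candidate.getUpper() > best.getUpper())) {
                    best = candidate;
                }
            }
            return best;
        }
    }
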
diff --git a/tests/tests/hardware/src/android/hardware/camera2/cts/CameraDeviceTest.java b/tests/tests/hardware/src/android/hardware/camera2/cts/CameraDeviceTest.java
index 53ca31f..9bbcc43 100644
--- a/tests/tests/hardware/src/android/hardware/camera2/cts/CameraDeviceTest.java
+++ b/tests/tests/hardware/src/android/hardware/camera2/cts/CameraDeviceTest.java
@@ -23,6 +23,7 @@
import static android.hardware.camera2.CaptureRequest.*;
import android.content.Context;
+import android.graphics.SurfaceTexture;
import android.graphics.ImageFormat;
import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraCaptureSession;
@@ -380,11 +381,7 @@
closeSession();
}
finally {
- try {
-
- } finally {
- closeDevice(mCameraIds[i], mCameraMockListener);
- }
+ closeDevice(mCameraIds[i], mCameraMockListener);
}
}
}
@@ -581,6 +578,146 @@
}
}
+ /**
+ * Verify basic semantics and error conditions of the prepare call.
+ *
+ */
+ public void testPrepare() throws Exception {
+ for (int i = 0; i < mCameraIds.length; i++) {
+ try {
+ openDevice(mCameraIds[i], mCameraMockListener);
+ waitForDeviceState(STATE_OPENED, CAMERA_OPEN_TIMEOUT_MS);
+
+ prepareTestByCamera();
+ }
+ finally {
+ closeDevice(mCameraIds[i], mCameraMockListener);
+ }
+ }
+ }
+
+ private void prepareTestByCamera() throws Exception {
+ final int PREPARE_TIMEOUT_MS = 10000;
+
+ mSessionMockListener = spy(new BlockingSessionCallback());
+
+ SurfaceTexture output1 = new SurfaceTexture(1);
+ Surface output1Surface = new Surface(output1);
+ SurfaceTexture output2 = new SurfaceTexture(2);
+ Surface output2Surface = new Surface(output2);
+
+ List<Surface> outputSurfaces = new ArrayList<>(
+ Arrays.asList(output1Surface, output2Surface));
+ mCamera.createCaptureSession(outputSurfaces, mSessionMockListener, mHandler);
+
+ mSession = mSessionMockListener.waitAndGetSession(SESSION_CONFIGURE_TIMEOUT_MS);
+
+ // Try basic prepare
+
+ mSession.prepare(output1Surface);
+
+ verify(mSessionMockListener, timeout(PREPARE_TIMEOUT_MS).times(1))
+ .onSurfacePrepared(eq(mSession), eq(output1Surface));
+
+ // Should not complain when preparing an already-prepared stream
+
+ mSession.prepare(output1Surface);
+
+ verify(mSessionMockListener, timeout(PREPARE_TIMEOUT_MS).times(2))
+ .onSurfacePrepared(eq(mSession), eq(output1Surface));
+
+ // Check surface not included in session
+
+ SurfaceTexture output3 = new SurfaceTexture(3);
+ Surface output3Surface = new Surface(output3);
+ try {
+ mSession.prepare(output3Surface);
+ fail("Preparing surface not part of session must throw IllegalArgumentException");
+ } catch (IllegalArgumentException e) {
+ // expected
+ }
+
+ // Ensure second prepare also works
+
+ mSession.prepare(output2Surface);
+
+ verify(mSessionMockListener, timeout(PREPARE_TIMEOUT_MS).times(1))
+ .onSurfacePrepared(eq(mSession), eq(output2Surface));
+
+ // Use output1
+
+ CaptureRequest.Builder r = mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
+ r.addTarget(output1Surface);
+
+ mSession.capture(r.build(), null, null);
+
+ try {
+ mSession.prepare(output1Surface);
+ fail("Preparing already-used surface must throw IllegalArgumentException");
+ } catch (IllegalArgumentException e) {
+ // expected
+ }
+
+ // Create new session with outputs 1 and 3, ensure output1Surface still can't be prepared
+ // again
+
+ mSessionMockListener = spy(new BlockingSessionCallback());
+
+ outputSurfaces = new ArrayList<>(
+ Arrays.asList(output1Surface, output3Surface));
+ mCamera.createCaptureSession(outputSurfaces, mSessionMockListener, mHandler);
+
+ mSession = mSessionMockListener.waitAndGetSession(SESSION_CONFIGURE_TIMEOUT_MS);
+
+ try {
+ mSession.prepare(output1Surface);
+ fail("Preparing surface used in previous session must throw IllegalArgumentException");
+ } catch (IllegalArgumentException e) {
+ // expected
+ }
+
+ // Use output3, wait for result, then make sure prepare still doesn't work
+
+ r = mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
+ r.addTarget(output3Surface);
+
+ SimpleCaptureCallback resultListener = new SimpleCaptureCallback();
+ mSession.capture(r.build(), resultListener, mHandler);
+
+ resultListener.getCaptureResult(CAPTURE_RESULT_TIMEOUT_MS);
+
+ try {
+ mSession.prepare(output3Surface);
+ fail("Preparing already-used surface must throw IllegalArgumentException");
+ } catch (IllegalArgumentException e) {
+ // expected
+ }
+
+ // Create new session with outputs 1 and 2, ensure output2Surface can be prepared again
+
+ mSessionMockListener = spy(new BlockingSessionCallback());
+
+ outputSurfaces = new ArrayList<>(
+ Arrays.asList(output1Surface, output2Surface));
+ mCamera.createCaptureSession(outputSurfaces, mSessionMockListener, mHandler);
+
+ mSession = mSessionMockListener.waitAndGetSession(SESSION_CONFIGURE_TIMEOUT_MS);
+
+ mSession.prepare(output2Surface);
+
+ verify(mSessionMockListener, timeout(PREPARE_TIMEOUT_MS).times(1))
+ .onSurfacePrepared(eq(mSession), eq(output2Surface));
+
+ try {
+ mSession.prepare(output1Surface);
+ fail("Preparing surface used in previous session must throw IllegalArgumentException");
+ } catch (IllegalArgumentException e) {
+ // expected
+ }
+
+ }
+
+
private void invalidRequestCaptureTestByCamera() throws Exception {
if (VERBOSE) Log.v(TAG, "invalidRequestCaptureTestByCamera");
@@ -888,7 +1025,7 @@
mSession = mSessionMockListener.waitAndGetSession(SESSION_CONFIGURE_TIMEOUT_MS);
waitForSessionState(SESSION_CONFIGURED, SESSION_CONFIGURE_TIMEOUT_MS);
waitForSessionState(SESSION_READY, SESSION_READY_TIMEOUT_MS);
-}
+ }
private void waitForDeviceState(int state, long timeoutMs) {
mCameraMockListener.waitForState(state, timeoutMs);
@@ -1333,7 +1470,7 @@
// OK
} else if (template == CameraDevice.TEMPLATE_ZERO_SHUTTER_LAG &&
!mStaticInfo.isCapabilitySupported(CameraCharacteristics.
- REQUEST_AVAILABLE_CAPABILITIES_OPAQUE_REPROCESSING)) {
+ REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING)) {
// OK.
} else if (sLegacySkipTemplates.contains(template) &&
mStaticInfo.isHardwareLevelLegacy()) {
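
For readers unfamiliar with the surface-preparation API exercised by testPrepare() above, a minimal sketch of the expected flow; session, camera, previewSurface, and handler are placeholders for objects set up elsewhere:

    // Pre-allocate buffers for a session output before its first use; the session's
    // StateCallback receives onSurfacePrepared(session, previewSurface) when done.
    session.prepare(previewSurface);
    // ... wait for onSurfacePrepared before submitting requests that target the surface ...
    CaptureRequest.Builder builder = camera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
    builder.addTarget(previewSurface);
    session.capture(builder.build(), /*listener*/ null, handler);
    // Once a surface has been used as a request target, preparing it again is expected
    // to throw IllegalArgumentException, which is what the test asserts.
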
diff --git a/tests/tests/hardware/src/android/hardware/camera2/cts/CameraManagerTest.java b/tests/tests/hardware/src/android/hardware/camera2/cts/CameraManagerTest.java
index 27ff6d1..77a0c8e 100644
--- a/tests/tests/hardware/src/android/hardware/camera2/cts/CameraManagerTest.java
+++ b/tests/tests/hardware/src/android/hardware/camera2/cts/CameraManagerTest.java
@@ -422,38 +422,23 @@
new BlockingStateCallback(mockFailListener);
mCameraManager.openCamera(ids[i], successListener, mHandler);
-
- try {
- mCameraManager.openCamera(ids[i], failListener,
- mHandler);
- } catch (CameraAccessException e) {
- // Optional (but common). Camera might fail asynchronously only.
- // Don't assert here, otherwise, all subsequent tests will fail because the
- // opened camera is never closed.
- mCollector.expectEquals(
- "If second camera open fails immediately, must be due to"
- + "camera being busy for ID: " + ids[i],
- CameraAccessException.CAMERA_ERROR, e.getReason());
- }
+ mCameraManager.openCamera(ids[i], failListener,
+ mHandler);
successListener.waitForState(BlockingStateCallback.STATE_OPENED,
CameraTestUtils.CAMERA_IDLE_TIMEOUT_MS);
- // Have to get the successCamera here, otherwise, it won't be
- // closed if STATE_ERROR timeout exception occurs.
ArgumentCaptor<CameraDevice> argument =
ArgumentCaptor.forClass(CameraDevice.class);
verify(mockSuccessListener, atLeastOnce()).onOpened(argument.capture());
+ verify(mockSuccessListener, atLeastOnce()).onDisconnected(argument.capture());
- failListener.waitForState(BlockingStateCallback.STATE_ERROR,
+ failListener.waitForState(BlockingStateCallback.STATE_OPENED,
CameraTestUtils.CAMERA_IDLE_TIMEOUT_MS);
+ verify(mockFailListener, atLeastOnce()).onOpened(argument.capture());
successCamera = verifyCameraStateOpened(
- ids[i], mockSuccessListener);
+ ids[i], mockFailListener);
- verify(mockFailListener)
- .onError(
- and(notNull(CameraDevice.class), not(eq(successCamera))),
- eq(StateCallback.ERROR_CAMERA_IN_USE));
verifyNoMoreInteractions(mockFailListener);
} finally {
if (successCamera != null) {
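
The rewritten test reflects the behavior it now expects: a second openCamera() call on the same ID succeeds and the first device is disconnected, instead of the second call failing with ERROR_CAMERA_IN_USE. A small sketch of how a client is expected to react; the callback body is illustrative, not part of the change:

    CameraDevice.StateCallback callback = new CameraDevice.StateCallback() {
        @Override
        public void onOpened(CameraDevice camera) {
            // Start using the camera (create a capture session, etc.).
        }
        @Override
        public void onDisconnected(CameraDevice camera) {
            // Another open of the same camera took over this device; release it.
            camera.close();
        }
        @Override
        public void onError(CameraDevice camera, int error) {
            camera.close();
        }
    };
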
diff --git a/tests/tests/hardware/src/android/hardware/camera2/cts/CameraTestUtils.java b/tests/tests/hardware/src/android/hardware/camera2/cts/CameraTestUtils.java
index 9da74d1..d39ff1f 100644
--- a/tests/tests/hardware/src/android/hardware/camera2/cts/CameraTestUtils.java
+++ b/tests/tests/hardware/src/android/hardware/camera2/cts/CameraTestUtils.java
@@ -37,6 +37,7 @@
import android.hardware.camera2.params.StreamConfigurationMap;
import android.media.Image;
import android.media.ImageReader;
+import android.media.ImageWriter;
import android.media.Image.Plane;
import android.os.Handler;
import android.util.Log;
@@ -60,10 +61,12 @@
import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
+import java.util.HashMap;
import java.util.List;
-import java.util.concurrent.LinkedBlockingQueue;
-import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicLong;
+import java.util.concurrent.LinkedBlockingQueue;
+import java.util.concurrent.Semaphore;
+import java.util.concurrent.TimeUnit;
/**
* A package private utility class for wrapping up the camera2 cts test common utility functions
@@ -106,19 +109,31 @@
public static ImageReader makeImageReader(Size size, int format, int maxNumImages,
ImageReader.OnImageAvailableListener listener, Handler handler) {
ImageReader reader;
- if (format == ImageFormat.PRIVATE) {
- reader = ImageReader.newOpaqueInstance(size.getWidth(), size.getHeight(),
- maxNumImages);
- } else {
- reader = ImageReader.newInstance(size.getWidth(), size.getHeight(), format,
- maxNumImages);
- }
+ reader = ImageReader.newInstance(size.getWidth(), size.getHeight(), format,
+ maxNumImages);
reader.setOnImageAvailableListener(listener, handler);
if (VERBOSE) Log.v(TAG, "Created ImageReader size " + size);
return reader;
}
/**
+ * Create an ImageWriter and hook up the ImageListener.
+ *
+ * @param inputSurface The input surface of the ImageWriter.
+ * @param maxImages The max number of Images that can be dequeued simultaneously.
+ * @param listener The listener used by this ImageWriter to notify callbacks
+ * @param handler The handler to post listener callbacks.
+ * @return ImageWriter object created.
+ */
+ public static ImageWriter makeImageWriter(
+ Surface inputSurface, int maxImages,
+ ImageWriter.OnImageReleasedListener listener, Handler handler) {
+ ImageWriter writer = ImageWriter.newInstance(inputSurface, maxImages);
+ writer.setOnImageReleasedListener(listener, handler);
+ return writer;
+ }
+
+ /**
* Close pending images and clean up an {@link android.media.ImageReader} object.
* @param reader an {@link android.media.ImageReader} to close.
*/
@@ -129,6 +144,16 @@
}
/**
+ * Close pending images and clean up an {@link android.media.ImageWriter} object.
+ * @param writer an {@link android.media.ImageWriter} to close.
+ */
+ public static void closeImageWriter(ImageWriter writer) {
+ if (writer != null) {
+ writer.close();
+ }
+ }
+
+ /**
* Dummy listener that release the image immediately once it is available.
*
* <p>
@@ -179,11 +204,52 @@
implements ImageReader.OnImageAvailableListener {
private final LinkedBlockingQueue<Image> mQueue =
new LinkedBlockingQueue<Image>();
+ // Indicates whether this listener will drop images
+ // when the number of queued images reaches mMaxImages (async mode).
+ private final boolean mAsyncMode;
+ // maxImages held by the queue in async mode.
+ private final int mMaxImages;
+
+ /**
+ * Create a synchronous SimpleImageReaderListener that queues the images
+ * automatically when they are available, no image will be dropped. If
+ * the caller doesn't call getImage(), the producer will eventually run
+ * into buffer starvation.
+ */
+ public SimpleImageReaderListener() {
+ mAsyncMode = false;
+ mMaxImages = 0;
+ }
+
+ /**
+ * Create a synchronous/asynchronous SimpleImageReaderListener that
+ * queues the images automatically when they are available. For
+ * asynchronous listener, image will be dropped if the queued images
+ * reach to maxImages queued. If the caller doesn't call getImage(), the
+ * producer will not be blocked. For synchronous listener, no image will
+ * be dropped. If the caller doesn't call getImage(), the producer will
+ * eventually run into buffer starvation.
+ *
+ * @param asyncMode If the listener is operating at asynchronous mode.
+ * @param maxImages The max number of images held by this listener.
+ */
+ /**
+ *
+ * @param asyncMode
+ */
+ public SimpleImageReaderListener(boolean asyncMode, int maxImages) {
+ mAsyncMode = asyncMode;
+ mMaxImages = maxImages;
+ }
@Override
public void onImageAvailable(ImageReader reader) {
try {
mQueue.put(reader.acquireNextImage());
+ if (mAsyncMode && mQueue.size() >= mMaxImages) {
+ Image img = mQueue.poll();
+ img.close();
+ }
} catch (InterruptedException e) {
throw new UnsupportedOperationException(
"Can't handle InterruptedException in onImageAvailable");
@@ -201,6 +267,47 @@
assertNotNull("Wait for an image timed out in " + timeout + "ms", image);
return image;
}
+
+ /**
+ * Drain the pending images held by this listener currently.
+ *
+ */
+ public void drain() {
+ // Poll until empty; a counted loop against mQueue.size() would stop early
+ // because the size shrinks as images are removed.
+ while (!mQueue.isEmpty()) {
+ Image image = mQueue.poll();
+ assertNotNull("Unable to get an image", image);
+ image.close();
+ }
+ }
+ }
+
+ public static class SimpleImageWriterListener implements ImageWriter.OnImageReleasedListener {
+ private final Semaphore mImageReleasedSema = new Semaphore(0);
+ private final ImageWriter mWriter;
+ @Override
+ public void onImageReleased(ImageWriter writer) {
+ if (writer != mWriter) {
+ return;
+ }
+
+ if (VERBOSE) {
+ Log.v(TAG, "Input image is released");
+ }
+ mImageReleasedSema.release();
+ }
+
+ public SimpleImageWriterListener(ImageWriter writer) {
+ if (writer == null) {
+ throw new IllegalArgumentException("writer cannot be null");
+ }
+ mWriter = writer;
+ }
+
+ public void waitForImageReleased(long timeoutMs) throws InterruptedException {
+ if (!mImageReleasedSema.tryAcquire(timeoutMs, TimeUnit.MILLISECONDS)) {
+ fail("wait for image available timed out after " + timeoutMs + "ms");
+ }
+ }
}
public static class SimpleCaptureCallback extends CameraCaptureSession.CaptureCallback {
@@ -244,6 +351,27 @@
return getTotalCaptureResult(timeout);
}
+ public TotalCaptureResult getCaptureResult(long timeout, long timestamp) {
+ try {
+ long currentTs = -1L;
+ TotalCaptureResult result;
+ while (true) {
+ result = mQueue.poll(timeout, TimeUnit.MILLISECONDS);
+ if (result == null) {
+ throw new RuntimeException(
+ "Wait for a capture result timed out in " + timeout + "ms");
+ }
+ currentTs = result.get(CaptureResult.SENSOR_TIMESTAMP);
+ if (currentTs == timestamp) {
+ return result;
+ }
+ }
+
+ } catch (InterruptedException e) {
+ throw new UnsupportedOperationException("Unhandled interrupted exception", e);
+ }
+ }
+
public TotalCaptureResult getTotalCaptureResult(long timeout) {
try {
TotalCaptureResult result = mQueue.poll(timeout, TimeUnit.MILLISECONDS);
@@ -289,16 +417,63 @@
*/
public TotalCaptureResult getTotalCaptureResultForRequest(CaptureRequest myRequest,
int numResultsWait) {
+ ArrayList<CaptureRequest> captureRequests = new ArrayList<>(1);
+ captureRequests.add(myRequest);
+ return getTotalCaptureResultsForRequests(captureRequests, numResultsWait)[0];
+ }
+
+ /**
+ * Get an array of {@link TotalCaptureResult total capture results} for a given list of
+ * {@link CaptureRequest capture requests}. This can be used when the order of results
+ * may not be the same as the order of requests.
+ *
+ * @param captureRequests The list of {@link CaptureRequest capture requests} whose
+ * corresponding {@link TotalCaptureResult capture results} are
+ * being waited for.
+ * @param numResultsWait Number of frames to wait for the capture results
+ * before timeout.
+ * @throws TimeoutRuntimeException If more than numResultsWait results are
+ * seen before all the results matching captureRequests arrive.
+ */
+ public TotalCaptureResult[] getTotalCaptureResultsForRequests(
+ List<CaptureRequest> captureRequests, int numResultsWait) {
if (numResultsWait < 0) {
throw new IllegalArgumentException("numResultsWait must be no less than 0");
}
+ if (captureRequests == null || captureRequests.size() == 0) {
+ throw new IllegalArgumentException("captureRequests must have at least 1 request.");
+ }
- TotalCaptureResult result;
+ // Create a request -> a list of result indices map that it will wait for.
+ HashMap<CaptureRequest, ArrayList<Integer>> remainingResultIndicesMap = new HashMap<>();
+ for (int i = 0; i < captureRequests.size(); i++) {
+ CaptureRequest request = captureRequests.get(i);
+ ArrayList<Integer> indices = remainingResultIndicesMap.get(request);
+ if (indices == null) {
+ indices = new ArrayList<>();
+ remainingResultIndicesMap.put(request, indices);
+ }
+ indices.add(i);
+ }
+
+ TotalCaptureResult[] results = new TotalCaptureResult[captureRequests.size()];
int i = 0;
do {
- result = getTotalCaptureResult(CAPTURE_RESULT_TIMEOUT_MS);
- if (result.getRequest().equals(myRequest)) {
- return result;
+ TotalCaptureResult result = getTotalCaptureResult(CAPTURE_RESULT_TIMEOUT_MS);
+ CaptureRequest request = result.getRequest();
+ ArrayList<Integer> indices = remainingResultIndicesMap.get(request);
+ if (indices != null) {
+ results[indices.get(0)] = result;
+ indices.remove(0);
+
+ // Remove the entry once all results for this request have been fulfilled.
+ if (indices.isEmpty()) {
+ remainingResultIndicesMap.remove(request);
+ }
+ }
+
+ if (remainingResultIndicesMap.isEmpty()) {
+ return results;
}
} while (i++ < numResultsWait);
@@ -310,6 +485,11 @@
{
return mQueue.isEmpty();
}
+
+ public void drain() {
+ mQueue.clear();
+ mNumFramesArrived.getAndSet(0);
+ }
}
/**
@@ -380,19 +560,26 @@
throws CameraAccessException {
BlockingSessionCallback sessionListener = new BlockingSessionCallback(listener);
camera.createCaptureSession(outputSurfaces, sessionListener, handler);
+ CameraCaptureSession session =
+ sessionListener.waitAndGetSession(SESSION_CONFIGURE_TIMEOUT_MS);
+ assertFalse("Camera session should not be a reprocessable session",
+ session.isReprocessable());
- return sessionListener.waitAndGetSession(SESSION_CONFIGURE_TIMEOUT_MS);
+ return session;
}
- public static CameraCaptureSession configureReprocessibleCameraSession(CameraDevice camera,
+ public static CameraCaptureSession configureReprocessableCameraSession(CameraDevice camera,
InputConfiguration inputConfiguration, List<Surface> outputSurfaces,
CameraCaptureSession.StateCallback listener, Handler handler)
throws CameraAccessException {
BlockingSessionCallback sessionListener = new BlockingSessionCallback(listener);
- camera.createReprocessibleCaptureSession(inputConfiguration, outputSurfaces,
+ camera.createReprocessableCaptureSession(inputConfiguration, outputSurfaces,
sessionListener, handler);
+ CameraCaptureSession session =
+ sessionListener.waitAndGetSession(SESSION_CONFIGURE_TIMEOUT_MS);
+ assertTrue("Camera session should be a reprocessable session", session.isReprocessable());
- return sessionListener.waitAndGetSession(SESSION_CONFIGURE_TIMEOUT_MS);
+ return session;
}
public static <T> void assertArrayNotEmpty(T arr, String message) {
@@ -1085,8 +1272,9 @@
if (src.getFormat() != dst.getFormat()) {
throw new IllegalArgumentException("Src and dst images should have the same format");
}
- if (src.isOpaque() || dst.isOpaque()) {
- throw new IllegalArgumentException("Opaque image is not copyable");
+ if (src.getFormat() == ImageFormat.PRIVATE ||
+ dst.getFormat() == ImageFormat.PRIVATE) {
+ throw new IllegalArgumentException("PRIVATE format images are not copyable");
}
// TODO: check the owner of the dst image, it must be from ImageWriter, other source may
@@ -1113,9 +1301,9 @@
* Checks whether the two images are strongly equal.
* </p>
* <p>
- * Two images are strongly equal if and only if the data, formats, sizes, and
- * timestamps are same. For opaque images ({@link Image#isOpaque()} returns
- * true), the image data is not not accessible thus the data comparison is
+ * Two images are strongly equal if and only if the data, formats, sizes,
+ * and timestamps are same. For {@link ImageFormat#PRIVATE PRIVATE} format
+ * images, the image data is not accessible, thus the data comparison is
* effectively skipped as the number of planes is zero.
* </p>
* <p>
diff --git a/tests/tests/hardware/src/android/hardware/camera2/cts/CaptureRequestTest.java b/tests/tests/hardware/src/android/hardware/camera2/cts/CaptureRequestTest.java
index dcfde8a..68c8077 100644
--- a/tests/tests/hardware/src/android/hardware/camera2/cts/CaptureRequestTest.java
+++ b/tests/tests/hardware/src/android/hardware/camera2/cts/CaptureRequestTest.java
@@ -379,7 +379,7 @@
for (String id : mCameraIds) {
try {
openDevice(id);
- if (!mStaticInfo.isManualColorCorrectionSupported()) {
+ if (!mStaticInfo.isColorCorrectionSupported()) {
Log.i(TAG, "Camera " + id +
" doesn't support color correction controls, skipping test");
continue;
@@ -546,8 +546,9 @@
for (String id : mCameraIds) {
try {
openDevice(id);
-
- sceneModeTestByCamera();
+ if (mStaticInfo.isSceneModeSupported()) {
+ sceneModeTestByCamera();
+ }
} finally {
closeDevice();
}
@@ -746,35 +747,55 @@
result = listener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS);
validateColorCorrectionResult(result,
previewRequestBuilder.get(CaptureRequest.COLOR_CORRECTION_MODE));
-
+ int colorCorrectionMode = CaptureRequest.COLOR_CORRECTION_MODE_TRANSFORM_MATRIX;
// TRANSFORM_MATRIX mode
// Only test unit gain and identity transform
- RggbChannelVector UNIT_GAIN = new RggbChannelVector(1.0f, 1.0f, 1.0f, 1.0f);
+ List<Integer> availableControlModes = Arrays.asList(
+ CameraTestUtils.toObject(mStaticInfo.getAvailableControlModesChecked()));
+ List<Integer> availableAwbModes = Arrays.asList(
+ CameraTestUtils.toObject(mStaticInfo.getAwbAvailableModesChecked()));
+ boolean isManualCCSupported =
+ availableControlModes.contains(CaptureRequest.CONTROL_MODE_OFF) ||
+ availableAwbModes.contains(CaptureRequest.CONTROL_AWB_MODE_OFF);
+ if (isManualCCSupported) {
+ if (!availableControlModes.contains(CaptureRequest.CONTROL_MODE_OFF)) {
+ // Only manual AWB mode is supported
+ manualRequestBuilder.set(CaptureRequest.CONTROL_MODE,
+ CaptureRequest.CONTROL_MODE_AUTO);
+ manualRequestBuilder.set(CaptureRequest.CONTROL_AWB_MODE,
+ CaptureRequest.CONTROL_AWB_MODE_OFF);
+ } else {
+ // All 3A manual controls are supported; it doesn't matter what we set for AWB mode.
+ manualRequestBuilder.set(CaptureRequest.CONTROL_MODE,
+ CaptureRequest.CONTROL_MODE_OFF);
+ }
- ColorSpaceTransform IDENTITY_TRANSFORM = new ColorSpaceTransform(
- new Rational[] {
- ONE_R, ZERO_R, ZERO_R,
- ZERO_R, ONE_R, ZERO_R,
- ZERO_R, ZERO_R, ONE_R
- });
+ RggbChannelVector UNIT_GAIN = new RggbChannelVector(1.0f, 1.0f, 1.0f, 1.0f);
- int colorCorrectionMode = CaptureRequest.COLOR_CORRECTION_MODE_TRANSFORM_MATRIX;
- manualRequestBuilder.set(CaptureRequest.CONTROL_MODE, CaptureRequest.CONTROL_MODE_OFF);
- manualRequestBuilder.set(CaptureRequest.COLOR_CORRECTION_MODE, colorCorrectionMode);
- manualRequestBuilder.set(CaptureRequest.COLOR_CORRECTION_GAINS, UNIT_GAIN);
- manualRequestBuilder.set(CaptureRequest.COLOR_CORRECTION_TRANSFORM, IDENTITY_TRANSFORM);
- request = manualRequestBuilder.build();
- mSession.capture(request, listener, mHandler);
- result = listener.getCaptureResultForRequest(request, NUM_RESULTS_WAIT_TIMEOUT);
- RggbChannelVector gains = result.get(CaptureResult.COLOR_CORRECTION_GAINS);
- ColorSpaceTransform transform = result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM);
- validateColorCorrectionResult(result, colorCorrectionMode);
- mCollector.expectEquals("control mode result/request mismatch",
- CaptureResult.CONTROL_MODE_OFF, result.get(CaptureResult.CONTROL_MODE));
- mCollector.expectEquals("Color correction gain result/request mismatch",
- UNIT_GAIN, gains);
- mCollector.expectEquals("Color correction gain result/request mismatch",
- IDENTITY_TRANSFORM, transform);
+ ColorSpaceTransform IDENTITY_TRANSFORM = new ColorSpaceTransform(
+ new Rational[] {
+ ONE_R, ZERO_R, ZERO_R,
+ ZERO_R, ONE_R, ZERO_R,
+ ZERO_R, ZERO_R, ONE_R
+ });
+
+ manualRequestBuilder.set(CaptureRequest.COLOR_CORRECTION_MODE, colorCorrectionMode);
+ manualRequestBuilder.set(CaptureRequest.COLOR_CORRECTION_GAINS, UNIT_GAIN);
+ manualRequestBuilder.set(CaptureRequest.COLOR_CORRECTION_TRANSFORM, IDENTITY_TRANSFORM);
+ request = manualRequestBuilder.build();
+ mSession.capture(request, listener, mHandler);
+ result = listener.getCaptureResultForRequest(request, NUM_RESULTS_WAIT_TIMEOUT);
+ RggbChannelVector gains = result.get(CaptureResult.COLOR_CORRECTION_GAINS);
+ ColorSpaceTransform transform = result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM);
+ validateColorCorrectionResult(result, colorCorrectionMode);
+ mCollector.expectEquals("control mode result/request mismatch",
+ CaptureResult.CONTROL_MODE_OFF, result.get(CaptureResult.CONTROL_MODE));
+ mCollector.expectEquals("Color correction gain result/request mismatch",
+ UNIT_GAIN, gains);
+ mCollector.expectEquals("Color correction gain result/request mismatch",
+ IDENTITY_TRANSFORM, transform);
+
+ }
// FAST mode
colorCorrectionMode = CaptureRequest.COLOR_CORRECTION_MODE_FAST;
diff --git a/tests/tests/hardware/src/android/hardware/camera2/cts/CaptureResultTest.java b/tests/tests/hardware/src/android/hardware/camera2/cts/CaptureResultTest.java
index 61bf36c..2430dd0 100644
--- a/tests/tests/hardware/src/android/hardware/camera2/cts/CaptureResultTest.java
+++ b/tests/tests/hardware/src/android/hardware/camera2/cts/CaptureResultTest.java
@@ -440,17 +440,21 @@
// Keys only present when corresponding control is on are being
// verified in its own functional test
- // Only present when tone mapping mode is CONTRAST_CURVE
+ // Only present in certain tonemap modes. Tested in CaptureRequestTest.
waiverKeys.add(CaptureResult.TONEMAP_CURVE);
+ waiverKeys.add(CaptureResult.TONEMAP_GAMMA);
+ waiverKeys.add(CaptureResult.TONEMAP_PRESET_CURVE);
// Only present when test pattern mode is SOLID_COLOR.
// TODO: verify this key in test pattern test later
waiverKeys.add(CaptureResult.SENSOR_TEST_PATTERN_DATA);
// Only present when STATISTICS_LENS_SHADING_MAP_MODE is ON
waiverKeys.add(CaptureResult.STATISTICS_LENS_SHADING_CORRECTION_MAP);
- // Only present when STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES is ON
+ // Only present when STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES is ON
waiverKeys.add(CaptureResult.STATISTICS_HOT_PIXEL_MAP);
- // Only present when face detection is on
+ // Only present when face detection is on
waiverKeys.add(CaptureResult.STATISTICS_FACES);
+ // Only present in reprocessing capture result.
+ waiverKeys.add(CaptureResult.REPROCESS_EFFECTIVE_EXPOSURE_FACTOR);
//Keys not required if RAW is not supported
if (!mStaticInfo.isCapabilitySupported(
@@ -460,6 +464,13 @@
waiverKeys.add(CaptureResult.SENSOR_NOISE_PROFILE);
}
+ //Keys for depth output capability
+ if (!mStaticInfo.isCapabilitySupported(
+ CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_DEPTH_OUTPUT)) {
+ waiverKeys.add(CaptureResult.LENS_POSE_ROTATION);
+ waiverKeys.add(CaptureResult.LENS_POSE_TRANSLATION);
+ }
+
if (mStaticInfo.getAeMaxRegionsChecked() == 0) {
waiverKeys.add(CaptureResult.CONTROL_AE_REGIONS);
}
@@ -478,7 +489,7 @@
* Hardware Level = LIMITED or LEGACY
*/
// Key not present if certain control is not supported
- if (!mStaticInfo.isManualColorCorrectionSupported()) {
+ if (!mStaticInfo.isColorCorrectionSupported()) {
waiverKeys.add(CaptureResult.COLOR_CORRECTION_GAINS);
waiverKeys.add(CaptureResult.COLOR_CORRECTION_MODE);
waiverKeys.add(CaptureResult.COLOR_CORRECTION_TRANSFORM);
@@ -687,6 +698,8 @@
resultKeys.add(CaptureResult.LENS_FOCAL_LENGTH);
resultKeys.add(CaptureResult.LENS_FOCUS_DISTANCE);
resultKeys.add(CaptureResult.LENS_OPTICAL_STABILIZATION_MODE);
+ resultKeys.add(CaptureResult.LENS_POSE_ROTATION);
+ resultKeys.add(CaptureResult.LENS_POSE_TRANSLATION);
resultKeys.add(CaptureResult.LENS_FOCUS_RANGE);
resultKeys.add(CaptureResult.LENS_STATE);
resultKeys.add(CaptureResult.NOISE_REDUCTION_MODE);
@@ -712,7 +725,10 @@
resultKeys.add(CaptureResult.STATISTICS_LENS_SHADING_MAP_MODE);
resultKeys.add(CaptureResult.TONEMAP_CURVE);
resultKeys.add(CaptureResult.TONEMAP_MODE);
+ resultKeys.add(CaptureResult.TONEMAP_GAMMA);
+ resultKeys.add(CaptureResult.TONEMAP_PRESET_CURVE);
resultKeys.add(CaptureResult.BLACK_LEVEL_LOCK);
+ resultKeys.add(CaptureResult.REPROCESS_EFFECTIVE_EXPOSURE_FACTOR);
return resultKeys;
}
diff --git a/tests/tests/hardware/src/android/hardware/camera2/cts/DngCreatorTest.java b/tests/tests/hardware/src/android/hardware/camera2/cts/DngCreatorTest.java
index 807aa29..a8f1b48 100644
--- a/tests/tests/hardware/src/android/hardware/camera2/cts/DngCreatorTest.java
+++ b/tests/tests/hardware/src/android/hardware/camera2/cts/DngCreatorTest.java
@@ -19,15 +19,16 @@
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
-import android.graphics.BitmapRegionDecoder;
import android.graphics.ImageFormat;
import android.graphics.Rect;
import android.graphics.RectF;
+import android.hardware.camera2.CameraCaptureSession;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CameraDevice;
import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.CaptureResult;
import android.hardware.camera2.DngCreator;
+import android.hardware.camera2.TotalCaptureResult;
import android.hardware.camera2.cts.helpers.StaticMetadata;
import android.hardware.camera2.cts.rs.BitmapUtils;
import android.hardware.camera2.cts.rs.RawConverter;
@@ -37,6 +38,7 @@
import android.media.ExifInterface;
import android.media.Image;
import android.media.ImageReader;
+import android.os.ConditionVariable;
import android.util.Log;
import android.util.Pair;
import android.util.Size;
@@ -51,7 +53,6 @@
import java.util.List;
import static android.hardware.camera2.cts.helpers.AssertHelpers.*;
-import static junit.framework.Assert.assertTrue;
/**
* Tests for the DngCreator API.
@@ -61,8 +62,9 @@
private static final boolean VERBOSE = Log.isLoggable(TAG, Log.VERBOSE);
private static final String DEBUG_DNG_FILE = "raw16.dng";
- private static final double IMAGE_DIFFERENCE_TOLERANCE = 60;
+ private static final double IMAGE_DIFFERENCE_TOLERANCE = 65;
private static final int DEFAULT_PATCH_DIMEN = 512;
+ private static final int AE_TIMEOUT_MS = 2000;
@Override
protected void setUp() throws Exception {
@@ -131,7 +133,7 @@
captureReader = createImageReader(activeArraySize, ImageFormat.RAW_SENSOR, 2,
captureListener);
Pair<Image, CaptureResult> resultPair = captureSingleRawShot(activeArraySize,
- captureReader, captureListener);
+ /*waitForAe*/false, captureReader, captureListener);
CameraCharacteristics characteristics = mStaticInfo.getCharacteristics();
// Test simple writeImage, no header checks
@@ -233,7 +235,7 @@
captureListeners.add(previewListener);
Pair<List<Image>, CaptureResult> resultPair = captureSingleRawShot(activeArraySize,
- captureReaders, captureListeners);
+ captureReaders, /*waitForAe*/false, captureListeners);
CameraCharacteristics characteristics = mStaticInfo.getCharacteristics();
// Test simple writeImage, no header checks
@@ -356,7 +358,7 @@
captureListeners.add(jpegListener);
Pair<List<Image>, CaptureResult> resultPair = captureSingleRawShot(activeArraySize,
- captureReaders, captureListeners);
+ captureReaders, /*waitForAe*/true, captureListeners);
CameraCharacteristics characteristics = mStaticInfo.getCharacteristics();
Image raw = resultPair.first.get(0);
Image jpeg = resultPair.first.get(1);
@@ -369,8 +371,9 @@
raw.getPlanes()[0].getBuffer().get(rawPlane);
raw.getPlanes()[0].getBuffer().rewind();
RawConverter.convertToSRGB(RenderScriptSingleton.getRS(), raw.getWidth(),
- raw.getHeight(), rawPlane, characteristics,
- resultPair.second, /*offsetX*/0, /*offsetY*/0, /*out*/rawBitmap);
+ raw.getHeight(), raw.getPlanes()[0].getRowStride(), rawPlane,
+ characteristics, resultPair.second, /*offsetX*/0, /*offsetY*/0,
+ /*out*/rawBitmap);
// Decompress JPEG image to a bitmap
byte[] compressedJpegData = CameraTestUtils.getDataFromImage(jpeg);
@@ -494,20 +497,23 @@
}
}
- private Pair<Image, CaptureResult> captureSingleRawShot(Size s, ImageReader captureReader,
+ private Pair<Image, CaptureResult> captureSingleRawShot(Size s, boolean waitForAe,
+ ImageReader captureReader,
CameraTestUtils.SimpleImageReaderListener captureListener) throws Exception {
List<ImageReader> readers = new ArrayList<ImageReader>();
readers.add(captureReader);
List<CameraTestUtils.SimpleImageReaderListener> listeners =
new ArrayList<CameraTestUtils.SimpleImageReaderListener>();
listeners.add(captureListener);
- Pair<List<Image>, CaptureResult> res = captureSingleRawShot(s, readers, listeners);
+ Pair<List<Image>, CaptureResult> res = captureSingleRawShot(s, readers, waitForAe,
+ listeners);
return new Pair<Image, CaptureResult>(res.first.get(0), res.second);
}
- private Pair<List<Image>, CaptureResult> captureSingleRawShot(Size s, List<ImageReader> captureReaders,
+ private Pair<List<Image>, CaptureResult> captureSingleRawShot(Size s,
+ List<ImageReader> captureReaders, boolean waitForAe,
List<CameraTestUtils.SimpleImageReaderListener> captureListeners) throws Exception {
- return captureRawShots(s, captureReaders, captureListeners, 1).get(0);
+ return captureRawShots(s, captureReaders, waitForAe, captureListeners, 1).get(0);
}
/**
@@ -520,8 +526,10 @@
* @return a list of pairs containing a {@link Image} and {@link CaptureResult} used for
* each capture.
*/
- private List<Pair<List<Image>, CaptureResult>> captureRawShots(Size s, List<ImageReader> captureReaders,
- List<CameraTestUtils.SimpleImageReaderListener> captureListeners, int numShots) throws Exception {
+ private List<Pair<List<Image>, CaptureResult>> captureRawShots(Size s,
+ List<ImageReader> captureReaders, boolean waitForAe,
+ List<CameraTestUtils.SimpleImageReaderListener> captureListeners,
+ int numShots) throws Exception {
if (VERBOSE) {
Log.v(TAG, "captureSingleRawShot - Capturing raw image.");
}
@@ -540,16 +548,74 @@
}
assertTrue("Capture size is supported.", validSize);
-
// Capture images.
- List<Surface> outputSurfaces = new ArrayList<Surface>();
+ final List<Surface> outputSurfaces = new ArrayList<Surface>();
for (ImageReader captureReader : captureReaders) {
Surface captureSurface = captureReader.getSurface();
outputSurfaces.add(captureSurface);
}
- CaptureRequest.Builder request = prepareCaptureRequestForSurfaces(outputSurfaces,
- CameraDevice.TEMPLATE_STILL_CAPTURE);
+ // Set up still capture template targeting JPEG/RAW outputs
+ CaptureRequest.Builder request =
+ mCamera.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE);
+ assertNotNull("Fail to get captureRequest", request);
+ for (Surface surface : outputSurfaces) {
+ request.addTarget(surface);
+ }
+
+ ImageReader previewReader = null;
+ if (waitForAe) {
+ // Also setup a small YUV output for AE metering if needed
+ Size yuvSize = (mOrderedPreviewSizes.size() == 0) ? null :
+ mOrderedPreviewSizes.get(mOrderedPreviewSizes.size() - 1);
+ assertNotNull("Must support at least one small YUV size.", yuvSize);
+ previewReader = createImageReader(yuvSize, ImageFormat.YUV_420_888,
+ /*maxNumImages*/2, new CameraTestUtils.ImageDropperListener());
+ outputSurfaces.add(previewReader.getSurface());
+ }
+
+ createSession(outputSurfaces);
+
+ if (waitForAe) {
+ CaptureRequest.Builder precaptureRequest =
+ mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
+ assertNotNull("Fail to get captureRequest", precaptureRequest);
+ precaptureRequest.addTarget(previewReader.getSurface());
+ precaptureRequest.set(CaptureRequest.CONTROL_MODE,
+ CaptureRequest.CONTROL_MODE_AUTO);
+ precaptureRequest.set(CaptureRequest.CONTROL_AE_MODE,
+ CaptureRequest.CONTROL_AE_MODE_ON);
+
+ final ConditionVariable waitForAeCondition = new ConditionVariable(/*isOpen*/false);
+ CameraCaptureSession.CaptureCallback captureCallback =
+ new CameraCaptureSession.CaptureCallback() {
+ @Override
+ public void onCaptureProgressed(CameraCaptureSession session,
+ CaptureRequest request, CaptureResult partialResult) {
+ if (partialResult.get(CaptureResult.CONTROL_AE_STATE) ==
+ CaptureRequest.CONTROL_AE_STATE_CONVERGED) {
+ waitForAeCondition.open();
+ }
+ }
+
+ @Override
+ public void onCaptureCompleted(CameraCaptureSession session,
+ CaptureRequest request, TotalCaptureResult result) {
+ if (result.get(CaptureResult.CONTROL_AE_STATE) ==
+ CaptureRequest.CONTROL_AE_STATE_CONVERGED) {
+ waitForAeCondition.open();
+ }
+ }
+ };
+ startCapture(precaptureRequest.build(), /*repeating*/true, captureCallback, mHandler);
+
+ precaptureRequest.set(CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER,
+ CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER_START);
+ startCapture(precaptureRequest.build(), /*repeating*/false, captureCallback, mHandler);
+ assertTrue("Timeout out waiting for AE to converge",
+ waitForAeCondition.block(AE_TIMEOUT_MS));
+ }
+
request.set(CaptureRequest.STATISTICS_LENS_SHADING_MAP_MODE,
CaptureRequest.STATISTICS_LENS_SHADING_MAP_MODE_ON);
CameraTestUtils.SimpleCaptureCallback resultListener =
diff --git a/tests/tests/hardware/src/android/hardware/camera2/cts/ExtendedCameraCharacteristicsTest.java b/tests/tests/hardware/src/android/hardware/camera2/cts/ExtendedCameraCharacteristicsTest.java
index b1f763d..f504b44 100644
--- a/tests/tests/hardware/src/android/hardware/camera2/cts/ExtendedCameraCharacteristicsTest.java
+++ b/tests/tests/hardware/src/android/hardware/camera2/cts/ExtendedCameraCharacteristicsTest.java
@@ -87,7 +87,7 @@
private static final int YUV_REPROCESS =
CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING;
private static final int OPAQUE_REPROCESS =
- CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_OPAQUE_REPROCESSING;
+ CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING;
@Override
public void setContext(Context context) {
@@ -198,6 +198,7 @@
{
// (Key Name) (HW Level) (Capabilities <Var-Arg>)
expectKeyAvailable(c, CameraCharacteristics.COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES , LEGACY , BC );
+ expectKeyAvailable(c, CameraCharacteristics.CONTROL_AVAILABLE_MODES , LEGACY , BC );
expectKeyAvailable(c, CameraCharacteristics.CONTROL_AE_AVAILABLE_ANTIBANDING_MODES , LEGACY , BC );
expectKeyAvailable(c, CameraCharacteristics.CONTROL_AE_AVAILABLE_MODES , LEGACY , BC );
expectKeyAvailable(c, CameraCharacteristics.CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES , LEGACY , BC );
@@ -490,7 +491,7 @@
boolean supportYUV = arrayContains(capabilities,
CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING);
boolean supportOpaque = arrayContains(capabilities,
- CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_OPAQUE_REPROCESSING);
+ CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING);
StreamConfigurationMap configs =
c.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
Integer maxNumInputStreams =
diff --git a/tests/tests/hardware/src/android/hardware/camera2/cts/ImageWriterTest.java b/tests/tests/hardware/src/android/hardware/camera2/cts/ImageWriterTest.java
index a4d611d..b081660 100644
--- a/tests/tests/hardware/src/android/hardware/camera2/cts/ImageWriterTest.java
+++ b/tests/tests/hardware/src/android/hardware/camera2/cts/ImageWriterTest.java
@@ -28,7 +28,6 @@
import android.media.Image.Plane;
import android.media.ImageReader;
import android.media.ImageWriter;
-import android.os.ConditionVariable;
import android.util.Log;
import android.util.Size;
import android.view.Surface;
@@ -51,7 +50,7 @@
private static final boolean DEBUG = Log.isLoggable(TAG, Log.DEBUG);
// Max number of images can be accessed simultaneously from ImageReader.
private static final int MAX_NUM_IMAGES = 3;
- private static final int CAMERA_OPAQUE_FORMAT = ImageFormat.PRIVATE;
+ private static final int CAMERA_PRIVATE_FORMAT = ImageFormat.PRIVATE;
private ImageReader mReaderForWriter;
private ImageWriter mWriter;
@@ -125,34 +124,13 @@
try {
Log.i(TAG, "Testing Camera " + id);
openDevice(id);
- readerWriterFormatTestByCamera(CAMERA_OPAQUE_FORMAT);
+ readerWriterFormatTestByCamera(CAMERA_PRIVATE_FORMAT);
} finally {
closeDevice(id);
}
}
}
- private final class SimpleImageWriterListener implements ImageWriter.ImageListener {
- private final ConditionVariable imageReleased = new ConditionVariable();
- @Override
- public void onInputImageReleased(ImageWriter writer) {
- if (writer != mWriter) {
- return;
- }
-
- if (VERBOSE) Log.v(TAG, "Input image is released");
- imageReleased.open();
- }
-
- public void waitForImageReleassed(long timeoutMs) {
- if (imageReleased.block(timeoutMs)) {
- imageReleased.close();
- } else {
- fail("wait for image available timed out after " + timeoutMs + "ms");
- }
- }
- }
-
private void readerWriterFormatTestByCamera(int format) throws Exception {
List<Size> sizes = getSortedSizesForFormat(mCamera.getId(), mCameraManager, format, null);
Size maxSize = sizes.get(0);
@@ -178,8 +156,8 @@
Surface surface = mReaderForWriter.getSurface();
assertNotNull("Surface from ImageReader shouldn't be null", surface);
mWriter = ImageWriter.newInstance(surface, MAX_NUM_IMAGES);
- SimpleImageWriterListener writerImageListener = new SimpleImageWriterListener();
- mWriter.setImageListener(writerImageListener, mHandler);
+ SimpleImageWriterListener writerImageListener = new SimpleImageWriterListener(mWriter);
+ mWriter.setOnImageReleasedListener(writerImageListener, mHandler);
// Start capture: capture 2 images.
List<Surface> outputSurfaces = new ArrayList<Surface>();
@@ -201,17 +179,19 @@
Image inputImage = null;
// Image from the second ImageReader.
Image outputImage = null;
- if (format == CAMERA_OPAQUE_FORMAT) {
- assertTrue("First ImageReader should be opaque",
- mReader.isOpaque());
- assertTrue("Second ImageReader should be opaque",
- mReaderForWriter.isOpaque());
- assertTrue("Format of first ImageReader should be opaque",
- mReader.getImageFormat() == CAMERA_OPAQUE_FORMAT);
- assertTrue(" Format of second ImageReader should be opaque",
- mReaderForWriter.getImageFormat() == CAMERA_OPAQUE_FORMAT);
- assertTrue(" Format of ImageWriter should be opaque",
- mWriter.getFormat() == CAMERA_OPAQUE_FORMAT);
+ assertTrue("ImageWriter max images should be " + MAX_NUM_IMAGES,
+ mWriter.getMaxImages() == MAX_NUM_IMAGES);
+ if (format == CAMERA_PRIVATE_FORMAT) {
+ assertTrue("Format of first ImageReader should be PRIVATE",
+ mReader.getImageFormat() == CAMERA_PRIVATE_FORMAT);
+ assertTrue("Format of second ImageReader should be PRIVATE",
+ mReaderForWriter.getImageFormat() == CAMERA_PRIVATE_FORMAT);
+ assertTrue("Format of ImageWriter should be PRIVATE",
+ mWriter.getFormat() == CAMERA_PRIVATE_FORMAT);
// Validate 2 images
validateOpaqueImages(maxSize, listenerForCamera, listenerForWriter, captureListener,
@@ -255,7 +235,7 @@
outputImage.close();
// Make sure ImageWriter listener callback is fired.
- writerImageListener.waitForImageReleassed(CAPTURE_IMAGE_TIMEOUT_MS);
+ writerImageListener.waitForImageReleased(CAPTURE_IMAGE_TIMEOUT_MS);
// Test case 2: Directly inject the image into ImageWriter: works for all formats.
@@ -289,7 +269,7 @@
outputImage.close();
// Make sure ImageWriter listener callback is fired.
- writerImageListener.waitForImageReleassed(CAPTURE_IMAGE_TIMEOUT_MS);
+ writerImageListener.waitForImageReleased(CAPTURE_IMAGE_TIMEOUT_MS);
}
stopCapture(/*fast*/false);
@@ -319,24 +299,16 @@
validateOpaqueImage(outputImage, "First Opaque image output by ImageWriter: ",
maxSize, result);
outputImage.close();
- writerListener.waitForImageReleassed(CAPTURE_IMAGE_TIMEOUT_MS);
+ writerListener.waitForImageReleased(CAPTURE_IMAGE_TIMEOUT_MS);
}
}
private void validateOpaqueImage(Image image, String msg, Size imageSize,
CaptureResult result) {
assertNotNull("Opaque image Capture result should not be null", result != null);
- mCollector.expectTrue(msg + "Opaque image format should be: " + CAMERA_OPAQUE_FORMAT,
- image.getFormat() == CAMERA_OPAQUE_FORMAT);
- mCollector.expectTrue(msg + "Opaque image format should be: " + CAMERA_OPAQUE_FORMAT,
- image.getFormat() == CAMERA_OPAQUE_FORMAT);
+ mCollector.expectImageProperties(msg + "Opaque ", image, CAMERA_PRIVATE_FORMAT,
+ imageSize, result.get(CaptureResult.SENSOR_TIMESTAMP));
mCollector.expectTrue(msg + "Opaque image number planes should be zero",
image.getPlanes().length == 0);
- mCollector.expectTrue(msg + "Opaque image size should be " + imageSize,
- image.getWidth() == imageSize.getWidth() &&
- image.getHeight() == imageSize.getHeight());
- long timestampNs = result.get(CaptureResult.SENSOR_TIMESTAMP);
- mCollector.expectTrue(msg + "Opaque image timestamp should be " + timestampNs,
- image.getTimestamp() == timestampNs);
}
}
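
The test above exercises the ImageReader-to-ImageWriter hand-off that ImageWriter enables; a minimal sketch of that path, with cameraReader, width, and height as placeholders:

    // A downstream reader provides the surface that backs the writer.
    ImageReader consumer = ImageReader.newInstance(width, height,
            ImageFormat.PRIVATE, MAX_NUM_IMAGES);
    ImageWriter writer = ImageWriter.newInstance(consumer.getSurface(), MAX_NUM_IMAGES);
    // Images dequeued from the camera-facing reader can be queued directly into the
    // writer; queueInputImage() takes ownership, so the image must not be used afterwards.
    Image image = cameraReader.acquireNextImage();
    writer.queueInputImage(image);
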
diff --git a/tests/tests/hardware/src/android/hardware/camera2/cts/PerformanceTest.java b/tests/tests/hardware/src/android/hardware/camera2/cts/PerformanceTest.java
index d4a0e73..3f54a39 100644
--- a/tests/tests/hardware/src/android/hardware/camera2/cts/PerformanceTest.java
+++ b/tests/tests/hardware/src/android/hardware/camera2/cts/PerformanceTest.java
@@ -19,16 +19,19 @@
import static com.android.ex.camera2.blocking.BlockingSessionCallback.*;
import android.graphics.ImageFormat;
+import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraCaptureSession;
import android.hardware.camera2.CameraCaptureSession.CaptureCallback;
import android.hardware.camera2.CameraDevice;
import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.CaptureResult;
import android.hardware.camera2.TotalCaptureResult;
+import android.hardware.camera2.cts.CameraTestUtils.SimpleCaptureCallback;
import android.hardware.camera2.cts.CameraTestUtils.SimpleImageReaderListener;
import android.hardware.camera2.cts.helpers.StaticMetadata;
import android.hardware.camera2.cts.helpers.StaticMetadata.CheckLevel;
import android.hardware.camera2.cts.testcases.Camera2SurfaceViewTestCase;
+import android.hardware.camera2.params.InputConfiguration;
import android.util.Log;
import android.util.Pair;
import android.util.Size;
@@ -36,10 +39,10 @@
import android.cts.util.DeviceReportLog;
import android.media.Image;
import android.media.ImageReader;
+import android.media.ImageWriter;
import android.os.ConditionVariable;
import android.os.SystemClock;
-import com.android.cts.util.ReportLog;
import com.android.cts.util.ResultType;
import com.android.cts.util.ResultUnit;
import com.android.cts.util.Stat;
@@ -47,6 +50,7 @@
import com.android.ex.camera2.exceptions.TimeoutRuntimeException;
import java.util.ArrayList;
+import java.util.Arrays;
import java.util.List;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.TimeUnit;
@@ -61,9 +65,28 @@
private static final int NUM_TEST_LOOPS = 5;
private static final int NUM_MAX_IMAGES = 4;
private static final int NUM_RESULTS_WAIT = 30;
+ private static final int[] REPROCESS_FORMATS = {ImageFormat.YUV_420_888, ImageFormat.PRIVATE};
+ private final int MAX_REPROCESS_IMAGES = 10;
+ private final int MAX_JPEG_IMAGES = MAX_REPROCESS_IMAGES;
+ private final int MAX_INPUT_IMAGES = MAX_REPROCESS_IMAGES;
+ // ZSL queue depth should be bigger than the max simultaneous reprocessing capture request
+ // count to maintain a reasonable number of candidate images for the worst case.
+ // Here we want to make sure we dequeue at most half of the queue's max images in the worst case.
+ private final int MAX_ZSL_IMAGES = MAX_REPROCESS_IMAGES * 2;
+ private final double REPROCESS_STALL_MARGIN = 0.1;
private DeviceReportLog mReportLog;
+ // Used for reading camera output buffers.
+ private ImageReader mCameraZslReader;
+ private SimpleImageReaderListener mCameraZslImageListener;
+ // Used for reprocessing (jpeg) output.
+ private ImageReader mJpegReader;
+ private SimpleImageReaderListener mJpegListener;
+ // Used for reprocessing input.
+ private ImageWriter mWriter;
+ private SimpleCaptureCallback mZslResultListener;
+
@Override
protected void setUp() throws Exception {
mReportLog = new DeviceReportLog();
@@ -121,8 +144,8 @@
configureStreamTimes[i] = configureTimeMs - openTimeMs;
// Blocking start preview (start preview to first image arrives)
- CameraTestUtils.SimpleCaptureCallback resultListener =
- new CameraTestUtils.SimpleCaptureCallback();
+ SimpleCaptureCallback resultListener =
+ new SimpleCaptureCallback();
blockingStartPreview(resultListener, imageListener);
previewStartedTimeMs = SystemClock.elapsedRealtime();
startPreviewTimes[i] = previewStartedTimeMs - configureTimeMs;
@@ -205,8 +228,8 @@
mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
CaptureRequest.Builder captureBuilder =
mCamera.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE);
- CameraTestUtils.SimpleCaptureCallback previewResultListener =
- new CameraTestUtils.SimpleCaptureCallback();
+ SimpleCaptureCallback previewResultListener =
+ new SimpleCaptureCallback();
SimpleTimingResultListener captureResultListener =
new SimpleTimingResultListener();
SimpleImageListener imageListener = new SimpleImageListener();
@@ -252,7 +275,7 @@
// simulate real scenario (preview runs a bit)
waitForNumResults(previewResultListener, NUM_RESULTS_WAIT);
- stopPreview();
+ blockingStopPreview();
}
mReportLog.printArray("Camera " + id
@@ -267,6 +290,11 @@
mReportLog.printArray("Camera " + id
+ ": Camera capture result latency", getResultTimes,
ResultType.LOWER_BETTER, ResultUnit.MS);
+
+ // Result will not be reported in CTS report if no summary is printed.
+ mReportLog.printSummary("Camera capture result average latency for Camera " + id,
+ Stat.getAverage(getResultTimes),
+ ResultType.LOWER_BETTER, ResultUnit.MS);
}
finally {
closeImageReader();
@@ -275,6 +303,338 @@
}
}
+ /**
+ * Test reprocessing shot-to-shot latency, i.e., from the time a reprocess
+ * request is issued to the time the reprocess image is returned.
+ *
+ */
+ public void testReprocessingLatency() throws Exception {
+ for (String id : mCameraIds) {
+ for (int format : REPROCESS_FORMATS) {
+ if (!isReprocessSupported(id, format)) {
+ continue;
+ }
+
+ try {
+ openDevice(id);
+
+ reprocessingPerformanceTestByCamera(format, /*asyncMode*/false);
+ } finally {
+ closeReaderWriters();
+ closeDevice();
+ }
+ }
+ }
+ }
+
+ /**
+ * Test reprocessing throughput, i.e., how many frames can be reprocessed
+ * during a given amount of time.
+ *
+ */
+ public void testReprocessingThroughput() throws Exception {
+ for (String id : mCameraIds) {
+ for (int format : REPROCESS_FORMATS) {
+ if (!isReprocessSupported(id, format)) {
+ continue;
+ }
+
+ try {
+ openDevice(id);
+
+ reprocessingPerformanceTestByCamera(format, /*asyncMode*/true);
+ } finally {
+ closeReaderWriters();
+ closeDevice();
+ }
+ }
+ }
+ }
+
+ /**
+ * Test preview stall (frame drops) caused by reprocessing.
+ */
+ public void testReprocessingCaptureStall() throws Exception {
+ for (String id : mCameraIds) {
+ for (int format : REPROCESS_FORMATS) {
+ if (!isReprocessSupported(id, format)) {
+ continue;
+ }
+
+ try {
+ openDevice(id);
+
+ reprocessingCaptureStallTestByCamera(format);
+ } finally {
+ closeReaderWriters();
+ closeDevice();
+ }
+ }
+ }
+ }
+
+ private void reprocessingCaptureStallTestByCamera(int reprocessInputFormat) throws Exception {
+ prepareReprocessCapture(reprocessInputFormat);
+
+ // Let it stream for a while before reprocessing
+ startZslStreaming();
+ waitForFrames(NUM_RESULTS_WAIT);
+
+ final int NUM_REPROCESS_TESTED = MAX_REPROCESS_IMAGES / 2;
+ // Prepare several reprocessing requests
+ Image[] inputImages = new Image[NUM_REPROCESS_TESTED];
+ CaptureRequest.Builder[] reprocessReqs = new CaptureRequest.Builder[MAX_REPROCESS_IMAGES];
+ for (int i = 0; i < NUM_REPROCESS_TESTED; i++) {
+ inputImages[i] =
+ mCameraZslImageListener.getImage(CameraTestUtils.CAPTURE_IMAGE_TIMEOUT_MS);
+ TotalCaptureResult zslResult =
+ mZslResultListener.getCaptureResult(
+ WAIT_FOR_RESULT_TIMEOUT_MS, inputImages[i].getTimestamp());
+ reprocessReqs[i] = mCamera.createReprocessCaptureRequest(zslResult);
+ reprocessReqs[i].addTarget(mJpegReader.getSurface());
+ mWriter.queueInputImage(inputImages[i]);
+ }
+
+ double[] maxCaptureGapsMs = new double[NUM_REPROCESS_TESTED];
+ double[] averageFrameDurationMs = new double[NUM_REPROCESS_TESTED];
+ Arrays.fill(averageFrameDurationMs, 0.0);
+ final int MAX_REPROCESS_RETURN_FRAME_COUNT = 20;
+ SimpleCaptureCallback reprocessResultListener = new SimpleCaptureCallback();
+ for (int i = 0; i < NUM_REPROCESS_TESTED; i++) {
+ mZslResultListener.drain();
+ CaptureRequest reprocessRequest = reprocessReqs[i].build();
+ mSession.capture(reprocessRequest, reprocessResultListener, mHandler);
+ // Wait for reprocess output jpeg and result come back.
+ reprocessResultListener.getCaptureResultForRequest(reprocessRequest,
+ CameraTestUtils.CAPTURE_RESULT_TIMEOUT_MS);
+ mJpegListener.getImage(CameraTestUtils.CAPTURE_IMAGE_TIMEOUT_MS);
+ long numFramesMaybeStalled = mZslResultListener.getTotalNumFrames();
+ assertTrue("Reprocess capture result should be returned in "
+ + MAX_REPROCESS_RETURN_FRAME_COUNT + " frames",
+ numFramesMaybeStalled <= MAX_REPROCESS_RETURN_FRAME_COUNT);
+
+ // Need to look over a longer period, as the stutter could happen after the reprocessing
+ // output frame is received.
+ long[] timestampGap = new long[MAX_REPROCESS_RETURN_FRAME_COUNT + 1];
+ Arrays.fill(timestampGap, 0);
+ CaptureResult[] results = new CaptureResult[timestampGap.length];
+ long[] frameDurationsNs = new long[timestampGap.length];
+ for (int j = 0; j < results.length; j++) {
+ results[j] = mZslResultListener.getCaptureResult(
+ CameraTestUtils.CAPTURE_IMAGE_TIMEOUT_MS);
+ if (j > 0) {
+ timestampGap[j] = results[j].get(CaptureResult.SENSOR_TIMESTAMP) -
+ results[j - 1].get(CaptureResult.SENSOR_TIMESTAMP);
+ assertTrue("Time stamp should be monotonically increasing",
+ timestampGap[j] > 0);
+ }
+ frameDurationsNs[j] = results[j].get(CaptureResult.SENSOR_FRAME_DURATION);
+ }
+
+ if (VERBOSE) {
+ Log.i(TAG, "timestampGap: " + Arrays.toString(timestampGap));
+ Log.i(TAG, "frameDurationsNs: " + Arrays.toString(frameDurationsNs));
+ }
+
+ // Get the number of candidate results, calculate the average frame duration
+ // and max timestamp gap.
+ Arrays.sort(timestampGap);
+ double maxTimestampGapMs = timestampGap[timestampGap.length - 1] / 1000000.0;
+ for (int m = 0; m < frameDurationsNs.length; m++) {
+ averageFrameDurationMs[i] += (frameDurationsNs[m] / 1000000.0);
+ }
+ averageFrameDurationMs[i] /= frameDurationsNs.length;
+
+ maxCaptureGapsMs[i] = maxTimestampGapMs;
+ }
+
+ String reprocessType = " YUV reprocessing ";
+ if (reprocessInputFormat == ImageFormat.PRIVATE) {
+ reprocessType = " opaque reprocessing ";
+ }
+
+ mReportLog.printArray("Camera " + mCamera.getId()
+ + ":" + reprocessType + " max capture timestamp gaps", maxCaptureGapsMs,
+ ResultType.LOWER_BETTER, ResultUnit.MS);
+ mReportLog.printArray("Camera " + mCamera.getId()
+ + ":" + reprocessType + "capture average frame duration", averageFrameDurationMs,
+ ResultType.LOWER_BETTER, ResultUnit.MS);
+ mReportLog.printSummary("Camera reprocessing average max capture timestamp gaps for Camera "
+ + mCamera.getId(), Stat.getAverage(maxCaptureGapsMs), ResultType.LOWER_BETTER,
+ ResultUnit.MS);
+
+ // The max timestamp gap should be less than (captureStall + 1) * averageFrameDuration *
+ // (1 + error margin).
+ int maxCaptureStallFrames = mStaticInfo.getMaxCaptureStallOrDefault();
+ for (int i = 0; i < maxCaptureGapsMs.length; i++) {
+ double stallDurationBound = averageFrameDurationMs[i] *
+ (maxCaptureStallFrames + 1) * (1 + REPROCESS_STALL_MARGIN);
+ assertTrue("max capture stall duration should be no larger than ",
+ maxCaptureGapsMs[i] <= stallDurationBound);
+ }
+ }
+
+ private void reprocessingPerformanceTestByCamera(int reprocessInputFormat, boolean asyncMode)
+ throws Exception {
+ // Prepare the reprocessing capture
+ prepareReprocessCapture(reprocessInputFormat);
+
+ // Start ZSL streaming
+ startZslStreaming();
+ waitForFrames(NUM_RESULTS_WAIT);
+
+ CaptureRequest.Builder[] reprocessReqs = new CaptureRequest.Builder[MAX_REPROCESS_IMAGES];
+ Image[] inputImages = new Image[MAX_REPROCESS_IMAGES];
+ double[] getImageLatenciesMs = new double[MAX_REPROCESS_IMAGES];
+ long startTimeMs;
+ for (int i = 0; i < MAX_REPROCESS_IMAGES; i++) {
+ inputImages[i] =
+ mCameraZslImageListener.getImage(CameraTestUtils.CAPTURE_IMAGE_TIMEOUT_MS);
+ TotalCaptureResult zslResult =
+ mZslResultListener.getCaptureResult(
+ WAIT_FOR_RESULT_TIMEOUT_MS, inputImages[i].getTimestamp());
+ reprocessReqs[i] = mCamera.createReprocessCaptureRequest(zslResult);
+ reprocessReqs[i].addTarget(mJpegReader.getSurface());
+ }
+
+ if (asyncMode) {
+ // async capture: issue all the reprocess requests as quickly as possible, then
+ // check the throughput of the output JPEGs.
+ for (int i = 0; i < MAX_REPROCESS_IMAGES; i++) {
+ // Could be slow for YUV reprocessing, do it in advance.
+ mWriter.queueInputImage(inputImages[i]);
+ }
+
+ // Submit the requests
+ for (int i = 0; i < MAX_REPROCESS_IMAGES; i++) {
+ mSession.capture(reprocessReqs[i].build(), null, null);
+ }
+
+ // Get images
+ startTimeMs = SystemClock.elapsedRealtime();
+ for (int i = 0; i < MAX_REPROCESS_IMAGES; i++) {
+ mJpegListener.getImage(CameraTestUtils.CAPTURE_IMAGE_TIMEOUT_MS);
+ getImageLatenciesMs[i] = SystemClock.elapsedRealtime() - startTimeMs;
+ startTimeMs = SystemClock.elapsedRealtime();
+ }
+ } else {
+ // sync capture: issue reprocess requests one by one; only submit the next one when
+ // the previous captured image is returned. This tests the back-to-back capture
+ // performance.
+ for (int i = 0; i < MAX_REPROCESS_IMAGES; i++) {
+ startTimeMs = SystemClock.elapsedRealtime();
+ mWriter.queueInputImage(inputImages[i]);
+ mSession.capture(reprocessReqs[i].build(), null, null);
+ mJpegListener.getImage(CameraTestUtils.CAPTURE_IMAGE_TIMEOUT_MS);
+ getImageLatenciesMs[i] = SystemClock.elapsedRealtime() - startTimeMs;
+ }
+ }
+
+ String reprocessType = " YUV reprocessing ";
+ if (reprocessInputFormat == ImageFormat.PRIVATE) {
+ reprocessType = " opaque reprocessing ";
+ }
+
+ // Report the performance data
+ if (asyncMode) {
+ mReportLog.printArray("Camera " + mCamera.getId()
+ + ":" + reprocessType + "capture latency", getImageLatenciesMs,
+ ResultType.LOWER_BETTER, ResultUnit.MS);
+ mReportLog.printSummary("Camera reprocessing average latency for Camera " +
+ mCamera.getId(), Stat.getAverage(getImageLatenciesMs), ResultType.LOWER_BETTER,
+ ResultUnit.MS);
+ } else {
+ mReportLog.printArray("Camera " + mCamera.getId()
+ + ":" + reprocessType + "shot to shot latency", getImageLatenciesMs,
+ ResultType.LOWER_BETTER, ResultUnit.MS);
+ mReportLog.printSummary("Camera reprocessing shot to shot average latency for Camera " +
+ mCamera.getId(), Stat.getAverage(getImageLatenciesMs), ResultType.LOWER_BETTER,
+ ResultUnit.MS);
+ }
+ }
+
+ /**
+ * Start preview and ZSL streaming
+ */
+ private void startZslStreaming() throws Exception {
+ CaptureRequest.Builder zslBuilder =
+ mCamera.createCaptureRequest(CameraDevice.TEMPLATE_ZERO_SHUTTER_LAG);
+ zslBuilder.addTarget(mPreviewSurface);
+ zslBuilder.addTarget(mCameraZslReader.getSurface());
+ mSession.setRepeatingRequest(zslBuilder.build(), mZslResultListener, mHandler);
+ }
+
+ /**
+ * Wait for a certain number of frames; the images and results will be drained from the
+ * listeners to make sure that the next reprocessing can get matched results and images.
+ *
+ * @param numFrameWait The number of frames to wait before returning; 0 means that
+ * this call returns immediately after streaming starts.
+ */
+ private void waitForFrames(int numFrameWait) {
+ if (numFrameWait < 0) {
+ throw new IllegalArgumentException("numFrameWait " + numFrameWait +
+ " should be non-negative");
+ }
+
+ if (numFrameWait > 0) {
+ // Let it stream for a while
+ waitForNumResults(mZslResultListener, numFrameWait);
+ // Drain the pending images, to ensure that all future images have an associated
+ // capture result available.
+ mCameraZslImageListener.drain();
+ }
+ }
+
+ private void closeReaderWriters() {
+ CameraTestUtils.closeImageReader(mCameraZslReader);
+ mCameraZslReader = null;
+ CameraTestUtils.closeImageReader(mJpegReader);
+ mJpegReader = null;
+ CameraTestUtils.closeImageWriter(mWriter);
+ mWriter = null;
+ }
+
+ private void prepareReprocessCapture(int inputFormat)
+ throws CameraAccessException {
+ // 1. Find the right preview and capture sizes.
+ Size maxPreviewSize = mOrderedPreviewSizes.get(0);
+ Size[] supportedInputSizes =
+ mStaticInfo.getAvailableSizesForFormatChecked(inputFormat,
+ StaticMetadata.StreamDirection.Input);
+ Size maxInputSize = CameraTestUtils.getMaxSize(supportedInputSizes);
+ Size maxJpegSize = mOrderedStillSizes.get(0);
+ updatePreviewSurface(maxPreviewSize);
+ mZslResultListener = new SimpleCaptureCallback();
+
+ // 2. Create camera output ImageReaders.
+ // YUV/Opaque output, camera should support output with input size/format
+ mCameraZslImageListener = new SimpleImageReaderListener(
+ /*asyncMode*/true, MAX_ZSL_IMAGES / 2);
+ mCameraZslReader = CameraTestUtils.makeImageReader(
+ maxInputSize, inputFormat, MAX_ZSL_IMAGES, mCameraZslImageListener, mHandler);
+ // Jpeg reprocess output
+ mJpegListener = new SimpleImageReaderListener();
+ mJpegReader = CameraTestUtils.makeImageReader(
+ maxJpegSize, ImageFormat.JPEG, MAX_JPEG_IMAGES, mJpegListener, mHandler);
+
+ // create camera reprocess session
+ List<Surface> outSurfaces = new ArrayList<Surface>();
+ outSurfaces.add(mPreviewSurface);
+ outSurfaces.add(mCameraZslReader.getSurface());
+ outSurfaces.add(mJpegReader.getSurface());
+ InputConfiguration inputConfig = new InputConfiguration(maxInputSize.getWidth(),
+ maxInputSize.getHeight(), inputFormat);
+ mSessionListener = new BlockingSessionCallback();
+ mSession = CameraTestUtils.configureReprocessableCameraSession(
+ mCamera, inputConfig, outSurfaces, mSessionListener, mHandler);
+
+ // 3. Create ImageWriter for input
+ mWriter = CameraTestUtils.makeImageWriter(
+ mSession.getInputSurface(), MAX_INPUT_IMAGES, /*listener*/null, /*handler*/null);
+
+ }
+
private void blockingStopPreview() throws Exception {
stopPreview();
mSessionListener.getStateWaiter().waitForState(SESSION_CLOSED,
@@ -295,19 +655,6 @@
imageListener.waitForImageAvailable(CameraTestUtils.CAPTURE_IMAGE_TIMEOUT_MS);
}
- private void blockingCaptureImage(CaptureCallback listener,
- SimpleImageListener imageListener) throws Exception {
- if (mReaderSurface == null) {
- throw new IllegalStateException("reader surface must be initialized first");
- }
-
- CaptureRequest.Builder captureBuilder =
- mCamera.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE);
- captureBuilder.addTarget(mReaderSurface);
- mSession.capture(captureBuilder.build(), listener, mHandler);
- imageListener.waitForImageAvailable(CameraTestUtils.CAPTURE_IMAGE_TIMEOUT_MS);
- }
-
/**
* Configure reader and preview outputs and wait until done.
*/
diff --git a/tests/tests/hardware/src/android/hardware/camera2/cts/RecordingTest.java b/tests/tests/hardware/src/android/hardware/camera2/cts/RecordingTest.java
index 005d948..4792a42 100644
--- a/tests/tests/hardware/src/android/hardware/camera2/cts/RecordingTest.java
+++ b/tests/tests/hardware/src/android/hardware/camera2/cts/RecordingTest.java
@@ -368,7 +368,7 @@
Range<Integer> maxRange = availableFpsRanges[0];
boolean foundRange = false;
for (Range<Integer> range : availableFpsRanges) {
- if (range.getLower() == range.getUpper() && range.getLower() >= maxRange.getLower()) {
+ if (range.getLower().equals(range.getUpper()) && range.getLower() >= maxRange.getLower()) {
foundRange = true;
maxRange = range;
}
@@ -631,6 +631,13 @@
Size maxPreviewSize = mOrderedPreviewSizes.get(0);
+ if (mStaticInfo.isHardwareLevelLegacy() &&
+ (videoSz.getWidth() > maxPreviewSize.getWidth() ||
+ videoSz.getHeight() > maxPreviewSize.getHeight())) {
+ // Skip. Legacy mode can only do recording up to max preview size
+ continue;
+ }
+
// For LEGACY, find closest supported smaller or equal JPEG size to the current video
// size; if no size is smaller than the video, pick the smallest JPEG size. The assert
// for video size above guarantees that for LIMITED or FULL, we select videoSz here.
@@ -1018,7 +1025,7 @@
));
}
- durationMs = (int) (nextTS - currentTS) / 1000000;
+ durationMs = (nextTS - currentTS) / 1000000.0;
mCollector.expectTrue(
String.format(
"Video %dx%d Frame drop detected after video snapshot: " +
@@ -1032,7 +1039,7 @@
if (durationMs >= expectedDurationMs * 2) {
Log.w(TAG, String.format(
"Video %dx%d Frame drop detected after video snapshot: " +
- "duration %dms (expected %dms)",
+ "duration %fms (expected %fms)",
mVideoSize.getWidth(), mVideoSize.getHeight(),
durationMs, expectedDurationMs
));
diff --git a/tests/tests/hardware/src/android/hardware/camera2/cts/ReprocessCaptureTest.java b/tests/tests/hardware/src/android/hardware/camera2/cts/ReprocessCaptureTest.java
index 30c3526..945bb4c 100644
--- a/tests/tests/hardware/src/android/hardware/camera2/cts/ReprocessCaptureTest.java
+++ b/tests/tests/hardware/src/android/hardware/camera2/cts/ReprocessCaptureTest.java
@@ -25,6 +25,7 @@
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CameraDevice;
import android.hardware.camera2.CaptureRequest;
+import android.hardware.camera2.CaptureResult;
import android.hardware.camera2.TotalCaptureResult;
import android.hardware.camera2.cts.helpers.StaticMetadata;
import android.hardware.camera2.cts.helpers.StaticMetadata.CheckLevel;
@@ -48,8 +49,6 @@
private static final String TAG = "ReprocessCaptureTest";
private static final boolean VERBOSE = Log.isLoggable(TAG, Log.VERBOSE);
private static final boolean DEBUG = Log.isLoggable(TAG, Log.DEBUG);
- private static final int MAX_NUM_IMAGE_READER_IMAGES = 3;
- private static final int MAX_NUM_IMAGE_WRITER_IMAGES = 3;
private static final int CAPTURE_TIMEOUT_FRAMES = 100;
private static final int CAPTURE_TIMEOUT_MS = 3000;
private static final int WAIT_FOR_SURFACE_CHANGE_TIMEOUT_MS = 1000;
@@ -57,16 +56,27 @@
private static final int PREVIEW_TEMPLATE = CameraDevice.TEMPLATE_PREVIEW;
private static final int NUM_REPROCESS_TEST_LOOP = 3;
private static final int NUM_REPROCESS_CAPTURES = 3;
+ private static final int NUM_REPROCESS_BURST = 3;
private int mDumpFrameCount = 0;
// The image reader for the first regular capture
private ImageReader mFirstImageReader;
// The image reader for the reprocess capture
private ImageReader mSecondImageReader;
+ // A flag indicating whether the regular capture and the reprocess capture share the same image
+ // reader. If it's true, mFirstImageReader should be used for regular and reprocess outputs.
+ private boolean mShareOneImageReader;
private SimpleImageReaderListener mFirstImageReaderListener;
private SimpleImageReaderListener mSecondImageReaderListener;
private Surface mInputSurface;
private ImageWriter mImageWriter;
+ private SimpleImageWriterListener mImageWriterListener;
+
+ private enum CaptureTestCase {
+ SINGLE_SHOT,
+ BURST,
+ MIXED_BURST
+ }
/**
* Test YUV_420_888 -> YUV_420_888 with maximal supported sizes
@@ -137,7 +147,8 @@
// open Camera device
openDevice(id);
// no preview
- testReprocessingAllCombinations(id, null);
+ testReprocessingAllCombinations(id, /*previewSize*/null,
+ CaptureTestCase.SINGLE_SHOT);
} finally {
closeDevice();
}
@@ -156,7 +167,8 @@
try {
// open Camera device
openDevice(id);
- testReprocessingAllCombinations(id, mOrderedPreviewSizes.get(0));
+ testReprocessingAllCombinations(id, mOrderedPreviewSizes.get(0),
+ CaptureTestCase.SINGLE_SHOT);
} finally {
closeDevice();
}
@@ -234,8 +246,8 @@
}
setupImageReaders(inputSize, inputFormat, reprocessOutputSize,
- reprocessOutputFormat);
- setupReprocessibleSession(/*previewSurface*/null);
+ reprocessOutputFormat, /*maxImages*/1);
+ setupReprocessableSession(/*previewSurface*/null, /*numImageWriterImages*/1);
TotalCaptureResult result = submitCaptureRequest(mFirstImageReader.getSurface(),
/*inputResult*/null);
@@ -246,11 +258,17 @@
// recreate the session
closeReprossibleSession();
- setupReprocessibleSession(/*previewSurface*/null);
+ setupReprocessableSession(/*previewSurface*/null, /*numImageWriterImages*/1);
try {
+ TotalCaptureResult reprocessResult;
// issue and wait on reprocess capture request
- TotalCaptureResult reprocessResult =
- submitCaptureRequest(mSecondImageReader.getSurface(), result);
+ if (mShareOneImageReader) {
+ reprocessResult =
+ submitCaptureRequest(mFirstImageReader.getSurface(), result);
+ } else {
+ reprocessResult =
+ submitCaptureRequest(mSecondImageReader.getSurface(), result);
+ }
fail("Camera " + id + ": should get IllegalArgumentException for cross " +
"session reprocess captrue.");
} catch (IllegalArgumentException e) {
@@ -268,8 +286,52 @@
}
}
- // todo: test aborting reprocessing captures.
- // todo: test burst reprocessing captures.
+ /**
+ * Test burst reprocessing captures with and without preview.
+ */
+ public void testBurstReprocessing() throws Exception {
+ for (String id : mCameraIds) {
+ if (!isYuvReprocessSupported(id) && !isOpaqueReprocessSupported(id)) {
+ continue;
+ }
+
+ try {
+ // open Camera device
+ openDevice(id);
+ // no preview
+ testReprocessingAllCombinations(id, /*previewSize*/null, CaptureTestCase.BURST);
+ // with preview
+ testReprocessingAllCombinations(id, mOrderedPreviewSizes.get(0),
+ CaptureTestCase.BURST);
+ } finally {
+ closeDevice();
+ }
+ }
+ }
+
+ /**
+ * Test burst captures mixed with regular and reprocess captures with and without preview.
+ */
+ public void testMixedBurstReprocessing() throws Exception {
+ for (String id : mCameraIds) {
+ if (!isYuvReprocessSupported(id) && !isOpaqueReprocessSupported(id)) {
+ continue;
+ }
+
+ try {
+ // open Camera device
+ openDevice(id);
+ // no preview
+ testReprocessingAllCombinations(id, /*previewSize*/null,
+ CaptureTestCase.MIXED_BURST);
+ // with preview
+ testReprocessingAllCombinations(id, mOrderedPreviewSizes.get(0),
+ CaptureTestCase.MIXED_BURST);
+ } finally {
+ closeDevice();
+ }
+ }
+ }
/**
* Test the input format and output format with the largest input and output sizes.
@@ -294,8 +356,8 @@
/**
* Test all input format, input size, output format, and output size combinations.
*/
- private void testReprocessingAllCombinations(String cameraId,
- Size previewSize) throws Exception {
+ private void testReprocessingAllCombinations(String cameraId, Size previewSize,
+ CaptureTestCase captureTestCase) throws Exception {
int[] supportedInputFormats =
mStaticInfo.getAvailableFormats(StaticMetadata.StreamDirection.Input);
@@ -314,15 +376,165 @@
StaticMetadata.StreamDirection.Output);
for (Size reprocessOutputSize : supportedReprocessOutputSizes) {
- testReprocess(cameraId, inputSize, inputFormat,
- reprocessOutputSize, reprocessOutputFormat, previewSize,
- NUM_REPROCESS_CAPTURES);
+ switch (captureTestCase) {
+ case SINGLE_SHOT:
+ testReprocess(cameraId, inputSize, inputFormat,
+ reprocessOutputSize, reprocessOutputFormat, previewSize,
+ NUM_REPROCESS_CAPTURES);
+ break;
+ case BURST:
+ testReprocessBurst(cameraId, inputSize, inputFormat,
+ reprocessOutputSize, reprocessOutputFormat, previewSize,
+ NUM_REPROCESS_BURST);
+ break;
+ case MIXED_BURST:
+ testReprocessMixedBurst(cameraId, inputSize, inputFormat,
+ reprocessOutputSize, reprocessOutputFormat, previewSize,
+ NUM_REPROCESS_BURST);
+ break;
+ default:
+ throw new IllegalArgumentException("Invalid capture type");
+ }
}
}
}
}
}
+ /**
+ * Test burst that is mixed with regular and reprocess capture requests.
+ */
+ private void testReprocessMixedBurst(String cameraId, Size inputSize, int inputFormat,
+ Size reprocessOutputSize, int reprocessOutputFormat, Size previewSize,
+ int numBurst) throws Exception {
+ if (VERBOSE) {
+ Log.v(TAG, "testReprocessMixedBurst: cameraId: " + cameraId + " inputSize: " +
+ inputSize + " inputFormat: " + inputFormat + " reprocessOutputSize: " +
+ reprocessOutputSize + " reprocessOutputFormat: " + reprocessOutputFormat +
+ " previewSize: " + previewSize + " numBurst: " + numBurst);
+ }
+
+ boolean enablePreview = (previewSize != null);
+ ImageResultHolder[] imageResultHolders = new ImageResultHolder[0];
+
+ try {
+ // totalNumBurst = number of regular burst + number of reprocess burst.
+ int totalNumBurst = numBurst * 2;
+
+ if (enablePreview) {
+ updatePreviewSurface(previewSize);
+ } else {
+ mPreviewSurface = null;
+ }
+
+ setupImageReaders(inputSize, inputFormat, reprocessOutputSize, reprocessOutputFormat,
+ totalNumBurst);
+ setupReprocessableSession(mPreviewSurface, /*numImageWriterImages*/numBurst);
+
+ if (enablePreview) {
+ startPreview(mPreviewSurface);
+ }
+
+ // Prepare an array of booleans indicating each capture's type (regular or reprocess)
+ boolean[] isReprocessCaptures = new boolean[totalNumBurst];
+ for (int i = 0; i < totalNumBurst; i++) {
+ if ((i & 1) == 0) {
+ isReprocessCaptures[i] = true;
+ } else {
+ isReprocessCaptures[i] = false;
+ }
+ }
+
+ imageResultHolders = doMixedReprocessBurstCapture(isReprocessCaptures);
+ for (ImageResultHolder holder : imageResultHolders) {
+ Image reprocessedImage = holder.getImage();
+ TotalCaptureResult result = holder.getTotalCaptureResult();
+
+ mCollector.expectImageProperties("testReprocessMixedBurst", reprocessedImage,
+ reprocessOutputFormat, reprocessOutputSize,
+ result.get(CaptureResult.SENSOR_TIMESTAMP));
+
+ if (DEBUG) {
+ Log.d(TAG, String.format("camera %s in %dx%d %d out %dx%d %d",
+ cameraId, inputSize.getWidth(), inputSize.getHeight(), inputFormat,
+ reprocessOutputSize.getWidth(), reprocessOutputSize.getHeight(),
+ reprocessOutputFormat));
+ dumpImage(reprocessedImage,
+ "/testReprocessMixedBurst_camera" + cameraId + "_" + mDumpFrameCount);
+ mDumpFrameCount++;
+ }
+ }
+ } finally {
+ for (ImageResultHolder holder : imageResultHolders) {
+ holder.getImage().close();
+ }
+ closeReprossibleSession();
+ closeImageReaders();
+ }
+ }
+
+ /**
+ * Test burst of reprocess capture requests.
+ */
+ private void testReprocessBurst(String cameraId, Size inputSize, int inputFormat,
+ Size reprocessOutputSize, int reprocessOutputFormat, Size previewSize,
+ int numBurst) throws Exception {
+ if (VERBOSE) {
+ Log.v(TAG, "testReprocessBurst: cameraId: " + cameraId + " inputSize: " +
+ inputSize + " inputFormat: " + inputFormat + " reprocessOutputSize: " +
+ reprocessOutputSize + " reprocessOutputFormat: " + reprocessOutputFormat +
+ " previewSize: " + previewSize + " numBurst: " + numBurst);
+ }
+
+ boolean enablePreview = (previewSize != null);
+ ImageResultHolder[] imageResultHolders = new ImageResultHolder[0];
+
+ try {
+ if (enablePreview) {
+ updatePreviewSurface(previewSize);
+ } else {
+ mPreviewSurface = null;
+ }
+
+ setupImageReaders(inputSize, inputFormat, reprocessOutputSize, reprocessOutputFormat,
+ numBurst);
+ setupReprocessableSession(mPreviewSurface, numBurst);
+
+ if (enablePreview) {
+ startPreview(mPreviewSurface);
+ }
+
+ imageResultHolders = doReprocessBurstCapture(numBurst);
+ for (ImageResultHolder holder : imageResultHolders) {
+ Image reprocessedImage = holder.getImage();
+ TotalCaptureResult result = holder.getTotalCaptureResult();
+
+ mCollector.expectImageProperties("testReprocessBurst", reprocessedImage,
+ reprocessOutputFormat, reprocessOutputSize,
+ result.get(CaptureResult.SENSOR_TIMESTAMP));
+
+ if (DEBUG) {
+ Log.d(TAG, String.format("camera %s in %dx%d %d out %dx%d %d",
+ cameraId, inputSize.getWidth(), inputSize.getHeight(), inputFormat,
+ reprocessOutputSize.getWidth(), reprocessOutputSize.getHeight(),
+ reprocessOutputFormat));
+ dumpImage(reprocessedImage,
+ "/testReprocessBurst_camera" + cameraId + "_" + mDumpFrameCount);
+ mDumpFrameCount++;
+ }
+ }
+ } finally {
+ for (ImageResultHolder holder : imageResultHolders) {
+ holder.getImage().close();
+ }
+ closeReprossibleSession();
+ closeImageReaders();
+ }
+ }
+
+ /**
+ * Test a sequence of reprocess capture requests.
+ */
private void testReprocess(String cameraId, Size inputSize, int inputFormat,
Size reprocessOutputSize, int reprocessOutputFormat, Size previewSize,
int numReprocessCaptures) throws Exception {
@@ -342,57 +554,39 @@
mPreviewSurface = null;
}
- setupImageReaders(inputSize, inputFormat, reprocessOutputSize, reprocessOutputFormat);
- setupReprocessibleSession(mPreviewSurface);
+ setupImageReaders(inputSize, inputFormat, reprocessOutputSize, reprocessOutputFormat,
+ /*maxImages*/1);
+ setupReprocessableSession(mPreviewSurface, /*numImageWriterImages*/1);
if (enablePreview) {
startPreview(mPreviewSurface);
}
for (int i = 0; i < numReprocessCaptures; i++) {
- Image reprocessedImage = null;
+ ImageResultHolder imageResultHolder = null;
try {
- reprocessedImage = doReprocessCapture();
+ imageResultHolder = doReprocessCapture();
+ Image reprocessedImage = imageResultHolder.getImage();
+ TotalCaptureResult result = imageResultHolder.getTotalCaptureResult();
- assertTrue(String.format("Reprocess output size is %dx%d. Expecting %dx%d.",
- reprocessedImage.getWidth(), reprocessedImage.getHeight(),
- reprocessOutputSize.getWidth(), reprocessOutputSize.getHeight()),
- reprocessedImage.getWidth() == reprocessOutputSize.getWidth() &&
- reprocessedImage.getHeight() == reprocessOutputSize.getHeight());
- assertTrue(String.format("Reprocess output format is %d. Expecting %d.",
- reprocessedImage.getFormat(), reprocessOutputFormat),
- reprocessedImage.getFormat() == reprocessOutputFormat);
+ mCollector.expectImageProperties("testReprocess", reprocessedImage,
+ reprocessOutputFormat, reprocessOutputSize,
+ result.get(CaptureResult.SENSOR_TIMESTAMP));
if (DEBUG) {
- String filename = DEBUG_FILE_NAME_BASE + "/reprocessed_camera" + cameraId +
- "_" + mDumpFrameCount;
- mDumpFrameCount++;
-
- switch(reprocessedImage.getFormat()) {
- case ImageFormat.JPEG:
- filename += ".jpg";
- break;
- case ImageFormat.NV16:
- case ImageFormat.NV21:
- case ImageFormat.YUV_420_888:
- filename += ".yuv";
- break;
- default:
- filename += "." + reprocessedImage.getFormat();
- break;
- }
-
- Log.d(TAG, "dumping an image to " + filename);
Log.d(TAG, String.format("camera %s in %dx%d %d out %dx%d %d",
cameraId, inputSize.getWidth(), inputSize.getHeight(), inputFormat,
reprocessOutputSize.getWidth(), reprocessOutputSize.getHeight(),
reprocessOutputFormat));
- dumpFile(filename , getDataFromImage(reprocessedImage));
+
+ dumpImage(reprocessedImage,
+ "/testReprocess_camera" + cameraId + "_" + mDumpFrameCount);
+ mDumpFrameCount++;
}
} finally {
- if (reprocessedImage != null) {
- reprocessedImage.close();
+ if (imageResultHolder != null) {
+ imageResultHolder.getImage().close();
}
}
}
@@ -402,20 +596,37 @@
}
}
+ /**
+ * Set up two image readers: one for regular capture (used for reprocess input) and one for
+ * reprocess capture.
+ */
private void setupImageReaders(Size inputSize, int inputFormat, Size reprocessOutputSize,
- int reprocessOutputFormat) {
+ int reprocessOutputFormat, int maxImages) {
+ mShareOneImageReader = false;
+ // If the regular output and reprocess output have the same size and format,
+ // they can share one image reader.
+ if (inputFormat == reprocessOutputFormat &&
+ inputSize.equals(reprocessOutputSize)) {
+ maxImages *= 2;
+ mShareOneImageReader = true;
+ }
// create an ImageReader for the regular capture
mFirstImageReaderListener = new SimpleImageReaderListener();
- mFirstImageReader = makeImageReader(inputSize, inputFormat,
- MAX_NUM_IMAGE_READER_IMAGES, mFirstImageReaderListener, mHandler);
+ mFirstImageReader = makeImageReader(inputSize, inputFormat, maxImages,
+ mFirstImageReaderListener, mHandler);
- // create an ImageReader for the reprocess capture
- mSecondImageReaderListener = new SimpleImageReaderListener();
- mSecondImageReader = makeImageReader(reprocessOutputSize, reprocessOutputFormat,
- MAX_NUM_IMAGE_READER_IMAGES, mSecondImageReaderListener, mHandler);
+ if (!mShareOneImageReader) {
+ // create an ImageReader for the reprocess capture
+ mSecondImageReaderListener = new SimpleImageReaderListener();
+ mSecondImageReader = makeImageReader(reprocessOutputSize, reprocessOutputFormat,
+ maxImages, mSecondImageReaderListener, mHandler);
+ }
}
+ /**
+ * Close two image readers.
+ */
private void closeImageReaders() {
CameraTestUtils.closeImageReader(mFirstImageReader);
mFirstImageReader = null;
@@ -423,27 +634,47 @@
mSecondImageReader = null;
}
- private void setupReprocessibleSession(Surface previewSurface) throws Exception {
- // create a reprocessible capture session
+ /**
+ * Set up a reprocessable session and create an ImageWriter with the session's input surface.
+ */
+ private void setupReprocessableSession(Surface previewSurface, int numImageWriterImages)
+ throws Exception {
+ // create a reprocessable capture session
List<Surface> outSurfaces = new ArrayList<Surface>();
outSurfaces.add(mFirstImageReader.getSurface());
- outSurfaces.add(mSecondImageReader.getSurface());
+ if (!mShareOneImageReader) {
+ outSurfaces.add(mSecondImageReader.getSurface());
+ }
if (previewSurface != null) {
outSurfaces.add(previewSurface);
}
InputConfiguration inputConfig = new InputConfiguration(mFirstImageReader.getWidth(),
mFirstImageReader.getHeight(), mFirstImageReader.getImageFormat());
+ assertTrue(String.format("inputConfig is wrong: %dx%d format %d. Expect %dx%d format %d",
+ inputConfig.getWidth(), inputConfig.getHeight(), inputConfig.getFormat(),
+ mFirstImageReader.getWidth(), mFirstImageReader.getHeight(),
+ mFirstImageReader.getImageFormat()),
+ inputConfig.getWidth() == mFirstImageReader.getWidth() &&
+ inputConfig.getHeight() == mFirstImageReader.getHeight() &&
+ inputConfig.getFormat() == mFirstImageReader.getImageFormat());
+
mSessionListener = new BlockingSessionCallback();
- mSession = configureReprocessibleCameraSession(mCamera, inputConfig, outSurfaces,
+ mSession = configureReprocessableCameraSession(mCamera, inputConfig, outSurfaces,
mSessionListener, mHandler);
// create an ImageWriter
mInputSurface = mSession.getInputSurface();
mImageWriter = ImageWriter.newInstance(mInputSurface,
- MAX_NUM_IMAGE_WRITER_IMAGES);
+ numImageWriterImages);
+
+ mImageWriterListener = new SimpleImageWriterListener(mImageWriter);
+ mImageWriter.setOnImageReleasedListener(mImageWriterListener, mHandler);
}
+ /**
+ * Close the reprocessable session and ImageWriter.
+ */
private void closeReprossibleSession() {
mInputSurface = null;
@@ -458,20 +689,74 @@
}
}
- private Image doReprocessCapture() throws Exception {
- // issue and wait on regular capture request
- TotalCaptureResult result = submitCaptureRequest(mFirstImageReader.getSurface(),
- /*inputResult*/null);
- Image image = mFirstImageReaderListener.getImage(CAPTURE_TIMEOUT_MS);
+ /**
+ * Do one reprocess capture.
+ */
+ private ImageResultHolder doReprocessCapture() throws Exception {
+ return doReprocessBurstCapture(/*numBurst*/1)[0];
+ }
- // queue the image to image writer
- mImageWriter.queueInputImage(image);
+ /**
+ * Do a burst of reprocess captures.
+ */
+ private ImageResultHolder[] doReprocessBurstCapture(int numBurst) throws Exception {
+ boolean[] isReprocessCaptures = new boolean[numBurst];
+ for (int i = 0; i < numBurst; i++) {
+ isReprocessCaptures[i] = true;
+ }
- // issue and wait on reprocess capture request
- TotalCaptureResult reprocessResult =
- submitCaptureRequest(mSecondImageReader.getSurface(), result);
+ return doMixedReprocessBurstCapture(isReprocessCaptures);
+ }
- return mSecondImageReaderListener.getImage(CAPTURE_TIMEOUT_MS);
+ /**
+ * Do a burst of captures that are mixed with regular and reprocess captures.
+ *
+ * @param isReprocessCaptures An array whose elements indicate whether each capture is a
+ * reprocess capture request. If an element is true, it represents a reprocess
+ * capture request; if it is false, it represents a regular capture request.
+ * The size of the array is the number of capture requests in the burst.
+ */
+ private ImageResultHolder[] doMixedReprocessBurstCapture(boolean[] isReprocessCaptures)
+ throws Exception {
+ if (isReprocessCaptures == null || isReprocessCaptures.length <= 0) {
+ throw new IllegalArgumentException("isReprocessCaptures must have at least 1 capture.");
+ }
+
+ TotalCaptureResult[] results = new TotalCaptureResult[isReprocessCaptures.length];
+ for (int i = 0; i < isReprocessCaptures.length; i++) {
+ // submit a capture and get the result if this entry is a reprocess capture.
+ if (isReprocessCaptures[i]) {
+ results[i] = submitCaptureRequest(mFirstImageReader.getSurface(),
+ /*inputResult*/null);
+ mImageWriter.queueInputImage(
+ mFirstImageReaderListener.getImage(CAPTURE_TIMEOUT_MS));
+ }
+ }
+
+ Surface[] outputSurfaces = new Surface[isReprocessCaptures.length];
+ for (int i = 0; i < isReprocessCaptures.length; i++) {
+ if (mShareOneImageReader) {
+ outputSurfaces[i] = mFirstImageReader.getSurface();
+ } else {
+ outputSurfaces[i] = mSecondImageReader.getSurface();
+ }
+ }
+
+ TotalCaptureResult[] finalResults = submitMixedCaptureBurstRequest(outputSurfaces, results);
+
+ ImageResultHolder[] holders = new ImageResultHolder[isReprocessCaptures.length];
+ for (int i = 0; i < isReprocessCaptures.length; i++) {
+ Image image;
+ if (mShareOneImageReader) {
+ image = mFirstImageReaderListener.getImage(CAPTURE_TIMEOUT_MS);
+ } else {
+ image = mSecondImageReaderListener.getImage(CAPTURE_TIMEOUT_MS);
+ }
+ holders[i] = new ImageResultHolder(image, finalResults[i]);
+ }
+
+ return holders;
}
/**
@@ -487,22 +772,85 @@
* Issue a capture request and return the result. If inputResult is null, it's a regular
* request. Otherwise, it's a reprocess request.
*/
- private TotalCaptureResult submitCaptureRequest(Surface output, TotalCaptureResult inputResult)
- throws Exception {
- SimpleCaptureCallback captureCallback = new SimpleCaptureCallback();
- CaptureRequest.Builder builder;
- if (inputResult != null) {
- builder = mCamera.createReprocessCaptureRequest(inputResult);
- } else {
- builder = mCamera.createCaptureRequest(CAPTURE_TEMPLATE);
+ private TotalCaptureResult submitCaptureRequest(Surface output,
+ TotalCaptureResult inputResult) throws Exception {
+ Surface[] outputs = new Surface[1];
+ outputs[0] = output;
+ TotalCaptureResult[] inputResults = new TotalCaptureResult[1];
+ inputResults[0] = inputResult;
+
+ return submitMixedCaptureBurstRequest(outputs, inputResults)[0];
+ }
+
+ /**
+ * Submit a burst request mixed with regular and reprocess requests.
+ *
+ * @param outputs An array of output surfaces. One output surface will be used in one request
+ * so the length of the array is the number of requests in a burst request.
+ * @param inputResults An array of input results. If it's null, all requests are regular
+ * requests. If an element is null, that element represents a regular
+ * request. If an element is not null, that element represents a reprocess
+ * request.
+ *
+ */
+ private TotalCaptureResult[] submitMixedCaptureBurstRequest(Surface[] outputs,
+ TotalCaptureResult[] inputResults) throws Exception {
+ if (outputs == null || outputs.length <= 0) {
+ throw new IllegalArgumentException("outputs must have at least 1 surface");
+ } else if (inputResults != null && inputResults.length != outputs.length) {
+ throw new IllegalArgumentException("The lengths of outputs and inputResults " +
+ "don't match");
}
- builder.addTarget(output);
- CaptureRequest request = builder.build();
- mSession.capture(request, captureCallback, mHandler);
+ int numReprocessCaptures = 0;
+ SimpleCaptureCallback captureCallback = new SimpleCaptureCallback();
+ ArrayList<CaptureRequest> captureRequests = new ArrayList<>(outputs.length);
- // wait for regular capture result
- return captureCallback.getTotalCaptureResultForRequest(request, CAPTURE_TIMEOUT_FRAMES);
+ // Prepare a list of capture requests. Whether it's a regular or reprocess capture request
+ // is based on inputResults array.
+ for (int i = 0; i < outputs.length; i++) {
+ CaptureRequest.Builder builder;
+ boolean isReprocess = (inputResults != null && inputResults[i] != null);
+ if (isReprocess) {
+ builder = mCamera.createReprocessCaptureRequest(inputResults[i]);
+ numReprocessCaptures++;
+ } else {
+ builder = mCamera.createCaptureRequest(CAPTURE_TEMPLATE);
+ }
+ builder.addTarget(outputs[i]);
+ CaptureRequest request = builder.build();
+ assertTrue("Capture request reprocess type " + request.isReprocess() + " is wrong.",
+ request.isReprocess() == isReprocess);
+
+ captureRequests.add(request);
+ }
+
+ if (captureRequests.size() == 1) {
+ mSession.capture(captureRequests.get(0), captureCallback, mHandler);
+ } else {
+ mSession.captureBurst(captureRequests, captureCallback, mHandler);
+ }
+
+ TotalCaptureResult[] results;
+ if (numReprocessCaptures == 0 || numReprocessCaptures == outputs.length) {
+ results = new TotalCaptureResult[outputs.length];
+ // If the requests are not mixed, they should come in order.
+ for (int i = 0; i < results.length; i++){
+ results[i] = captureCallback.getTotalCaptureResultForRequest(
+ captureRequests.get(i), CAPTURE_TIMEOUT_FRAMES);
+ }
+ } else {
+ // If the requests are mixed, they may not come in order.
+ results = captureCallback.getTotalCaptureResultsForRequests(
+ captureRequests, CAPTURE_TIMEOUT_FRAMES * captureRequests.size());
+ }
+
+ // Make sure all the input images are released.
+ for (int i = 0; i < numReprocessCaptures; i++) {
+ mImageWriterListener.waitForImageReleased(CAPTURE_TIMEOUT_MS);
+ }
+
+ return results;
}
private Size getMaxSize(int format, StaticMetadata.StreamDirection direction) {
@@ -511,18 +859,51 @@
}
private boolean isYuvReprocessSupported(String cameraId) throws Exception {
- StaticMetadata info =
- new StaticMetadata(mCameraManager.getCameraCharacteristics(cameraId),
- CheckLevel.ASSERT, /*collector*/ null);
- return info.isCapabilitySupported(
- CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING);
+ return isReprocessSupported(cameraId, ImageFormat.YUV_420_888);
}
private boolean isOpaqueReprocessSupported(String cameraId) throws Exception {
- StaticMetadata info =
- new StaticMetadata(mCameraManager.getCameraCharacteristics(cameraId),
- CheckLevel.ASSERT, /*collector*/ null);
- return info.isCapabilitySupported(
- CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_OPAQUE_REPROCESSING);
+ return isReprocessSupported(cameraId, ImageFormat.PRIVATE);
}
-}
\ No newline at end of file
+
+ private void dumpImage(Image image, String name) {
+ String filename = DEBUG_FILE_NAME_BASE + name;
+ switch(image.getFormat()) {
+ case ImageFormat.JPEG:
+ filename += ".jpg";
+ break;
+ case ImageFormat.NV16:
+ case ImageFormat.NV21:
+ case ImageFormat.YUV_420_888:
+ filename += ".yuv";
+ break;
+ default:
+ filename += "." + image.getFormat();
+ break;
+ }
+
+ Log.d(TAG, "dumping an image to " + filename);
+ dumpFile(filename, getDataFromImage(image));
+ }
+
+ /**
+ * A class that holds an Image and a TotalCaptureResult.
+ */
+ private static class ImageResultHolder {
+ private final Image mImage;
+ private final TotalCaptureResult mResult;
+
+ public ImageResultHolder(Image image, TotalCaptureResult result) {
+ mImage = image;
+ mResult = result;
+ }
+
+ public Image getImage() {
+ return mImage;
+ }
+
+ public TotalCaptureResult getTotalCaptureResult() {
+ return mResult;
+ }
+ }
+}
diff --git a/tests/tests/hardware/src/android/hardware/camera2/cts/StaticMetadataTest.java b/tests/tests/hardware/src/android/hardware/camera2/cts/StaticMetadataTest.java
index 3076d09..31f9188 100644
--- a/tests/tests/hardware/src/android/hardware/camera2/cts/StaticMetadataTest.java
+++ b/tests/tests/hardware/src/android/hardware/camera2/cts/StaticMetadataTest.java
@@ -380,7 +380,7 @@
break;
case REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING:
- case REQUEST_AVAILABLE_CAPABILITIES_OPAQUE_REPROCESSING:
+ case REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING:
// Tested in ExtendedCameraCharacteristicsTest
return;
default:
diff --git a/tests/tests/hardware/src/android/hardware/camera2/cts/SurfaceViewPreviewTest.java b/tests/tests/hardware/src/android/hardware/camera2/cts/SurfaceViewPreviewTest.java
index 01da4c8..7d377d6 100644
--- a/tests/tests/hardware/src/android/hardware/camera2/cts/SurfaceViewPreviewTest.java
+++ b/tests/tests/hardware/src/android/hardware/camera2/cts/SurfaceViewPreviewTest.java
@@ -18,6 +18,8 @@
import static android.hardware.camera2.cts.CameraTestUtils.*;
+import android.graphics.ImageFormat;
+import android.view.Surface;
import android.hardware.camera2.CameraCaptureSession;
import android.hardware.camera2.CameraCaptureSession.CaptureCallback;
import android.hardware.camera2.CameraDevice;
@@ -29,6 +31,7 @@
import android.hardware.camera2.cts.CameraTestUtils.SimpleCaptureCallback;
import android.hardware.camera2.cts.testcases.Camera2SurfaceViewTestCase;
import android.util.Log;
+import android.util.Pair;
import android.util.Range;
import org.mockito.ArgumentCaptor;
@@ -36,6 +39,7 @@
import static org.mockito.Mockito.*;
+import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
@@ -118,6 +122,192 @@
}
/**
+ * Test to verify the {@link CameraCaptureSession#prepare} method works correctly, and has the
+ * expected effects on performance.
+ *
+ * - Ensure that prepare() results in onSurfacePrepared() being invoked
+ * - Ensure that prepare() does not cause preview glitches while operating
+ * - Ensure that starting to use a newly-prepared output does not cause additional
+ * preview glitches to occur
+ */
+ public void testPreparePerformance() throws Throwable {
+ for (int i = 0; i < mCameraIds.length; i++) {
+ try {
+ openDevice(mCameraIds[i]);
+
+ preparePerformanceTestByCamera(mCameraIds[i]);
+ }
+ finally {
+ closeDevice();
+ }
+ }
+ }
+
+ private void preparePerformanceTestByCamera(String cameraId) throws Exception {
+ final int MAX_IMAGES_TO_PREPARE = 10;
+ final int UNKNOWN_LATENCY_RESULT_WAIT = 5;
+ final int MAX_RESULTS_TO_WAIT = 10;
+ final int FRAMES_FOR_AVERAGING = 100;
+ final int PREPARE_TIMEOUT_MS = 10000; // 10 s
+ final float PREPARE_FRAME_RATE_BOUNDS = 0.05f; // fraction allowed difference
+ final float PREPARE_PEAK_RATE_BOUNDS = 0.5f; // fraction allowed difference
+
+ Size maxYuvSize = getSupportedPreviewSizes(cameraId, mCameraManager, null).get(0);
+ Size maxPreviewSize = mOrderedPreviewSizes.get(0);
+
+ // Don't need image data, just drop it right away to minimize overhead
+ ImageDropperListener imageListener = new ImageDropperListener();
+
+ SimpleCaptureCallback resultListener = new SimpleCaptureCallback();
+
+ CaptureRequest.Builder previewRequest =
+ mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
+
+ // Configure outputs and session
+
+ updatePreviewSurface(maxPreviewSize);
+
+ createImageReader(maxYuvSize, ImageFormat.YUV_420_888, MAX_IMAGES_TO_PREPARE, imageListener);
+
+ List<Surface> outputSurfaces = new ArrayList<Surface>();
+ outputSurfaces.add(mPreviewSurface);
+ outputSurfaces.add(mReaderSurface);
+
+ CameraCaptureSession.StateCallback mockSessionListener =
+ mock(CameraCaptureSession.StateCallback.class);
+
+ mSession = configureCameraSession(mCamera, outputSurfaces, mockSessionListener, mHandler);
+
+ previewRequest.addTarget(mPreviewSurface);
+ Range<Integer> maxFpsTarget = mStaticInfo.getAeMaxTargetFpsRange();
+ previewRequest.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, maxFpsTarget);
+
+ mSession.setRepeatingRequest(previewRequest.build(), resultListener, mHandler);
+
+ // Converge AE
+ waitForAeStable(resultListener, UNKNOWN_LATENCY_RESULT_WAIT);
+
+ if (mStaticInfo.isAeLockSupported()) {
+ // Lock AE if possible to improve stability
+ previewRequest.set(CaptureRequest.CONTROL_AE_LOCK, true);
+ mSession.setRepeatingRequest(previewRequest.build(), resultListener, mHandler);
+ waitForResultValue(resultListener, CaptureResult.CONTROL_AE_STATE,
+ CaptureResult.CONTROL_AE_STATE_LOCKED, MAX_RESULTS_TO_WAIT);
+ }
+
+ // Measure frame rate for a bit
+ Pair<Long, Long> frameDurationStats =
+ measureMeanFrameInterval(resultListener, FRAMES_FOR_AVERAGING, /*prevTimestamp*/ 0);
+
+ Log.i(TAG, String.format("Frame interval avg during normal preview: %f ms, peak %f ms",
+ frameDurationStats.first / 1e6, frameDurationStats.second / 1e6));
+
+ // Drain results, do prepare
+ resultListener.drain();
+
+ mSession.prepare(mReaderSurface);
+
+ verify(mockSessionListener,
+ timeout(PREPARE_TIMEOUT_MS).times(1)).
+ onSurfacePrepared(eq(mSession), eq(mReaderSurface));
+
+ // Calculate frame rate during prepare
+
+ int resultsReceived = (int) resultListener.getTotalNumFrames();
+ if (resultsReceived > 2) {
+ // Only verify frame rate if there are a couple of results
+ Pair<Long, Long> whilePreparingFrameDurationStats =
+ measureMeanFrameInterval(resultListener, resultsReceived, /*prevTimestamp*/ 0);
+
+ Log.i(TAG, String.format("Frame interval during prepare avg: %f ms, peak %f ms",
+ whilePreparingFrameDurationStats.first / 1e6,
+ whilePreparingFrameDurationStats.second / 1e6));
+
+ if (mStaticInfo.isHardwareLevelLimitedOrBetter()) {
+ mCollector.expectTrue(
+ String.format("Camera %s: Preview peak frame interval affected by prepare " +
+ "call: preview avg frame duration: %f ms, peak during prepare: %f ms",
+ cameraId,
+ frameDurationStats.first / 1e6,
+ whilePreparingFrameDurationStats.second / 1e6),
+ (whilePreparingFrameDurationStats.second <=
+ frameDurationStats.first * (1 + PREPARE_PEAK_RATE_BOUNDS)));
+ mCollector.expectTrue(
+ String.format("Camera %s: Preview average frame interval affected by prepare " +
+ "call: preview avg frame duration: %f ms, during prepare: %f ms",
+ cameraId,
+ frameDurationStats.first / 1e6,
+ whilePreparingFrameDurationStats.first / 1e6),
+ (whilePreparingFrameDurationStats.first <=
+ frameDurationStats.first * (1 + PREPARE_FRAME_RATE_BOUNDS)));
+ }
+ }
+
+ resultListener.drain();
+
+ // Get at least one more preview result without prepared target
+ CaptureResult result = resultListener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS);
+ long prevTimestamp = result.get(CaptureResult.SENSOR_TIMESTAMP);
+
+ // Now use the prepared stream and ensure there are no hiccups from using it
+ previewRequest.addTarget(mReaderSurface);
+
+ mSession.setRepeatingRequest(previewRequest.build(), resultListener, mHandler);
+
+ Pair<Long, Long> preparedFrameDurationStats =
+ measureMeanFrameInterval(resultListener, MAX_IMAGES_TO_PREPARE*2, prevTimestamp);
+
+ Log.i(TAG, String.format("Frame interval with prepared stream added avg: %f ms, peak %f ms",
+ preparedFrameDurationStats.first / 1e6,
+ preparedFrameDurationStats.second / 1e6));
+
+ if (mStaticInfo.isHardwareLevelLimitedOrBetter()) {
+ mCollector.expectTrue(
+ String.format("Camera %s: Preview peak frame interval affected by use of new " +
+ " stream: preview avg frame duration: %f ms, peak with new stream: %f ms",
+ cameraId,
+ frameDurationStats.first / 1e6, preparedFrameDurationStats.second / 1e6),
+ (preparedFrameDurationStats.second <=
+ frameDurationStats.first * (1 + PREPARE_PEAK_RATE_BOUNDS)));
+ mCollector.expectTrue(
+ String.format("Camera %s: Preview average frame interval affected by use of new " +
+ "stream: preview avg frame duration: %f ms, with new stream: %f ms",
+ cameraId,
+ frameDurationStats.first / 1e6, preparedFrameDurationStats.first / 1e6),
+ (preparedFrameDurationStats.first <=
+ frameDurationStats.first * (1 + PREPARE_FRAME_RATE_BOUNDS)));
+ }
+ }
+
+ /**
+ * Measure the inter-frame interval based on SENSOR_TIMESTAMP for frameCount frames from the
+ * provided capture listener. If prevTimestamp is positive, it is used for the first interval
+ * calculation; otherwise, the first result is used to establish the starting time.
+ *
+ * Returns the mean interval in the first pair entry, and the largest interval in the second
+ * pair entry.
+ */
+ Pair<Long, Long> measureMeanFrameInterval(SimpleCaptureCallback resultListener, int frameCount,
+ long prevTimestamp) throws Exception {
+ long summedIntervals = 0;
+ long maxInterval = 0;
+ int measurementCount = frameCount - ((prevTimestamp > 0) ? 0 : 1);
+
+ for (int i = 0; i < frameCount; i++) {
+ CaptureResult result = resultListener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS);
+ long timestamp = result.get(CaptureResult.SENSOR_TIMESTAMP);
+ if (prevTimestamp > 0) {
+ long interval = timestamp - prevTimestamp;
+ if (interval > maxInterval) maxInterval = interval;
+ summedIntervals += interval;
+ }
+ prevTimestamp = timestamp;
+ }
+ return new Pair<Long, Long>(summedIntervals / measurementCount, maxInterval);
+ }
+
+
+ /**
* Test preview fps range for all supported ranges. The exposure time and frame duration are
* validated.
*/
diff --git a/tests/tests/hardware/src/android/hardware/camera2/cts/helpers/CameraErrorCollector.java b/tests/tests/hardware/src/android/hardware/camera2/cts/helpers/CameraErrorCollector.java
index 0ee5ffc..9f0c012 100644
--- a/tests/tests/hardware/src/android/hardware/camera2/cts/helpers/CameraErrorCollector.java
+++ b/tests/tests/hardware/src/android/hardware/camera2/cts/helpers/CameraErrorCollector.java
@@ -22,6 +22,7 @@
import android.hardware.camera2.CaptureRequest.Builder;
import android.hardware.camera2.CaptureResult;
import android.hardware.camera2.params.MeteringRectangle;
+import android.media.Image;
import android.util.Log;
import android.util.Size;
@@ -1049,4 +1050,13 @@
Set<T> sizeSet = new HashSet<T>(list);
expectTrue(msg + " each element must be distinct", sizeSet.size() == list.size());
}
+
+ public void expectImageProperties(String msg, Image image, int format, Size size,
+ long timestampNs) {
+ expectEquals(msg + "Image format is wrong.", image.getFormat(), format);
+ expectEquals(msg + "Image width is wrong.", image.getWidth(), size.getWidth());
+ expectEquals(msg + "Image height is wrong.", image.getHeight(), size.getHeight());
+ expectEquals(msg + "Image timestamp is wrong.", image.getTimestamp(), timestampNs);
+ }
+
}
diff --git a/tests/tests/hardware/src/android/hardware/camera2/cts/helpers/StaticMetadata.java b/tests/tests/hardware/src/android/hardware/camera2/cts/helpers/StaticMetadata.java
index b08deae..a22db8f 100644
--- a/tests/tests/hardware/src/android/hardware/camera2/cts/helpers/StaticMetadata.java
+++ b/tests/tests/hardware/src/android/hardware/camera2/cts/helpers/StaticMetadata.java
@@ -64,6 +64,7 @@
private static final int CONTROL_AE_COMPENSATION_RANGE_DEFAULT_MAX = 2;
private static final Rational CONTROL_AE_COMPENSATION_STEP_DEFAULT = new Rational(1, 2);
private static final byte REQUEST_PIPELINE_MAX_DEPTH_MAX = 8;
+ private static final int MAX_REPROCESS_MAX_CAPTURE_STALL = 4;
// TODO: Consider making this work across any metadata object, not just camera characteristics
private final CameraCharacteristics mCharacteristics;
@@ -977,6 +978,62 @@
}
/**
+ * Get android.control.availableModes and do the sanity check.
+ *
+ * @return available control modes.
+ */
+ public int[] getAvailableControlModesChecked() {
+ Key<int[]> modesKey = CameraCharacteristics.CONTROL_AVAILABLE_MODES;
+ int[] modes = getValueFromKeyNonNull(modesKey);
+ if (modes == null) {
+ modes = new int[0];
+ }
+
+ List<Integer> modeList = Arrays.asList(CameraTestUtils.toObject(modes));
+ checkTrueForKey(modesKey, "value is empty", !modeList.isEmpty());
+
+ // All camera devices must support AUTO
+ checkTrueForKey(modesKey, "values " + modeList.toString() + " must contain AUTO mode",
+ modeList.contains(CameraMetadata.CONTROL_MODE_AUTO));
+
+ boolean isAeOffSupported = Arrays.asList(
+ CameraTestUtils.toObject(getAeAvailableModesChecked())).contains(
+ CameraMetadata.CONTROL_AE_MODE_OFF);
+ boolean isAfOffSupported = Arrays.asList(
+ CameraTestUtils.toObject(getAfAvailableModesChecked())).contains(
+ CameraMetadata.CONTROL_AF_MODE_OFF);
+ boolean isAwbOffSupported = Arrays.asList(
+ CameraTestUtils.toObject(getAwbAvailableModesChecked())).contains(
+ CameraMetadata.CONTROL_AWB_MODE_OFF);
+ if (isAeOffSupported && isAfOffSupported && isAwbOffSupported) {
+ // 3A OFF controls are supported, OFF mode must be supported here.
+ checkTrueForKey(modesKey, "values " + modeList.toString() + " must contain OFF mode",
+ modeList.contains(CameraMetadata.CONTROL_MODE_OFF));
+ }
+
+ if (isSceneModeSupported()) {
+ checkTrueForKey(modesKey, "values " + modeList.toString() + " must contain"
+ + " USE_SCENE_MODE",
+ modeList.contains(CameraMetadata.CONTROL_MODE_USE_SCENE_MODE));
+ }
+
+ return modes;
+ }
+
+ public boolean isSceneModeSupported() {
+ List<Integer> availableSceneModes = Arrays.asList(
+ CameraTestUtils.toObject(getAvailableSceneModesChecked()));
+
+ if (availableSceneModes.isEmpty()) {
+ return false;
+ }
+
+ // If sceneMode is not supported, the camera device will contain a single entry: DISABLED.
+ return availableSceneModes.size() > 1 ||
+ !availableSceneModes.contains(CameraMetadata.CONTROL_SCENE_MODE_DISABLED);
+ }
+
+ /**
* Get aeAvailableModes and do the sanity check.
*
* <p>Depending on the check level this class has, for WAR or COLLECT levels,
@@ -1240,6 +1297,30 @@
}
/**
+ * Get the highest supported target FPS range.
+ * Prioritizes maximizing the min FPS, then the max FPS without lowering min FPS.
+ */
+ public Range<Integer> getAeMaxTargetFpsRange() {
+ Range<Integer>[] fpsRanges = getAeAvailableTargetFpsRangesChecked();
+
+ Range<Integer> targetRange = fpsRanges[0];
+ // Assume unsorted list of target FPS ranges, so use two passes, first maximize min FPS
+ for (Range<Integer> candidateRange : fpsRanges) {
+ if (candidateRange.getLower() > targetRange.getLower()) {
+ targetRange = candidateRange;
+ }
+ }
+ // Then maximize max FPS while not lowering min FPS
+ for (Range<Integer> candidateRange : fpsRanges) {
+ if (candidateRange.getLower() >= targetRange.getLower() &&
+ candidateRange.getUpper() > targetRange.getUpper()) {
+ targetRange = candidateRange;
+ }
+ }
+ return targetRange;
+ }
+
+ /**
* Get max frame duration.
*
* @return 0 if maxFrameDuration is null
@@ -1738,7 +1819,7 @@
*
* @return {@code true} if manual color correction control is supported
*/
- public boolean isManualColorCorrectionSupported() {
+ public boolean isColorCorrectionSupported() {
return areKeysAvailable(CaptureRequest.COLOR_CORRECTION_MODE);
}
@@ -1846,6 +1927,26 @@
}
/**
+ * Get maxCaptureStall frames or default value (if value doesn't exist)
+ * @return maxCaptureStall frames or default value.
+ */
+ public int getMaxCaptureStallOrDefault() {
+ Key<Integer> key =
+ CameraCharacteristics.REPROCESS_MAX_CAPTURE_STALL;
+ Integer value = getValueFromKeyNonNull(key);
+
+ if (value == null) {
+ return MAX_REPROCESS_MAX_CAPTURE_STALL;
+ }
+
+ checkTrueForKey(key, " value is out of range ",
+ value >= 0 &&
+ value <= MAX_REPROCESS_MAX_CAPTURE_STALL);
+
+ return value;
+ }
+
+ /**
* Get the scaler's cropping type (center only or freeform)
* @return cropping type, return default value (CENTER_ONLY) if value is unavailable
*/
diff --git a/tests/tests/hardware/src/android/hardware/camera2/cts/rs/RawConverter.java b/tests/tests/hardware/src/android/hardware/camera2/cts/rs/RawConverter.java
index 2cd2469..8ca650f 100644
--- a/tests/tests/hardware/src/android/hardware/camera2/cts/rs/RawConverter.java
+++ b/tests/tests/hardware/src/android/hardware/camera2/cts/rs/RawConverter.java
@@ -163,6 +163,7 @@
* @param rs a {@link RenderScript} context to use.
* @param inputWidth width of the input RAW16 image in pixels.
* @param inputHeight height of the input RAW16 image in pixels.
+ * @param inputStride stride of the input RAW16 image in bytes.
* @param rawImageInput a byte array containing a RAW16 image.
* @param staticMetadata the {@link CameraCharacteristics} for this RAW capture.
* @param dynamicMetadata the {@link CaptureResult} for this RAW capture.
@@ -176,7 +177,7 @@
* image to be rendered.
*/
public static void convertToSRGB(RenderScript rs, int inputWidth, int inputHeight,
- byte[] rawImageInput, CameraCharacteristics staticMetadata,
+ int inputStride, byte[] rawImageInput, CameraCharacteristics staticMetadata,
CaptureResult dynamicMetadata, int outputOffsetX, int outputOffsetY,
/*out*/Bitmap argbOutput) {
int cfa = staticMetadata.get(CameraCharacteristics.SENSOR_INFO_COLOR_FILTER_ARRANGEMENT);
@@ -209,7 +210,7 @@
LensShadingMap shadingMap = dynamicMetadata.get(CaptureResult.STATISTICS_LENS_SHADING_CORRECTION_MAP);
- convertToSRGB(rs, inputWidth, inputHeight, cfa, blackLevelPattern, whiteLevel,
+ convertToSRGB(rs, inputWidth, inputHeight, inputStride, cfa, blackLevelPattern, whiteLevel,
rawImageInput, ref1, ref2, calib1, calib2, color1, color2,
forward1, forward2, neutral, shadingMap, outputOffsetX, outputOffsetY, argbOutput);
}
@@ -219,8 +220,8 @@
*
* @see #convertToSRGB
*/
- private static void convertToSRGB(RenderScript rs, int inputWidth, int inputHeight, int cfa,
- int[] blackLevelPattern, int whiteLevel, byte[] rawImageInput,
+ private static void convertToSRGB(RenderScript rs, int inputWidth, int inputHeight,
+ int inputStride, int cfa, int[] blackLevelPattern, int whiteLevel, byte[] rawImageInput,
int referenceIlluminant1, int referenceIlluminant2, float[] calibrationTransform1,
float[] calibrationTransform2, float[] colorMatrix1, float[] colorMatrix2,
float[] forwardTransform1, float[] forwardTransform2, Rational[/*3*/] neutralColorPoint,
@@ -238,6 +239,12 @@
if (outputOffsetX < 0 || outputOffsetY < 0) {
throw new IllegalArgumentException("Negative offset passed to convertToSRGB");
}
+ if ((inputStride / 2) < inputWidth) {
+ throw new IllegalArgumentException("Stride too small.");
+ }
+ if ((inputStride % 2) != 0) {
+ throw new IllegalArgumentException("Invalid stride for RAW16 format, see graphics.h.");
+ }
int outWidth = argbOutput.getWidth();
int outHeight = argbOutput.getHeight();
if (outWidth + outputOffsetX > inputWidth || outHeight + outputOffsetY > inputHeight) {
@@ -314,7 +321,7 @@
// Setup input allocation (16-bit raw pixels)
Type.Builder typeBuilder = new Type.Builder(rs, Element.U16(rs));
- typeBuilder.setX(inputWidth);
+ typeBuilder.setX((inputStride / 2));
typeBuilder.setY(inputHeight);
Type inputType = typeBuilder.create();
Allocation input = Allocation.createTyped(rs, inputType);
diff --git a/tests/tests/hardware/src/android/hardware/camera2/cts/testcases/Camera2AndroidTestCase.java b/tests/tests/hardware/src/android/hardware/camera2/cts/testcases/Camera2AndroidTestCase.java
index e7f1e7a..78370b3 100644
--- a/tests/tests/hardware/src/android/hardware/camera2/cts/testcases/Camera2AndroidTestCase.java
+++ b/tests/tests/hardware/src/android/hardware/camera2/cts/testcases/Camera2AndroidTestCase.java
@@ -294,13 +294,8 @@
ImageReader.OnImageAvailableListener listener) throws Exception {
ImageReader reader = null;
- if (format == ImageFormat.PRIVATE) {
- // Create opaque ImageReader
- reader = ImageReader.newOpaqueInstance(size.getWidth(), size.getHeight(), maxNumImages);
- } else {
- reader = ImageReader.newInstance(size.getWidth(), size.getHeight(),
- format, maxNumImages);
- }
+ reader = ImageReader.newInstance(size.getWidth(), size.getHeight(),
+ format, maxNumImages);
reader.setOnImageAvailableListener(listener, mHandler);
if (VERBOSE) Log.v(TAG, "Created ImageReader size " + size.toString());
@@ -462,15 +457,6 @@
// Expected.
}
- // Image#isOpaque test
- try {
- closedImage.isOpaque();
- fail("Image should throw IllegalStateException when calling isOpaque"
- + " after the image is closed");
- } catch (IllegalStateException e) {
- // Expected.
- }
-
// Image#getCropRect test
try {
closedImage.getCropRect();
diff --git a/tests/tests/hardware/src/android/hardware/camera2/cts/testcases/Camera2SurfaceViewTestCase.java b/tests/tests/hardware/src/android/hardware/camera2/cts/testcases/Camera2SurfaceViewTestCase.java
index bcc4061..3ca696b 100644
--- a/tests/tests/hardware/src/android/hardware/camera2/cts/testcases/Camera2SurfaceViewTestCase.java
+++ b/tests/tests/hardware/src/android/hardware/camera2/cts/testcases/Camera2SurfaceViewTestCase.java
@@ -694,4 +694,21 @@
return null;
}
+
+ protected boolean isReprocessSupported(String cameraId, int format)
+ throws CameraAccessException {
+ if (format != ImageFormat.YUV_420_888 && format != ImageFormat.PRIVATE) {
+ throw new IllegalArgumentException(
+ "format " + format + " is not supported for reprocessing");
+ }
+
+ StaticMetadata info =
+ new StaticMetadata(mCameraManager.getCameraCharacteristics(cameraId),
+ CheckLevel.ASSERT, /*collector*/ null);
+ int cap = CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING;
+ if (format == ImageFormat.PRIVATE) {
+ cap = CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING;
+ }
+ return info.isCapabilitySupported(cap);
+ }
}
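A typical caller of the new isReprocessSupported helper would be a test method that skips gracefully when the capability is absent; a hypothetical sketch (the surrounding test body is assumed, not part of this change):

    if (!isReprocessSupported(cameraId, ImageFormat.YUV_420_888)) {
        Log.i(TAG, "Camera " + cameraId + " has no YUV reprocessing capability, skipping");
        return;
    }
    // ... proceed to create a reprocessable capture session for this camera ...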
diff --git a/tests/tests/hardware/src/android/hardware/cts/SingleSensorTests.java b/tests/tests/hardware/src/android/hardware/cts/SingleSensorTests.java
index 42cbdfb..0fbd8fa 100644
--- a/tests/tests/hardware/src/android/hardware/cts/SingleSensorTests.java
+++ b/tests/tests/hardware/src/android/hardware/cts/SingleSensorTests.java
@@ -22,6 +22,7 @@
import android.hardware.cts.helpers.SensorStats;
import android.hardware.cts.helpers.TestSensorEnvironment;
import android.hardware.cts.helpers.sensoroperations.TestSensorOperation;
+import android.content.pm.PackageManager;
import java.util.HashMap;
import java.util.Map;
@@ -106,8 +107,13 @@
public void testSensorProperties() {
// sensor type: [getMinDelay()]
Map<Integer, Object[]> expectedProperties = new HashMap<>(3);
- expectedProperties.put(Sensor.TYPE_ACCELEROMETER, new Object[]{10000});
- expectedProperties.put(Sensor.TYPE_GYROSCOPE, new Object[]{10000});
+ if (getContext().getPackageManager().hasSystemFeature(PackageManager.FEATURE_WATCH)) {
+ expectedProperties.put(Sensor.TYPE_ACCELEROMETER, new Object[]{20000});
+ expectedProperties.put(Sensor.TYPE_GYROSCOPE, new Object[]{20000});
+ } else {
+ expectedProperties.put(Sensor.TYPE_ACCELEROMETER, new Object[]{10000});
+ expectedProperties.put(Sensor.TYPE_GYROSCOPE, new Object[]{10000});
+ }
expectedProperties.put(Sensor.TYPE_MAGNETIC_FIELD, new Object[]{100000});
SensorManager sensorManager =
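The values asserted above are Sensor.getMinDelay() figures in microseconds, i.e. the shortest supported sampling period: watches may report 20 ms (50 Hz) while other devices must still support 10 ms (100 Hz). A small sketch of that conversion (isWatch is an assumed flag, not code from the patch):

    int minDelayUs = isWatch ? 20000 : 10000;
    double maxSamplingRateHz = 1_000_000.0 / minDelayUs;   // 50 Hz on watches, 100 Hz otherwise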
diff --git a/tests/tests/hardware/src/android/hardware/multiprocess/ErrorLoggingService.java b/tests/tests/hardware/src/android/hardware/multiprocess/ErrorLoggingService.java
new file mode 100644
index 0000000..1b713ba
--- /dev/null
+++ b/tests/tests/hardware/src/android/hardware/multiprocess/ErrorLoggingService.java
@@ -0,0 +1,611 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.hardware.multiprocess;
+
+import android.app.Service;
+import android.content.ComponentName;
+import android.content.Context;
+import android.content.Intent;
+import android.content.ServiceConnection;
+import android.os.AsyncTask;
+import android.os.Bundle;
+import android.os.ConditionVariable;
+import android.os.Handler;
+import android.os.HandlerThread;
+import android.os.IBinder;
+import android.os.Looper;
+import android.os.Message;
+import android.os.Messenger;
+import android.os.Parcel;
+import android.os.Parcelable;
+import android.os.RemoteException;
+import android.util.Log;
+import android.util.Pair;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+import java.util.ListIterator;
+import java.util.concurrent.Callable;
+import java.util.concurrent.ExecutionException;
+import java.util.concurrent.FutureTask;
+import java.util.concurrent.LinkedBlockingQueue;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.TimeoutException;
+
+/**
+ * Service for collecting error messages from other processes.
+ *
+ * <p />
+ * Used by CTS for multi-process error logging.
+ */
+public class ErrorLoggingService extends Service {
+ public static final String TAG = "ErrorLoggingService";
+
+ /**
+ * Receive all currently logged error strings in replyTo Messenger.
+ */
+ public static final int MSG_GET_LOG = 0;
+
+ /**
+ * Append a new error string to the log maintained in this service.
+ */
+ public static final int MSG_LOG_EVENT = 1;
+
+ /**
+ * Logged errors being reported in a replyTo Messenger by this service.
+ */
+ public static final int MSG_LOG_REPORT = 2;
+
+ /**
+ * A list of log events containing all error messages reported to this service.
+ */
+ private final ArrayList<LogEvent> mLog = new ArrayList<>();
+
+ /**
+ * A list of Messengers waiting for logs for any event.
+ */
+ private final ArrayList<Pair<Integer, Messenger>> mEventWaiters = new ArrayList<>();
+
+ private static final int DO_EVENT_FILTER = 1;
+ private static final String LOG_EVENT = "log_event";
+ private static final String LOG_EVENT_ARRAY = "log_event_array";
+
+
+ /**
+ * The messenger binder used by clients of this service to report/retrieve errors.
+ */
+ private final Messenger mMessenger = new Messenger(new MainHandler(mLog, mEventWaiters));
+
+ @Override
+ public void onDestroy() {
+ super.onDestroy();
+ mLog.clear();
+ }
+
+ @Override
+ public IBinder onBind(Intent intent) {
+ return mMessenger.getBinder();
+ }
+
+ /**
+ * Handler implementing the message interface for this service.
+ */
+ private static class MainHandler extends Handler {
+
+ ArrayList<LogEvent> mErrorLog;
+ ArrayList<Pair<Integer, Messenger>> mEventWaiters;
+
+ MainHandler(ArrayList<LogEvent> log, ArrayList<Pair<Integer, Messenger>> waiters) {
+ mErrorLog = log;
+ mEventWaiters = waiters;
+ }
+
+ private void sendMessages() {
+ if (mErrorLog.size() > 0) {
+ ListIterator<Pair<Integer, Messenger>> iter = mEventWaiters.listIterator();
+ boolean messagesHandled = false;
+ while (iter.hasNext()) {
+ Pair<Integer, Messenger> elem = iter.next();
+ for (LogEvent i : mErrorLog) {
+ if (elem.first == null || elem.first == i.getEvent()) {
+ Message m = Message.obtain(null, MSG_LOG_REPORT);
+ Bundle b = m.getData();
+ b.putParcelableArray(LOG_EVENT_ARRAY,
+ mErrorLog.toArray(new LogEvent[mErrorLog.size()]));
+ m.setData(b);
+ try {
+ elem.second.send(m);
+ messagesHandled = true;
+ } catch (RemoteException e) {
+ Log.e(TAG, "Could not report log message to remote, " +
+ "received exception from remote: " + e +
+ "\n Original errors: " +
+ Arrays.toString(mErrorLog.toArray()));
+ }
+ iter.remove();
+ // ListIterator allows only one remove() per next(), and the report already
+ // contains the full log, so stop scanning this waiter's events here.
+ break;
+ }
+ }
+ }
+ if (messagesHandled) {
+ mErrorLog.clear();
+ }
+ }
+ }
+
+ @Override
+ public void handleMessage(Message msg) {
+ switch(msg.what) {
+ case MSG_GET_LOG:
+ if (msg.replyTo == null) {
+ break;
+ }
+
+ if (msg.arg1 == DO_EVENT_FILTER) {
+ mEventWaiters.add(new Pair<Integer, Messenger>(msg.arg2, msg.replyTo));
+ } else {
+ mEventWaiters.add(new Pair<Integer, Messenger>(null, msg.replyTo));
+ }
+
+ sendMessages();
+
+ break;
+ case MSG_LOG_EVENT:
+ Bundle b = msg.getData();
+ b.setClassLoader(LogEvent.class.getClassLoader());
+ LogEvent error = b.getParcelable(LOG_EVENT);
+ mErrorLog.add(error);
+
+ sendMessages();
+
+ break;
+ default:
+ Log.e(TAG, "Unknown message type: " + msg.what);
+ super.handleMessage(msg);
+ }
+ }
+ }
+
+ /**
+ * Parcelable object to use with logged events.
+ */
+ public static class LogEvent implements Parcelable {
+
+ private final int mEvent;
+ private final String mLogText;
+
+ @Override
+ public int describeContents() {
+ return 0;
+ }
+
+ @Override
+ public void writeToParcel(Parcel out, int flags) {
+ out.writeInt(mEvent);
+ out.writeString(mLogText);
+ }
+
+ public int getEvent() {
+ return mEvent;
+ }
+
+ public String getLogText() {
+ return mLogText;
+ }
+
+ public static final Parcelable.Creator<LogEvent> CREATOR
+ = new Parcelable.Creator<LogEvent>() {
+
+ public LogEvent createFromParcel(Parcel in) {
+ return new LogEvent(in);
+ }
+
+ public LogEvent[] newArray(int size) {
+ return new LogEvent[size];
+ }
+ };
+
+ private LogEvent(Parcel in) {
+ mEvent = in.readInt();
+ mLogText = in.readString();
+ }
+
+ public LogEvent(int id, String msg) {
+ mEvent = id;
+ mLogText = msg;
+ }
+
+ @Override
+ public String toString() {
+ return "LogEvent{" +
+ "Event=" + mEvent +
+ ", LogText='" + mLogText + '\'' +
+ '}';
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+
+ LogEvent logEvent = (LogEvent) o;
+
+ if (mEvent != logEvent.mEvent) return false;
+ if (mLogText != null ? !mLogText.equals(logEvent.mLogText) : logEvent.mLogText != null)
+ return false;
+
+ return true;
+ }
+
+ @Override
+ public int hashCode() {
+ int result = mEvent;
+ result = 31 * result + (mLogText != null ? mLogText.hashCode() : 0);
+ return result;
+ }
+ }
+
+ /**
+ * Implementation of Future to use when retrieving error messages from service.
+ *
+ * <p />
+ * To use this, either pass a {@link Runnable} or {@link Callable} in the constructor,
+ * or use the default constructor and set the result externally with {@link #setResult(Object)}.
+ */
+ private static class SettableFuture<T> extends FutureTask<T> {
+
+ public SettableFuture() {
+ super(new Callable<T>() {
+ @Override
+ public T call() throws Exception {
+ throw new IllegalStateException(
+ "Empty task, use #setResult instead of calling run.");
+ }
+ });
+ }
+
+ public SettableFuture(Callable<T> callable) {
+ super(callable);
+ }
+
+ public SettableFuture(Runnable runnable, T result) {
+ super(runnable, result);
+ }
+
+ public void setResult(T result) {
+ set(result);
+ }
+ }
+
+ /**
+ * Helper class for setting up and using a connection to {@link ErrorLoggingService}.
+ */
+ public static class ErrorServiceConnection implements AutoCloseable {
+
+ private Messenger mService = null;
+ private boolean mBind = false;
+ private final Object mLock = new Object();
+ private final Context mContext;
+ private final HandlerThread mReplyThread;
+ private ReplyHandler mReplyHandler;
+ private Messenger mReplyMessenger;
+
+ /**
+ * Construct a connection to the {@link ErrorLoggingService} in the given {@link Context}.
+ *
+ * @param context the {@link Context} to bind the service in.
+ */
+ public ErrorServiceConnection(final Context context) {
+ mContext = context;
+ mReplyThread = new HandlerThread("ErrorServiceConnection");
+ mReplyThread.start();
+ mReplyHandler = new ReplyHandler(mReplyThread.getLooper());
+ mReplyMessenger = new Messenger(mReplyHandler);
+ }
+
+ @Override
+ public void close() {
+ stop();
+ mReplyThread.quit();
+ synchronized (mLock) {
+ mService = null;
+ mBind = false;
+ mReplyHandler.cancelAll();
+ }
+ }
+
+ @Override
+ protected void finalize() throws Throwable {
+ close();
+ super.finalize();
+ }
+
+ private static final class ReplyHandler extends Handler {
+
+ private final LinkedBlockingQueue<SettableFuture<List<LogEvent>>> mFuturesQueue =
+ new LinkedBlockingQueue<>();
+
+ private ReplyHandler(Looper looper) {
+ super(looper);
+ }
+
+ /**
+ * Cancel all pending futures for this handler.
+ */
+ public void cancelAll() {
+ List<SettableFuture<List<LogEvent>>> logFutures = new ArrayList<>();
+ mFuturesQueue.drainTo(logFutures);
+ for (SettableFuture<List<LogEvent>> i : logFutures) {
+ i.cancel(true);
+ }
+ }
+
+ /**
+ * Cancel a given future, and remove from the pending futures for this handler.
+ *
+ * @param report future to remove.
+ */
+ public void cancel(SettableFuture<List<LogEvent>> report) {
+ mFuturesQueue.remove(report);
+ report.cancel(true);
+ }
+
+ /**
+ * Add future for the next received report from this service.
+ *
+ * @param report a future to get the next received event report from.
+ */
+ public void addFuture(SettableFuture<List<LogEvent>> report) {
+ if (!mFuturesQueue.offer(report)) {
+ Log.e(TAG, "Could not request another error report, too many requests queued.");
+ }
+ }
+
+ @SuppressWarnings("unchecked")
+ @Override
+ public void handleMessage(Message msg) {
+ switch (msg.what) {
+ case MSG_LOG_REPORT:
+ SettableFuture<List<LogEvent>> task = mFuturesQueue.poll();
+ if (task == null) break;
+ Bundle b = msg.getData();
+ b.setClassLoader(LogEvent.class.getClassLoader());
+ Parcelable[] array = b.getParcelableArray(LOG_EVENT_ARRAY);
+ LogEvent[] events = Arrays.copyOf(array, array.length, LogEvent[].class);
+ List<LogEvent> res = Arrays.asList(events);
+ task.setResult(res);
+ break;
+ default:
+ Log.e(TAG, "Unknown message type: " + msg.what);
+ super.handleMessage(msg);
+ }
+ }
+ }
+
+ private ServiceConnection mConnection = new ServiceConnection() {
+ @Override
+ public void onServiceConnected(ComponentName componentName, IBinder iBinder) {
+ Log.i(TAG, "Service connected.");
+ synchronized (mLock) {
+ mService = new Messenger(iBinder);
+ mBind = true;
+ mLock.notifyAll();
+ }
+ }
+
+ @Override
+ public void onServiceDisconnected(ComponentName componentName) {
+ Log.i(TAG, "Service disconnected.");
+ synchronized (mLock) {
+ mService = null;
+ mBind = false;
+ mReplyHandler.cancelAll();
+ }
+ }
+ };
+
+ private Messenger blockingGetBoundService() {
+ synchronized (mLock) {
+ if (!mBind) {
+ mContext.bindService(new Intent(mContext, ErrorLoggingService.class), mConnection,
+ Context.BIND_AUTO_CREATE);
+ mBind = true;
+ }
+ try {
+ while (mService == null && mBind) {
+ mLock.wait();
+ }
+ } catch (InterruptedException e) {
+ Log.e(TAG, "Waiting for error service interrupted: " + e);
+ }
+ if (!mBind) {
+ Log.w(TAG, "Could not get service, service disconnected.");
+ }
+ return mService;
+ }
+ }
+
+ private Messenger getBoundService() {
+ synchronized (mLock) {
+ if (!mBind) {
+ mContext.bindService(new Intent(mContext, ErrorLoggingService.class), mConnection,
+ Context.BIND_AUTO_CREATE);
+ mBind = true;
+ }
+ return mService;
+ }
+ }
+
+ /**
+ * If the {@link ErrorLoggingService} is not yet bound, begin service connection attempt.
+ *
+ * <p />
+ * Note: This will not block.
+ */
+ public void start() {
+ synchronized (mLock) {
+ if (!mBind) {
+ mContext.bindService(new Intent(mContext, ErrorLoggingService.class), mConnection,
+ Context.BIND_AUTO_CREATE);
+ mBind = true;
+ }
+ }
+ }
+
+ /**
+ * Unbind from the {@link ErrorLoggingService} if it has been bound.
+ *
+ * <p />
+ * Note: This will not block.
+ */
+ public void stop() {
+ synchronized (mLock) {
+ if (mBind) {
+ mContext.unbindService(mConnection);
+ mBind = false;
+ }
+ }
+ }
+
+ /**
+ * Send a logged event to the bound {@link ErrorLoggingService}.
+ *
+ * <p />
+ * If the service is not yet bound, this will bind the service and wait until it has been
+ * connected.
+ *
+ * <p />
+ * This is not safe to call from the UI thread, as this will deadlock with the looper used
+ * when connecting the service.
+ *
+ * @param id an int indicating the ID of this event.
+ * @param msg a {@link String} message to send.
+ */
+ public void log(final int id, final String msg) {
+ Messenger service = blockingGetBoundService();
+ Message m = Message.obtain(null, MSG_LOG_EVENT);
+ m.getData().putParcelable(LOG_EVENT, new LogEvent(id, msg));
+ try {
+ service.send(m);
+ } catch (RemoteException e) {
+ Log.e(TAG, "Received exception while logging error: " + e);
+ }
+ }
+
+ /**
+ * Send a logged event to the bound {@link ErrorLoggingService} when it becomes available.
+ *
+ * <p />
+ * If the service is not yet bound, this will bind the service.
+ *
+ * @param id an int indicating the ID of this event.
+ * @param msg a {@link String} message to send.
+ */
+ public void logAsync(final int id, final String msg) {
+ AsyncTask.SERIAL_EXECUTOR.execute(new Runnable() {
+ @Override
+ public void run() {
+ log(id, msg);
+ }
+ });
+ }
+
+ /**
+ * Retrieve all events logged in the {@link ErrorLoggingService}.
+ *
+ * <p />
+ * If the service is not yet bound, this will bind the service and wait until it has been
+ * connected. Likewise, after the service has been bound, this method will block until
+ * the given timeout passes or an event is logged in the service. Passing a negative
+ * timeout is equivalent to using an infinite timeout value.
+ *
+ * <p />
+ * This is not safe to call from the UI thread, as this will deadlock with the looper used
+ * when connecting the service.
+ *
+ * <p />
+ * Note: This method clears the events stored in the bound {@link ErrorLoggingService}.
+ *
+ * @param timeoutMs the number of milliseconds to wait for a logging event.
+ * @return a list of {@link LogEvent} objects reported to the bound
+ * {@link ErrorLoggingService} since the last call to getLog.
+ *
+ * @throws TimeoutException if the given timeout elapsed with no events logged.
+ */
+ public List<LogEvent> getLog(long timeoutMs) throws TimeoutException {
+ return retrieveLog(false, 0, timeoutMs);
+ }
+
+ /**
+ * Retrieve all events logged in the {@link ErrorLoggingService}.
+ *
+ * <p />
+ * If the service is not yet bound, this will bind the service and wait until it has been
+ * connected. Likewise, after the service has been bound, this method will block until
+ * the given timeout passes or an event with the given event ID is logged in the service.
+ * Passing a negative timeout is equivalent to using an infinite timeout value.
+ *
+ * <p />
+ * This is not safe to call from the UI thread, as this will deadlock with the looper used
+ * when connecting the service.
+ *
+ * <p />
+ * Note: This method clears the events stored in the bound {@link ErrorLoggingService}.
+ *
+ * @param timeoutMs the number of milliseconds to wait for a logging event.
+ * @param event the ID of the event to wait for.
+ * @return a list of {@link LogEvent} objects reported to the bound
+ * {@link ErrorLoggingService} since the last call to getLog.
+ *
+ * @throws TimeoutException if the given timeout elapsed with no events of the given type
+ * logged.
+ */
+ public List<LogEvent> getLog(long timeoutMs, int event) throws TimeoutException {
+ return retrieveLog(true, event, timeoutMs);
+ }
+
+ private List<LogEvent> retrieveLog(boolean hasEvent, int event, long timeout)
+ throws TimeoutException {
+ Messenger service = blockingGetBoundService();
+
+ SettableFuture<List<LogEvent>> task = new SettableFuture<>();
+
+ Message m = (hasEvent) ?
+ Message.obtain(null, MSG_GET_LOG, DO_EVENT_FILTER, event, null) :
+ Message.obtain(null, MSG_GET_LOG);
+ m.replyTo = mReplyMessenger;
+
+ synchronized(this) {
+ mReplyHandler.addFuture(task);
+ try {
+ service.send(m);
+ } catch (RemoteException e) {
+ Log.e(TAG, "Received exception while retrieving errors: " + e);
+ return null;
+ }
+ }
+
+ List<LogEvent> res = null;
+ try {
+ res = (timeout < 0) ? task.get() : task.get(timeout, TimeUnit.MILLISECONDS);
+ } catch (InterruptedException|ExecutionException e) {
+ Log.e(TAG, "Received exception while retrieving errors: " + e);
+ }
+ return res;
+ }
+ }
+}
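For context, a client process would normally talk to this service through ErrorServiceConnection roughly as follows; this is a hypothetical usage sketch, not code from the patch, and it must run off the UI thread because getLog blocks:

    try (ErrorLoggingService.ErrorServiceConnection conn =
            new ErrorLoggingService.ErrorServiceConnection(context)) {
        conn.start();                                    // bind without blocking
        conn.logAsync(1 /* assumed event id */, "something went wrong");
        List<ErrorLoggingService.LogEvent> events = conn.getLog(5000 /* ms */);
        for (ErrorLoggingService.LogEvent e : events) {
            Log.i("Client", "Reported: " + e);
        }
    } catch (TimeoutException e) {
        // No events were logged within the timeout.
    }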
diff --git a/tests/tests/hardware/src/android/hardware/multiprocess/camera/cts/Camera1Activity.java b/tests/tests/hardware/src/android/hardware/multiprocess/camera/cts/Camera1Activity.java
new file mode 100644
index 0000000..5c27111
--- /dev/null
+++ b/tests/tests/hardware/src/android/hardware/multiprocess/camera/cts/Camera1Activity.java
@@ -0,0 +1,93 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.hardware.multiprocess.camera.cts;
+
+import android.app.Activity;
+import android.hardware.Camera;
+import android.hardware.multiprocess.ErrorLoggingService;
+import android.os.Bundle;
+import android.util.Log;
+
+/**
+ * Activity implementing basic access of the Camera1 API.
+ *
+ * <p />
+ * This will log all errors to {@link android.hardware.multiprocess.ErrorLoggingService}.
+ */
+public class Camera1Activity extends Activity {
+ private static final String TAG = "Camera1Activity";
+
+ Camera mCamera;
+ ErrorLoggingService.ErrorServiceConnection mErrorServiceConnection;
+
+ @Override
+ protected void onCreate(Bundle savedInstanceState) {
+ Log.i(TAG, "onCreate called.");
+ super.onCreate(savedInstanceState);
+ mErrorServiceConnection = new ErrorLoggingService.ErrorServiceConnection(this);
+ mErrorServiceConnection.start();
+ }
+
+ @Override
+ protected void onResume() {
+ Log.i(TAG, "onResume called.");
+ super.onResume();
+ try {
+ mCamera = Camera.open();
+ if (mCamera == null) {
+ mErrorServiceConnection.logAsync(TestConstants.EVENT_CAMERA_ERROR, TAG +
+ " no cameras available.");
+ // No camera to attach callbacks to; skip the rest of setup.
+ return;
+ }
+ mCamera.setErrorCallback(new Camera.ErrorCallback() {
+ @Override
+ public void onError(int i, Camera camera) {
+ if (i == Camera.CAMERA_ERROR_EVICTED) {
+ mErrorServiceConnection.logAsync(TestConstants.EVENT_CAMERA_EVICTED,
+ TAG + " camera evicted");
+ Log.e(TAG, "onError called with event " + i + ", camera evicted");
+ } else {
+ mErrorServiceConnection.logAsync(TestConstants.EVENT_CAMERA_ERROR,
+ TAG + " camera experienced error: " + i);
+ Log.e(TAG, "onError called with event " + i + ", camera error");
+ }
+ }
+ });
+ mErrorServiceConnection.logAsync(TestConstants.EVENT_CAMERA_CONNECT,
+ TAG + " camera connected");
+ } catch (RuntimeException e) {
+ mErrorServiceConnection.logAsync(TestConstants.EVENT_CAMERA_ERROR, TAG +
+ " camera exception during connection: " + e);
+ Log.e(TAG, "Runtime error: " + e);
+ }
+ }
+
+ @Override
+ protected void onPause() {
+ Log.i(TAG, "onPause called.");
+ super.onPause();
+ }
+
+ @Override
+ protected void onDestroy() {
+ Log.i(TAG, "onDestroy called.");
+ super.onDestroy();
+ if (mErrorServiceConnection != null) {
+ mErrorServiceConnection.stop();
+ mErrorServiceConnection = null;
+ }
+ }
+}
\ No newline at end of file
diff --git a/tests/tests/hardware/src/android/hardware/multiprocess/camera/cts/Camera2Activity.java b/tests/tests/hardware/src/android/hardware/multiprocess/camera/cts/Camera2Activity.java
new file mode 100644
index 0000000..2a78649
--- /dev/null
+++ b/tests/tests/hardware/src/android/hardware/multiprocess/camera/cts/Camera2Activity.java
@@ -0,0 +1,133 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.hardware.multiprocess.camera.cts;
+
+import android.app.Activity;
+import android.content.Context;
+import android.hardware.camera2.CameraAccessException;
+import android.hardware.camera2.CameraDevice;
+import android.hardware.camera2.CameraManager;
+import android.hardware.multiprocess.ErrorLoggingService;
+import android.os.Bundle;
+import android.os.Handler;
+import android.util.Log;
+
+/**
+ * Activity implementing basic access of the Camera2 API.
+ *
+ * <p />
+ * This will log all errors to {@link android.hardware.multiprocess.ErrorLoggingService}.
+ */
+public class Camera2Activity extends Activity {
+ private static final String TAG = "Camera2Activity";
+
+ ErrorLoggingService.ErrorServiceConnection mErrorServiceConnection;
+
+ @Override
+ protected void onCreate(Bundle savedInstanceState) {
+ Log.i(TAG, "onCreate called.");
+ super.onCreate(savedInstanceState);
+ mErrorServiceConnection = new ErrorLoggingService.ErrorServiceConnection(this);
+ mErrorServiceConnection.start();
+ }
+
+ @Override
+ protected void onPause() {
+ Log.i(TAG, "onPause called.");
+ super.onPause();
+ }
+
+ @Override
+ protected void onResume() {
+ Log.i(TAG, "onResume called.");
+ super.onResume();
+
+ try {
+ CameraManager manager = (CameraManager) getSystemService(Context.CAMERA_SERVICE);
+
+ if (manager == null) {
+ mErrorServiceConnection.logAsync(TestConstants.EVENT_CAMERA_ERROR, TAG +
+ " could not connect camera service");
+ return;
+ }
+ String[] cameraIds = manager.getCameraIdList();
+
+ if (cameraIds == null || cameraIds.length == 0) {
+ mErrorServiceConnection.logAsync(TestConstants.EVENT_CAMERA_ERROR, TAG +
+ " device reported having no cameras");
+ return;
+ }
+
+ manager.registerAvailabilityCallback(new CameraManager.AvailabilityCallback() {
+ @Override
+ public void onCameraAvailable(String cameraId) {
+ super.onCameraAvailable(cameraId);
+ mErrorServiceConnection.logAsync(TestConstants.EVENT_CAMERA_AVAILABLE,
+ cameraId);
+ Log.i(TAG, "Camera " + cameraId + " is available");
+ }
+
+ @Override
+ public void onCameraUnavailable(String cameraId) {
+ super.onCameraUnavailable(cameraId);
+ mErrorServiceConnection.logAsync(TestConstants.EVENT_CAMERA_UNAVAILABLE,
+ cameraId);
+ Log.i(TAG, "Camera " + cameraId + " is unavailable");
+ }
+ }, null);
+
+ final String chosen = cameraIds[0];
+
+ manager.openCamera(chosen, new CameraDevice.StateCallback() {
+ @Override
+ public void onOpened(CameraDevice cameraDevice) {
+ mErrorServiceConnection.logAsync(TestConstants.EVENT_CAMERA_CONNECT,
+ chosen);
+ Log.i(TAG, "Camera " + chosen + " is opened");
+ }
+
+ @Override
+ public void onDisconnected(CameraDevice cameraDevice) {
+ mErrorServiceConnection.logAsync(TestConstants.EVENT_CAMERA_EVICTED,
+ chosen);
+ Log.i(TAG, "Camera " + chosen + " is disconnected");
+ }
+
+ @Override
+ public void onError(CameraDevice cameraDevice, int i) {
+ mErrorServiceConnection.logAsync(TestConstants.EVENT_CAMERA_ERROR, TAG +
+ " Camera " + chosen + " experienced error " + i);
+ Log.e(TAG, "Camera " + chosen + " onError called with error " + i);
+ }
+ }, null);
+ } catch (CameraAccessException e) {
+ mErrorServiceConnection.logAsync(TestConstants.EVENT_CAMERA_ERROR, TAG +
+ " camera exception during connection: " + e);
+ Log.e(TAG, "Access exception: " + e);
+ }
+ }
+
+ @Override
+ protected void onDestroy() {
+ Log.i(TAG, "onDestroy called.");
+ super.onDestroy();
+ if (mErrorServiceConnection != null) {
+ mErrorServiceConnection.stop();
+ mErrorServiceConnection = null;
+ }
+ }
+}
diff --git a/tests/tests/hardware/src/android/hardware/multiprocess/camera/cts/CameraEvictionTest.java b/tests/tests/hardware/src/android/hardware/multiprocess/camera/cts/CameraEvictionTest.java
new file mode 100644
index 0000000..3cf1dc7
--- /dev/null
+++ b/tests/tests/hardware/src/android/hardware/multiprocess/camera/cts/CameraEvictionTest.java
@@ -0,0 +1,494 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.hardware.multiprocess.camera.cts;
+
+import android.app.ActivityManager;
+import android.content.Context;
+import android.content.Intent;
+import android.hardware.Camera;
+import android.hardware.camera2.CameraAccessException;
+import android.hardware.camera2.CameraDevice;
+import android.hardware.camera2.CameraManager;
+import android.hardware.cts.CameraCtsActivity;
+import android.hardware.multiprocess.ErrorLoggingService;
+import android.os.Handler;
+import android.test.ActivityInstrumentationTestCase2;
+import android.util.Log;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+import java.util.Objects;
+import java.util.concurrent.TimeoutException;
+
+import static org.mockito.Mockito.*;
+
+/**
+ * Tests for multi-process camera usage behavior.
+ */
+public class CameraEvictionTest extends ActivityInstrumentationTestCase2<CameraCtsActivity> {
+
+ public static final String TAG = "CameraEvictionTest";
+
+ private static final int OPEN_TIMEOUT = 2000; // Timeout for camera to open (ms).
+ private static final int SETUP_TIMEOUT = 5000; // Remote camera setup timeout (ms).
+ private static final int EVICTION_TIMEOUT = 1000; // Remote camera eviction timeout (ms).
+ private static final int WAIT_TIME = 2000; // Time to wait for process to launch (ms).
+ private static final int UI_TIMEOUT = 10000; // Time to wait for UI event before timeout (ms).
+ ErrorLoggingService.ErrorServiceConnection mErrorServiceConnection;
+
+ private ActivityManager mActivityManager;
+ private Context mContext;
+ private Camera mCamera;
+ private CameraDevice mCameraDevice;
+ private final Object mLock = new Object();
+ private boolean mCompleted = false;
+ private int mProcessPid = -1;
+
+ public CameraEvictionTest() {
+ super(CameraCtsActivity.class);
+ }
+
+ public static class StateCallbackImpl extends CameraDevice.StateCallback {
+ CameraDevice mCameraDevice;
+
+ public StateCallbackImpl() {
+ super();
+ }
+
+ @Override
+ public void onOpened(CameraDevice cameraDevice) {
+ synchronized(this) {
+ mCameraDevice = cameraDevice;
+ }
+ Log.i(TAG, "CameraDevice onOpened called for main CTS test process.");
+ }
+
+ @Override
+ public void onClosed(CameraDevice camera) {
+ super.onClosed(camera);
+ synchronized(this) {
+ mCameraDevice = null;
+ }
+ Log.i(TAG, "CameraDevice onClosed called for main CTS test process.");
+ }
+
+ @Override
+ public void onDisconnected(CameraDevice cameraDevice) {
+ synchronized(this) {
+ mCameraDevice = null;
+ }
+ Log.i(TAG, "CameraDevice onDisconnected called for main CTS test process.");
+
+ }
+
+ @Override
+ public void onError(CameraDevice cameraDevice, int i) {
+ Log.i(TAG, "CameraDevice onError called for main CTS test process with error " +
+ "code: " + i);
+ }
+
+ public synchronized CameraDevice getCameraDevice() {
+ return mCameraDevice;
+ }
+ }
+
+ @Override
+ protected void setUp() throws Exception {
+ super.setUp();
+
+ mCompleted = false;
+ mContext = getActivity();
+ System.setProperty("dexmaker.dexcache", mContext.getCacheDir().toString());
+ mActivityManager = (ActivityManager) mContext.getSystemService(Context.ACTIVITY_SERVICE);
+ mErrorServiceConnection = new ErrorLoggingService.ErrorServiceConnection(mContext);
+ mErrorServiceConnection.start();
+ }
+
+ @Override
+ protected void tearDown() throws Exception {
+ super.tearDown();
+ if (mProcessPid != -1) {
+ android.os.Process.killProcess(mProcessPid);
+ mProcessPid = -1;
+ }
+ if (mErrorServiceConnection != null) {
+ mErrorServiceConnection.stop();
+ mErrorServiceConnection = null;
+ }
+ if (mCamera != null) {
+ mCamera.release();
+ mCamera = null;
+ }
+ if (mCameraDevice != null) {
+ mCameraDevice.close();
+ mCameraDevice = null;
+ }
+ mContext = null;
+ mActivityManager = null;
+ }
+
+ /**
+ * Test basic eviction scenarios for the Camera1 API.
+ */
+ public void testCamera1ActivityEviction() throws Throwable {
+
+ // Open a camera1 client in the main CTS process's activity
+ final Camera.ErrorCallback mockErrorCb1 = mock(Camera.ErrorCallback.class);
+ final boolean[] skip = {false};
+ runTestOnUiThread(new Runnable() {
+ @Override
+ public void run() {
+ // Open camera
+ mCamera = Camera.open();
+ if (mCamera == null) {
+ skip[0] = true;
+ } else {
+ mCamera.setErrorCallback(mockErrorCb1);
+ }
+ notifyFromUI();
+ }
+ });
+ waitForUI();
+
+ if (skip[0]) {
+ Log.i(TAG, "Skipping testCamera1ActivityEviction, device has no cameras.");
+ return;
+ }
+
+ verifyZeroInteractions(mockErrorCb1);
+
+ startRemoteProcess(Camera1Activity.class, "camera1ActivityProcess");
+
+ // Make sure camera was setup correctly in remote activity
+ List<ErrorLoggingService.LogEvent> events = null;
+ try {
+ events = mErrorServiceConnection.getLog(SETUP_TIMEOUT,
+ TestConstants.EVENT_CAMERA_CONNECT);
+ } finally {
+ if (events != null) assertOnly(TestConstants.EVENT_CAMERA_CONNECT, events);
+ }
+
+ Thread.sleep(WAIT_TIME);
+
+ // Ensure UI thread has a chance to process callbacks.
+ runTestOnUiThread(new Runnable() {
+ @Override
+ public void run() {
+ Log.i("CTS", "Did something on UI thread.");
+ notifyFromUI();
+ }
+ });
+ waitForUI();
+
+ // Make sure we received correct callback in error listener, and nothing else
+ verify(mockErrorCb1, only()).onError(eq(Camera.CAMERA_ERROR_EVICTED), isA(Camera.class));
+ mCamera = null;
+
+ // Try to open the camera again (even though other TOP process holds the camera).
+ final boolean[] pass = {false};
+ runTestOnUiThread(new Runnable() {
+ @Override
+ public void run() {
+ // Open camera
+ try {
+ mCamera = Camera.open();
+ } catch (RuntimeException e) {
+ pass[0] = true;
+ }
+ notifyFromUI();
+ }
+ });
+ waitForUI();
+
+ assertTrue("Did not receive exception when opening camera while camera is held by a" +
+ " higher priority client process.", pass[0]);
+
+ // Verify that attempting to open the camera didn't cause anything weird to happen in the
+ // other process.
+ List<ErrorLoggingService.LogEvent> eventList2 = null;
+ boolean timeoutExceptionHit = false;
+ try {
+ eventList2 = mErrorServiceConnection.getLog(EVICTION_TIMEOUT);
+ } catch (TimeoutException e) {
+ timeoutExceptionHit = true;
+ }
+
+ assertNone("Remote camera service received invalid events: ", eventList2);
+ assertTrue("Remote camera service exited early", timeoutExceptionHit);
+ android.os.Process.killProcess(mProcessPid);
+ mProcessPid = -1;
+ }
+
+ /**
+ * Test basic eviction scenarios for the Camera2 API.
+ */
+ public void testBasicCamera2ActivityEviction() throws Throwable {
+ CameraManager manager = (CameraManager) mContext.getSystemService(Context.CAMERA_SERVICE);
+ assertNotNull(manager);
+ String[] cameraIds = manager.getCameraIdList();
+ assertNotEmpty(cameraIds);
+ assertTrue(mContext.getMainLooper() != null);
+
+ // Setup camera manager
+ String chosenCamera = cameraIds[0];
+ Handler cameraHandler = new Handler(mContext.getMainLooper());
+ final CameraManager.AvailabilityCallback mockAvailCb =
+ mock(CameraManager.AvailabilityCallback.class);
+
+ manager.registerAvailabilityCallback(mockAvailCb, cameraHandler);
+
+ Thread.sleep(WAIT_TIME);
+
+ verify(mockAvailCb, times(1)).onCameraAvailable(chosenCamera);
+ verify(mockAvailCb, never()).onCameraUnavailable(chosenCamera);
+
+ // Setup camera device
+ final CameraDevice.StateCallback spyStateCb = spy(new StateCallbackImpl());
+ manager.openCamera(chosenCamera, spyStateCb, cameraHandler);
+
+ verify(spyStateCb, timeout(OPEN_TIMEOUT).times(1)).onOpened(any(CameraDevice.class));
+ verify(spyStateCb, never()).onClosed(any(CameraDevice.class));
+ verify(spyStateCb, never()).onDisconnected(any(CameraDevice.class));
+ verify(spyStateCb, never()).onError(any(CameraDevice.class), anyInt());
+
+ // Open camera from remote process
+ startRemoteProcess(Camera2Activity.class, "camera2ActivityProcess");
+
+ // Verify that the remote camera was opened correctly
+ List<ErrorLoggingService.LogEvent> allEvents = mErrorServiceConnection.getLog(SETUP_TIMEOUT,
+ TestConstants.EVENT_CAMERA_CONNECT);
+ assertNotNull("Camera device not setup in remote process!", allEvents);
+
+ // Filter out relevant events for other camera devices
+ ArrayList<ErrorLoggingService.LogEvent> events = new ArrayList<>();
+ for (ErrorLoggingService.LogEvent e : allEvents) {
+ int eventTag = e.getEvent();
+ if (eventTag == TestConstants.EVENT_CAMERA_UNAVAILABLE ||
+ eventTag == TestConstants.EVENT_CAMERA_CONNECT ||
+ eventTag == TestConstants.EVENT_CAMERA_AVAILABLE) {
+ if (!Objects.equals(e.getLogText(), chosenCamera)) {
+ continue;
+ }
+ }
+ events.add(e);
+ }
+ int[] eventList = new int[events.size()];
+ int eventIdx = 0;
+ for (ErrorLoggingService.LogEvent e : events) {
+ eventList[eventIdx++] = e.getEvent();
+ }
+ String[] actualEvents = TestConstants.convertToStringArray(eventList);
+ String[] expectedEvents = new String[] {TestConstants.EVENT_CAMERA_UNAVAILABLE_STR,
+ TestConstants.EVENT_CAMERA_CONNECT_STR};
+ String[] ignoredEvents = new String[] { TestConstants.EVENT_CAMERA_AVAILABLE_STR,
+ TestConstants.EVENT_CAMERA_UNAVAILABLE_STR };
+ assertOrderedEvents(actualEvents, expectedEvents, ignoredEvents);
+
+ // Verify that the local camera was evicted properly
+ verify(spyStateCb, times(1)).onDisconnected(any(CameraDevice.class));
+ verify(spyStateCb, never()).onClosed(any(CameraDevice.class));
+ verify(spyStateCb, never()).onError(any(CameraDevice.class), anyInt());
+ verify(spyStateCb, times(1)).onOpened(any(CameraDevice.class));
+
+ // Verify that we can no longer open the camera, as it is held by a higher priority process
+ boolean openException = false;
+ try {
+ manager.openCamera(chosenCamera, spyStateCb, cameraHandler);
+ } catch(CameraAccessException e) {
+ assertTrue("Received incorrect camera exception when opening camera: " + e,
+ e.getReason() == CameraAccessException.CAMERA_IN_USE);
+ openException = true;
+ }
+
+ assertTrue("Didn't receive exception when trying to open camera held by higher priority " +
+ "process.", openException);
+
+ // Verify that attempting to open the camera didn't cause anything weird to happen in the
+ // other process.
+ List<ErrorLoggingService.LogEvent> eventList2 = null;
+ boolean timeoutExceptionHit = false;
+ try {
+ eventList2 = mErrorServiceConnection.getLog(EVICTION_TIMEOUT);
+ } catch (TimeoutException e) {
+ timeoutExceptionHit = true;
+ }
+
+ assertNone("Remote camera service received invalid events: ", eventList2);
+ assertTrue("Remote camera service exited early", timeoutExceptionHit);
+ android.os.Process.killProcess(mProcessPid);
+ mProcessPid = -1;
+ }
+
+ /**
+ * Block until UI thread calls {@link #notifyFromUI()}.
+ * @throws InterruptedException
+ */
+ private void waitForUI() throws InterruptedException {
+ synchronized(mLock) {
+ if (mCompleted) return;
+ while (!mCompleted) {
+ mLock.wait();
+ }
+ mCompleted = false;
+ }
+ }
+
+ /**
+ * Wake up any threads waiting in calls to {@link #waitForUI()}.
+ */
+ private void notifyFromUI() {
+ synchronized (mLock) {
+ mCompleted = true;
+ mLock.notifyAll();
+ }
+ }
+
+ /**
+ * Return the PID for the process with the given name in the given list of process info.
+ *
+ * @param processName the name of the process whose PID to return.
+ * @param list a list of {@link ActivityManager.RunningAppProcessInfo} to check.
+ * @return the PID of the given process, or -1 if it was not included in the list.
+ */
+ private static int getPid(String processName,
+ List<ActivityManager.RunningAppProcessInfo> list) {
+ for (ActivityManager.RunningAppProcessInfo rai : list) {
+ if (processName.equals(rai.processName))
+ return rai.pid;
+ }
+ return -1;
+ }
+
+ /**
+ * Start an activity of the given class running in a remote process with the given name.
+ *
+ * @param klass the class of the {@link android.app.Activity} to start.
+ * @param processName the remote activity name.
+ * @throws InterruptedException
+ */
+ public void startRemoteProcess(java.lang.Class<?> klass, String processName)
+ throws InterruptedException {
+ // Ensure no running activity process with same name
+ String cameraActivityName = mContext.getPackageName() + ":" + processName;
+ List<ActivityManager.RunningAppProcessInfo> list =
+ mActivityManager.getRunningAppProcesses();
+ assertEquals(-1, getPid(cameraActivityName, list));
+
+ // Start activity in a new top foreground process
+ Intent activityIntent = new Intent(mContext, klass);
+ activityIntent.setFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
+ mContext.startActivity(activityIntent);
+ Thread.sleep(WAIT_TIME);
+
+ // Fail if activity isn't running
+ list = mActivityManager.getRunningAppProcesses();
+ mProcessPid = getPid(cameraActivityName, list);
+ assertTrue(-1 != mProcessPid);
+ }
+
+ /**
+ * Assert that there is only one event of the given type in the event list.
+ *
+ * @param event event type to check for.
+ * @param events {@link List} of events.
+ */
+ public static void assertOnly(int event, List<ErrorLoggingService.LogEvent> events) {
+ assertTrue("Remote camera activity never received event: " + event, events != null);
+ for (ErrorLoggingService.LogEvent e : events) {
+ assertFalse("Remote camera activity received invalid event (" + e +
+ ") while waiting for event: " + event,
+ e.getEvent() < 0 || e.getEvent() != event);
+ }
+ assertTrue("Remote camera activity never received event: " + event, events.size() >= 1);
+ assertTrue("Remote camera activity received too many " + event + " events, received: " +
+ events.size(), events.size() == 1);
+ }
+
+ /**
+ * Assert that there were no log events in the given list.
+ *
+ * @param msg message to show on assertion failure.
+ * @param events {@link List} of events.
+ */
+ public static void assertNone(String msg, List<ErrorLoggingService.LogEvent> events) {
+ if (events == null) return;
+ StringBuilder builder = new StringBuilder(msg + "\n");
+ for (ErrorLoggingService.LogEvent e : events) {
+ builder.append(e).append("\n");
+ }
+ assertTrue(builder.toString(), events.isEmpty());
+ }
+
+ /**
+ * Assert that the given array is neither null nor empty.
+ *
+ * @param array array to check.
+ */
+ public static <T> void assertNotEmpty(T[] array) {
+ assertNotNull(array);
+ assertFalse("Array is empty: " + Arrays.toString(array), array.length == 0);
+ }
+
+ /**
+ * Given an 'actual' array of objects, check that the objects given in the 'expected'
+ * array are also present in the 'actual' array in the same order. Objects in the 'actual'
+ * array that are not in the 'expected' array are skipped and ignored if they are given
+ * in the 'ignored' array, otherwise this assertion will fail.
+ *
+ * @param actual the ordered array of objects to check.
+ * @param expected the ordered array of expected objects.
+ * @param ignored the array of objects that will be ignored if present in actual,
+ * but not in expected (or are out of order).
+ * @param <T>
+ */
+ public static <T> void assertOrderedEvents(T[] actual, T[] expected, T[] ignored) {
+ assertNotNull(actual);
+ assertNotNull(expected);
+ assertNotNull(ignored);
+
+ int expIndex = 0;
+ int index = 0;
+ for (T i : actual) {
+ // If explicitly expected, move to next
+ if (expIndex < expected.length && Objects.equals(i, expected[expIndex])) {
+ expIndex++;
+ continue;
+ }
+
+ // Otherwise, check whether this event may be ignored
+ boolean canIgnore = false;
+ for (T j : ignored) {
+ if (Objects.equals(i, j)) {
+ canIgnore = true;
+ break;
+ }
+ }
+
+ // Fail if the event was neither expected nor ignored.
+ assertTrue("Event at index " + index + " in actual array " +
+ Arrays.toString(actual) + " was unexpected: expected array was " +
+ Arrays.toString(expected) + ", ignored array was: " +
+ Arrays.toString(ignored), canIgnore);
+ index++;
+ }
+ assertTrue("Only had " + expIndex + " of " + expected.length +
+ " expected objects in array " + Arrays.toString(actual) + ", expected was " +
+ Arrays.toString(expected), expIndex == expected.length);
+ }
+}
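The ordering contract of assertOrderedEvents can be summarized with a small hypothetical example (string values assumed purely for illustration): ignored entries may appear anywhere, but the expected entries must occur in order.

    String[] actual   = { "available", "unavailable", "connect", "available" };
    String[] expected = { "unavailable", "connect" };
    String[] ignored  = { "available", "unavailable" };
    assertOrderedEvents(actual, expected, ignored);   // passes
    // Swapping 'expected' to { "connect", "unavailable" } would fail: order matters.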
diff --git a/tests/tests/hardware/src/android/hardware/multiprocess/camera/cts/TestConstants.java b/tests/tests/hardware/src/android/hardware/multiprocess/camera/cts/TestConstants.java
new file mode 100644
index 0000000..2805e02
--- /dev/null
+++ b/tests/tests/hardware/src/android/hardware/multiprocess/camera/cts/TestConstants.java
@@ -0,0 +1,75 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package android.hardware.multiprocess.camera.cts;
+
+/**
+ * Constants used throughout the multi-process unit tests.
+ */
+public class TestConstants {
+
+ public static final int EVENT_CAMERA_ERROR = -1;
+ public static final int EVENT_CAMERA_CONNECT = 1;
+ public static final int EVENT_CAMERA_EVICTED = 2;
+ public static final int EVENT_CAMERA_AVAILABLE = 3;
+ public static final int EVENT_CAMERA_UNAVAILABLE = 4;
+
+ public static final String EVENT_CAMERA_ERROR_STR = "error";
+ public static final String EVENT_CAMERA_CONNECT_STR = "connect";
+ public static final String EVENT_CAMERA_EVICTED_STR = "evicted";
+ public static final String EVENT_CAMERA_AVAILABLE_STR = "available";
+ public static final String EVENT_CAMERA_UNAVAILABLE_STR = "unavailable";
+
+ public static final String EVENT_CAMERA_UNKNOWN_STR = "unknown";
+
+ /**
+ * Convert the given error code to a string.
+ *
+ * @param err error code from {@link TestConstants}.
+ * @return string for this error code.
+ */
+ public static String errToStr(int err) {
+ switch(err) {
+ case EVENT_CAMERA_ERROR:
+ return EVENT_CAMERA_ERROR_STR;
+ case EVENT_CAMERA_CONNECT:
+ return EVENT_CAMERA_CONNECT_STR;
+ case EVENT_CAMERA_EVICTED:
+ return EVENT_CAMERA_EVICTED_STR;
+ case EVENT_CAMERA_AVAILABLE:
+ return EVENT_CAMERA_AVAILABLE_STR;
+ case EVENT_CAMERA_UNAVAILABLE:
+ return EVENT_CAMERA_UNAVAILABLE_STR;
+ default:
+ return EVENT_CAMERA_UNKNOWN_STR + " " + err;
+ }
+ }
+
+ /**
+ * Convert the given array of error codes to an array of strings.
+ *
+ * @param err array of error codes from {@link TestConstants}.
+ * @return string array for the given error codes.
+ */
+ public static String[] convertToStringArray(int[] err) {
+ if (err == null) return null;
+ String[] ret = new String[err.length];
+ for (int i = 0; i < err.length; i++) {
+ ret[i] = errToStr(err[i]);
+ }
+ return ret;
+ }
+
+}
diff --git a/tests/tests/keystore/src/android/keystore/cts/KeyPairGeneratorSpecTest.java b/tests/tests/keystore/src/android/keystore/cts/KeyPairGeneratorSpecTest.java
index a923844..33c8955 100644
--- a/tests/tests/keystore/src/android/keystore/cts/KeyPairGeneratorSpecTest.java
+++ b/tests/tests/keystore/src/android/keystore/cts/KeyPairGeneratorSpecTest.java
@@ -25,11 +25,6 @@
import javax.security.auth.x500.X500Principal;
public class KeyPairGeneratorSpecTest extends AndroidTestCase {
- private static final X500Principal DEFAULT_CERT_SUBJECT = new X500Principal("CN=fake");
- private static final BigInteger DEFAULT_CERT_SERIAL_NUMBER = new BigInteger("1");
- private static final Date DEFAULT_CERT_NOT_BEFORE = new Date(0L); // Jan 1 1980
- private static final Date DEFAULT_CERT_NOT_AFTER = new Date(2461449600000L); // Jan 1 2048
-
private static final String TEST_ALIAS_1 = "test1";
private static final X500Principal TEST_DN_1 = new X500Principal("CN=test1");
@@ -110,44 +105,56 @@
}
}
- public void testBuilder_MissingSubjectDN_Success() throws Exception {
- KeyPairGeneratorSpec spec = new KeyPairGeneratorSpec.Builder(getContext())
- .setAlias(TEST_ALIAS_1)
- .setSerialNumber(SERIAL_1)
- .setStartDate(NOW)
- .setEndDate(NOW_PLUS_10_YEARS)
- .build();
- assertEquals(DEFAULT_CERT_SUBJECT, spec.getSubjectDN());
+ public void testBuilder_MissingSubjectDN_Failure() throws Exception {
+ try {
+ new KeyPairGeneratorSpec.Builder(getContext())
+ .setAlias(TEST_ALIAS_1)
+ .setSerialNumber(SERIAL_1)
+ .setStartDate(NOW)
+ .setEndDate(NOW_PLUS_10_YEARS)
+ .build();
+ fail("Should throw IllegalArgumentException when subject is missing");
+ } catch (IllegalArgumentException expected) {
+ }
}
- public void testBuilder_MissingSerialNumber_Success() throws Exception {
- KeyPairGeneratorSpec spec = new KeyPairGeneratorSpec.Builder(getContext())
- .setAlias(TEST_ALIAS_1)
- .setSubject(TEST_DN_1)
- .setStartDate(NOW)
- .setEndDate(NOW_PLUS_10_YEARS)
- .build();
- assertEquals(DEFAULT_CERT_SERIAL_NUMBER, spec.getSerialNumber());
+ public void testBuilder_MissingSerialNumber_Failure() throws Exception {
+ try {
+ new KeyPairGeneratorSpec.Builder(getContext())
+ .setAlias(TEST_ALIAS_1)
+ .setSubject(TEST_DN_1)
+ .setStartDate(NOW)
+ .setEndDate(NOW_PLUS_10_YEARS)
+ .build();
+ fail("Should throw IllegalArgumentException when serialNumber is missing");
+ } catch (IllegalArgumentException expected) {
+ }
}
- public void testBuilder_MissingStartDate_Success() throws Exception {
- KeyPairGeneratorSpec spec = new KeyPairGeneratorSpec.Builder(getContext())
- .setAlias(TEST_ALIAS_1)
- .setSubject(TEST_DN_1)
- .setSerialNumber(SERIAL_1)
- .setEndDate(NOW_PLUS_10_YEARS)
- .build();
- assertEquals(DEFAULT_CERT_NOT_BEFORE, spec.getStartDate());
+ public void testBuilder_MissingStartDate_Failure() throws Exception {
+ try {
+ new KeyPairGeneratorSpec.Builder(getContext())
+ .setAlias(TEST_ALIAS_1)
+ .setSubject(TEST_DN_1)
+ .setSerialNumber(SERIAL_1)
+ .setEndDate(NOW_PLUS_10_YEARS)
+ .build();
+ fail("Should throw IllegalArgumentException when startDate is missing");
+ } catch (IllegalArgumentException expected) {
+ }
}
- public void testBuilder_MissingEndDate_Success() throws Exception {
- KeyPairGeneratorSpec spec = new KeyPairGeneratorSpec.Builder(getContext())
- .setAlias(TEST_ALIAS_1)
- .setSubject(TEST_DN_1)
- .setSerialNumber(SERIAL_1)
- .setStartDate(NOW)
- .build();
- assertEquals(DEFAULT_CERT_NOT_AFTER, spec.getEndDate());
+ public void testBuilder_MissingEndDate_Failure() throws Exception {
+ try {
+ new KeyPairGeneratorSpec.Builder(getContext())
+ .setAlias(TEST_ALIAS_1)
+ .setSubject(TEST_DN_1)
+ .setSerialNumber(SERIAL_1)
+ .setStartDate(NOW)
+ .build();
+ fail("Should throw IllegalArgumentException when endDate is missing");
+ } catch (IllegalArgumentException expected) {
+ }
}
public void testBuilder_EndBeforeStart_Failure() throws Exception {
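With the defaults removed, a KeyPairGeneratorSpec now builds only when all certificate fields are supplied explicitly; a minimal sketch using the constants already defined in this test:

    KeyPairGeneratorSpec spec = new KeyPairGeneratorSpec.Builder(getContext())
            .setAlias(TEST_ALIAS_1)
            .setSubject(TEST_DN_1)
            .setSerialNumber(SERIAL_1)
            .setStartDate(NOW)
            .setEndDate(NOW_PLUS_10_YEARS)
            .build();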
diff --git a/tests/webgl/Android.mk b/tests/tests/libcorelegacy22/Android.mk
old mode 100755
new mode 100644
similarity index 69%
rename from tests/webgl/Android.mk
rename to tests/tests/libcorelegacy22/Android.mk
index ce22dd8..fb3c503
--- a/tests/webgl/Android.mk
+++ b/tests/tests/libcorelegacy22/Android.mk
@@ -1,4 +1,4 @@
-# Copyright (C) 2014 The Android Open Source Project
+# Copyright (C) 2015 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -16,19 +16,17 @@
include $(CLEAR_VARS)
-# Don't include this package in any target.
+# don't include this package in any target
LOCAL_MODULE_TAGS := optional
-
-# When built, explicitly put it in the data partition.
+# and when built explicitly put it in the data partition
LOCAL_MODULE_PATH := $(TARGET_OUT_DATA_APPS)
-LOCAL_STATIC_JAVA_LIBRARIES := ctsdeviceutil ctstestrunner
+LOCAL_STATIC_JAVA_LIBRARIES := ctstestrunner
LOCAL_SRC_FILES := $(call all-java-files-under, src)
-# Must match the package name in CtsTestCaseList.mk
-LOCAL_PACKAGE_NAME := CtsWebGLTestCases
+LOCAL_PACKAGE_NAME := CtsLibcoreLegacy22TestCases
-LOCAL_SDK_VERSION := current
+LOCAL_SDK_VERSION := 22
include $(BUILD_CTS_PACKAGE)
diff --git a/tests/tests/libcorelegacy22/AndroidManifest.xml b/tests/tests/libcorelegacy22/AndroidManifest.xml
new file mode 100644
index 0000000..4ff9ec2
--- /dev/null
+++ b/tests/tests/libcorelegacy22/AndroidManifest.xml
@@ -0,0 +1,35 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!--
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ -->
+
+<manifest xmlns:android="http://schemas.android.com/apk/res/android"
+ package="com.android.cts.libcorelegacy22">
+
+ <uses-permission android:name="android.permission.DISABLE_KEYGUARD" />
+ <uses-permission android:name="android.permission.READ_LOGS" />
+ <application>
+ <uses-library android:name="android.test.runner" />
+ </application>
+
+ <instrumentation android:name="android.support.test.runner.AndroidJUnitRunner"
+ android:targetPackage="com.android.cts.libcorelegacy22"
+ android:label="CTS tests of android APIs last available in API 22">
+ <meta-data android:name="listener"
+ android:value="com.android.cts.runner.CtsTestRunListener" />
+ </instrumentation>
+
+</manifest>
+
diff --git a/tests/tests/util/src/android/util/cts/FloatMathTest.java b/tests/tests/libcorelegacy22/src/android/util/cts/FloatMathTest.java
similarity index 62%
rename from tests/tests/util/src/android/util/cts/FloatMathTest.java
rename to tests/tests/libcorelegacy22/src/android/util/cts/FloatMathTest.java
index 4d0b572..6b775fc 100644
--- a/tests/tests/util/src/android/util/cts/FloatMathTest.java
+++ b/tests/tests/libcorelegacy22/src/android/util/cts/FloatMathTest.java
@@ -19,27 +19,44 @@
import android.util.FloatMath;
public class FloatMathTest extends TestCase {
- public void testFloatMathMethods() {
- // ceil
- assertEquals(8.0f, FloatMath.ceil(7.2f));
- assertEquals(-6.0f, FloatMath.ceil(-6.3f));
- // floor
+ public void testSqrt() {
+ assertEquals(5.0f, FloatMath.sqrt(25));
+ assertEquals(7, FloatMath.sqrt(49), 0);
+ assertEquals(10, FloatMath.sqrt(100), 0);
+ assertEquals(0, FloatMath.sqrt(0), 0);
+ assertEquals(1, FloatMath.sqrt(1), 0);
+ }
+
+ public void testFloor() {
+ assertEquals(78, FloatMath.floor(78.89f), 0);
+ assertEquals(-79, FloatMath.floor(-78.89f), 0);
assertEquals(7.0f, FloatMath.floor(7.2f));
assertEquals(-7.0f, FloatMath.floor(-6.3f));
+ }
- // sin
+ public void testCeil() {
+ assertEquals(79, FloatMath.ceil(78.89f), 0);
+ assertEquals(-78, FloatMath.ceil(-78.89f), 0);
+ assertEquals(8.0f, FloatMath.ceil(7.2f));
+ assertEquals(-6.0f, FloatMath.ceil(-6.3f));
+ }
+
+ public void testCos() {
+ assertEquals(1.0f, FloatMath.cos(0), 0);
+ assertEquals(0.5403023058681398f, FloatMath.cos(1), 0);
+ assertEquals(0.964966f, FloatMath.cos(50));
+ assertEquals(0.69925081f, FloatMath.cos(150));
+ assertEquals(0.964966f, FloatMath.cos(-50));
+ }
+
+ public void testSin() {
+ assertEquals(0.0, FloatMath.sin(0), 0);
+ assertEquals(0.8414709848078965f, FloatMath.sin(1), 0);
assertEquals(-0.26237485f, FloatMath.sin(50));
assertEquals(-0.71487643f, FloatMath.sin(150));
assertEquals(0.26237485f, FloatMath.sin(-50));
- // cos
- assertEquals(0.964966f, FloatMath.cos(50));
- assertEquals(0.69925081f, FloatMath.cos(150));
- assertEquals(0.964966f, FloatMath.cos(-50));
-
- // sqrt
- assertEquals(5.0f, FloatMath.sqrt(25));
}
-
}
+
diff --git a/tests/tests/media/AndroidManifest.xml b/tests/tests/media/AndroidManifest.xml
index e913f05..e4b6f6b 100644
--- a/tests/tests/media/AndroidManifest.xml
+++ b/tests/tests/media/AndroidManifest.xml
@@ -52,6 +52,21 @@
<category android:name="android.intent.category.FRAMEWORK_INSTRUMENTATION_TEST" />
</intent-filter>
</activity>
+ <activity android:name="android.media.cts.ResourceManagerStubActivity"
+ android:label="ResourceManagerStubActivity">
+ <intent-filter>
+ <action android:name="android.intent.action.MAIN" />
+ <category android:name="android.intent.category.FRAMEWORK_INSTRUMENTATION_TEST" />
+ </intent-filter>
+ </activity>
+ <activity android:name="android.media.cts.ResourceManagerTestActivity1"
+ android:label="ResourceManagerTestActivity1"
+ android:process=":mediaCodecTestProcess1">
+ </activity>
+ <activity android:name="android.media.cts.ResourceManagerTestActivity2"
+ android:label="ResourceManagerTestActivity2"
+ android:process=":mediaCodecTestProcess2">
+ </activity>
<activity android:name="android.media.cts.RingtonePickerActivity"
android:label="RingtonePickerActivity">
<intent-filter>
diff --git a/tests/tests/media/assets/fileSequence0.ts b/tests/tests/media/assets/fileSequence0.ts
new file mode 100644
index 0000000..48f2bcd
--- /dev/null
+++ b/tests/tests/media/assets/fileSequence0.ts
Binary files differ
diff --git a/tests/tests/media/assets/fileSequence1.ts b/tests/tests/media/assets/fileSequence1.ts
new file mode 100644
index 0000000..737fbd0
--- /dev/null
+++ b/tests/tests/media/assets/fileSequence1.ts
Binary files differ
diff --git a/tests/tests/media/assets/prog_index.m3u8 b/tests/tests/media/assets/prog_index.m3u8
new file mode 100644
index 0000000..88f99d3
--- /dev/null
+++ b/tests/tests/media/assets/prog_index.m3u8
@@ -0,0 +1,10 @@
+#EXTM3U
+#EXT-X-TARGETDURATION:10
+#EXT-X-VERSION:3
+#EXT-X-MEDIA-SEQUENCE:0
+#EXT-X-PLAYLIST-TYPE:VOD
+#EXTINF:9.90000,
+fileSequence0.ts
+#EXTINF:10.00000,
+fileSequence1.ts
+#EXT-X-ENDLIST
diff --git a/tests/tests/media/src/android/media/cts/AdaptivePlaybackTest.java b/tests/tests/media/src/android/media/cts/AdaptivePlaybackTest.java
index dbb609d..1fb3ea7 100644
--- a/tests/tests/media/src/android/media/cts/AdaptivePlaybackTest.java
+++ b/tests/tests/media/src/android/media/cts/AdaptivePlaybackTest.java
@@ -862,6 +862,15 @@
mCodec.configure(format, mSurface.getSurface(), null /* crypto */, 0 /* flags */);
Log.i(TAG, "start");
mCodec.start();
+
+ // inject some minimal setOutputSurface test
+ // TODO: change this test to also change the surface midstream
+ try {
+ mCodec.setOutputSurface(null);
+ fail("should not be able to set surface to NULL");
+ } catch (IllegalArgumentException e) {}
+ mCodec.setOutputSurface(mSurface.getSurface());
+
mInputBuffers = mCodec.getInputBuffers();
mOutputBuffers = mCodec.getOutputBuffers();
Log.i(TAG, "configured " + mInputBuffers.length + " input[" +
diff --git a/tests/tests/media/src/android/media/cts/AudioHelper.java b/tests/tests/media/src/android/media/cts/AudioHelper.java
index 6f3d4d0..efee024 100644
--- a/tests/tests/media/src/android/media/cts/AudioHelper.java
+++ b/tests/tests/media/src/android/media/cts/AudioHelper.java
@@ -17,6 +17,7 @@
package android.media.cts;
import java.nio.ByteBuffer;
+
import org.junit.Assert;
import android.media.AudioAttributes;
@@ -29,6 +30,43 @@
// Used for statistics and loopers in listener tests.
// See AudioRecordTest.java and AudioTrack_ListenerTest.java.
public class AudioHelper {
+
+ // create sine waves or chirps for data arrays
+ public static byte[] createSoundDataInByteArray(int bufferSamples, final int sampleRate,
+ final double frequency, double sweep) {
+ final double rad = 2 * Math.PI * frequency / sampleRate;
+ byte[] vai = new byte[bufferSamples];
+ sweep = Math.PI * sweep / ((double)sampleRate * vai.length);
+ for (int j = 0; j < vai.length; j++) {
+ int unsigned = (int)(Math.sin(j * (rad + j * sweep)) * Byte.MAX_VALUE)
+ + Byte.MAX_VALUE & 0xFF;
+ vai[j] = (byte) unsigned;
+ }
+ return vai;
+ }
+
+ public static short[] createSoundDataInShortArray(int bufferSamples, final int sampleRate,
+ final double frequency, double sweep) {
+ final double rad = 2 * Math.PI * frequency / sampleRate;
+ short[] vai = new short[bufferSamples];
+ sweep = Math.PI * sweep / ((double)sampleRate * vai.length);
+ for (int j = 0; j < vai.length; j++) {
+ vai[j] = (short)(Math.sin(j * (rad + j * sweep)) * Short.MAX_VALUE);
+ }
+ return vai;
+ }
+
+ public static float[] createSoundDataInFloatArray(int bufferSamples, final int sampleRate,
+ final double frequency, double sweep) {
+ final double rad = 2 * Math.PI * frequency / sampleRate;
+ float[] vaf = new float[bufferSamples];
+ sweep = Math.PI * sweep / ((double)sampleRate * vaf.length);
+ for (int j = 0; j < vaf.length; j++) {
+ vaf[j] = (float)(Math.sin(j * (rad + j * sweep)));
+ }
+ return vaf;
+ }
+
public static int frameSizeFromFormat(AudioFormat format) {
return format.getChannelCount()
* format.getBytesPerSample(format.getEncoding());
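A minimal sketch (not part of the change, constants illustrative) of how a test might stream the 16-bit data produced by the AudioHelper generators above through an AudioTrack:

// Illustrative sketch only; assumes it lives in the same package as AudioHelper
// (android.media.cts) so the helper is visible.
import android.media.AudioFormat;
import android.media.AudioManager;
import android.media.AudioTrack;

class SinePlaybackSketch {
    static void playOneSecondTone() {
        final int sampleRate = 44100;
        final int minBufferSize = AudioTrack.getMinBufferSize(sampleRate,
                AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT);
        // one second of a 600 Hz sine tone, no frequency sweep
        final short[] data = AudioHelper.createSoundDataInShortArray(
                sampleRate /* bufferSamples */, sampleRate, 600 /* frequency */, 0 /* sweep */);
        final AudioTrack track = new AudioTrack(AudioManager.STREAM_MUSIC, sampleRate,
                AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT,
                Math.max(minBufferSize, data.length * 2), AudioTrack.MODE_STREAM);
        track.play();
        track.write(data, 0 /* offsetInShorts */, data.length); // blocking write
        track.stop();
        track.release();
    }
}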
diff --git a/tests/tests/media/src/android/media/cts/AudioRecordTest.java b/tests/tests/media/src/android/media/cts/AudioRecordTest.java
index 459e575..0f5087f 100644
--- a/tests/tests/media/src/android/media/cts/AudioRecordTest.java
+++ b/tests/tests/media/src/android/media/cts/AudioRecordTest.java
@@ -22,13 +22,17 @@
import android.content.pm.PackageManager;
import android.cts.util.CtsAndroidTestCase;
import android.media.AudioFormat;
+import android.media.AudioManager;
import android.media.AudioRecord;
import android.media.AudioRecord.OnRecordPositionUpdateListener;
+import android.media.AudioTrack;
import android.media.MediaRecorder;
+import android.media.MediaSyncEvent;
import android.os.Handler;
import android.os.Looper;
import android.os.Message;
import android.util.Log;
+
import com.android.cts.util.ReportLog;
import com.android.cts.util.ResultType;
import com.android.cts.util.ResultUnit;
@@ -314,16 +318,251 @@
AudioFormat.ENCODING_PCM_16BIT);
}
+ // Test AudioRecord.Builder to verify the observed configuration of an AudioRecord built with
+ // an empty Builder matches the documentation / expected values
+ public void testAudioRecordBuilderDefault() throws Exception {
+ // constants for test
+ final String TEST_NAME = "testAudioRecordBuilderDefault";
+ // expected values below match the AudioRecord.Builder documentation
+ final int expectedCapturePreset = MediaRecorder.AudioSource.DEFAULT;
+ final String rateStr = new AudioManager(getContext())
+ .getProperty(AudioManager.PROPERTY_OUTPUT_SAMPLE_RATE);
+ final int expectedRate = Integer.valueOf(rateStr).intValue();
+ final int expectedChannel = AudioFormat.CHANNEL_IN_MONO;
+ final int expectedEncoding = AudioFormat.ENCODING_PCM_16BIT;
+ final int expectedState = AudioRecord.STATE_INITIALIZED;
+ // use builder with default values
+ final AudioRecord rec = new AudioRecord.Builder().build();
+ // save results
+ final int observedRate = rec.getSampleRate();
+ final int observedSource = rec.getAudioSource();
+ final int observedChannel = rec.getChannelConfiguration();
+ final int observedEncoding = rec.getAudioFormat();
+ final int observedState = rec.getState();
+ // release recorder before the test exits (either successfully or with an exception)
+ rec.release();
+ // compare results
+ assertEquals(TEST_NAME + ": default capture preset", expectedCapturePreset, observedSource);
+ assertEquals(TEST_NAME + ": default rate", expectedRate, observedRate);
+ assertEquals(TEST_NAME + ": default channel config", expectedChannel, observedChannel);
+ assertEquals(TEST_NAME + ": default encoding", expectedEncoding, observedEncoding);
+ assertEquals(TEST_NAME + ": state", expectedState, observedState);
+ }
+
+ // Test AudioRecord.Builder to verify the observed configuration of an AudioRecord built with
+ // an incomplete AudioFormat matches the documentation / expected values
+ public void testAudioRecordBuilderPartialFormat() throws Exception {
+ // constants for test
+ final String TEST_NAME = "testAudioRecordBuilderPartialFormat";
+ final int expectedRate = 16000;
+ final int expectedState = AudioRecord.STATE_INITIALIZED;
+ // expected values below match the AudioRecord.Builder documentation
+ final int expectedChannel = AudioFormat.CHANNEL_IN_MONO;
+ final int expectedEncoding = AudioFormat.ENCODING_PCM_16BIT;
+ // use builder with a partial audio format
+ final AudioRecord rec = new AudioRecord.Builder()
+ .setAudioFormat(new AudioFormat.Builder().setSampleRate(expectedRate).build())
+ .build();
+ // save results
+ final int observedRate = rec.getSampleRate();
+ final int observedChannel = rec.getChannelConfiguration();
+ final int observedEncoding = rec.getAudioFormat();
+ final int observedState = rec.getState();
+ // release recorder before the test exits (either successfully or with an exception)
+ rec.release();
+ // compare results
+ assertEquals(TEST_NAME + ": configured rate", expectedRate, observedRate);
+ assertEquals(TEST_NAME + ": default channel config", expectedChannel, observedChannel);
+ assertEquals(TEST_NAME + ": default encoding", expectedEncoding, observedEncoding);
+ assertEquals(TEST_NAME + ": state", expectedState, observedState);
+ }
+
+ // Test AudioRecord.Builder to verify the observed configuration of an AudioRecord matches
+ // the parameters used in the builder
+ public void testAudioRecordBuilderParams() throws Exception {
+ // constants for test
+ final String TEST_NAME = "testAudioRecordBuilderParams";
+ final int expectedRate = 8000;
+ final int expectedChannel = AudioFormat.CHANNEL_IN_MONO;
+ final int expectedChannelCount = 1;
+ final int expectedEncoding = AudioFormat.ENCODING_PCM_16BIT;
+ final int expectedSource = MediaRecorder.AudioSource.VOICE_COMMUNICATION;
+ final int expectedState = AudioRecord.STATE_INITIALIZED;
+ // use builder with expected parameters
+ final AudioRecord rec = new AudioRecord.Builder()
+ .setAudioFormat(new AudioFormat.Builder()
+ .setSampleRate(expectedRate)
+ .setChannelMask(expectedChannel)
+ .setEncoding(expectedEncoding)
+ .build())
+ .setAudioSource(expectedSource)
+ .build();
+ // save results
+ final int observedRate = rec.getSampleRate();
+ final int observedChannel = rec.getChannelConfiguration();
+ final int observedChannelCount = rec.getChannelCount();
+ final int observedEncoding = rec.getAudioFormat();
+ final int observedSource = rec.getAudioSource();
+ final int observedState = rec.getState();
+ // release recorder before the test exits (either successfully or with an exception)
+ rec.release();
+ // compare results
+ assertEquals(TEST_NAME + ": configured rate", expectedRate, observedRate);
+ assertEquals(TEST_NAME + ": configured channel config", expectedChannel, observedChannel);
+ assertEquals(TEST_NAME + ": configured encoding", expectedEncoding, observedEncoding);
+ assertEquals(TEST_NAME + ": implicit channel count", expectedChannelCount,
+ observedChannelCount);
+ assertEquals(TEST_NAME + ": configured source", expectedSource, observedSource);
+ assertEquals(TEST_NAME + ": state", expectedState, observedState);
+ }
+
+ public void testSynchronizedRecord() throws Exception {
+ if (!hasMicrophone()) {
+ return;
+ }
+ final String TEST_NAME = "testSynchronizedRecord";
+ AudioTrack track = null;
+ AudioRecord record = null;
+
+ try {
+ // 1. create a static AudioTrack.
+ final int PLAYBACK_TIME_IN_MS = 2000; /* ms duration. */
+ final int PLAYBACK_SAMPLE_RATE = 8000; /* in hz */
+ AudioFormat format = new AudioFormat.Builder()
+ .setChannelMask(AudioFormat.CHANNEL_OUT_MONO)
+ .setEncoding(AudioFormat.ENCODING_PCM_8BIT)
+ .setSampleRate(PLAYBACK_SAMPLE_RATE)
+ .build();
+ final int frameCount = AudioHelper.frameCountFromMsec(PLAYBACK_TIME_IN_MS, format);
+ final int frameSize = AudioHelper.frameSizeFromFormat(format);
+ track = new AudioTrack.Builder()
+ .setAudioFormat(format)
+ .setBufferSizeInBytes(frameCount * frameSize)
+ .setTransferMode(AudioTrack.MODE_STATIC)
+ .build();
+ // create float array and write it
+ final int sampleCount = frameCount * format.getChannelCount();
+ byte[] vab = AudioHelper.createSoundDataInByteArray(
+ sampleCount, PLAYBACK_SAMPLE_RATE, 600 /* frequency */, 0 /* sweep */);
+ assertEquals(TEST_NAME, vab.length,
+ track.write(vab, 0 /* offsetInBytes */, vab.length,
+ AudioTrack.WRITE_NON_BLOCKING));
+ final int trackSessionId = track.getAudioSessionId();
+
+ // 2. create an AudioRecord to sync off of AudioTrack completion.
+ final int RECORD_TIME_IN_MS = 2000;
+ final int RECORD_ENCODING = AudioFormat.ENCODING_PCM_16BIT;
+ final int RECORD_CHANNEL_MASK = AudioFormat.CHANNEL_IN_STEREO;
+ final int RECORD_SAMPLE_RATE = 44100;
+ record = new AudioRecord.Builder()
+ .setAudioFormat(new AudioFormat.Builder()
+ .setSampleRate(RECORD_SAMPLE_RATE)
+ .setChannelMask(RECORD_CHANNEL_MASK)
+ .setEncoding(RECORD_ENCODING)
+ .build())
+ .build();
+ // AudioRecord creation may have silently failed, check state now
+ assertEquals(TEST_NAME, AudioRecord.STATE_INITIALIZED, record.getState());
+
+ // 3. create a MediaSyncEvent
+ // This MediaSyncEvent checks playback completion of an AudioTrack
+ // (or MediaPlayer, or ToneGenerator) based on its audio session id.
+ //
+ // Note: when synchronizing record from a MediaSyncEvent
+ // (1) You need to be "close" to the end of the associated AudioTrack.
+ // If the track does not complete in 30 seconds, recording begins regardless.
+ // (actual delay limit may vary).
+ //
+ // (2) Track completion may be triggered by pause() as well as stop()
+ // or when a static AudioTrack completes playback.
+ //
+ final int eventType = MediaSyncEvent.SYNC_EVENT_PRESENTATION_COMPLETE;
+ MediaSyncEvent event = MediaSyncEvent.createEvent(eventType)
+ .setAudioSessionId(trackSessionId);
+ assertEquals(TEST_NAME, trackSessionId, event.getAudioSessionId());
+ assertEquals(TEST_NAME, eventType, event.getType());
+
+ // 4. now set the AudioTrack playing and start the recording synchronized
+ track.play();
+ // start recording. Recording state turns to RECORDSTATE_RECORDING immediately
+ // but the data read() only occurs after the AudioTrack completes.
+ record.startRecording(event);
+ assertEquals(TEST_NAME,
+ AudioRecord.RECORDSTATE_RECORDING, record.getRecordingState());
+ long startTime = System.currentTimeMillis();
+
+ // 5. get record data.
+ // For our tests, we could set test duration by timed sleep or by # frames received.
+ // Since we don't know *exactly* when AudioRecord actually begins recording,
+ // we end the test by # frames read.
+ final int numChannels =
+ AudioFormat.channelCountFromInChannelMask(RECORD_CHANNEL_MASK);
+ final int bytesPerSample = AudioFormat.getBytesPerSample(RECORD_ENCODING);
+ final int bytesPerFrame = numChannels * bytesPerSample;
+ // careful about integer overflow in the formula below:
+ final int targetSamples =
+ (int)((long)RECORD_TIME_IN_MS * RECORD_SAMPLE_RATE * numChannels / 1000);
+ final int BUFFER_FRAMES = 512;
+ final int BUFFER_SAMPLES = BUFFER_FRAMES * numChannels;
+
+ // After starting, there is no guarantee when the first frame of data is read.
+ long firstSampleTime = 0;
+ int samplesRead = 0;
+
+ // For 16 bit data, use shorts
+ short[] shortData = new short[BUFFER_SAMPLES];
+ while (samplesRead < targetSamples) {
+ // the first time through, we read a single frame.
+ // this sets the recording anchor position.
+ int amount = samplesRead == 0 ? numChannels :
+ Math.min(BUFFER_SAMPLES, targetSamples - samplesRead);
+ int ret = record.read(shortData, 0, amount);
+ assertEquals(TEST_NAME, amount, ret);
+ if (samplesRead == 0 && ret > 0) {
+ firstSampleTime = System.currentTimeMillis();
+ }
+ samplesRead += ret;
+ // sanity check: elapsed time cannot be more than a second
+ // longer than what we expect.
+ assertTrue(System.currentTimeMillis() - startTime <=
+ PLAYBACK_TIME_IN_MS + RECORD_TIME_IN_MS + 1000);
+ }
+
+ // 6. We've read all the frames, now check the timing.
+ final long endTime = System.currentTimeMillis();
+ //Log.d(TEST_NAME, "first sample time " + (firstSampleTime - startTime)
+ // + " test time " + (endTime - firstSampleTime));
+ //
+ // Verify recording starts within 400 ms of AudioTrack completion (typical 180ms)
+ // Verify recording completes within 50 ms of expected test time (typical 20ms)
+ assertEquals(TEST_NAME, PLAYBACK_TIME_IN_MS, firstSampleTime - startTime, 400);
+ assertEquals(TEST_NAME, RECORD_TIME_IN_MS, endTime - firstSampleTime, 50);
+
+ record.stop();
+ assertEquals(TEST_NAME, AudioRecord.RECORDSTATE_STOPPED, record.getRecordingState());
+ } finally {
+ if (record != null) {
+ record.release();
+ record = null;
+ }
+ if (track != null) {
+ track.release();
+ track = null;
+ }
+ }
+ }
+
private AudioRecord createAudioRecord(
int audioSource, int sampleRateInHz,
int channelConfig, int audioFormat, int bufferSizeInBytes,
boolean auditRecording, boolean isChannelIndex) {
+ final AudioRecord record;
if (auditRecording) {
- return new AudioHelper.AudioRecordAudit(
+ record = new AudioHelper.AudioRecordAudit(
audioSource, sampleRateInHz, channelConfig,
audioFormat, bufferSizeInBytes, isChannelIndex);
} else if (isChannelIndex) {
- return new AudioRecord.Builder()
+ record = new AudioRecord.Builder()
.setAudioFormat(new AudioFormat.Builder()
.setChannelIndexMask(channelConfig)
.setEncoding(audioFormat)
@@ -332,9 +571,23 @@
.setBufferSizeInBytes(bufferSizeInBytes)
.build();
} else {
- return new AudioRecord(audioSource, sampleRateInHz, channelConfig,
+ record = new AudioRecord(audioSource, sampleRateInHz, channelConfig,
audioFormat, bufferSizeInBytes);
}
+
+ // did we get the AudioRecord we expected?
+ final AudioFormat format = record.getFormat();
+ assertEquals(isChannelIndex ? channelConfig : AudioFormat.CHANNEL_INVALID,
+ format.getChannelIndexMask());
+ assertEquals(isChannelIndex ? AudioFormat.CHANNEL_INVALID : channelConfig,
+ format.getChannelMask());
+ assertEquals(audioFormat, format.getEncoding());
+ assertEquals(sampleRateInHz, format.getSampleRate());
+ final int frameSize =
+ format.getChannelCount() * AudioFormat.getBytesPerSample(audioFormat);
+ // our native frame count cannot be smaller than our minimum buffer size request.
+ assertTrue(record.getNativeFrameCount() * frameSize >= bufferSizeInBytes);
+ return record;
}
private void doTest(String reportName, boolean localRecord, boolean customHandler,
diff --git a/tests/tests/media/src/android/media/cts/AudioRecord_BufferSizeTest.java b/tests/tests/media/src/android/media/cts/AudioRecord_BufferSizeTest.java
index e597827..1de6302 100644
--- a/tests/tests/media/src/android/media/cts/AudioRecord_BufferSizeTest.java
+++ b/tests/tests/media/src/android/media/cts/AudioRecord_BufferSizeTest.java
@@ -53,6 +53,11 @@
} catch (Throwable e) {
Log.e(TAG, "Sample rate: " + SAMPLE_RATES_IN_HZ[i], e);
failedSampleRates.add(SAMPLE_RATES_IN_HZ[i]);
+ if (mAudioRecord != null) {
+ // clean up. AudioRecords are in scarce supply.
+ mAudioRecord.release();
+ mAudioRecord = null;
+ }
}
}
assertTrue("Failed sample rates: " + failedSampleRates + " See log for more details.",
@@ -61,21 +66,26 @@
private void record(int sampleRateInHz) {
int bufferSize = AudioRecord.getMinBufferSize(sampleRateInHz, CHANNEL_CONFIG, AUDIO_FORMAT);
- byte[] buffer = new byte[bufferSize];
assertTrue(bufferSize > 0);
createAudioRecord(sampleRateInHz, bufferSize);
- checkRecordingState(AudioRecord.STATE_INITIALIZED);
+ // RecordingState changes are reflected synchronously (no need to poll)
+ assertEquals(AudioRecord.RECORDSTATE_STOPPED, mAudioRecord.getRecordingState());
mAudioRecord.startRecording();
- checkRecordingState(AudioRecord.RECORDSTATE_RECORDING);
+ assertEquals(AudioRecord.RECORDSTATE_RECORDING, mAudioRecord.getRecordingState());
+ // it is preferred to use a short array to read AudioFormat.ENCODING_PCM_16BIT data
+ // but it's ok to read using using a byte array. 16 bit PCM data will be
+ // stored as two bytes, native endian.
+ byte[] buffer = new byte[bufferSize];
assertTrue(mAudioRecord.read(buffer, 0, bufferSize) > 0);
mAudioRecord.stop();
- checkRecordingState(AudioRecord.RECORDSTATE_STOPPED);
+ assertEquals(AudioRecord.RECORDSTATE_STOPPED, mAudioRecord.getRecordingState());
mAudioRecord.release();
+ mAudioRecord = null;
}
private void createAudioRecord(final int sampleRateInHz, final int bufferSize) {
@@ -84,15 +94,6 @@
assertNotNull(mAudioRecord);
}
- private void checkRecordingState(final int state) {
- new PollingCheck() {
- @Override
- protected boolean check() {
- return mAudioRecord.getRecordingState() == state;
- }
- }.run();
- }
-
private boolean hasMicrophone() {
return getContext().getPackageManager().hasSystemFeature(
PackageManager.FEATURE_MICROPHONE);
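A small sketch (not part of the change, names illustrative) of the byte-to-short conversion the comment above alludes to, using the device's native byte order:

// Illustrative sketch only: converts a byte[] read of ENCODING_PCM_16BIT data
// into 16-bit samples, two bytes per sample in native endian order.
import java.nio.ByteBuffer;
import java.nio.ByteOrder;

class PcmConversionSketch {
    static short[] bytesToShorts(byte[] buffer, int validBytes) {
        final short[] samples = new short[validBytes / 2];
        ByteBuffer.wrap(buffer, 0, validBytes)
                .order(ByteOrder.nativeOrder())
                .asShortBuffer()
                .get(samples);
        return samples;
    }
}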
diff --git a/tests/tests/media/src/android/media/cts/AudioTrackTest.java b/tests/tests/media/src/android/media/cts/AudioTrackTest.java
index 0e12f70..f7032b2 100644
--- a/tests/tests/media/src/android/media/cts/AudioTrackTest.java
+++ b/tests/tests/media/src/android/media/cts/AudioTrackTest.java
@@ -23,8 +23,9 @@
import android.media.AudioManager;
import android.media.AudioTimestamp;
import android.media.AudioTrack;
-import android.media.PlaybackSettings;
+import android.media.PlaybackParams;
import android.util.Log;
+
import com.android.cts.util.ReportLog;
import com.android.cts.util.ResultType;
import com.android.cts.util.ResultUnit;
@@ -263,6 +264,116 @@
}
// -----------------------------------------------------------------
+ // AudioTrack construction with Builder
+ // ----------------------------------
+
+ // Test case 1: build AudioTrack with default parameters, test documented default params
+ public void testBuilderDefault() throws Exception {
+ // constants for test
+ final String TEST_NAME = "testBuilderDefault";
+ final int expectedDefaultEncoding = AudioFormat.ENCODING_PCM_16BIT;
+ final int expectedDefaultRate =
+ AudioTrack.getNativeOutputSampleRate(AudioManager.STREAM_MUSIC);
+ final int expectedDefaultChannels = AudioFormat.CHANNEL_OUT_STEREO;
+ // use Builder
+ final int buffSizeInBytes = AudioTrack.getMinBufferSize(
+ expectedDefaultRate, expectedDefaultChannels, expectedDefaultEncoding);
+ final AudioTrack track = new AudioTrack.Builder()
+ .setBufferSizeInBytes(buffSizeInBytes)
+ .build();
+ // save results
+ final int observedState = track.getState();
+ final int observedFormat = track.getAudioFormat();
+ final int observedChannelConf = track.getChannelConfiguration();
+ final int observedRate = track.getSampleRate();
+ // release track before the test exits (either successfully or with an exception)
+ track.release();
+ // compare results
+ assertEquals(TEST_NAME + ": Track initialized", AudioTrack.STATE_INITIALIZED,
+ observedState);
+ assertEquals(TEST_NAME + ": Default track encoding", expectedDefaultEncoding,
+ observedFormat);
+ assertEquals(TEST_NAME + ": Default track channels", expectedDefaultChannels,
+ observedChannelConf);
+ assertEquals(TEST_NAME + ": Default track sample rate", expectedDefaultRate,
+ observedRate);
+ }
+
+ // Test case 2: build AudioTrack with AudioFormat, test it's used
+ public void testBuilderFormat() throws Exception {
+ // constants for test
+ final String TEST_NAME = "testBuilderFormat";
+ final int TEST_RATE = 32000;
+ final int TEST_CHANNELS = AudioFormat.CHANNEL_OUT_STEREO;
+ // use Builder
+ final int buffSizeInBytes = AudioTrack.getMinBufferSize(
+ TEST_RATE, TEST_CHANNELS, AudioFormat.ENCODING_PCM_16BIT);
+ final AudioTrack track = new AudioTrack.Builder()
+ .setAudioAttributes(new AudioAttributes.Builder().build())
+ .setBufferSizeInBytes(buffSizeInBytes)
+ .setAudioFormat(new AudioFormat.Builder()
+ .setChannelMask(TEST_CHANNELS).setSampleRate(TEST_RATE).build())
+ .build();
+ // save results
+ final int observedState = track.getState();
+ final int observedChannelConf = track.getChannelConfiguration();
+ final int observedRate = track.getSampleRate();
+ // release track before the test exits (either successfully or with an exception)
+ track.release();
+ // compare results
+ assertEquals(TEST_NAME + ": Track initialized", AudioTrack.STATE_INITIALIZED,
+ observedState);
+ assertEquals(TEST_NAME + ": Track channels", TEST_CHANNELS, observedChannelConf);
+ assertEquals(TEST_NAME + ": Track sample rate", TEST_RATE, observedRate);
+ }
+
+ // Test case 3: build AudioTrack with session ID, test it's used
+ public void testBuilderSession() throws Exception {
+ // constants for test
+ final String TEST_NAME = "testBuilderSession";
+ // generate a session ID
+ final int expectedSessionId = new AudioManager(getContext()).generateAudioSessionId();
+ // use builder
+ final AudioTrack track = new AudioTrack.Builder()
+ .setSessionId(expectedSessionId)
+ .build();
+ // save results
+ final int observedSessionId = track.getAudioSessionId();
+ // release track before the test exits (either successfully or with an exception)
+ track.release();
+ // compare results
+ assertEquals(TEST_NAME + ": Assigned track session ID", expectedSessionId,
+ observedSessionId);
+ }
+
+ // Test case 4: build AudioTrack with AudioAttributes built from stream type, test it's used
+ public void testBuilderAttributesStream() throws Exception {
+ // constants for test
+ final String TEST_NAME = "testBuilderAttributesStream";
+ // use a stream type documented in AudioAttributes.Builder.setLegacyStreamType(int)
+ final int expectedStreamType = AudioManager.STREAM_ALARM;
+ final int expectedContentType = AudioAttributes.CONTENT_TYPE_SPEECH;
+ final AudioAttributes aa = new AudioAttributes.Builder()
+ .setLegacyStreamType(expectedStreamType)
+ .setContentType(expectedContentType)
+ .build();
+ // use builder
+ final AudioTrack track = new AudioTrack.Builder()
+ .setAudioAttributes(aa)
+ .build();
+ // save results
+ final int observedStreamType = track.getStreamType();
+ // release track before the test exits (either successfully or with an exception)
+ track.release();
+ // compare results
+ assertEquals(TEST_NAME + ": track stream type", expectedStreamType, observedStreamType);
+ // also test content type was preserved in the attributes even though they
+ // were first configured with a legacy stream type
+ assertEquals(TEST_NAME + ": attributes content type", expectedContentType,
+ aa.getContentType());
+ }
+
+ // -----------------------------------------------------------------
// Playback head position
// ----------------------------------
@@ -1293,7 +1404,8 @@
// -------- initialization --------------
int bufferSize = AudioTrack.getMinBufferSize(TEST_SR, TEST_CONF, TEST_FORMAT);
- byte data[] = createSoundDataInByteArray(bufferSize, TEST_SR, 1024);
+ byte data[] = AudioHelper.createSoundDataInByteArray(
+ bufferSize, TEST_SR, 1024 /* frequency */, 0 /* sweep */);
AudioTrack track = new AudioTrack(TEST_STREAM_TYPE, TEST_SR, TEST_CONF, TEST_FORMAT,
bufferSize, TEST_MODE);
// -------- test --------------
@@ -1311,56 +1423,6 @@
track.release();
}
- public static byte[] createSoundDataInByteArray(int bufferSamples, final int sampleRate,
- final double frequency, double sweep) {
- final double rad = 2 * Math.PI * frequency / sampleRate;
- byte[] vai = new byte[bufferSamples];
- sweep = Math.PI * sweep / ((double)sampleRate * vai.length);
- for (int j = 0; j < vai.length; j++) {
- int unsigned = (int)(Math.sin(j * (rad + j * sweep)) * Byte.MAX_VALUE)
- + Byte.MAX_VALUE & 0xFF;
- vai[j] = (byte) unsigned;
- }
- return vai;
- }
-
- public static short[] createSoundDataInShortArray(int bufferSamples, final int sampleRate,
- final double frequency, double sweep) {
- final double rad = 2 * Math.PI * frequency / sampleRate;
- short[] vai = new short[bufferSamples];
- sweep = Math.PI * sweep / ((double)sampleRate * vai.length);
- for (int j = 0; j < vai.length; j++) {
- vai[j] = (short)(Math.sin(j * (rad + j * sweep)) * Short.MAX_VALUE);
- }
- return vai;
- }
-
- public static float[] createSoundDataInFloatArray(int bufferSamples, final int sampleRate,
- final double frequency, double sweep) {
- final double rad = 2 * Math.PI * frequency / sampleRate;
- float[] vaf = new float[bufferSamples];
- sweep = Math.PI * sweep / ((double)sampleRate * vaf.length);
- for (int j = 0; j < vaf.length; j++) {
- vaf[j] = (float)(Math.sin(j * (rad + j * sweep)));
- }
- return vaf;
- }
-
- public static byte[] createSoundDataInByteArray(int bufferSamples, final int sampleRate,
- final double frequency) {
- return createSoundDataInByteArray(bufferSamples, sampleRate, frequency, 0 /*sweep*/);
- }
-
- public static short[] createSoundDataInShortArray(int bufferSamples, final int sampleRate,
- final double frequency) {
- return createSoundDataInShortArray(bufferSamples, sampleRate, frequency, 0 /*sweep*/);
- }
-
- public static float[] createSoundDataInFloatArray(int bufferSamples, final int sampleRate,
- final double frequency) {
- return createSoundDataInFloatArray(bufferSamples, sampleRate, frequency, 0 /*sweep*/);
- }
-
public void testPlayStaticData() throws Exception {
if (!hasAudioOutput()) {
Log.w(TAG,"AUDIO_OUTPUT feature not found. This system might not have a valid "
@@ -1418,7 +1480,7 @@
// only need to write once to the static track
switch (TEST_FORMAT) {
case AudioFormat.ENCODING_PCM_8BIT: {
- byte data[] = createSoundDataInByteArray(
+ byte data[] = AudioHelper.createSoundDataInByteArray(
bufferSamples, TEST_SR,
testFrequency, TEST_SWEEP);
assertEquals(TEST_NAME,
@@ -1426,7 +1488,7 @@
track.write(data, 0 /*offsetInBytes*/, data.length));
} break;
case AudioFormat.ENCODING_PCM_16BIT: {
- short data[] = createSoundDataInShortArray(
+ short data[] = AudioHelper.createSoundDataInShortArray(
bufferSamples, TEST_SR,
testFrequency, TEST_SWEEP);
assertEquals(TEST_NAME,
@@ -1434,7 +1496,7 @@
track.write(data, 0 /*offsetInBytes*/, data.length));
} break;
case AudioFormat.ENCODING_PCM_FLOAT: {
- float data[] = createSoundDataInFloatArray(
+ float data[] = AudioHelper.createSoundDataInFloatArray(
bufferSamples, TEST_SR,
testFrequency, TEST_SWEEP);
assertEquals(TEST_NAME,
@@ -1500,6 +1562,7 @@
};
final int TEST_MODE = AudioTrack.MODE_STREAM;
final int TEST_STREAM_TYPE = AudioManager.STREAM_MUSIC;
+ final float TEST_SWEEP = 0; // sine wave only
for (int TEST_FORMAT : TEST_FORMAT_ARRAY) {
double frequency = 400; // frequency changes for each test
@@ -1541,9 +1604,9 @@
(track.getNativeFrameCount() / buffers) * channelCount;
switch (TEST_FORMAT) {
case AudioFormat.ENCODING_PCM_8BIT: {
- byte data[] = createSoundDataInByteArray(
+ byte data[] = AudioHelper.createSoundDataInByteArray(
sourceSamples, TEST_SR,
- testFrequency);
+ testFrequency, TEST_SWEEP);
while (written < data.length) {
int samples = Math.min(data.length - written, samplesPerWrite);
int ret = track.write(data, written, samples);
@@ -1552,9 +1615,9 @@
}
} break;
case AudioFormat.ENCODING_PCM_16BIT: {
- short data[] = createSoundDataInShortArray(
+ short data[] = AudioHelper.createSoundDataInShortArray(
sourceSamples, TEST_SR,
- testFrequency);
+ testFrequency, TEST_SWEEP);
while (written < data.length) {
int samples = Math.min(data.length - written, samplesPerWrite);
int ret = track.write(data, written, samples);
@@ -1563,9 +1626,9 @@
}
} break;
case AudioFormat.ENCODING_PCM_FLOAT: {
- float data[] = createSoundDataInFloatArray(
+ float data[] = AudioHelper.createSoundDataInFloatArray(
sourceSamples, TEST_SR,
- testFrequency);
+ testFrequency, TEST_SWEEP);
while (written < data.length) {
int samples = Math.min(data.length - written, samplesPerWrite);
int ret = track.write(data, written, samples,
@@ -1608,6 +1671,7 @@
};
final int TEST_MODE = AudioTrack.MODE_STREAM;
final int TEST_STREAM_TYPE = AudioManager.STREAM_MUSIC;
+ final float TEST_SWEEP = 0; // sine wave only
for (int TEST_FORMAT : TEST_FORMAT_ARRAY) {
double frequency = 800; // frequency changes for each test
@@ -1621,10 +1685,19 @@
int bufferSize = 12 * minBufferSize;
int bufferSamples = bufferSize
/ AudioFormat.getBytesPerSample(TEST_FORMAT);
+
+ // create audio track and confirm settings
AudioTrack track = new AudioTrack(TEST_STREAM_TYPE, TEST_SR,
TEST_CONF, TEST_FORMAT, minBufferSize, TEST_MODE);
- assertTrue(TEST_NAME,
- track.getState() == AudioTrack.STATE_INITIALIZED);
+ assertEquals(TEST_NAME + ": state",
+ AudioTrack.STATE_INITIALIZED, track.getState());
+ assertEquals(TEST_NAME + ": sample rate",
+ TEST_SR, track.getSampleRate());
+ assertEquals(TEST_NAME + ": channel mask",
+ TEST_CONF, track.getChannelConfiguration());
+ assertEquals(TEST_NAME + ": encoding",
+ TEST_FORMAT, track.getAudioFormat());
+
ByteBuffer bb = (useDirect == 1)
? ByteBuffer.allocateDirect(bufferSize)
: ByteBuffer.allocate(bufferSize);
@@ -1633,24 +1706,24 @@
// -------- test --------------
switch (TEST_FORMAT) {
case AudioFormat.ENCODING_PCM_8BIT: {
- byte data[] = createSoundDataInByteArray(
+ byte data[] = AudioHelper.createSoundDataInByteArray(
bufferSamples, TEST_SR,
- frequency);
+ frequency, TEST_SWEEP);
bb.put(data);
bb.flip();
} break;
case AudioFormat.ENCODING_PCM_16BIT: {
- short data[] = createSoundDataInShortArray(
+ short data[] = AudioHelper.createSoundDataInShortArray(
bufferSamples, TEST_SR,
- frequency);
+ frequency, TEST_SWEEP);
ShortBuffer sb = bb.asShortBuffer();
sb.put(data);
bb.limit(sb.limit() * 2);
} break;
case AudioFormat.ENCODING_PCM_FLOAT: {
- float data[] = createSoundDataInFloatArray(
+ float data[] = AudioHelper.createSoundDataInFloatArray(
bufferSamples, TEST_SR,
- frequency);
+ frequency, TEST_SWEEP);
FloatBuffer fb = bb.asFloatBuffer();
fb.put(data);
bb.limit(fb.limit() * 4);
@@ -1712,6 +1785,7 @@
AudioTrack.WRITE_BLOCKING,
AudioTrack.WRITE_NON_BLOCKING,
};
+ final float TEST_SWEEP = 0;
for (int TEST_FORMAT : TEST_FORMAT_ARRAY) {
for (int TEST_CONF : TEST_CONF_ARRAY) {
@@ -1743,24 +1817,24 @@
switch (TEST_FORMAT) {
case AudioFormat.ENCODING_PCM_8BIT: {
- byte data[] = createSoundDataInByteArray(
+ byte data[] = AudioHelper.createSoundDataInByteArray(
bufferSamples, TEST_SR,
- frequency);
+ frequency, TEST_SWEEP);
bb.put(data);
bb.flip();
} break;
case AudioFormat.ENCODING_PCM_16BIT: {
- short data[] = createSoundDataInShortArray(
+ short data[] = AudioHelper.createSoundDataInShortArray(
bufferSamples, TEST_SR,
- frequency);
+ frequency, TEST_SWEEP);
ShortBuffer sb = bb.asShortBuffer();
sb.put(data);
bb.limit(sb.limit() * 2);
} break;
case AudioFormat.ENCODING_PCM_FLOAT: {
- float data[] = createSoundDataInFloatArray(
+ float data[] = AudioHelper.createSoundDataInFloatArray(
bufferSamples, TEST_SR,
- frequency);
+ frequency, TEST_SWEEP);
FloatBuffer fb = bb.asFloatBuffer();
fb.put(data);
bb.limit(fb.limit() * 4);
@@ -1970,7 +2044,8 @@
TEST_FORMAT, bufferSizeInBytes, TEST_MODE);
// create byte array and write it
- byte[] vai = createSoundDataInByteArray(bufferSizeInBytes, TEST_SR, 600);
+ byte[] vai = AudioHelper.createSoundDataInByteArray(bufferSizeInBytes, TEST_SR,
+ 600 /* frequency */, 0 /* sweep */);
assertEquals(vai.length, track.write(vai, 0 /* offsetInBytes */, vai.length));
// sweep up test and sweep down test
@@ -2038,7 +2113,8 @@
// create float array and write it
final int sampleCount = frameCount * format.getChannelCount();
- float[] vaf = createSoundDataInFloatArray(sampleCount, TEST_SR, 600);
+ float[] vaf = AudioHelper.createSoundDataInFloatArray(
+ sampleCount, TEST_SR, 600 /* frequency */, 0 /* sweep */);
assertEquals(vaf.length, track.write(vaf, 0 /* offsetInFloats */, vaf.length,
AudioTrack.WRITE_NON_BLOCKING));
@@ -2050,6 +2126,39 @@
{ {1.0f, 0.5f}, {1.0f, 2.0f} }, // pitch by SR conversion (chirp)
};
+ // sanity test that playback params work as expected
+ PlaybackParams params = new PlaybackParams().allowDefaults();
+ assertEquals(TEST_NAME, 1.0f, params.getSpeed());
+ assertEquals(TEST_NAME, 1.0f, params.getPitch());
+ assertEquals(TEST_NAME,
+ params.AUDIO_FALLBACK_MODE_DEFAULT,
+ params.getAudioFallbackMode());
+ track.setPlaybackParams(params); // OK
+ params.setAudioFallbackMode(params.AUDIO_FALLBACK_MODE_FAIL);
+ assertEquals(TEST_NAME,
+ params.AUDIO_FALLBACK_MODE_FAIL, params.getAudioFallbackMode());
+ params.setPitch(0.0f);
+ try {
+ track.setPlaybackParams(params);
+ fail("IllegalArgumentException should be thrown on out of range data");
+ } catch (IllegalArgumentException e) {
+ ; // expect this is invalid
+ }
+ // on failure, the AudioTrack params should not change.
+ PlaybackParams paramCheck = track.getPlaybackParams();
+ assertEquals(TEST_NAME,
+ paramCheck.AUDIO_FALLBACK_MODE_DEFAULT, paramCheck.getAudioFallbackMode());
+ assertEquals(TEST_NAME,
+ 1.0f, paramCheck.getPitch());
+
+ // now try to see if we can do extreme pitch correction that should probably be muted.
+ params.setAudioFallbackMode(params.AUDIO_FALLBACK_MODE_MUTE);
+ assertEquals(TEST_NAME,
+ params.AUDIO_FALLBACK_MODE_MUTE, params.getAudioFallbackMode());
+ params.setPitch(0.1f);
+ track.setPlaybackParams(params); // OK
+
+ // now do our actual playback
final int TEST_TIME_MS = 2000;
final int TEST_DELTA_MS = 100;
final int testSteps = TEST_TIME_MS / TEST_DELTA_MS;
@@ -2062,7 +2171,7 @@
final float speedInc = (speedEnd - speedStart) / testSteps;
final float pitchInc = (pitchEnd - pitchStart) / testSteps;
- PlaybackSettings playbackSettings = new PlaybackSettings()
+ PlaybackParams playbackParams = new PlaybackParams()
.setPitch(pitchStart)
.setSpeed(speedStart)
.allowDefaults();
@@ -2076,23 +2185,23 @@
int anticipatedPosition = track.getPlaybackHeadPosition();
for (int j = 0; j < testSteps; ++j) {
// set playback settings
- final float pitch = playbackSettings.getPitch();
- final float speed = playbackSettings.getSpeed();
+ final float pitch = playbackParams.getPitch();
+ final float speed = playbackParams.getSpeed();
- track.setPlaybackSettings(playbackSettings);
+ track.setPlaybackParams(playbackParams);
// verify that settings have changed
- PlaybackSettings checkSettings = track.getPlaybackSettings();
- assertEquals(TAG, pitch, checkSettings.getPitch());
- assertEquals(TAG, speed, checkSettings.getSpeed());
+ PlaybackParams checkParams = track.getPlaybackParams();
+ assertEquals(TAG, pitch, checkParams.getPitch());
+ assertEquals(TAG, speed, checkParams.getSpeed());
// sleep for playback
Thread.sleep(TEST_DELTA_MS);
// Log.d(TAG, "position[" + j + "] " + track.getPlaybackHeadPosition());
anticipatedPosition +=
- playbackSettings.getSpeed() * TEST_DELTA_MS * TEST_SR / 1000;
- playbackSettings.setPitch(playbackSettings.getPitch() + pitchInc);
- playbackSettings.setSpeed(playbackSettings.getSpeed() + speedInc);
+ playbackParams.getSpeed() * TEST_DELTA_MS * TEST_SR / 1000;
+ playbackParams.setPitch(playbackParams.getPitch() + pitchInc);
+ playbackParams.setSpeed(playbackParams.getSpeed() + speedInc);
}
final int endPosition = track.getPlaybackHeadPosition();
final int tolerance100MsInFrames = 100 * TEST_SR / 1000;
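A minimal sketch (not part of the change; the track argument is an illustrative, already-initialized AudioTrack) of the PlaybackParams API this patch migrates to from PlaybackSettings:

// Illustrative sketch only.
import android.media.AudioTrack;
import android.media.PlaybackParams;

class PlaybackParamsSketch {
    static void playAtHalfSpeed(AudioTrack track) {
        final PlaybackParams params = new PlaybackParams()
                .allowDefaults()
                .setSpeed(0.5f)   // half speed
                .setPitch(1.0f);  // keep the original pitch
        track.setPlaybackParams(params);
        // the values can be read back from the track
        final float speed = track.getPlaybackParams().getSpeed(); // expected 0.5f
    }
}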
diff --git a/tests/tests/media/src/android/media/cts/AudioTrack_ListenerTest.java b/tests/tests/media/src/android/media/cts/AudioTrack_ListenerTest.java
index 353dbcb..e059e36 100644
--- a/tests/tests/media/src/android/media/cts/AudioTrack_ListenerTest.java
+++ b/tests/tests/media/src/android/media/cts/AudioTrack_ListenerTest.java
@@ -119,19 +119,29 @@
listener = new MockOnPlaybackPositionUpdateListener(track);
}
- byte[] vai = AudioTrackTest.createSoundDataInByteArray(bufferSizeInBytes, TEST_SR, 1024);
+ byte[] vai = AudioHelper.createSoundDataInByteArray(
+ bufferSizeInBytes, TEST_SR, 1024 /* frequency */, 0 /* sweep */);
int markerPeriods = Math.max(3, mFrameCount * markerPeriodsPerSecond / TEST_SR);
mMarkerPeriodInFrames = mFrameCount / markerPeriods;
markerPeriods = mFrameCount / mMarkerPeriodInFrames; // recalculate due to round-down
mMarkerPosition = mMarkerPeriodInFrames;
+
+ // check that we can get and set notification marker position
+ assertEquals(0, track.getNotificationMarkerPosition());
assertEquals(AudioTrack.SUCCESS,
track.setNotificationMarkerPosition(mMarkerPosition));
+ assertEquals(mMarkerPosition, track.getNotificationMarkerPosition());
+
int updatePeriods = Math.max(3, mFrameCount * periodsPerSecond / TEST_SR);
final int updatePeriodInFrames = mFrameCount / updatePeriods;
updatePeriods = mFrameCount / updatePeriodInFrames; // recalculate due to round-down
+
+ // we set the notification period before running for better period positional accuracy.
+ // check that we can get and set notification periods
+ assertEquals(0, track.getPositionNotificationPeriod());
assertEquals(AudioTrack.SUCCESS,
track.setPositionNotificationPeriod(updatePeriodInFrames));
- // set NotificationPeriod before running to ensure better period positional accuracy.
+ assertEquals(updatePeriodInFrames, track.getPositionNotificationPeriod());
if (mode == AudioTrack.MODE_STATIC && TEST_LOOP_FACTOR > 1) {
track.setLoopPoints(0, vai.length, TEST_LOOP_FACTOR - 1);
diff --git a/tests/tests/media/src/android/media/cts/EncodeDecodeTest.java b/tests/tests/media/src/android/media/cts/EncodeDecodeTest.java
index 5152d98..40934f5 100644
--- a/tests/tests/media/src/android/media/cts/EncodeDecodeTest.java
+++ b/tests/tests/media/src/android/media/cts/EncodeDecodeTest.java
@@ -458,7 +458,7 @@
encoder.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
if (inSurf != null) {
Log.d(TAG, "using persistent surface");
- encoder.usePersistentInputSurface(inputSurface.getSurface());
+ encoder.setInputSurface(inputSurface.getSurface());
inputSurface.updateSize(mWidth, mHeight);
} else {
inputSurface = new InputSurface(encoder.createInputSurface());
diff --git a/tests/tests/media/src/android/media/cts/MediaCodecCapabilitiesTest.java b/tests/tests/media/src/android/media/cts/MediaCodecCapabilitiesTest.java
index 159d13f..daf55a7 100644
--- a/tests/tests/media/src/android/media/cts/MediaCodecCapabilitiesTest.java
+++ b/tests/tests/media/src/android/media/cts/MediaCodecCapabilitiesTest.java
@@ -39,6 +39,7 @@
import java.util.HashSet;
import java.util.Set;
import java.util.Arrays;
+import java.util.Vector;
/**
* Basic sanity test of data returned by MediaCodecCapabilities.
@@ -528,4 +529,84 @@
MediaUtils.skipTest("no non-tunneled/non-secure video decoders found");
}
}
+
+ private static MediaFormat createMinFormat(String mime, VideoCapabilities vcaps, int color) {
+ int minWidth = vcaps.getSupportedWidths().getLower();
+ int minHeight = vcaps.getSupportedHeightsFor(minWidth).getLower();
+ int minBitrate = vcaps.getBitrateRange().getLower();
+
+ MediaFormat format = MediaFormat.createVideoFormat(mime, minWidth, minHeight);
+ format.setInteger(MediaFormat.KEY_COLOR_FORMAT, color);
+ format.setInteger(MediaFormat.KEY_BIT_RATE, minBitrate);
+ format.setInteger(MediaFormat.KEY_FRAME_RATE, 10);
+ format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 10);
+ return format;
+ }
+
+ private static int getActualMax(
+ boolean isEncoder, String name, String mime, CodecCapabilities caps, int max) {
+ int flag = isEncoder ? MediaCodec.CONFIGURE_FLAG_ENCODE : 0;
+ MediaFormat format =
+ createMinFormat(mime, caps.getVideoCapabilities(), caps.colorFormats[0]);
+ Vector<MediaCodec> codecs = new Vector<MediaCodec>();
+ for (int i = 0; i < max; ++i) {
+ try {
+ Log.d(TAG, "Create codec " + name + " #" + i);
+ MediaCodec codec = MediaCodec.createByCodecName(name);
+ codec.configure(format, null, null, flag);
+ codec.start();
+ codecs.add(codec);
+ } catch (IllegalArgumentException e) {
+ fail("Got unexpected IllegalArgumentException " + e.getMessage());
+ } catch (IOException e) {
+ fail("Got unexpected IOException " + e.getMessage());
+ } catch (MediaCodec.CodecException e) {
+ // ERROR_INSUFFICIENT_RESOURCE is expected as the test keeps creating codecs.
+ // But other exceptions should be treated as failures.
+ if (e.getErrorCode() == MediaCodec.CodecException.ERROR_INSUFFICIENT_RESOURCE) {
+ Log.d(TAG, "Got CodecException with ERROR_INSUFFICIENT_RESOURCE.");
+ break;
+ } else {
+ fail("Unexpected CodecException " + e.getDiagnosticInfo());
+ }
+ }
+ }
+ int actualMax = codecs.size();
+ for (int i = 0; i < codecs.size(); ++i) {
+ codecs.get(i).release();
+ }
+ return actualMax;
+ }
+
+ private static boolean shouldTestActual(CodecCapabilities caps) {
+ if (caps.getVideoCapabilities() == null) {
+ // TODO: test audio codecs.
+ return false;
+ }
+ return true;
+ }
+
+ public void testGetMaxSupportedInstances() {
+ MediaCodecList allCodecs = new MediaCodecList(MediaCodecList.ALL_CODECS);
+ for (MediaCodecInfo info : allCodecs.getCodecInfos()) {
+ Log.d(TAG, "codec: " + info.getName());
+ Log.d(TAG, " isEncoder = " + info.isEncoder());
+
+ String[] types = info.getSupportedTypes();
+ for (int j = 0; j < types.length; ++j) {
+ Log.d(TAG, "calling getCapabilitiesForType " + types[j]);
+ CodecCapabilities caps = info.getCapabilitiesForType(types[j]);
+ int max = caps.getMaxSupportedInstances();
+ Log.d(TAG, "getMaxSupportedInstances returns " + max);
+ assertTrue(max > 0);
+
+ if (shouldTestActual(caps)) {
+ int actualMax = getActualMax(
+ info.isEncoder(), info.getName(), types[j], caps, max + 1);
+ Log.d(TAG, "actualMax " + actualMax + " vs reported max " + max);
+ assertTrue(actualMax >= (int)(max * 0.9));
+ }
+ }
+ }
+ }
}
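A short sketch (not part of the change; method name and return convention are illustrative) of how a client might read the concurrent-instance limit that testGetMaxSupportedInstances() probes:

// Illustrative sketch only.
import android.media.MediaCodecInfo;
import android.media.MediaCodecInfo.CodecCapabilities;
import android.media.MediaCodecList;

class MaxInstancesSketch {
    static int reportedMaxDecoderInstances(String mime) {
        final MediaCodecList allCodecs = new MediaCodecList(MediaCodecList.ALL_CODECS);
        for (MediaCodecInfo info : allCodecs.getCodecInfos()) {
            if (info.isEncoder()) {
                continue;
            }
            for (String type : info.getSupportedTypes()) {
                if (type.equalsIgnoreCase(mime)) {
                    CodecCapabilities caps = info.getCapabilitiesForType(type);
                    return caps.getMaxSupportedInstances();
                }
            }
        }
        return 0; // no decoder advertises this mime type
    }
}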
diff --git a/tests/tests/media/src/android/media/cts/MediaDrmMockTest.java b/tests/tests/media/src/android/media/cts/MediaDrmMockTest.java
index 52fd395..a197cca 100644
--- a/tests/tests/media/src/android/media/cts/MediaDrmMockTest.java
+++ b/tests/tests/media/src/android/media/cts/MediaDrmMockTest.java
@@ -17,10 +17,10 @@
package android.media.cts;
import android.media.MediaDrm;
-import android.media.MediaDrm.ProvisionRequest;
+import android.media.MediaDrm.CryptoSession;
import android.media.MediaDrm.KeyRequest;
import android.media.MediaDrm.KeyStatus;
-import android.media.MediaDrm.CryptoSession;
+import android.media.MediaDrm.ProvisionRequest;
import android.media.MediaDrmException;
import android.media.NotProvisionedException;
import android.media.ResourceBusyException;
@@ -245,7 +245,7 @@
optionalParameters);
assertTrue(Arrays.equals(request.getData(), testRequest));
assertTrue(request.getDefaultUrl().equals(testDefaultUrl));
- assertEquals(request.getRequestType(), MediaDrm.REQUEST_TYPE_INITIAL);
+ assertEquals(request.getRequestType(), MediaDrm.KeyRequest.REQUEST_TYPE_INITIAL);
assertTrue(Arrays.equals(initData, md.getPropertyByteArray("mock-initdata")));
assertTrue(mimeType.equals(md.getPropertyString("mock-mimetype")));
@@ -278,7 +278,7 @@
null);
assertTrue(Arrays.equals(request.getData(), testRequest));
assertTrue(request.getDefaultUrl().equals(testDefaultUrl));
- assertEquals(request.getRequestType(), MediaDrm.REQUEST_TYPE_INITIAL);
+ assertEquals(request.getRequestType(), MediaDrm.KeyRequest.REQUEST_TYPE_INITIAL);
assertTrue(Arrays.equals(initData, md.getPropertyByteArray("mock-initdata")));
assertTrue(mimeType.equals(md.getPropertyString("mock-mimetype")));
@@ -310,7 +310,7 @@
null);
assertTrue(Arrays.equals(request.getData(), testRequest));
assertTrue(request.getDefaultUrl().equals(testDefaultUrl));
- assertEquals(request.getRequestType(), MediaDrm.REQUEST_TYPE_RENEWAL);
+ assertEquals(request.getRequestType(), MediaDrm.KeyRequest.REQUEST_TYPE_RENEWAL);
assertTrue(Arrays.equals(initData, md.getPropertyByteArray("mock-initdata")));
assertTrue(mimeType.equals(md.getPropertyString("mock-mimetype")));
@@ -340,7 +340,7 @@
null);
assertTrue(Arrays.equals(request.getData(), testRequest));
assertTrue(request.getDefaultUrl().equals(testDefaultUrl));
- assertEquals(request.getRequestType(), MediaDrm.REQUEST_TYPE_RELEASE);
+ assertEquals(request.getRequestType(), MediaDrm.KeyRequest.REQUEST_TYPE_RELEASE);
assertTrue(mimeType.equals(md.getPropertyString("mock-mimetype")));
assertTrue(md.getPropertyString("mock-keytype").equals("2"));
@@ -862,7 +862,7 @@
assertTrue(mGotEvent);
}
- public void testKeysChange() throws Exception {
+ public void testKeyStatusChange() throws Exception {
if (!isMockPluginInstalled()) {
return;
}
@@ -893,30 +893,30 @@
synchronized(mLock) {
mLock.notify();
- mMediaDrm.setOnKeysChangeListener(new MediaDrm.OnKeysChangeListener() {
+ mMediaDrm.setOnKeyStatusChangeListener(new MediaDrm.OnKeyStatusChangeListener() {
@Override
- public void onKeysChange(MediaDrm md, byte[] sessionId,
+ public void onKeyStatusChange(MediaDrm md, byte[] sessionId,
List<KeyStatus> keyInformation, boolean hasNewUsableKey) {
synchronized(mLock) {
- Log.d(TAG,"testKeysChange.onKeysChange");
+ Log.d(TAG,"testKeyStatusChange.onKeyStatusChange");
assertTrue(md == mMediaDrm);
assertTrue(Arrays.equals(sessionId, expected_sessionId));
try {
KeyStatus keyStatus = keyInformation.get(0);
assertTrue(Arrays.equals(keyStatus.getKeyId(), "key1".getBytes()));
- assertTrue(keyStatus.getStatusCode() == MediaDrm.KEY_STATUS_USABLE);
+ assertTrue(keyStatus.getStatusCode() == MediaDrm.KeyStatus.STATUS_USABLE);
keyStatus = keyInformation.get(1);
assertTrue(Arrays.equals(keyStatus.getKeyId(), "key2".getBytes()));
- assertTrue(keyStatus.getStatusCode() == MediaDrm.KEY_STATUS_EXPIRED);
+ assertTrue(keyStatus.getStatusCode() == MediaDrm.KeyStatus.STATUS_EXPIRED);
keyStatus = keyInformation.get(2);
assertTrue(Arrays.equals(keyStatus.getKeyId(), "key3".getBytes()));
- assertTrue(keyStatus.getStatusCode() == MediaDrm.KEY_STATUS_OUTPUT_NOT_ALLOWED);
+ assertTrue(keyStatus.getStatusCode() == MediaDrm.KeyStatus.STATUS_OUTPUT_NOT_ALLOWED);
keyStatus = keyInformation.get(3);
assertTrue(Arrays.equals(keyStatus.getKeyId(), "key4".getBytes()));
- assertTrue(keyStatus.getStatusCode() == MediaDrm.KEY_STATUS_PENDING);
+ assertTrue(keyStatus.getStatusCode() == MediaDrm.KeyStatus.STATUS_PENDING);
keyStatus = keyInformation.get(4);
assertTrue(Arrays.equals(keyStatus.getKeyId(), "key5".getBytes()));
- assertTrue(keyStatus.getStatusCode() == MediaDrm.KEY_STATUS_INTERNAL_ERROR);
+ assertTrue(keyStatus.getStatusCode() == MediaDrm.KeyStatus.STATUS_INTERNAL_ERROR);
assertTrue(hasNewUsableKey);
mGotEvent = true;
} catch (IndexOutOfBoundsException e) {
diff --git a/tests/tests/media/src/android/media/cts/MediaExtractorTest.java b/tests/tests/media/src/android/media/cts/MediaExtractorTest.java
index 7ca498f..9db54ff 100644
--- a/tests/tests/media/src/android/media/cts/MediaExtractorTest.java
+++ b/tests/tests/media/src/android/media/cts/MediaExtractorTest.java
@@ -83,7 +83,7 @@
public void testExtractorFailsIfMediaDataSourceReturnsAnError() throws Exception {
TestMediaDataSource dataSource = getDataSourceFor(R.raw.testvideo);
- dataSource.returnFromReadAt(-1);
+ dataSource.returnFromReadAt(-2);
try {
mExtractor.setDataSource(dataSource);
fail("Expected IOException.");
diff --git a/tests/tests/media/src/android/media/cts/MediaMetadataRetrieverTest.java b/tests/tests/media/src/android/media/cts/MediaMetadataRetrieverTest.java
index 622c0ec..562656b 100644
--- a/tests/tests/media/src/android/media/cts/MediaMetadataRetrieverTest.java
+++ b/tests/tests/media/src/android/media/cts/MediaMetadataRetrieverTest.java
@@ -146,7 +146,7 @@
public void testRetrieveFailsIfMediaDataSourceReturnsAnError() throws Exception {
TestMediaDataSource dataSource = setDataSourceCallback(R.raw.testvideo);
- dataSource.returnFromReadAt(-1);
+ dataSource.returnFromReadAt(-2);
assertTrue(mRetriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_TITLE) == null);
}
}
diff --git a/tests/tests/media/src/android/media/cts/MediaMuxerTest.java b/tests/tests/media/src/android/media/cts/MediaMuxerTest.java
index 67eeca0..0f664a5 100644
--- a/tests/tests/media/src/android/media/cts/MediaMuxerTest.java
+++ b/tests/tests/media/src/android/media/cts/MediaMuxerTest.java
@@ -104,6 +104,8 @@
fail("should throw IllegalStateException.");
} catch (IllegalStateException e) {
// expected
+ } finally {
+ muxer.release();
}
// Throws exception b/c 2 video tracks were added.
@@ -115,6 +117,8 @@
fail("should throw IllegalStateException.");
} catch (IllegalStateException e) {
// expected
+ } finally {
+ muxer.release();
}
// Throws exception b/c 2 audio tracks were added.
@@ -125,6 +129,8 @@
fail("should throw IllegalStateException.");
} catch (IllegalStateException e) {
// expected
+ } finally {
+ muxer.release();
}
// Throws exception b/c 3 tracks were added.
@@ -137,6 +143,8 @@
fail("should throw IllegalStateException.");
} catch (IllegalStateException e) {
// expected
+ } finally {
+ muxer.release();
}
// Throws exception b/c no track was added.
@@ -146,6 +154,8 @@
fail("should throw IllegalStateException.");
} catch (IllegalStateException e) {
// expected
+ } finally {
+ muxer.release();
}
// Throws exception b/c a wrong format.
@@ -155,6 +165,8 @@
fail("should throw IllegalStateException.");
} catch (IllegalStateException e) {
// expected
+ } finally {
+ muxer.release();
}
new File(outputFile).delete();
}
diff --git a/tests/tests/media/src/android/media/cts/MediaPlayerTest.java b/tests/tests/media/src/android/media/cts/MediaPlayerTest.java
index 661b815..18cd353 100644
--- a/tests/tests/media/src/android/media/cts/MediaPlayerTest.java
+++ b/tests/tests/media/src/android/media/cts/MediaPlayerTest.java
@@ -26,10 +26,13 @@
import android.media.MediaDataSource;
import android.media.MediaExtractor;
import android.media.MediaFormat;
+import android.media.MediaMetadataRetriever;
import android.media.MediaPlayer;
import android.media.MediaPlayer.OnErrorListener;
import android.media.MediaRecorder;
-import android.media.MediaMetadataRetriever;
+import android.media.MediaTimestamp;
+import android.media.PlaybackParams;
+import android.media.SyncParams;
import android.media.TimedText;
import android.media.audiofx.AudioEffect;
import android.media.audiofx.Visualizer;
@@ -68,6 +71,8 @@
private static final int RECORDED_VIDEO_WIDTH = 176;
private static final int RECORDED_VIDEO_HEIGHT = 144;
private static final long RECORDED_DURATION_MS = 3000;
+ private static final float FLOAT_TOLERANCE = .0001f;
+
private Vector<Integer> mTimedTextTrackIndex = new Vector<Integer>();
private int mSelectedTimedTextIndex;
private Monitor mOnTimedTextCalled = new Monitor();
@@ -840,15 +845,22 @@
mMediaPlayer.setDisplay(mActivity.getSurfaceHolder());
mMediaPlayer.prepare();
+ SyncParams sync = new SyncParams().allowDefaults();
+ mMediaPlayer.setSyncParams(sync);
+ sync = mMediaPlayer.getSyncParams();
+
float[] rates = { 0.25f, 0.5f, 1.0f, 2.0f };
for (float playbackRate : rates) {
mMediaPlayer.seekTo(0);
Thread.sleep(1000);
int playTime = 4000; // The testing clip is about 10 second long.
- mMediaPlayer.setPlaybackRate(playbackRate,
- MediaPlayer.PLAYBACK_RATE_AUDIO_MODE_RESAMPLE);
+ mMediaPlayer.setPlaybackParams(new PlaybackParams().setSpeed(playbackRate));
mMediaPlayer.start();
Thread.sleep(playTime);
+ PlaybackParams pbp = mMediaPlayer.getPlaybackParams();
+ assertEquals(
+ playbackRate, pbp.getSpeed(),
+ FLOAT_TOLERANCE + playbackRate * sync.getTolerance());
assertTrue("MediaPlayer should still be playing", mMediaPlayer.isPlaying());
int playedMediaDurationMs = mMediaPlayer.getCurrentPosition();
@@ -858,10 +870,58 @@
+ ", play time is " + playTime + " vs expected " + playedMediaDurationMs);
}
mMediaPlayer.pause();
+ pbp = mMediaPlayer.getPlaybackParams();
+ assertEquals(0.f, pbp.getSpeed(), FLOAT_TOLERANCE);
}
mMediaPlayer.stop();
}
+ public void testGetTimestamp() throws Exception {
+ final int toleranceUs = 100000;
+ final float playbackRate = 1.0f;
+ if (!checkLoadResource(
+ R.raw.video_480x360_mp4_h264_1000kbps_30fps_aac_stereo_128kbps_44100hz)) {
+ return; // skip
+ }
+
+ mMediaPlayer.setDisplay(mActivity.getSurfaceHolder());
+ mMediaPlayer.prepare();
+ mMediaPlayer.start();
+ mMediaPlayer.setPlaybackParams(new PlaybackParams().setSpeed(playbackRate));
+ Thread.sleep(SLEEP_TIME); // let player get into stable state.
+ long nt1 = System.nanoTime();
+ MediaTimestamp ts1 = mMediaPlayer.getTimestamp();
+ long nt2 = System.nanoTime();
+ assertTrue("Media player should return a valid time stamp", ts1 != null);
+ assertEquals("MediaPlayer had error in clockRate " + ts1.getMediaClockRate(),
+ playbackRate, ts1.getMediaClockRate(), 0.001f);
+ assertTrue("The nanoTime of Media timestamp should be taken when getTimestamp is called.",
+ nt1 <= ts1.nanoTime && ts1.nanoTime <= nt2);
+
+ mMediaPlayer.pause();
+ ts1 = mMediaPlayer.getTimestamp();
+ assertTrue("Media player should return a valid time stamp", ts1 != null);
+ assertTrue("Media player should have play rate of 0.0f when paused",
+ ts1.getMediaClockRate() == 0.0f);
+
+ mMediaPlayer.seekTo(0);
+ mMediaPlayer.start();
+ Thread.sleep(SLEEP_TIME); // let player get into stable state.
+ int playTime = 4000; // The testing clip is about 10 second long.
+ ts1 = mMediaPlayer.getTimestamp();
+ assertTrue("Media player should return a valid time stamp", ts1 != null);
+ Thread.sleep(playTime);
+ MediaTimestamp ts2 = mMediaPlayer.getTimestamp();
+ assertTrue("Media player should return a valid time stamp", ts2 != null);
+ assertTrue("The clockRate should not be changed.",
+ ts1.getMediaClockRate() == ts2.getMediaClockRate());
+ assertEquals("MediaPlayer had error in timestamp.",
+ ts1.getAnchorMediaTimeUs() + (long)(playTime * ts1.getMediaClockRate() * 1000),
+ ts2.getAnchorMediaTimeUs(), toleranceUs);
+
+ mMediaPlayer.stop();
+ }
+
public void testLocalVideo_MP4_H264_480x360_500kbps_25fps_AAC_Stereo_128kbps_44110Hz()
throws Exception {
playVideoTest(
@@ -1589,7 +1649,7 @@
mMediaPlayer.setDataSource(dataSource);
mMediaPlayer.prepare();
- dataSource.returnFromReadAt(-1);
+ dataSource.returnFromReadAt(-2);
mMediaPlayer.start();
assertTrue(mOnErrorCalled.waitForSignal());
}
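A minimal sketch (not part of the change, helper name illustrative) of the anchor arithmetic behind the final getTimestamp() assertion above: after playing for playTimeMs at the reported clock rate, the anchor media time is expected to advance by roughly playTimeMs * clockRate milliseconds:

// Illustrative sketch only.
import android.media.MediaTimestamp;

class TimestampSketch {
    static long expectedAnchorMediaTimeUs(MediaTimestamp before, long playTimeMs) {
        // anchor media time is in microseconds; clock rate is the playback speed
        return before.getAnchorMediaTimeUs()
                + (long) (playTimeMs * before.getMediaClockRate() * 1000);
    }
}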
diff --git a/tests/tests/media/src/android/media/cts/MediaRecorderTest.java b/tests/tests/media/src/android/media/cts/MediaRecorderTest.java
index 78b5cfd..b6ee1db 100644
--- a/tests/tests/media/src/android/media/cts/MediaRecorderTest.java
+++ b/tests/tests/media/src/android/media/cts/MediaRecorderTest.java
@@ -18,10 +18,16 @@
import android.content.pm.PackageManager;
import android.cts.util.MediaUtils;
+import android.graphics.Canvas;
+import android.graphics.Color;
+import android.graphics.Paint;
import android.hardware.Camera;
+import android.media.EncoderCapabilities;
+import android.media.MediaCodec;
import android.media.MediaFormat;
import android.media.MediaMetadataRetriever;
import android.media.MediaRecorder;
+import android.media.EncoderCapabilities.VideoEncoderCap;
import android.media.MediaRecorder.OnErrorListener;
import android.media.MediaRecorder.OnInfoListener;
import android.media.MediaMetadataRetriever;
@@ -38,6 +44,7 @@
import java.io.FileOutputStream;
import java.lang.InterruptedException;
import java.lang.Runnable;
+import java.util.List;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
@@ -62,6 +69,12 @@
private static final int MAX_DURATION_MSEC = 2000;
private static final float LATITUDE = 0.0000f;
private static final float LONGITUDE = -180.0f;
+ private static final int NORMAL_FPS = 30;
+ private static final int TIME_LAPSE_FPS = 5;
+ private static final int SLOW_MOTION_FPS = 120;
+ private static final List<VideoEncoderCap> mVideoEncoders =
+ EncoderCapabilities.getVideoEncoders();
+
private boolean mOnInfoCalled;
private boolean mOnErrorCalled;
private File mOutFile;
@@ -138,8 +151,10 @@
@Override
protected void tearDown() throws Exception {
- mMediaRecorder.release();
- mMediaRecorder = null;
+ if (mMediaRecorder != null) {
+ mMediaRecorder.release();
+ mMediaRecorder = null;
+ }
if (mOutFile != null && mOutFile.exists()) {
mOutFile.delete();
}
@@ -477,6 +492,441 @@
assertFalse(mOnErrorCalled);
}
+ private void setupRecorder(String filename, boolean useSurface, boolean hasAudio)
+ throws Exception {
+ int codec = MediaRecorder.VideoEncoder.H264;
+ int frameRate = getMaxFrameRateForCodec(codec);
+ if (mMediaRecorder == null) {
+ mMediaRecorder = new MediaRecorder();
+ }
+
+ if (!useSurface) {
+ mCamera = Camera.open(0);
+ Camera.Parameters params = mCamera.getParameters();
+ frameRate = params.getPreviewFrameRate();
+ mCamera.unlock();
+ mMediaRecorder.setCamera(mCamera);
+ mMediaRecorder.setPreviewDisplay(mActivity.getSurfaceHolder().getSurface());
+ }
+
+ mMediaRecorder.setVideoSource(useSurface ?
+ MediaRecorder.VideoSource.SURFACE : MediaRecorder.VideoSource.CAMERA);
+
+ if (hasAudio) {
+ mMediaRecorder.setAudioSource(MediaRecorder.AudioSource.MIC);
+ }
+
+ mMediaRecorder.setOutputFormat(MediaRecorder.OutputFormat.THREE_GPP);
+ mMediaRecorder.setOutputFile(filename);
+
+ mMediaRecorder.setVideoEncoder(MediaRecorder.VideoEncoder.H264);
+ mMediaRecorder.setVideoFrameRate(frameRate);
+ mMediaRecorder.setVideoSize(VIDEO_WIDTH, VIDEO_HEIGHT);
+
+ if (hasAudio) {
+ mMediaRecorder.setAudioEncoder(MediaRecorder.AudioEncoder.AMR_NB);
+ }
+ }
+
+ private Surface tryGetSurface(boolean shouldThrow) throws Exception {
+ Surface surface = null;
+ try {
+ surface = mMediaRecorder.getSurface();
+ assertFalse("failed to throw IllegalStateException", shouldThrow);
+ } catch (IllegalStateException e) {
+ assertTrue("threw unexpected exception: " + e, shouldThrow);
+ }
+ return surface;
+ }
+
+ private boolean validateGetSurface(boolean useSurface) {
+ Log.v(TAG,"validateGetSurface, useSurface=" + useSurface);
+ if (!useSurface && !hasCamera()) {
+ // pass if testing camera source but no hardware
+ return true;
+ }
+ Surface surface = null;
+ boolean success = true;
+ try {
+ setupRecorder(OUTPUT_PATH, useSurface, false /* hasAudio */);
+
+ /* Test: getSurface() before prepare()
+ * should throw IllegalStateException
+ */
+ surface = tryGetSurface(true /* shouldThrow */);
+
+ mMediaRecorder.prepare();
+
+ /* Test: getSurface() after prepare()
+ * should succeed for surface source
+ * should fail for camera source
+ */
+ surface = tryGetSurface(!useSurface);
+
+ mMediaRecorder.start();
+
+ /* Test: getSurface() after start()
+ * should succeed for surface source
+ * should fail for camera source
+ */
+ surface = tryGetSurface(!useSurface);
+
+ try {
+ mMediaRecorder.stop();
+ } catch (Exception e) {
+ // stop() could fail if the recording is empty, as we didn't render anything.
+ // ignore any failure in stop, we just want it stopped.
+ }
+
+ /* Test: getSurface() after stop()
+ * should throw IllegalStateException
+ */
+ surface = tryGetSurface(true /* shouldThrow */);
+ } catch (Exception e) {
+ Log.d(TAG, e.toString());
+ success = false;
+ } finally {
+ // reset to clear states, as stop() might have failed
+ mMediaRecorder.reset();
+
+ if (mCamera != null) {
+ mCamera.release();
+ mCamera = null;
+ }
+ if (surface != null) {
+ surface.release();
+ surface = null;
+ }
+ }
+
+ return success;
+ }
+
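+ // Helper that always expects setInputSurface() to throw: passing null here substitutes a
+ // plain (non-persistent) Surface, which should be rejected with IllegalArgumentException,
+ // while a valid persistent surface passed in the wrong recorder state should instead
+ // produce IllegalStateException.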
+ private void trySetInputSurface(Surface surface) throws Exception {
+ boolean testBadArgument = (surface == null);
+ try {
+ mMediaRecorder.setInputSurface(testBadArgument ? new Surface() : surface);
+ fail("failed to throw exception");
+ } catch (IllegalArgumentException e) {
+ // OK only if testing bad arg
+ assertTrue("threw unexpected exception: " + e, testBadArgument);
+ } catch (IllegalStateException e) {
+ // OK only if testing error case other than bad arg
+ assertFalse("threw unexpected exception: " + e, testBadArgument);
+ }
+ }
+
+ private boolean validatePersistentSurface(boolean errorCase) {
+ Log.v(TAG, "validatePersistentSurface, errorCase=" + errorCase);
+
+ Surface surface = MediaCodec.createPersistentInputSurface();
+ if (surface == null) {
+ return false;
+ }
+ Surface dummy = null;
+
+ boolean success = true;
+ try {
+ setupRecorder(OUTPUT_PATH, true /* useSurface */, false /* hasAudio */);
+
+ if (errorCase) {
+ /*
+ * Test: should throw if called with non-persistent surface
+ */
+ trySetInputSurface(null);
+ } else {
+ /*
+ * Test: should succeed if called with a persistent surface before prepare()
+ */
+ mMediaRecorder.setInputSurface(surface);
+ }
+
+ /*
+ * Test: getSurface() should fail before prepare
+ */
+ dummy = tryGetSurface(true /* shouldThrow */);
+
+ mMediaRecorder.prepare();
+
+ /*
+ * Test: setInputSurface() should fail after prepare
+ */
+ trySetInputSurface(surface);
+
+ /*
+ * Test: getSurface() should fail if setInputSurface() succeeded
+ */
+ dummy = tryGetSurface(!errorCase /* shouldThrow */);
+
+ mMediaRecorder.start();
+
+ /*
+ * Test: setInputSurface() should fail after start
+ */
+ trySetInputSurface(surface);
+
+ /*
+ * Test: getSurface() should fail if setInputSurface() succeeded
+ */
+ dummy = tryGetSurface(!errorCase /* shouldThrow */);
+
+ try {
+ mMediaRecorder.stop();
+ } catch (Exception e) {
+ // stop() could fail if the recording is empty, as we didn't render anything.
+ // ignore any failure in stop, we just want it stopped.
+ }
+
+ /*
+ * Test: getSurface() should fail after stop
+ */
+ dummy = tryGetSurface(true /* shouldThrow */);
+ } catch (Exception e) {
+ Log.d(TAG, e.toString());
+ success = false;
+ } finally {
+ // reset to clear states, as stop() might have failed
+ mMediaRecorder.reset();
+
+ if (mCamera != null) {
+ mCamera.release();
+ mCamera = null;
+ }
+ if (surface != null) {
+ surface.release();
+ surface = null;
+ }
+ if (dummy != null) {
+ dummy.release();
+ dummy = null;
+ }
+ }
+
+ return success;
+ }
+
+ public void testGetSurfaceApi() {
+ if (!hasH264()) {
+ MediaUtils.skipTest("no codecs");
+ return;
+ }
+
+ if (hasCamera()) {
+ // validate getSurface() with CAMERA source
+ assertTrue(validateGetSurface(false /* useSurface */));
+ }
+
+ // validate getSurface() with SURFACE source
+ assertTrue(validateGetSurface(true /* useSurface */));
+ }
+
+ public void testPersistentSurfaceApi() {
+ if (!hasH264()) {
+ MediaUtils.skipTest("no codecs");
+ return;
+ }
+
+ // test valid use case
+ assertTrue(validatePersistentSurface(false /* errorCase */));
+
+ // test invalid use case
+ assertTrue(validatePersistentSurface(true /* errorCase */));
+ }
+
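+ // Query EncoderCapabilities for the codec's maximum supported frame rate, capped at
+ // NORMAL_FPS; recordFromSurface() derives its per-frame sleep (1000 / frameRate ms)
+ // from this value when no explicit capture rate is given.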
+ private static int getMaxFrameRateForCodec(int codec) {
+ for (VideoEncoderCap cap : mVideoEncoders) {
+ if (cap.mCodec == codec) {
+ return cap.mMaxFrameRate < NORMAL_FPS ? cap.mMaxFrameRate : NORMAL_FPS;
+ }
+ }
+ fail("didn't find max FPS for codec");
+ return -1;
+ }
+
+ private boolean recordFromSurface(
+ String filename,
+ int captureRate,
+ boolean hasAudio,
+ Surface persistentSurface) {
+ Log.v(TAG, "recordFromSurface");
+ Surface surface = null;
+ try {
+ setupRecorder(filename, true /* useSurface */, hasAudio);
+
+ int sleepTimeMs;
+ if (captureRate > 0) {
+ mMediaRecorder.setCaptureRate(captureRate);
+ sleepTimeMs = 1000 / captureRate;
+ } else {
+ sleepTimeMs = 1000 / getMaxFrameRateForCodec(MediaRecorder.VideoEncoder.H264);
+ }
+
+ if (persistentSurface != null) {
+ Log.v(TAG, "using persistent surface");
+ surface = persistentSurface;
+ mMediaRecorder.setInputSurface(surface);
+ }
+
+ mMediaRecorder.prepare();
+
+ if (persistentSurface == null) {
+ surface = mMediaRecorder.getSurface();
+ }
+
+ Paint paint = new Paint();
+ paint.setTextSize(16);
+ paint.setColor(Color.RED);
+ int i;
+
+ /* Test: draw 10 frames at 30fps before start
+ * these should be dropped and should not cause a malformed stream.
+ */
+ for (i = 0; i < 10; i++) {
+ Canvas canvas = surface.lockCanvas(null);
+ int background = (i * 255 / 99);
+ canvas.drawARGB(255, background, background, background);
+ String text = "Frame #" + i;
+ canvas.drawText(text, 50, 50, paint);
+ surface.unlockCanvasAndPost(canvas);
+ Thread.sleep(sleepTimeMs);
+ }
+
+ Log.v(TAG, "start");
+ mMediaRecorder.start();
+
+ /* Test: draw another 90 frames at 30fps after start */
+ for (i = 10; i < 100; i++) {
+ Canvas canvas = surface.lockCanvas(null);
+ int background = (i * 255 / 99);
+ canvas.drawARGB(255, background, background, background);
+ String text = "Frame #" + i;
+ canvas.drawText(text, 50, 50, paint);
+ surface.unlockCanvasAndPost(canvas);
+ Thread.sleep(sleepTimeMs);
+ }
+
+ Log.v(TAG, "stop");
+ mMediaRecorder.stop();
+ } catch (Exception e) {
+ Log.v(TAG, "record video failed: " + e.toString());
+ return false;
+ } finally {
+ // We need to test persistent surface across multiple MediaRecorder
+ // instances, so must destroy mMediaRecorder here.
+ if (mMediaRecorder != null) {
+ mMediaRecorder.release();
+ mMediaRecorder = null;
+ }
+
+ // release surface if not using persistent surface
+ if (persistentSurface == null && surface != null) {
+ surface.release();
+ surface = null;
+ }
+ }
+ return true;
+ }
+
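+ // For timelapse/slow-motion clips the recorder is expected to store the requested capture
+ // rate in the output file; verify it round-trips through MediaMetadataRetriever via
+ // METADATA_KEY_CAPTURE_FRAMERATE.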
+ private boolean checkCaptureFps(String filename, int captureRate) {
+ MediaMetadataRetriever retriever = new MediaMetadataRetriever();
+
+ retriever.setDataSource(filename);
+
+ // verify capture rate meta key is present and correct
+ String captureFps = retriever.extractMetadata(
+ MediaMetadataRetriever.METADATA_KEY_CAPTURE_FRAMERATE);
+
+ if (captureFps == null) {
+ Log.d(TAG, "METADATA_KEY_CAPTURE_FRAMERATE is missing");
+ return false;
+ }
+
+ if (Math.abs(Float.parseFloat(captureFps) - captureRate) > 0.001) {
+ Log.d(TAG, "METADATA_KEY_CAPTURE_FRAMERATE is incorrect: "
+ + captureFps + " vs. " + captureRate);
+ return false;
+ }
+
+ // verify other meta keys here if necessary
+ return true;
+ }
+
+ private boolean testRecordFromSurface(boolean persistent, boolean timelapse) {
+ Log.v(TAG, "testRecordFromSurface: " +
+ "persistent=" + persistent + ", timelapse=" + timelapse);
+ boolean success = false;
+ Surface surface = null;
+ int noOfFailure = 0;
+ try {
+ if (persistent) {
+ surface = MediaCodec.createPersistentInputSurface();
+ }
+
+ for (int k = 0; k < 2; k++) {
+ String filename = (k == 0) ? OUTPUT_PATH : OUTPUT_PATH2;
+ boolean hasAudio = false;
+ int captureRate = 0;
+
+ if (timelapse) {
+ // if timelapse/slow-mo, k chooses between low/high capture fps
+ captureRate = (k == 0) ? TIME_LAPSE_FPS : SLOW_MOTION_FPS;
+ } else {
+ // otherwise k chooses between no-audio and audio
+ hasAudio = (k == 0) ? false : true;
+ }
+
+ if (hasAudio && (!hasMicrophone() || !hasAmrNb())) {
+ // audio test waived if no audio support
+ continue;
+ }
+
+ Log.v(TAG, "testRecordFromSurface - round " + k);
+ success = recordFromSurface(filename, captureRate, hasAudio, surface);
+ if (success) {
+ checkTracksAndDuration(0, true /* hasVideo */, hasAudio, filename);
+
+ // verify capture fps meta key
+ if (timelapse && !checkCaptureFps(filename, captureRate)) {
+ noOfFailure++;
+ }
+ }
+ if (!success) {
+ noOfFailure++;
+ }
+ }
+ } catch (Exception e) {
+ Log.v(TAG, e.toString());
+ noOfFailure++;
+ } finally {
+ if (surface != null) {
+ Log.v(TAG, "releasing persistent surface");
+ surface.release();
+ surface = null;
+ }
+ }
+ return (noOfFailure == 0);
+ }
+
+ // Test recording from surface source with/without audio
+ public void testSurfaceRecording() {
+ assertTrue(testRecordFromSurface(false /* persistent */, false /* timelapse */));
+ }
+
+ // Test recording from persistent surface source with/without audio
+ public void testPersistentSurfaceRecording() {
+ assertTrue(testRecordFromSurface(true /* persistent */, false /* timelapse */));
+ }
+
+ // Test timelapse recording from surface without audio
+ public void testSurfaceRecordingTimeLapse() {
+ assertTrue(testRecordFromSurface(false /* persistent */, true /* timelapse */));
+ }
+
+ // Test timelapse recording from persistent surface without audio
+ public void testPersistentSurfaceRecordingTimeLapse() {
+ assertTrue(testRecordFromSurface(true /* persistent */, true /* timelapse */));
+ }
+
private void recordMedia(long maxFileSize, File outFile) throws Exception {
mMediaRecorder.setMaxFileSize(maxFileSize);
mMediaRecorder.prepare();
diff --git a/tests/tests/media/src/android/media/cts/MediaSyncTest.java b/tests/tests/media/src/android/media/cts/MediaSyncTest.java
index 50e12da..6f9e2a2 100644
--- a/tests/tests/media/src/android/media/cts/MediaSyncTest.java
+++ b/tests/tests/media/src/android/media/cts/MediaSyncTest.java
@@ -31,6 +31,8 @@
import android.media.MediaFormat;
import android.media.MediaSync;
import android.media.MediaTimestamp;
+import android.media.PlaybackParams;
+import android.media.SyncParams;
import android.test.ActivityInstrumentationTestCase2;
import android.util.Log;
import android.view.Surface;
@@ -60,6 +62,7 @@
R.raw.video_480x360_mp4_h264_1350kbps_30fps_aac_stereo_192kbps_44100hz;
private final int APPLICATION_AUDIO_PERIOD_MS = 200;
private final int TEST_MAX_SPEED = 2;
+ private static final float FLOAT_TOLERANCE = .00001f;
private Context mContext;
private Resources mResources;
@@ -151,12 +154,12 @@
}
/**
- * Tests setPlaybackRate is handled correctly for wrong rate.
+ * Tests setPlaybackParams is handled correctly for wrong rate.
*/
- public void testSetPlaybackRateFail() throws InterruptedException {
+ public void testSetPlaybackParamsFail() throws InterruptedException {
final float rate = -1.0f;
try {
- mMediaSync.setPlaybackRate(rate, MediaSync.PLAYBACK_RATE_AUDIO_MODE_RESAMPLE);
+ mMediaSync.setPlaybackParams(new PlaybackParams().setSpeed(rate));
fail("playback rate " + rate + " is not handled correctly");
} catch (IllegalArgumentException e) {
}
@@ -170,7 +173,7 @@
mMediaSync.setAudioTrack(mAudioTrack);
try {
- mMediaSync.setPlaybackRate(rate, MediaSync.PLAYBACK_RATE_AUDIO_MODE_RESAMPLE);
+ mMediaSync.setPlaybackParams(new PlaybackParams().setSpeed(rate));
fail("With audio track set, playback rate " + rate
+ " is not handled correctly");
} catch (IllegalArgumentException e) {
@@ -178,13 +181,15 @@
}
/**
- * Tests setPlaybackRate is handled correctly for good rate without audio track set.
+ * Tests setPlaybackParams is handled correctly for good rate without audio track set.
* The case for good rate with audio track set is tested in testPlaybackRate*.
*/
- public void testSetPlaybackRateSucceed() throws InterruptedException {
+ public void testSetPlaybackParamsSucceed() throws InterruptedException {
final float rate = (float)TEST_MAX_SPEED;
try {
- mMediaSync.setPlaybackRate(rate, MediaSync.PLAYBACK_RATE_AUDIO_MODE_RESAMPLE);
+ mMediaSync.setPlaybackParams(new PlaybackParams().setSpeed(rate));
+ PlaybackParams pbp = mMediaSync.getPlaybackParams();
+ assertEquals(rate, pbp.getSpeed(), FLOAT_TOLERANCE);
} catch (IllegalArgumentException e) {
fail("playback rate " + rate + " is not handled correctly");
}
@@ -201,6 +206,9 @@
}
}
+ private PlaybackParams PAUSED_RATE = new PlaybackParams().setSpeed(0.f);
+ private PlaybackParams NORMAL_RATE = new PlaybackParams().setSpeed(1.f);
+
private boolean runCheckAudioBuffer(int inputResourceId, int timeOutMs) {
final int NUM_LOOPS = 10;
final Object condition = new Object();
@@ -233,7 +241,7 @@
}
}, null);
- mMediaSync.setPlaybackRate(1.0f, MediaSync.PLAYBACK_RATE_AUDIO_MODE_RESAMPLE);
+ mMediaSync.setPlaybackParams(NORMAL_RATE);
synchronized (condition) {
mDecoderAudio.start();
@@ -247,6 +255,75 @@
}
/**
+ * Tests flush.
+ */
+ public void testFlush() throws InterruptedException {
+ final int timeOutMs = 5000;
+ boolean completed = runFlush(INPUT_RESOURCE_ID, timeOutMs);
+ if (!completed) {
+ throw new RuntimeException("timed out waiting for flush");
+ }
+ }
+
+ private boolean runFlush(int inputResourceId, int timeOutMs) {
+ final int INDEX_BEFORE_FLUSH = 1;
+ final int INDEX_AFTER_FLUSH = 2;
+ final int BUFFER_SIZE = 1024;
+ final int[] returnedIndex = new int[1];
+ final Object condition = new Object();
+
+ returnedIndex[0] = -1;
+
+ mHasAudio = true;
+ if (mDecoderAudio.setup(inputResourceId, null, Long.MAX_VALUE) == false) {
+ return true;
+ }
+
+ // get audio track.
+ mAudioTrack = mDecoderAudio.getAudioTrack();
+
+ mMediaSync.setAudioTrack(mAudioTrack);
+
+ mMediaSync.setCallback(new MediaSync.Callback() {
+ @Override
+ public void onAudioBufferConsumed(
+ MediaSync sync, ByteBuffer byteBuffer, int bufferIndex) {
+ synchronized (condition) {
+ if (returnedIndex[0] == -1) {
+ returnedIndex[0] = bufferIndex;
+ condition.notify();
+ }
+ }
+ }
+ }, null);
+
+ mMediaSync.setOnErrorListener(new MediaSync.OnErrorListener() {
+ @Override
+ public void onError(MediaSync sync, int what, int extra) {
+ fail("got error from media sync (" + what + ", " + extra + ")");
+ }
+ }, null);
+
+ mMediaSync.setPlaybackParams(PAUSED_RATE);
+
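+ // While paused, queue one buffer, flush it away, then queue a second buffer. Only the
+ // buffer queued after the flush should ever reach onAudioBufferConsumed; the check below
+ // confirms the first returned index is INDEX_AFTER_FLUSH.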
+ ByteBuffer buffer1 = ByteBuffer.allocate(BUFFER_SIZE);
+ ByteBuffer buffer2 = ByteBuffer.allocate(BUFFER_SIZE);
+ mMediaSync.queueAudio(buffer1, INDEX_BEFORE_FLUSH, 0 /* presentationTimeUs */);
+ mMediaSync.flush();
+ mMediaSync.queueAudio(buffer2, INDEX_AFTER_FLUSH, 0 /* presentationTimeUs */);
+
+ synchronized (condition) {
+ mMediaSync.setPlaybackParams(NORMAL_RATE);
+
+ try {
+ condition.wait(timeOutMs);
+ } catch (InterruptedException e) {
+ }
+ return (returnedIndex[0] == INDEX_AFTER_FLUSH);
+ }
+ }
+
+ /**
* Tests playing back audio successfully.
*/
public void testPlayVideo() throws InterruptedException {
@@ -369,7 +446,11 @@
mHasAudio = true;
}
- mMediaSync.setPlaybackRate(playbackRate, MediaSync.PLAYBACK_RATE_AUDIO_MODE_RESAMPLE);
+ SyncParams sync = new SyncParams().allowDefaults();
+ mMediaSync.setSyncParams(sync);
+ sync = mMediaSync.getSyncParams();
+
+ mMediaSync.setPlaybackParams(new PlaybackParams().setSpeed(playbackRate));
synchronized (conditionFirstAudioBuffer) {
if (video) {
@@ -419,7 +500,10 @@
+ ", play time is " + playTimeUs + " vs expected " + mediaDurationUs,
mediaDurationUs,
playTimeUs * playbackRate,
- mediaDurationUs * PLAYBACK_RATE_TOLERANCE_PERCENT / 100
+ // sync.getTolerance() is MediaSync's tolerance of the playback rate, whereas
+ // PLAYBACK_RATE_TOLERANCE_PERCENT / 100 is our test's tolerance.
+ // We need to add both to get an upper bound for the allowable error.
+ mediaDurationUs * (sync.getTolerance() + PLAYBACK_RATE_TOLERANCE_PERCENT / 100)
+ TIME_MEASUREMENT_TOLERANCE_US);
}
@@ -629,7 +713,6 @@
mMediaSync.queueAudio(
outputByteBuffer,
index,
- info.size,
info.presentationTimeUs);
} else {
codec.releaseOutputBuffer(index, info.presentationTimeUs * 1000);
diff --git a/tests/tests/media/src/android/media/cts/ParamsTest.java b/tests/tests/media/src/android/media/cts/ParamsTest.java
new file mode 100644
index 0000000..4e42004
--- /dev/null
+++ b/tests/tests/media/src/android/media/cts/ParamsTest.java
@@ -0,0 +1,351 @@
+/*
+ * Copyright 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.cts;
+
+import com.android.cts.media.R;
+
+import android.media.PlaybackParams;
+import android.media.SyncParams;
+import android.test.AndroidTestCase;
+
+/**
+ * General Params tests.
+ *
+ * In particular, check Params objects' behavior.
+ */
+public class ParamsTest extends AndroidTestCase {
+ private static final String TAG = "ParamsTest";
+ private static final float FLOAT_TOLERANCE = .00001f;
+ private static final float MAX_DEFAULT_TOLERANCE = 1/24.f;
+
+ public void testSyncParamsConstants() {
+ assertEquals(0, SyncParams.SYNC_SOURCE_DEFAULT);
+ assertEquals(1, SyncParams.SYNC_SOURCE_SYSTEM_CLOCK);
+ assertEquals(2, SyncParams.SYNC_SOURCE_AUDIO);
+ assertEquals(3, SyncParams.SYNC_SOURCE_VSYNC);
+
+ assertEquals(0, SyncParams.AUDIO_ADJUST_MODE_DEFAULT);
+ assertEquals(1, SyncParams.AUDIO_ADJUST_MODE_STRETCH);
+ assertEquals(2, SyncParams.AUDIO_ADJUST_MODE_RESAMPLE);
+ }
+
+ public void testSyncParamsDefaults() {
+ SyncParams p = new SyncParams();
+ try { fail("got " + p.getAudioAdjustMode()); } catch (IllegalStateException e) {}
+ try { fail("got " + p.getSyncSource()); } catch (IllegalStateException e) {}
+ try { fail("got " + p.getTolerance()); } catch (IllegalStateException e) {}
+ try { fail("got " + p.getFrameRate()); } catch (IllegalStateException e) {}
+
+ SyncParams q = p.allowDefaults();
+ assertSame(p, q);
+ assertEquals(p.AUDIO_ADJUST_MODE_DEFAULT, p.getAudioAdjustMode());
+ assertEquals(p.SYNC_SOURCE_DEFAULT, p.getSyncSource());
+ assertTrue(p.getTolerance() >= 0.f
+ && p.getTolerance() < MAX_DEFAULT_TOLERANCE + FLOAT_TOLERANCE);
+ try { fail("got " + p.getFrameRate()); } catch (IllegalStateException e) {}
+ }
+
+ public void testSyncParamsAudioAdjustMode() {
+ // setting this cannot fail
+ SyncParams p = new SyncParams();
+ for (int i : new int[] {
+ SyncParams.AUDIO_ADJUST_MODE_STRETCH,
+ SyncParams.AUDIO_ADJUST_MODE_RESAMPLE,
+ -1 /* invalid */}) {
+ SyncParams q = p.setAudioAdjustMode(i); // verify both initial set and update
+ assertSame(p, q);
+ assertEquals(i, p.getAudioAdjustMode());
+ try { fail("got " + p.getSyncSource()); } catch (IllegalStateException e) {}
+ try { fail("got " + p.getTolerance()); } catch (IllegalStateException e) {}
+ try { fail("got " + p.getFrameRate()); } catch (IllegalStateException e) {}
+ }
+ }
+
+ public void testSyncParamsSyncSource() {
+ // setting this cannot fail
+ SyncParams p = new SyncParams();
+ for (int i : new int[] {
+ SyncParams.SYNC_SOURCE_SYSTEM_CLOCK,
+ SyncParams.SYNC_SOURCE_AUDIO,
+ -1 /* invalid */}) {
+ SyncParams q = p.setSyncSource(i); // verify both initial set and update
+ assertSame(p, q);
+ try { fail("got " + p.getAudioAdjustMode()); } catch (IllegalStateException e) {}
+ assertEquals(i, p.getSyncSource());
+ try { fail("got " + p.getTolerance()); } catch (IllegalStateException e) {}
+ try { fail("got " + p.getFrameRate()); } catch (IllegalStateException e) {}
+ }
+ }
+
+ public void testSyncParamsTolerance() {
+ // this can fail on values not in [0, 1)
+
+ // test good values
+ SyncParams p = new SyncParams();
+ float lastValue = 2.f; /* some initial value to avoid compile error */
+ for (float f : new float[] { 0.f, .1f, .9999f }) {
+ SyncParams q = p.setTolerance(f); // verify both initial set and update
+ assertSame(p, q);
+ try { fail("got " + p.getAudioAdjustMode()); } catch (IllegalStateException e) {}
+ try { fail("got " + p.getSyncSource()); } catch (IllegalStateException e) {}
+ assertEquals(f, p.getTolerance(), FLOAT_TOLERANCE);
+ try { fail("got " + p.getFrameRate()); } catch (IllegalStateException e) {}
+ lastValue = f;
+ }
+
+ // test bad values - these should have no effect
+ boolean update = true;
+ for (float f : new float[] { -.0001f, 1.f }) {
+ try {
+ p.setTolerance(f);
+ fail("set tolerance to " + f);
+ } catch (IllegalArgumentException e) {}
+ try { fail("got " + p.getAudioAdjustMode()); } catch (IllegalStateException e) {}
+ try { fail("got " + p.getSyncSource()); } catch (IllegalStateException e) {}
+ if (update) {
+ // if updating, last value should remain
+ assertEquals(lastValue, p.getTolerance(), FLOAT_TOLERANCE);
+ } else {
+ // otherwise, it should remain undefined
+ try { fail("got " + p.getTolerance()); } catch (IllegalStateException e) {}
+ }
+ try { fail("got " + p.getFrameRate()); } catch (IllegalStateException e) {}
+
+ // no longer updating in subsequent iterations
+ p = new SyncParams();
+ update = false;
+ }
+ }
+
+ public void testSyncParamsFrameRate() {
+ // setting this cannot fail, but negative values may be normalized to an unspecified negative value
+ SyncParams p = new SyncParams();
+ for (float f : new float[] { 0.f, .0001f, 30.f, 300.f, -.0001f, -1.f }) {
+ SyncParams q = p.setFrameRate(f);
+ assertSame(p, q);
+ try { fail("got " + p.getAudioAdjustMode()); } catch (IllegalStateException e) {}
+ try { fail("got " + p.getSyncSource()); } catch (IllegalStateException e) {}
+ try { fail("got " + p.getTolerance()); } catch (IllegalStateException e) {}
+ if (f >= 0) {
+ assertEquals(f, p.getFrameRate(), FLOAT_TOLERANCE);
+ } else {
+ assertTrue(p.getFrameRate() < 0.f);
+ }
+ }
+ }
+
+ public void testSyncParamsMultipleSettings() {
+ {
+ SyncParams p = new SyncParams();
+ p.setAudioAdjustMode(p.AUDIO_ADJUST_MODE_STRETCH);
+ SyncParams q = p.setTolerance(.5f);
+ assertSame(p, q);
+
+ assertEquals(p.AUDIO_ADJUST_MODE_STRETCH, p.getAudioAdjustMode());
+ try { fail("got " + p.getSyncSource()); } catch (IllegalStateException e) {}
+ assertEquals(.5f, p.getTolerance(), FLOAT_TOLERANCE);
+ try { fail("got " + p.getFrameRate()); } catch (IllegalStateException e) {}
+
+ // allowDefaults should not change set values
+ q = p.allowDefaults();
+ assertSame(p, q);
+
+ assertEquals(p.AUDIO_ADJUST_MODE_STRETCH, p.getAudioAdjustMode());
+ assertEquals(p.SYNC_SOURCE_DEFAULT, p.getSyncSource());
+ assertEquals(.5f, p.getTolerance(), FLOAT_TOLERANCE);
+ try { fail("got " + p.getFrameRate()); } catch (IllegalStateException e) {}
+ }
+
+ {
+ SyncParams p = new SyncParams();
+ p.setSyncSource(p.SYNC_SOURCE_VSYNC);
+ SyncParams q = p.setFrameRate(25.f);
+ assertSame(p, q);
+
+ try { fail("got " + p.getAudioAdjustMode()); } catch (IllegalStateException e) {}
+ assertEquals(p.SYNC_SOURCE_VSYNC, p.getSyncSource());
+ try { fail("got " + p.getTolerance()); } catch (IllegalStateException e) {}
+ assertEquals(25.f, p.getFrameRate(), FLOAT_TOLERANCE);
+
+ // allowDefaults should not change set values
+ q = p.allowDefaults();
+ assertSame(p, q);
+
+ assertEquals(p.AUDIO_ADJUST_MODE_DEFAULT, p.getAudioAdjustMode());
+ assertEquals(p.SYNC_SOURCE_VSYNC, p.getSyncSource());
+ assertTrue(p.getTolerance() >= 0.f
+ && p.getTolerance() < MAX_DEFAULT_TOLERANCE + FLOAT_TOLERANCE);
+ assertEquals(25.f, p.getFrameRate(), FLOAT_TOLERANCE);
+ }
+ }
+
+ public void testPlaybackParamsConstants() {
+ assertEquals(0, PlaybackParams.AUDIO_STRETCH_MODE_DEFAULT);
+ assertEquals(1, PlaybackParams.AUDIO_STRETCH_MODE_VOICE);
+
+ assertEquals(0, PlaybackParams.AUDIO_FALLBACK_MODE_DEFAULT);
+ assertEquals(1, PlaybackParams.AUDIO_FALLBACK_MODE_MUTE);
+ assertEquals(2, PlaybackParams.AUDIO_FALLBACK_MODE_FAIL);
+ }
+
+ public void testPlaybackParamsDefaults() {
+ PlaybackParams p = new PlaybackParams();
+ try { fail("got " + p.getAudioFallbackMode()); } catch (IllegalStateException e) {}
+ try { fail("got " + p.getAudioStretchMode()); } catch (IllegalStateException e) {}
+ try { fail("got " + p.getPitch()); } catch (IllegalStateException e) {}
+ try { fail("got " + p.getSpeed()); } catch (IllegalStateException e) {}
+
+ PlaybackParams q = p.allowDefaults();
+ assertSame(p, q);
+ assertEquals(p.AUDIO_FALLBACK_MODE_DEFAULT, p.getAudioFallbackMode());
+ assertEquals(p.AUDIO_STRETCH_MODE_DEFAULT, p.getAudioStretchMode());
+ assertEquals(1.f, p.getPitch(), FLOAT_TOLERANCE);
+ assertEquals(1.f, p.getSpeed(), FLOAT_TOLERANCE);
+ }
+
+ public void testPlaybackParamsAudioFallbackMode() {
+ // setting this cannot fail
+ PlaybackParams p = new PlaybackParams();
+ for (int i : new int[] {
+ PlaybackParams.AUDIO_FALLBACK_MODE_MUTE,
+ PlaybackParams.AUDIO_FALLBACK_MODE_FAIL,
+ -1 /* invalid */}) {
+ PlaybackParams q = p.setAudioFallbackMode(i); // verify both initial set and update
+ assertSame(p, q);
+ assertEquals(i, p.getAudioFallbackMode());
+ try { fail("got " + p.getAudioStretchMode()); } catch (IllegalStateException e) {}
+ try { fail("got " + p.getPitch()); } catch (IllegalStateException e) {}
+ try { fail("got " + p.getSpeed()); } catch (IllegalStateException e) {}
+ }
+ }
+
+ public void testPlaybackParamsAudioStretchMode() {
+ // setting this cannot fail
+ PlaybackParams p = new PlaybackParams();
+ for (int i : new int[] {
+ PlaybackParams.AUDIO_STRETCH_MODE_DEFAULT,
+ PlaybackParams.AUDIO_STRETCH_MODE_VOICE,
+ -1 /* invalid */}) {
+ PlaybackParams q = p.setAudioStretchMode(i); // verify both initial set and update
+ assertSame(p, q);
+ try { fail("got " + p.getAudioFallbackMode()); } catch (IllegalStateException e) {}
+ assertEquals(i, p.getAudioStretchMode());
+ try { fail("got " + p.getPitch()); } catch (IllegalStateException e) {}
+ try { fail("got " + p.getSpeed()); } catch (IllegalStateException e) {}
+ }
+ }
+
+ public void testPlaybackParamsPitch() {
+ // this can fail on values not in [0, Inf)
+
+ // test good values
+ PlaybackParams p = new PlaybackParams();
+ float lastValue = 2.f; /* some initial value to avoid compile error */
+ for (float f : new float[] { 0.f, .1f, 9999.f }) {
+ PlaybackParams q = p.setPitch(f); // verify both initial set and update
+ assertSame(p, q);
+ try { fail("got " + p.getAudioFallbackMode()); } catch (IllegalStateException e) {}
+ try { fail("got " + p.getAudioStretchMode()); } catch (IllegalStateException e) {}
+ assertEquals(f, p.getPitch(), FLOAT_TOLERANCE);
+ try { fail("got " + p.getSpeed()); } catch (IllegalStateException e) {}
+ lastValue = f;
+ }
+
+ // test bad values - these should have no effect
+ boolean update = true;
+ for (float f : new float[] { -.0001f, -1.f }) {
+ try {
+ p.setPitch(f);
+ fail("set pitch to " + f);
+ } catch (IllegalArgumentException e) {}
+ try { fail("got " + p.getAudioFallbackMode()); } catch (IllegalStateException e) {}
+ try { fail("got " + p.getAudioStretchMode()); } catch (IllegalStateException e) {}
+ if (update) {
+ // if updating, last value should remain
+ assertEquals(lastValue, p.getPitch(), FLOAT_TOLERANCE);
+ } else {
+ // otherwise, it should remain undefined
+ try { fail("got " + p.getPitch()); } catch (IllegalStateException e) {}
+ }
+ try { fail("got " + p.getSpeed()); } catch (IllegalStateException e) {}
+
+ // no longer updating in subsequent iterations
+ p = new PlaybackParams();
+ update = false;
+ }
+ }
+
+ public void testPlaybackParamsSpeed() {
+ // setting this cannot fail
+ PlaybackParams p = new PlaybackParams();
+ for (float f : new float[] { 0.f, .0001f, 30.f, 300.f, -.0001f, -1.f, -300.f }) {
+ PlaybackParams q = p.setSpeed(f);
+ assertSame(p, q);
+ try { fail("got " + p.getAudioFallbackMode()); } catch (IllegalStateException e) {}
+ try { fail("got " + p.getAudioStretchMode()); } catch (IllegalStateException e) {}
+ try { fail("got " + p.getPitch()); } catch (IllegalStateException e) {}
+ assertEquals(f, p.getSpeed(), FLOAT_TOLERANCE);
+ }
+ }
+
+ public void testPlaybackParamsMultipleSettings() {
+ {
+ PlaybackParams p = new PlaybackParams();
+ p.setAudioFallbackMode(p.AUDIO_FALLBACK_MODE_MUTE);
+ PlaybackParams q = p.setPitch(.5f);
+ assertSame(p, q);
+
+ assertEquals(p.AUDIO_FALLBACK_MODE_MUTE, p.getAudioFallbackMode());
+ try { fail("got " + p.getAudioStretchMode()); } catch (IllegalStateException e) {}
+ assertEquals(.5f, p.getPitch(), FLOAT_TOLERANCE);
+ try { fail("got " + p.getSpeed()); } catch (IllegalStateException e) {}
+
+ // allowDefaults should not change set values
+ q = p.allowDefaults();
+ assertSame(p, q);
+
+ assertEquals(p.AUDIO_FALLBACK_MODE_MUTE, p.getAudioFallbackMode());
+ assertEquals(p.AUDIO_STRETCH_MODE_DEFAULT, p.getAudioStretchMode());
+ assertEquals(.5f, p.getPitch(), FLOAT_TOLERANCE);
+ assertEquals(1.f, p.getSpeed(), FLOAT_TOLERANCE);
+ }
+
+ {
+ PlaybackParams p = new PlaybackParams();
+ p.setAudioStretchMode(p.AUDIO_STRETCH_MODE_VOICE);
+ PlaybackParams q = p.setSpeed(25.f);
+ assertSame(p, q);
+
+ try { fail("got " + p.getAudioFallbackMode()); } catch (IllegalStateException e) {}
+ assertEquals(p.AUDIO_STRETCH_MODE_VOICE, p.getAudioStretchMode());
+ try { fail("got " + p.getPitch()); } catch (IllegalStateException e) {}
+ assertEquals(25.f, p.getSpeed(), FLOAT_TOLERANCE);
+
+ // allowDefaults should not change set values
+ q = p.allowDefaults();
+ assertSame(p, q);
+
+ assertEquals(p.AUDIO_FALLBACK_MODE_DEFAULT, p.getAudioFallbackMode());
+ assertEquals(p.AUDIO_STRETCH_MODE_VOICE, p.getAudioStretchMode());
+ assertEquals(1.f, p.getPitch(), FLOAT_TOLERANCE);
+ assertEquals(25.f, p.getSpeed(), FLOAT_TOLERANCE);
+ }
+ }
+
+
+
+}
diff --git a/tests/tests/media/src/android/media/cts/ResourceManagerStubActivity.java b/tests/tests/media/src/android/media/cts/ResourceManagerStubActivity.java
new file mode 100644
index 0000000..214ced4
--- /dev/null
+++ b/tests/tests/media/src/android/media/cts/ResourceManagerStubActivity.java
@@ -0,0 +1,77 @@
+/*
+ * Copyright 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package android.media.cts;
+
+import android.app.Activity;
+import android.content.Context;
+import android.content.Intent;
+import android.os.Bundle;
+import android.util.Log;
+import junit.framework.Assert;
+
+public class ResourceManagerStubActivity extends Activity {
+ private static final String TAG = "ResourceManagerStubActivity";
+ private final Object mFinishEvent = new Object();
+ private int[] mRequestCodes = {0, 1};
+ private boolean[] mResults = {false, false};
+ private int mNumResults = 0;
+
+ @Override
+ protected void onCreate(Bundle savedInstanceState) {
+ super.onCreate(savedInstanceState);
+ }
+
+ @Override
+ protected void onActivityResult(int requestCode, int resultCode, Intent data) {
+ Log.d(TAG, "Activity " + requestCode + " finished.");
+ mResults[requestCode] = (resultCode == RESULT_OK);
+ if (++mNumResults == mResults.length) {
+ synchronized (mFinishEvent) {
+ mFinishEvent.notify();
+ }
+ }
+ }
+
+ public boolean testReclaimResource() throws InterruptedException {
+ Thread thread = new Thread() {
+ @Override
+ public void run() {
+ try {
+ Context context = getApplicationContext();
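+ // Reclaim scenario: Activity1 tries to saturate the available codec instances and keeps
+ // using them from the background; Activity2 then requests a single codec, which should
+ // make the media resource manager reclaim an instance from Activity1. Each activity
+ // reports RESULT_OK only if it observed the expected behavior.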
+ Intent intent1 = new Intent(context, ResourceManagerTestActivity1.class);
+ startActivityForResult(intent1, mRequestCodes[0]);
+ Thread.sleep(2000); // wait for process to launch.
+
+ Intent intent2 = new Intent(context, ResourceManagerTestActivity2.class);
+ startActivityForResult(intent2, mRequestCodes[1]);
+
+ synchronized (mFinishEvent) {
+ mFinishEvent.wait();
+ }
+ } catch(Exception e) {
+ Log.d(TAG, "testReclaimResource got exception " + e.toString());
+ }
+ }
+ };
+ thread.start();
+ thread.join(10000);
+
+ for (int i = 0; i < mResults.length; ++i) {
+ Assert.assertTrue("Result from activity " + i + " is a fail.", mResults[i]);
+ }
+ return true;
+ }
+}
diff --git a/tests/tests/media/src/android/media/cts/ResourceManagerTest.java b/tests/tests/media/src/android/media/cts/ResourceManagerTest.java
new file mode 100644
index 0000000..5170aac
--- /dev/null
+++ b/tests/tests/media/src/android/media/cts/ResourceManagerTest.java
@@ -0,0 +1,36 @@
+/*
+ * Copyright 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.cts;
+
+import android.os.Bundle;
+import android.test.ActivityInstrumentationTestCase2;
+
+public class ResourceManagerTest
+ extends ActivityInstrumentationTestCase2<ResourceManagerStubActivity> {
+
+ public ResourceManagerTest() {
+ super("com.android.cts.media", ResourceManagerStubActivity.class);
+ }
+
+ public void testReclaimResource() throws Exception {
+ Bundle extras = new Bundle();
+ ResourceManagerStubActivity activity = launchActivity(
+ "com.android.cts.media", ResourceManagerStubActivity.class, extras);
+ activity.testReclaimResource();
+ activity.finish();
+ }
+}
diff --git a/tests/tests/media/src/android/media/cts/ResourceManagerTestActivity1.java b/tests/tests/media/src/android/media/cts/ResourceManagerTestActivity1.java
new file mode 100644
index 0000000..aff3f03
--- /dev/null
+++ b/tests/tests/media/src/android/media/cts/ResourceManagerTestActivity1.java
@@ -0,0 +1,42 @@
+/*
+ * Copyright 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.cts;
+
+import android.app.Activity;
+import android.os.Bundle;
+import android.util.Log;
+
+public class ResourceManagerTestActivity1 extends ResourceManagerTestActivityBase {
+ private static final int MAX_INSTANCES = 32;
+
+ @Override
+ protected void onCreate(Bundle savedInstanceState) {
+ TAG = "ResourceManagerTestActivity1";
+
+ Log.d(TAG, "onCreate called.");
+ super.onCreate(savedInstanceState);
+ moveTaskToBack(true);
+
+ if (allocateCodecs(MAX_INSTANCES) == MAX_INSTANCES) {
+ // haven't reached the limit with MAX_INSTANCES, report RESULT_OK directly and
+ // skip additional test.
+ setResult(Activity.RESULT_OK);
+ finish();
+ }
+ useCodecs();
+ }
+}
diff --git a/tests/tests/media/src/android/media/cts/ResourceManagerTestActivity2.java b/tests/tests/media/src/android/media/cts/ResourceManagerTestActivity2.java
new file mode 100644
index 0000000..f4c57f5
--- /dev/null
+++ b/tests/tests/media/src/android/media/cts/ResourceManagerTestActivity2.java
@@ -0,0 +1,36 @@
+/*
+ * Copyright 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.cts;
+
+import android.app.Activity;
+import android.os.Bundle;
+import android.util.Log;
+
+public class ResourceManagerTestActivity2 extends ResourceManagerTestActivityBase {
+ @Override
+ protected void onCreate(Bundle savedInstanceState) {
+ TAG = "ResourceManagerTestActivity2";
+
+ Log.d(TAG, "onCreate called.");
+ super.onCreate(savedInstanceState);
+
+ if (allocateCodecs(1) == 1) {
+ setResult(Activity.RESULT_OK);
+ finish();
+ }
+ }
+}
diff --git a/tests/tests/media/src/android/media/cts/ResourceManagerTestActivityBase.java b/tests/tests/media/src/android/media/cts/ResourceManagerTestActivityBase.java
new file mode 100644
index 0000000..9c48fc4
--- /dev/null
+++ b/tests/tests/media/src/android/media/cts/ResourceManagerTestActivityBase.java
@@ -0,0 +1,176 @@
+/*
+ * Copyright 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.cts;
+
+import android.app.Activity;
+import android.media.MediaCodec;
+import android.media.MediaCodecInfo;
+import android.media.MediaCodecInfo.CodecCapabilities;
+import android.media.MediaCodecInfo.VideoCapabilities;
+import android.media.MediaCodecList;
+import android.media.MediaFormat;
+import android.os.Bundle;
+import android.util.Log;
+import java.io.IOException;
+import java.util.Vector;
+
+public class ResourceManagerTestActivityBase extends Activity {
+ protected String TAG;
+ private static final int IFRAME_INTERVAL = 10; // 10 seconds between I-frames
+ private static final String MIME = MediaFormat.MIMETYPE_VIDEO_AVC;
+
+ private Vector<MediaCodec> mCodecs = new Vector<MediaCodec>();
+
+ private class TestCodecCallback extends MediaCodec.Callback {
+ @Override
+ public void onInputBufferAvailable(MediaCodec codec, int index) {
+ Log.d(TAG, "onInputBufferAvailable " + codec.toString());
+ }
+
+ @Override
+ public void onOutputBufferAvailable(
+ MediaCodec codec, int index, MediaCodec.BufferInfo info) {
+ Log.d(TAG, "onOutputBufferAvailable " + codec.toString());
+ }
+
+ @Override
+ public void onError(MediaCodec codec, MediaCodec.CodecException e) {
+ Log.d(TAG, "onError " + codec.toString() + " errorCode " + e.getErrorCode());
+ }
+
+ @Override
+ public void onOutputFormatChanged(MediaCodec codec, MediaFormat format) {
+ Log.d(TAG, "onOutputFormatChanged " + codec.toString());
+ }
+ }
+
+ private MediaCodec.Callback mCallback = new TestCodecCallback();
+
+ private static MediaFormat getTestFormat(VideoCapabilities vcaps) {
+ int maxWidth = vcaps.getSupportedWidths().getUpper();
+ int maxHeight = vcaps.getSupportedHeightsFor(maxWidth).getUpper();
+ int maxBitrate = vcaps.getBitrateRange().getUpper();
+ int maxFramerate = vcaps.getSupportedFrameRatesFor(maxWidth, maxHeight)
+ .getUpper().intValue();
+
+ MediaFormat format = MediaFormat.createVideoFormat(MIME, maxWidth, maxHeight);
+ format.setInteger(MediaFormat.KEY_COLOR_FORMAT,
+ CodecCapabilities.COLOR_FormatYUV420Flexible);
+ format.setInteger(MediaFormat.KEY_BIT_RATE, maxBitrate);
+ format.setInteger(MediaFormat.KEY_FRAME_RATE, maxFramerate);
+ format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, IFRAME_INTERVAL);
+ return format;
+ }
+
+ private MediaCodecInfo getTestCodecInfo() {
+ // Use avc decoder for testing.
+ boolean isEncoder = false;
+
+ MediaCodecList mcl = new MediaCodecList(MediaCodecList.REGULAR_CODECS);
+ for (MediaCodecInfo info : mcl.getCodecInfos()) {
+ if (info.isEncoder() != isEncoder) {
+ continue;
+ }
+ CodecCapabilities caps;
+ try {
+ caps = info.getCapabilitiesForType(MIME);
+ } catch (IllegalArgumentException e) {
+ // mime is not supported
+ continue;
+ }
+ return info;
+ }
+
+ return null;
+ }
+
+ protected int allocateCodecs(int max) {
+ MediaCodecInfo info = getTestCodecInfo();
+ if (info == null) {
+ // skip the test
+ return 0;
+ }
+
+ String name = info.getName();
+ VideoCapabilities vcaps = info.getCapabilitiesForType(MIME).getVideoCapabilities();
+ MediaFormat format = getTestFormat(vcaps);
+ for (int i = 0; i < max; ++i) {
+ try {
+ Log.d(TAG, "Create codec " + name + " #" + i);
+ MediaCodec codec = MediaCodec.createByCodecName(name);
+ codec.setCallback(mCallback);
+ Log.d(TAG, "Configure codec " + format);
+ codec.configure(format, null, null, 0);
+ Log.d(TAG, "Start codec " + format);
+ codec.start();
+ mCodecs.add(codec);
+ } catch (IllegalArgumentException e) {
+ Log.d(TAG, "IllegalArgumentException " + e.getMessage());
+ break;
+ } catch (IOException e) {
+ Log.d(TAG, "IOException " + e.getMessage());
+ break;
+ } catch (MediaCodec.CodecException e) {
+ Log.d(TAG, "CodecException 0x" + Integer.toHexString(e.getErrorCode()));
+ break;
+ }
+ }
+
+ return mCodecs.size();
+ }
+
+ private void doUseCodecs() {
+ int current = 0;
+ try {
+ for (current = 0; current < mCodecs.size(); ++current) {
+ mCodecs.get(current).getName();
+ }
+ } catch (MediaCodec.CodecException e) {
+ Log.d(TAG, "useCodecs got CodecException 0x" + Integer.toHexString(e.getErrorCode()));
+ if (e.getErrorCode() == MediaCodec.CodecException.ERROR_RECLAIMED) {
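+ // ERROR_RECLAIMED indicates the framework took this codec back to satisfy another,
+ // higher-priority request (Activity2 in this test). That is the expected outcome for
+ // the activity holding all the instances, so report success and finish.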
+ Log.d(TAG, "Remove codec " + current + " from the list");
+ mCodecs.remove(current);
+ setResult(Activity.RESULT_OK);
+ finish();
+ }
+ return;
+ }
+ }
+
+ private Thread mWorkerThread;
+ protected void useCodecs() {
+ mWorkerThread = new Thread(new Runnable() {
+ @Override
+ public void run() {
+ while (true) {
+ doUseCodecs();
+ }
+ }
+ });
+ mWorkerThread.start();
+ }
+
+ @Override
+ protected void onDestroy() {
+ Log.d(TAG, "onDestroy called.");
+ super.onDestroy();
+
+ for (int i = 0; i < mCodecs.size(); ++i) {
+ mCodecs.get(i).release();
+ }
+ }
+}
diff --git a/tests/tests/media/src/android/media/cts/SoundPoolTest.java b/tests/tests/media/src/android/media/cts/SoundPoolTest.java
index 15d18a0..23c4a7c 100644
--- a/tests/tests/media/src/android/media/cts/SoundPoolTest.java
+++ b/tests/tests/media/src/android/media/cts/SoundPoolTest.java
@@ -21,6 +21,7 @@
import android.content.Context;
import android.content.res.AssetFileDescriptor;
+import android.media.AudioAttributes;
import android.media.AudioManager;
import android.media.SoundPool;
import android.test.AndroidTestCase;
@@ -29,6 +30,8 @@
import java.io.FileDescriptor;
import java.io.FileOutputStream;
import java.io.InputStream;
+import java.util.Arrays;
+import java.util.concurrent.atomic.AtomicInteger;
abstract class SoundPoolTest extends AndroidTestCase {
@@ -230,6 +233,104 @@
mSoundPool.release();
}
+ public void testAutoPauseResume() throws Exception {
+ // The number of possible SoundPool streams simultaneously active is limited by
+ // track resources. Generally this is no greater than 32, but the actual
+ // amount may be less depending on concurrently running applications.
+ // Here we attempt to create more streams than what is normally possible;
+ // SoundPool should gracefully degrade to play those streams it can.
+ //
+ // Try to keep the maxStreams less than the number required to be active
+ // and certainly less than 20 to be cooperative with other applications.
+ final int TEST_STREAMS = 40;
+ SoundPool soundPool = null;
+ try {
+ soundPool = new SoundPool.Builder()
+ .setAudioAttributes(new AudioAttributes.Builder().build())
+ .setMaxStreams(TEST_STREAMS)
+ .build();
+
+ // get our sounds
+ final int[] sounds = getSounds();
+
+ // set our completion listener
+ final int[] loadIds = new int[TEST_STREAMS];
+ final Object done = new Object();
+ final int[] loaded = new int[1]; // used as a "pointer" to an integer
+ final SoundPool fpool = soundPool; // final reference in scope of try block
+ soundPool.setOnLoadCompleteListener(new SoundPool.OnLoadCompleteListener() {
+ @Override
+ public void onLoadComplete(SoundPool pool, int sampleId, int status) {
+ assertEquals(fpool, pool);
+ assertEquals(0 /* success */, status);
+ synchronized(done) {
+ loadIds[loaded[0]++] = sampleId;
+ if (loaded[0] == loadIds.length) {
+ done.notify();
+ }
+ }
+ }
+ });
+
+ // initiate loading
+ final int[] soundIds = new int[TEST_STREAMS];
+ for (int i = 0; i < soundIds.length; i++) {
+ soundIds[i] = soundPool.load(mContext, sounds[i % sounds.length], PRIORITY);
+ }
+
+ // wait for all sounds to load
+ final long LOAD_TIMEOUT_IN_MS = 10000;
+ final long startTime = System.currentTimeMillis();
+ synchronized(done) {
+ while (loaded[0] != soundIds.length) {
+ final long waitTime =
+ LOAD_TIMEOUT_IN_MS - (System.currentTimeMillis() - startTime);
+ assertTrue(waitTime > 0);
+ done.wait(waitTime);
+ }
+ }
+
+ // verify the Ids match (actually does sorting too)
+ Arrays.sort(loadIds);
+ Arrays.sort(soundIds);
+ assertTrue(Arrays.equals(loadIds, soundIds));
+
+ // play - should hear the following:
+ // 1 second of sound
+ // 1 second of silence
+ // 1 second of sound.
+ int[] streamIds = new int[soundIds.length];
+ for (int i = 0; i < soundIds.length; i++) {
+ streamIds[i] = soundPool.play(soundIds[i],
+ 0.5f /* leftVolume */, 0.5f /* rightVolume */, PRIORITY,
+ -1 /* loop (infinite) */, 1.0f /* rate */);
+ }
+ Thread.sleep(1000 /* millis */);
+ soundPool.autoPause();
+ Thread.sleep(1000 /* millis */);
+ soundPool.autoResume();
+ Thread.sleep(1000 /* millis */);
+
+ // clean up
+ for (int stream : streamIds) {
+ assertTrue(stream != 0);
+ soundPool.stop(stream);
+ }
+ for (int sound : soundIds) {
+ assertEquals(true, soundPool.unload(sound));
+ }
+ // check to see we're really unloaded
+ for (int sound : soundIds) {
+ assertEquals(false, soundPool.unload(sound));
+ }
+ } finally {
+ if (soundPool != null) {
+ soundPool.release();
+ soundPool = null;
+ }
+ }
+ }
+
/**
* Load a sample and wait until it is ready to be played.
* @return The sample ID.
diff --git a/tests/tests/media/src/android/media/cts/StreamingMediaPlayerTest.java b/tests/tests/media/src/android/media/cts/StreamingMediaPlayerTest.java
index dd7c1f6..7497da2 100644
--- a/tests/tests/media/src/android/media/cts/StreamingMediaPlayerTest.java
+++ b/tests/tests/media/src/android/media/cts/StreamingMediaPlayerTest.java
@@ -18,12 +18,16 @@
import android.cts.util.MediaUtils;
import android.media.MediaFormat;
import android.media.MediaPlayer;
+import android.media.MediaPlayer.TrackInfo;
+import android.media.TimedMetaData;
import android.os.Looper;
+import android.os.PowerManager;
import android.os.SystemClock;
import android.util.Log;
import android.webkit.cts.CtsTestServer;
import java.io.IOException;
+import java.util.concurrent.atomic.AtomicInteger;
/**
* Tests of MediaPlayer streaming capabilities.
@@ -307,6 +311,94 @@
localHlsTest("hls.m3u8", false, true);
}
+ public void testPlayHlsStreamWithTimedId3() throws Throwable {
+ mServer = new CtsTestServer(mContext);
+ try {
+ // counter must be final if we want to access it inside onTimedMetaDataAvailable;
+ // use AtomicInteger so we can have a final counter object with mutable integer value.
+ final AtomicInteger counter = new AtomicInteger();
+ String stream_url = mServer.getAssetUrl("prog_index.m3u8");
+ mMediaPlayer.setDataSource(stream_url);
+ mMediaPlayer.setDisplay(getActivity().getSurfaceHolder());
+ mMediaPlayer.setScreenOnWhilePlaying(true);
+ mMediaPlayer.setWakeMode(mContext, PowerManager.PARTIAL_WAKE_LOCK);
+ mMediaPlayer.setOnTimedMetaDataAvailableListener(new MediaPlayer.OnTimedMetaDataAvailableListener() {
+ @Override
+ public void onTimedMetaDataAvailable(MediaPlayer mp, TimedMetaData md) {
+ counter.incrementAndGet();
+ int pos = mp.getCurrentPosition();
+ long timeUs = md.getTimestamp();
+ byte[] rawData = md.getMetaData();
+ // Raw data contains an id3 tag holding the decimal string representation of
+ // the associated time stamp rounded to the closest half second.
+
+ int offset = 0;
+ offset += 3; // "ID3"
+ offset += 2; // version
+ offset += 1; // flags
+ offset += 4; // size
+ offset += 4; // "TXXX"
+ offset += 4; // frame size
+ offset += 2; // frame flags
+ offset += 1; // "\x03" : UTF-8 encoded Unicode
+ offset += 1; // "\x00" : null-terminated empty description
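+ // Header skipped so far: 3 + 2 + 1 + 4 + 4 + 4 + 2 + 1 + 1 = 22 bytes; the text
+ // payload runs from here to the terminating null stripped below.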
+
+ int length = rawData.length;
+ length -= offset;
+ length -= 1; // "\x00" : terminating null
+
+ String data = new String(rawData, offset, length);
+ int dataTimeUs = Integer.parseInt(data);
+ assertTrue("Timed ID3 timestamp does not match content",
+ Math.abs(dataTimeUs - timeUs) < 500000);
+ assertTrue("Timed ID3 arrives after timestamp", pos * 1000 < timeUs);
+ }
+ });
+
+ final Object completion = new Object();
+ mMediaPlayer.setOnCompletionListener(new MediaPlayer.OnCompletionListener() {
+ int run;
+ @Override
+ public void onCompletion(MediaPlayer mp) {
+ if (run++ == 0) {
+ mMediaPlayer.seekTo(0);
+ mMediaPlayer.start();
+ } else {
+ mMediaPlayer.stop();
+ synchronized (completion) {
+ completion.notify();
+ }
+ }
+ }
+ });
+
+ mMediaPlayer.prepare();
+ mMediaPlayer.start();
+ assertTrue("MediaPlayer not playing", mMediaPlayer.isPlaying());
+
+ int i = -1;
+ TrackInfo[] trackInfos = mMediaPlayer.getTrackInfo();
+ for (i = 0; i < trackInfos.length; i++) {
+ TrackInfo trackInfo = trackInfos[i];
+ if (trackInfo.getTrackType() == TrackInfo.MEDIA_TRACK_TYPE_METADATA) {
+ break;
+ }
+ }
+ assertTrue("Stream has no timed ID3 track", i < trackInfos.length);
+ mMediaPlayer.selectTrack(i);
+
+ synchronized (completion) {
+ completion.wait();
+ }
+
+ // There are a total of 19 metadata access units in the test stream; every one of them
+ // should be received twice: once before the seek and once after.
+ assertTrue("Incorrect number of timed ID3s received", counter.get() == 38);
+ } finally {
+ mServer.shutdown();
+ }
+ }
+
private static class WorkerWithPlayer implements Runnable {
private final Object mLock = new Object();
private Looper mLooper;
diff --git a/tests/tests/media/src/android/media/cts/TestMediaDataSource.java b/tests/tests/media/src/android/media/cts/TestMediaDataSource.java
index 87b4c59..a10840b 100644
--- a/tests/tests/media/src/android/media/cts/TestMediaDataSource.java
+++ b/tests/tests/media/src/android/media/cts/TestMediaDataSource.java
@@ -28,7 +28,7 @@
/**
* A MediaDataSource that reads from a byte array for use in tests.
*/
-public class TestMediaDataSource implements MediaDataSource {
+public class TestMediaDataSource extends MediaDataSource {
private static final String TAG = "TestMediaDataSource";
private byte[] mData;
@@ -62,29 +62,30 @@
}
@Override
- public synchronized int readAt(long offset, byte[] buffer, int size) {
+ public synchronized int readAt(long position, byte[] buffer, int offset, int size)
+ throws IOException {
if (mThrowFromReadAt) {
- throw new RuntimeException("Test exception from readAt()");
+ throw new IOException("Test exception from readAt()");
}
if (mReturnFromReadAt != null) {
return mReturnFromReadAt;
}
// Clamp reads past the end of the source.
- if (offset >= mData.length) {
- return 0;
+ if (position >= mData.length) {
+ return -1; // -1 indicates EOF
}
- if (offset + size > mData.length) {
- size -= (offset + size) - mData.length;
+ if (position + size > mData.length) {
+ size -= (position + size) - mData.length;
}
- System.arraycopy(mData, (int)offset, buffer, 0, size);
+ System.arraycopy(mData, (int)position, buffer, offset, size);
return size;
}
@Override
- public synchronized long getSize() {
+ public synchronized long getSize() throws IOException {
if (mThrowFromGetSize) {
- throw new RuntimeException("Test exception from getSize()");
+ throw new IOException("Test exception from getSize()");
}
if (mReturnFromGetSize != null) {
return mReturnFromGetSize;
diff --git a/tests/tests/mediastress/src/android/mediastress/cts/MediaPlayerStressTest.java b/tests/tests/mediastress/src/android/mediastress/cts/MediaPlayerStressTest.java
index 7c65824..6f4ebdd 100644
--- a/tests/tests/mediastress/src/android/mediastress/cts/MediaPlayerStressTest.java
+++ b/tests/tests/mediastress/src/android/mediastress/cts/MediaPlayerStressTest.java
@@ -105,13 +105,18 @@
* @throws Exception
*/
protected void doTestVideoPlayback(int mediaNumber, int repeatCounter) throws Exception {
+ Instrumentation inst = getInstrumentation();
+ String mediaName = getFullVideoClipName(mediaNumber);
+ if (!MediaUtils.checkCodecsForPath(inst.getTargetContext(), mediaName)) {
+ return; // not supported, message is already logged
+ }
+
File playbackOutput = new File(WorkDir.getTopDir(), "PlaybackTestResult.txt");
Writer output = new BufferedWriter(new FileWriter(playbackOutput, true));
boolean testResult = true;
boolean onCompleteSuccess = false;
- Instrumentation inst = getInstrumentation();
Intent intent = new Intent();
intent.setClass(inst.getTargetContext(), MediaFrameworkTest.class);
@@ -119,10 +124,6 @@
Activity act = inst.startActivitySync(intent);
- String mediaName = getFullVideoClipName(mediaNumber);
- if (!MediaUtils.checkCodecsForPath(inst.getTargetContext(), mediaName)) {
- return; // not supported, message is already logged
- }
for (int i = 0; i < repeatCounter; i++) {
Log.v(TAG, "start playing " + mediaName);
onCompleteSuccess =
diff --git a/tests/tests/net/src/android/net/ipv6/cts/PingTest.java b/tests/tests/net/src/android/net/ipv6/cts/PingTest.java
index eddb416..c23ad30 100644
--- a/tests/tests/net/src/android/net/ipv6/cts/PingTest.java
+++ b/tests/tests/net/src/android/net/ipv6/cts/PingTest.java
@@ -155,7 +155,7 @@
public void testLoopbackPing() throws ErrnoException, IOException {
// Generate a random ping packet and send it to localhost.
InetAddress ipv6Loopback = InetAddress.getByName(null);
- assertEquals("localhost/::1", ipv6Loopback.toString());
+ assertEquals("::1", ipv6Loopback.getHostAddress());
for (int i = 0; i < NUM_PACKETS; i++) {
byte[] packet = pingPacket((int) (Math.random() * (MAX_SIZE - ICMP_HEADER_SIZE)));
diff --git a/tests/tests/os/src/android/os/cts/MessageQueueTest.java b/tests/tests/os/src/android/os/cts/MessageQueueTest.java
index f5d6415..8906c42 100644
--- a/tests/tests/os/src/android/os/cts/MessageQueueTest.java
+++ b/tests/tests/os/src/android/os/cts/MessageQueueTest.java
@@ -21,7 +21,7 @@
import android.os.Looper;
import android.os.Message;
import android.os.MessageQueue;
-import android.os.MessageQueue.FileDescriptorCallback;
+import android.os.MessageQueue.OnFileDescriptorEventListener;
import android.os.ParcelFileDescriptor;
import android.os.ParcelFileDescriptor.AutoCloseInputStream;
import android.os.ParcelFileDescriptor.AutoCloseOutputStream;
@@ -214,8 +214,13 @@
public void testRegisterFileDescriptorCallbackThrowsWhenFdIsNull() {
MessageQueue queue = Looper.getMainLooper().getQueue();
try {
- queue.registerFileDescriptorCallback(null, 0,
- new FileDescriptorCallback() { });
+ queue.addOnFileDescriptorEventListener(null, 0,
+ new OnFileDescriptorEventListener() {
+ @Override
+ public int onFileDescriptorEvents(FileDescriptor fd, int events) {
+ return 0;
+ }
+ });
fail("Expected IllegalArgumentException");
} catch (IllegalArgumentException ex) {
// expected
@@ -228,7 +233,7 @@
try (ParcelFileDescriptor reader = pipe[0];
ParcelFileDescriptor writer = pipe[1]) {
try {
- queue.registerFileDescriptorCallback(reader.getFileDescriptor(), 0, null);
+ queue.addOnFileDescriptorEventListener(reader.getFileDescriptor(), 0, null);
fail("Expected IllegalArgumentException");
} catch (IllegalArgumentException ex) {
// expected
@@ -239,7 +244,7 @@
public void testUnregisterFileDescriptorCallbackThrowsWhenFdIsNull() throws Exception {
MessageQueue queue = Looper.getMainLooper().getQueue();
try {
- queue.unregisterFileDescriptorCallback(null);
+ queue.removeOnFileDescriptorEventListener(null);
fail("Expected IllegalArgumentException");
} catch (IllegalArgumentException ex) {
// expected
@@ -252,7 +257,7 @@
ParcelFileDescriptor[] pipe = ParcelFileDescriptor.createPipe();
try (ParcelFileDescriptor reader = pipe[0];
ParcelFileDescriptor writer = pipe[1]) {
- queue.unregisterFileDescriptorCallback(reader.getFileDescriptor());
+ queue.removeOnFileDescriptorEventListener(reader.getFileDescriptor());
}
}
@@ -272,7 +277,7 @@
// Prepare to write a lot of data to the pipe asynchronously.
// We don't actually care about the content (assume pipes work correctly)
// so we just write lots of zeros.
- FileDescriptorCallback writerCallback = new FileDescriptorCallback() {
+ OnFileDescriptorEventListener writerCallback = new OnFileDescriptorEventListener() {
private byte[] mBuffer = new byte[4096];
private int mRemaining = size;
private boolean mDone;
@@ -283,14 +288,14 @@
if (!mDone) {
// When an error happens because the reader closed its end,
// signal the test, and remove the callback.
- if ((events & FileDescriptorCallback.EVENT_ERROR) != 0) {
+ if ((events & OnFileDescriptorEventListener.EVENT_ERROR) != 0) {
writerSawError.countDown();
mDone = true;
return 0;
}
// Write all output until an error is observed.
- if ((events & FileDescriptorCallback.EVENT_OUTPUT) != 0) {
+ if ((events & OnFileDescriptorEventListener.EVENT_OUTPUT) != 0) {
int count = Math.min(mBuffer.length, mRemaining);
try {
writer.write(mBuffer, 0, count);
@@ -309,7 +314,7 @@
};
// Prepare to read all of that data.
- FileDescriptorCallback readerCallback = new FileDescriptorCallback() {
+ OnFileDescriptorEventListener readerCallback = new OnFileDescriptorEventListener() {
private byte[] mBuffer = new byte[4096];
private int mRemaining = size;
private boolean mDone;
@@ -319,14 +324,14 @@
assertEquals(pipe[0].getFileDescriptor(), fd);
if (!mDone) {
// Errors should not happen.
- if ((events & FileDescriptorCallback.EVENT_ERROR) != 0) {
+ if ((events & OnFileDescriptorEventListener.EVENT_ERROR) != 0) {
fail("Saw unexpected error.");
return 0;
}
// Read until everything is read, signal the test,
// and remove the callback.
- if ((events & FileDescriptorCallback.EVENT_INPUT) != 0) {
+ if ((events & OnFileDescriptorEventListener.EVENT_INPUT) != 0) {
try {
int count = reader.read(mBuffer, 0, mBuffer.length);
mRemaining -= count;
@@ -349,10 +354,10 @@
};
// Register the callbacks.
- queue.registerFileDescriptorCallback(reader.getFD(),
- FileDescriptorCallback.EVENT_INPUT, readerCallback);
- queue.registerFileDescriptorCallback(writer.getFD(),
- FileDescriptorCallback.EVENT_OUTPUT, writerCallback);
+ queue.addOnFileDescriptorEventListener(reader.getFD(),
+ OnFileDescriptorEventListener.EVENT_INPUT, readerCallback);
+ queue.addOnFileDescriptorEventListener(writer.getFD(),
+ OnFileDescriptorEventListener.EVENT_OUTPUT, writerCallback);
// Wait for the reader to see all of the data that the writer
// is prepared to send.
@@ -368,8 +373,8 @@
// The reader and writer should already be unregistered.
// Try to unregister them again to ensure nothing bad happens.
- queue.unregisterFileDescriptorCallback(reader.getFD());
- queue.unregisterFileDescriptorCallback(writer.getFD());
+ queue.removeOnFileDescriptorEventListener(reader.getFD());
+ queue.removeOnFileDescriptorEventListener(writer.getFD());
}
} finally {
thread.quitAndRethrow();
@@ -401,8 +406,8 @@
final FileOutputStream writer = new AutoCloseOutputStream(pipe[1])) {
// Register the callback.
final boolean[] awoke = new boolean[1];
- queue.registerFileDescriptorCallback(reader.getFD(),
- FileDescriptorCallback.EVENT_ERROR, new FileDescriptorCallback() {
+ queue.addOnFileDescriptorEventListener(reader.getFD(),
+ OnFileDescriptorEventListener.EVENT_ERROR, new OnFileDescriptorEventListener() {
@Override
public int onFileDescriptorEvents(FileDescriptor fd, int events) {
awoke[0] = true;
@@ -438,8 +443,8 @@
final FileOutputStream writer2 = new AutoCloseOutputStream(pipe2[1])) {
// Register the callback.
final boolean[] awoke = new boolean[1];
- queue.registerFileDescriptorCallback(reader2.getFD(),
- FileDescriptorCallback.EVENT_INPUT, new FileDescriptorCallback() {
+ queue.addOnFileDescriptorEventListener(reader2.getFD(),
+ OnFileDescriptorEventListener.EVENT_INPUT, new OnFileDescriptorEventListener() {
@Override
public int onFileDescriptorEvents(FileDescriptor fd, int events) {
awoke[0] = true;
@@ -487,8 +492,8 @@
final FileOutputStream writer = new AutoCloseOutputStream(pipe[1])) {
// Register the callback.
final boolean[] awoke = new boolean[1];
- queue.registerFileDescriptorCallback(reader.getFD(),
- FileDescriptorCallback.EVENT_ERROR, new FileDescriptorCallback() {
+ queue.addOnFileDescriptorEventListener(reader.getFD(),
+ OnFileDescriptorEventListener.EVENT_ERROR, new OnFileDescriptorEventListener() {
@Override
public int onFileDescriptorEvents(FileDescriptor fd, int events) {
awoke[0] = true;
@@ -528,8 +533,8 @@
final FileOutputStream writer2 = new AutoCloseOutputStream(pipe[1])) {
// Register the callback.
final boolean[] awoke = new boolean[1];
- queue.registerFileDescriptorCallback(reader2.getFD(),
- FileDescriptorCallback.EVENT_INPUT, new FileDescriptorCallback() {
+ queue.addOnFileDescriptorEventListener(reader2.getFD(),
+ OnFileDescriptorEventListener.EVENT_INPUT, new OnFileDescriptorEventListener() {
@Override
public int onFileDescriptorEvents(FileDescriptor fd, int events) {
awoke[0] = true;
@@ -580,8 +585,8 @@
final FileOutputStream writer = new AutoCloseOutputStream(pipe[1])) {
// Register the callback.
final boolean[] awoke = new boolean[1];
- queue.registerFileDescriptorCallback(reader.getFD(),
- FileDescriptorCallback.EVENT_ERROR, new FileDescriptorCallback() {
+ queue.addOnFileDescriptorEventListener(reader.getFD(),
+ OnFileDescriptorEventListener.EVENT_ERROR, new OnFileDescriptorEventListener() {
@Override
public int onFileDescriptorEvents(FileDescriptor fd, int events) {
awoke[0] = true;
@@ -605,9 +610,9 @@
}
// Now we have a new pipe, make sure we can register it successfully.
- queue.registerFileDescriptorCallback(pipe[0].getFileDescriptor(),
- FileDescriptorCallback.EVENT_INPUT,
- new FileDescriptorCallback() {
+ queue.addOnFileDescriptorEventListener(pipe[0].getFileDescriptor(),
+ OnFileDescriptorEventListener.EVENT_INPUT,
+ new OnFileDescriptorEventListener() {
@Override
public int onFileDescriptorEvents(FileDescriptor fd, int events) {
awoke2[0] = true;
@@ -672,8 +677,8 @@
final FileOutputStream writer = new AutoCloseOutputStream(pipe[1])) {
// Register the callback.
final boolean[] awoke = new boolean[1];
- queue.registerFileDescriptorCallback(reader.getFD(),
- FileDescriptorCallback.EVENT_ERROR, new FileDescriptorCallback() {
+ queue.addOnFileDescriptorEventListener(reader.getFD(),
+ OnFileDescriptorEventListener.EVENT_ERROR, new OnFileDescriptorEventListener() {
@Override
public int onFileDescriptorEvents(FileDescriptor fd, int events) {
awoke[0] = true;
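Illustrative note (not part of this change): the renamed API follows the pattern addOnFileDescriptorEventListener(fd, events, listener), and onFileDescriptorEvents() returns the event mask to keep watching (returning 0 removes the listener). A minimal sketch, assuming API level 23+ and a caller running on a Looper thread:

// Illustrative sketch only: watch the read end of a pipe for input on the
// current thread's MessageQueue.
import android.os.Looper;
import android.os.MessageQueue;
import android.os.MessageQueue.OnFileDescriptorEventListener;
import android.os.ParcelFileDescriptor;

import java.io.FileDescriptor;

final class PipeWatcher {
    static void watchReadEnd(ParcelFileDescriptor readSide) {
        // Assumes Looper.myLooper() is non-null, i.e. a Looper thread.
        MessageQueue queue = Looper.myLooper().getQueue();
        queue.addOnFileDescriptorEventListener(readSide.getFileDescriptor(),
                OnFileDescriptorEventListener.EVENT_INPUT,
                new OnFileDescriptorEventListener() {
                    @Override
                    public int onFileDescriptorEvents(FileDescriptor fd, int events) {
                        if ((events & EVENT_ERROR) != 0) {
                            return 0; // stop watching, e.g. the writer closed its end
                        }
                        // ... read from the descriptor here ...
                        return EVENT_INPUT; // keep watching for more input
                    }
                });
    }
}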
diff --git a/tests/tests/provider/src/android/provider/cts/ContactsContract_IsSuperPrimaryName.java b/tests/tests/provider/src/android/provider/cts/ContactsContract_IsSuperPrimaryName.java
index 4ff6a88..603cb619 100644
--- a/tests/tests/provider/src/android/provider/cts/ContactsContract_IsSuperPrimaryName.java
+++ b/tests/tests/provider/src/android/provider/cts/ContactsContract_IsSuperPrimaryName.java
@@ -112,10 +112,10 @@
//
// Execute: make the non primary name IS_SUPER_PRIMARY
- TestData nonPrimaryName = isFirstNamePrimary ? name1 : name2;
+ TestData nonPrimaryName = !isFirstNamePrimary ? name1 : name2;
ContentValues values = new ContentValues();
values.put(StructuredName.IS_SUPER_PRIMARY, 1);
- mResolver.update(nonPrimaryName.getContentUri(), values, null, null);
+ mResolver.update(nonPrimaryName.getUri(), values, null, null);
// Verify: the IS_SUPER_PRIMARY values swap
name1.load();
diff --git a/tests/tests/renderscript/src/android/renderscript/cts/AllocationCopyToTest.java b/tests/tests/renderscript/src/android/renderscript/cts/AllocationCopyToTest.java
index 71c4f64..f6bef0a 100644
--- a/tests/tests/renderscript/src/android/renderscript/cts/AllocationCopyToTest.java
+++ b/tests/tests/renderscript/src/android/renderscript/cts/AllocationCopyToTest.java
@@ -570,7 +570,48 @@
result);
}
+ public void test_AllocationCopy3DRangeFrom_Alloc() {
+ Random random = new Random(0x172d8ab9);
+ int width = random.nextInt(64);
+ int height = random.nextInt(64);
+ int depth = random.nextInt(64);
+ int xoff = random.nextInt(width);
+ int yoff = random.nextInt(height);
+ int zoff = random.nextInt(depth);
+
+ int xcount = width - xoff;
+ int ycount = height - yoff;
+ int zcount = depth - zoff;
+ int arr_len = xcount * ycount * zcount;
+
+ long[] inArray = new long[arr_len];
+ long[] outArray = new long[arr_len];
+
+ for (int i = 0; i < arr_len; i++) {
+ inArray[i] = random.nextLong();
+ }
+
+ Type.Builder typeBuilder = new Type.Builder(mRS, Element.I64(mRS));
+ typeBuilder.setX(width).setY(height).setZ(depth);
+ alloc = Allocation.createTyped(mRS, typeBuilder.create());
+ Allocation allocRef = Allocation.createTyped(mRS, typeBuilder.create());
+
+ allocRef.copy3DRangeFrom(xoff, yoff, zoff, xcount, ycount, zcount, (Object)inArray);
+ alloc.copy3DRangeFrom(xoff, yoff, zoff, xcount, ycount, zcount, allocRef, xoff, yoff, zoff);
+ alloc.copy3DRangeTo(xoff, yoff, zoff, xcount, ycount, zcount, (Object)outArray);
+
+ boolean result = true;
+ for (int i = 0; i < arr_len; i++) {
+ if (inArray[i] != outArray[i]) {
+ result = false;
+ android.util.Log.v("Allocation Copy3DRangeFrom (alloc) Test", "Failed: " + i + " " + inArray[i] + " " + outArray[i]);
+ break;
+ }
+ }
+ assertTrue("test_AllocationCopy3DRangeFrom_Alloc failed, output array does not match input",
+ result);
+ }
public void test_Allocationcopy1DRangeToUnchecked_Byte() {
Random random = new Random(0x172d8ab9);
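Illustrative note (not part of this change): the (xoff, yoff, zoff, xcount, ycount, zcount) arguments used by test_AllocationCopy3DRangeFrom_Alloc describe a sub-box of the allocation, and the copy is only valid while that sub-box stays inside the allocation's dimensions. A hedged sketch of that bounds relationship:

// Illustrative sketch only: the constraint the 3D range-copy arguments above
// must satisfy with respect to the allocation's Type dimensions.
import android.renderscript.Allocation;

final class Range3dChecks {
    static void checkRange(Allocation a, int xoff, int yoff, int zoff,
            int xcount, int ycount, int zcount) {
        if (xoff + xcount > a.getType().getX()
                || yoff + ycount > a.getType().getY()
                || zoff + zcount > a.getType().getZ()) {
            throw new IllegalArgumentException("3D range exceeds allocation bounds");
        }
    }
}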
diff --git a/tests/tests/renderscript/src/android/renderscript/cts/ElementTest.java b/tests/tests/renderscript/src/android/renderscript/cts/ElementTest.java
index 131b3fd4..9fa9c15 100644
--- a/tests/tests/renderscript/src/android/renderscript/cts/ElementTest.java
+++ b/tests/tests/renderscript/src/android/renderscript/cts/ElementTest.java
@@ -183,6 +183,13 @@
// A_8 is in U8
Element[] BOOLEAN = { Element.BOOLEAN(mRS) };
Element[] ELEMENT = { Element.ELEMENT(mRS) };
+ Element[] F16 = { Element.F16(mRS) };
+ Element[] F16_2 = { Element.F16_2(mRS),
+ Element.createVector(mRS, Element.DataType.FLOAT_16, 2) };
+ Element[] F16_3 = { Element.F16_3(mRS),
+ Element.createVector(mRS, Element.DataType.FLOAT_16, 3) };
+ Element[] F16_4 = { Element.F16_4(mRS),
+ Element.createVector(mRS, Element.DataType.FLOAT_16, 4) };
Element[] F32 = { Element.F32(mRS) };
Element[] F32_2 = { Element.F32_2(mRS),
Element.createVector(mRS, Element.DataType.FLOAT_32, 2) };
@@ -236,8 +243,10 @@
Element.createPixel(mRS, Element.DataType.UNSIGNED_8,
Element.DataKind.PIXEL_RGBA) };
- Element[][] ElementArrs = { ALLOCATION, BOOLEAN, ELEMENT, F32, F32_2,
- F32_3, F32_4, F64, I16, I32, I64, I8,
+ Element[][] ElementArrs = { ALLOCATION, BOOLEAN, ELEMENT,
+ F16, F16_2, F16_3, F16_4,
+ F32, F32_2, F32_3, F32_4,
+ F64, I16, I32, I64, I8,
MATRIX_2X2, MATRIX_3X3, MATRIX_4X4, MESH,
PROGRAM_FRAGMENT, PROGRAM_RASTER,
PROGRAM_STORE, PROGRAM_VERTEX, RGBA_4444,
@@ -272,6 +281,10 @@
eb.add(Element.RGB_565(mRS), "RGB_565", arraySize);
eb.add(Element.RGB_888(mRS), "RGB_888", arraySize);
eb.add(Element.RGBA_8888(mRS), "RGBA_8888", arraySize);
+ eb.add(Element.F16(mRS), "F16", arraySize);
+ eb.add(Element.F16_2(mRS), "F16_2", arraySize);
+ eb.add(Element.F16_3(mRS), "F16_3", arraySize);
+ eb.add(Element.F16_4(mRS), "F16_4", arraySize);
eb.add(Element.F32(mRS), "F32", arraySize);
eb.add(Element.F32_2(mRS), "F32_2", arraySize);
eb.add(Element.F32_3(mRS), "F32_3", arraySize);
@@ -338,6 +351,10 @@
assertFalse(Element.RGB_565(mRS).isComplex());
assertFalse(Element.RGB_888(mRS).isComplex());
assertFalse(Element.RGBA_8888(mRS).isComplex());
+ assertFalse(Element.F16(mRS).isComplex());
+ assertFalse(Element.F16_2(mRS).isComplex());
+ assertFalse(Element.F16_3(mRS).isComplex());
+ assertFalse(Element.F16_4(mRS).isComplex());
assertFalse(Element.F32(mRS).isComplex());
assertFalse(Element.F32_2(mRS).isComplex());
assertFalse(Element.F32_3(mRS).isComplex());
@@ -416,6 +433,7 @@
// Uncomment when NONE is no longer hidden.
//assertEquals(DataType.NONE, DataType.valueOf("NONE"));
+ assertEquals(DataType.FLOAT_16, DataType.valueOf("FLOAT_16"));
assertEquals(DataType.FLOAT_32, DataType.valueOf("FLOAT_32"));
assertEquals(DataType.FLOAT_64, DataType.valueOf("FLOAT_64"));
assertEquals(DataType.SIGNED_8, DataType.valueOf("SIGNED_8"));
@@ -452,6 +470,7 @@
for (DataType dt : DataType.values()) {
switch (dt) {
+ case FLOAT_16:
case FLOAT_32:
case FLOAT_64:
case SIGNED_8:
diff --git a/tests/tests/renderscript/src/android/renderscript/cts/Intrinsic3DLut.java b/tests/tests/renderscript/src/android/renderscript/cts/Intrinsic3DLut.java
index 87a03ad..4ec84ad 100644
--- a/tests/tests/renderscript/src/android/renderscript/cts/Intrinsic3DLut.java
+++ b/tests/tests/renderscript/src/android/renderscript/cts/Intrinsic3DLut.java
@@ -136,5 +136,12 @@
checkError();
}
+ public void test_ID() {
+ ScriptIntrinsic3DLUT s = ScriptIntrinsic3DLUT.create(mRS, Element.U8_4(mRS));
+ Script.KernelID kid = s.getKernelID();
+ if (kid == null) {
+ throw new IllegalStateException("kid must be valid");
+ }
+ }
}
diff --git a/tests/tests/renderscript/src/android/renderscript/cts/IntrinsicBLAS.java b/tests/tests/renderscript/src/android/renderscript/cts/IntrinsicBLAS.java
index ff5bf84..f6b3176 100644
--- a/tests/tests/renderscript/src/android/renderscript/cts/IntrinsicBLAS.java
+++ b/tests/tests/renderscript/src/android/renderscript/cts/IntrinsicBLAS.java
@@ -1999,14 +1999,8 @@
if (cM != cN) {
return false;
}
- if (TransA != ScriptIntrinsicBLAS.NO_TRANSPOSE) {
- if (aN != cM) {
- return false;
- }
- } else {
- if (aM != cM) {
- return false;
- }
+ if (aM != cM) {
+ return false;
}
} else if (A != null && B != null) {
// A and B only
diff --git a/tests/tests/renderscript/src/android/renderscript/cts/IntrinsicBlur.java b/tests/tests/renderscript/src/android/renderscript/cts/IntrinsicBlur.java
index 076dcd4..4e99391 100644
--- a/tests/tests/renderscript/src/android/renderscript/cts/IntrinsicBlur.java
+++ b/tests/tests/renderscript/src/android/renderscript/cts/IntrinsicBlur.java
@@ -141,5 +141,17 @@
checkError();
}
+ public void test_ID() {
+ ScriptIntrinsicBlur s = ScriptIntrinsicBlur.create(mRS, Element.U8_4(mRS));
+ Script.KernelID kid = s.getKernelID();
+ if (kid == null) {
+ throw new IllegalStateException("kid must be valid");
+ }
+
+ Script.FieldID fid = s.getFieldID_Input();
+ if (fid == null) {
+ throw new IllegalStateException("fid must be valid");
+ }
+ }
}
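Illustrative note (not part of this change): getKernelID() and getFieldID_Input() exist mainly so intrinsics can be wired into a ScriptGroup. A hedged sketch of one plausible use, assuming the original ScriptGroup.Builder API, feeding the blur's output into a 3x3 convolve's input field:

// Illustrative sketch only: connect two intrinsics via their KernelID/FieldID.
import android.renderscript.Element;
import android.renderscript.RenderScript;
import android.renderscript.ScriptGroup;
import android.renderscript.ScriptIntrinsicBlur;
import android.renderscript.ScriptIntrinsicConvolve3x3;
import android.renderscript.Type;

final class BlurThenConvolve {
    static ScriptGroup build(RenderScript rs, Type connectionType) {
        ScriptIntrinsicBlur blur = ScriptIntrinsicBlur.create(rs, Element.U8_4(rs));
        ScriptIntrinsicConvolve3x3 convolve =
                ScriptIntrinsicConvolve3x3.create(rs, Element.U8_4(rs));
        ScriptGroup.Builder builder = new ScriptGroup.Builder(rs);
        builder.addKernel(blur.getKernelID());
        builder.addKernel(convolve.getKernelID());
        // Route the blur kernel's output into the convolve's input field.
        builder.addConnection(connectionType, blur.getKernelID(),
                convolve.getFieldID_Input());
        return builder.create();
    }
}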
diff --git a/tests/tests/renderscript/src/android/renderscript/cts/IntrinsicConvolve3x3.java b/tests/tests/renderscript/src/android/renderscript/cts/IntrinsicConvolve3x3.java
index 8faeb22..8a2bc27 100644
--- a/tests/tests/renderscript/src/android/renderscript/cts/IntrinsicConvolve3x3.java
+++ b/tests/tests/renderscript/src/android/renderscript/cts/IntrinsicConvolve3x3.java
@@ -247,4 +247,17 @@
checkError();
}
+ public void test_ID() {
+ ScriptIntrinsicConvolve3x3 s = ScriptIntrinsicConvolve3x3.create(mRS, Element.U8_4(mRS));
+ Script.KernelID kid = s.getKernelID();
+ if (kid == null) {
+ throw new IllegalStateException("kid must be valid");
+ }
+
+ Script.FieldID fid = s.getFieldID_Input();
+ if (fid == null) {
+ throw new IllegalStateException("fid must be valid");
+ }
+ }
+
}
diff --git a/tests/tests/renderscript/src/android/renderscript/cts/IntrinsicConvolve5x5.java b/tests/tests/renderscript/src/android/renderscript/cts/IntrinsicConvolve5x5.java
index 0753c62..410aebd 100644
--- a/tests/tests/renderscript/src/android/renderscript/cts/IntrinsicConvolve5x5.java
+++ b/tests/tests/renderscript/src/android/renderscript/cts/IntrinsicConvolve5x5.java
@@ -184,4 +184,17 @@
checkError();
}
+ public void test_ID() {
+ ScriptIntrinsicConvolve5x5 s = ScriptIntrinsicConvolve5x5.create(mRS, Element.U8_4(mRS));
+ Script.KernelID kid = s.getKernelID();
+ if (kid == null) {
+ throw new IllegalStateException("kid must be valid");
+ }
+
+ Script.FieldID fid = s.getFieldID_Input();
+ if (fid == null) {
+ throw new IllegalStateException("fid must be valid");
+ }
+ }
+
}
diff --git a/tests/tests/renderscript/src/android/renderscript/cts/IntrinsicLut.java b/tests/tests/renderscript/src/android/renderscript/cts/IntrinsicLut.java
index 1567639..3309bb0 100644
--- a/tests/tests/renderscript/src/android/renderscript/cts/IntrinsicLut.java
+++ b/tests/tests/renderscript/src/android/renderscript/cts/IntrinsicLut.java
@@ -83,5 +83,12 @@
}
+ public void test_ID() {
+ ScriptIntrinsicLUT s = ScriptIntrinsicLUT.create(mRS, Element.U8_4(mRS));
+ Script.KernelID kid = s.getKernelID();
+ if (kid == null) {
+ throw new IllegalStateException("kid must be valid");
+ }
+ }
}
diff --git a/tests/tests/renderscript/src/android/renderscript/cts/TypeTest.java b/tests/tests/renderscript/src/android/renderscript/cts/TypeTest.java
index 13d4977..abb532d 100644
--- a/tests/tests/renderscript/src/android/renderscript/cts/TypeTest.java
+++ b/tests/tests/renderscript/src/android/renderscript/cts/TypeTest.java
@@ -143,6 +143,14 @@
assertTrue(t.getY() == 4);
}
+ public void testGetYuv() {
+ Type.Builder b = new Type.Builder(mRS, Element.F32(mRS));
+ b.setX(64).setY(64);
+ b.setYuvFormat(android.graphics.ImageFormat.YV12);
+ Type t = b.create();
+ assertTrue(t.getYuv() == android.graphics.ImageFormat.YV12);
+ }
+
public void testGetZ() {
Type.Builder b = new Type.Builder(mRS, Element.F32(mRS));
b.setX(3).setY(4);
diff --git a/tests/tests/renderscript/src/android/renderscript/cts/rsAllocationCopyTest.java b/tests/tests/renderscript/src/android/renderscript/cts/rsAllocationCopyTest.java
new file mode 100644
index 0000000..f74fa38
--- /dev/null
+++ b/tests/tests/renderscript/src/android/renderscript/cts/rsAllocationCopyTest.java
@@ -0,0 +1,539 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.renderscript.cts;
+
+import android.renderscript.Allocation;
+import android.renderscript.Element;
+import android.renderscript.Type;
+import java.util.Random;
+import android.util.Log;
+
+public class rsAllocationCopyTest extends RSBaseCompute {
+
+ public void test_rsAllocationCopy1D_Byte() {
+ Random random = new Random(0x172d8ab9);
+ int width = random.nextInt(512);
+ int arr_len = width;
+ int offset = random.nextInt(arr_len);
+ int count = random.nextInt(arr_len - offset);
+
+ byte[] inArray = new byte[arr_len];
+ byte[] outArray = new byte[arr_len];
+ random.nextBytes(inArray);
+
+ Type.Builder typeBuilder = new Type.Builder(mRS, Element.I8(mRS));
+ typeBuilder.setX(width);
+ Allocation aIn = Allocation.createTyped(mRS, typeBuilder.create());
+ Allocation aOut = Allocation.createTyped(mRS, typeBuilder.create());
+ aIn.copyFrom(inArray);
+ aOut.copyFrom(outArray);
+
+ ScriptC_rsallocationcopy s = new ScriptC_rsallocationcopy(mRS);
+ s.set_aIn1D(aIn);
+ s.set_aOut1D(aOut);
+ s.set_xOff(offset);
+ s.set_xCount(count);
+ s.invoke_test1D();
+ mRS.finish();
+ aOut.copyTo(outArray);
+
+ boolean result = true;
+ for (int i = 0; i < arr_len; i++) {
+ if (offset <= i && i < offset + count) {
+ if (inArray[i] != outArray[i]) {
+ result = false;
+ break;
+ }
+ } else {
+ if (outArray[i] != 0) {
+ result = false;
+ break;
+ }
+ }
+ }
+ assertTrue("test_rsAllocationCopy1D_Byte failed, output array does not match input",
+ result);
+ }
+
+ public void test_rsAllocationCopy1D_Short() {
+ Random random = new Random(0x172d8ab9);
+ int width = random.nextInt(512);
+ int arr_len = width;
+ int offset = random.nextInt(arr_len);
+ int count = random.nextInt(arr_len - offset);
+
+ short[] inArray = new short[arr_len];
+ short[] outArray = new short[arr_len];
+ for (int i = 0; i < arr_len; i++) {
+ inArray[i] = (short)random.nextInt();
+ }
+
+ Type.Builder typeBuilder = new Type.Builder(mRS, Element.I16(mRS));
+ typeBuilder.setX(width);
+ Allocation aIn = Allocation.createTyped(mRS, typeBuilder.create());
+ Allocation aOut = Allocation.createTyped(mRS, typeBuilder.create());
+ aIn.copyFrom(inArray);
+ aOut.copyFrom(outArray);
+
+ ScriptC_rsallocationcopy s = new ScriptC_rsallocationcopy(mRS);
+ s.set_aIn1D(aIn);
+ s.set_aOut1D(aOut);
+ s.set_xOff(offset);
+ s.set_xCount(count);
+ s.invoke_test1D();
+ mRS.finish();
+ aOut.copyTo(outArray);
+
+ boolean result = true;
+ for (int i = 0; i < arr_len; i++) {
+ if (offset <= i && i < offset + count) {
+ if (inArray[i] != outArray[i]) {
+ result = false;
+ break;
+ }
+ } else {
+ if (outArray[i] != 0) {
+ result = false;
+ break;
+ }
+ }
+ }
+ assertTrue("test_rsAllocationCopy1D_Short failed, output array does not match input",
+ result);
+ }
+
+ public void test_rsAllocationCopy1D_Int() {
+ Random random = new Random(0x172d8ab9);
+ int width = random.nextInt(512);
+ int arr_len = width;
+ int offset = random.nextInt(arr_len);
+ int count = random.nextInt(arr_len - offset);
+
+ int[] inArray = new int[arr_len];
+ int[] outArray = new int[arr_len];
+ for (int i = 0; i < arr_len; i++) {
+ inArray[i] = random.nextInt();
+ }
+
+ Type.Builder typeBuilder = new Type.Builder(mRS, Element.I32(mRS));
+ typeBuilder.setX(width);
+ Allocation aIn = Allocation.createTyped(mRS, typeBuilder.create());
+ Allocation aOut = Allocation.createTyped(mRS, typeBuilder.create());
+ aIn.copyFrom(inArray);
+ aOut.copyFrom(outArray);
+
+ ScriptC_rsallocationcopy s = new ScriptC_rsallocationcopy(mRS);
+ s.set_aIn1D(aIn);
+ s.set_aOut1D(aOut);
+ s.set_xOff(offset);
+ s.set_xCount(count);
+ s.invoke_test1D();
+ mRS.finish();
+ aOut.copyTo(outArray);
+
+ boolean result = true;
+ for (int i = 0; i < arr_len; i++) {
+ if (offset <= i && i < offset + count) {
+ if (inArray[i] != outArray[i]) {
+ result = false;
+ break;
+ }
+ } else {
+ if (outArray[i] != 0) {
+ result = false;
+ break;
+ }
+ }
+ }
+ assertTrue("test_rsAllocationCopy1D_Int failed, output array does not match input",
+ result);
+ }
+
+ public void test_rsAllocationCopy1D_Float() {
+ Random random = new Random(0x172d8ab9);
+ int width = random.nextInt(512);
+ int arr_len = width;
+ int offset = random.nextInt(arr_len);
+ int count = random.nextInt(arr_len - offset);
+
+ float[] inArray = new float[arr_len];
+ float[] outArray = new float[arr_len];
+ for (int i = 0; i < arr_len; i++) {
+ inArray[i] = random.nextFloat();
+ }
+
+ Type.Builder typeBuilder = new Type.Builder(mRS, Element.F32(mRS));
+ typeBuilder.setX(width);
+ Allocation aIn = Allocation.createTyped(mRS, typeBuilder.create());
+ Allocation aOut = Allocation.createTyped(mRS, typeBuilder.create());
+ aIn.copyFrom(inArray);
+ aOut.copyFrom(outArray);
+
+ ScriptC_rsallocationcopy s = new ScriptC_rsallocationcopy(mRS);
+ s.set_aIn1D(aIn);
+ s.set_aOut1D(aOut);
+ s.set_xOff(offset);
+ s.set_xCount(count);
+ s.invoke_test1D();
+ mRS.finish();
+ aOut.copyTo(outArray);
+
+
+ boolean result = true;
+ for (int i = 0; i < arr_len; i++) {
+ if (offset <= i && i < offset + count) {
+ if (inArray[i] != outArray[i]) {
+ result = false;
+ break;
+ }
+ } else {
+ if (outArray[i] != 0) {
+ result = false;
+ break;
+ }
+ }
+ }
+ assertTrue("test_rsAllocationCopy1D_Float failed, output array does not match input",
+ result);
+ }
+
+ public void test_rsAllocationCopy1D_Long() {
+ Random random = new Random(0x172d8ab9);
+ int width = random.nextInt(512);
+ int arr_len = width;
+ int offset = random.nextInt(arr_len);
+ int count = random.nextInt(arr_len - offset);
+
+ long[] inArray = new long[arr_len];
+ long[] outArray = new long[arr_len];
+ for (int i = 0; i < arr_len; i++) {
+ inArray[i] = random.nextLong();
+ }
+
+ Type.Builder typeBuilder = new Type.Builder(mRS, Element.I64(mRS));
+ typeBuilder.setX(width);
+ Allocation aIn = Allocation.createTyped(mRS, typeBuilder.create());
+ Allocation aOut = Allocation.createTyped(mRS, typeBuilder.create());
+ aIn.copyFrom(inArray);
+ aOut.copyFrom(outArray);
+
+ ScriptC_rsallocationcopy s = new ScriptC_rsallocationcopy(mRS);
+ s.set_aIn1D(aIn);
+ s.set_aOut1D(aOut);
+ s.set_xOff(offset);
+ s.set_xCount(count);
+ s.invoke_test1D();
+ mRS.finish();
+ aOut.copyTo(outArray);
+
+ boolean result = true;
+ for (int i = 0; i < arr_len; i++) {
+ if (offset <= i && i < offset + count) {
+ if (inArray[i] != outArray[i]) {
+ result = false;
+ break;
+ }
+ } else {
+ if (outArray[i] != 0) {
+ result = false;
+ break;
+ }
+ }
+ }
+ assertTrue("test_rsAllocationCopy1D_Long failed, output array does not match input",
+ result);
+ }
+
+
+ public void test_rsAllocationCopy2D_Byte() {
+ Random random = new Random(0x172d8ab9);
+ int width = random.nextInt(128);
+ int height = random.nextInt(128);
+ int xOff = random.nextInt(width);
+ int yOff = random.nextInt(height);
+ int xCount = random.nextInt(width - xOff);
+ int yCount = random.nextInt(height - yOff);
+ int arr_len = width * height;
+
+ byte[] inArray = new byte[arr_len];
+ byte[] outArray = new byte[arr_len];
+ random.nextBytes(inArray);
+
+ Type.Builder typeBuilder = new Type.Builder(mRS, Element.I8(mRS));
+ typeBuilder.setX(width).setY(height);
+ Allocation aIn = Allocation.createTyped(mRS, typeBuilder.create());
+ Allocation aOut = Allocation.createTyped(mRS, typeBuilder.create());
+ aIn.copyFrom(inArray);
+ aOut.copyFrom(outArray);
+
+ ScriptC_rsallocationcopy s = new ScriptC_rsallocationcopy(mRS);
+ s.set_aIn2D(aIn);
+ s.set_aOut2D(aOut);
+ s.set_xOff(xOff);
+ s.set_yOff(yOff);
+ s.set_xCount(xCount);
+ s.set_yCount(yCount);
+ s.invoke_test2D();
+ mRS.finish();
+ aOut.copyTo(outArray);
+
+ boolean result = true;
+ for (int i = 0; i < height; i++) {
+ for (int j = 0; j < width; j++) {
+ int pos = i * width + j;
+ if (yOff <= i && i < yOff + yCount &&
+ xOff <= j && j < xOff + xCount) {
+ if (inArray[pos] != outArray[pos]) {
+ result = false;
+ break;
+ }
+ } else {
+ if (outArray[pos] != 0) {
+ result = false;
+ break;
+ }
+ }
+ }
+ }
+ assertTrue("test_rsAllocationCopy2D_Byte failed, output array does not match input",
+ result);
+ }
+
+ public void test_rsAllocationCopy2D_Short() {
+ Random random = new Random(0x172d8ab9);
+ int width = random.nextInt(128);
+ int height = random.nextInt(128);
+ int xOff = random.nextInt(width);
+ int yOff = random.nextInt(height);
+ int xCount = random.nextInt(width - xOff);
+ int yCount = random.nextInt(height - yOff);
+ int arr_len = width * height;
+
+ short[] inArray = new short[arr_len];
+ short[] outArray = new short[arr_len];
+ for (int i = 0; i < arr_len; i++) {
+ inArray[i] = (short)random.nextInt();
+ }
+
+ Type.Builder typeBuilder = new Type.Builder(mRS, Element.I16(mRS));
+ typeBuilder.setX(width).setY(height);
+ Allocation aIn = Allocation.createTyped(mRS, typeBuilder.create());
+ Allocation aOut = Allocation.createTyped(mRS, typeBuilder.create());
+ aIn.copyFrom(inArray);
+ aOut.copyFrom(outArray);
+
+ ScriptC_rsallocationcopy s = new ScriptC_rsallocationcopy(mRS);
+ s.set_aIn2D(aIn);
+ s.set_aOut2D(aOut);
+ s.set_xOff(xOff);
+ s.set_yOff(yOff);
+ s.set_xCount(xCount);
+ s.set_yCount(yCount);
+ s.invoke_test2D();
+ mRS.finish();
+ aOut.copyTo(outArray);
+
+ boolean result = true;
+ for (int i = 0; i < height; i++) {
+ for (int j = 0; j < width; j++) {
+ int pos = i * width + j;
+ if (yOff <= i && i < yOff + yCount &&
+ xOff <= j && j < xOff + xCount) {
+ if (inArray[pos] != outArray[pos]) {
+ result = false;
+ break;
+ }
+ } else {
+ if (outArray[pos] != 0) {
+ result = false;
+ break;
+ }
+ }
+ }
+ }
+ assertTrue("test_rsAllocationCopy2D_Short failed, output array does not match input",
+ result);
+ }
+
+ public void test_rsAllocationCopy2D_Int() {
+ Random random = new Random(0x172d8ab9);
+ int width = random.nextInt(128);
+ int height = random.nextInt(128);
+ int xOff = random.nextInt(width);
+ int yOff = random.nextInt(height);
+ int xCount = random.nextInt(width - xOff);
+ int yCount = random.nextInt(height - yOff);
+ int arr_len = width * height;
+
+ int[] inArray = new int[arr_len];
+ int[] outArray = new int[arr_len];
+ for (int i = 0; i < arr_len; i++) {
+ inArray[i] = random.nextInt();
+ }
+
+ Type.Builder typeBuilder = new Type.Builder(mRS, Element.I32(mRS));
+ typeBuilder.setX(width).setY(height);
+ Allocation aIn = Allocation.createTyped(mRS, typeBuilder.create());
+ Allocation aOut = Allocation.createTyped(mRS, typeBuilder.create());
+ aIn.copyFrom(inArray);
+ aOut.copyFrom(outArray);
+
+ ScriptC_rsallocationcopy s = new ScriptC_rsallocationcopy(mRS);
+ s.set_aIn2D(aIn);
+ s.set_aOut2D(aOut);
+ s.set_xOff(xOff);
+ s.set_yOff(yOff);
+ s.set_xCount(xCount);
+ s.set_yCount(yCount);
+ s.invoke_test2D();
+ mRS.finish();
+ aOut.copyTo(outArray);
+
+ boolean result = true;
+ for (int i = 0; i < height; i++) {
+ for (int j = 0; j < width; j++) {
+ int pos = i * width + j;
+ if (yOff <= i && i < yOff + yCount &&
+ xOff <= j && j < xOff + xCount) {
+ if (inArray[pos] != outArray[pos]) {
+ result = false;
+ break;
+ }
+ } else {
+ if (outArray[pos] != 0) {
+ result = false;
+ break;
+ }
+ }
+ }
+ }
+ assertTrue("test_rsAllocationCopy2D_Int failed, output array does not match input",
+ result);
+ }
+
+ public void test_rsAllocationCopy2D_Float() {
+ Random random = new Random(0x172d8ab9);
+ int width = random.nextInt(128);
+ int height = random.nextInt(128);
+ int xOff = random.nextInt(width);
+ int yOff = random.nextInt(height);
+ int xCount = random.nextInt(width - xOff);
+ int yCount = random.nextInt(height - yOff);
+ int arr_len = width * height;
+
+ float[] inArray = new float[arr_len];
+ float[] outArray = new float[arr_len];
+ for (int i = 0; i < arr_len; i++) {
+ inArray[i] = random.nextFloat();
+ }
+
+ Type.Builder typeBuilder = new Type.Builder(mRS, Element.F32(mRS));
+ typeBuilder.setX(width).setY(height);
+ Allocation aIn = Allocation.createTyped(mRS, typeBuilder.create());
+ Allocation aOut = Allocation.createTyped(mRS, typeBuilder.create());
+ aIn.copyFrom(inArray);
+ aOut.copyFrom(outArray);
+
+ ScriptC_rsallocationcopy s = new ScriptC_rsallocationcopy(mRS);
+ s.set_aIn2D(aIn);
+ s.set_aOut2D(aOut);
+ s.set_xOff(xOff);
+ s.set_yOff(yOff);
+ s.set_xCount(xCount);
+ s.set_yCount(yCount);
+ s.invoke_test2D();
+ mRS.finish();
+ aOut.copyTo(outArray);
+
+ boolean result = true;
+ for (int i = 0; i < height; i++) {
+ for (int j = 0; j < width; j++) {
+ int pos = i * width + j;
+ if (yOff <= i && i < yOff + yCount &&
+ xOff <= j && j < xOff + xCount) {
+ if (inArray[pos] != outArray[pos]) {
+ result = false;
+ break;
+ }
+ } else {
+ if (outArray[pos] != 0) {
+ result = false;
+ break;
+ }
+ }
+ }
+ }
+ assertTrue("test_rsAllocationCopy2D_Float failed, output array does not match input",
+ result);
+ }
+
+ public void test_rsAllocationCopy2D_Long() {
+ Random random = new Random(0x172d8ab9);
+ int width = random.nextInt(128);
+ int height = random.nextInt(128);
+ int xOff = random.nextInt(width);
+ int yOff = random.nextInt(height);
+ int xCount = random.nextInt(width - xOff);
+ int yCount = random.nextInt(height - yOff);
+ int arr_len = width * height;
+
+ long[] inArray = new long[arr_len];
+ long[] outArray = new long[arr_len];
+ for (int i = 0; i < arr_len; i++) {
+ inArray[i] = random.nextLong();
+ }
+
+ Type.Builder typeBuilder = new Type.Builder(mRS, Element.I64(mRS));
+ typeBuilder.setX(width).setY(height);
+ Allocation aIn = Allocation.createTyped(mRS, typeBuilder.create());
+ Allocation aOut = Allocation.createTyped(mRS, typeBuilder.create());
+ aIn.copyFrom(inArray);
+ aOut.copyFrom(outArray);
+
+ ScriptC_rsallocationcopy s = new ScriptC_rsallocationcopy(mRS);
+ s.set_aIn2D(aIn);
+ s.set_aOut2D(aOut);
+ s.set_xOff(xOff);
+ s.set_yOff(yOff);
+ s.set_xCount(xCount);
+ s.set_yCount(yCount);
+ s.invoke_test2D();
+ mRS.finish();
+ aOut.copyTo(outArray);
+
+ boolean result = true;
+ for (int i = 0; i < height; i++) {
+ for (int j = 0; j < width; j++) {
+ int pos = i * width + j;
+ if (yOff <= i && i < yOff + yCount &&
+ xOff <= j && j < xOff + xCount) {
+ if (inArray[pos] != outArray[pos]) {
+ result = false;
+ break;
+ }
+ } else {
+ if (outArray[pos] != 0) {
+ result = false;
+ break;
+ }
+ }
+ }
+ }
+ assertTrue("test_rsAllocationCopy2D_Long failed, output array does not match input",
+ result);
+ }
+}
diff --git a/tests/tests/renderscript/src/android/renderscript/cts/rsallocationcopy.rs b/tests/tests/renderscript/src/android/renderscript/cts/rsallocationcopy.rs
new file mode 100644
index 0000000..4d76493
--- /dev/null
+++ b/tests/tests/renderscript/src/android/renderscript/cts/rsallocationcopy.rs
@@ -0,0 +1,19 @@
+#include "shared.rsh"
+
+rs_allocation aIn1D;
+rs_allocation aOut1D;
+rs_allocation aIn2D;
+rs_allocation aOut2D;
+
+int xOff = 0;
+int yOff = 0;
+int xCount = 0;
+int yCount = 0;
+
+void test1D() {
+ rsAllocationCopy1DRange(aOut1D, xOff, 0, xCount, aIn1D, xOff, 0);
+}
+
+void test2D() {
+ rsAllocationCopy2DRange(aOut2D, xOff, yOff, 0, 0, xCount, yCount, aIn2D, xOff, yOff, 0, 0);
+}
diff --git a/tests/tests/security/jni/android_security_cts_NativeCodeTest.cpp b/tests/tests/security/jni/android_security_cts_NativeCodeTest.cpp
index 3b63ba9..00765c6 100644
--- a/tests/tests/security/jni/android_security_cts_NativeCodeTest.cpp
+++ b/tests/tests/security/jni/android_security_cts_NativeCodeTest.cpp
@@ -212,7 +212,7 @@
bool vulnerable = false;
if (nvmap >= 0) {
- if (0 >= ioctl(nvmap, NVMAP_IOC_FROM_ID)) {
+ if (0 == ioctl(nvmap, NVMAP_IOC_FROM_ID)) {
/* IOCTL succeeded */
vulnerable = true;
}
diff --git a/tests/tests/speech/src/android/speech/tts/cts/StubTextToSpeechService.java b/tests/tests/speech/src/android/speech/tts/cts/StubTextToSpeechService.java
index 7b5baca..88bdc74 100644
--- a/tests/tests/speech/src/android/speech/tts/cts/StubTextToSpeechService.java
+++ b/tests/tests/speech/src/android/speech/tts/cts/StubTextToSpeechService.java
@@ -20,8 +20,12 @@
import android.speech.tts.SynthesisRequest;
import android.speech.tts.TextToSpeech;
import android.speech.tts.TextToSpeechService;
+import android.speech.tts.TtsEngines;
import android.util.Log;
+import java.util.ArrayList;
+import java.util.Locale;
+
/**
* Stub implementation of {@link TextToSpeechService}. Used for testing the
* TTS engine API.
@@ -32,6 +36,17 @@
// Object that onSynthesizeText will #wait on, if set to non-null
public static volatile Object sSynthesizeTextWait;
+ private ArrayList<Locale> supportedLanguages = new ArrayList<Locale>();
+ private ArrayList<Locale> supportedCountries = new ArrayList<Locale>();
+ private ArrayList<Locale> GBFallbacks = new ArrayList<Locale>();
+
+ public StubTextToSpeechService() {
+ supportedLanguages.add(new Locale("eng"));
+ supportedCountries.add(new Locale("eng", "USA"));
+ supportedCountries.add(new Locale("eng", "GBR"));
+ GBFallbacks.add(new Locale("eng", "NZL"));
+ }
+
@Override
protected String[] onGetLanguage() {
return new String[] { "eng", "USA", "" };
@@ -39,12 +54,19 @@
@Override
protected int onIsLanguageAvailable(String lang, String country, String variant) {
- return TextToSpeech.LANG_AVAILABLE;
+ if (supportedCountries.contains(new Locale(lang, country))) {
+ return TextToSpeech.LANG_COUNTRY_AVAILABLE;
+ }
+ if (supportedLanguages.contains(new Locale(lang))) {
+ return TextToSpeech.LANG_AVAILABLE;
+ }
+
+ return TextToSpeech.LANG_NOT_SUPPORTED;
}
@Override
protected int onLoadLanguage(String lang, String country, String variant) {
- return TextToSpeech.LANG_AVAILABLE;
+ return onIsLanguageAvailable(lang, country, variant);
}
@Override
@@ -77,4 +99,20 @@
}
}
+ @Override
+ public String onGetDefaultVoiceNameFor(String lang, String country, String variant) {
+ Locale locale = new Locale(lang, country);
+ if (supportedCountries.contains(locale)) {
+ return TtsEngines.normalizeTTSLocale(locale).toLanguageTag();
+ }
+ if (lang.equals("eng")) {
+ if (GBFallbacks.contains(new Locale(lang, country))) {
+ return "en-GB";
+ } else {
+ return "en-US";
+ }
+ }
+ return super.onGetDefaultVoiceNameFor(lang, country, variant);
+ }
+
}
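Illustrative note (not part of this change): from the client side, the availability codes the stub engine now returns surface through TextToSpeech.isLanguageAvailable(). A hedged sketch of interpreting them, assuming an initialized TextToSpeech instance bound to this engine:

// Illustrative sketch only: LANG_AVAILABLE (0) and better mean the language
// can be used; LANG_MISSING_DATA and LANG_NOT_SUPPORTED are negative.
import android.speech.tts.TextToSpeech;

import java.util.Locale;

final class LanguageCheck {
    static boolean isUsable(TextToSpeech tts, Locale locale) {
        int code = tts.isLanguageAvailable(locale);
        return code >= TextToSpeech.LANG_AVAILABLE;
    }
}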
diff --git a/tests/tests/speech/src/android/speech/tts/cts/TextToSpeechTest.java b/tests/tests/speech/src/android/speech/tts/cts/TextToSpeechTest.java
index c83304c..013a5ea 100644
--- a/tests/tests/speech/src/android/speech/tts/cts/TextToSpeechTest.java
+++ b/tests/tests/speech/src/android/speech/tts/cts/TextToSpeechTest.java
@@ -94,6 +94,25 @@
return false;
}
+ private void assertContainsEngine(String engine, List<TextToSpeech.EngineInfo> engines) {
+ for (TextToSpeech.EngineInfo engineInfo : engines) {
+ if (engineInfo.name.equals(engine)) {
+ return;
+ }
+ }
+ fail("Engine " + engine + " not found");
+ }
+
+ private HashMap<String, String> createParams() {
+ HashMap<String, String> params = new HashMap<String,String>();
+ params.put(TextToSpeech.Engine.KEY_PARAM_UTTERANCE_ID, UTTERANCE_ID);
+ return params;
+ }
+
+ private boolean waitForUtterance() throws InterruptedException {
+ return mTts.waitForComplete(UTTERANCE_ID);
+ }
+
public void testSynthesizeToFile() throws Exception {
if (mTts == null) {
return;
@@ -124,7 +143,6 @@
assertTrue("speak() completion timeout", waitForUtterance());
}
-
public void testSpeakStop() throws Exception {
getTts().stop();
final int iterations = 20;
@@ -157,24 +175,4 @@
assertNotNull("getEngines() returned null", engines);
assertContainsEngine(TextToSpeechWrapper.MOCK_TTS_ENGINE, engines);
}
-
- private void assertContainsEngine(String engine, List<TextToSpeech.EngineInfo> engines) {
- for (TextToSpeech.EngineInfo engineInfo : engines) {
- if (engineInfo.name.equals(engine)) {
- return;
- }
- }
- fail("Engine " + engine + " not found");
- }
-
- private HashMap<String, String> createParams() {
- HashMap<String, String> params = new HashMap<String,String>();
- params.put(TextToSpeech.Engine.KEY_PARAM_UTTERANCE_ID, UTTERANCE_ID);
- return params;
- }
-
- private boolean waitForUtterance() throws InterruptedException {
- return mTts.waitForComplete(UTTERANCE_ID);
- }
-
}
diff --git a/tests/tests/telephony/src/android/telephony/cts/SmsManagerTest.java b/tests/tests/telephony/src/android/telephony/cts/SmsManagerTest.java
index 39f5177..d3d15a5 100644
--- a/tests/tests/telephony/src/android/telephony/cts/SmsManagerTest.java
+++ b/tests/tests/telephony/src/android/telephony/cts/SmsManagerTest.java
@@ -49,6 +49,10 @@
"three separate messages.This is a very long text. This text should be broken " +
"into three separate messages.This is a very long text. This text should be " +
"broken into three separate messages.";;
+ private static final String LONG_TEXT_WITH_32BIT_CHARS =
+ "Long dkkshsh jdjsusj kbsksbdf jfkhcu hhdiwoqiwyrygrvn?*?*!\";:'/,."
+ + "__?9#9292736&4;\"$+$+((]\\[\\℅©℅™^®°¥°¥=¢£}}£∆~¶~÷|√×."
+ + " 😯😆😉😇😂😀👕🎓😀👙🐕🐀🐶🐰🐩⛪⛲ ";
private static final String SMS_SEND_ACTION = "CTS_SMS_SEND_ACTION";
private static final String SMS_DELIVERY_ACTION = "CTS_SMS_DELIVERY_ACTION";
@@ -209,17 +213,28 @@
public void testDivideMessage() {
ArrayList<String> dividedMessages = divideMessage(LONG_TEXT);
assertNotNull(dividedMessages);
- int numParts;
if (TelephonyUtils.isSkt(mTelephonyManager)) {
- assertTrue(isComplete(dividedMessages, 5) || isComplete(dividedMessages, 3));
+ assertTrue(isComplete(dividedMessages, 5, LONG_TEXT)
+ || isComplete(dividedMessages, 3, LONG_TEXT));
} else if (TelephonyUtils.isKt(mTelephonyManager)) {
- assertTrue(isComplete(dividedMessages, 4) || isComplete(dividedMessages, 3));
+ assertTrue(isComplete(dividedMessages, 4, LONG_TEXT)
+ || isComplete(dividedMessages, 3, LONG_TEXT));
} else {
- assertTrue(isComplete(dividedMessages, 3));
+ assertTrue(isComplete(dividedMessages, 3, LONG_TEXT));
}
}
- private boolean isComplete(List<String> dividedMessages, int numParts) {
+ public void testDivideUnicodeMessage() {
+ ArrayList<String> dividedMessages = divideMessage(LONG_TEXT_WITH_32BIT_CHARS);
+ assertNotNull(dividedMessages);
+ assertTrue(isComplete(dividedMessages, 3, LONG_TEXT_WITH_32BIT_CHARS));
+ for (String messagePiece : dividedMessages) {
+ assertFalse(Character.isHighSurrogate(
+ messagePiece.charAt(messagePiece.length() - 1)));
+ }
+ }
+
+ private boolean isComplete(List<String> dividedMessages, int numParts, String longText) {
if (dividedMessages.size() != numParts) {
return false;
}
@@ -228,7 +243,7 @@
for (int i = 0; i < numParts; i++) {
actualMessage += dividedMessages.get(i);
}
- return LONG_TEXT.equals(actualMessage);
+ return longText.equals(actualMessage);
}
public void testSendMessages() throws InterruptedException {
@@ -369,9 +384,10 @@
Bundle bundle = intent.getExtras();
if (bundle != null) {
Object[] obj = (Object[]) bundle.get("pdus");
+ String format = bundle.getString("format");
SmsMessage[] message = new SmsMessage[obj.length];
for (int i = 0; i < obj.length; i++) {
- message[i] = SmsMessage.createFromPdu((byte[]) obj[i]);
+ message[i] = SmsMessage.createFromPdu((byte[]) obj[i], format);
}
for (SmsMessage currentMessage : message) {
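Illustrative note (not part of this change): the new testDivideUnicodeMessage relies on divideMessage() never splitting inside a surrogate pair, since a part ending on a high surrogate would corrupt 32-bit characters such as emoji. A hedged sketch of that check outside the test harness:

// Illustrative sketch only: verify no message part ends mid code point.
import android.telephony.SmsManager;

import java.util.ArrayList;

final class SurrogateSafeSplit {
    static boolean splitsOnCharacterBoundaries(String text) {
        ArrayList<String> parts = SmsManager.getDefault().divideMessage(text);
        for (String part : parts) {
            if (!part.isEmpty()
                    && Character.isHighSurrogate(part.charAt(part.length() - 1))) {
                return false; // part ends in the middle of a surrogate pair
            }
        }
        return true;
    }
}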
diff --git a/tests/tests/telephony/src/android/telephony/cts/SmsMessageTest.java b/tests/tests/telephony/src/android/telephony/cts/SmsMessageTest.java
index ff7b097..d90e394 100644
--- a/tests/tests/telephony/src/android/telephony/cts/SmsMessageTest.java
+++ b/tests/tests/telephony/src/android/telephony/cts/SmsMessageTest.java
@@ -62,6 +62,10 @@
private static final long TIMESTAMP_MILLIS = 1149631383000l;
private static final int SEPTETS_SKT = 80;
private static final int SEPTETS_KT = 90;
+ private static final String LONG_TEXT_WITH_32BIT_CHARS =
+ "Long dkkshsh jdjsusj kbsksbdf jfkhcu hhdiwoqiwyrygrvn?*?*!\";:'/,."
+ + "__?9#9292736&4;\"$+$+((]\\[\\℅©℅™^®°¥°¥=¢£}}£∆~¶~÷|√×."
+ + " 😯😆😉😇😂😀👕🎓😀👙🐕🐀🐶🐰🐩⛪⛲ ";
@Override
protected void setUp() throws Exception {
@@ -71,7 +75,6 @@
mPackageManager = getContext().getPackageManager();
}
- @SuppressWarnings("deprecation")
public void testCreateFromPdu() throws Exception {
if (!mPackageManager.hasSystemFeature(PackageManager.FEATURE_TELEPHONY)
|| mPackageManager.hasSystemFeature(PackageManager.FEATURE_TELEPHONY_CDMA)) {
@@ -80,7 +83,8 @@
}
String pdu = "07916164260220F0040B914151245584F600006060605130308A04D4F29C0E";
- SmsMessage sms = SmsMessage.createFromPdu(hexStringToByteArray(pdu));
+ SmsMessage sms = SmsMessage.createFromPdu(hexStringToByteArray(pdu),
+ SmsMessage.FORMAT_3GPP);
assertEquals(SCA1, sms.getServiceCenterAddress());
assertEquals(OA1, sms.getOriginatingAddress());
assertEquals(MESSAGE_BODY1, sms.getMessageBody());
@@ -88,7 +92,7 @@
int[] result = SmsMessage.calculateLength(sms.getMessageBody(), true);
assertEquals(SMS_NUMBER1, result[0]);
assertEquals(sms.getMessageBody().length(), result[1]);
- assertRemaining(sms.getMessageBody().length(), result[2]);
+ assertRemaining(sms.getMessageBody().length(), result[2], SmsMessage.MAX_USER_DATA_SEPTETS);
assertEquals(SmsMessage.ENCODING_7BIT, result[3]);
assertEquals(pdu, toHexString(sms.getPdu()));
@@ -106,13 +110,13 @@
assertEquals(TIMESTAMP_MILLIS, sms.getTimestampMillis());
// Test create from null Pdu
- sms = SmsMessage.createFromPdu(null);
+ sms = SmsMessage.createFromPdu(null, SmsMessage.FORMAT_3GPP);
assertNotNull(sms);
- //Test create from long Pdu
+ // Test create from long Pdu
pdu = "07912160130310F2040B915121927786F300036060924180008A0DA"
+ "8695DAC2E8FE9296A794E07";
- sms = SmsMessage.createFromPdu(hexStringToByteArray(pdu));
+ sms = SmsMessage.createFromPdu(hexStringToByteArray(pdu), SmsMessage.FORMAT_3GPP);
assertEquals(SCA2, sms.getServiceCenterAddress());
assertEquals(OA2, sms.getOriginatingAddress());
assertEquals(MESSAGE_BODY2, sms.getMessageBody());
@@ -120,30 +124,30 @@
result = SmsMessage.calculateLength(msgBody, false);
assertEquals(SMS_NUMBER2, result[0]);
assertEquals(sms.getMessageBody().length(), result[1]);
- assertRemaining(sms.getMessageBody().length(), result[2]);
+ assertRemaining(sms.getMessageBody().length(), result[2], SmsMessage.MAX_USER_DATA_SEPTETS);
assertEquals(SmsMessage.ENCODING_7BIT, result[3]);
// Test createFromPdu Ucs to Sms
pdu = "07912160130300F4040B914151245584"
+ "F600087010807121352B10212200A900AE00680065006C006C006F";
- sms = SmsMessage.createFromPdu(hexStringToByteArray(pdu));
+ sms = SmsMessage.createFromPdu(hexStringToByteArray(pdu), SmsMessage.FORMAT_3GPP);
assertEquals(MESSAGE_BODY3, sms.getMessageBody());
result = SmsMessage.calculateLength(sms.getMessageBody(), true);
assertEquals(SMS_NUMBER3, result[0]);
assertEquals(sms.getMessageBody().length(), result[1]);
- assertRemaining(sms.getMessageBody().length(), result[2]);
+ assertRemaining(sms.getMessageBody().length(), result[2], SmsMessage.MAX_USER_DATA_SEPTETS);
assertEquals(SmsMessage.ENCODING_7BIT, result[3]);
}
- private void assertRemaining(int messageLength, int remaining) {
+ private void assertRemaining(int messageLength, int remaining, int maxChars) {
if (TelephonyUtils.isSkt(mTelephonyManager)) {
assertTrue(checkRemaining(SEPTETS_SKT, messageLength, remaining)
- || checkRemaining(SmsMessage.MAX_USER_DATA_SEPTETS, messageLength, remaining));
+ || checkRemaining(maxChars, messageLength, remaining));
} else if (TelephonyUtils.isKt(mTelephonyManager)) {
assertTrue(checkRemaining(SEPTETS_KT, messageLength, remaining)
- || checkRemaining(SmsMessage.MAX_USER_DATA_SEPTETS, messageLength, remaining));
+ || checkRemaining(maxChars, messageLength, remaining));
} else {
- assertTrue(checkRemaining(SmsMessage.MAX_USER_DATA_SEPTETS, messageLength, remaining));
+ assertTrue(checkRemaining(maxChars, messageLength, remaining));
}
}
@@ -160,7 +164,8 @@
// "set MWI flag"
String pdu = "07912160130310F20404D0110041006060627171118A0120";
- SmsMessage sms = SmsMessage.createFromPdu(hexStringToByteArray(pdu));
+ SmsMessage sms = SmsMessage.createFromPdu(hexStringToByteArray(pdu),
+ SmsMessage.FORMAT_3GPP);
assertTrue(sms.isReplace());
assertEquals(OA3, sms.getOriginatingAddress());
assertEquals(MESSAGE_BODY4, sms.getMessageBody());
@@ -168,12 +173,12 @@
// "clear mwi flag"
pdu = "07912160130310F20404D0100041006021924193352B0120";
- sms = SmsMessage.createFromPdu(hexStringToByteArray(pdu));
+ sms = SmsMessage.createFromPdu(hexStringToByteArray(pdu), SmsMessage.FORMAT_3GPP);
assertTrue(sms.isMWIClearMessage());
// "clear MWI flag"
pdu = "07912160130310F20404D0100041006060627161058A0120";
- sms = SmsMessage.createFromPdu(hexStringToByteArray(pdu));
+ sms = SmsMessage.createFromPdu(hexStringToByteArray(pdu), SmsMessage.FORMAT_3GPP);
assertTrue(sms.isReplace());
assertEquals(OA4, sms.getOriginatingAddress());
assertEquals(MESSAGE_BODY5, sms.getMessageBody());
@@ -181,13 +186,13 @@
// "set MWI flag"
pdu = "07912180958750F84401800500C87020026195702B06040102000200";
- sms = SmsMessage.createFromPdu(hexStringToByteArray(pdu));
+ sms = SmsMessage.createFromPdu(hexStringToByteArray(pdu), SmsMessage.FORMAT_3GPP);
assertTrue(sms.isMWISetMessage());
assertTrue(sms.isMwiDontStore());
// "clear mwi flag"
pdu = "07912180958750F84401800500C07020027160112B06040102000000";
- sms = SmsMessage.createFromPdu(hexStringToByteArray(pdu));
+ sms = SmsMessage.createFromPdu(hexStringToByteArray(pdu), SmsMessage.FORMAT_3GPP);
assertTrue(sms.isMWIClearMessage());
assertTrue(sms.isMwiDontStore());
@@ -206,7 +211,8 @@
+ "66C414141414D7741414236514141414141008D908918802B3135313232393737"
+ "3638332F545950453D504C4D4E008A808E022B918805810306977F83687474703"
+ "A2F2F36";
- SmsMessage sms = SmsMessage.createFromPdu(hexStringToByteArray(pdu));
+ SmsMessage sms = SmsMessage.createFromPdu(hexStringToByteArray(pdu),
+ SmsMessage.FORMAT_3GPP);
byte[] userData = sms.getUserData();
assertNotNull(userData);
}
@@ -265,7 +271,8 @@
String pdu = "07914151551512f204038105f300007011103164638a28e6f71b50c687db" +
"7076d9357eb7412f7a794e07cdeb6275794c07bde8e5391d247e93f3";
- SmsMessage sms = SmsMessage.createFromPdu(hexStringToByteArray(pdu));
+ SmsMessage sms = SmsMessage.createFromPdu(hexStringToByteArray(pdu),
+ SmsMessage.FORMAT_3GPP);
assertEquals(SCA4, sms.getServiceCenterAddress());
assertTrue(sms.isEmail());
assertEquals(EMAIL_ADD, sms.getEmailFrom());
@@ -277,7 +284,7 @@
pdu = "07914151551512f204038105f400007011103105458a29e6f71b50c687db" +
"7076d9357eb741af0d0a442fcfe9c23739bfe16d289bdee6b5f1813629";
- sms = SmsMessage.createFromPdu(hexStringToByteArray(pdu));
+ sms = SmsMessage.createFromPdu(hexStringToByteArray(pdu), SmsMessage.FORMAT_3GPP);
assertEquals(SCA3, sms.getServiceCenterAddress());
assertTrue(sms.isEmail());
assertEquals(OA, sms.getDisplayOriginatingAddress());
@@ -286,6 +293,22 @@
assertEquals(MB, sms.getEmailBody());
}
+ public void testCalculateLength() throws Exception {
+ if (!mPackageManager.hasSystemFeature(PackageManager.FEATURE_TELEPHONY)) {
+ return;
+ }
+
+ int[] result = SmsMessage.calculateLength(LONG_TEXT_WITH_32BIT_CHARS, false);
+ assertEquals(3, result[0]);
+ assertEquals(LONG_TEXT_WITH_32BIT_CHARS.length(), result[1]);
+ assertRemaining(LONG_TEXT_WITH_32BIT_CHARS.length(), result[2],
+ // 3 parts, each with (SmsMessage.MAX_USER_DATA_BYTES_WITH_HEADER / 2) 16-bit
+ // characters. We need to subtract one because a 32-bit character crosses the
+ // boundary of 2 parts.
+ 3 * SmsMessage.MAX_USER_DATA_BYTES_WITH_HEADER / 2 - 1);
+ assertEquals(SmsMessage.ENCODING_16BIT, result[3]);
+ }
+
private final static char[] HEX_DIGITS = { '0', '1', '2', '3', '4', '5', '6', '7', '8', '9',
'A', 'B', 'C', 'D', 'E', 'F' };
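Illustrative note (not part of this change): the expected "remaining" value in testCalculateLength comes from simple arithmetic. Assuming the usual AOSP value of 134 for MAX_USER_DATA_BYTES_WITH_HEADER (140 bytes of user data minus a 6-byte concatenation header), each UCS-2 part holds 134 / 2 = 67 characters, so three parts hold 3 * 67 - 1 = 200 characters once one slot is lost to the 32-bit character straddling a part boundary:

// Illustrative arithmetic only, using the constant the comment above relies on.
import android.telephony.SmsMessage;

final class Ucs2CapacityMath {
    static int threePartUcs2Capacity() {
        int charsPerPart = SmsMessage.MAX_USER_DATA_BYTES_WITH_HEADER / 2; // 67 on AOSP
        return 3 * charsPerPart - 1; // 200 on AOSP
    }
}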
diff --git a/tests/tests/telephony/src/android/telephony/cts/SubscriptionManagerTest.java b/tests/tests/telephony/src/android/telephony/cts/SubscriptionManagerTest.java
index 412bdc4..9b08397 100644
--- a/tests/tests/telephony/src/android/telephony/cts/SubscriptionManagerTest.java
+++ b/tests/tests/telephony/src/android/telephony/cts/SubscriptionManagerTest.java
@@ -52,89 +52,6 @@
super.tearDown();
}
- // OnSubscriptionsChange event gets triggered when subscriptions present are either
- // added/removed or when their contents are changed. Performing this test only when contents of
- // SubscriptionInfoRecord are changed so that OnSubscriptionsChange event can be simulated. It
- // is difficult to generate the event manually without pulling in/out sim card so generating
- // testcase when there are no Subscriptions present is skipped.
- public void testAddOnSubscriptionsChangedListener () throws Throwable {
- if (mCm.getNetworkInfo(ConnectivityManager.TYPE_MOBILE) == null) {
- Log.d(TAG, "Skipping test that requires ConnectivityManager.TYPE_MOBILE");
- return;
- }
- final List<SubscriptionInfo> subList = mSubscriptionManager.getActiveSubscriptionInfoList();
- if (subList == null || subList.size() == 0) {
- Log.d(TAG, "Skipping test when there are no active subscriptions");
- return;
- }
-
- TestThread t = new TestThread(new Runnable() {
- public void run() {
- Looper.prepare();
-
- mListener = new SubscriptionManager.OnSubscriptionsChangedListener() {
- @Override
- public void onSubscriptionsChanged() {
- synchronized(mLock) {
- mOnSubscriptionsChangedCalled = true;
- mLock.notify();
- }
- }
- };
- mSubscriptionManager.addOnSubscriptionsChangedListener(mListener);
- // Simulate onSubscriptionsChanged event
- mSubscriptionManager.setDisplayName("Test1", subList.get(0).getSubscriptionId());
- Looper.loop();
- }
- });
- mOnSubscriptionsChangedCalled = false;
- t.start();
- synchronized (mLock) {
- while (!mOnSubscriptionsChangedCalled) {
- mLock.wait();
- }
- }
- assertTrue(mOnSubscriptionsChangedCalled);
- }
-
- public void testRemoveOnSubscriptionsChangedListener () throws Throwable {
- if (mCm.getNetworkInfo(ConnectivityManager.TYPE_MOBILE) == null) {
- Log.d(TAG, "Skipping test that requires ConnectivityManager.TYPE_MOBILE");
- return;
- }
- final List<SubscriptionInfo> subList = mSubscriptionManager.getActiveSubscriptionInfoList();
- if (subList == null || subList.size() == 0) {
- Log.d(TAG, "Skipping test when there are no active subscriptions");
- return;
- }
- TestThread t = new TestThread(new Runnable() {
- public void run() {
- Looper.prepare();
- mListener = new SubscriptionManager.OnSubscriptionsChangedListener() {
- @Override
- public void onSubscriptionsChanged() {
- synchronized(mLock) {
- mOnSubscriptionsChangedCalled = true;
- mLock.notify();
- }
- }
- };
- // unregister the listener
- mSubscriptionManager.removeOnSubscriptionsChangedListener(mListener);
- // Simulate onSubscriptionsChanged event
- mSubscriptionManager.setDisplayName("Test2", subList.get(0).getSubscriptionId());
- Looper.loop();
- }
- });
-
- mOnSubscriptionsChangedCalled = false;
- t.start();
- synchronized (mLock) {
- mLock.wait(TOLERANCE);
- }
- assertFalse(mOnSubscriptionsChangedCalled);
- }
-
public void testGetActiveSubscriptionInfoCount() {
if (mCm.getNetworkInfo(ConnectivityManager.TYPE_MOBILE) == null) {
Log.d(TAG, "Skipping test that requires ConnectivityManager.TYPE_MOBILE");
diff --git a/tests/tests/text/src/android/text/cts/BidiFormatterTest.java b/tests/tests/text/src/android/text/cts/BidiFormatterTest.java
index 645ab5b..5ace8b2 100644
--- a/tests/tests/text/src/android/text/cts/BidiFormatterTest.java
+++ b/tests/tests/text/src/android/text/cts/BidiFormatterTest.java
@@ -107,6 +107,9 @@
}
public void testUnicodeWrap() {
+ // Make sure an input of null doesn't crash anything.
+ assertNull(LTR_FMT.unicodeWrap(null));
+
// Uniform directionality in opposite context.
assertEquals("uniform dir opposite to LTR context",
RLE + "." + HE + "." + PDF + LRM,
diff --git a/tests/tests/tv/src/android/media/tv/cts/TvContractTest.java b/tests/tests/tv/src/android/media/tv/cts/TvContractTest.java
index 2082d3f..b4bc6eb 100644
--- a/tests/tests/tv/src/android/media/tv/cts/TvContractTest.java
+++ b/tests/tests/tv/src/android/media/tv/cts/TvContractTest.java
@@ -26,6 +26,8 @@
import android.graphics.BitmapFactory;
import android.media.tv.TvContentRating;
import android.media.tv.TvContract;
+import android.media.tv.TvContract.Channels;
+import android.media.tv.TvContract.Programs.Genres;
import android.net.Uri;
import android.test.AndroidTestCase;
@@ -33,6 +35,8 @@
import java.io.InputStream;
import java.io.OutputStream;
+import java.util.Arrays;
+import java.util.List;
/**
* Test for {@link android.media.tv.TvContract}.
@@ -80,6 +84,11 @@
private static long OPERATION_TIME = 1000l;
+ private static final String ENCODED_GENRE_STRING = Genres.ANIMAL_WILDLIFE + "," + Genres.COMEDY
+ + "," + Genres.DRAMA + "," + Genres.EDUCATION + "," + Genres.FAMILY_KIDS + ","
+ + Genres.GAMING + "," + Genres.MOVIES + "," + Genres.NEWS + "," + Genres.SHOPPING + ","
+ + Genres.SPORTS + "," + Genres.TRAVEL;
+
private String mInputId;
private ContentResolver mContentResolver;
private Uri mChannelsUri;
@@ -510,7 +519,6 @@
values.put(TvContract.Channels.COLUMN_INPUT_ID, mInputId);
Uri channelUri = mContentResolver.insert(mChannelsUri, values);
assertNotNull(channelUri);
- long channelId = ContentUris.parseId(channelUri);
try (Cursor cursor = mContentResolver.query(
channelUri, CHANNELS_PROJECTION, null, null, null)) {
cursor.moveToNext();
@@ -522,4 +530,58 @@
}
values.clear();
}
+
+ public void testChannelsGetVideoResolution() {
+ if (!Utils.hasTvInputFramework(getContext())) {
+ return;
+ }
+ assertEquals(Channels.VIDEO_RESOLUTION_SD, Channels.getVideoResolution(
+ Channels.VIDEO_FORMAT_480I));
+ assertEquals(Channels.VIDEO_RESOLUTION_ED, Channels.getVideoResolution(
+ Channels.VIDEO_FORMAT_480P));
+ assertEquals(Channels.VIDEO_RESOLUTION_SD, Channels.getVideoResolution(
+ Channels.VIDEO_FORMAT_576I));
+ assertEquals(Channels.VIDEO_RESOLUTION_ED, Channels.getVideoResolution(
+ Channels.VIDEO_FORMAT_576P));
+ assertEquals(Channels.VIDEO_RESOLUTION_HD, Channels.getVideoResolution(
+ Channels.VIDEO_FORMAT_720P));
+ assertEquals(Channels.VIDEO_RESOLUTION_HD, Channels.getVideoResolution(
+ Channels.VIDEO_FORMAT_1080I));
+ assertEquals(Channels.VIDEO_RESOLUTION_FHD, Channels.getVideoResolution(
+ Channels.VIDEO_FORMAT_1080P));
+ assertEquals(Channels.VIDEO_RESOLUTION_UHD, Channels.getVideoResolution(
+ Channels.VIDEO_FORMAT_2160P));
+ assertEquals(Channels.VIDEO_RESOLUTION_UHD, Channels.getVideoResolution(
+ Channels.VIDEO_FORMAT_4320P));
+ assertEquals(null, Channels.getVideoResolution("Unknown format"));
+ }
+
+ public void testProgramsGenresDecode() {
+ if (!Utils.hasTvInputFramework(getContext())) {
+ return;
+ }
+ List genres = Arrays.asList(Genres.decode(ENCODED_GENRE_STRING));
+ assertEquals(11, genres.size());
+ assertTrue(genres.contains(Genres.ANIMAL_WILDLIFE));
+ assertTrue(genres.contains(Genres.COMEDY));
+ assertTrue(genres.contains(Genres.DRAMA));
+ assertTrue(genres.contains(Genres.EDUCATION));
+ assertTrue(genres.contains(Genres.FAMILY_KIDS));
+ assertTrue(genres.contains(Genres.GAMING));
+ assertTrue(genres.contains(Genres.MOVIES));
+ assertTrue(genres.contains(Genres.NEWS));
+ assertTrue(genres.contains(Genres.SHOPPING));
+ assertTrue(genres.contains(Genres.SPORTS));
+ assertTrue(genres.contains(Genres.TRAVEL));
+ assertFalse(genres.contains(","));
+ }
+
+ public void testProgramsGenresEncode() {
+ if (!Utils.hasTvInputFramework(getContext())) {
+ return;
+ }
+ assertEquals(ENCODED_GENRE_STRING, Genres.encode(Genres.ANIMAL_WILDLIFE,
+ Genres.COMEDY, Genres.DRAMA, Genres.EDUCATION, Genres.FAMILY_KIDS, Genres.GAMING,
+ Genres.MOVIES, Genres.NEWS, Genres.SHOPPING, Genres.SPORTS, Genres.TRAVEL));
+ }
}
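A minimal sketch of the genre round trip these two tests exercise, assuming a hypothetical helper that stores canonical genres on a program row (the column constant and the Genres methods are real TvContract API; the helper class is illustrative):

    import android.content.ContentValues;
    import android.media.tv.TvContract.Programs;
    import android.media.tv.TvContract.Programs.Genres;

    class GenreRoundTripSketch {
        // Encode a genre list into the comma-separated form stored by the provider...
        static ContentValues withGenres(ContentValues values, String... genres) {
            values.put(Programs.COLUMN_CANONICAL_GENRE, Genres.encode(genres));
            return values;
        }

        // ...and decode it back into individual genre constants when reading a row.
        static String[] readGenres(String encoded) {
            return Genres.decode(encoded);
        }
    }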
diff --git a/tests/tests/tv/src/android/media/tv/cts/TvInputInfoTest.java b/tests/tests/tv/src/android/media/tv/cts/TvInputInfoTest.java
index 440ecb2..de91916 100644
--- a/tests/tests/tv/src/android/media/tv/cts/TvInputInfoTest.java
+++ b/tests/tests/tv/src/android/media/tv/cts/TvInputInfoTest.java
@@ -20,8 +20,10 @@
import android.content.Context;
import android.content.Intent;
import android.content.pm.PackageManager;
+import android.media.tv.TvContract;
import android.media.tv.TvInputInfo;
import android.media.tv.TvInputManager;
+import android.os.Parcel;
import android.test.AndroidTestCase;
/**
@@ -48,6 +50,53 @@
mPackageManager = getContext().getPackageManager();
}
+ public void testTvInputInfoOp() throws Exception {
+ if (!Utils.hasTvInputFramework(getContext())) {
+ return;
+ }
+ // Test describeContents
+ assertEquals(0, mStubInfo.describeContents());
+
+ // Test equals
+ assertTrue(mStubInfo.equals(mStubInfo));
+
+ // Test getId
+ final ComponentName componentName =
+ new ComponentName(getContext(), StubTunerTvInputService.class);
+ final String id = TvContract.buildInputId(componentName);
+ assertEquals(id, mStubInfo.getId());
+
+ // Test getServiceInfo
+ assertEquals(getContext().getPackageManager().getServiceInfo(componentName, 0).name,
+ mStubInfo.getServiceInfo().name);
+
+ // Test hashCode
+ assertEquals(id.hashCode(), mStubInfo.hashCode());
+
+ // Test writeToParcel
+ Parcel p = Parcel.obtain();
+ mStubInfo.writeToParcel(p, 0);
+ p.setDataPosition(0);
+ TvInputInfo infoFromParcel = TvInputInfo.CREATOR.createFromParcel(p);
+ assertEquals(mStubInfo.createSettingsIntent().getComponent(),
+ infoFromParcel.createSettingsIntent().getComponent());
+ assertEquals(mStubInfo.createSetupIntent().getComponent(),
+ infoFromParcel.createSetupIntent().getComponent());
+ assertEquals(mStubInfo.describeContents(), infoFromParcel.describeContents());
+ assertTrue(mStubInfo.equals(infoFromParcel));
+ assertEquals(mStubInfo.getId(), infoFromParcel.getId());
+ assertEquals(mStubInfo.getParentId(), infoFromParcel.getParentId());
+ assertEquals(mStubInfo.getServiceInfo().name, infoFromParcel.getServiceInfo().name);
+ assertEquals(mStubInfo.getType(), infoFromParcel.getType());
+ assertEquals(mStubInfo.hashCode(), infoFromParcel.hashCode());
+ assertEquals(mStubInfo.isPassthroughInput(), infoFromParcel.isPassthroughInput());
+ assertEquals(mStubInfo.loadIcon(getContext()).getConstantState(),
+ infoFromParcel.loadIcon(getContext()).getConstantState());
+ assertEquals(mStubInfo.loadLabel(getContext()), infoFromParcel.loadLabel(getContext()));
+ assertEquals(mStubInfo.toString(), infoFromParcel.toString());
+ p.recycle();
+ }
+
public void testGetIntentForSettingsActivity() throws Exception {
if (!Utils.hasTvInputFramework(getContext())) {
return;
diff --git a/tests/tests/tv/src/android/media/tv/cts/TvInputManagerTest.java b/tests/tests/tv/src/android/media/tv/cts/TvInputManagerTest.java
index 790adf9..48f1f44 100644
--- a/tests/tests/tv/src/android/media/tv/cts/TvInputManagerTest.java
+++ b/tests/tests/tv/src/android/media/tv/cts/TvInputManagerTest.java
@@ -17,25 +17,30 @@
package android.media.tv.cts;
import android.content.Context;
+import android.media.tv.TvContentRating;
import android.media.tv.TvInputInfo;
import android.media.tv.TvInputManager;
-import android.test.AndroidTestCase;
+import android.os.Handler;
+import android.test.ActivityInstrumentationTestCase2;
import java.util.List;
/**
* Test for {@link android.media.tv.TvInputManager}.
*/
-public class TvInputManagerTest extends AndroidTestCase {
+public class TvInputManagerTest extends ActivityInstrumentationTestCase2<TvViewStubActivity> {
private static final String[] VALID_TV_INPUT_SERVICES = {
StubTunerTvInputService.class.getName()
};
private static final String[] INVALID_TV_INPUT_SERVICES = {
NoMetadataTvInputService.class.getName(), NoPermissionTvInputService.class.getName()
};
+ private static final TvContentRating DUMMY_RATING = TvContentRating.createRating(
+ "com.android.tv", "US_TV", "US_TV_PG", "US_TV_D", "US_TV_L");
private String mStubId;
private TvInputManager mManager;
+ private TvInputManager.TvInputCallback mCallback = new TvInputManager.TvInputCallback() {};
private static TvInputInfo getInfoForClassName(List<TvInputInfo> list, String name) {
for (TvInputInfo info : list) {
@@ -46,25 +51,29 @@
return null;
}
+ public TvInputManagerTest() {
+ super(TvViewStubActivity.class);
+ }
+
@Override
public void setUp() throws Exception {
- if (!Utils.hasTvInputFramework(getContext())) {
+ if (!Utils.hasTvInputFramework(getActivity())) {
return;
}
- mManager = (TvInputManager) mContext.getSystemService(Context.TV_INPUT_SERVICE);
+ mManager = (TvInputManager) getActivity().getSystemService(Context.TV_INPUT_SERVICE);
mStubId = getInfoForClassName(
mManager.getTvInputList(), StubTunerTvInputService.class.getName()).getId();
}
public void testGetInputState() throws Exception {
- if (!Utils.hasTvInputFramework(getContext())) {
+ if (!Utils.hasTvInputFramework(getActivity())) {
return;
}
assertEquals(mManager.getInputState(mStubId), TvInputManager.INPUT_STATE_CONNECTED);
}
public void testGetTvInputInfo() throws Exception {
- if (!Utils.hasTvInputFramework(getContext())) {
+ if (!Utils.hasTvInputFramework(getActivity())) {
return;
}
assertEquals(mManager.getTvInputInfo(mStubId), getInfoForClassName(
@@ -72,7 +81,7 @@
}
public void testGetTvInputList() throws Exception {
- if (!Utils.hasTvInputFramework(getContext())) {
+ if (!Utils.hasTvInputFramework(getActivity())) {
return;
}
List<TvInputInfo> list = mManager.getTvInputList();
@@ -85,4 +94,44 @@
getInfoForClassName(list, name));
}
}
+
+ public void testIsParentalControlsEnabled() {
+ if (!Utils.hasTvInputFramework(getActivity())) {
+ return;
+ }
+ try {
+ mManager.isParentalControlsEnabled();
+ } catch (Exception e) {
+ fail();
+ }
+ }
+
+ public void testIsRatingBlocked() {
+ if (!Utils.hasTvInputFramework(getActivity())) {
+ return;
+ }
+ try {
+ mManager.isRatingBlocked(DUMMY_RATING);
+ } catch (Exception e) {
+ fail();
+ }
+ }
+
+ public void testRegisterUnregisterCallback() {
+ if (!Utils.hasTvInputFramework(getActivity())) {
+ return;
+ }
+ getActivity().runOnUiThread(new Runnable() {
+ @Override
+ public void run() {
+ try {
+ mManager.registerCallback(mCallback, new Handler());
+ mManager.unregisterCallback(mCallback);
+ } catch (Exception e) {
+ fail();
+ }
+ }
+ });
+ getInstrumentation().waitForIdleSync();
+ }
}
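The register/unregister test above only asserts that the calls do not throw; a minimal sketch of the intended usage, assuming the caller runs on a thread with a prepared Looper (the anonymous callback body is illustrative):

    import android.media.tv.TvInputManager;
    import android.os.Handler;

    class TvInputCallbackSketch {
        private final TvInputManager.TvInputCallback mCallback =
                new TvInputManager.TvInputCallback() {
                    @Override
                    public void onInputStateChanged(String inputId, int state) {
                        // React to an input becoming connected or disconnected.
                    }
                };

        // Assumes it is called from a thread with a live Looper (e.g. the main thread).
        void listen(TvInputManager manager) {
            manager.registerCallback(mCallback, new Handler());
            // ... later, once updates are no longer needed:
            manager.unregisterCallback(mCallback);
        }
    }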
diff --git a/tests/tests/tv/src/android/media/tv/cts/TvInputServiceTest.java b/tests/tests/tv/src/android/media/tv/cts/TvInputServiceTest.java
index f0ee2772..10535b0 100644
--- a/tests/tests/tv/src/android/media/tv/cts/TvInputServiceTest.java
+++ b/tests/tests/tv/src/android/media/tv/cts/TvInputServiceTest.java
@@ -20,6 +20,7 @@
import android.app.Instrumentation;
import android.content.Context;
import android.cts.util.PollingCheck;
+import android.media.PlaybackParams;
import android.media.tv.TvContentRating;
import android.media.tv.TvContract;
import android.media.tv.TvInputInfo;
@@ -28,9 +29,15 @@
import android.media.tv.TvView;
import android.media.tv.cts.TvInputServiceTest.CountingTvInputService.CountingSession;
import android.net.Uri;
+import android.os.SystemClock;
import android.test.ActivityInstrumentationTestCase2;
+import android.view.InputDevice;
import android.view.KeyEvent;
+import android.view.MotionEvent;
import android.view.Surface;
+import android.view.SurfaceView;
+import android.view.View;
+import android.widget.LinearLayout;
import com.android.cts.tv.R;
@@ -55,6 +62,8 @@
private TvInputManager mManager;
private TvInputInfo mStubInfo;
private final StubCallback mCallback = new StubCallback();
+ private final StubTimeShiftPositionCallback mTimeShiftPositionCallback =
+ new StubTimeShiftPositionCallback();
private static class StubCallback extends TvView.TvInputCallback {
private int mChannelRetunedCount;
@@ -64,6 +73,7 @@
private int mTrackChangedCount;
private int mContentAllowedCount;
private int mContentBlockedCount;
+ private int mTimeShiftStatusChangedCount;
@Override
public void onChannelRetuned(String inputId, Uri channelUri) {
@@ -99,6 +109,42 @@
public void onContentBlocked(String inputId, TvContentRating rating) {
mContentBlockedCount++;
}
+
+ @Override
+ public void onTimeShiftStatusChanged(String inputId, int status) {
+ mTimeShiftStatusChangedCount++;
+ }
+
+ public void resetCounts() {
+ mChannelRetunedCount = 0;
+ mVideoAvailableCount = 0;
+ mVideoUnavailableCount = 0;
+ mTrackSelectedCount = 0;
+ mTrackChangedCount = 0;
+ mContentAllowedCount = 0;
+ mContentBlockedCount = 0;
+ mTimeShiftStatusChangedCount = 0;
+ }
+ }
+
+ private static class StubTimeShiftPositionCallback extends TvView.TimeShiftPositionCallback {
+ private int mTimeShiftStartPositionChanged;
+ private int mTimeShiftCurrentPositionChanged;
+
+ @Override
+ public void onTimeShiftStartPositionChanged(String inputId, long timeMs) {
+ mTimeShiftStartPositionChanged++;
+ }
+
+ @Override
+ public void onTimeShiftCurrentPositionChanged(String inputId, long timeMs) {
+ mTimeShiftCurrentPositionChanged++;
+ }
+
+ public void resetCounts() {
+ mTimeShiftStartPositionChanged = 0;
+ mTimeShiftCurrentPositionChanged = 0;
+ }
}
public TvInputServiceTest() {
@@ -135,7 +181,18 @@
verifyCommandSetStreamVolume();
verifyCommandSetCaptionEnabled();
verifyCommandSelectTrack();
- verifyCommandDispatchKeyEvent();
+ verifyCommandDispatchKeyDown();
+ verifyCommandDispatchKeyMultiple();
+ verifyCommandDispatchKeyUp();
+ verifyCommandDispatchTouchEvent();
+ verifyCommandDispatchTrackballEvent();
+ verifyCommandDispatchGenericMotionEvent();
+ verifyCommandTimeShiftPause();
+ verifyCommandTimeShiftResume();
+ verifyCommandTimeShiftSeekTo();
+ verifyCommandTimeShiftSetPlaybackParams();
+ verifyCommandSetTimeShiftPositionCallback();
+ verifyCommandOverlayViewSizeChanged();
verifyCallbackChannelRetuned();
verifyCallbackVideoAvailable();
verifyCallbackVideoUnavailable();
@@ -143,6 +200,8 @@
verifyCallbackTrackSelected();
verifyCallbackContentAllowed();
verifyCallbackContentBlocked();
+ verifyCallbackTimeShiftStatusChanged();
+ verifyCallbackLayoutSurface();
runTestOnUiThread(new Runnable() {
@Override
@@ -161,12 +220,13 @@
@Override
protected boolean check() {
CountingSession session = CountingTvInputService.sSession;
- return session != null && session.mTuneCount > 0;
+ return session != null && session.mTuneCount > 0 && session.mCreateOverlayView > 0;
}
}.run();
}
public void verifyCommandSetStreamVolume() {
+ resetCounts();
mTvView.setStreamVolume(1.0f);
mInstrumentation.waitForIdleSync();
new PollingCheck(TIME_OUT) {
@@ -179,6 +239,7 @@
}
public void verifyCommandSetCaptionEnabled() {
+ resetCounts();
mTvView.setCaptionEnabled(true);
mInstrumentation.waitForIdleSync();
new PollingCheck(TIME_OUT) {
@@ -191,18 +252,21 @@
}
public void verifyCommandSelectTrack() {
- mTvView.selectTrack(TvTrackInfo.TYPE_AUDIO, "dummyTrackId");
+ resetCounts();
+ verifyCallbackTracksChanged();
+ mTvView.selectTrack(mDummyTrack.getType(), mDummyTrack.getId());
mInstrumentation.waitForIdleSync();
new PollingCheck(TIME_OUT) {
@Override
protected boolean check() {
CountingSession session = CountingTvInputService.sSession;
- return session != null && session.mSetStreamVolumeCount > 0;
+ return session != null && session.mSelectTrackCount > 0;
}
}.run();
}
- public void verifyCommandDispatchKeyEvent() {
+ public void verifyCommandDispatchKeyDown() {
+ resetCounts();
mTvView.dispatchKeyEvent(new KeyEvent(KeyEvent.ACTION_DOWN, KeyEvent.KEYCODE_K));
mInstrumentation.waitForIdleSync();
new PollingCheck(TIME_OUT) {
@@ -214,7 +278,167 @@
}.run();
}
+ public void verifyCommandDispatchKeyMultiple() {
+ resetCounts();
+ mTvView.dispatchKeyEvent(new KeyEvent(KeyEvent.ACTION_MULTIPLE, KeyEvent.KEYCODE_K));
+ mInstrumentation.waitForIdleSync();
+ new PollingCheck(TIME_OUT) {
+ @Override
+ protected boolean check() {
+ CountingSession session = CountingTvInputService.sSession;
+ return session != null && session.mKeyMultipleCount > 0;
+ }
+ }.run();
+ }
+
+ public void verifyCommandDispatchKeyUp() {
+ resetCounts();
+ mTvView.dispatchKeyEvent(new KeyEvent(KeyEvent.ACTION_UP, KeyEvent.KEYCODE_K));
+ mInstrumentation.waitForIdleSync();
+ new PollingCheck(TIME_OUT) {
+ @Override
+ protected boolean check() {
+ CountingSession session = CountingTvInputService.sSession;
+ return session != null && session.mKeyUpCount > 0;
+ }
+ }.run();
+ }
+
+ public void verifyCommandDispatchTouchEvent() {
+ resetCounts();
+ long now = SystemClock.uptimeMillis();
+ MotionEvent event = MotionEvent.obtain(now, now, MotionEvent.ACTION_DOWN, 1.0f, 1.0f,
+ 1.0f, 1.0f, 0, 1.0f, 1.0f, 0, 0);
+ event.setSource(InputDevice.SOURCE_TOUCHSCREEN);
+ mTvView.dispatchTouchEvent(event);
+ mInstrumentation.waitForIdleSync();
+ new PollingCheck(TIME_OUT) {
+ @Override
+ protected boolean check() {
+ CountingSession session = CountingTvInputService.sSession;
+ return session != null && session.mTouchEventCount > 0;
+ }
+ }.run();
+ }
+
+ public void verifyCommandDispatchTrackballEvent() {
+ resetCounts();
+ long now = SystemClock.uptimeMillis();
+ MotionEvent event = MotionEvent.obtain(now, now, MotionEvent.ACTION_DOWN, 1.0f, 1.0f,
+ 1.0f, 1.0f, 0, 1.0f, 1.0f, 0, 0);
+ event.setSource(InputDevice.SOURCE_TRACKBALL);
+ mTvView.dispatchTouchEvent(event);
+ mInstrumentation.waitForIdleSync();
+ new PollingCheck(TIME_OUT) {
+ @Override
+ protected boolean check() {
+ CountingSession session = CountingTvInputService.sSession;
+ return session != null && session.mTrackballEventCount > 0;
+ }
+ }.run();
+ }
+
+ public void verifyCommandDispatchGenericMotionEvent() {
+ resetCounts();
+ long now = SystemClock.uptimeMillis();
+ MotionEvent event = MotionEvent.obtain(now, now, MotionEvent.ACTION_DOWN, 1.0f, 1.0f,
+ 1.0f, 1.0f, 0, 1.0f, 1.0f, 0, 0);
+ mTvView.dispatchGenericMotionEvent(event);
+ mInstrumentation.waitForIdleSync();
+ new PollingCheck(TIME_OUT) {
+ @Override
+ protected boolean check() {
+ CountingSession session = CountingTvInputService.sSession;
+ return session != null && session.mGenericMotionEventCount > 0;
+ }
+ }.run();
+ }
+
+ public void verifyCommandTimeShiftPause() {
+ resetCounts();
+ mTvView.timeShiftPause();
+ mInstrumentation.waitForIdleSync();
+ new PollingCheck(TIME_OUT) {
+ @Override
+ protected boolean check() {
+ CountingSession session = CountingTvInputService.sSession;
+ return session != null && session.mTimeShiftPauseCount > 0;
+ }
+ }.run();
+ }
+
+ public void verifyCommandTimeShiftResume() {
+ resetCounts();
+ mTvView.timeShiftResume();
+ mInstrumentation.waitForIdleSync();
+ new PollingCheck(TIME_OUT) {
+ @Override
+ protected boolean check() {
+ CountingSession session = CountingTvInputService.sSession;
+ return session != null && session.mTimeShiftResumeCount > 0;
+ }
+ }.run();
+ }
+
+ public void verifyCommandTimeShiftSeekTo() {
+ resetCounts();
+ mTvView.timeShiftSeekTo(0);
+ mInstrumentation.waitForIdleSync();
+ new PollingCheck(TIME_OUT) {
+ @Override
+ protected boolean check() {
+ CountingSession session = CountingTvInputService.sSession;
+ return session != null && session.mTimeShiftSeekToCount > 0;
+ }
+ }.run();
+ }
+
+ public void verifyCommandTimeShiftSetPlaybackParams() {
+ resetCounts();
+ mTvView.timeShiftSetPlaybackParams(new PlaybackParams().setSpeed(2.0f)
+ .setAudioFallbackMode(PlaybackParams.AUDIO_FALLBACK_MODE_DEFAULT));
+ mInstrumentation.waitForIdleSync();
+ new PollingCheck(TIME_OUT) {
+ @Override
+ protected boolean check() {
+ CountingSession session = CountingTvInputService.sSession;
+ return session != null && session.mTimeShiftSetPlaybackParamsCount > 0;
+ }
+ }.run();
+ }
+
+ public void verifyCommandSetTimeShiftPositionCallback() {
+ resetCounts();
+ mTvView.setTimeShiftPositionCallback(mTimeShiftPositionCallback);
+ mInstrumentation.waitForIdleSync();
+ new PollingCheck(TIME_OUT) {
+ @Override
+ protected boolean check() {
+ return mTimeShiftPositionCallback.mTimeShiftCurrentPositionChanged > 0
+ && mTimeShiftPositionCallback.mTimeShiftStartPositionChanged > 0;
+ }
+ }.run();
+ }
+
+ public void verifyCommandOverlayViewSizeChanged() {
+ resetCounts();
+ mActivity.runOnUiThread(new Runnable() {
+ public void run() {
+ mTvView.setLayoutParams(new LinearLayout.LayoutParams(10, 20));
+ }
+ });
+ mInstrumentation.waitForIdleSync();
+ new PollingCheck(TIME_OUT) {
+ @Override
+ protected boolean check() {
+ CountingSession session = CountingTvInputService.sSession;
+ return session != null && session.mOverlayViewSizeChangedCount > 0;
+ }
+ }.run();
+ }
+
public void verifyCallbackChannelRetuned() {
+ resetCounts();
CountingSession session = CountingTvInputService.sSession;
assertNotNull(session);
Uri fakeChannelUri = TvContract.buildChannelUri(0);
@@ -228,6 +452,7 @@
}
public void verifyCallbackVideoAvailable() {
+ resetCounts();
CountingSession session = CountingTvInputService.sSession;
assertNotNull(session);
session.notifyVideoAvailable();
@@ -240,6 +465,7 @@
}
public void verifyCallbackVideoUnavailable() {
+ resetCounts();
CountingSession session = CountingTvInputService.sSession;
assertNotNull(session);
session.notifyVideoUnavailable(TvInputManager.VIDEO_UNAVAILABLE_REASON_TUNING);
@@ -252,6 +478,7 @@
}
public void verifyCallbackTracksChanged() {
+ resetCounts();
CountingSession session = CountingTvInputService.sSession;
assertNotNull(session);
ArrayList<TvTrackInfo> tracks = new ArrayList<>();
@@ -266,6 +493,7 @@
}
public void verifyCallbackTrackSelected() {
+ resetCounts();
CountingSession session = CountingTvInputService.sSession;
assertNotNull(session);
session.notifyTrackSelected(mDummyTrack.getType(), mDummyTrack.getId());
@@ -278,6 +506,7 @@
}
public void verifyCallbackContentAllowed() {
+ resetCounts();
CountingSession session = CountingTvInputService.sSession;
assertNotNull(session);
session.notifyContentAllowed();
@@ -290,6 +519,7 @@
}
public void verifyCallbackContentBlocked() {
+ resetCounts();
CountingSession session = CountingTvInputService.sSession;
assertNotNull(session);
TvContentRating rating = TvContentRating.createRating("android.media.tv", "US_TVPG",
@@ -303,13 +533,59 @@
}.run();
}
+ public void verifyCallbackTimeShiftStatusChanged() {
+ resetCounts();
+ CountingSession session = CountingTvInputService.sSession;
+ assertNotNull(session);
+ session.notifyTimeShiftStatusChanged(TvInputManager.TIME_SHIFT_STATUS_AVAILABLE);
+ new PollingCheck(TIME_OUT) {
+ @Override
+ protected boolean check() {
+ return mCallback.mTimeShiftStatusChangedCount > 0;
+ }
+ }.run();
+ }
+
+ public void verifyCallbackLayoutSurface() {
+ resetCounts();
+ final int left = 10;
+ final int top = 20;
+ final int right = 30;
+ final int bottom = 40;
+ CountingSession session = CountingTvInputService.sSession;
+ assertNotNull(session);
+ session.layoutSurface(left, top, right, bottom);
+ new PollingCheck(TIME_OUT) {
+ @Override
+ protected boolean check() {
+ int childCount = mTvView.getChildCount();
+ for (int i = 0; i < childCount; ++i) {
+ View v = mTvView.getChildAt(i);
+ if (v instanceof SurfaceView) {
+ return v.getLeft() == left && v.getTop() == top && v.getRight() == right
+ && v.getBottom() == bottom;
+ }
+ }
+ return false;
+ }
+ }.run();
+ }
+
+ private void resetCounts() {
+ if (CountingTvInputService.sSession != null) {
+ CountingTvInputService.sSession.resetCounts();
+ }
+ mCallback.resetCounts();
+ mTimeShiftPositionCallback.resetCounts();
+ }
+
public static class CountingTvInputService extends StubTvInputService {
- static CountingTvInputService sInstance;
static CountingSession sSession;
@Override
public Session onCreateSession(String inputId) {
sSession = new CountingSession(this);
+ sSession.setOverlayViewEnabled(true);
return sSession;
}
@@ -318,12 +594,48 @@
public volatile int mSetStreamVolumeCount;
public volatile int mSetCaptionEnabledCount;
public volatile int mSelectTrackCount;
+ public volatile int mCreateOverlayView;
public volatile int mKeyDownCount;
+ public volatile int mKeyLongPressCount;
+ public volatile int mKeyMultipleCount;
+ public volatile int mKeyUpCount;
+ public volatile int mTouchEventCount;
+ public volatile int mTrackballEventCount;
+ public volatile int mGenericMotionEventCount;
+ public volatile int mOverlayViewSizeChangedCount;
+ public volatile int mTimeShiftPauseCount;
+ public volatile int mTimeShiftResumeCount;
+ public volatile int mTimeShiftSeekToCount;
+ public volatile int mTimeShiftSetPlaybackParamsCount;
+ public volatile long mTimeShiftGetCurrentPositionCount;
+ public volatile long mTimeShiftGetStartPositionCount;
CountingSession(Context context) {
super(context);
}
+ public void resetCounts() {
+ mTuneCount = 0;
+ mSetStreamVolumeCount = 0;
+ mSetCaptionEnabledCount = 0;
+ mSelectTrackCount = 0;
+ mCreateOverlayView = 0;
+ mKeyDownCount = 0;
+ mKeyLongPressCount = 0;
+ mKeyMultipleCount = 0;
+ mKeyUpCount = 0;
+ mTouchEventCount = 0;
+ mTrackballEventCount = 0;
+ mGenericMotionEventCount = 0;
+ mOverlayViewSizeChangedCount = 0;
+ mTimeShiftPauseCount = 0;
+ mTimeShiftResumeCount = 0;
+ mTimeShiftSeekToCount = 0;
+ mTimeShiftSetPlaybackParamsCount = 0;
+ mTimeShiftGetCurrentPositionCount = 0;
+ mTimeShiftGetStartPositionCount = 0;
+ }
+
@Override
public void onRelease() {
}
@@ -356,10 +668,87 @@
}
@Override
+ public View onCreateOverlayView() {
+ mCreateOverlayView++;
+ return null;
+ }
+
+ @Override
public boolean onKeyDown(int keyCode, KeyEvent event) {
mKeyDownCount++;
return false;
}
+
+ @Override
+ public boolean onKeyLongPress(int keyCode, KeyEvent event) {
+ mKeyLongPressCount++;
+ return false;
+ }
+
+ @Override
+ public boolean onKeyMultiple(int keyCode, int count, KeyEvent event) {
+ mKeyMultipleCount++;
+ return false;
+ }
+
+ @Override
+ public boolean onKeyUp(int keyCode, KeyEvent event) {
+ mKeyUpCount++;
+ return false;
+ }
+
+ @Override
+ public boolean onTouchEvent(MotionEvent event) {
+ mTouchEventCount++;
+ return false;
+ }
+
+ @Override
+ public boolean onTrackballEvent(MotionEvent event) {
+ mTrackballEventCount++;
+ return false;
+ }
+
+ @Override
+ public boolean onGenericMotionEvent(MotionEvent event) {
+ mGenericMotionEventCount++;
+ return false;
+ }
+
+ @Override
+ public void onTimeShiftPause() {
+ mTimeShiftPauseCount++;
+ }
+
+ @Override
+ public void onTimeShiftResume() {
+ mTimeShiftResumeCount++;
+ }
+
+ @Override
+ public void onTimeShiftSeekTo(long timeMs) {
+ mTimeShiftSeekToCount++;
+ }
+
+ @Override
+ public void onTimeShiftSetPlaybackParams(PlaybackParams param) {
+ mTimeShiftSetPlaybackParamsCount++;
+ }
+
+ @Override
+ public long onTimeShiftGetCurrentPosition() {
+ return ++mTimeShiftGetCurrentPositionCount;
+ }
+
+ @Override
+ public long onTimeShiftGetStartPosition() {
+ return ++mTimeShiftGetStartPositionCount;
+ }
+
+ @Override
+ public void onOverlayViewSizeChanged(int width, int height) {
+ mOverlayViewSizeChangedCount++;
+ }
}
}
}
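Each verify* helper above pairs a one-shot command with a PollingCheck that waits for the matching counter to tick over. A minimal sketch of that idiom, assuming a plain sleep loop rather than the actual android.cts.util.PollingCheck class (which fails the test on timeout instead of returning false):

    import java.util.function.BooleanSupplier;

    class PollSketch {
        // Poll a condition until it holds or the timeout expires.
        static boolean waitFor(BooleanSupplier condition, long timeoutMs)
                throws InterruptedException {
            long deadline = System.currentTimeMillis() + timeoutMs;
            while (System.currentTimeMillis() < deadline) {
                if (condition.getAsBoolean()) {
                    return true;
                }
                Thread.sleep(50);
            }
            return false;
        }
    }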
diff --git a/tests/tests/tv/src/android/media/tv/cts/TvTrackInfoTest.java b/tests/tests/tv/src/android/media/tv/cts/TvTrackInfoTest.java
new file mode 100644
index 0000000..a99bd77
--- /dev/null
+++ b/tests/tests/tv/src/android/media/tv/cts/TvTrackInfoTest.java
@@ -0,0 +1,129 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.tv.cts;
+
+import android.media.tv.TvTrackInfo;
+import android.os.Bundle;
+import android.os.Parcel;
+import android.test.AndroidTestCase;
+
+/**
+ * Test {@link android.media.tv.TvTrackInfo}.
+ */
+public class TvTrackInfoTest extends AndroidTestCase {
+
+ public void testAudioTrackInfoOp() {
+ if (!Utils.hasTvInputFramework(getContext())) {
+ return;
+ }
+ final Bundle bundle = new Bundle();
+ bundle.putBoolean("testTrue", true);
+ final TvTrackInfo info = new TvTrackInfo.Builder(TvTrackInfo.TYPE_AUDIO, "id_audio")
+ .setAudioChannelCount(2)
+ .setAudioSampleRate(48000)
+ .setLanguage("eng")
+ .setExtra(bundle)
+ .build();
+ assertEquals(TvTrackInfo.TYPE_AUDIO, info.getType());
+ assertEquals("id_audio", info.getId());
+ assertEquals(2, info.getAudioChannelCount());
+ assertEquals(48000, info.getAudioSampleRate());
+ assertEquals("eng", info.getLanguage());
+ assertEquals(bundle.get("testTrue"), info.getExtra().get("testTrue"));
+ assertEquals(0, info.describeContents());
+
+ // Test writeToParcel
+ Parcel p = Parcel.obtain();
+ info.writeToParcel(p, 0);
+ p.setDataPosition(0);
+ TvTrackInfo infoFromParcel = TvTrackInfo.CREATOR.createFromParcel(p);
+ assertEquals(TvTrackInfo.TYPE_AUDIO, infoFromParcel.getType());
+ assertEquals("id_audio", infoFromParcel.getId());
+ assertEquals(2, infoFromParcel.getAudioChannelCount());
+ assertEquals(48000, infoFromParcel.getAudioSampleRate());
+ assertEquals("eng", infoFromParcel.getLanguage());
+ assertEquals(bundle.get("testTrue"), infoFromParcel.getExtra().get("testTrue"));
+ assertEquals(0, infoFromParcel.describeContents());
+ p.recycle();
+ }
+
+ public void testVideoTrackInfoOp() {
+ if (!Utils.hasTvInputFramework(getContext())) {
+ return;
+ }
+ final Bundle bundle = new Bundle();
+ bundle.putBoolean("testTrue", true);
+ final TvTrackInfo info = new TvTrackInfo.Builder(TvTrackInfo.TYPE_VIDEO, "id_video")
+ .setVideoWidth(1920)
+ .setVideoHeight(1080)
+ .setVideoFrameRate(29.97f)
+ .setLanguage("eng")
+ .setExtra(bundle)
+ .build();
+ assertEquals(TvTrackInfo.TYPE_VIDEO, info.getType());
+ assertEquals("id_video", info.getId());
+ assertEquals(1920, info.getVideoWidth());
+ assertEquals(1080, info.getVideoHeight());
+ assertEquals(29.97f, info.getVideoFrameRate());
+ assertEquals("eng", info.getLanguage());
+ assertEquals(bundle.get("testTrue"), info.getExtra().get("testTrue"));
+ assertEquals(0, info.describeContents());
+
+ // Test writeToParcel
+ Parcel p = Parcel.obtain();
+ info.writeToParcel(p, 0);
+ p.setDataPosition(0);
+ TvTrackInfo infoFromParcel = TvTrackInfo.CREATOR.createFromParcel(p);
+ assertEquals(TvTrackInfo.TYPE_VIDEO, infoFromParcel.getType());
+ assertEquals("id_video", infoFromParcel.getId());
+ assertEquals(1920, infoFromParcel.getVideoWidth());
+ assertEquals(1080, infoFromParcel.getVideoHeight());
+ assertEquals(29.97f, infoFromParcel.getVideoFrameRate());
+ assertEquals("eng", infoFromParcel.getLanguage());
+ assertEquals(bundle.get("testTrue"), infoFromParcel.getExtra().get("testTrue"));
+ assertEquals(0, infoFromParcel.describeContents());
+ p.recycle();
+ }
+
+ public void testSubtitleTrackInfoOp() {
+ if (!Utils.hasTvInputFramework(getContext())) {
+ return;
+ }
+ final Bundle bundle = new Bundle();
+ bundle.putBoolean("testTrue", true);
+ final TvTrackInfo info = new TvTrackInfo.Builder(TvTrackInfo.TYPE_SUBTITLE, "id_subtitle")
+ .setLanguage("eng")
+ .setExtra(bundle)
+ .build();
+ assertEquals(TvTrackInfo.TYPE_SUBTITLE, info.getType());
+ assertEquals("id_subtitle", info.getId());
+ assertEquals("eng", info.getLanguage());
+ assertEquals(bundle.get("testTrue"), info.getExtra().get("testTrue"));
+ assertEquals(0, info.describeContents());
+
+ // Test writeToParcel
+ Parcel p = Parcel.obtain();
+ info.writeToParcel(p, 0);
+ p.setDataPosition(0);
+ TvTrackInfo infoFromParcel = TvTrackInfo.CREATOR.createFromParcel(p);
+ assertEquals(TvTrackInfo.TYPE_SUBTITLE, infoFromParcel.getType());
+ assertEquals("id_subtitle", infoFromParcel.getId());
+ assertEquals("eng", infoFromParcel.getLanguage());
+ assertEquals(bundle.get("testTrue"), infoFromParcel.getExtra().get("testTrue"));
+ assertEquals(0, infoFromParcel.describeContents());
+ p.recycle();
+ }
+}
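The TvViewTest changes below also exercise two newer TvTrackInfo.Builder fields, setVideoPixelAspectRatio and setDescription; a minimal sketch of constructing tracks with them (the IDs and values are illustrative):

    import android.media.tv.TvTrackInfo;

    class TrackBuilderSketch {
        // A video track with an anamorphic pixel aspect ratio and a described subtitle track.
        static TvTrackInfo[] buildTracks() {
            TvTrackInfo video = new TvTrackInfo.Builder(TvTrackInfo.TYPE_VIDEO, "video-sd")
                    .setVideoWidth(720)
                    .setVideoHeight(576)
                    .setVideoPixelAspectRatio(1.09f)
                    .build();
            TvTrackInfo subtitle = new TvTrackInfo.Builder(TvTrackInfo.TYPE_SUBTITLE, "subtitle-eng")
                    .setLanguage("eng")
                    .setDescription("audio commentary")
                    .build();
            return new TvTrackInfo[] { video, subtitle };
        }
    }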
diff --git a/tests/tests/tv/src/android/media/tv/cts/TvViewTest.java b/tests/tests/tv/src/android/media/tv/cts/TvViewTest.java
index 930dd6a..fca3d5b 100644
--- a/tests/tests/tv/src/android/media/tv/cts/TvViewTest.java
+++ b/tests/tests/tv/src/android/media/tv/cts/TvViewTest.java
@@ -258,6 +258,8 @@
case TvTrackInfo.TYPE_VIDEO:
assertEquals(track.getVideoHeight(), selectedTrack.getVideoHeight());
assertEquals(track.getVideoWidth(), selectedTrack.getVideoWidth());
+ assertEquals(track.getVideoPixelAspectRatio(),
+ selectedTrack.getVideoPixelAspectRatio(), 0.001f);
break;
case TvTrackInfo.TYPE_AUDIO:
assertEquals(track.getAudioChannelCount(),
@@ -267,6 +269,7 @@
break;
case TvTrackInfo.TYPE_SUBTITLE:
assertEquals(track.getLanguage(), selectedTrack.getLanguage());
+ assertEquals(track.getDescription(), selectedTrack.getDescription());
break;
default:
fail("Unrecognized type: " + track.getType());
@@ -281,7 +284,7 @@
TvTrackInfo videoTrack1 = new TvTrackInfo.Builder(TvTrackInfo.TYPE_VIDEO, "video-HD")
.setVideoHeight(1920).setVideoWidth(1080).build();
TvTrackInfo videoTrack2 = new TvTrackInfo.Builder(TvTrackInfo.TYPE_VIDEO, "video-SD")
- .setVideoHeight(640).setVideoWidth(360).build();
+ .setVideoHeight(640).setVideoWidth(360).setVideoPixelAspectRatio(1.09f).build();
TvTrackInfo audioTrack1 =
new TvTrackInfo.Builder(TvTrackInfo.TYPE_AUDIO, "audio-stereo-eng")
.setLanguage("eng").setAudioChannelCount(2).setAudioSampleRate(48000).build();
@@ -293,13 +296,16 @@
TvTrackInfo subtitleTrack2 =
new TvTrackInfo.Builder(TvTrackInfo.TYPE_SUBTITLE, "subtitle-esp")
.setLanguage("esp").build();
+ TvTrackInfo subtitleTrack3 =
+ new TvTrackInfo.Builder(TvTrackInfo.TYPE_SUBTITLE, "subtitle-eng2")
+ .setLanguage("eng").setDescription("audio commentary").build();
StubTunerTvInputService.injectTrack(videoTrack1, videoTrack2, audioTrack1, audioTrack2,
subtitleTrack1, subtitleTrack2);
final List<TvTrackInfo> tracks = new ArrayList<TvTrackInfo>();
Collections.addAll(tracks, videoTrack1, videoTrack2, audioTrack1, audioTrack2,
- subtitleTrack1, subtitleTrack2);
+ subtitleTrack1, subtitleTrack2, subtitleTrack3);
tryTuneAllChannels(new Runnable() {
@Override
public void run() {
diff --git a/tests/tests/uirendering/res/layout/simple_shadow_layout.xml b/tests/tests/uirendering/res/layout/simple_shadow_layout.xml
new file mode 100644
index 0000000..2f21df0
--- /dev/null
+++ b/tests/tests/uirendering/res/layout/simple_shadow_layout.xml
@@ -0,0 +1,26 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!--
+ Copyright (C) 2015 The Android Open Source Project
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<FrameLayout xmlns:android="http://schemas.android.com/apk/res/android"
+ android:layout_width="@dimen/test_width"
+ android:layout_height="@dimen/test_height">
+ <View android:layout_width="40px"
+ android:layout_height="40px"
+ android:translationX="25px"
+ android:translationY="25px"
+ android:elevation="10dp"
+ android:background="#fff" />
+</FrameLayout>
\ No newline at end of file
diff --git a/tests/tests/uirendering/src/android/uirendering/cts/testclasses/ShadowTests.java b/tests/tests/uirendering/src/android/uirendering/cts/testclasses/ShadowTests.java
new file mode 100644
index 0000000..878d5ae
--- /dev/null
+++ b/tests/tests/uirendering/src/android/uirendering/cts/testclasses/ShadowTests.java
@@ -0,0 +1,49 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package android.uirendering.cts.testclasses;
+
+import android.graphics.Color;
+import android.graphics.Point;
+import android.uirendering.cts.bitmapverifiers.SamplePointVerifier;
+
+import com.android.cts.uirendering.R;
+
+import android.test.suitebuilder.annotation.SmallTest;
+import android.uirendering.cts.testinfrastructure.ActivityTestBase;
+
+public class ShadowTests extends ActivityTestBase {
+ @SmallTest
+ public void testShadowLayout() {
+ createTest()
+ .addLayout(R.layout.simple_shadow_layout, null, true/* HW only */)
+ .runWithVerifier(
+ new SamplePointVerifier(
+ new Point[] {
+ // view area
+ new Point(25, 64),
+ new Point(64, 64),
+ // shadow area
+ new Point(25, 65),
+ new Point(64, 65)
+ },
+ new int[] {
+ Color.WHITE,
+ Color.WHITE,
+ Color.rgb(222, 222, 222),
+ Color.rgb(222, 222, 222),
+ }));
+ }
+}
\ No newline at end of file
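SamplePointVerifier (a CTS test-infrastructure class) compares a handful of pixels of the rendered output against expected colors; a minimal sketch of that idea against a plain Bitmap, using exact equality for simplicity where the real verifier would normally tolerate small per-channel differences:

    import android.graphics.Bitmap;
    import android.graphics.Point;

    class SamplePointSketch {
        // Check that each sampled pixel of a rendered bitmap matches the expected color.
        static boolean matches(Bitmap bitmap, Point[] points, int[] expectedColors) {
            for (int i = 0; i < points.length; i++) {
                if (bitmap.getPixel(points[i].x, points[i].y) != expectedColors[i]) {
                    return false;
                }
            }
            return true;
        }
    }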
diff --git a/tests/tests/view/res/layout/view_layout.xml b/tests/tests/view/res/layout/view_layout.xml
index fa817dc..e6e1550 100644
--- a/tests/tests/view/res/layout/view_layout.xml
+++ b/tests/tests/view/res/layout/view_layout.xml
@@ -34,6 +34,7 @@
android:layout_height="200px"
android:scrollbars="horizontal|vertical"
android:fadingEdge="horizontal|vertical"
+ android:scrollIndicators="top|bottom"
android:fadingEdgeLength="20px"/>
<android.view.cts.MockView
diff --git a/tests/tests/view/src/android/view/cts/ViewStubTest.java b/tests/tests/view/src/android/view/cts/ViewStubTest.java
index 53e251a..cbe498f 100644
--- a/tests/tests/view/src/android/view/cts/ViewStubTest.java
+++ b/tests/tests/view/src/android/view/cts/ViewStubTest.java
@@ -157,13 +157,16 @@
public void testAccessInflatedId() {
ViewStub viewStub = new ViewStub(mContext);
- assertEquals(0, viewStub.getInflatedId());
+ assertEquals("Default ViewStub inflated ID is View.NO_ID",
+ View.NO_ID, viewStub.getInflatedId());
viewStub.setInflatedId(R.id.inflated_id);
- assertEquals(R.id.inflated_id, viewStub.getInflatedId());
+ assertEquals("Set ViewStub inflated ID to package resource ID",
+ R.id.inflated_id, viewStub.getInflatedId());
- viewStub.setInflatedId(-1);
- assertEquals(-1, viewStub.getInflatedId());
+ viewStub.setInflatedId(View.NO_ID);
+ assertEquals("Set ViewStub inflated ID to View.NO_ID",
+ View.NO_ID, viewStub.getInflatedId());
}
@UiThreadTest
diff --git a/tests/tests/view/src/android/view/cts/ViewTest.java b/tests/tests/view/src/android/view/cts/ViewTest.java
index 8194682..4764a02 100644
--- a/tests/tests/view/src/android/view/cts/ViewTest.java
+++ b/tests/tests/view/src/android/view/cts/ViewTest.java
@@ -56,6 +56,8 @@
import android.view.HapticFeedbackConstants;
import android.view.InputDevice;
import android.view.KeyEvent;
+import android.view.Menu;
+import android.view.MenuInflater;
import android.view.MotionEvent;
import android.view.SoundEffectConstants;
import android.view.TouchDelegate;
@@ -2077,24 +2079,24 @@
assertEquals(viewId, container.keyAt(0));
container.clear();
- container.put(viewId, new BaseSavedState(BaseSavedState.EMPTY_STATE));
+ container.put(viewId, new android.graphics.Rect());
try {
view.restoreHierarchyState(container);
- fail("should throw IllegalArgumentException");
+ fail("Parcelable state must be an AbsSaveState, should throw IllegalArgumentException");
} catch (IllegalArgumentException e) {
// expected
}
try {
view.restoreHierarchyState(null);
- fail("should throw NullPointerException");
+ fail("Cannot pass null to restoreHierarchyState(), should throw NullPointerException");
} catch (NullPointerException e) {
// expected
}
try {
view.saveHierarchyState(null);
- fail("should throw NullPointerException");
+ fail("Cannot pass null to saveHierarchyState(), should throw NullPointerException");
} catch (NullPointerException e) {
// expected
}
@@ -2434,7 +2436,6 @@
assertFalse(view.hasCalledDrawableStateChanged());
view.setPressed(true);
assertTrue(view.hasCalledDrawableStateChanged());
- assertFalse(view.hasCalledOnCreateDrawableState());
assertTrue(Arrays.equals(MockView.getPressedEnabledStateSet(), view.getDrawableState()));
assertTrue(view.hasCalledOnCreateDrawableState());
@@ -2445,7 +2446,6 @@
view.refreshDrawableState();
assertTrue(view.hasCalledDrawableStateChanged());
assertTrue(mMockParent.hasChildDrawableStateChanged());
- assertFalse(view.hasCalledOnCreateDrawableState());
assertTrue(Arrays.equals(MockView.getPressedEnabledStateSet(), view.getDrawableState()));
assertTrue(view.hasCalledOnCreateDrawableState());
}
@@ -3111,6 +3111,24 @@
assertEquals(fadingEdgeLength, view.getVerticalFadingEdgeLength());
}
+ @UiThreadTest
+ public void testScrollIndicators() {
+ MockView view = (MockView) mActivity.findViewById(R.id.scroll_view);
+
+ assertEquals("Set indicators match those specified in XML",
+ View.SCROLL_INDICATOR_TOP | View.SCROLL_INDICATOR_BOTTOM,
+ view.getScrollIndicators());
+
+ view.setScrollIndicators(0);
+ assertEquals("Cleared indicators", 0, view.getScrollIndicators());
+
+ view.setScrollIndicators(View.SCROLL_INDICATOR_START | View.SCROLL_INDICATOR_RIGHT);
+ assertEquals("Set start and right indicators",
+ View.SCROLL_INDICATOR_START | View.SCROLL_INDICATOR_RIGHT,
+ view.getScrollIndicators());
+
+ }
+
public void testOnStartAndFinishTemporaryDetach() throws Throwable {
final MockListView listView = new MockListView(mActivity);
List<String> items = Lists.newArrayList("1", "2", "3");
@@ -3367,6 +3385,126 @@
bg.hasCalledSetTint());
}
+ public void testStartActionModeWithParent() {
+ View view = new View(mActivity);
+ MockViewGroup parent = new MockViewGroup(mActivity);
+ parent.addView(view);
+
+ ActionMode mode = view.startActionMode(null);
+
+ assertNotNull(mode);
+ assertEquals(NO_OP_ACTION_MODE, mode);
+ assertTrue(parent.isStartActionModeForChildCalled);
+ assertEquals(ActionMode.TYPE_PRIMARY, parent.startActionModeForChildType);
+ }
+
+ public void testStartActionModeWithoutParent() {
+ View view = new View(mActivity);
+
+ ActionMode mode = view.startActionMode(null);
+
+ assertNull(mode);
+ }
+
+ public void testStartActionModeTypedWithParent() {
+ View view = new View(mActivity);
+ MockViewGroup parent = new MockViewGroup(mActivity);
+ parent.addView(view);
+
+ ActionMode mode = view.startActionMode(null, ActionMode.TYPE_FLOATING);
+
+ assertNotNull(mode);
+ assertEquals(NO_OP_ACTION_MODE, mode);
+ assertTrue(parent.isStartActionModeForChildCalled);
+ assertEquals(ActionMode.TYPE_FLOATING, parent.startActionModeForChildType);
+ }
+
+ public void testStartActionModeTypedWithoutParent() {
+ View view = new View(mActivity);
+
+ ActionMode mode = view.startActionMode(null, ActionMode.TYPE_FLOATING);
+
+ assertNull(mode);
+ }
+
+ private static class MockViewGroup extends ViewGroup {
+ boolean isStartActionModeForChildCalled = false;
+ int startActionModeForChildType = ActionMode.TYPE_PRIMARY;
+
+ public MockViewGroup(Context context) {
+ super(context);
+ }
+
+ @Override
+ public ActionMode startActionModeForChild(View originalView, ActionMode.Callback callback) {
+ isStartActionModeForChildCalled = true;
+ startActionModeForChildType = ActionMode.TYPE_PRIMARY;
+ return NO_OP_ACTION_MODE;
+ }
+
+ @Override
+ public ActionMode startActionModeForChild(
+ View originalView, ActionMode.Callback callback, int type) {
+ isStartActionModeForChildCalled = true;
+ startActionModeForChildType = type;
+ return NO_OP_ACTION_MODE;
+ }
+
+ @Override
+ protected void onLayout(boolean changed, int l, int t, int r, int b) {
+ // no-op
+ }
+ }
+
+ private static final ActionMode NO_OP_ACTION_MODE =
+ new ActionMode() {
+ @Override
+ public void setTitle(CharSequence title) {}
+
+ @Override
+ public void setTitle(int resId) {}
+
+ @Override
+ public void setSubtitle(CharSequence subtitle) {}
+
+ @Override
+ public void setSubtitle(int resId) {}
+
+ @Override
+ public void setCustomView(View view) {}
+
+ @Override
+ public void invalidate() {}
+
+ @Override
+ public void finish() {}
+
+ @Override
+ public Menu getMenu() {
+ return null;
+ }
+
+ @Override
+ public CharSequence getTitle() {
+ return null;
+ }
+
+ @Override
+ public CharSequence getSubtitle() {
+ return null;
+ }
+
+ @Override
+ public View getCustomView() {
+ return null;
+ }
+
+ @Override
+ public MenuInflater getMenuInflater() {
+ return null;
+ }
+ };
+
private static class MockDrawable extends Drawable {
private boolean mCalledSetTint = false;
diff --git a/tests/tests/webkit/src/android/webkit/cts/PostMessageTest.java b/tests/tests/webkit/src/android/webkit/cts/PostMessageTest.java
new file mode 100644
index 0000000..2a6af6e
--- /dev/null
+++ b/tests/tests/webkit/src/android/webkit/cts/PostMessageTest.java
@@ -0,0 +1,222 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.webkit.cts;
+
+import android.cts.util.NullWebViewUtils;
+import android.cts.util.PollingCheck;
+import android.net.Uri;
+import android.test.ActivityInstrumentationTestCase2;
+import android.test.UiThreadTest;
+import android.webkit.WebMessage;
+import android.webkit.WebMessagePort;
+import android.webkit.WebView;
+
+import java.util.concurrent.CountDownLatch;
+import junit.framework.Assert;
+
+public class PostMessageTest extends ActivityInstrumentationTestCase2<WebViewCtsActivity> {
+ public static final long TIMEOUT = 20000L;
+
+ private WebView mWebView;
+ private WebViewOnUiThread mOnUiThread;
+
+ private static final String WEBVIEW_MESSAGE = "from_webview";
+ private static final String BASE_URI = "http://www.example.com";
+
+ public PostMessageTest() {
+ super("com.android.cts.webkit", WebViewCtsActivity.class);
+ }
+
+ @Override
+ protected void setUp() throws Exception {
+ super.setUp();
+ final WebViewCtsActivity activity = getActivity();
+ mWebView = activity.getWebView();
+ if (mWebView != null) {
+ mOnUiThread = new WebViewOnUiThread(this, mWebView);
+ mOnUiThread.getSettings().setJavaScriptEnabled(true);
+ }
+ }
+
+ @Override
+ protected void tearDown() throws Exception {
+ if (mOnUiThread != null) {
+ mOnUiThread.cleanUp();
+ }
+ super.tearDown();
+ }
+
+ private static final String TITLE_FROM_POST_MESSAGE =
+ "<!DOCTYPE html><html><body>"
+ + " <script>"
+ + " var received = '';"
+ + " onmessage = function (e) {"
+ + " received += e.data;"
+ + " document.title = received; };"
+ + " </script>"
+ + "</body></html>";
+
+ // Acks each received message from the message channel with a seq number.
+ private static final String CHANNEL_MESSAGE =
+ "<!DOCTYPE html><html><body>"
+ + " <script>"
+ + " var counter = 0;"
+ + " onmessage = function (e) {"
+ + " var myPort = e.ports[0];"
+ + " myPort.onmessage = function (f) {"
+ + " myPort.postMessage(f.data + counter++);"
+ + " }"
+ + " }"
+ + " </script>"
+ + "</body></html>";
+
+ private void loadPage(String data) {
+ mOnUiThread.loadDataWithBaseURLAndWaitForCompletion(BASE_URI, data,
+ "text/html", "UTF-8", null);
+ }
+
+ private void waitForTitle(final String title) {
+ new PollingCheck(TIMEOUT) {
+ @Override
+ protected boolean check() {
+ return mOnUiThread.getTitle().equals(title);
+ }
+ }.run();
+ }
+
+ // Post a string message to main frame and make sure it is received.
+ public void testSimpleMessageToMainFrame() throws Throwable {
+ if (!NullWebViewUtils.isWebViewAvailable()) {
+ return;
+ }
+ loadPage(TITLE_FROM_POST_MESSAGE);
+ WebMessage message = new WebMessage(WEBVIEW_MESSAGE);
+ mOnUiThread.postWebMessage(message, Uri.parse(BASE_URI));
+ waitForTitle(WEBVIEW_MESSAGE);
+ }
+
+ // Post multiple messages to main frame and make sure they are received in
+ // correct order.
+ public void testMultipleMessagesToMainFrame() throws Throwable {
+ if (!NullWebViewUtils.isWebViewAvailable()) {
+ return;
+ }
+ loadPage(TITLE_FROM_POST_MESSAGE);
+ for (int i = 0; i < 10; i++) {
+ mOnUiThread.postWebMessage(new WebMessage(Integer.toString(i)),
+ Uri.parse(BASE_URI));
+ }
+ waitForTitle("0123456789");
+ }
+
+ // Create a message channel and make sure it can be used for data transfer to/from js.
+ public void testMessageChannel() throws Throwable {
+ if (!NullWebViewUtils.isWebViewAvailable()) {
+ return;
+ }
+ loadPage(CHANNEL_MESSAGE);
+ final WebMessagePort[] channel = mOnUiThread.createWebMessageChannel();
+ WebMessage message = new WebMessage(WEBVIEW_MESSAGE, new WebMessagePort[]{channel[1]});
+ mOnUiThread.postWebMessage(message, Uri.parse(BASE_URI));
+ final int messageCount = 3;
+ final CountDownLatch latch = new CountDownLatch(messageCount);
+ runTestOnUiThread(new Runnable() {
+ @Override
+ public void run() {
+ for (int i = 0; i < messageCount; i++) {
+ channel[0].postMessage(new WebMessage(WEBVIEW_MESSAGE + i));
+ }
+ channel[0].setWebMessageCallback(new WebMessagePort.WebMessageCallback() {
+ @Override
+ public void onMessage(WebMessagePort port, WebMessage message) {
+ int i = messageCount - (int)latch.getCount();
+ assertEquals(WEBVIEW_MESSAGE + i + i, message.getData());
+ latch.countDown();
+ }
+ });
+ }
+ });
+ // Wait for all the responses to arrive.
+ boolean ignore = latch.await(TIMEOUT, java.util.concurrent.TimeUnit.MILLISECONDS);
+ }
+
+ // Test that a message port that is closed cannot be used to send a message
+ public void testClose() throws Throwable {
+ if (!NullWebViewUtils.isWebViewAvailable()) {
+ return;
+ }
+ loadPage(CHANNEL_MESSAGE);
+ final WebMessagePort[] channel = mOnUiThread.createWebMessageChannel();
+ WebMessage message = new WebMessage(WEBVIEW_MESSAGE, new WebMessagePort[]{channel[1]});
+ mOnUiThread.postWebMessage(message, Uri.parse(BASE_URI));
+ runTestOnUiThread(new Runnable() {
+ @Override
+ public void run() {
+ try {
+ channel[0].close();
+ channel[0].postMessage(new WebMessage(WEBVIEW_MESSAGE));
+ } catch (IllegalStateException ex) {
+ // expect to receive an exception
+ return;
+ }
+ Assert.fail("A closed port cannot be used to transfer messages");
+ }
+ });
+ }
+
+ // Sends a new message channel from JS to Java.
+ private static final String CHANNEL_FROM_JS =
+ "<!DOCTYPE html><html><body>"
+ + " <script>"
+ + " var counter = 0;"
+ + " var mc = new MessageChannel();"
+ + " var received = '';"
+ + " mc.port1.onmessage = function (e) {"
+ + " received = e.data;"
+ + " document.title = e.data;"
+ + " };"
+ + " onmessage = function (e) {"
+ + " var myPort = e.ports[0];"
+ + " myPort.postMessage('', [mc.port2]);"
+ + " };"
+ + " </script>"
+ + "</body></html>";
+
+ // Test that a message port created in JS can be received and used for message transfer.
+ public void testReceiveMessagePort() throws Throwable {
+ final String hello = "HELLO";
+ if (!NullWebViewUtils.isWebViewAvailable()) {
+ return;
+ }
+ loadPage(CHANNEL_FROM_JS);
+ final WebMessagePort[] channel = mOnUiThread.createWebMessageChannel();
+ WebMessage message = new WebMessage(WEBVIEW_MESSAGE, new WebMessagePort[]{channel[1]});
+ mOnUiThread.postWebMessage(message, Uri.parse(BASE_URI));
+ runTestOnUiThread(new Runnable() {
+ @Override
+ public void run() {
+ channel[0].setWebMessageCallback(new WebMessagePort.WebMessageCallback() {
+ @Override
+ public void onMessage(WebMessagePort port, WebMessage message) {
+ message.getPorts()[0].postMessage(new WebMessage(hello));
+ }
+ });
+ }
+ });
+ waitForTitle(hello);
+ }
+}
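The tests above drive the API through the CTS WebViewOnUiThread wrapper; a minimal sketch of the same message-channel handshake against a raw WebView, assuming all calls run on the UI thread and the page hosting the onmessage handler is already loaded (the URI and message strings are illustrative):

    import android.net.Uri;
    import android.webkit.WebMessage;
    import android.webkit.WebMessagePort;
    import android.webkit.WebView;

    class WebMessageSketch {
        // Create a channel, hand one port to the page, and talk over the other.
        static void openChannel(WebView webView) {
            WebMessagePort[] channel = webView.createWebMessageChannel();
            // Transfer channel[1] to the page; its onmessage handler keeps it for replies.
            webView.postWebMessage(
                    new WebMessage("init", new WebMessagePort[] { channel[1] }),
                    Uri.parse("http://www.example.com"));
            // Listen for replies on the retained port and send a first message.
            channel[0].setWebMessageCallback(new WebMessagePort.WebMessageCallback() {
                @Override
                public void onMessage(WebMessagePort port, WebMessage message) {
                    // message.getData() carries the page's reply.
                }
            });
            channel[0].postMessage(new WebMessage("hello"));
        }
    }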
diff --git a/tests/tests/webkit/src/android/webkit/cts/WebViewClientTest.java b/tests/tests/webkit/src/android/webkit/cts/WebViewClientTest.java
index b053784..0697429 100644
--- a/tests/tests/webkit/src/android/webkit/cts/WebViewClientTest.java
+++ b/tests/tests/webkit/src/android/webkit/cts/WebViewClientTest.java
@@ -30,7 +30,6 @@
import android.webkit.WebResourceError;
import android.webkit.WebResourceRequest;
import android.webkit.WebResourceResponse;
-import android.webkit.WebResourceResponseBase;
import android.webkit.WebSettings;
import android.webkit.WebView;
import android.webkit.WebViewClient;
@@ -538,7 +537,7 @@
private boolean mOnLoadResourceCalled;
private int mOnReceivedErrorCode;
private WebResourceError mOnReceivedResourceError;
- private WebResourceResponseBase mOnReceivedHttpError;
+ private WebResourceResponse mOnReceivedHttpError;
private boolean mOnFormResubmissionCalled;
private boolean mDoUpdateVisitedHistoryCalled;
private boolean mOnReceivedHttpAuthRequestCalled;
@@ -571,7 +570,7 @@
return mOnReceivedResourceError;
}
- public WebResourceResponseBase hasOnReceivedHttpError() {
+ public WebResourceResponse hasOnReceivedHttpError() {
return mOnReceivedHttpError;
}
@@ -640,7 +639,7 @@
@Override
public void onReceivedHttpError(WebView view, WebResourceRequest request,
- WebResourceResponseBase errorResponse) {
+ WebResourceResponse errorResponse) {
super.onReceivedHttpError(view, request, errorResponse);
mOnReceivedHttpError = errorResponse;
}
diff --git a/tests/tests/webkit/src/android/webkit/cts/WebViewTest.java b/tests/tests/webkit/src/android/webkit/cts/WebViewTest.java
index c5b77a2..dba2243 100755
--- a/tests/tests/webkit/src/android/webkit/cts/WebViewTest.java
+++ b/tests/tests/webkit/src/android/webkit/cts/WebViewTest.java
@@ -2466,7 +2466,7 @@
mOnUiThread.loadUrl("about:blank");
- mOnUiThread.insertVisualStateCallback(kRequest, new VisualStateCallback() {
+ mOnUiThread.postVisualStateCallback(kRequest, new VisualStateCallback() {
public void onComplete(long requestId) {
assertEquals(kRequest, requestId);
callbackLatch.countDown();
diff --git a/tests/webgl/AndroidManifest.xml b/tests/webgl/AndroidManifest.xml
deleted file mode 100755
index d648032..0000000
--- a/tests/webgl/AndroidManifest.xml
+++ /dev/null
@@ -1,45 +0,0 @@
-<?xml version="1.0" encoding="utf-8"?>
-<!--
- * Copyright (C) 2014 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- -->
-
-<manifest xmlns:android="http://schemas.android.com/apk/res/android"
- package="android.webgl.cts">
-
- <uses-permission android:name="android.permission.DISABLE_KEYGUARD" />
- <uses-permission android:name="android.permission.INTERNET" />
-
- <application android:maxRecents="1">
- <uses-library android:name="android.test.runner" />
- <activity android:name="android.webgl.WebGLActivity" >
- <intent-filter>
- <action android:name="android.intent.action.MAIN" />
- <category android:name="android.intent.category.LAUNCHER" />
- </intent-filter>
- </activity>
- </application>
-
-
- <!-- self-instrumenting test package. -->
- <instrumentation
- android:name="android.support.test.runner.AndroidJUnitRunner"
- android:label="CTS WebGL tests"
- android:targetPackage="android.webgl.cts" >
- <meta-data
- android:name="listener"
- android:value="com.android.cts.runner.CtsTestRunListener" />
- </instrumentation>
-</manifest>
-
diff --git a/tests/webgl/res/raw/extract_webgl_tests.py b/tests/webgl/res/raw/extract_webgl_tests.py
deleted file mode 100755
index 1511632..0000000
--- a/tests/webgl/res/raw/extract_webgl_tests.py
+++ /dev/null
@@ -1,69 +0,0 @@
-# Copyright (C) 2014 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import sys
-import os
-
-if len(sys.argv) != 3:
- raise Exception("Usage: extract_webgl_tests.py <webgl_sdk_tests_path> <version>")
-
-top_list = sys.argv[1] + "/00_test_list.txt"
-version = sys.argv[2]
-tests = []
-lists = []
-lists.append(top_list)
-
-def filter_by_version(lines):
- version_lines = [ line for line in lines if "--min-version" in line ]
- version_lines.extend([ line for line in lines if "--max-version" in line ])
- lines = [ line for line in lines if not line in version_lines ]
- for line in version_lines:
- assert len(line.split()) == 3
- min_version = line.split()[1] if line.split()[0] == "--min-version" else "0.0.0"
- max_version = line.split()[1] if line.split()[0] == "--max-version" else "9.9.9"
- test = line.split()[2]
- if (version >= min_version and version <= max_version):
- lines.append(test)
- return lines
-
-while not len(lists) == 0:
- lists2 = lists
- lists = []
- for list in lists2:
- directory = os.path.dirname(os.path.realpath(list))
- with open(list) as file:
- # Filter out comments, strip --slow markers, and apply --min/--max-version constraints
- lines = [ line.strip() for line in file.readlines()]
- lines = [ line for line in lines if not "//" in line ]
- lines = [ line for line in lines if not "#" in line ]
- lines = [ line.replace("--slow","") for line in lines ]
- lines = filter_by_version(lines)
- # Append lists and tests found in this list.
- lines = [ directory + "/" + line for line in lines ]
- lists.extend([ line for line in lines if "00_test_list.txt" in line ])
- tests.extend([ line for line in lines if ".html" in line ])
-
-# Directories for formatting test-names/relative-paths.
-name_directory = os.path.dirname(os.path.realpath(top_list))
-path_directory = os.path.realpath(os.path.join(name_directory, os.pardir))
-
-tests = sorted(tests)
-for test in tests:
- test_path = test.replace(path_directory + "/", "")
- test_name = test.replace(name_directory + "/", "")
- test_name = test_name.replace("/","_")
- test_name = test_name.replace(".","_")
- test_name = test_name.replace("-","_")
- print " public void test_" + test_name + "() throws Exception { doTest(\"" + test_path + "\"); }"
-
diff --git a/tests/webgl/res/raw/harness.html b/tests/webgl/res/raw/harness.html
deleted file mode 100644
index 5ae56ef..0000000
--- a/tests/webgl/res/raw/harness.html
+++ /dev/null
@@ -1,44 +0,0 @@
-<!DOCTYPE html>
-<!-- saved from url=(0057)http://www.corp.google.com/~vollick/timing-functions.html -->
-<html>
-<head><meta http-equiv="Content-Type" content="text/html; charset=ISO-8859-1">
-
-<script type="text/javascript">
- // Check for WebGL Support.
- function supportsWebGL() {
- var canvas = document.createElement('canvas');
- gl = canvas.getContext("webgl");
- return !!gl;
- }
-
- // Pass the WebGL harness calls through to the native app.
- webglTestHarness = {
- notifyFinished: function() {
- WebGLCallback.notifyFinished();
- },
- reportResults: function(type, success, msg) {
- WebGLCallback.reportResults(type, success, msg);
- }
- }
- function navigateToTest() {
- if (supportsWebGL())
- window.open(WebGLCallback.getUrlToTest(), "TestFrame");
- else
- WebGLCallback.notifyFinished();
- }
- window.addEventListener('load', navigateToTest, false);
-</script>
-
-<style type="text/css">
-body, html { margin: 0; padding: 0; height: 100%; overflow: hidden; }
-#content { position:absolute; left: 0; right: 0; bottom: 0; top: 0px; }
-</style>
-
-</head>
-
-<body>
- <div id="content">
- <iframe name="TestFrame" width="100%" height="100%" frameborder="0"/>
- </div>
-</body>
-</html>
diff --git a/tests/webgl/res/raw/webgl_sdk_tests.zip b/tests/webgl/res/raw/webgl_sdk_tests.zip
deleted file mode 100644
index a2086b0..0000000
--- a/tests/webgl/res/raw/webgl_sdk_tests.zip
+++ /dev/null
Binary files differ
diff --git a/tests/webgl/src/android/webgl/WebGLActivity.java b/tests/webgl/src/android/webgl/WebGLActivity.java
deleted file mode 100644
index 3f911c4..0000000
--- a/tests/webgl/src/android/webgl/WebGLActivity.java
+++ /dev/null
@@ -1,142 +0,0 @@
-/*
- * Copyright (C) 2014 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package android.webgl;
-
-import android.app.Activity;
-import android.content.Context;
-import android.content.res.Resources;
-import android.cts.util.NullWebViewUtils;
-import android.os.Bundle;
-import android.util.Log;
-import android.webgl.cts.R;
-import android.webkit.WebView;
-import android.webkit.JavascriptInterface;
-import android.webkit.WebViewClient;
-import android.widget.Toast;
-import java.lang.Override;
-import java.io.InputStream;
-import java.util.concurrent.Semaphore;
-import java.util.concurrent.TimeUnit;
-
-/**
- * A simple activity for testing WebGL Conformance with WebView.
- */
-public class WebGLActivity extends Activity {
-
- Semaphore mFinished = new Semaphore(0, false);
- Semaphore mDestroyed = new Semaphore(0, false);
- String mWebGlHarnessUrl;
- WebView mWebView;
-
- // The following members are synchronized.
- String mWebGLTestUrl;
- boolean mPassed = true;
- StringBuilder mMessage = new StringBuilder("\n");
-
- @Override
- public void onCreate(Bundle icicle) {
- super.onCreate(icicle);
-
- mWebGlHarnessUrl = "file://" + getCacheDir() + "/harness.html";
- try {
- mWebView = new WebView(this);
- } catch (Exception e) {
- NullWebViewUtils.determineIfWebViewAvailable(this, e);
- }
-
- if (mWebView == null) {
- return;
- }
-
- mWebView.getSettings().setJavaScriptEnabled(true);
- mWebView.getSettings().setAllowFileAccessFromFileURLs(true);
- mWebView.getSettings().setMediaPlaybackRequiresUserGesture(false);
- mWebView.setWebViewClient(new WebViewClient() {
- @Override
- public boolean shouldOverrideUrlLoading(WebView webView, String url) {
- return false;
- }
- });
-
- mWebView.addJavascriptInterface(new Object() {
- @JavascriptInterface
- public String getUrlToTest() {
- synchronized(WebGLActivity.this) {
- return mWebGLTestUrl;
- }
- }
-
- @JavascriptInterface
- public void reportResults(String type, boolean success, String message) {
- synchronized(WebGLActivity.this) {
- mMessage.append((success ? "PASS " : "FAIL ") + message + "\n");
- mPassed &= success;
- }
- }
-
- @JavascriptInterface
- public void notifyFinished() {
- mFinished.release();
- }
-
- @JavascriptInterface
- public void alert(String string) {
- Log.i(mWebGLTestUrl, string);
- }
- }, "WebGLCallback");
- setContentView(mWebView);
- }
-
- public void navigateToTest(String url) throws Exception {
- if (!NullWebViewUtils.isWebViewAvailable()) {
- return;
- }
-
- synchronized(WebGLActivity.this) {
- mWebGLTestUrl = url;
- }
-
- // Load harness.html, which will load mWebGLTestUrl in an <iframe>.
- runOnUiThread(new Runnable() {
- public void run() {
- mWebView.loadUrl(mWebGlHarnessUrl);
- }
- });
-
- // Wait on test completion.
- boolean finished = mFinished.tryAcquire(60, TimeUnit.SECONDS);
- String message;
- synchronized(WebGLActivity.this) {
- message = mMessage.toString();
- }
-
- // Destroy the webview and wait for it.
- runOnUiThread(new Runnable() {
- public void run() {
- mWebView.destroy();
- finish();
- mDestroyed.release();
- }
- });
- mDestroyed.acquire();
-
- if (!finished)
- throw new Exception("\n" + url + "\n Test timed-out after 60 seconds: " + message);
- if(!mPassed)
- throw new Exception("\n" + url + "\n Test failed: " + message);
- }
-}
diff --git a/tests/webgl/src/android/webgl/cts/WebGLConformanceSuite.java b/tests/webgl/src/android/webgl/cts/WebGLConformanceSuite.java
deleted file mode 100644
index 60f663a..0000000
--- a/tests/webgl/src/android/webgl/cts/WebGLConformanceSuite.java
+++ /dev/null
@@ -1,51 +0,0 @@
-/*
- * Copyright (C) 2014 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package android.webgl.cts;
-
-import android.util.Log;
-import android.webgl.cts.R;
-import android.webgl.WebGLActivity;
-import java.lang.Override;
-import java.io.File;
-import java.io.InputStream;
-
-/**
- * A Singleton class to wrap the WebGL Conformance Test Suite.
- */
-public class WebGLConformanceSuite {
- private final String TAG = "WebGLConformanceSuite";
- private static volatile WebGLConformanceSuite mInstance = null;
-
- private WebGLConformanceSuite(WebGLActivity activity) throws Exception {
- Log.i(TAG, "Unzipping WebGL Conformance Suite: "
- + activity.getCacheDir().getPath());
- InputStream suite = activity.getResources().openRawResource(R.raw.webgl_sdk_tests);
- ZipUtil.unzipToPath(suite, activity.getCacheDir());
- InputStream harness = activity.getResources().openRawResource(R.raw.harness);
- ZipUtil.streamToPath(harness, activity.getCacheDir(), "harness.html");
- }
-
- public static WebGLConformanceSuite init(WebGLActivity activity)
- throws Exception {
- if (mInstance == null) {
- synchronized (WebGLConformanceSuite.class) {
- mInstance = new WebGLConformanceSuite(activity);
- }
- }
- return mInstance;
- }
-}
diff --git a/tests/webgl/src/android/webgl/cts/WebGLTest.java b/tests/webgl/src/android/webgl/cts/WebGLTest.java
deleted file mode 100644
index d45c190..0000000
--- a/tests/webgl/src/android/webgl/cts/WebGLTest.java
+++ /dev/null
@@ -1,419 +0,0 @@
-/*
- * Copyright (C) 2014 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package android.webgl.cts;
-
-import android.webgl.WebGLActivity;
-import android.webgl.cts.R;
-import android.test.ActivityInstrumentationTestCase2;
-import java.io.InputStream;
-
-/**
- * A simple wrapper to load each WebGL conformance test in WebView.
- *
- * This test uses {@link android.test.ActivityInstrumentationTestCase2} to instrument the
- * {@link android.webgl.WebGLActivity}.
- */
-public class WebGLTest extends ActivityInstrumentationTestCase2<WebGLActivity> {
-
- /**
- * A reference to the activity whose shared preferences are being tested.
- */
- private WebGLActivity mActivity;
- private WebGLConformanceSuite mWebGL_1_0_1;
-
- public WebGLTest() {
- super(WebGLActivity.class);
- }
-
- @Override
- protected void setUp() throws Exception {
- super.setUp();
- // Start the activity and get a reference to it.
- mActivity = getActivity();
- // Wait for the UI Thread to become idle.
- getInstrumentation().waitForIdleSync();
- mWebGL_1_0_1 = WebGLConformanceSuite.init(mActivity);
- }
-
- @Override
- protected void tearDown() throws Exception {
- // Scrub the activity so it can be freed. The next setUp() will create a new activity
- // rather than reusing the old one.
- mActivity = null;
- super.tearDown();
- }
-
- protected void doTest(String testPage) throws Exception {
- mActivity.navigateToTest(testPage);
- }
-
- /**
- * The remainder of this file is generated using this command:
- * extract_webgl_tests.py tests 1.0.1
- */
- public void test_conformance_attribs_gl_enable_vertex_attrib_html() throws Exception { doTest("tests/conformance/attribs/gl-enable-vertex-attrib.html"); }
- public void test_conformance_attribs_gl_vertex_attrib_zero_issues_html() throws Exception { doTest("tests/conformance/attribs/gl-vertex-attrib-zero-issues.html"); }
- public void test_conformance_attribs_gl_vertex_attrib_html() throws Exception { doTest("tests/conformance/attribs/gl-vertex-attrib.html"); }
- public void test_conformance_attribs_gl_vertexattribpointer_offsets_html() throws Exception { doTest("tests/conformance/attribs/gl-vertexattribpointer-offsets.html"); }
- public void test_conformance_attribs_gl_vertexattribpointer_html() throws Exception { doTest("tests/conformance/attribs/gl-vertexattribpointer.html"); }
- public void test_conformance_buffers_buffer_bind_test_html() throws Exception { doTest("tests/conformance/buffers/buffer-bind-test.html"); }
- public void test_conformance_buffers_buffer_data_array_buffer_html() throws Exception { doTest("tests/conformance/buffers/buffer-data-array-buffer.html"); }
- public void test_conformance_buffers_index_validation_copies_indices_html() throws Exception { doTest("tests/conformance/buffers/index-validation-copies-indices.html"); }
- public void test_conformance_buffers_index_validation_crash_with_buffer_sub_data_html() throws Exception { doTest("tests/conformance/buffers/index-validation-crash-with-buffer-sub-data.html"); }
- public void test_conformance_buffers_index_validation_verifies_too_many_indices_html() throws Exception { doTest("tests/conformance/buffers/index-validation-verifies-too-many-indices.html"); }
- public void test_conformance_buffers_index_validation_with_resized_buffer_html() throws Exception { doTest("tests/conformance/buffers/index-validation-with-resized-buffer.html"); }
- public void test_conformance_buffers_index_validation_html() throws Exception { doTest("tests/conformance/buffers/index-validation.html"); }
- public void test_conformance_canvas_buffer_offscreen_test_html() throws Exception { doTest("tests/conformance/canvas/buffer-offscreen-test.html"); }
- public void test_conformance_canvas_buffer_preserve_test_html() throws Exception { doTest("tests/conformance/canvas/buffer-preserve-test.html"); }
- public void test_conformance_canvas_canvas_test_html() throws Exception { doTest("tests/conformance/canvas/canvas-test.html"); }
- public void test_conformance_canvas_canvas_zero_size_html() throws Exception { doTest("tests/conformance/canvas/canvas-zero-size.html"); }
- public void test_conformance_canvas_drawingbuffer_static_canvas_test_html() throws Exception { doTest("tests/conformance/canvas/drawingbuffer-static-canvas-test.html"); }
- public void test_conformance_canvas_drawingbuffer_test_html() throws Exception { doTest("tests/conformance/canvas/drawingbuffer-test.html"); }
- public void test_conformance_canvas_viewport_unchanged_upon_resize_html() throws Exception { doTest("tests/conformance/canvas/viewport-unchanged-upon-resize.html"); }
- public void test_conformance_context_constants_and_properties_html() throws Exception { doTest("tests/conformance/context/constants-and-properties.html"); }
- public void test_conformance_context_context_attributes_alpha_depth_stencil_antialias_html() throws Exception { doTest("tests/conformance/context/context-attributes-alpha-depth-stencil-antialias.html"); }
- public void test_conformance_context_context_lost_restored_html() throws Exception { doTest("tests/conformance/context/context-lost-restored.html"); }
- public void test_conformance_context_context_lost_html() throws Exception { doTest("tests/conformance/context/context-lost.html"); }
- public void test_conformance_context_context_type_test_html() throws Exception { doTest("tests/conformance/context/context-type-test.html"); }
- public void test_conformance_context_incorrect_context_object_behaviour_html() throws Exception { doTest("tests/conformance/context/incorrect-context-object-behaviour.html"); }
- public void test_conformance_context_methods_html() throws Exception { doTest("tests/conformance/context/methods.html"); }
- public void test_conformance_context_premultiplyalpha_test_html() throws Exception { doTest("tests/conformance/context/premultiplyalpha-test.html"); }
- public void test_conformance_context_resource_sharing_test_html() throws Exception { doTest("tests/conformance/context/resource-sharing-test.html"); }
- public void test_conformance_extensions_oes_standard_derivatives_html() throws Exception { doTest("tests/conformance/extensions/oes-standard-derivatives.html"); }
- public void test_conformance_extensions_oes_texture_float_with_canvas_html() throws Exception { doTest("tests/conformance/extensions/oes-texture-float-with-canvas.html"); }
- public void test_conformance_extensions_oes_texture_float_with_image_data_html() throws Exception { doTest("tests/conformance/extensions/oes-texture-float-with-image-data.html"); }
- public void test_conformance_extensions_oes_texture_float_with_image_html() throws Exception { doTest("tests/conformance/extensions/oes-texture-float-with-image.html"); }
- public void test_conformance_extensions_oes_texture_float_with_video_html() throws Exception { doTest("tests/conformance/extensions/oes-texture-float-with-video.html"); }
- public void test_conformance_extensions_oes_texture_float_html() throws Exception { doTest("tests/conformance/extensions/oes-texture-float.html"); }
- public void test_conformance_extensions_oes_vertex_array_object_html() throws Exception { doTest("tests/conformance/extensions/oes-vertex-array-object.html"); }
- public void test_conformance_extensions_webgl_debug_renderer_info_html() throws Exception { doTest("tests/conformance/extensions/webgl-debug-renderer-info.html"); }
- public void test_conformance_extensions_webgl_debug_shaders_html() throws Exception { doTest("tests/conformance/extensions/webgl-debug-shaders.html"); }
- public void test_conformance_glsl_functions_glsl_function_abs_html() throws Exception { doTest("tests/conformance/glsl/functions/glsl-function-abs.html"); }
- public void test_conformance_glsl_functions_glsl_function_acos_html() throws Exception { doTest("tests/conformance/glsl/functions/glsl-function-acos.html"); }
- public void test_conformance_glsl_functions_glsl_function_asin_html() throws Exception { doTest("tests/conformance/glsl/functions/glsl-function-asin.html"); }
- public void test_conformance_glsl_functions_glsl_function_atan_xy_html() throws Exception { doTest("tests/conformance/glsl/functions/glsl-function-atan-xy.html"); }
- public void test_conformance_glsl_functions_glsl_function_atan_html() throws Exception { doTest("tests/conformance/glsl/functions/glsl-function-atan.html"); }
- public void test_conformance_glsl_functions_glsl_function_ceil_html() throws Exception { doTest("tests/conformance/glsl/functions/glsl-function-ceil.html"); }
- public void test_conformance_glsl_functions_glsl_function_clamp_float_html() throws Exception { doTest("tests/conformance/glsl/functions/glsl-function-clamp-float.html"); }
- public void test_conformance_glsl_functions_glsl_function_clamp_gentype_html() throws Exception { doTest("tests/conformance/glsl/functions/glsl-function-clamp-gentype.html"); }
- public void test_conformance_glsl_functions_glsl_function_cos_html() throws Exception { doTest("tests/conformance/glsl/functions/glsl-function-cos.html"); }
- public void test_conformance_glsl_functions_glsl_function_cross_html() throws Exception { doTest("tests/conformance/glsl/functions/glsl-function-cross.html"); }
- public void test_conformance_glsl_functions_glsl_function_distance_html() throws Exception { doTest("tests/conformance/glsl/functions/glsl-function-distance.html"); }
- public void test_conformance_glsl_functions_glsl_function_dot_html() throws Exception { doTest("tests/conformance/glsl/functions/glsl-function-dot.html"); }
- public void test_conformance_glsl_functions_glsl_function_faceforward_html() throws Exception { doTest("tests/conformance/glsl/functions/glsl-function-faceforward.html"); }
- public void test_conformance_glsl_functions_glsl_function_floor_html() throws Exception { doTest("tests/conformance/glsl/functions/glsl-function-floor.html"); }
- public void test_conformance_glsl_functions_glsl_function_fract_html() throws Exception { doTest("tests/conformance/glsl/functions/glsl-function-fract.html"); }
- public void test_conformance_glsl_functions_glsl_function_length_html() throws Exception { doTest("tests/conformance/glsl/functions/glsl-function-length.html"); }
- public void test_conformance_glsl_functions_glsl_function_max_float_html() throws Exception { doTest("tests/conformance/glsl/functions/glsl-function-max-float.html"); }
- public void test_conformance_glsl_functions_glsl_function_max_gentype_html() throws Exception { doTest("tests/conformance/glsl/functions/glsl-function-max-gentype.html"); }
- public void test_conformance_glsl_functions_glsl_function_min_float_html() throws Exception { doTest("tests/conformance/glsl/functions/glsl-function-min-float.html"); }
- public void test_conformance_glsl_functions_glsl_function_min_gentype_html() throws Exception { doTest("tests/conformance/glsl/functions/glsl-function-min-gentype.html"); }
- public void test_conformance_glsl_functions_glsl_function_mix_float_html() throws Exception { doTest("tests/conformance/glsl/functions/glsl-function-mix-float.html"); }
- public void test_conformance_glsl_functions_glsl_function_mix_gentype_html() throws Exception { doTest("tests/conformance/glsl/functions/glsl-function-mix-gentype.html"); }
- public void test_conformance_glsl_functions_glsl_function_mod_float_html() throws Exception { doTest("tests/conformance/glsl/functions/glsl-function-mod-float.html"); }
- public void test_conformance_glsl_functions_glsl_function_mod_gentype_html() throws Exception { doTest("tests/conformance/glsl/functions/glsl-function-mod-gentype.html"); }
- public void test_conformance_glsl_functions_glsl_function_normalize_html() throws Exception { doTest("tests/conformance/glsl/functions/glsl-function-normalize.html"); }
- public void test_conformance_glsl_functions_glsl_function_reflect_html() throws Exception { doTest("tests/conformance/glsl/functions/glsl-function-reflect.html"); }
- public void test_conformance_glsl_functions_glsl_function_sign_html() throws Exception { doTest("tests/conformance/glsl/functions/glsl-function-sign.html"); }
- public void test_conformance_glsl_functions_glsl_function_sin_html() throws Exception { doTest("tests/conformance/glsl/functions/glsl-function-sin.html"); }
- public void test_conformance_glsl_functions_glsl_function_smoothstep_float_html() throws Exception { doTest("tests/conformance/glsl/functions/glsl-function-smoothstep-float.html"); }
- public void test_conformance_glsl_functions_glsl_function_smoothstep_gentype_html() throws Exception { doTest("tests/conformance/glsl/functions/glsl-function-smoothstep-gentype.html"); }
- public void test_conformance_glsl_functions_glsl_function_step_float_html() throws Exception { doTest("tests/conformance/glsl/functions/glsl-function-step-float.html"); }
- public void test_conformance_glsl_functions_glsl_function_step_gentype_html() throws Exception { doTest("tests/conformance/glsl/functions/glsl-function-step-gentype.html"); }
- public void test_conformance_glsl_functions_glsl_function_html() throws Exception { doTest("tests/conformance/glsl/functions/glsl-function.html"); }
- public void test_conformance_glsl_implicit_add_int_float_vert_html() throws Exception { doTest("tests/conformance/glsl/implicit/add_int_float.vert.html"); }
- public void test_conformance_glsl_implicit_add_int_mat2_vert_html() throws Exception { doTest("tests/conformance/glsl/implicit/add_int_mat2.vert.html"); }
- public void test_conformance_glsl_implicit_add_int_mat3_vert_html() throws Exception { doTest("tests/conformance/glsl/implicit/add_int_mat3.vert.html"); }
- public void test_conformance_glsl_implicit_add_int_mat4_vert_html() throws Exception { doTest("tests/conformance/glsl/implicit/add_int_mat4.vert.html"); }
- public void test_conformance_glsl_implicit_add_int_vec2_vert_html() throws Exception { doTest("tests/conformance/glsl/implicit/add_int_vec2.vert.html"); }
- public void test_conformance_glsl_implicit_add_int_vec3_vert_html() throws Exception { doTest("tests/conformance/glsl/implicit/add_int_vec3.vert.html"); }
- public void test_conformance_glsl_implicit_add_int_vec4_vert_html() throws Exception { doTest("tests/conformance/glsl/implicit/add_int_vec4.vert.html"); }
- public void test_conformance_glsl_implicit_add_ivec2_vec2_vert_html() throws Exception { doTest("tests/conformance/glsl/implicit/add_ivec2_vec2.vert.html"); }
- public void test_conformance_glsl_implicit_add_ivec3_vec3_vert_html() throws Exception { doTest("tests/conformance/glsl/implicit/add_ivec3_vec3.vert.html"); }
- public void test_conformance_glsl_implicit_add_ivec4_vec4_vert_html() throws Exception { doTest("tests/conformance/glsl/implicit/add_ivec4_vec4.vert.html"); }
- public void test_conformance_glsl_implicit_assign_int_to_float_vert_html() throws Exception { doTest("tests/conformance/glsl/implicit/assign_int_to_float.vert.html"); }
- public void test_conformance_glsl_implicit_assign_ivec2_to_vec2_vert_html() throws Exception { doTest("tests/conformance/glsl/implicit/assign_ivec2_to_vec2.vert.html"); }
- public void test_conformance_glsl_implicit_assign_ivec3_to_vec3_vert_html() throws Exception { doTest("tests/conformance/glsl/implicit/assign_ivec3_to_vec3.vert.html"); }
- public void test_conformance_glsl_implicit_assign_ivec4_to_vec4_vert_html() throws Exception { doTest("tests/conformance/glsl/implicit/assign_ivec4_to_vec4.vert.html"); }
- public void test_conformance_glsl_implicit_construct_struct_vert_html() throws Exception { doTest("tests/conformance/glsl/implicit/construct_struct.vert.html"); }
- public void test_conformance_glsl_implicit_divide_int_float_vert_html() throws Exception { doTest("tests/conformance/glsl/implicit/divide_int_float.vert.html"); }
- public void test_conformance_glsl_implicit_divide_int_mat2_vert_html() throws Exception { doTest("tests/conformance/glsl/implicit/divide_int_mat2.vert.html"); }
- public void test_conformance_glsl_implicit_divide_int_mat3_vert_html() throws Exception { doTest("tests/conformance/glsl/implicit/divide_int_mat3.vert.html"); }
- public void test_conformance_glsl_implicit_divide_int_mat4_vert_html() throws Exception { doTest("tests/conformance/glsl/implicit/divide_int_mat4.vert.html"); }
- public void test_conformance_glsl_implicit_divide_int_vec2_vert_html() throws Exception { doTest("tests/conformance/glsl/implicit/divide_int_vec2.vert.html"); }
- public void test_conformance_glsl_implicit_divide_int_vec3_vert_html() throws Exception { doTest("tests/conformance/glsl/implicit/divide_int_vec3.vert.html"); }
- public void test_conformance_glsl_implicit_divide_int_vec4_vert_html() throws Exception { doTest("tests/conformance/glsl/implicit/divide_int_vec4.vert.html"); }
- public void test_conformance_glsl_implicit_divide_ivec2_vec2_vert_html() throws Exception { doTest("tests/conformance/glsl/implicit/divide_ivec2_vec2.vert.html"); }
- public void test_conformance_glsl_implicit_divide_ivec3_vec3_vert_html() throws Exception { doTest("tests/conformance/glsl/implicit/divide_ivec3_vec3.vert.html"); }
- public void test_conformance_glsl_implicit_divide_ivec4_vec4_vert_html() throws Exception { doTest("tests/conformance/glsl/implicit/divide_ivec4_vec4.vert.html"); }
- public void test_conformance_glsl_implicit_equal_int_float_vert_html() throws Exception { doTest("tests/conformance/glsl/implicit/equal_int_float.vert.html"); }
- public void test_conformance_glsl_implicit_equal_ivec2_vec2_vert_html() throws Exception { doTest("tests/conformance/glsl/implicit/equal_ivec2_vec2.vert.html"); }
- public void test_conformance_glsl_implicit_equal_ivec3_vec3_vert_html() throws Exception { doTest("tests/conformance/glsl/implicit/equal_ivec3_vec3.vert.html"); }
- public void test_conformance_glsl_implicit_equal_ivec4_vec4_vert_html() throws Exception { doTest("tests/conformance/glsl/implicit/equal_ivec4_vec4.vert.html"); }
- public void test_conformance_glsl_implicit_function_int_float_vert_html() throws Exception { doTest("tests/conformance/glsl/implicit/function_int_float.vert.html"); }
- public void test_conformance_glsl_implicit_function_ivec2_vec2_vert_html() throws Exception { doTest("tests/conformance/glsl/implicit/function_ivec2_vec2.vert.html"); }
- public void test_conformance_glsl_implicit_function_ivec3_vec3_vert_html() throws Exception { doTest("tests/conformance/glsl/implicit/function_ivec3_vec3.vert.html"); }
- public void test_conformance_glsl_implicit_function_ivec4_vec4_vert_html() throws Exception { doTest("tests/conformance/glsl/implicit/function_ivec4_vec4.vert.html"); }
- public void test_conformance_glsl_implicit_greater_than_vert_html() throws Exception { doTest("tests/conformance/glsl/implicit/greater_than.vert.html"); }
- public void test_conformance_glsl_implicit_greater_than_equal_vert_html() throws Exception { doTest("tests/conformance/glsl/implicit/greater_than_equal.vert.html"); }
- public void test_conformance_glsl_implicit_less_than_vert_html() throws Exception { doTest("tests/conformance/glsl/implicit/less_than.vert.html"); }
- public void test_conformance_glsl_implicit_less_than_equal_vert_html() throws Exception { doTest("tests/conformance/glsl/implicit/less_than_equal.vert.html"); }
- public void test_conformance_glsl_implicit_multiply_int_float_vert_html() throws Exception { doTest("tests/conformance/glsl/implicit/multiply_int_float.vert.html"); }
- public void test_conformance_glsl_implicit_multiply_int_mat2_vert_html() throws Exception { doTest("tests/conformance/glsl/implicit/multiply_int_mat2.vert.html"); }
- public void test_conformance_glsl_implicit_multiply_int_mat3_vert_html() throws Exception { doTest("tests/conformance/glsl/implicit/multiply_int_mat3.vert.html"); }
- public void test_conformance_glsl_implicit_multiply_int_mat4_vert_html() throws Exception { doTest("tests/conformance/glsl/implicit/multiply_int_mat4.vert.html"); }
- public void test_conformance_glsl_implicit_multiply_int_vec2_vert_html() throws Exception { doTest("tests/conformance/glsl/implicit/multiply_int_vec2.vert.html"); }
- public void test_conformance_glsl_implicit_multiply_int_vec3_vert_html() throws Exception { doTest("tests/conformance/glsl/implicit/multiply_int_vec3.vert.html"); }
- public void test_conformance_glsl_implicit_multiply_int_vec4_vert_html() throws Exception { doTest("tests/conformance/glsl/implicit/multiply_int_vec4.vert.html"); }
- public void test_conformance_glsl_implicit_multiply_ivec2_vec2_vert_html() throws Exception { doTest("tests/conformance/glsl/implicit/multiply_ivec2_vec2.vert.html"); }
- public void test_conformance_glsl_implicit_multiply_ivec3_vec3_vert_html() throws Exception { doTest("tests/conformance/glsl/implicit/multiply_ivec3_vec3.vert.html"); }
- public void test_conformance_glsl_implicit_multiply_ivec4_vec4_vert_html() throws Exception { doTest("tests/conformance/glsl/implicit/multiply_ivec4_vec4.vert.html"); }
- public void test_conformance_glsl_implicit_not_equal_int_float_vert_html() throws Exception { doTest("tests/conformance/glsl/implicit/not_equal_int_float.vert.html"); }
- public void test_conformance_glsl_implicit_not_equal_ivec2_vec2_vert_html() throws Exception { doTest("tests/conformance/glsl/implicit/not_equal_ivec2_vec2.vert.html"); }
- public void test_conformance_glsl_implicit_not_equal_ivec3_vec3_vert_html() throws Exception { doTest("tests/conformance/glsl/implicit/not_equal_ivec3_vec3.vert.html"); }
- public void test_conformance_glsl_implicit_not_equal_ivec4_vec4_vert_html() throws Exception { doTest("tests/conformance/glsl/implicit/not_equal_ivec4_vec4.vert.html"); }
- public void test_conformance_glsl_implicit_subtract_int_float_vert_html() throws Exception { doTest("tests/conformance/glsl/implicit/subtract_int_float.vert.html"); }
- public void test_conformance_glsl_implicit_subtract_int_mat2_vert_html() throws Exception { doTest("tests/conformance/glsl/implicit/subtract_int_mat2.vert.html"); }
- public void test_conformance_glsl_implicit_subtract_int_mat3_vert_html() throws Exception { doTest("tests/conformance/glsl/implicit/subtract_int_mat3.vert.html"); }
- public void test_conformance_glsl_implicit_subtract_int_mat4_vert_html() throws Exception { doTest("tests/conformance/glsl/implicit/subtract_int_mat4.vert.html"); }
- public void test_conformance_glsl_implicit_subtract_int_vec2_vert_html() throws Exception { doTest("tests/conformance/glsl/implicit/subtract_int_vec2.vert.html"); }
- public void test_conformance_glsl_implicit_subtract_int_vec3_vert_html() throws Exception { doTest("tests/conformance/glsl/implicit/subtract_int_vec3.vert.html"); }
- public void test_conformance_glsl_implicit_subtract_int_vec4_vert_html() throws Exception { doTest("tests/conformance/glsl/implicit/subtract_int_vec4.vert.html"); }
- public void test_conformance_glsl_implicit_subtract_ivec2_vec2_vert_html() throws Exception { doTest("tests/conformance/glsl/implicit/subtract_ivec2_vec2.vert.html"); }
- public void test_conformance_glsl_implicit_subtract_ivec3_vec3_vert_html() throws Exception { doTest("tests/conformance/glsl/implicit/subtract_ivec3_vec3.vert.html"); }
- public void test_conformance_glsl_implicit_subtract_ivec4_vec4_vert_html() throws Exception { doTest("tests/conformance/glsl/implicit/subtract_ivec4_vec4.vert.html"); }
- public void test_conformance_glsl_implicit_ternary_int_float_vert_html() throws Exception { doTest("tests/conformance/glsl/implicit/ternary_int_float.vert.html"); }
- public void test_conformance_glsl_implicit_ternary_ivec2_vec2_vert_html() throws Exception { doTest("tests/conformance/glsl/implicit/ternary_ivec2_vec2.vert.html"); }
- public void test_conformance_glsl_implicit_ternary_ivec3_vec3_vert_html() throws Exception { doTest("tests/conformance/glsl/implicit/ternary_ivec3_vec3.vert.html"); }
- public void test_conformance_glsl_implicit_ternary_ivec4_vec4_vert_html() throws Exception { doTest("tests/conformance/glsl/implicit/ternary_ivec4_vec4.vert.html"); }
- public void test_conformance_glsl_misc_attrib_location_length_limits_html() throws Exception { doTest("tests/conformance/glsl/misc/attrib-location-length-limits.html"); }
- public void test_conformance_glsl_misc_embedded_struct_definitions_forbidden_html() throws Exception { doTest("tests/conformance/glsl/misc/embedded-struct-definitions-forbidden.html"); }
- public void test_conformance_glsl_misc_empty_main_vert_html() throws Exception { doTest("tests/conformance/glsl/misc/empty_main.vert.html"); }
- public void test_conformance_glsl_misc_gl_position_unset_vert_html() throws Exception { doTest("tests/conformance/glsl/misc/gl_position_unset.vert.html"); }
- public void test_conformance_glsl_misc_glsl_function_nodes_html() throws Exception { doTest("tests/conformance/glsl/misc/glsl-function-nodes.html"); }
- public void test_conformance_glsl_misc_glsl_long_variable_names_html() throws Exception { doTest("tests/conformance/glsl/misc/glsl-long-variable-names.html"); }
- public void test_conformance_glsl_misc_non_ascii_comments_vert_html() throws Exception { doTest("tests/conformance/glsl/misc/non-ascii-comments.vert.html"); }
- public void test_conformance_glsl_misc_non_ascii_vert_html() throws Exception { doTest("tests/conformance/glsl/misc/non-ascii.vert.html"); }
- public void test_conformance_glsl_misc_shader_with_256_character_identifier_frag_html() throws Exception { doTest("tests/conformance/glsl/misc/shader-with-256-character-identifier.frag.html"); }
- public void test_conformance_glsl_misc_shader_with_257_character_identifier_frag_html() throws Exception { doTest("tests/conformance/glsl/misc/shader-with-257-character-identifier.frag.html"); }
- public void test_conformance_glsl_misc_shader_with__webgl_identifier_vert_html() throws Exception { doTest("tests/conformance/glsl/misc/shader-with-_webgl-identifier.vert.html"); }
- public void test_conformance_glsl_misc_shader_with_arbitrary_indexing_frag_html() throws Exception { doTest("tests/conformance/glsl/misc/shader-with-arbitrary-indexing.frag.html"); }
- public void test_conformance_glsl_misc_shader_with_arbitrary_indexing_vert_html() throws Exception { doTest("tests/conformance/glsl/misc/shader-with-arbitrary-indexing.vert.html"); }
- public void test_conformance_glsl_misc_shader_with_attrib_array_vert_html() throws Exception { doTest("tests/conformance/glsl/misc/shader-with-attrib-array.vert.html"); }
- public void test_conformance_glsl_misc_shader_with_attrib_struct_vert_html() throws Exception { doTest("tests/conformance/glsl/misc/shader-with-attrib-struct.vert.html"); }
- public void test_conformance_glsl_misc_shader_with_clipvertex_vert_html() throws Exception { doTest("tests/conformance/glsl/misc/shader-with-clipvertex.vert.html"); }
- public void test_conformance_glsl_misc_shader_with_default_precision_frag_html() throws Exception { doTest("tests/conformance/glsl/misc/shader-with-default-precision.frag.html"); }
- public void test_conformance_glsl_misc_shader_with_default_precision_vert_html() throws Exception { doTest("tests/conformance/glsl/misc/shader-with-default-precision.vert.html"); }
- public void test_conformance_glsl_misc_shader_with_define_line_continuation_frag_html() throws Exception { doTest("tests/conformance/glsl/misc/shader-with-define-line-continuation.frag.html"); }
- public void test_conformance_glsl_misc_shader_with_dfdx_no_ext_frag_html() throws Exception { doTest("tests/conformance/glsl/misc/shader-with-dfdx-no-ext.frag.html"); }
- public void test_conformance_glsl_misc_shader_with_dfdx_frag_html() throws Exception { doTest("tests/conformance/glsl/misc/shader-with-dfdx.frag.html"); }
- public void test_conformance_glsl_misc_shader_with_error_directive_html() throws Exception { doTest("tests/conformance/glsl/misc/shader-with-error-directive.html"); }
- public void test_conformance_glsl_misc_shader_with_explicit_int_cast_vert_html() throws Exception { doTest("tests/conformance/glsl/misc/shader-with-explicit-int-cast.vert.html"); }
- public void test_conformance_glsl_misc_shader_with_float_return_value_frag_html() throws Exception { doTest("tests/conformance/glsl/misc/shader-with-float-return-value.frag.html"); }
- public void test_conformance_glsl_misc_shader_with_frag_depth_frag_html() throws Exception { doTest("tests/conformance/glsl/misc/shader-with-frag-depth.frag.html"); }
- public void test_conformance_glsl_misc_shader_with_function_recursion_frag_html() throws Exception { doTest("tests/conformance/glsl/misc/shader-with-function-recursion.frag.html"); }
- public void test_conformance_glsl_misc_shader_with_glcolor_vert_html() throws Exception { doTest("tests/conformance/glsl/misc/shader-with-glcolor.vert.html"); }
- public void test_conformance_glsl_misc_shader_with_gles_1_frag_html() throws Exception { doTest("tests/conformance/glsl/misc/shader-with-gles-1.frag.html"); }
- public void test_conformance_glsl_misc_shader_with_gles_symbol_frag_html() throws Exception { doTest("tests/conformance/glsl/misc/shader-with-gles-symbol.frag.html"); }
- public void test_conformance_glsl_misc_shader_with_glprojectionmatrix_vert_html() throws Exception { doTest("tests/conformance/glsl/misc/shader-with-glprojectionmatrix.vert.html"); }
- public void test_conformance_glsl_misc_shader_with_implicit_vec3_to_vec4_cast_vert_html() throws Exception { doTest("tests/conformance/glsl/misc/shader-with-implicit-vec3-to-vec4-cast.vert.html"); }
- public void test_conformance_glsl_misc_shader_with_include_vert_html() throws Exception { doTest("tests/conformance/glsl/misc/shader-with-include.vert.html"); }
- public void test_conformance_glsl_misc_shader_with_int_return_value_frag_html() throws Exception { doTest("tests/conformance/glsl/misc/shader-with-int-return-value.frag.html"); }
- public void test_conformance_glsl_misc_shader_with_invalid_identifier_frag_html() throws Exception { doTest("tests/conformance/glsl/misc/shader-with-invalid-identifier.frag.html"); }
- public void test_conformance_glsl_misc_shader_with_ivec2_return_value_frag_html() throws Exception { doTest("tests/conformance/glsl/misc/shader-with-ivec2-return-value.frag.html"); }
- public void test_conformance_glsl_misc_shader_with_ivec3_return_value_frag_html() throws Exception { doTest("tests/conformance/glsl/misc/shader-with-ivec3-return-value.frag.html"); }
- public void test_conformance_glsl_misc_shader_with_ivec4_return_value_frag_html() throws Exception { doTest("tests/conformance/glsl/misc/shader-with-ivec4-return-value.frag.html"); }
- public void test_conformance_glsl_misc_shader_with_limited_indexing_frag_html() throws Exception { doTest("tests/conformance/glsl/misc/shader-with-limited-indexing.frag.html"); }
- public void test_conformance_glsl_misc_shader_with_long_line_html() throws Exception { doTest("tests/conformance/glsl/misc/shader-with-long-line.html"); }
- public void test_conformance_glsl_misc_shader_with_non_ascii_error_frag_html() throws Exception { doTest("tests/conformance/glsl/misc/shader-with-non-ascii-error.frag.html"); }
- public void test_conformance_glsl_misc_shader_with_precision_frag_html() throws Exception { doTest("tests/conformance/glsl/misc/shader-with-precision.frag.html"); }
- public void test_conformance_glsl_misc_shader_with_quoted_error_frag_html() throws Exception { doTest("tests/conformance/glsl/misc/shader-with-quoted-error.frag.html"); }
- public void test_conformance_glsl_misc_shader_with_undefined_preprocessor_symbol_frag_html() throws Exception { doTest("tests/conformance/glsl/misc/shader-with-undefined-preprocessor-symbol.frag.html"); }
- public void test_conformance_glsl_misc_shader_with_uniform_in_loop_condition_vert_html() throws Exception { doTest("tests/conformance/glsl/misc/shader-with-uniform-in-loop-condition.vert.html"); }
- public void test_conformance_glsl_misc_shader_with_vec2_return_value_frag_html() throws Exception { doTest("tests/conformance/glsl/misc/shader-with-vec2-return-value.frag.html"); }
- public void test_conformance_glsl_misc_shader_with_vec3_return_value_frag_html() throws Exception { doTest("tests/conformance/glsl/misc/shader-with-vec3-return-value.frag.html"); }
- public void test_conformance_glsl_misc_shader_with_vec4_return_value_frag_html() throws Exception { doTest("tests/conformance/glsl/misc/shader-with-vec4-return-value.frag.html"); }
- public void test_conformance_glsl_misc_shader_with_version_100_frag_html() throws Exception { doTest("tests/conformance/glsl/misc/shader-with-version-100.frag.html"); }
- public void test_conformance_glsl_misc_shader_with_version_100_vert_html() throws Exception { doTest("tests/conformance/glsl/misc/shader-with-version-100.vert.html"); }
- public void test_conformance_glsl_misc_shader_with_version_120_vert_html() throws Exception { doTest("tests/conformance/glsl/misc/shader-with-version-120.vert.html"); }
- public void test_conformance_glsl_misc_shader_with_version_130_vert_html() throws Exception { doTest("tests/conformance/glsl/misc/shader-with-version-130.vert.html"); }
- public void test_conformance_glsl_misc_shader_with_webgl_identifier_vert_html() throws Exception { doTest("tests/conformance/glsl/misc/shader-with-webgl-identifier.vert.html"); }
- public void test_conformance_glsl_misc_shader_without_precision_frag_html() throws Exception { doTest("tests/conformance/glsl/misc/shader-without-precision.frag.html"); }
- public void test_conformance_glsl_misc_shared_html() throws Exception { doTest("tests/conformance/glsl/misc/shared.html"); }
- public void test_conformance_glsl_misc_struct_nesting_exceeds_maximum_html() throws Exception { doTest("tests/conformance/glsl/misc/struct-nesting-exceeds-maximum.html"); }
- public void test_conformance_glsl_misc_struct_nesting_under_maximum_html() throws Exception { doTest("tests/conformance/glsl/misc/struct-nesting-under-maximum.html"); }
- public void test_conformance_glsl_misc_uniform_location_length_limits_html() throws Exception { doTest("tests/conformance/glsl/misc/uniform-location-length-limits.html"); }
- public void test_conformance_glsl_reserved__webgl_field_vert_html() throws Exception { doTest("tests/conformance/glsl/reserved/_webgl_field.vert.html"); }
- public void test_conformance_glsl_reserved__webgl_function_vert_html() throws Exception { doTest("tests/conformance/glsl/reserved/_webgl_function.vert.html"); }
- public void test_conformance_glsl_reserved__webgl_struct_vert_html() throws Exception { doTest("tests/conformance/glsl/reserved/_webgl_struct.vert.html"); }
- public void test_conformance_glsl_reserved__webgl_variable_vert_html() throws Exception { doTest("tests/conformance/glsl/reserved/_webgl_variable.vert.html"); }
- public void test_conformance_glsl_reserved_webgl_field_vert_html() throws Exception { doTest("tests/conformance/glsl/reserved/webgl_field.vert.html"); }
- public void test_conformance_glsl_reserved_webgl_function_vert_html() throws Exception { doTest("tests/conformance/glsl/reserved/webgl_function.vert.html"); }
- public void test_conformance_glsl_reserved_webgl_struct_vert_html() throws Exception { doTest("tests/conformance/glsl/reserved/webgl_struct.vert.html"); }
- public void test_conformance_glsl_reserved_webgl_variable_vert_html() throws Exception { doTest("tests/conformance/glsl/reserved/webgl_variable.vert.html"); }
- public void test_conformance_glsl_variables_gl_fragcoord_html() throws Exception { doTest("tests/conformance/glsl/variables/gl-fragcoord.html"); }
- public void test_conformance_glsl_variables_gl_frontfacing_html() throws Exception { doTest("tests/conformance/glsl/variables/gl-frontfacing.html"); }
- public void test_conformance_glsl_variables_gl_pointcoord_html() throws Exception { doTest("tests/conformance/glsl/variables/gl-pointcoord.html"); }
- public void test_conformance_limits_gl_max_texture_dimensions_html() throws Exception { doTest("tests/conformance/limits/gl-max-texture-dimensions.html"); }
- public void test_conformance_limits_gl_min_attribs_html() throws Exception { doTest("tests/conformance/limits/gl-min-attribs.html"); }
- public void test_conformance_limits_gl_min_textures_html() throws Exception { doTest("tests/conformance/limits/gl-min-textures.html"); }
- public void test_conformance_limits_gl_min_uniforms_html() throws Exception { doTest("tests/conformance/limits/gl-min-uniforms.html"); }
- public void test_conformance_misc_bad_arguments_test_html() throws Exception { doTest("tests/conformance/misc/bad-arguments-test.html"); }
- public void test_conformance_misc_error_reporting_html() throws Exception { doTest("tests/conformance/misc/error-reporting.html"); }
- public void test_conformance_misc_functions_returning_strings_html() throws Exception { doTest("tests/conformance/misc/functions-returning-strings.html"); }
- public void test_conformance_misc_instanceof_test_html() throws Exception { doTest("tests/conformance/misc/instanceof-test.html"); }
- public void test_conformance_misc_invalid_passed_params_html() throws Exception { doTest("tests/conformance/misc/invalid-passed-params.html"); }
- public void test_conformance_misc_is_object_html() throws Exception { doTest("tests/conformance/misc/is-object.html"); }
- public void test_conformance_misc_null_object_behaviour_html() throws Exception { doTest("tests/conformance/misc/null-object-behaviour.html"); }
- public void test_conformance_misc_object_deletion_behaviour_html() throws Exception { doTest("tests/conformance/misc/object-deletion-behaviour.html"); }
- public void test_conformance_misc_shader_precision_format_html() throws Exception { doTest("tests/conformance/misc/shader-precision-format.html"); }
- public void test_conformance_misc_type_conversion_test_html() throws Exception { doTest("tests/conformance/misc/type-conversion-test.html"); }
- public void test_conformance_misc_uninitialized_test_html() throws Exception { doTest("tests/conformance/misc/uninitialized-test.html"); }
- public void test_conformance_misc_webgl_specific_html() throws Exception { doTest("tests/conformance/misc/webgl-specific.html"); }
- public void test_conformance_more_conformance_constants_html() throws Exception { doTest("tests/conformance/more/conformance/constants.html"); }
- public void test_conformance_more_conformance_getContext_html() throws Exception { doTest("tests/conformance/more/conformance/getContext.html"); }
- public void test_conformance_more_conformance_methods_html() throws Exception { doTest("tests/conformance/more/conformance/methods.html"); }
- public void test_conformance_more_conformance_quickCheckAPI_A_html() throws Exception { doTest("tests/conformance/more/conformance/quickCheckAPI-A.html"); }
- public void test_conformance_more_conformance_quickCheckAPI_B1_html() throws Exception { doTest("tests/conformance/more/conformance/quickCheckAPI-B1.html"); }
- public void test_conformance_more_conformance_quickCheckAPI_B2_html() throws Exception { doTest("tests/conformance/more/conformance/quickCheckAPI-B2.html"); }
- public void test_conformance_more_conformance_quickCheckAPI_B3_html() throws Exception { doTest("tests/conformance/more/conformance/quickCheckAPI-B3.html"); }
- public void test_conformance_more_conformance_quickCheckAPI_B4_html() throws Exception { doTest("tests/conformance/more/conformance/quickCheckAPI-B4.html"); }
- public void test_conformance_more_conformance_quickCheckAPI_C_html() throws Exception { doTest("tests/conformance/more/conformance/quickCheckAPI-C.html"); }
- public void test_conformance_more_conformance_quickCheckAPI_D_G_html() throws Exception { doTest("tests/conformance/more/conformance/quickCheckAPI-D_G.html"); }
- public void test_conformance_more_conformance_quickCheckAPI_G_I_html() throws Exception { doTest("tests/conformance/more/conformance/quickCheckAPI-G_I.html"); }
- public void test_conformance_more_conformance_quickCheckAPI_L_S_html() throws Exception { doTest("tests/conformance/more/conformance/quickCheckAPI-L_S.html"); }
- public void test_conformance_more_conformance_quickCheckAPI_S_V_html() throws Exception { doTest("tests/conformance/more/conformance/quickCheckAPI-S_V.html"); }
- public void test_conformance_more_conformance_webGLArrays_html() throws Exception { doTest("tests/conformance/more/conformance/webGLArrays.html"); }
- public void test_conformance_more_functions_bindBuffer_html() throws Exception { doTest("tests/conformance/more/functions/bindBuffer.html"); }
- public void test_conformance_more_functions_bindBufferBadArgs_html() throws Exception { doTest("tests/conformance/more/functions/bindBufferBadArgs.html"); }
- public void test_conformance_more_functions_bindFramebufferLeaveNonZero_html() throws Exception { doTest("tests/conformance/more/functions/bindFramebufferLeaveNonZero.html"); }
- public void test_conformance_more_functions_bufferData_html() throws Exception { doTest("tests/conformance/more/functions/bufferData.html"); }
- public void test_conformance_more_functions_bufferDataBadArgs_html() throws Exception { doTest("tests/conformance/more/functions/bufferDataBadArgs.html"); }
- public void test_conformance_more_functions_bufferSubData_html() throws Exception { doTest("tests/conformance/more/functions/bufferSubData.html"); }
- public void test_conformance_more_functions_bufferSubDataBadArgs_html() throws Exception { doTest("tests/conformance/more/functions/bufferSubDataBadArgs.html"); }
- public void test_conformance_more_functions_copyTexImage2D_html() throws Exception { doTest("tests/conformance/more/functions/copyTexImage2D.html"); }
- public void test_conformance_more_functions_copyTexImage2DBadArgs_html() throws Exception { doTest("tests/conformance/more/functions/copyTexImage2DBadArgs.html"); }
- public void test_conformance_more_functions_copyTexSubImage2D_html() throws Exception { doTest("tests/conformance/more/functions/copyTexSubImage2D.html"); }
- public void test_conformance_more_functions_copyTexSubImage2DBadArgs_html() throws Exception { doTest("tests/conformance/more/functions/copyTexSubImage2DBadArgs.html"); }
- public void test_conformance_more_functions_deleteBufferBadArgs_html() throws Exception { doTest("tests/conformance/more/functions/deleteBufferBadArgs.html"); }
- public void test_conformance_more_functions_drawArrays_html() throws Exception { doTest("tests/conformance/more/functions/drawArrays.html"); }
- public void test_conformance_more_functions_drawArraysOutOfBounds_html() throws Exception { doTest("tests/conformance/more/functions/drawArraysOutOfBounds.html"); }
- public void test_conformance_more_functions_drawElements_html() throws Exception { doTest("tests/conformance/more/functions/drawElements.html"); }
- public void test_conformance_more_functions_drawElementsBadArgs_html() throws Exception { doTest("tests/conformance/more/functions/drawElementsBadArgs.html"); }
- public void test_conformance_more_functions_isTests_html() throws Exception { doTest("tests/conformance/more/functions/isTests.html"); }
- public void test_conformance_more_functions_readPixels_html() throws Exception { doTest("tests/conformance/more/functions/readPixels.html"); }
- public void test_conformance_more_functions_readPixelsBadArgs_html() throws Exception { doTest("tests/conformance/more/functions/readPixelsBadArgs.html"); }
- public void test_conformance_more_functions_texImage2D_html() throws Exception { doTest("tests/conformance/more/functions/texImage2D.html"); }
- public void test_conformance_more_functions_texImage2DBadArgs_html() throws Exception { doTest("tests/conformance/more/functions/texImage2DBadArgs.html"); }
- public void test_conformance_more_functions_texImage2DHTML_html() throws Exception { doTest("tests/conformance/more/functions/texImage2DHTML.html"); }
- public void test_conformance_more_functions_texImage2DHTMLBadArgs_html() throws Exception { doTest("tests/conformance/more/functions/texImage2DHTMLBadArgs.html"); }
- public void test_conformance_more_functions_texSubImage2D_html() throws Exception { doTest("tests/conformance/more/functions/texSubImage2D.html"); }
- public void test_conformance_more_functions_texSubImage2DBadArgs_html() throws Exception { doTest("tests/conformance/more/functions/texSubImage2DBadArgs.html"); }
- public void test_conformance_more_functions_texSubImage2DHTML_html() throws Exception { doTest("tests/conformance/more/functions/texSubImage2DHTML.html"); }
- public void test_conformance_more_functions_texSubImage2DHTMLBadArgs_html() throws Exception { doTest("tests/conformance/more/functions/texSubImage2DHTMLBadArgs.html"); }
- public void test_conformance_more_functions_uniformMatrix_html() throws Exception { doTest("tests/conformance/more/functions/uniformMatrix.html"); }
- public void test_conformance_more_functions_uniformMatrixBadArgs_html() throws Exception { doTest("tests/conformance/more/functions/uniformMatrixBadArgs.html"); }
- public void test_conformance_more_functions_uniformf_html() throws Exception { doTest("tests/conformance/more/functions/uniformf.html"); }
- public void test_conformance_more_functions_uniformfArrayLen1_html() throws Exception { doTest("tests/conformance/more/functions/uniformfArrayLen1.html"); }
- public void test_conformance_more_functions_uniformfBadArgs_html() throws Exception { doTest("tests/conformance/more/functions/uniformfBadArgs.html"); }
- public void test_conformance_more_functions_uniformi_html() throws Exception { doTest("tests/conformance/more/functions/uniformi.html"); }
- public void test_conformance_more_functions_uniformiBadArgs_html() throws Exception { doTest("tests/conformance/more/functions/uniformiBadArgs.html"); }
- public void test_conformance_more_functions_vertexAttrib_html() throws Exception { doTest("tests/conformance/more/functions/vertexAttrib.html"); }
- public void test_conformance_more_functions_vertexAttribBadArgs_html() throws Exception { doTest("tests/conformance/more/functions/vertexAttribBadArgs.html"); }
- public void test_conformance_more_functions_vertexAttribPointer_html() throws Exception { doTest("tests/conformance/more/functions/vertexAttribPointer.html"); }
- public void test_conformance_more_functions_vertexAttribPointerBadArgs_html() throws Exception { doTest("tests/conformance/more/functions/vertexAttribPointerBadArgs.html"); }
- public void test_conformance_more_glsl_arrayOutOfBounds_html() throws Exception { doTest("tests/conformance/more/glsl/arrayOutOfBounds.html"); }
- public void test_conformance_more_glsl_uniformOutOfBounds_html() throws Exception { doTest("tests/conformance/more/glsl/uniformOutOfBounds.html"); }
- public void test_conformance_programs_get_active_test_html() throws Exception { doTest("tests/conformance/programs/get-active-test.html"); }
- public void test_conformance_programs_gl_bind_attrib_location_test_html() throws Exception { doTest("tests/conformance/programs/gl-bind-attrib-location-test.html"); }
- public void test_conformance_programs_gl_get_active_attribute_html() throws Exception { doTest("tests/conformance/programs/gl-get-active-attribute.html"); }
- public void test_conformance_programs_gl_get_active_uniform_html() throws Exception { doTest("tests/conformance/programs/gl-get-active-uniform.html"); }
- public void test_conformance_programs_gl_getshadersource_html() throws Exception { doTest("tests/conformance/programs/gl-getshadersource.html"); }
- public void test_conformance_programs_gl_shader_test_html() throws Exception { doTest("tests/conformance/programs/gl-shader-test.html"); }
- public void test_conformance_programs_invalid_UTF_16_html() throws Exception { doTest("tests/conformance/programs/invalid-UTF-16.html"); }
- public void test_conformance_programs_program_test_html() throws Exception { doTest("tests/conformance/programs/program-test.html"); }
- public void test_conformance_reading_read_pixels_pack_alignment_html() throws Exception { doTest("tests/conformance/reading/read-pixels-pack-alignment.html"); }
- public void test_conformance_reading_read_pixels_test_html() throws Exception { doTest("tests/conformance/reading/read-pixels-test.html"); }
- public void test_conformance_renderbuffers_framebuffer_object_attachment_html() throws Exception { doTest("tests/conformance/renderbuffers/framebuffer-object-attachment.html"); }
- public void test_conformance_renderbuffers_framebuffer_test_html() throws Exception { doTest("tests/conformance/renderbuffers/framebuffer-test.html"); }
- public void test_conformance_renderbuffers_renderbuffer_initialization_html() throws Exception { doTest("tests/conformance/renderbuffers/renderbuffer-initialization.html"); }
- public void test_conformance_rendering_draw_arrays_out_of_bounds_html() throws Exception { doTest("tests/conformance/rendering/draw-arrays-out-of-bounds.html"); }
- public void test_conformance_rendering_draw_elements_out_of_bounds_html() throws Exception { doTest("tests/conformance/rendering/draw-elements-out-of-bounds.html"); }
- public void test_conformance_rendering_gl_clear_html() throws Exception { doTest("tests/conformance/rendering/gl-clear.html"); }
- public void test_conformance_rendering_gl_drawelements_html() throws Exception { doTest("tests/conformance/rendering/gl-drawelements.html"); }
- public void test_conformance_rendering_gl_scissor_test_html() throws Exception { doTest("tests/conformance/rendering/gl-scissor-test.html"); }
- public void test_conformance_rendering_line_loop_tri_fan_html() throws Exception { doTest("tests/conformance/rendering/line-loop-tri-fan.html"); }
- public void test_conformance_rendering_more_than_65536_indices_html() throws Exception { doTest("tests/conformance/rendering/more-than-65536-indices.html"); }
- public void test_conformance_rendering_multisample_corruption_html() throws Exception { doTest("tests/conformance/rendering/multisample-corruption.html"); }
- public void test_conformance_rendering_point_size_html() throws Exception { doTest("tests/conformance/rendering/point-size.html"); }
- public void test_conformance_rendering_triangle_html() throws Exception { doTest("tests/conformance/rendering/triangle.html"); }
- public void test_conformance_state_gl_enable_enum_test_html() throws Exception { doTest("tests/conformance/state/gl-enable-enum-test.html"); }
- public void test_conformance_state_gl_enum_tests_html() throws Exception { doTest("tests/conformance/state/gl-enum-tests.html"); }
- public void test_conformance_state_gl_get_calls_html() throws Exception { doTest("tests/conformance/state/gl-get-calls.html"); }
- public void test_conformance_state_gl_geterror_html() throws Exception { doTest("tests/conformance/state/gl-geterror.html"); }
- public void test_conformance_state_gl_getstring_html() throws Exception { doTest("tests/conformance/state/gl-getstring.html"); }
- public void test_conformance_state_gl_object_get_calls_html() throws Exception { doTest("tests/conformance/state/gl-object-get-calls.html"); }
- public void test_conformance_textures_compressed_tex_image_html() throws Exception { doTest("tests/conformance/textures/compressed-tex-image.html"); }
- public void test_conformance_textures_copy_tex_image_and_sub_image_2d_html() throws Exception { doTest("tests/conformance/textures/copy-tex-image-and-sub-image-2d.html"); }
- public void test_conformance_textures_gl_pixelstorei_html() throws Exception { doTest("tests/conformance/textures/gl-pixelstorei.html"); }
- public void test_conformance_textures_gl_teximage_html() throws Exception { doTest("tests/conformance/textures/gl-teximage.html"); }
- public void test_conformance_textures_origin_clean_conformance_html() throws Exception { doTest("tests/conformance/textures/origin-clean-conformance.html"); }
- public void test_conformance_textures_tex_image_and_sub_image_2d_with_array_buffer_view_html() throws Exception { doTest("tests/conformance/textures/tex-image-and-sub-image-2d-with-array-buffer-view.html"); }
- public void test_conformance_textures_tex_image_and_sub_image_2d_with_canvas_rgb565_html() throws Exception { doTest("tests/conformance/textures/tex-image-and-sub-image-2d-with-canvas-rgb565.html"); }
- public void test_conformance_textures_tex_image_and_sub_image_2d_with_canvas_rgba4444_html() throws Exception { doTest("tests/conformance/textures/tex-image-and-sub-image-2d-with-canvas-rgba4444.html"); }
- public void test_conformance_textures_tex_image_and_sub_image_2d_with_canvas_rgba5551_html() throws Exception { doTest("tests/conformance/textures/tex-image-and-sub-image-2d-with-canvas-rgba5551.html"); }
- public void test_conformance_textures_tex_image_and_sub_image_2d_with_canvas_html() throws Exception { doTest("tests/conformance/textures/tex-image-and-sub-image-2d-with-canvas.html"); }
- public void test_conformance_textures_tex_image_and_sub_image_2d_with_image_data_rgb565_html() throws Exception { doTest("tests/conformance/textures/tex-image-and-sub-image-2d-with-image-data-rgb565.html"); }
- public void test_conformance_textures_tex_image_and_sub_image_2d_with_image_data_rgba4444_html() throws Exception { doTest("tests/conformance/textures/tex-image-and-sub-image-2d-with-image-data-rgba4444.html"); }
- public void test_conformance_textures_tex_image_and_sub_image_2d_with_image_data_rgba5551_html() throws Exception { doTest("tests/conformance/textures/tex-image-and-sub-image-2d-with-image-data-rgba5551.html"); }
- public void test_conformance_textures_tex_image_and_sub_image_2d_with_image_data_html() throws Exception { doTest("tests/conformance/textures/tex-image-and-sub-image-2d-with-image-data.html"); }
- public void test_conformance_textures_tex_image_and_sub_image_2d_with_image_rgb565_html() throws Exception { doTest("tests/conformance/textures/tex-image-and-sub-image-2d-with-image-rgb565.html"); }
- public void test_conformance_textures_tex_image_and_sub_image_2d_with_image_rgba4444_html() throws Exception { doTest("tests/conformance/textures/tex-image-and-sub-image-2d-with-image-rgba4444.html"); }
- public void test_conformance_textures_tex_image_and_sub_image_2d_with_image_rgba5551_html() throws Exception { doTest("tests/conformance/textures/tex-image-and-sub-image-2d-with-image-rgba5551.html"); }
- public void test_conformance_textures_tex_image_and_sub_image_2d_with_image_html() throws Exception { doTest("tests/conformance/textures/tex-image-and-sub-image-2d-with-image.html"); }
- public void test_conformance_textures_tex_image_and_sub_image_2d_with_video_rgb565_html() throws Exception { doTest("tests/conformance/textures/tex-image-and-sub-image-2d-with-video-rgb565.html"); }
- public void test_conformance_textures_tex_image_and_sub_image_2d_with_video_rgba4444_html() throws Exception { doTest("tests/conformance/textures/tex-image-and-sub-image-2d-with-video-rgba4444.html"); }
- public void test_conformance_textures_tex_image_and_sub_image_2d_with_video_rgba5551_html() throws Exception { doTest("tests/conformance/textures/tex-image-and-sub-image-2d-with-video-rgba5551.html"); }
- public void test_conformance_textures_tex_image_and_sub_image_2d_with_video_html() throws Exception { doTest("tests/conformance/textures/tex-image-and-sub-image-2d-with-video.html"); }
- public void test_conformance_textures_tex_image_and_uniform_binding_bugs_html() throws Exception { doTest("tests/conformance/textures/tex-image-and-uniform-binding-bugs.html"); }
- public void test_conformance_textures_tex_image_with_format_and_type_html() throws Exception { doTest("tests/conformance/textures/tex-image-with-format-and-type.html"); }
- public void test_conformance_textures_tex_image_with_invalid_data_html() throws Exception { doTest("tests/conformance/textures/tex-image-with-invalid-data.html"); }
- public void test_conformance_textures_tex_input_validation_html() throws Exception { doTest("tests/conformance/textures/tex-input-validation.html"); }
- public void test_conformance_textures_tex_sub_image_2d_bad_args_html() throws Exception { doTest("tests/conformance/textures/tex-sub-image-2d-bad-args.html"); }
- public void test_conformance_textures_tex_sub_image_2d_html() throws Exception { doTest("tests/conformance/textures/tex-sub-image-2d.html"); }
- public void test_conformance_textures_texparameter_test_html() throws Exception { doTest("tests/conformance/textures/texparameter-test.html"); }
- public void test_conformance_textures_texture_active_bind_2_html() throws Exception { doTest("tests/conformance/textures/texture-active-bind-2.html"); }
- public void test_conformance_textures_texture_active_bind_html() throws Exception { doTest("tests/conformance/textures/texture-active-bind.html"); }
- public void test_conformance_textures_texture_complete_html() throws Exception { doTest("tests/conformance/textures/texture-complete.html"); }
- public void test_conformance_textures_texture_mips_html() throws Exception { doTest("tests/conformance/textures/texture-mips.html"); }
- public void test_conformance_textures_texture_npot_video_html() throws Exception { doTest("tests/conformance/textures/texture-npot-video.html"); }
- public void test_conformance_textures_texture_npot_html() throws Exception { doTest("tests/conformance/textures/texture-npot.html"); }
- public void test_conformance_textures_texture_size_cube_maps_html() throws Exception { doTest("tests/conformance/textures/texture-size-cube-maps.html"); }
- public void test_conformance_textures_texture_size_html() throws Exception { doTest("tests/conformance/textures/texture-size.html"); }
- public void test_conformance_textures_texture_transparent_pixels_initialized_html() throws Exception { doTest("tests/conformance/textures/texture-transparent-pixels-initialized.html"); }
- public void test_conformance_typedarrays_array_buffer_crash_html() throws Exception { doTest("tests/conformance/typedarrays/array-buffer-crash.html"); }
- public void test_conformance_typedarrays_array_buffer_view_crash_html() throws Exception { doTest("tests/conformance/typedarrays/array-buffer-view-crash.html"); }
- public void test_conformance_typedarrays_array_unit_tests_html() throws Exception { doTest("tests/conformance/typedarrays/array-unit-tests.html"); }
- public void test_conformance_typedarrays_data_view_crash_html() throws Exception { doTest("tests/conformance/typedarrays/data-view-crash.html"); }
- public void test_conformance_typedarrays_data_view_test_html() throws Exception { doTest("tests/conformance/typedarrays/data-view-test.html"); }
- public void test_conformance_uniforms_gl_uniform_arrays_html() throws Exception { doTest("tests/conformance/uniforms/gl-uniform-arrays.html"); }
- public void test_conformance_uniforms_gl_uniform_bool_html() throws Exception { doTest("tests/conformance/uniforms/gl-uniform-bool.html"); }
- public void test_conformance_uniforms_gl_uniformmatrix4fv_html() throws Exception { doTest("tests/conformance/uniforms/gl-uniformmatrix4fv.html"); }
- public void test_conformance_uniforms_gl_unknown_uniform_html() throws Exception { doTest("tests/conformance/uniforms/gl-unknown-uniform.html"); }
- public void test_conformance_uniforms_null_uniform_location_html() throws Exception { doTest("tests/conformance/uniforms/null-uniform-location.html"); }
- public void test_conformance_uniforms_uniform_location_html() throws Exception { doTest("tests/conformance/uniforms/uniform-location.html"); }
- public void test_conformance_uniforms_uniform_samplers_test_html() throws Exception { doTest("tests/conformance/uniforms/uniform-samplers-test.html"); }
-}
diff --git a/tests/webgl/src/android/webgl/cts/ZipUtil.java b/tests/webgl/src/android/webgl/cts/ZipUtil.java
deleted file mode 100644
index 4b28e63..0000000
--- a/tests/webgl/src/android/webgl/cts/ZipUtil.java
+++ /dev/null
@@ -1,117 +0,0 @@
-/*
- * Copyright (C) 2014 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package android.webgl.cts;
-
-import android.util.Log;
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.FileOutputStream;
-import java.io.InputStream;
-import java.io.OutputStream;
-import java.lang.String;
-import java.util.zip.ZipEntry;
-import java.util.zip.ZipInputStream;
-
-
-/**
- * Some boilerplate code to unzip files.
- */
-public class ZipUtil {
- private final static String TAG = "ZipUtil";
-
- /**
- * Stream to a file.
- */
- public static void streamToPath(InputStream is,
- File directory,
- String name) throws Exception {
- File file = new File(directory, name);
- streamToPath(is, file);
- }
-
- public static void streamToPath(InputStream is,
- File file) throws Exception {
- Log.i(TAG, "Streaming to path " + file.getPath());
- OutputStream os = null;
- os = new FileOutputStream(file);
- int count = -1;
- byte[] buffer = new byte[10 * 1024];
- while ((count = is.read(buffer)) != -1) {
- os.write(buffer, 0, count);
- }
- os.close();
- }
-
- /**
- * Unzip to a directory.
- */
- public static void unzipToPath(InputStream is,
- File filePath) throws Exception {
- ZipInputStream zis = new ZipInputStream(is);
- unzipToPath(zis, filePath.getPath());
- }
-
- public static void unzipToPath(ZipInputStream zis,
- String path) throws Exception {
- Log.i(TAG, "Unzipping to path " + path);
- byte[] buffer = new byte[10 * 1024];
- ZipEntry entry;
- while ((entry = zis.getNextEntry()) != null) {
- File entryFile = new File(path, entry.getName());
- if (entry.isDirectory()) {
- if (!entryFile.exists()) {
- entryFile.mkdirs();
- }
- continue;
- }
- if (entryFile.getParentFile() != null &&
- !entryFile.getParentFile().exists()) {
- entryFile.getParentFile().mkdirs();
- }
- if (!entryFile.exists()) {
- entryFile.createNewFile();
- entryFile.setReadable(true);
- entryFile.setExecutable(true);
- }
- streamToPath(zis, entryFile);
- }
- zis.close();
- }
-
- /**
- * Cleanup a directory.
- */
- static public boolean deleteDirectory(String directoryPath) {
- File path = new File(directoryPath);
- return deleteDirectory(path);
- }
-
- static public boolean deleteDirectory(File path) {
- if (path.exists()) {
- File[] files = path.listFiles();
- for(int i = 0; i < files.length; i++) {
- if(files[i].isDirectory()) {
- deleteDirectory(files[i]);
- } else {
- files[i].delete();
- }
- }
- return path.delete();
- }
- return false;
- }
-}
diff --git a/tools/selinux/SELinuxNeverallowTestFrame.py b/tools/selinux/SELinuxNeverallowTestFrame.py
index 5eba5bb..45900de 100644
--- a/tools/selinux/SELinuxNeverallowTestFrame.py
+++ b/tools/selinux/SELinuxNeverallowTestFrame.py
@@ -72,8 +72,7 @@
/* obtain sepolicy file from running device */
devicePolicyFile = File.createTempFile("sepolicy", ".tmp");
devicePolicyFile.deleteOnExit();
- mDevice.executeAdbCommand("pull", "/sys/fs/selinux/policy",
- devicePolicyFile.getAbsolutePath());
+ mDevice.pullFile("/sys/fs/selinux/policy", devicePolicyFile);
}
"""
src_body = ""
diff --git a/tools/tradefed-host/src/com/android/cts/tradefed/testtype/DeqpTestRunner.java b/tools/tradefed-host/src/com/android/cts/tradefed/testtype/DeqpTestRunner.java
index 677bc43..43aaf98 100644
--- a/tools/tradefed-host/src/com/android/cts/tradefed/testtype/DeqpTestRunner.java
+++ b/tools/tradefed-host/src/com/android/cts/tradefed/testtype/DeqpTestRunner.java
@@ -19,6 +19,9 @@
import com.android.tradefed.testtype.IBuildReceiver;
import com.android.tradefed.testtype.IDeviceTest;
import com.android.tradefed.testtype.IRemoteTest;
+import com.android.tradefed.util.IRunUtil;
+import com.android.tradefed.util.RunInterruptedException;
+import com.android.tradefed.util.RunUtil;
import java.io.File;
import java.io.FileNotFoundException;
@@ -49,8 +52,8 @@
private static final String DEQP_ONDEVICE_APK = "com.drawelements.deqp.apk";
private static final String DEQP_ONDEVICE_PKG = "com.drawelements.deqp";
private static final String INCOMPLETE_LOG_MESSAGE = "Crash: Incomplete test log";
- private static final String DEVICE_LOST_MESSAGE = "Crash: Device lost";
private static final String SKIPPED_INSTANCE_LOG_MESSAGE = "Configuration skipped";
+ private static final String NOT_EXECUTABLE_LOG_MESSAGE = "Abort: Test cannot be executed";
private static final String CASE_LIST_FILE_NAME = "/sdcard/dEQP-TestCaseList.txt";
private static final String LOG_FILE_NAME = "/sdcard/TestLog.qpa";
public static final String FEATURE_LANDSCAPE = "android.hardware.screen.landscape";
@@ -67,12 +70,14 @@
private final Collection<TestIdentifier> mRemainingTests;
private final Map<TestIdentifier, Set<BatchRunConfiguration>> mTestInstances;
private final TestInstanceResultListener mInstanceListerner = new TestInstanceResultListener();
+ private final Map<TestIdentifier, Integer> mTestInstabilityRatings;
private IAbi mAbi;
private CtsBuildHelper mCtsBuild;
private boolean mLogData = false;
private ITestDevice mDevice;
private Set<String> mDeviceFeatures;
private Map<String, Boolean> mConfigQuerySupportCache = new HashMap<>();
+ private IRunUtil mRunUtil = RunUtil.getDefault();
private IRecovery mDeviceRecovery = new Recovery();
{
@@ -85,6 +90,7 @@
mName = name;
mRemainingTests = new LinkedList<>(tests); // avoid modifying arguments
mTestInstances = parseTestInstances(tests, testInstances);
+ mTestInstabilityRatings = new HashMap<>();
}
/**
@@ -145,6 +151,15 @@
mDeviceRecovery = deviceRecovery;
}
+ /**
+ * Set IRunUtil.
+ *
+ * Exposed for unit testing.
+ */
+ public void setRunUtil(IRunUtil runUtil) {
+ mRunUtil = runUtil;
+ }
+
private static final class CapabilityQueryFailureException extends Exception {
};
@@ -253,15 +268,15 @@
/**
* Forward result to sink
*/
- private void forwardFinalizedPendingResult() {
- if (mRemainingTests.contains(mCurrentTestId)) {
- final PendingResult result = mPendingResults.get(mCurrentTestId);
+ private void forwardFinalizedPendingResult(TestIdentifier testId) {
+ if (mRemainingTests.contains(testId)) {
+ final PendingResult result = mPendingResults.get(testId);
- mPendingResults.remove(mCurrentTestId);
- mRemainingTests.remove(mCurrentTestId);
+ mPendingResults.remove(testId);
+ mRemainingTests.remove(testId);
// Forward results to the sink
- mSink.testStarted(mCurrentTestId);
+ mSink.testStarted(testId);
// Test Log
if (mLogData) {
@@ -270,9 +285,8 @@
final ByteArrayInputStreamSource source
= new ByteArrayInputStreamSource(entry.getValue().getBytes());
- mSink.testLog(mCurrentTestId.getClassName() + "."
- + mCurrentTestId.getTestName() + "@" + entry.getKey().getId(),
- LogDataType.XML, source);
+ mSink.testLog(testId.getClassName() + "." + testId.getTestName() + "@"
+ + entry.getKey().getId(), LogDataType.XML, source);
source.cancel();
}
@@ -292,11 +306,11 @@
errorLog.append(entry.getValue());
}
- mSink.testFailed(mCurrentTestId, errorLog.toString());
+ mSink.testFailed(testId, errorLog.toString());
}
final Map<String, String> emptyMap = Collections.emptyMap();
- mSink.testEnded(mCurrentTestId, emptyMap);
+ mSink.testEnded(testId, emptyMap);
}
}
@@ -346,10 +360,29 @@
result.errorMessages.put(mRunConfig, SKIPPED_INSTANCE_LOG_MESSAGE);
result.remainingConfigs.remove(mRunConfig);
+ // Pending result finished, report result
if (result.remainingConfigs.isEmpty()) {
- // fake as if we actually run the test
- mCurrentTestId = testId;
- forwardFinalizedPendingResult();
+ forwardFinalizedPendingResult(testId);
+ }
+ }
+
+ /**
+ * Fake failure of an instance with current config
+ */
+ public void abortTest(TestIdentifier testId, String errorMessage) {
+ final PendingResult result = mPendingResults.get(testId);
+
+ // Mark as executed
+ result.allInstancesPassed = false;
+ result.errorMessages.put(mRunConfig, errorMessage);
+ result.remainingConfigs.remove(mRunConfig);
+
+ // Pending result finished, report result
+ if (result.remainingConfigs.isEmpty()) {
+ forwardFinalizedPendingResult(testId);
+ }
+
+ if (testId.equals(mCurrentTestId)) {
mCurrentTestId = null;
}
}
@@ -402,7 +435,7 @@
// Pending result finished, report result
if (result.remainingConfigs.isEmpty()) {
- forwardFinalizedPendingResult();
+ forwardFinalizedPendingResult(mCurrentTestId);
}
} else {
CLog.w("Got unexpected end of %s", mCurrentTestId);
@@ -461,7 +494,7 @@
// Pending result finished, report result
if (result.remainingConfigs.isEmpty()) {
- forwardFinalizedPendingResult();
+ forwardFinalizedPendingResult(mCurrentTestId);
}
} else {
CLog.w("Got unexpected termination of %s", mCurrentTestId);
@@ -506,41 +539,20 @@
}
/**
- * Signal listener that batch ended to flush incomplete results.
+ * Signal listener that batch ended and forget incomplete results.
*/
public void endBatch() {
// end open test if when stream ends
if (mCurrentTestId != null) {
- final Map<String, String> emptyMap = Collections.emptyMap();
- handleEndTestCase(emptyMap);
- }
- }
-
- /**
- * Signal listener that device just died.
- */
- public void onDeviceLost() {
- if (mCurrentTestId != null) {
- final PendingResult result = mPendingResults.get(mCurrentTestId);
-
- if (result == null) {
- CLog.e("Device lost in invalid state: %s", mCurrentTestId);
- return;
+ // Current instance was removed from remainingConfigs when case
+ // started. Mark current instance as pending.
+ if (mPendingResults.get(mCurrentTestId) != null) {
+ mPendingResults.get(mCurrentTestId).remainingConfigs.add(mRunConfig);
+ } else {
+ CLog.w("Got unexpected internal state of %s", mCurrentTestId);
}
-
- // kill current test
- result.allInstancesPassed = false;
- result.errorMessages.put(mRunConfig, DEVICE_LOST_MESSAGE);
-
- if (mLogData && mCurrentTestLog != null && mCurrentTestLog.length() > 0) {
- result.testLogs.put(mRunConfig, mCurrentTestLog);
- }
-
- // finish all pending instances
- result.remainingConfigs.clear();
- forwardFinalizedPendingResult();
- mCurrentTestId = null;
}
+ mCurrentTestId = null;
}
}
@@ -553,6 +565,8 @@
private Map<String, String> mValues;
private String mCurrentName;
private String mCurrentValue;
+ private int mResultCode;
+ private boolean mGotExitValue = false;
public InstrumentationParser(TestInstanceResultListener listener) {
@@ -592,6 +606,13 @@
mCurrentName = line.substring(nameBegin, nameEnd);
mCurrentValue = line.substring(valueBegin);
+ } else if (line.startsWith("INSTRUMENTATION_CODE: ")) {
+ try {
+ mResultCode = Integer.parseInt(line.substring(22));
+ mGotExitValue = true;
+ } catch (NumberFormatException ex) {
+ CLog.w("Instrumentation code format unexpected");
+ }
} else if (mCurrentValue != null) {
mCurrentValue = mCurrentValue + line;
}
@@ -623,6 +644,20 @@
public boolean isCancelled() {
return false;
}
+
+ /**
+ * Returns whether target instrumentation exited normally.
+ */
+ public boolean wasSuccessful() {
+ return mGotExitValue;
+ }
+
+ /**
+ * Returns the instrumentation return code.
+ */
+ public int getResultCode() {
+ return mResultCode;
+ }
}
/**
@@ -665,10 +700,16 @@
return false;
}
+ /**
+ * Returns whether target instrumentation exited normally.
+ */
public boolean wasSuccessful() {
return mGotExitValue;
}
+ /**
+ * Returns the instrumentation return code.
+ */
public int getResultCode() {
return mResultCode;
}
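
The substring(22) offset in the parser hunk above is simply the length of the literal prefix "INSTRUMENTATION_CODE: " (22 characters). A minimal, self-contained sketch of that parse step; the class and method names here are hypothetical and not part of the change:

    final class InstrumentationCodeParser {
        // "INSTRUMENTATION_CODE: " is 22 characters, matching the substring(22) above.
        private static final String CODE_PREFIX = "INSTRUMENTATION_CODE: ";

        // Returns the parsed exit code, or null if the line is not the exit marker
        // or the numeric part is malformed.
        static Integer parseResultCode(String line) {
            if (!line.startsWith(CODE_PREFIX)) {
                return null;
            }
            try {
                return Integer.parseInt(line.substring(CODE_PREFIX.length()));
            } catch (NumberFormatException ex) {
                return null;
            }
        }
    }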
@@ -740,6 +781,7 @@
*/
public static class Recovery implements IRecovery {
private int RETRY_COOLDOWN_MS = 6000; // 6 seconds
+ private int PROCESS_KILL_WAIT_MS = 1000; // 1 second
private static enum MachineState {
WAIT, // recover by waiting
@@ -752,6 +794,9 @@
private ITestDevice mDevice;
private ISleepProvider mSleepProvider;
+ private static class ProcessKillFailureException extends Exception {
+ }
+
/**
* {@inheritDoc}
*/
@@ -835,6 +880,9 @@
} catch (DeviceNotAvailableException ex) {
// chain forward
recoverComLinkKilled();
+ } catch (ProcessKillFailureException ex) {
+ // chain forward
+ recoverComLinkKilled();
}
break;
@@ -849,6 +897,9 @@
} catch (DeviceNotAvailableException ex) {
// chain forward
recoverComLinkKilled();
+ } catch (ProcessKillFailureException ex) {
+ // chain forward
+ recoverComLinkKilled();
}
break;
@@ -876,7 +927,8 @@
mSleepProvider.sleep(RETRY_COOLDOWN_MS);
}
- private void killDeqpProcess() throws DeviceNotAvailableException {
+ private Iterable<Integer> getDeqpProcessPids() throws DeviceNotAvailableException {
+ final List<Integer> pids = new ArrayList<Integer>(2);
final String processes = mDevice.executeShellCommand("ps | grep com.drawelements");
final String[] lines = processes.split("(\\r|\\n)+");
for (String line : lines) {
@@ -885,15 +937,29 @@
continue;
}
- final int processId;
try {
- processId = Integer.parseInt(fields[1], 10);
+ final int processId = Integer.parseInt(fields[1], 10);
+ pids.add(processId);
} catch (NumberFormatException ex) {
continue;
}
+ }
+ return pids;
+ }
+ private void killDeqpProcess() throws DeviceNotAvailableException,
+ ProcessKillFailureException {
+ for (Integer processId : getDeqpProcessPids()) {
mDevice.executeShellCommand(String.format("kill -9 %d", processId));
}
+
+ mSleepProvider.sleep(PROCESS_KILL_WAIT_MS);
+
+ // check that processes actually died
+ if (getDeqpProcessPids().iterator().hasNext()) {
+ // a process is still alive, killing failed
+ throw new ProcessKillFailureException();
+ }
}
public void recoverDevice() throws DeviceNotAvailableException {
@@ -1068,83 +1134,180 @@
return generateTestCaseTrieFromPaths(testPaths);
}
- /**
- * Executes tests on the device.
- */
- private void runTests() throws DeviceNotAvailableException, CapabilityQueryFailureException {
- mDeviceRecovery.setDevice(mDevice);
+ private static class TestBatch {
+ public BatchRunConfiguration config;
+ public List<TestIdentifier> tests;
+ }
- while (!mRemainingTests.isEmpty()) {
- // select tests for the batch
- final ArrayList<TestIdentifier> batchTests = new ArrayList<>(TESTCASE_BATCH_LIMIT);
- for (TestIdentifier test : mRemainingTests) {
- batchTests.add(test);
- if (batchTests.size() >= TESTCASE_BATCH_LIMIT) {
+ private TestBatch selectRunBatch() {
+ return selectRunBatch(mRemainingTests, null);
+ }
+
+ /**
+ * Creates a TestBatch from the given tests, or null if no tests remain.
+ *
+ * @param pool List of tests to select from
+ * @param requiredConfig Select only instances with pending requiredConfig, or null to select
+ * any run configuration.
+ */
+ private TestBatch selectRunBatch(Collection<TestIdentifier> pool,
+ BatchRunConfiguration requiredConfig) {
+ // select one test (leading test) that is going to be executed and then pack along as many
+ // other compatible instances as possible.
+
+ TestIdentifier leadingTest = null;
+ for (TestIdentifier test : pool) {
+ if (!mRemainingTests.contains(test)) {
+ continue;
+ }
+ if (requiredConfig != null &&
+ !mInstanceListerner.isPendingTestInstance(test, requiredConfig)) {
+ continue;
+ }
+ leadingTest = test;
+ break;
+ }
+
+ // no remaining tests?
+ if (leadingTest == null) {
+ return null;
+ }
+
+ BatchRunConfiguration leadingTestConfig = null;
+ if (requiredConfig != null) {
+ leadingTestConfig = requiredConfig;
+ } else {
+ for (BatchRunConfiguration runConfig : getTestRunConfigs(leadingTest)) {
+ if (mInstanceListerner.isPendingTestInstance(leadingTest, runConfig)) {
+ leadingTestConfig = runConfig;
break;
}
}
+ }
- // find union of all run configurations
- final Set<BatchRunConfiguration> allConfigs = new LinkedHashSet<>();
- for (TestIdentifier test : batchTests) {
- allConfigs.addAll(getTestRunConfigs(test));
+ // test pending <=> test has a pending config
+ if (leadingTestConfig == null) {
+ throw new AssertionError("search postcondition failed");
+ }
+
+ final int leadingInstability = getTestInstabilityRating(leadingTest);
+
+ final TestBatch runBatch = new TestBatch();
+ runBatch.config = leadingTestConfig;
+ runBatch.tests = new ArrayList<>();
+ runBatch.tests.add(leadingTest);
+
+ for (TestIdentifier test : pool) {
+ if (test == leadingTest) {
+ // do not re-select the leading test
+ continue;
}
-
- // prepare instance listener
- for (TestIdentifier test : batchTests) {
- mInstanceListerner.setTestInstances(test, getTestRunConfigs(test));
+ if (!mInstanceListerner.isPendingTestInstance(test, leadingTestConfig)) {
+ // select only compatible
+ continue;
}
-
- // run batch for all configurations
- for (BatchRunConfiguration runConfig : allConfigs) {
- final ArrayList<TestIdentifier> relevantTests =
- new ArrayList<>(TESTCASE_BATCH_LIMIT);
-
- // run only for declared run configs and only if test has not already
- // been attempted to run
- for (TestIdentifier test : batchTests) {
- if (mInstanceListerner.isPendingTestInstance(test, runConfig)) {
- relevantTests.add(test);
- }
- }
-
- if (!relevantTests.isEmpty()) {
- runTestRunBatch(relevantTests, runConfig);
- }
+ if (getTestInstabilityRating(test) != leadingInstability) {
+ // pack along only cases in the same stability category. Packing more dangerous
+ // tests along jeopardizes the stability of this run. Packing more stable tests
+ // along jeopardizes their stability rating.
+ continue;
}
+ if (runBatch.tests.size() >= getBatchSizeLimitForInstability(leadingInstability)) {
+ // batch size is limited.
+ break;
+ }
+ runBatch.tests.add(test);
+ }
+
+ return runBatch;
+ }
+
+ private int getBatchNumPendingCases(TestBatch batch) {
+ int numPending = 0;
+ for (TestIdentifier test : batch.tests) {
+ if (mInstanceListerner.isPendingTestInstance(test, batch.config)) {
+ ++numPending;
+ }
+ }
+ return numPending;
+ }
+
+ private int getBatchSizeLimitForInstability(int batchInstabilityRating) {
+ // reduce group size exponentially down to one
+ return Math.max(1, TESTCASE_BATCH_LIMIT / (1 << batchInstabilityRating));
+ }
+
+ private int getTestInstabilityRating(TestIdentifier testId) {
+ if (mTestInstabilityRatings.containsKey(testId)) {
+ return mTestInstabilityRatings.get(testId);
+ } else {
+ return 0;
}
}
- private void runTestRunBatch(Collection<TestIdentifier> tests, BatchRunConfiguration runConfig)
- throws DeviceNotAvailableException, CapabilityQueryFailureException {
- boolean isSupportedConfig = true;
+ private void recordTestInstability(TestIdentifier testId) {
+ mTestInstabilityRatings.put(testId, getTestInstabilityRating(testId) + 1);
+ }
+ private void clearTestInstability(TestIdentifier testId) {
+ mTestInstabilityRatings.put(testId, 0);
+ }
+
+ /**
+ * Executes all tests on the device.
+ */
+ private void runTests() throws DeviceNotAvailableException, CapabilityQueryFailureException {
+ for (;;) {
+ TestBatch batch = selectRunBatch();
+
+ if (batch == null) {
+ break;
+ }
+
+ runTestRunBatch(batch);
+ }
+ }
+
+ /**
+ * Runs a TestBatch by either faking it or executing it on a device.
+ */
+ private void runTestRunBatch(TestBatch batch) throws DeviceNotAvailableException,
+ CapabilityQueryFailureException {
+ // prepare instance listener
+ mInstanceListerner.setCurrentConfig(batch.config);
+ for (TestIdentifier test : batch.tests) {
+ mInstanceListerner.setTestInstances(test, getTestRunConfigs(test));
+ }
+
+ // execute only if config is executable, else fake results
+ if (isSupportedRunConfiguration(batch.config)) {
+ executeTestRunBatch(batch);
+ } else {
+ fakePassTestRunBatch(batch);
+ }
+ }
+
+ private boolean isSupportedRunConfiguration(BatchRunConfiguration runConfig)
+ throws DeviceNotAvailableException, CapabilityQueryFailureException {
// orientation support
if (!BatchRunConfiguration.ROTATION_UNSPECIFIED.equals(runConfig.getRotation())) {
final Set<String> features = getDeviceFeatures(mDevice);
if (isPortraitClassRotation(runConfig.getRotation()) &&
!features.contains(FEATURE_PORTRAIT)) {
- isSupportedConfig = false;
+ return false;
}
if (isLandscapeClassRotation(runConfig.getRotation()) &&
!features.contains(FEATURE_LANDSCAPE)) {
- isSupportedConfig = false;
+ return false;
}
}
- // renderability support for OpenGL ES tests
- if (isSupportedConfig && isOpenGlEsPackage()) {
- isSupportedConfig = isSupportedGlesRenderConfig(runConfig);
- }
-
- mInstanceListerner.setCurrentConfig(runConfig);
-
- // execute only if config is executable, else fake results
- if (isSupportedConfig) {
- executeTestRunBatch(tests, runConfig);
+ if (isOpenGlEsPackage()) {
+ // renderability support for OpenGL ES tests
+ return isSupportedGlesRenderConfig(runConfig);
} else {
- fakePassTestRunBatch(tests, runConfig);
+ return true;
}
}
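
getBatchSizeLimitForInstability() above halves the allowed batch size for every instability point a test has accumulated, but never drops below one test, so a repeatedly crashing case eventually runs alone. TESTCASE_BATCH_LIMIT is defined elsewhere in this file and its value is not visible in these hunks; the worked sketch below assumes a hypothetical limit of 1000 purely for illustration:

    // Hypothetical stand-in for TESTCASE_BATCH_LIMIT; the real constant is defined
    // elsewhere in DeqpTestRunner.java and may differ.
    static final int ASSUMED_BATCH_LIMIT = 1000;

    static int batchSizeLimitForInstability(int instabilityRating) {
        // Halve the batch per instability point, clamped to a minimum of one test:
        // rating 0 -> 1000, 1 -> 500, 2 -> 250, and so on down to 1.
        return Math.max(1, ASSUMED_BATCH_LIMIT / (1 << instabilityRating));
    }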
@@ -1186,9 +1349,68 @@
}
}
- private void executeTestRunBatch(Collection<TestIdentifier> tests,
- BatchRunConfiguration runConfig) throws DeviceNotAvailableException {
- final String testCases = generateTestCaseTrie(tests);
+ /**
+ * Executes the given test batch on a device.
+ */
+ private void executeTestRunBatch(TestBatch batch) throws DeviceNotAvailableException {
+ // attempt full run once
+ executeTestRunBatchRun(batch);
+
+ // split remaining tests into two sub-batches and execute both. This will terminate
+ // since executeTestRunBatchRun will always make progress for a batch of size 1.
+ final ArrayList<TestIdentifier> pendingTests = new ArrayList<>();
+
+ for (TestIdentifier test : batch.tests) {
+ if (mInstanceListerner.isPendingTestInstance(test, batch.config)) {
+ pendingTests.add(test);
+ }
+ }
+
+ final int divisorNdx = pendingTests.size() / 2;
+ final List<TestIdentifier> headList = pendingTests.subList(0, divisorNdx);
+ final List<TestIdentifier> tailList = pendingTests.subList(divisorNdx, pendingTests.size());
+
+ // head
+ for (;;) {
+ TestBatch subBatch = selectRunBatch(headList, batch.config);
+
+ if (subBatch == null) {
+ break;
+ }
+
+ executeTestRunBatch(subBatch);
+ }
+
+ // tail
+ for (;;) {
+ TestBatch subBatch = selectRunBatch(tailList, batch.config);
+
+ if (subBatch == null) {
+ break;
+ }
+
+ executeTestRunBatch(subBatch);
+ }
+
+ if (getBatchNumPendingCases(batch) != 0) {
+ throw new AssertionError("executeTestRunBatch postcondition failed");
+ }
+ }
+
+ /**
+ * Runs one execution pass over the given batch.
+ *
+ * Tries to run the batch. Always makes progress (executes instances or updates instability
+ * ratings).
+ */
+ private void executeTestRunBatchRun(TestBatch batch) throws DeviceNotAvailableException {
+ if (getBatchNumPendingCases(batch) != batch.tests.size()) {
+ throw new AssertionError("executeTestRunBatchRun precondition failed");
+ }
+
+ checkInterrupted(); // throws if interrupted
+
+ final String testCases = generateTestCaseTrie(batch.tests);
mDevice.executeShellCommand("rm " + CASE_LIST_FILE_NAME);
mDevice.executeShellCommand("rm " + LOG_FILE_NAME);
@@ -1201,7 +1423,7 @@
deqpCmdLine.append("--deqp-caselist-file=");
deqpCmdLine.append(CASE_LIST_FILE_NAME);
deqpCmdLine.append(" ");
- deqpCmdLine.append(getRunConfigDisplayCmdLine(runConfig));
+ deqpCmdLine.append(getRunConfigDisplayCmdLine(batch.config));
// If we are not logging data, do not bother outputting the images from the test exe.
if (!mLogData) {
@@ -1228,40 +1450,88 @@
parser.flush();
}
- try {
- final boolean progressedSinceLastCall =
- mInstanceListerner.getCurrentTestId() != null ||
- getNumRemainingInstances() < numRemainingInstancesBefore;
+ final boolean progressedSinceLastCall = mInstanceListerner.getCurrentTestId() != null ||
+ getNumRemainingInstances() < numRemainingInstancesBefore;
- if (progressedSinceLastCall) {
- mDeviceRecovery.onExecutionProgressed();
- }
+ if (progressedSinceLastCall) {
+ mDeviceRecovery.onExecutionProgressed();
+ }
- if (interruptingError == null) {
- // execution finished successfully, do nothing
- } else if (interruptingError instanceof AdbComLinkOpenError) {
+ // interrupted, try to recover
+ if (interruptingError != null) {
+ if (interruptingError instanceof AdbComLinkOpenError) {
mDeviceRecovery.recoverConnectionRefused();
} else if (interruptingError instanceof AdbComLinkKilledError) {
mDeviceRecovery.recoverComLinkKilled();
+ } else if (interruptingError instanceof RunInterruptedException) {
+ // external run interruption request. Terminate immediately.
+ throw (RunInterruptedException)interruptingError;
} else {
CLog.e(interruptingError);
throw new RuntimeException(interruptingError);
}
- } catch (DeviceNotAvailableException ex) {
- // Device lost. We must signal the tradedef by rethrowing this execption. However,
- // there is a possiblity that the device loss was caused by the currently run test
- // instance. Since CtsTest is unaware of tests with only some instances executed,
- // continuing the session after device has recovered will create a new DeqpTestRunner
- // with current test in its run queue and this will cause the re-execution of this same
- // instance. If the instance reliably can kill the device, the CTS cannot recover.
- //
- // Prevent this by terminating ALL instances of a tests if any of them causes a device
- // loss.
- mInstanceListerner.onDeviceLost();
- throw ex;
- } finally {
- mInstanceListerner.endBatch();
+
+ // recoverXXX did not throw => recovery succeeded
+ } else if (!parser.wasSuccessful()) {
+ mDeviceRecovery.recoverComLinkKilled();
+ // recoverXXX did not throw => recovery succeeded
}
+
+ // Progress guarantees.
+ if (batch.tests.size() == 1) {
+ final TestIdentifier onlyTest = batch.tests.iterator().next();
+ final boolean wasTestExecuted =
+ !mInstanceListerner.isPendingTestInstance(onlyTest, batch.config) &&
+ mInstanceListerner.getCurrentTestId() == null;
+ final boolean wasLinkFailure = !parser.wasSuccessful() || interruptingError != null;
+
+ // Link failures can be caused by external events, so require at least two
+ // observations before bailing.
+ if (!wasTestExecuted && (!wasLinkFailure || getTestInstabilityRating(onlyTest) > 0)) {
+ recordTestInstability(onlyTest);
+ // If we cannot finish the test, mark the case as a crash.
+ //
+ // If we couldn't even start the test, fail the test instance as non-executable.
+ // This is required so that consistently crashing or non-existent tests will
+ // not cause futile (non-terminating) re-execution attempts.
+ if (mInstanceListerner.getCurrentTestId() != null) {
+ mInstanceListerner.abortTest(onlyTest, INCOMPLETE_LOG_MESSAGE);
+ } else {
+ mInstanceListerner.abortTest(onlyTest, NOT_EXECUTABLE_LOG_MESSAGE);
+ }
+ } else if (wasTestExecuted) {
+ clearTestInstability(onlyTest);
+ }
+ } else {
+ // Analyze results to update test stability ratings. If there is no interrupting test
+ // logged, increase the instability rating of all remaining tests. If there is an
+ // interrupting test logged, increase only its instability rating.
+ //
+ // A successful run of tests clears instability rating.
+ if (mInstanceListerner.getCurrentTestId() == null) {
+ for (TestIdentifier test : batch.tests) {
+ if (mInstanceListerner.isPendingTestInstance(test, batch.config)) {
+ recordTestInstability(test);
+ } else {
+ clearTestInstability(test);
+ }
+ }
+ } else {
+ recordTestInstability(mInstanceListerner.getCurrentTestId());
+ for (TestIdentifier test : batch.tests) {
+ // \note: isPendingTestInstance is false for getCurrentTestId. Current ID is
+ // considered 'running' and will be restored to 'pending' in endBatch().
+ if (!test.equals(mInstanceListerner.getCurrentTestId()) &&
+ !mInstanceListerner.isPendingTestInstance(test, batch.config)) {
+ clearTestInstability(test);
+ }
+ }
+ }
+ }
+
+ mInstanceListerner.endBatch();
}
private static String getRunConfigDisplayCmdLine(BatchRunConfiguration runConfig) {
@@ -1303,13 +1573,21 @@
}
/**
+ * Checks if this execution has been marked as interrupted and throws if it has.
+ */
+ private void checkInterrupted() throws RunInterruptedException {
+ // Work around the API. RunUtil::checkInterrupted is private but we can call it indirectly
+ // by calling sleep() with a value <= 0.
+ mRunUtil.sleep(0);
+ }
+
+ /**
* Pass given batch tests without running it
*/
- private void fakePassTestRunBatch(Collection<TestIdentifier> tests,
- BatchRunConfiguration runConfig) {
- for (TestIdentifier test : tests) {
+ private void fakePassTestRunBatch(TestBatch batch) {
+ for (TestIdentifier test : batch.tests) {
CLog.d("Skipping test '%s' invocation in config '%s'", test.toString(),
- runConfig.getId());
+ batch.config.getId());
mInstanceListerner.skipTest(test);
}
}
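
checkInterrupted() above relies on the behaviour noted in its comment: RunUtil's private interruption check runs inside IRunUtil.sleep(), so requesting a sleep of 0 ms returns immediately in the normal case and throws RunInterruptedException when the run has been marked interrupted; injecting the IRunUtil via setRunUtil() is what makes this path unit-testable. A rough standalone analogue of the probe using plain Java thread interruption (illustrative only, not the tradefed API):

    // Rough analogue: check the current thread's interrupt flag without sleeping.
    // RuntimeException stands in here for tradefed's RunInterruptedException.
    static void checkInterrupted() {
        if (Thread.currentThread().isInterrupted()) {
            throw new RuntimeException("run interrupted");
        }
    }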
@@ -1543,6 +1821,7 @@
installTestApk();
mInstanceListerner.setSink(listener);
+ mDeviceRecovery.setDevice(mDevice);
runTests();
uninstallTestApk();
diff --git a/tools/tradefed-host/tests/src/com/android/cts/tradefed/testtype/DeqpTestRunnerTest.java b/tools/tradefed-host/tests/src/com/android/cts/tradefed/testtype/DeqpTestRunnerTest.java
index 5a05049..7ec09c9 100644
--- a/tools/tradefed-host/tests/src/com/android/cts/tradefed/testtype/DeqpTestRunnerTest.java
+++ b/tools/tradefed-host/tests/src/com/android/cts/tradefed/testtype/DeqpTestRunnerTest.java
@@ -27,6 +27,8 @@
import com.android.tradefed.device.ITestDevice;
import com.android.tradefed.result.ITestInvocationListener;
import com.android.tradefed.testtype.IAbi;
+import com.android.tradefed.util.IRunUtil;
+import com.android.tradefed.util.RunInterruptedException;
import junit.framework.TestCase;
@@ -71,6 +73,43 @@
DEFAULT_INSTANCE_ARGS.iterator().next().put("surfacetype", "window");
}
+ private static class StubRecovery implements DeqpTestRunner.IRecovery {
+ /**
+ * {@inheritDoc}
+ */
+ @Override
+ public void setSleepProvider(DeqpTestRunner.ISleepProvider sleepProvider) {
+ }
+
+ /**
+ * {@inheritDoc}
+ */
+ @Override
+ public void setDevice(ITestDevice device) {
+ }
+
+ /**
+ * {@inheritDoc}
+ */
+ @Override
+ public void onExecutionProgressed() {
+ }
+
+ /**
+ * {@inheritDoc}
+ */
+ @Override
+ public void recoverConnectionRefused() throws DeviceNotAvailableException {
+ }
+
+ /**
+ * {@inheritDoc}
+ */
+ @Override
+ public void recoverComLinkKilled() throws DeviceNotAvailableException {
+ }
+ };
+
/**
* {@inheritDoc}
*/
@@ -512,6 +551,125 @@
}
/**
+ * Test running an unexecutable test.
+ */
+ public void testRun_unexecutableTests() throws Exception {
+ final String instrumentationAnswerNoExecs =
+ "INSTRUMENTATION_STATUS: dEQP-SessionInfo-Name=releaseName\r\n"
+ + "INSTRUMENTATION_STATUS: dEQP-EventType=SessionInfo\r\n"
+ + "INSTRUMENTATION_STATUS: dEQP-SessionInfo-Value=2014.x\r\n"
+ + "INSTRUMENTATION_STATUS_CODE: 0\r\n"
+ + "INSTRUMENTATION_STATUS: dEQP-SessionInfo-Name=releaseId\r\n"
+ + "INSTRUMENTATION_STATUS: dEQP-EventType=SessionInfo\r\n"
+ + "INSTRUMENTATION_STATUS: dEQP-SessionInfo-Value=0xcafebabe\r\n"
+ + "INSTRUMENTATION_STATUS_CODE: 0\r\n"
+ + "INSTRUMENTATION_STATUS: dEQP-SessionInfo-Name=targetName\r\n"
+ + "INSTRUMENTATION_STATUS: dEQP-EventType=SessionInfo\r\n"
+ + "INSTRUMENTATION_STATUS: dEQP-SessionInfo-Value=android\r\n"
+ + "INSTRUMENTATION_STATUS_CODE: 0\r\n"
+ + "INSTRUMENTATION_STATUS: dEQP-EventType=BeginSession\r\n"
+ + "INSTRUMENTATION_STATUS_CODE: 0\r\n"
+ + "INSTRUMENTATION_STATUS: dEQP-EventType=EndSession\r\n"
+ + "INSTRUMENTATION_STATUS_CODE: 0\r\n"
+ + "INSTRUMENTATION_CODE: 0\r\n";
+
+ final TestIdentifier[] testIds = {
+ new TestIdentifier("dEQP-GLES3.missing", "no"),
+ new TestIdentifier("dEQP-GLES3.missing", "nope"),
+ new TestIdentifier("dEQP-GLES3.missing", "donotwant"),
+ };
+
+ final String[] testPaths = {
+ "dEQP-GLES3.missing.no",
+ "dEQP-GLES3.missing.nope",
+ "dEQP-GLES3.missing.donotwant",
+ };
+
+ ITestDevice mockDevice = EasyMock.createMock(ITestDevice.class);
+ ITestInvocationListener mockListener
+ = EasyMock.createStrictMock(ITestInvocationListener.class);
+ IDevice mockIDevice = EasyMock.createMock(IDevice.class);
+
+ Collection<TestIdentifier> tests = new ArrayList<TestIdentifier>();
+ Map<TestIdentifier, List<Map<String, String>>> instances = new HashMap<>();
+
+ for (TestIdentifier id : testIds) {
+ tests.add(id);
+ instances.put(id, DEFAULT_INSTANCE_ARGS);
+ }
+
+ DeqpTestRunner deqpTest = new DeqpTestRunner(NAME, NAME, tests, instances);
+ deqpTest.setAbi(UnitTests.ABI);
+
+ int version = 3 << 16;
+ EasyMock.expect(mockDevice.getProperty("ro.opengles.version"))
+ .andReturn(Integer.toString(version)).atLeastOnce();
+
+ EasyMock.expect(mockDevice.uninstallPackage(EasyMock.eq(DEQP_ONDEVICE_PKG))).andReturn("")
+ .once();
+ EasyMock.expect(mockDevice.installPackage(EasyMock.<File>anyObject(),
+ EasyMock.eq(true), EasyMock.eq(AbiUtils.createAbiFlag(UnitTests.ABI.getName()))))
+ .andReturn(null).once();
+
+ expectRenderConfigQuery(mockDevice, 3, 0);
+
+ String commandLine = String.format(
+ "--deqp-caselist-file=%s --deqp-gl-config-name=rgba8888d24s8 "
+ + "--deqp-screen-rotation=unspecified "
+ + "--deqp-surface-type=window "
+ + "--deqp-log-images=disable "
+ + "--deqp-watchdog=enable",
+ CASE_LIST_FILE_NAME);
+
+ // first try
+ runInstrumentationLineAndAnswer(mockDevice, mockIDevice,
+ "{dEQP-GLES3{missing{no,nope,donotwant}}}", commandLine, instrumentationAnswerNoExecs);
+
+ // splitting begins
+ runInstrumentationLineAndAnswer(mockDevice, mockIDevice,
+ "{dEQP-GLES3{missing{no}}}", commandLine, instrumentationAnswerNoExecs);
+ runInstrumentationLineAndAnswer(mockDevice, mockIDevice,
+ "{dEQP-GLES3{missing{nope,donotwant}}}", commandLine, instrumentationAnswerNoExecs);
+ runInstrumentationLineAndAnswer(mockDevice, mockIDevice,
+ "{dEQP-GLES3{missing{nope}}}", commandLine, instrumentationAnswerNoExecs);
+ runInstrumentationLineAndAnswer(mockDevice, mockIDevice,
+ "{dEQP-GLES3{missing{donotwant}}}", commandLine, instrumentationAnswerNoExecs);
+
+ mockListener.testRunStarted(ID, testPaths.length);
+ EasyMock.expectLastCall().once();
+
+ for (int i = 0; i < testPaths.length; i++) {
+ mockListener.testStarted(EasyMock.eq(testIds[i]));
+ EasyMock.expectLastCall().once();
+
+ mockListener.testFailed(EasyMock.eq(testIds[i]),
+ EasyMock.eq("=== with config {glformat=rgba8888d24s8,rotation=unspecified,surfacetype=window} ===\n"
+ + "Abort: Test cannot be executed"));
+ EasyMock.expectLastCall().once();
+
+ mockListener.testEnded(EasyMock.eq(testIds[i]),
+ EasyMock.<Map<String, String>>notNull());
+ EasyMock.expectLastCall().once();
+ }
+
+ mockListener.testRunEnded(EasyMock.anyLong(), EasyMock.<Map<String, String>>notNull());
+ EasyMock.expectLastCall().once();
+
+ EasyMock.expect(mockDevice.uninstallPackage(EasyMock.eq(DEQP_ONDEVICE_PKG))).andReturn("")
+ .once();
+
+ EasyMock.replay(mockDevice, mockIDevice);
+ EasyMock.replay(mockListener);
+
+ deqpTest.setDevice(mockDevice);
+ deqpTest.setBuildHelper(new StubCtsBuildHelper());
+ deqpTest.run(mockListener);
+
+ EasyMock.verify(mockListener);
+ EasyMock.verify(mockDevice, mockIDevice);
+ }
+
+ /**
* Test that test are left unexecuted if pm list query fails
*/
public void testRun_queryPmListFailure()
@@ -1011,7 +1169,7 @@
* Test dEQP with multiple instances
*/
public void testRun_multipleInstances() throws Exception {
- final String instrumentationAnswerConfigA =
+ final String instrumentationAnswerConfigAPass1 =
"INSTRUMENTATION_STATUS: dEQP-SessionInfo-Name=releaseName\r\n"
+ "INSTRUMENTATION_STATUS: dEQP-EventType=SessionInfo\r\n"
+ "INSTRUMENTATION_STATUS: dEQP-SessionInfo-Value=2014.x\r\n"
@@ -1042,10 +1200,30 @@
+ "INSTRUMENTATION_STATUS: dEQP-TestCaseResult-Details=Pass\r\n"
+ "INSTRUMENTATION_STATUS: dEQP-EventType=TestCaseResult\r\n"
+ "INSTRUMENTATION_STATUS_CODE: 0\r\n"
+ + "INSTRUMENTATION_STATUS: dEQP-EventType=EndTestCase\r\n"
+ + "INSTRUMENTATION_STATUS_CODE: 0\r\n"
+ "INSTRUMENTATION_STATUS: dEQP-EventType=BeginTestCase\r\n"
+ "INSTRUMENTATION_STATUS: dEQP-BeginTestCase-TestCasePath=dEQP-GLES3.instances.crashtwo\r\n"
+ "INSTRUMENTATION_STATUS_CODE: 0\r\n"; // early eof
- final String instrumentationAnswerConfigB =
+ final String instrumentationAnswerConfigAPass2 =
+ "INSTRUMENTATION_STATUS: dEQP-SessionInfo-Name=releaseName\r\n"
+ + "INSTRUMENTATION_STATUS: dEQP-EventType=SessionInfo\r\n"
+ + "INSTRUMENTATION_STATUS: dEQP-SessionInfo-Value=2014.x\r\n"
+ + "INSTRUMENTATION_STATUS_CODE: 0\r\n"
+ + "INSTRUMENTATION_STATUS: dEQP-SessionInfo-Name=releaseId\r\n"
+ + "INSTRUMENTATION_STATUS: dEQP-EventType=SessionInfo\r\n"
+ + "INSTRUMENTATION_STATUS: dEQP-SessionInfo-Value=0xcafebabe\r\n"
+ + "INSTRUMENTATION_STATUS_CODE: 0\r\n"
+ + "INSTRUMENTATION_STATUS: dEQP-SessionInfo-Name=targetName\r\n"
+ + "INSTRUMENTATION_STATUS: dEQP-EventType=SessionInfo\r\n"
+ + "INSTRUMENTATION_STATUS: dEQP-SessionInfo-Value=android\r\n"
+ + "INSTRUMENTATION_STATUS_CODE: 0\r\n"
+ + "INSTRUMENTATION_STATUS: dEQP-EventType=BeginSession\r\n"
+ + "INSTRUMENTATION_STATUS_CODE: 0\r\n"
+ + "INSTRUMENTATION_STATUS: dEQP-EventType=BeginTestCase\r\n"
+ + "INSTRUMENTATION_STATUS: dEQP-BeginTestCase-TestCasePath=dEQP-GLES3.instances.crashtwo\r\n"
+ + "INSTRUMENTATION_STATUS_CODE: 0\r\n"; // early eof
+ final String instrumentationAnswerConfigBPass1 =
"INSTRUMENTATION_STATUS: dEQP-SessionInfo-Name=releaseName\r\n"
+ "INSTRUMENTATION_STATUS: dEQP-EventType=SessionInfo\r\n"
+ "INSTRUMENTATION_STATUS: dEQP-SessionInfo-Value=2014.x\r\n"
@@ -1070,12 +1248,6 @@
+ "INSTRUMENTATION_STATUS: dEQP-EventType=EndTestCase\r\n"
+ "INSTRUMENTATION_STATUS_CODE: 0\r\n"
+ "INSTRUMENTATION_STATUS: dEQP-EventType=BeginTestCase\r\n"
- + "INSTRUMENTATION_STATUS: dEQP-BeginTestCase-TestCasePath=dEQP-GLES3.instances.crashtwo\r\n"
- + "INSTRUMENTATION_STATUS_CODE: 0\r\n"
- + "INSTRUMENTATION_STATUS: dEQP-TerminateTestCase-Reason=Magic\r\n"
- + "INSTRUMENTATION_STATUS: dEQP-EventType=TerminateTestCase\r\n"
- + "INSTRUMENTATION_STATUS_CODE: 0\r\n"
- + "INSTRUMENTATION_STATUS: dEQP-EventType=BeginTestCase\r\n"
+ "INSTRUMENTATION_STATUS: dEQP-BeginTestCase-TestCasePath=dEQP-GLES3.instances.skipone\r\n"
+ "INSTRUMENTATION_STATUS_CODE: 0\r\n"
+ "INSTRUMENTATION_STATUS: dEQP-TestCaseResult-Code=Pass\r\n"
@@ -1087,7 +1259,31 @@
+ "INSTRUMENTATION_STATUS: dEQP-EventType=EndSession\r\n"
+ "INSTRUMENTATION_STATUS_CODE: 0\r\n"
+ "INSTRUMENTATION_CODE: 0\r\n";
- final String instrumentationAnswerConfigC =
+ final String instrumentationAnswerConfigBPass2 =
+ "INSTRUMENTATION_STATUS: dEQP-SessionInfo-Name=releaseName\r\n"
+ + "INSTRUMENTATION_STATUS: dEQP-EventType=SessionInfo\r\n"
+ + "INSTRUMENTATION_STATUS: dEQP-SessionInfo-Value=2014.x\r\n"
+ + "INSTRUMENTATION_STATUS_CODE: 0\r\n"
+ + "INSTRUMENTATION_STATUS: dEQP-SessionInfo-Name=releaseId\r\n"
+ + "INSTRUMENTATION_STATUS: dEQP-EventType=SessionInfo\r\n"
+ + "INSTRUMENTATION_STATUS: dEQP-SessionInfo-Value=0xcafebabe\r\n"
+ + "INSTRUMENTATION_STATUS_CODE: 0\r\n"
+ + "INSTRUMENTATION_STATUS: dEQP-SessionInfo-Name=targetName\r\n"
+ + "INSTRUMENTATION_STATUS: dEQP-EventType=SessionInfo\r\n"
+ + "INSTRUMENTATION_STATUS: dEQP-SessionInfo-Value=android\r\n"
+ + "INSTRUMENTATION_STATUS_CODE: 0\r\n"
+ + "INSTRUMENTATION_STATUS: dEQP-EventType=BeginSession\r\n"
+ + "INSTRUMENTATION_STATUS_CODE: 0\r\n"
+ + "INSTRUMENTATION_STATUS: dEQP-EventType=BeginTestCase\r\n"
+ + "INSTRUMENTATION_STATUS: dEQP-BeginTestCase-TestCasePath=dEQP-GLES3.instances.crashtwo\r\n"
+ + "INSTRUMENTATION_STATUS_CODE: 0\r\n"
+ + "INSTRUMENTATION_STATUS: dEQP-TerminateTestCase-Reason=Magic\r\n"
+ + "INSTRUMENTATION_STATUS: dEQP-EventType=TerminateTestCase\r\n"
+ + "INSTRUMENTATION_STATUS_CODE: 0\r\n"
+ + "INSTRUMENTATION_STATUS: dEQP-EventType=EndSession\r\n"
+ + "INSTRUMENTATION_STATUS_CODE: 0\r\n"
+ + "INSTRUMENTATION_CODE: 0\r\n";
+ final String instrumentationAnswerConfigCPass1 =
"INSTRUMENTATION_STATUS: dEQP-SessionInfo-Name=releaseName\r\n"
+ "INSTRUMENTATION_STATUS: dEQP-EventType=SessionInfo\r\n"
+ "INSTRUMENTATION_STATUS: dEQP-SessionInfo-Value=2014.x\r\n"
@@ -1111,6 +1307,24 @@
+ "INSTRUMENTATION_STATUS_CODE: 0\r\n"
+ "INSTRUMENTATION_STATUS: dEQP-EventType=EndTestCase\r\n"
+ "INSTRUMENTATION_STATUS_CODE: 0\r\n"
+ + "INSTRUMENTATION_STATUS: dEQP-EventType=EndSession\r\n"
+ + "INSTRUMENTATION_STATUS_CODE: 0\r\n"
+ + "INSTRUMENTATION_CODE: 0\r\n";
+ final String instrumentationAnswerConfigCPass2 =
+ "INSTRUMENTATION_STATUS: dEQP-SessionInfo-Name=releaseName\r\n"
+ + "INSTRUMENTATION_STATUS: dEQP-EventType=SessionInfo\r\n"
+ + "INSTRUMENTATION_STATUS: dEQP-SessionInfo-Value=2014.x\r\n"
+ + "INSTRUMENTATION_STATUS_CODE: 0\r\n"
+ + "INSTRUMENTATION_STATUS: dEQP-SessionInfo-Name=releaseId\r\n"
+ + "INSTRUMENTATION_STATUS: dEQP-EventType=SessionInfo\r\n"
+ + "INSTRUMENTATION_STATUS: dEQP-SessionInfo-Value=0xcafebabe\r\n"
+ + "INSTRUMENTATION_STATUS_CODE: 0\r\n"
+ + "INSTRUMENTATION_STATUS: dEQP-SessionInfo-Name=targetName\r\n"
+ + "INSTRUMENTATION_STATUS: dEQP-EventType=SessionInfo\r\n"
+ + "INSTRUMENTATION_STATUS: dEQP-SessionInfo-Value=android\r\n"
+ + "INSTRUMENTATION_STATUS_CODE: 0\r\n"
+ + "INSTRUMENTATION_STATUS: dEQP-EventType=BeginSession\r\n"
+ + "INSTRUMENTATION_STATUS_CODE: 0\r\n"
+ "INSTRUMENTATION_STATUS: dEQP-EventType=BeginTestCase\r\n"
+ "INSTRUMENTATION_STATUS: dEQP-BeginTestCase-TestCasePath=dEQP-GLES3.instances.crashtwo\r\n"
+ "INSTRUMENTATION_STATUS_CODE: 0\r\n"
@@ -1186,10 +1400,11 @@
tests.add(id);
}
- ITestDevice mockDevice = EasyMock.createMock(ITestDevice.class);
ITestInvocationListener mockListener
= EasyMock.createStrictMock(ITestInvocationListener.class);
- IDevice mockIDevice = EasyMock.createMock(IDevice.class);
+ IMocksControl orderedControl = EasyMock.createStrictControl();
+ ITestDevice mockDevice = orderedControl.createMock(ITestDevice.class);
+ IDevice mockIDevice = orderedControl.createMock(IDevice.class);
DeqpTestRunner deqpTest = new DeqpTestRunner(NAME, NAME, tests, instances);
deqpTest.setAbi(UnitTests.ABI);
@@ -1199,8 +1414,6 @@
int version = 3 << 16;
EasyMock.expect(mockDevice.getProperty("ro.opengles.version"))
.andReturn(Integer.toString(version)).atLeastOnce();
- EasyMock.expect(mockDevice.executeShellCommand("pm list features")).andReturn(ALL_FEATURES)
- .anyTimes();
EasyMock.expect(mockDevice.uninstallPackage(EasyMock.eq(DEQP_ONDEVICE_PKG))).
andReturn("").once();
@@ -1218,7 +1431,7 @@
+ "--deqp-gl-major-version=3 "
+ "--deqp-gl-minor-version=0", "Yes");
- // run config A
+ // run config A - first pass
runInstrumentationLineAndAnswer(mockDevice, mockIDevice,
"{dEQP-GLES3{instances{passall,failone,crashtwo}}}",
"--deqp-caselist-file=" + CASE_LIST_FILE_NAME
@@ -1226,9 +1439,23 @@
+ "--deqp-screen-rotation=unspecified "
+ "--deqp-surface-type=window "
+ "--deqp-log-images=disable "
- + "--deqp-watchdog=enable", instrumentationAnswerConfigA);
+ + "--deqp-watchdog=enable", instrumentationAnswerConfigAPass1);
+
+ // run config A - second pass
+ runInstrumentationLineAndAnswer(mockDevice, mockIDevice,
+ "{dEQP-GLES3{instances{crashtwo}}}",
+ "--deqp-caselist-file=" + CASE_LIST_FILE_NAME
+ + " --deqp-gl-config-name=rgba8888d24s8 "
+ + "--deqp-screen-rotation=unspecified "
+ + "--deqp-surface-type=window "
+ + "--deqp-log-images=disable "
+ + "--deqp-watchdog=enable", instrumentationAnswerConfigAPass2);
// query for config B
+
+ EasyMock.expect(mockDevice.executeShellCommand("pm list features")).andReturn(ALL_FEATURES)
+ .once();
+
expectRenderConfigQueryAndReturn(mockDevice,
"--deqp-gl-config-name=rgba8888d24s8 "
+ "--deqp-screen-rotation=90 "
@@ -1236,15 +1463,15 @@
+ "--deqp-gl-major-version=3 "
+ "--deqp-gl-minor-version=0", "Yes");
- // run for config B
+ // run for config B - first pass
runInstrumentationLineAndAnswer(mockDevice, mockIDevice,
- "{dEQP-GLES3{instances{passall,crashtwo,skipone}}}",
+ "{dEQP-GLES3{instances{passall,skipone}}}",
"--deqp-caselist-file=" + CASE_LIST_FILE_NAME
+ " --deqp-gl-config-name=rgba8888d24s8 "
+ "--deqp-screen-rotation=90 "
+ "--deqp-surface-type=window "
+ "--deqp-log-images=disable "
- + "--deqp-watchdog=enable", instrumentationAnswerConfigB);
+ + "--deqp-watchdog=enable", instrumentationAnswerConfigBPass1);
// query for config C
expectRenderConfigQueryAndReturn(mockDevice,
@@ -1254,15 +1481,35 @@
+ "--deqp-gl-major-version=3 "
+ "--deqp-gl-minor-version=0", "Yes");
- // run for config C
+ // run for config C - first pass
runInstrumentationLineAndAnswer(mockDevice, mockIDevice,
- "{dEQP-GLES3{instances{failone,crashtwo}}}",
+ "{dEQP-GLES3{instances{failone}}}",
"--deqp-caselist-file=" + CASE_LIST_FILE_NAME
+ " --deqp-gl-config-name=rgba8888d24s8 "
+ "--deqp-screen-rotation=180 "
+ "--deqp-surface-type=window "
+ "--deqp-log-images=disable "
- + "--deqp-watchdog=enable" , instrumentationAnswerConfigC);
+ + "--deqp-watchdog=enable", instrumentationAnswerConfigCPass1);
+
+ // run for config C - second pass
+ runInstrumentationLineAndAnswer(mockDevice, mockIDevice,
+ "{dEQP-GLES3{instances{crashtwo}}}",
+ "--deqp-caselist-file=" + CASE_LIST_FILE_NAME
+ + " --deqp-gl-config-name=rgba8888d24s8 "
+ + "--deqp-screen-rotation=180 "
+ + "--deqp-surface-type=window "
+ + "--deqp-log-images=disable "
+ + "--deqp-watchdog=enable", instrumentationAnswerConfigCPass2);
+
+ // run for config B - second pass (crashtwo has been deferred due to its instability)
+ runInstrumentationLineAndAnswer(mockDevice, mockIDevice,
+ "{dEQP-GLES3{instances{crashtwo}}}",
+ "--deqp-caselist-file=" + CASE_LIST_FILE_NAME
+ + " --deqp-gl-config-name=rgba8888d24s8 "
+ + "--deqp-screen-rotation=90 "
+ + "--deqp-surface-type=window "
+ + "--deqp-log-images=disable "
+ + "--deqp-watchdog=enable", instrumentationAnswerConfigBPass2);
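The extra passes above model how the runner isolates the case that took a batch down: crashtwo is dropped from the normal per-config batches and re-run on its own once the stable cases have finished, so one unstable test cannot repeatedly kill a shared run. A minimal sketch of the batch-splitting idea, with hypothetical names:

    import java.util.ArrayList;
    import java.util.List;

    // Illustrative only: not DeqpTestRunner's actual scheduling code.
    final class BatchSplitSketch {
        /** Removes the case that was executing when the run died; the caller
         *  re-queues it as a single-test second pass for the same config. */
        static List<String> deferCrashedCase(List<String> batch, String crashedCase) {
            List<String> remaining = new ArrayList<>(batch);
            remaining.remove(crashedCase);
            return remaining;   // e.g. {passall, failone} once crashtwo is deferred
        }
    }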
// query for unsupported config
expectRenderConfigQueryAndReturn(mockDevice,
@@ -1321,11 +1568,12 @@
mockListener.testRunEnded(EasyMock.anyLong(), EasyMock.<Map<String, String>>notNull());
EasyMock.expectLastCall().once();
- EasyMock.replay(mockDevice, mockIDevice);
+ orderedControl.replay();
EasyMock.replay(mockListener);
+ deqpTest.setRecovery(new StubRecovery());
deqpTest.run(mockListener);
EasyMock.verify(mockListener);
- EasyMock.verify(mockDevice, mockIDevice);
+ orderedControl.verify();
}
private void testMultipleInstancesLossOfDeviceMidInstance(final boolean recoverySuccessful)
@@ -1377,7 +1625,6 @@
+ "INSTRUMENTATION_STATUS_CODE: 0\r\n"; // early <EOF>
final TestIdentifier testId = new TestIdentifier("dEQP-GLES3.loss", "instance");
- final String testPath = "dEQP-GLES3.loss.instance";
Map<String,String> supportedConfigA = new HashMap<>();
supportedConfigA.put("glconfig", "rgba8888d24s8");
@@ -1512,6 +1759,16 @@
mockRecovery.recoverComLinkKilled();
EasyMock.expectLastCall().once();
+ // retry running config B
+ runInstrumentationLineAndAnswer(mockDevice, mockIDevice,
+ "{dEQP-GLES3{loss{instance}}}",
+ "--deqp-caselist-file=" + CASE_LIST_FILE_NAME
+ + " --deqp-gl-config-name=rgba8888d24s8 "
+ + "--deqp-screen-rotation=90 "
+ + "--deqp-surface-type=window "
+ + "--deqp-log-images=disable "
+ + "--deqp-watchdog=enable", instrumentationAnswerFine);
+
// query config C
expectRenderConfigQueryAndReturn(mockDevice,
"--deqp-gl-config-name=rgba8888d24s8 "
@@ -1534,20 +1791,14 @@
mockListener.testRunStarted(ID, 1);
EasyMock.expectLastCall().once();
- mockListener.testStarted(EasyMock.eq(testId));
- EasyMock.expectLastCall().once();
-
- final String crashDescription = (recoverySuccessful) ? ("Incomplete test log") : ("Device lost");
- mockListener.testFailed(testId,
- "=== with config {glformat=rgba8888d24s8,rotation=90,surfacetype=window} ===\n"
- + "Crash: " + crashDescription);
- EasyMock.expectLastCall().once();
-
- mockListener.testEnded(EasyMock.eq(testId), EasyMock.<Map<String, String>>notNull());
- EasyMock.expectLastCall().once();
-
- // run is ended successfully only if device is available
+ // result is reported only if device is available
if (recoverySuccessful) {
+ mockListener.testStarted(EasyMock.eq(testId));
+ EasyMock.expectLastCall().once();
+
+ mockListener.testEnded(EasyMock.eq(testId), EasyMock.<Map<String, String>>notNull());
+ EasyMock.expectLastCall().once();
+
mockListener.testRunEnded(EasyMock.anyLong(), EasyMock.<Map<String, String>>notNull());
EasyMock.expectLastCall().once();
}
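With this change the listener callbacks for the interrupted case are expected only on the recovery path: if the device never comes back, the runner reports nothing for that instance instead of inventing a crash result. A minimal sketch of the conditional-reporting idea, with hypothetical names:

    // Illustrative only: a hypothetical helper, not DeqpTestRunner code.
    final class ConditionalReportSketch {
        /** Forwards results only when the device survived recovery; a lost device
         *  leaves the interrupted case unreported, matching the expectations above. */
        static void reportIfRecovered(boolean recoverySuccessful, Runnable reportResult) {
            if (recoverySuccessful) {
                reportResult.run();   // testStarted / testEnded / testRunEnded
            }
        }
    }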
@@ -1620,15 +1871,20 @@
EasyMock.expectLastCall().once();
}
- private void setRecoveryExpectationKillProcess(RecoverableTestDevice mockDevice)
- throws DeviceNotAvailableException {
+ private void setRecoveryExpectationKillProcess(RecoverableTestDevice mockDevice,
+ DeqpTestRunner.ISleepProvider mockSleepProvider) throws DeviceNotAvailableException {
EasyMock.expect(mockDevice.executeShellCommand(EasyMock.contains("ps"))).
andReturn("root 1234 com.drawelement.deqp").once();
EasyMock.expect(mockDevice.executeShellCommand(EasyMock.eq("kill -9 1234"))).
andReturn("").once();
- }
+ // Recovery checks if kill failed
+ mockSleepProvider.sleep(EasyMock.gt(0));
+ EasyMock.expectLastCall().once();
+ EasyMock.expect(mockDevice.executeShellCommand(EasyMock.contains("ps"))).
+ andReturn("").once();
+ }
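The helper now mirrors the runner's extra safety step: after issuing kill -9, recovery sleeps briefly and runs ps again to confirm the dEQP process actually died. A minimal sketch of that check, assuming tradefed's ITestDevice and the ISleepProvider used in these expectations; the exact shell commands and the findDeqpPid helper are illustrative, not the runner's real code:

    import com.android.tradefed.device.DeviceNotAvailableException;
    import com.android.tradefed.device.ITestDevice;

    // Illustrative sketch of the kill-then-verify step.
    final class KillVerifySketch {
        static boolean killAndVerify(ITestDevice device, DeqpTestRunner.ISleepProvider sleeper)
                throws DeviceNotAvailableException {
            String pid = findDeqpPid(device.executeShellCommand("ps"));
            if (pid == null) {
                return true;                                   // nothing left to kill
            }
            device.executeShellCommand("kill -9 " + pid);
            sleeper.sleep(1000);                               // give the process time to exit
            return findDeqpPid(device.executeShellCommand("ps")) == null;
        }

        // Hypothetical parser for the "user pid name" ps lines used in the mocks above.
        private static String findDeqpPid(String psOutput) {
            for (String line : psOutput.split("\n")) {
                if (line.contains("com.drawelement.deqp")) {
                    String[] fields = line.trim().split("\\s+");
                    if (fields.length >= 2) return fields[1];
                }
            }
            return null;
        }
    }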
private void setRecoveryExpectationRecovery(RecoverableTestDevice mockDevice)
throws DeviceNotAvailableException {
@@ -1664,11 +1920,11 @@
switch (numConsecutiveErrors) {
case 0:
setRecoveryExpectationWait(mockSleepProvider);
- setRecoveryExpectationKillProcess(mockDevice);
+ setRecoveryExpectationKillProcess(mockDevice, mockSleepProvider);
return 1;
case 1:
setRecoveryExpectationRecovery(mockDevice);
- setRecoveryExpectationKillProcess(mockDevice);
+ setRecoveryExpectationKillProcess(mockDevice, mockSleepProvider);
return 2;
case 2:
setRecoveryExpectationReboot(mockDevice);
@@ -1839,6 +2095,223 @@
RecoveryEvent.PROGRESS);
}
+ /**
+ * Test recovery when the process cannot be killed.
+ */
+ public void testRecovery_unkillableProcess() throws Exception {
+ DeqpTestRunner.Recovery recovery = new DeqpTestRunner.Recovery();
+ IMocksControl orderedControl = EasyMock.createStrictControl();
+ RecoverableTestDevice mockDevice = orderedControl.createMock(RecoverableTestDevice.class);
+ DeqpTestRunner.ISleepProvider mockSleepProvider =
+ orderedControl.createMock(DeqpTestRunner.ISleepProvider.class);
+
+ // recovery attempts to kill the process after a timeout
+ mockSleepProvider.sleep(EasyMock.gt(0));
+ EasyMock.expect(mockDevice.executeShellCommand(EasyMock.contains("ps"))).
+ andReturn("root 1234 com.drawelement.deqp").once();
+ EasyMock.expect(mockDevice.executeShellCommand(EasyMock.eq("kill -9 1234"))).
+ andReturn("").once();
+
+ // Recovery checks if kill failed
+ mockSleepProvider.sleep(EasyMock.gt(0));
+ EasyMock.expectLastCall().once();
+ EasyMock.expect(mockDevice.executeShellCommand(EasyMock.contains("ps"))).
+ andReturn("root 1234 com.drawelement.deqp").once();
+
+ // Recovery resets the connection
+ mockDevice.recoverDevice();
+ EasyMock.expectLastCall().once();
+
+ // and attempts to kill the process again
+ EasyMock.expect(mockDevice.executeShellCommand(EasyMock.contains("ps"))).
+ andReturn("root 1234 com.drawelement.deqp").once();
+ EasyMock.expect(mockDevice.executeShellCommand(EasyMock.eq("kill -9 1234"))).
+ andReturn("").once();
+
+ // Recovery checks if kill failed
+ mockSleepProvider.sleep(EasyMock.gt(0));
+ EasyMock.expectLastCall().once();
+ EasyMock.expect(mockDevice.executeShellCommand(EasyMock.contains("ps"))).
+ andReturn("root 1234 com.drawelement.deqp").once();
+
+ // recovery reboots the device
+ mockDevice.reboot();
+ EasyMock.expectLastCall().once();
+
+ orderedControl.replay();
+ recovery.setDevice(mockDevice);
+ recovery.setSleepProvider(mockSleepProvider);
+ recovery.recoverComLinkKilled();
+ orderedControl.verify();
+ }
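The test above pins down the escalation order when kill -9 is not enough: wait and re-check, reset the connection with recoverDevice(), retry the kill, and only then fall back to a reboot. A minimal sketch of that ladder, reusing the killAndVerify sketch shown earlier and the test's RecoverableTestDevice type; this is not the actual DeqpTestRunner.Recovery implementation:

    // Illustrative escalation ladder; the real Recovery tracks more state.
    final class EscalationSketch {
        static void recoverStuckProcess(RecoverableTestDevice device,
                DeqpTestRunner.ISleepProvider sleeper) throws DeviceNotAvailableException {
            if (KillVerifySketch.killAndVerify(device, sleeper)) {
                return;                      // process died on the first attempt
            }
            device.recoverDevice();          // reset the connection and try again
            if (KillVerifySketch.killAndVerify(device, sleeper)) {
                return;
            }
            device.reboot();                 // last resort, as the expectations above require
        }
    }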
+
+ /**
+ * Test external interruption before batch run.
+ */
+ public void testInterrupt_killBeforeBatch() throws Exception {
+ final TestIdentifier testId = new TestIdentifier("dEQP-GLES3.interrupt", "test");
+
+ Collection<TestIdentifier> tests = new ArrayList<TestIdentifier>();
+ tests.add(testId);
+
+ Map<TestIdentifier, List<Map<String, String>>> instance = new HashMap<>();
+ instance.put(testId, DEFAULT_INSTANCE_ARGS);
+
+ ITestInvocationListener mockListener
+ = EasyMock.createStrictMock(ITestInvocationListener.class);
+ ITestDevice mockDevice = EasyMock.createMock(ITestDevice.class);
+ IDevice mockIDevice = EasyMock.createMock(IDevice.class);
+ IRunUtil mockRunUtil = EasyMock.createMock(IRunUtil.class);
+
+ DeqpTestRunner deqpTest = new DeqpTestRunner(NAME, NAME, tests, instance);
+ deqpTest.setAbi(UnitTests.ABI);
+ deqpTest.setDevice(mockDevice);
+ deqpTest.setBuildHelper(new StubCtsBuildHelper());
+ deqpTest.setRunUtil(mockRunUtil);
+
+ int version = 3 << 16;
+ EasyMock.expect(mockDevice.getProperty("ro.opengles.version"))
+ .andReturn(Integer.toString(version)).atLeastOnce();
+
+ EasyMock.expect(mockDevice.uninstallPackage(EasyMock.eq(DEQP_ONDEVICE_PKG))).
+ andReturn("").once();
+
+ EasyMock.expect(mockDevice.installPackage(EasyMock.<File>anyObject(),
+ EasyMock.eq(true),
+ EasyMock.eq(AbiUtils.createAbiFlag(UnitTests.ABI.getName())))).andReturn(null)
+ .once();
+
+ expectRenderConfigQuery(mockDevice,
+ "--deqp-gl-config-name=rgba8888d24s8 --deqp-screen-rotation=unspecified "
+ + "--deqp-surface-type=window --deqp-gl-major-version=3 "
+ + "--deqp-gl-minor-version=0");
+
+ mockRunUtil.sleep(0);
+ EasyMock.expectLastCall().andThrow(new RunInterruptedException());
+
+ mockListener.testRunStarted(ID, 1);
+ EasyMock.expectLastCall().once();
+
+ EasyMock.replay(mockDevice, mockIDevice);
+ EasyMock.replay(mockListener);
+ EasyMock.replay(mockRunUtil);
+ try {
+ deqpTest.run(mockListener);
+ fail("expected RunInterruptedException");
+ } catch (RunInterruptedException ex) {
+ // expected
+ }
+ EasyMock.verify(mockRunUtil);
+ EasyMock.verify(mockListener);
+ EasyMock.verify(mockDevice, mockIDevice);
+ }
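The interruption here is driven through IRunUtil: the runner calls sleep(0) as a checkpoint before starting a batch, and when that call throws tradefed's unchecked RunInterruptedException the test expects it to escape run() untouched, after testRunStarted but before any device work. A minimal sketch of the checkpoint idea:

    import com.android.tradefed.util.IRunUtil;

    // Illustrative only: a checkpoint in the spirit of what this test exercises.
    // The zero-length sleep gives tradefed (or, here, the mock) a chance to raise
    // RunInterruptedException before the next batch is sent to the device.
    final class InterruptCheckpointSketch {
        static void checkpoint(IRunUtil runUtil) {
            runUtil.sleep(0);
        }
    }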
+
+ /**
+ * Test external interruption in testFailed().
+ */
+ public void testInterrupt_killReportTestFailed() throws Exception {
+ final TestIdentifier testId = new TestIdentifier("dEQP-GLES3.interrupt", "test");
+ final String testPath = "dEQP-GLES3.interrupt.test";
+ final String testTrie = "{dEQP-GLES3{interrupt{test}}}";
+ final String output = "INSTRUMENTATION_STATUS: dEQP-SessionInfo-Name=releaseName\r\n"
+ + "INSTRUMENTATION_STATUS: dEQP-EventType=SessionInfo\r\n"
+ + "INSTRUMENTATION_STATUS: dEQP-SessionInfo-Value=2014.x\r\n"
+ + "INSTRUMENTATION_STATUS_CODE: 0\r\n"
+ + "INSTRUMENTATION_STATUS: dEQP-SessionInfo-Name=releaseId\r\n"
+ + "INSTRUMENTATION_STATUS: dEQP-EventType=SessionInfo\r\n"
+ + "INSTRUMENTATION_STATUS: dEQP-SessionInfo-Value=0xcafebabe\r\n"
+ + "INSTRUMENTATION_STATUS_CODE: 0\r\n"
+ + "INSTRUMENTATION_STATUS: dEQP-SessionInfo-Name=targetName\r\n"
+ + "INSTRUMENTATION_STATUS: dEQP-EventType=SessionInfo\r\n"
+ + "INSTRUMENTATION_STATUS: dEQP-SessionInfo-Value=android\r\n"
+ + "INSTRUMENTATION_STATUS_CODE: 0\r\n"
+ + "INSTRUMENTATION_STATUS: dEQP-EventType=BeginSession\r\n"
+ + "INSTRUMENTATION_STATUS_CODE: 0\r\n"
+ + "INSTRUMENTATION_STATUS: dEQP-EventType=BeginTestCase\r\n"
+ + "INSTRUMENTATION_STATUS: dEQP-BeginTestCase-TestCasePath=" + testPath + "\r\n"
+ + "INSTRUMENTATION_STATUS_CODE: 0\r\n"
+ + "INSTRUMENTATION_STATUS: dEQP-TestCaseResult-Code=Fail\r\n"
+ + "INSTRUMENTATION_STATUS: dEQP-TestCaseResult-Details=Fail\r\n"
+ + "INSTRUMENTATION_STATUS: dEQP-EventType=TestCaseResult\r\n"
+ + "INSTRUMENTATION_STATUS_CODE: 0\r\n"
+ + "INSTRUMENTATION_STATUS: dEQP-EventType=EndTestCase\r\n"
+ + "INSTRUMENTATION_STATUS_CODE: 0\r\n"
+ + "INSTRUMENTATION_STATUS: dEQP-EventType=EndSession\r\n"
+ + "INSTRUMENTATION_STATUS_CODE: 0\r\n"
+ + "INSTRUMENTATION_CODE: 0\r\n";
+
+ Collection<TestIdentifier> tests = new ArrayList<TestIdentifier>();
+ tests.add(testId);
+
+ Map<TestIdentifier, List<Map<String, String>>> instance = new HashMap<>();
+ instance.put(testId, DEFAULT_INSTANCE_ARGS);
+
+ ITestInvocationListener mockListener
+ = EasyMock.createStrictMock(ITestInvocationListener.class);
+ ITestDevice mockDevice = EasyMock.createMock(ITestDevice.class);
+ IDevice mockIDevice = EasyMock.createMock(IDevice.class);
+ IRunUtil mockRunUtil = EasyMock.createMock(IRunUtil.class);
+
+ DeqpTestRunner deqpTest = new DeqpTestRunner(NAME, NAME, tests, instance);
+ deqpTest.setAbi(UnitTests.ABI);
+ deqpTest.setDevice(mockDevice);
+ deqpTest.setBuildHelper(new StubCtsBuildHelper());
+ deqpTest.setRunUtil(mockRunUtil);
+
+ int version = 3 << 16;
+ EasyMock.expect(mockDevice.getProperty("ro.opengles.version"))
+ .andReturn(Integer.toString(version)).atLeastOnce();
+
+ EasyMock.expect(mockDevice.uninstallPackage(EasyMock.eq(DEQP_ONDEVICE_PKG))).
+ andReturn("").once();
+
+ EasyMock.expect(mockDevice.installPackage(EasyMock.<File>anyObject(),
+ EasyMock.eq(true),
+ EasyMock.eq(AbiUtils.createAbiFlag(UnitTests.ABI.getName())))).andReturn(null)
+ .once();
+
+ expectRenderConfigQuery(mockDevice,
+ "--deqp-gl-config-name=rgba8888d24s8 --deqp-screen-rotation=unspecified "
+ + "--deqp-surface-type=window --deqp-gl-major-version=3 "
+ + "--deqp-gl-minor-version=0");
+
+ mockRunUtil.sleep(0);
+ EasyMock.expectLastCall().once();
+
+ String commandLine = String.format(
+ "--deqp-caselist-file=%s --deqp-gl-config-name=rgba8888d24s8 "
+ + "--deqp-screen-rotation=unspecified "
+ + "--deqp-surface-type=window "
+ + "--deqp-log-images=disable "
+ + "--deqp-watchdog=enable",
+ CASE_LIST_FILE_NAME);
+
+ runInstrumentationLineAndAnswer(mockDevice, mockIDevice, testTrie, commandLine,
+ output);
+
+ mockListener.testRunStarted(ID, 1);
+ EasyMock.expectLastCall().once();
+
+ mockListener.testStarted(EasyMock.eq(testId));
+ EasyMock.expectLastCall().once();
+
+ mockListener.testFailed(EasyMock.eq(testId), EasyMock.<String>notNull());
+ EasyMock.expectLastCall().andThrow(new RunInterruptedException());
+
+ EasyMock.replay(mockDevice, mockIDevice);
+ EasyMock.replay(mockListener);
+ EasyMock.replay(mockRunUtil);
+ try {
+ deqpTest.run(mockListener);
+ fail("expected RunInterruptedException");
+ } catch (RunInterruptedException ex) {
+ // expected
+ }
+ EasyMock.verify(mockRunUtil);
+ EasyMock.verify(mockListener);
+ EasyMock.verify(mockDevice, mockIDevice);
+ }
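The output constant in this test spells out the instrumentation protocol the runner consumes: repeated INSTRUMENTATION_STATUS: key=value lines, each bundle closed by an INSTRUMENTATION_STATUS_CODE line. A hypothetical parser for that line format, shown only to make the fixtures easier to read (the runner's real parsing is more involved):

    import java.util.HashMap;
    import java.util.Map;

    // Hypothetical parser for the INSTRUMENTATION_STATUS lines used in the fixtures above.
    final class StatusLineSketch {
        /** Collects key=value pairs until a STATUS_CODE line closes the bundle. */
        static Map<String, String> parseBundle(String[] lines, int start) {
            Map<String, String> bundle = new HashMap<>();
            for (int i = start; i < lines.length; i++) {
                String line = lines[i].trim();
                if (line.startsWith("INSTRUMENTATION_STATUS_CODE:")) {
                    break;                                  // bundle complete
                }
                if (line.startsWith("INSTRUMENTATION_STATUS:")) {
                    String kv = line.substring("INSTRUMENTATION_STATUS:".length()).trim();
                    int eq = kv.indexOf('=');
                    if (eq > 0) {
                        bundle.put(kv.substring(0, eq), kv.substring(eq + 1));
                    }
                }
            }
            return bundle;
        }
    }

For the fixture above, the first bundle parses to dEQP-SessionInfo-Name=releaseName, dEQP-EventType=SessionInfo, and dEQP-SessionInfo-Value=2014.x.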
+
private void runInstrumentationLineAndAnswer(ITestDevice mockDevice, IDevice mockIDevice,
final String testTrie, final String cmd, final String output) throws Exception {
EasyMock.expect(mockDevice.executeShellCommand(EasyMock.eq("rm " + CASE_LIST_FILE_NAME)))
diff --git a/tools/utils/buildCts.py b/tools/utils/buildCts.py
index 2fd71a8..6b74608 100755
--- a/tools/utils/buildCts.py
+++ b/tools/utils/buildCts.py
@@ -159,6 +159,7 @@
plan.Include('com\.android\.cts\..*')#TODO(stuartscott): Should PDK have all these?
self.__WritePlan(plan, 'PDK')
+ temporarily_known_failure_tests = BuildCtsTemporarilyKnownFailureList();
flaky_tests = BuildCtsFlakyTestList()
releasekey_tests = BuildListForReleaseBuildTest()
@@ -190,6 +191,8 @@
for package, test_list in small_tests.iteritems():
plan.Include(package+'$')
plan.Exclude(r'com\.android\.cts\.browserbench')
+ for package, test_list in temporarily_known_failure_tests.iteritems():
+ plan.ExcludeTests(package, test_list)
for package, test_list in flaky_tests.iteritems():
plan.ExcludeTests(package, test_list)
for package, test_list in releasekey_tests.iteritems():
@@ -264,6 +267,7 @@
# different deqp sets in different plans.
plan.ExcludeTests('com.drawelements.deqp.gles3', ReadFileLines(os.path.join(self.test_root, 'deqp/gles3-temporary-failures.txt')))
plan.ExcludeTests('com.drawelements.deqp.gles31', ReadFileLines(os.path.join(self.test_root, 'deqp/gles31-temporary-failures.txt')))
+ plan.ExcludeTests('com.drawelements.deqp.egl', ReadFileLines(os.path.join(self.test_root, 'deqp/egl-temporary-failures.txt')))
self.__WritePlan(plan, 'CTS-DEQP')
# CTS - sub plan for new test packages added for staging
@@ -274,6 +278,9 @@
plan.Exclude(package+'$')
for package, tests_list in new_test_packages.iteritems():
plan.Exclude(package+'$')
+ for package, test_list in temporarily_known_failure_tests.iteritems():
+ plan.Include(package+'$')
+ plan.IncludeTests(package, test_list)
plan.Exclude(r'com\.drawelements\.')
plan.Exclude(r'android\.hardware$')
plan.Exclude(r'android\.media$')
@@ -288,7 +295,6 @@
plan = tools.TestPlan(packages)
plan.Exclude('.*')
- plan.Include(r'android\.webgl')
self.__WritePlan(plan, 'CTS-webview')
@@ -402,8 +408,7 @@
'android.signature' : [],
'android.tv' : [],
'android.uiautomation' : [],
- 'android.uirendering' : [],
- 'android.webgl' : []}
+ 'android.uirendering' : []}
def BuildListForReleaseBuildTest():
""" Construct a defaultdict that maps package name to a list of tests
@@ -457,6 +462,41 @@
'com.android.cts.filesystemperf.RandomRWTest#testRandomUpdate',],
'' : []}
+def BuildCtsTemporarilyKnownFailureList():
+ """ Construct a defaultdict that maps package name to a list of tests
+ that are known failures during the development cycle but are expected to be fixed before launch """
+ return {
+ 'android.content' : [
+ 'android.content.cts.ContentResolverTest#testAndroidTestCaseSetupProperly',
+ 'android.content.cts.ContentResolverTest#testBulkInsert',
+ 'android.content.cts.ContentResolverTest#testCancelableQuery_WhenCanceledBeforeQuery_ThrowsImmediately',
+ 'android.content.cts.ContentResolverTest#testCancelableQuery_WhenCanceledDuringLongRunningQuery_CancelsQueryAndThrows',
+ 'android.content.cts.ContentResolverTest#testCancelableQuery_WhenNotCanceled_ReturnsResultSet',
+ 'android.content.cts.ContentResolverTest#testConstructor',
+ 'android.content.cts.ContentResolverTest#testCrashOnLaunch',
+ 'android.content.cts.ContentResolverTest#testCrashingOpenAssetFileDescriptor',
+ 'android.content.cts.ContentResolverTest#testCrashingOpenTypedAssetFileDescriptor',
+ 'android.content.cts.ContentResolverTest#testCrashingQuery',
+ 'android.content.cts.ContentResolverTest#testDelete',
+ 'android.content.cts.ContentResolverTest#testGetType',
+ 'android.content.cts.ContentResolverTest#testInsert',
+ 'android.content.cts.ContentResolverTest#testNotifyChange1',
+ 'android.content.cts.ContentResolverTest#testNotifyChange2',
+ 'android.content.cts.ContentResolverTest#testOpenAssetFileDescriptor',
+ 'android.content.cts.ContentResolverTest#testOpenFileDescriptor',
+ 'android.content.cts.ContentResolverTest#testOpenInputStream',
+ 'android.content.cts.ContentResolverTest#testOpenOutputStream',
+ 'android.content.cts.ContentResolverTest#testQuery',
+ 'android.content.cts.ContentResolverTest#testRegisterContentObserver',
+ 'android.content.cts.ContentResolverTest#testStableToUnstableRefs',
+ 'android.content.cts.ContentResolverTest#testStartCancelSync',
+ 'android.content.cts.ContentResolverTest#testStartSyncFailure',
+ 'android.content.cts.ContentResolverTest#testUnstableGetType',
+ 'android.content.cts.ContentResolverTest#testUnstableToStableRefs',
+ 'android.content.cts.ContentResolverTest#testUpdate',
+ 'android.content.cts.ContentResolverTest#testValidateSyncExtrasBundle',],
+ '' : []}
+
def LogGenerateDescription(name):
print 'Generating test description for package %s' % name
diff --git a/tools/vm-tests-tf/Android.mk b/tools/vm-tests-tf/Android.mk
index e66ec7c..b1cbe37 100644
--- a/tools/vm-tests-tf/Android.mk
+++ b/tools/vm-tests-tf/Android.mk
@@ -100,13 +100,13 @@
$(hide) cd $(PRIVATE_INTERMEDIATES_HOSTJUNIT_FILES)/classes && zip -q -r ../../android.core.vm-tests-tf.jar .
$(hide) cd $(dir $@) && zip -q -r android.core.vm-tests-tf.jar tests
else # LOCAL_JACK_ENABLED
-$(vmteststf_jar) : $(vmteststf_dep_jars) $(JACK_JAR) $(JILL_JAR) out/target/common/obj/JAVA_LIBRARIES/core-libart_intermediates/classes.jack $(HOST_OUT_JAVA_LIBRARIES)/tradefed-prebuilt.jar
+$(vmteststf_jar) : $(vmteststf_dep_jars) $(JACK_JAR) $(JILL_JAR) $(call intermediates-dir-for,JAVA_LIBRARIES,core-libart,,COMMON)/classes.jack $(HOST_OUT_JAVA_LIBRARIES)/tradefed-prebuilt.jar
$(hide) rm -rf $(dir $@) && mkdir -p $(dir $@)
$(hide) mkdir -p $(PRIVATE_INTERMEDIATES_HOSTJUNIT_FILES)/dot/junit $(dir $(PRIVATE_INTERMEDIATES_DEXCORE_JAR))
# generated and compile the host side junit tests
@echo "Write generated Main_*.java files to $(PRIVATE_INTERMEDIATES_MAIN_FILES)"
$(hide) java -cp $(PRIVATE_CLASS_PATH) util.build.JackBuildDalvikSuite $(PRIVATE_SRC_FOLDER) $(PRIVATE_INTERMEDIATES) \
- out/target/common/obj/JAVA_LIBRARIES/core-libart_intermediates/classes.jack:$(cts-tf-dalvik-lib.jack):$(HOST_OUT_JAVA_LIBRARIES)/tradefed-prebuilt.jar \
+ $(call intermediates-dir-for,JAVA_LIBRARIES,core-libart,,COMMON)/classes.jack:$(cts-tf-dalvik-lib.jack):$(HOST_OUT_JAVA_LIBRARIES)/tradefed-prebuilt.jar \
$(PRIVATE_INTERMEDIATES_MAIN_FILES) $(PRIVATE_INTERMEDIATES_CLASSES) $(PRIVATE_INTERMEDIATES_HOSTJUNIT_FILES) $$RUN_VM_TESTS_RTO
@echo "Generate $(PRIVATE_INTERMEDIATES_DEXCORE_JAR)"
$(hide) jar -cf $(PRIVATE_INTERMEDIATES_DEXCORE_JAR)-class.jar \