Merge "Refractor for coexbasetest and performancebasetest."
diff --git a/Android.mk b/Android.mk
index adf3524..f33b490 100644
--- a/Android.mk
+++ b/Android.mk
@@ -23,11 +23,11 @@
# general Android Connectivity Test Suite
ACTS_DISTRO := $(HOST_OUT)/acts-dist/acts.zip
-$(ACTS_DISTRO): $(sort $(shell find $(LOCAL_PATH)/acts/framework))
+$(ACTS_DISTRO): $(sort $(shell find $(LOCAL_PATH)/acts))
@echo "Packaging ACTS into $(ACTS_DISTRO)"
@mkdir -p $(HOST_OUT)/acts-dist/
@rm -f $(HOST_OUT)/acts-dist/acts.zip
- $(hide) zip -r $(HOST_OUT)/acts-dist/acts.zip tools/test/connectivity/acts/*
+ $(hide) zip $(HOST_OUT)/acts-dist/acts.zip $(shell find tools/test/connectivity/acts/* ! -wholename "*__pycache__*")
acts: $(ACTS_DISTRO)
.PHONY: acts
diff --git a/PREUPLOAD.cfg b/PREUPLOAD.cfg
index 2867353..749e47d 100644
--- a/PREUPLOAD.cfg
+++ b/PREUPLOAD.cfg
@@ -12,6 +12,7 @@
acts_job_test = ./acts/framework/tests/acts_job_test.py
acts_test_runner_test = ./acts/framework/tests/acts_test_runner_test.py
acts_unittest_suite = ./acts/framework/tests/acts_unittest_suite.py
+version_selector_tests = ./acts/framework/tests/libs/version_selector_test.py
acts_utils_test = ./acts/framework/tests/acts_utils_test.py
acts_host_utils_test = ./acts/framework/tests/acts_host_utils_test.py
acts_import_test_utils_test = ./acts/framework/tests/acts_import_test_utils_test.py
@@ -21,6 +22,9 @@
acts_metrics_test = ./acts/framework/tests/libs/metrics/unittest_bundle.py
test_runner_test = ./acts/framework/tests/test_runner_test.py
event_unittest_bundle = ./acts/framework/tests/event/event_unittest_bundle.py
+logging_unittest_bundle = ./acts/framework/tests/libs/logging/logging_unittest_bundle.py
+proc_unittest_bundle = ./acts/framework/tests/libs/proc/proc_unittest_bundle.py
+android_lib_unittest_bundle = ./acts/framework/tests/controllers/android_lib/android_lib_unittest_bundle.py
keyword_check = ./tools/keyword_check.py
yapf_hook = ./tools/yapf_checker.py
lab_test = ./tools/lab/lab_upload_hooks.py
diff --git a/acts/framework/acts/base_test.py b/acts/framework/acts/base_test.py
index f6d14e6..d0f3b9a 100755
--- a/acts/framework/acts/base_test.py
+++ b/acts/framework/acts/base_test.py
@@ -18,8 +18,6 @@
import traceback
from concurrent.futures import ThreadPoolExecutor
-from acts.event import subscription_bundle
-
from acts import asserts
from acts import keys
from acts import logger
@@ -28,10 +26,16 @@
from acts import error
from acts import tracelogger
from acts import utils
-
-# Macro strings for test result reporting
+from acts.event import event_bus
+from acts.event import subscription_bundle
+from acts.event.event import TestCaseBeginEvent
+from acts.event.event import TestCaseEndEvent
+from acts.event.event import TestClassBeginEvent
+from acts.event.event import TestClassEndEvent
from acts.event.subscription_bundle import SubscriptionBundle
+
+# Macro strings for test result reporting
TEST_CASE_TOKEN = "[Test Case]"
RESULT_LINE_TEMPLATE = TEST_CASE_TOKEN + " %s %s"
@@ -83,6 +87,7 @@
if ad.droid:
utils.set_location_service(ad, False)
utils.sync_device_time(ad)
+ self.testbed_name = ''
def __enter__(self):
return self
@@ -399,6 +404,7 @@
self.log.info("%s %s", TEST_CASE_TOKEN, test_name)
verdict = None
try:
+ event_bus.post(TestCaseBeginEvent(self, test_func))
try:
if hasattr(self, 'android_devices'):
for ad in self.android_devices:
@@ -451,7 +457,8 @@
self._exec_procedure_func(self._on_blocked, tr_record)
except error.ActsError as e:
self.results.errors.append(e)
- self.log.error("BaseTest execute_one_test_case error: %s" % e.message)
+ self.log.error(
+ 'BaseTest execute_one_test_case error: %s' % e.message)
except Exception as e:
self.log.error(traceback.format_exc())
# Exception happened during test.
@@ -467,6 +474,8 @@
tr_record.test_fail()
self._exec_procedure_func(self._on_fail, tr_record)
finally:
+ event_bus.post(TestCaseEndEvent(
+ self, test_func, signals.TestSignal))
if not is_generate_trigger:
self.results.add_record(tr_record)
@@ -662,6 +671,7 @@
"""
self.register_test_class_event_subscriptions()
self.log.info("==========> %s <==========", self.TAG)
+ event_bus.post(TestClassBeginEvent(self))
# Devise the actual test cases to run in the test class.
if not test_names:
if self.tests:
@@ -705,6 +715,7 @@
raise e
finally:
self._exec_func(self.teardown_class)
+ event_bus.post(TestClassEndEvent(self, self.results))
self.log.info("Summary for test class %s: %s", self.TAG,
self.results.summary_str())
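
The hooks above publish test lifecycle events on the global event bus. A minimal subscriber sketch (the exact event_bus.register signature and the logging done here are assumptions, not part of this patch):

    import logging

    from acts.event import event_bus
    from acts.event.event import TestCaseBeginEvent

    def _on_test_case_begin(event):
        # Called for every TestCaseBeginEvent posted by base_test above.
        logging.info('Test case began: %s', event)

    event_bus.register(TestCaseBeginEvent, _on_test_case_begin)
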
diff --git a/acts/framework/acts/controllers/__init__.py b/acts/framework/acts/controllers/__init__.py
index 41e48df..78014d7 100644
--- a/acts/framework/acts/controllers/__init__.py
+++ b/acts/framework/acts/controllers/__init__.py
@@ -25,5 +25,5 @@
"""This is a list of all the top level controller modules"""
__all__ = [
"android_device", "attenuator", "monsoon", "access_point", "iperf_server",
- "packet_sender", "arduino_wifi_dongle"
+ "packet_sender", "arduino_wifi_dongle", "packet_capture"
]
diff --git a/acts/framework/acts/controllers/access_point.py b/acts/framework/acts/controllers/access_point.py
index 4691e9b..9e87d43 100755
--- a/acts/framework/acts/controllers/access_point.py
+++ b/acts/framework/acts/controllers/access_point.py
@@ -433,7 +433,7 @@
self.scapy_install_path = None
def send_ra(self, iface, mac=RA_MULTICAST_ADDR, interval=1, count=None,
- lifetime=LIFETIME):
+ lifetime=LIFETIME, rtt=0):
"""Invoke scapy and send RA to the device.
Args:
@@ -442,10 +442,11 @@
interval: int Time to sleep between consecutive packets.
count: int Number of packets to be sent.
lifetime: int original RA's router lifetime in seconds.
+ rtt: retrans timer of the RA packet
"""
scapy_command = os.path.join(self.scapy_install_path, RA_SCRIPT)
- options = ' -m %s -i %d -c %d -l %d -in %s' % (
- mac, interval, count, lifetime, iface)
+ options = ' -m %s -i %d -c %d -l %d -in %s -rtt %s' % (
+ mac, interval, count, lifetime, iface, rtt)
self.log.info("Scapy cmd: %s" % scapy_command + options)
res = self.ssh.run(scapy_command + options)
diff --git a/acts/framework/acts/controllers/adb.py b/acts/framework/acts/controllers/adb.py
index dbbb75c..23dbb12 100644
--- a/acts/framework/acts/controllers/adb.py
+++ b/acts/framework/acts/controllers/adb.py
@@ -75,8 +75,6 @@
>> adb.devices() # will return the console output of "adb devices".
"""
- _SERVER_LOCAL_PORT = None
-
def __init__(self, serial="", ssh_connection=None):
"""Construct an instance of AdbProxy.
@@ -86,11 +84,12 @@
connected to a remote host that we can reach via SSH.
"""
self.serial = serial
+ self._server_local_port = None
adb_path = self._exec_cmd("which adb")
adb_cmd = [adb_path]
if serial:
adb_cmd.append("-s %s" % serial)
- if ssh_connection is not None and not AdbProxy._SERVER_LOCAL_PORT:
+ if ssh_connection is not None:
# Kill all existing adb processes on the remote host (if any)
# Note that if there are none, then pkill exits with non-zero status
ssh_connection.run("pkill adb", ignore_status=True)
@@ -103,9 +102,9 @@
ssh_connection.run(remote_adb_cmd)
# Proxy a local port to the adb server port
local_port = ssh_connection.create_ssh_tunnel(5037)
- AdbProxy._SERVER_LOCAL_PORT = local_port
+ self._server_local_port = local_port
- if AdbProxy._SERVER_LOCAL_PORT:
+ if self._server_local_port:
adb_cmd.append("-P %d" % local_port)
self.adb_str = " ".join(adb_cmd)
self._ssh_connection = ssh_connection
@@ -213,7 +212,7 @@
# 2) Setup forwarding between that remote port and the requested
# device port
remote_port = self._ssh_connection.find_free_port()
- self._ssh_connection.create_ssh_tunnel(
+ local_port = self._ssh_connection.create_ssh_tunnel(
remote_port, local_port=host_port)
host_port = remote_port
output = self.forward("tcp:%d tcp:%d" % (host_port, device_port))
@@ -223,7 +222,7 @@
if output:
return int(output)
else:
- return host_port
+ return local_port
def remove_tcp_forward(self, host_port):
"""Stop tcp forwarding a port from localhost to this android device.
diff --git a/acts/framework/acts/controllers/android_device.py b/acts/framework/acts/controllers/android_device.py
index b245f40..d41a21c 100755
--- a/acts/framework/acts/controllers/android_device.py
+++ b/acts/framework/acts/controllers/android_device.py
@@ -64,15 +64,17 @@
RELEASE_ID_REGEXES = [re.compile(r'\w+\.\d+\.\d+'), re.compile(r'N\w+')]
+class AndroidDeviceConfigError(Exception):
+ """Raised when AndroidDevice configs are malformatted."""
+
+
class AndroidDeviceError(error.ActsError):
- """Raised when there is an error in AndroidDevice
- """
- pass
+ """Raised when there is an error in AndroidDevice."""
class DoesNotExistError(AndroidDeviceError):
- """Raised when something that does not exist is referenced.
- """
+ """Raised when something that does not exist is referenced."""
+
def create(configs):
"""Creates AndroidDevice controller objects.
@@ -85,11 +87,11 @@
A list of AndroidDevice objects.
"""
if not configs:
- raise AndroidDeviceError(ANDROID_DEVICE_EMPTY_CONFIG_MSG)
+ raise AndroidDeviceConfigError(ANDROID_DEVICE_EMPTY_CONFIG_MSG)
elif configs == ANDROID_DEVICE_PICK_ALL_TOKEN:
ads = get_all_instances()
elif not isinstance(configs, list):
- raise AndroidDeviceError(ANDROID_DEVICE_NOT_LIST_CONFIG_MSG)
+ raise AndroidDeviceConfigError(ANDROID_DEVICE_NOT_LIST_CONFIG_MSG)
elif isinstance(configs[0], str):
# Configs is a list of serials.
ads = get_instances(configs)
@@ -102,7 +104,8 @@
for ad in ads:
if not ad.is_connected():
raise DoesNotExistError(("Android device %s is specified in config"
- " but is not attached.") % ad.serial)
+ " but is not attached.") % ad.serial,
+ serial=ad.serial)
_start_services_on_ads(ads)
return ads
@@ -162,17 +165,19 @@
for ad in ads:
running_ads.append(ad)
if not ad.ensure_screen_on():
- ad.log.error("User window cannot come up")
+ ad.log.error('User window cannot come up')
destroy(running_ads)
- raise AndroidDeviceError("User window cannot come up")
+ raise AndroidDeviceError('User window cannot come up',
+ serial=ad.serial)
if not ad.skip_sl4a and not ad.is_sl4a_installed():
- ad.log.error("sl4a.apk is not installed")
+ ad.log.error('sl4a.apk is not installed')
destroy(running_ads)
- raise AndroidDeviceError("The required sl4a.apk is not installed")
+ raise AndroidDeviceError('The required sl4a.apk is not installed',
+ serial=ad.serial)
try:
ad.start_services(skip_sl4a=ad.skip_sl4a)
except:
- ad.log.exception("Failed to start some services, abort!")
+ ad.log.exception('Failed to start some services, abort!')
destroy(running_ads)
raise
@@ -243,12 +248,12 @@
results = []
for c in configs:
try:
- serial = c.pop("serial")
+ serial = c.pop('serial')
except KeyError:
- raise AndroidDeviceError(
+ raise AndroidDeviceConfigError(
"Required value 'serial' is missing in AndroidDevice config %s."
% c)
- ssh_config = c.pop("ssh_config", None)
+ ssh_config = c.pop('ssh_config', None)
ssh_connection = None
if ssh_config is not None:
ssh_settings = settings.from_config(ssh_config)
@@ -324,14 +329,14 @@
filtered = filter_devices(ads, _get_device_filter)
if not filtered:
- raise AndroidDeviceError(
+ raise ValueError(
"Could not find a target device that matches condition: %s." %
kwargs)
elif len(filtered) == 1:
return filtered[0]
else:
serials = [ad.serial for ad in filtered]
- raise AndroidDeviceError("More than one device matched: %s" % serials)
+ raise ValueError("More than one device matched: %s" % serials)
def take_bug_reports(ads, test_name, begin_time):
@@ -384,6 +389,7 @@
class AndroidBugReportEvent(AndroidEvent):
"""The event posted when an AndroidDevice captures a bugreport."""
+
def __init__(self, android_device, bugreport_dir):
super().__init__(android_device)
self.bugreport_dir = bugreport_dir
@@ -436,6 +442,9 @@
self.data_accounting = collections.defaultdict(int)
self._sl4a_manager = sl4a_manager.Sl4aManager(self.adb)
self.last_logcat_timestamp = None
+ # Device info cache.
+ self._user_added_device_info = {}
+ self._sdk_api_level = None
def clean_up(self):
"""Cleans up the AndroidDevice object and releases any resources it
@@ -524,6 +533,31 @@
return info
@property
+ def device_info(self):
+ """Information to be pulled into controller info.
+
+ The latest serial, model, and build_info are included. Additional info
+ can be added via `add_device_info`.
+ """
+ info = {
+ 'serial': self.serial,
+ 'model': self.model,
+ 'build_info': self.build_info,
+ 'user_added_info': self._user_added_device_info
+ }
+ return info
+
+ def sdk_api_level(self):
+ if self._sdk_api_level is not None:
+ return self._sdk_api_level
+ if self.is_bootloader:
+ self.log.error('Device is in fastboot mode. Cannot get build info.')
+ return
+ self._sdk_api_level = int(
+ self.adb.shell('getprop ro.build.version.sdk'))
+ return self._sdk_api_level
+
+ @property
def is_bootloader(self):
"""True if the device is in bootloader mode.
"""
@@ -602,6 +636,10 @@
return False
return False
+ def update_sdk_api_level(self):
+ self._sdk_api_level = None
+ self.sdk_api_level()
+
def load_config(self, config):
"""Add attributes to the AndroidDevice object based on json config.
@@ -617,7 +655,7 @@
if hasattr(self, k) and k != "skip_sl4a":
raise AndroidDeviceError(
"Attempting to set existing attribute %s on %s" %
- (k, self.serial))
+ (k, self.serial), serial=self.serial)
setattr(self, k, v)
def root_adb(self):
@@ -771,9 +809,10 @@
logcat is no longer running.
"""
if self.is_adb_logcat_on:
- raise AndroidDeviceError(("Android device {} already has an adb "
- "logcat thread going on. Cannot start "
- "another one.").format(self.serial))
+ self.log.warn(
+ 'Android device %s already has a running adb logcat thread. '
+ % self.serial)
+ return
# Disable adb log spam filter. Have to stop and clear settings first
# because 'start' doesn't support --clear option before Android N.
self.adb.shell("logpersist.stop --clear")
@@ -795,7 +834,7 @@
else:
begin_at = '-T 1'
- # TODO(markdr): Pull 'adb -s %SERIAL' from the AdbProxy object.
+ # TODO(markdr): Pull 'adb -s %SERIAL' from the AdbProxy object.
cmd = "adb -s {} logcat {} -v year {} >> {}".format(
self.serial, begin_at, extra_params, self.adb_logcat_file_path)
self.adb_logcat_process = utils.start_standing_subprocess(cmd)
@@ -804,9 +843,10 @@
"""Stops the adb logcat collection subprocess.
"""
if not self.is_adb_logcat_on:
- raise AndroidDeviceError(
- "Android device %s does not have an ongoing adb logcat "
- "collection." % self.serial)
+ self.log.warn(
+ 'Android device %s does not have an ongoing adb logcat collection.'
+ % self.serial)
+ return
# Set the last timestamp to the current timestamp. This may cause
# a race condition that allows the same line to be logged twice,
# but it does not pose a problem for our logging purposes.
@@ -936,7 +976,8 @@
out = self.adb.shell("bugreportz", timeout=BUG_REPORT_TIMEOUT)
if not out.startswith("OK"):
raise AndroidDeviceError(
- "Failed to take bugreport on %s: %s" % (self.serial, out))
+ 'Failed to take bugreport on %s: %s' % (self.serial, out),
+ serial=self.serial)
br_out_path = out.split(':')[1].strip().split()[0]
self.adb.pull("%s %s" % (br_out_path, full_out_path))
else:
@@ -1145,7 +1186,8 @@
pass
time.sleep(5)
raise AndroidDeviceError(
- "Device %s booting process timed out." % self.serial)
+ 'Device %s booting process timed out.' % self.serial,
+ serial=self.serial)
def reboot(self, stop_at_lock_screen=False):
"""Reboots the device.
@@ -1167,6 +1209,22 @@
self.stop_services()
self.log.info("Rebooting")
self.adb.reboot()
+
+ timeout_start = time.time()
+ timeout = 2 * 60
+ # b/111791239: Newer versions of android sometimes return early after
+ # `adb reboot` is called. This means subsequent calls may make it to
+ # the device before the reboot goes through, return false positives for
+ # getprops such as sys.boot_completed.
+ while time.time() < timeout_start + timeout:
+ try:
+ self.adb.get_state()
+ time.sleep(.1)
+ except adb.AdbError:
+ # get_state will raise an error if the device is not found. We
+ # want the device to be missing to prove the device has kicked
+ # off the reboot.
+ break
self.wait_for_boot_completion()
self.root_adb()
if stop_at_lock_screen:
@@ -1177,7 +1235,8 @@
return
if not self.ensure_screen_on():
self.log.error("User window cannot come up")
- raise AndroidDeviceError("User window cannot come up")
+ raise AndroidDeviceError("User window cannot come up",
+ serial=self.serial)
self.start_services(self.skip_sl4a)
def restart_runtime(self):
@@ -1189,12 +1248,16 @@
self.stop_services()
self.log.info("Restarting android runtime")
self.adb.shell("stop")
+ # Reset the boot completed flag before we restart the framework
+ # to correctly detect when the framework has fully come up.
+ self.adb.shell("setprop sys.boot_completed 0")
self.adb.shell("start")
self.wait_for_boot_completion()
self.root_adb()
if not self.ensure_screen_on():
self.log.error("User window cannot come up")
- raise AndroidDeviceError("User window cannot come up")
+ raise AndroidDeviceError('User window cannot come up',
+ serial=self.serial)
self.start_services(self.skip_sl4a)
def search_logcat(self, matching_string, begin_time=None):
@@ -1369,7 +1432,7 @@
self.log.info("Device is in CrpytKeeper window")
return True
if "StatusBar" in current_window and (
- (not current_app) or "FallbackHome" in current_app):
+ (not current_app) or "FallbackHome" in current_app):
self.log.info("Device is locked")
return True
return False
diff --git a/acts/framework/acts/controllers/android_lib/__init__.py b/acts/framework/acts/controllers/android_lib/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/acts/framework/acts/controllers/android_lib/__init__.py
diff --git a/acts/framework/acts/controllers/android_lib/android_api.py b/acts/framework/acts/controllers/android_lib/android_api.py
new file mode 100644
index 0000000..634e44b
--- /dev/null
+++ b/acts/framework/acts/controllers/android_lib/android_api.py
@@ -0,0 +1,66 @@
+#!/usr/bin/env python3
+#
+# Copyright 2018 - The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+import enum
+import logging
+import sys
+
+from acts.controllers.android_device import AndroidDevice
+from acts.libs import version_selector
+
+
+class AndroidApi:
+ OLDEST = 0
+ MINIMUM = 0
+ L = 21
+ L_MR1 = 22
+ M = 23
+ N = 24
+ N_MR1 = 25
+ O = 26
+ O_MR1 = 27
+ P = 28
+ LATEST = sys.maxsize
+ MAX = sys.maxsize
+
+
+def android_api(min_api=AndroidApi.OLDEST,
+ max_api=AndroidApi.LATEST):
+ """Decorates a function to only be called for the given API range.
+
+ Only gets called if the AndroidDevice in the args is within the specified
+ API range. Otherwise, a different function may be called instead. If the
+ API level is out of range, and no other function handles that API level, an
+ error is raised instead.
+
+ Note: In Python3.5 and below, the order of kwargs is not preserved. If your
+ function contains multiple AndroidDevices within the kwargs, and no
+ AndroidDevices within args, you are NOT guaranteed the first
+ AndroidDevice is the same one chosen each time the function runs. Due
+ to this, we do not check for AndroidDevices in kwargs.
+
+ Args:
+ min_api: The minimum API level. Can be an int or an AndroidApi value.
+ max_api: The maximum API level. Can be an int or an AndroidApi value.
+ """
+ def get_api_level(*args, **_):
+ for arg in args:
+ if isinstance(arg, AndroidDevice):
+ return arg.sdk_api_level()
+ logging.getLogger().error('An AndroidDevice was not found in the given '
+ 'arguments.')
+ return None
+
+ return version_selector.set_version(get_api_level, min_api, max_api)
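
A hypothetical helper gated by the decorator above (the function name and body are illustrative only; per the docstring, it runs only when the AndroidDevice argument reports an API level inside the declared range):

    from acts.controllers.android_lib.android_api import AndroidApi, android_api

    @android_api(min_api=AndroidApi.O, max_api=AndroidApi.LATEST)
    def toggle_wifi(ad, enabled):
        # Only invoked when ad.sdk_api_level() is within [O, LATEST]; older
        # releases would be covered by separately decorated variants.
        ad.adb.shell('svc wifi %s' % ('enable' if enabled else 'disable'))
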
diff --git a/acts/framework/acts/controllers/android_lib/logcat.py b/acts/framework/acts/controllers/android_lib/logcat.py
new file mode 100644
index 0000000..ad891b1
--- /dev/null
+++ b/acts/framework/acts/controllers/android_lib/logcat.py
@@ -0,0 +1,102 @@
+#!/usr/bin/env python3
+#
+# Copyright 2018 - The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import logging
+import re
+
+from acts.libs.proc.process import Process
+from acts.libs.logging import log_stream
+from acts.libs.logging.log_stream import LogStyles
+
+TIMESTAMP_REGEX = r'((?:\d+-)?\d+-\d+ \d+:\d+:\d+.\d+)'
+
+
+class TimestampTracker(object):
+ """Stores the last timestamp outputted by the Logcat process."""
+
+ def __init__(self):
+ self._last_timestamp = None
+
+ @property
+ def last_timestamp(self):
+ return self._last_timestamp
+
+ def read_output(self, message):
+ """Reads the message and parses all timestamps from it."""
+ all_timestamps = re.findall(TIMESTAMP_REGEX, message)
+ if len(all_timestamps) > 0:
+ self._last_timestamp = all_timestamps[0]
+
+
+def _get_log_level(message):
+ """Returns the log level for the given message."""
+ if message.startswith('-') or len(message) < 37:
+ return logging.ERROR
+ else:
+ log_level = message[36]
+ if log_level in ('V', 'D'):
+ return logging.DEBUG
+ elif log_level == 'I':
+ return logging.INFO
+ elif log_level == 'W':
+ return logging.WARNING
+ elif log_level == 'E':
+ return logging.ERROR
+ return logging.NOTSET
+
+
+def _log_line_func(log, timestamp_tracker):
+ """Returns a lambda that logs a message to the given logger."""
+
+ def log_line(message):
+ timestamp_tracker.read_output(message)
+ log.log(_get_log_level(message), message)
+
+ return log_line
+
+
+def _on_retry(serial, extra_params, timestamp_tracker):
+ def on_retry(_):
+ begin_at = '"%s"' % timestamp_tracker.last_timestamp or 1
+ additional_params = extra_params or ''
+
+ return 'adb -s %s logcat -T %s -v year %s' % (
+ serial, begin_at, additional_params)
+
+ return on_retry
+
+
+def create_logcat_keepalive_process(serial, extra_params=''):
+ """Creates a Logcat Process that automatically attempts to reconnect.
+
+ Args:
+ serial: The serial of the device to read the logcat of.
+ extra_params: Any additional params to be added to the logcat cmdline.
+
+ Returns:
+ A acts.libs.proc.process.Process object.
+ """
+ logger = log_stream.create_logger('AndroidDevice%s' % serial,
+ LogStyles.LOG_DEBUG |
+ LogStyles.TESTCASE_LOG |
+ LogStyles.MONOLITH_LOG)
+ process = Process('adb -s %s logcat -T 1 -b all -v year %s' % (
+ serial, extra_params), shell=True)
+ timestamp_tracker = TimestampTracker()
+ process.set_on_output_callback(_log_line_func(logger, timestamp_tracker))
+ process.set_on_terminate_callback(
+ _on_retry(serial, extra_params, timestamp_tracker))
+ return process
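
Usage sketch for the keepalive logcat process (illustrative; the start()/stop() methods on acts.libs.proc.process.Process are assumed):

    from acts.controllers.android_lib import logcat

    proc = logcat.create_logcat_keepalive_process('0123456789ABCDEF')
    proc.start()
    # ... run the test; if adb drops, the on_terminate callback rebuilds the
    # command with '-T <last timestamp>' so logging resumes where it left off.
    proc.stop()
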
diff --git a/acts/framework/acts/controllers/anritsu_lib/md8475a.py b/acts/framework/acts/controllers/anritsu_lib/md8475a.py
index 8e5865c..6d0fb7a 100644
--- a/acts/framework/acts/controllers/anritsu_lib/md8475a.py
+++ b/acts/framework/acts/controllers/anritsu_lib/md8475a.py
@@ -130,11 +130,17 @@
LTE_BANDWIDTH_15MHz = "15MHz"
LTE_BANDWIDTH_20MHz = "20MHz"
+class BtsGprsMode(Enum):
+ ''' Values for Gprs Modes '''
+ NO_GPRS = "NO_GPRS"
+ GPRS = "GPRS"
+ EGPRS = "EGPRS"
class BtsPacketRate(Enum):
''' Values for Cell Packet rate '''
LTE_MANUAL = "MANUAL"
LTE_BESTEFFORT = "BESTEFFORT"
+ WCDMA_DL384K_UL64K = "DL384K_UL64K"
WCDMA_DLHSAUTO_REL7_UL384K = "DLHSAUTO_REL7_UL384K"
WCDMA_DL18_0M_UL384K = "DL18_0M_UL384K"
WCDMA_DL21_6M_UL384K = "DL21_6M_UL384K"
@@ -162,7 +168,7 @@
WCDMA_DL36_0M_UL5_76M = "DL36_0M_UL5_76M"
WCDMA_DL43_2M_UL1_46M = "DL43_2M_UL1_46M"
WCDMA_DL43_2M_UL2_0M = "DL43_2M_UL2_0M"
- WCDMA_DL43_2M_UL5_76M = "L43_2M_UL5_76M"
+ WCDMA_DL43_2M_UL5_76M = "DL43_2M_UL5_76M"
class BtsPacketWindowSize(Enum):
@@ -519,7 +525,7 @@
Returns:
query response
"""
- self.log.info("--> {}".format(query))
+ self.log.debug("--> {}".format(query))
querytoSend = (query + TERMINATOR).encode('utf-8')
self._sock.settimeout(sock_timeout)
try:
@@ -527,7 +533,7 @@
result = self._sock.recv(ANRITSU_SOCKET_BUFFER_SIZE).rstrip(
TERMINATOR.encode('utf-8'))
response = result.decode('utf-8')
- self.log.info('<-- {}'.format(response))
+ self.log.debug('<-- {}'.format(response))
return response
except socket.timeout:
raise AnritsuError("Timeout: Response from Anritsu")
@@ -543,7 +549,7 @@
Returns:
None
"""
- self.log.info("--> {}".format(command))
+ self.log.debug("--> {}".format(command))
if self._error_reporting:
cmdToSend = (command + ";ERROR?" + TERMINATOR).encode('utf-8')
self._sock.settimeout(sock_timeout)
@@ -2995,6 +3001,84 @@
cmd = "CBCHPARAMSETUP {},{}".format(enable.value, self._bts_number)
self._anritsu.send_command(cmd)
+ @property
+ def gsm_gprs_mode(self):
+ """ Gets the GSM connection mode
+
+ Args:
+ None
+
+ Returns:
+ A string indicating if connection is EGPRS, GPRS or non-GPRS
+ """
+ cmd = "GPRS? " + self._bts_number
+ return self._anritsu.send_query(cmd)
+
+ @gsm_gprs_mode.setter
+ def gsm_gprs_mode(self, mode):
+ """ Sets the GPRS connection mode
+
+ Args:
+ mode: GPRS connection mode
+
+ Returns:
+ None
+ """
+
+ if not isinstance(mode, BtsGprsMode):
+ raise ValueError(' The parameter should be of type "BtsGprsMode"')
+ cmd = "GPRS {},{}".format(mode.value, self._bts_number)
+
+ self._anritsu.send_command(cmd)
+
+ @property
+ def gsm_slots(self):
+ """ Gets the GSM slot assignment
+
+ Args:
+ None
+
+ Returns:
+ A tuple indicating DL and UL slots.
+ """
+
+ cmd = "MLTSLTCFG? " + self._bts_number
+
+ response = self._anritsu.send_query(cmd)
+ split_response = response.split(',')
+
+ if not len(split_response) == 2:
+ raise ValueError(response)
+
+ return split_response[0], split_response[1]
+
+ @gsm_slots.setter
+ def gsm_slots(self, slots):
+ """ Sets the number of downlink / uplink slots for GSM
+
+ Args:
+ slots: a tuple containing two ints indicating (DL,UL)
+
+ Returns:
+ None
+ """
+
+ try:
+ dl, ul = slots
+ dl = int(dl)
+ ul = int(ul)
+ except (ValueError, TypeError):
+ raise ValueError('The parameter slot has to be a tuple '
+ 'containing two ints indicating (dl, ul) slots.')
+
+ # Validate
+ if dl < 1 or ul < 1 or dl + ul > 5:
+ raise ValueError('DL and UL slots have to be >= 1 and the sum <= 5.')
+
+ cmd = "MLTSLTCFG {},{},{}".format(dl, ul, self._bts_number)
+
+ self._anritsu.send_command(cmd)
+
+
class _VirtualPhone(object):
'''Class to interact with virtual phone supported by MD8475 '''
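
Illustrative use of the new GPRS controls added to md8475a.py above (assumes bts is a base-station handle obtained from an MD8475A session):

    from acts.controllers.anritsu_lib.md8475a import BtsGprsMode

    bts.gsm_gprs_mode = BtsGprsMode.EGPRS  # sends 'GPRS EGPRS,<bts#>'
    bts.gsm_slots = (3, 2)                 # 3 DL / 2 UL slots; dl + ul must be <= 5
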
diff --git a/acts/framework/acts/controllers/ap_lib/hostapd_ap_preset.py b/acts/framework/acts/controllers/ap_lib/hostapd_ap_preset.py
index 845f3d3..2df7ece 100644
--- a/acts/framework/acts/controllers/ap_lib/hostapd_ap_preset.py
+++ b/acts/framework/acts/controllers/ap_lib/hostapd_ap_preset.py
@@ -22,6 +22,7 @@
frequency=None,
security=None,
ssid=None,
+ hidden=False,
vht_bandwidth=80,
bss_settings=[],
iface_wlan_2g=hostapd_constants.WLAN0_STRING,
@@ -82,6 +83,7 @@
]
config = hostapd_config.HostapdConfig(
ssid=ssid,
+ hidden=hidden,
security=security,
interface=interface,
mode=mode,
@@ -129,6 +131,7 @@
]
config = hostapd_config.HostapdConfig(
ssid=ssid,
+ hidden=hidden,
security=security,
interface=interface,
mode=mode,
diff --git a/acts/framework/acts/controllers/ap_lib/hostapd_constants.py b/acts/framework/acts/controllers/ap_lib/hostapd_constants.py
index b953a9e..208aaf9 100755
--- a/acts/framework/acts/controllers/ap_lib/hostapd_constants.py
+++ b/acts/framework/acts/controllers/ap_lib/hostapd_constants.py
@@ -18,6 +18,7 @@
BAND_2G = '2g'
BAND_5G = '5g'
+WEP = 0
WPA1 = 1
WPA2 = 2
MIXED = 3
@@ -35,6 +36,9 @@
WLAN1_STRING = 'wlan1'
WLAN2_STRING = 'wlan2'
WLAN3_STRING = 'wlan3'
+WEP_STRING = 'wep'
+WEP_DEFAULT_KEY = 0
+WEP_HEX_LENGTH = [10, 26, 32, 58]
AP_DEFAULT_CHANNEL_2G = 6
AP_DEFAULT_CHANNEL_5G = 36
AP_DEFAULT_MAX_SSIDS_2G = 8
diff --git a/acts/framework/acts/controllers/ap_lib/hostapd_security.py b/acts/framework/acts/controllers/ap_lib/hostapd_security.py
index 9733e99..6e0ecce 100644
--- a/acts/framework/acts/controllers/ap_lib/hostapd_security.py
+++ b/acts/framework/acts/controllers/ap_lib/hostapd_security.py
@@ -13,6 +13,7 @@
# limitations under the License.
import collections
+import string
from acts.controllers.ap_lib import hostapd_constants
@@ -28,13 +29,14 @@
wpa_cipher=hostapd_constants.WPA_DEFAULT_CIPHER,
wpa2_cipher=hostapd_constants.WPA2_DEFAULT_CIPER,
wpa_group_rekey=hostapd_constants.WPA_GROUP_KEY_ROTATION_TIME,
- wpa_strict_rekey=hostapd_constants.WPA_STRICT_REKEY_DEFAULT):
+ wpa_strict_rekey=hostapd_constants.WPA_STRICT_REKEY_DEFAULT,
+ wep_default_key=hostapd_constants.WEP_DEFAULT_KEY):
"""Gather all of the security settings for WPA-PSK. This could be
expanded later.
Args:
security_mode: Type of security modes.
- Options: wpa, wpa2, wpa/wpa2
+ Options: wep, wpa, wpa2, wpa/wpa2
password: The PSK or passphrase for the security mode.
wpa_cipher: The cipher to be used for wpa.
Options: TKIP, CCMP, TKIP CCMP
@@ -50,51 +52,68 @@
leaves the network or not.
Options: True, False
Default: True
+ wep_default_key: The wep key number to use when transmitting.
"""
self.wpa_cipher = wpa_cipher
self.wpa2_cipher = wpa2_cipher
self.wpa_group_rekey = wpa_group_rekey
self.wpa_strict_rekey = wpa_strict_rekey
+ self.wep_default_key = wep_default_key
if security_mode == hostapd_constants.WPA_STRING:
security_mode = hostapd_constants.WPA1
elif security_mode == hostapd_constants.WPA2_STRING:
security_mode = hostapd_constants.WPA2
elif security_mode == hostapd_constants.WPA_MIXED_STRING:
security_mode = hostapd_constants.MIXED
+ elif security_mode == hostapd_constants.WEP_STRING:
+ security_mode = hostapd_constants.WEP
else:
security_mode = None
self.security_mode = security_mode
if password:
- if len(password) < hostapd_constants.MIN_WPA_PSK_LENGTH or len(
- password) > hostapd_constants.MAX_WPA_PSK_LENGTH:
- raise ValueError(
- 'Password must be a minumum of %s characters and a maximum of %s'
- % (hostapd_constants.MIN_WPA_PSK_LENGTH,
- hostapd_constants.MAX_WPA_PSK_LENGTH))
+ if security_mode == hostapd_constants.WEP:
+ if len(password) in hostapd_constants.WEP_HEX_LENGTH and all(
+ c in string.hexdigits for c in password):
+ self.password = password
+ else:
+ raise ValueError(
+ 'WEP key must be a hex string of %s characters'
+ % hostapd_constants.WEP_HEX_LENGTH)
else:
- self.password = password
+ if len(password) < hostapd_constants.MIN_WPA_PSK_LENGTH or len(
+ password) > hostapd_constants.MAX_WPA_PSK_LENGTH:
+ raise ValueError(
+ 'Password must be a minimum of %s characters and a maximum of %s'
+ % (hostapd_constants.MIN_WPA_PSK_LENGTH,
+ hostapd_constants.MAX_WPA_PSK_LENGTH))
+ else:
+ self.password = password
def generate_dict(self):
"""Returns: an ordered dictionary of settings"""
settings = collections.OrderedDict()
- if self.security_mode:
- settings['wpa'] = self.security_mode
- if len(self.password) == hostapd_constants.MAX_WPA_PSK_LENGTH:
- settings['wpa_psk'] = self.password
+ if self.security_mode is not None:
+ if self.security_mode == hostapd_constants.WEP:
+ settings['wep_default_key'] = self.wep_default_key
+ settings['wep_key' + str(self.wep_default_key)] = self.password
else:
- settings['wpa_passphrase'] = self.password
+ settings['wpa'] = self.security_mode
+ if len(self.password) == hostapd_constants.MAX_WPA_PSK_LENGTH:
+ settings['wpa_psk'] = self.password
+ else:
+ settings['wpa_passphrase'] = self.password
- if self.security_mode == hostapd_constants.MIXED:
- settings['wpa_pairwise'] = self.wpa_cipher
- settings['rsn_pairwise'] = self.wpa2_cipher
- elif self.security_mode == hostapd_constants.WPA1:
- settings['wpa_pairwise'] = self.wpa_cipher
- elif self.security_mode == hostapd_constants.WPA2:
- settings['rsn_pairwise'] = self.wpa2_cipher
+ if self.security_mode == hostapd_constants.MIXED:
+ settings['wpa_pairwise'] = self.wpa_cipher
+ settings['rsn_pairwise'] = self.wpa2_cipher
+ elif self.security_mode == hostapd_constants.WPA1:
+ settings['wpa_pairwise'] = self.wpa_cipher
+ elif self.security_mode == hostapd_constants.WPA2:
+ settings['rsn_pairwise'] = self.wpa2_cipher
- if self.wpa_group_rekey:
- settings['wpa_group_rekey'] = self.wpa_group_rekey
- if self.wpa_strict_rekey:
- settings[
- 'wpa_strict_rekey'] = hostapd_constants.WPA_STRICT_REKEY
+ if self.wpa_group_rekey:
+ settings['wpa_group_rekey'] = self.wpa_group_rekey
+ if self.wpa_strict_rekey:
+ settings[
+ 'wpa_strict_rekey'] = hostapd_constants.WPA_STRICT_REKEY
return settings
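
Sketch of the new WEP path (assuming the class modified here is hostapd_security.Security); a 10-character hex key is accepted and emitted as wep_key0:

    from acts.controllers.ap_lib import hostapd_security

    wep = hostapd_security.Security(security_mode='wep', password='0123456789')
    wep.generate_dict()  # -> {'wep_default_key': 0, 'wep_key0': '0123456789'}
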
diff --git a/acts/framework/acts/controllers/attenuator_lib/_tnhelper.py b/acts/framework/acts/controllers/attenuator_lib/_tnhelper.py
index 6dbfd49..d03cf19 100644
--- a/acts/framework/acts/controllers/attenuator_lib/_tnhelper.py
+++ b/acts/framework/acts/controllers/attenuator_lib/_tnhelper.py
@@ -19,6 +19,7 @@
User code shouldn't need to directly access this class.
"""
+import logging
import telnetlib
from acts.controllers import attenuator
@@ -44,7 +45,7 @@
def open(self, host, port=23):
if self._tn:
self._tn.close()
-
+ logging.debug("Attenuator IP = %s" % host)
self._tn = telnetlib.Telnet()
self._tn.open(host, port, 10)
diff --git a/acts/framework/acts/controllers/fuchsia_device.py b/acts/framework/acts/controllers/fuchsia_device.py
index 0ea6a1b..576f3ed 100644
--- a/acts/framework/acts/controllers/fuchsia_device.py
+++ b/acts/framework/acts/controllers/fuchsia_device.py
@@ -35,7 +35,8 @@
from acts import tracelogger
from acts import utils
-from acts.controllers.fuchsia_lib.ble_lib import FuchsiaBleLib
+from acts.controllers.fuchsia_lib.bt.ble_lib import FuchsiaBleLib
+from acts.controllers.fuchsia_lib.bt.bta_lib import FuchsiaBtaLib
ACTS_CONTROLLER_CONFIG_NAME = "FuchsiaDevice"
ACTS_CONTROLLER_REFERENCE_NAME = "fuchsia_devices"
@@ -107,6 +108,11 @@
"""
def __init__(self, ip="", port=80):
+ """
+ Args:
+ ip: string, Ip address of fuchsia device.
+ port: int, Port number of connection
+ """
log_path_base = getattr(logging, "log_path", "/tmp/logs")
self.log_path = os.path.join(log_path_base, "FuchsiaDevice%s" % ip)
self.log = tracelogger.TraceLogger(
@@ -115,24 +121,36 @@
self.ip = ip
self.log = logging.getLogger()
self.port = port
+
self.address = "http://{}:{}".format(ip, self.port)
self.init_address = self.address + "/init"
self.cleanup_address = self.address + "/cleanup"
- # TODO(aniramakri): Come up with better client numbering system
- self.client_id = "FTest" + str(random.randint(0, 1000000))
+ self.print_address = self.address + "/print_clients"
+
+ # TODO(): Come up with better client numbering system
+ self.client_id = "FuchsiaClient" + str(random.randint(0, 1000000))
self.test_counter = 0
+
+ # Grab commands from FuchsiaBleLib
setattr(self, "ble_lib",
FuchsiaBleLib(self.address, self.test_counter, self.client_id))
- self.ble_lib.bleStopBleAdvertising()
+ # Grab commands from FuchsiaBtaLib
+ setattr(self, "bta_lib",
+ FuchsiaBtaLib(self.address, self.test_counter, self.client_id))
+
#Init server
self.init_server_connection()
- # The id of a command is: client_id.test_id
def build_id(self, test_id):
+ """Concatenates client_id and test_id to form a command_id
+
+ Args:
+ test_id: string, unique identifier of test command
+ """
return self.client_id + "." + str(test_id)
def init_server_connection(self):
- ################ #Initialize connection #####################
+ """Initializes HTTP connection with SL4F server."""
self.log.debug("Initialziing server connection")
init_data = json.dumps({
"jsonrpc": "2.0",
@@ -145,6 +163,24 @@
r = requests.get(url=self.init_address, data=init_data)
self.test_counter += 1
+ def print_clients(self):
+ """Gets connected clients from SL4F server"""
+ self.log.debug("Request to print clients")
+ print_id = self.build_id(self.test_counter)
+ print_args = {}
+ print_method = "sl4f.sl4f_print_clients"
+ data = json.dumps({
+ "jsonrpc": "2.0",
+ "id": print_id,
+ "method": print_method,
+ "params": print_args
+ })
+
+ r = requests.get(url=self.print_address, data=data).json()
+ self.test_counter += 1
+
+ return r
+
def clean_up(self):
"""Cleans up the FuchsiaDevice object and releases any resources it
claimed.
@@ -158,10 +194,12 @@
"method": cleanup_method,
"params": cleanup_args
})
- r = requests.get(url=self.cleanup_address, data=data)
+
+ r = requests.get(url=self.cleanup_address, data=data).json()
self.test_counter += 1
- self.log.debug("Cleaned up with status: ", r.json())
+ self.log.debug("Cleaned up with status: ", r)
+ return r
def start_services(self, skip_sl4f=False, skip_setup_wizard=True):
"""Starts long running services on the Fuchsia device.
diff --git a/acts/framework/acts/controllers/fuchsia_lib/base_lib.py b/acts/framework/acts/controllers/fuchsia_lib/base_lib.py
new file mode 100644
index 0000000..5997ebb
--- /dev/null
+++ b/acts/framework/acts/controllers/fuchsia_lib/base_lib.py
@@ -0,0 +1,60 @@
+#!/usr/bin/env python3
+#
+# Copyright 2018 - The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import collections
+import json
+import logging
+import math
+import os
+import random
+import re
+import requests
+import socket
+import time
+
+
+class BaseLib():
+ def __init__(self, addr, tc, client_id):
+ self.address = addr
+ self.test_counter = tc
+ self.client_id = client_id
+
+ def build_id(self, test_id):
+ """Concatenates client_id and test_id to form a command_id.
+
+ Args:
+ test_id: string, unique identifier of test command.
+ """
+ return self.client_id + "." + str(test_id)
+
+ def send_command(self, test_id, test_cmd, test_args):
+ """Builds and sends a JSON command to SL4F server.
+
+ Args:
+ test_id: string, unique identifier of test command.
+ test_cmd: string, sl4f method name of command.
+ test_args: dictionary, arguments required to execute test_cmd.
+
+ Returns:
+ Dictionary, Result of sl4f command executed.
+ """
+ test_data = json.dumps({
+ "jsonrpc": "2.0",
+ "id": test_id,
+ "method": test_cmd,
+ "params": test_args
+ })
+ return requests.get(url=self.address, data=test_data).json()
diff --git a/acts/framework/acts/controllers/fuchsia_lib/ble_lib.py b/acts/framework/acts/controllers/fuchsia_lib/ble_lib.py
index ad2f0fd..57f2a47 100644
--- a/acts/framework/acts/controllers/fuchsia_lib/ble_lib.py
+++ b/acts/framework/acts/controllers/fuchsia_lib/ble_lib.py
@@ -32,11 +32,25 @@
self.test_counter = tc
self.client_id = client_id
- # The id of a command is: client_id.test_id
def build_id(self, test_id):
+ """Concatenates client_id and test_id to form a command_id.
+
+ Args:
+ test_id: string, unique identifier of test command.
+ """
return self.client_id + "." + str(test_id)
def send_command(self, test_id, test_cmd, test_args):
+ """Builds and sends a JSON command to SL4F server.
+
+ Args:
+ test_id: string, unique identifier of test command.
+ test_cmd: string, sl4f method name of command.
+ test_args: dictionary, arguments required to execute test_cmd.
+
+ Returns:
+ Dictionary, Result of sl4f command executed.
+ """
test_data = json.dumps({
"jsonrpc": "2.0",
"id": test_id,
@@ -46,8 +60,12 @@
test_res = requests.get(url=self.address, data=test_data).json()
return test_res
- #Formulate args based on FIDL API, with key = fidl api arg name (verbatim)
def bleStopBleAdvertising(self):
+ """BleStopAdvertising command
+
+ Returns:
+ Dictionary, None if success, error string if error.
+ """
test_cmd = "bluetooth.BleStopAdvertise"
test_args = {}
test_id = self.build_id(self.test_counter)
@@ -55,7 +73,16 @@
return self.send_command(test_id, test_cmd, test_args)
- def bleStartBleAdvertising(self, interval, advertising_data):
+ def bleStartBleAdvertising(self, advertising_data, interval):
+ """BleStartAdvertising command
+
+ Args:
+ advertising_data: dictionary, advertising data required for ble advertise.
+ interval: int, Advertising interval (in ms).
+
+ Returns:
+ Dictionary, None if success, error string if error.
+ """
test_cmd = "bluetooth.BleAdvertise"
test_args = {
"advertising_data": advertising_data,
@@ -67,6 +94,16 @@
return self.send_command(test_id, test_cmd, test_args)
def bleStartBleScan(self, scan_time_ms, scan_filter, scan_count):
+ """Starts a BLE scan
+
+ Args:
+ scan_time_ms: int, Amount of time to scan for.
+ scan_filter: dictionary, Device filter for a scan.
+ scan_count: int, Number of devices to scan for before termination.
+
+ Returns:
+ Dictionary, List of devices discovered, error string if error.
+ """
test_cmd = "bluetooth.BleScan"
test_args = {
"scan_time_ms": scan_time_ms,
@@ -77,3 +114,75 @@
self.test_counter += 1
return self.send_command(test_id, test_cmd, test_args)
+
+ def bleConnectToPeripheral(self, id):
+ """Connects to a peripheral specified by id.
+
+ Args:
+ id: string, Peripheral identifier to connect to.
+
+ Returns:
+ Dictionary, List of Service Info if success, error string if error.
+ """
+ test_cmd = "bluetooth.BleConnectPeripheral"
+ test_args = {"identifier": id}
+ test_id = self.build_id(self.test_counter)
+ self.test_counter += 1
+
+ return self.send_command(test_id, test_cmd, test_args)
+
+ def bleDisconnectPeripheral(self, id):
+ """Disconnects from a peripheral specified by id.
+
+ Args:
+ id: string, Peripheral identifier to disconnect from.
+
+ Returns:
+ Dictionary, None if success, error string if error.
+ """
+ test_cmd = "bluetooth.BleDisconnectPeripheral"
+ test_args = {"identifier": id}
+ test_id = self.build_id(self.test_counter)
+ self.test_counter += 1
+
+ return self.send_command(test_id, test_cmd, test_args)
+
+ def bleListServices(self, id):
+ """Lists services of a peripheral specified by id.
+
+ Args:
+ id: string, Peripheral identifier to list services.
+
+ Returns:
+ Dictionary, List of Service Info if success, error string if error.
+ """
+ test_cmd = "bluetooth.BleListServices"
+ test_args = {"identifier": id}
+ test_id = self.build_id(self.test_counter)
+ self.test_counter += 1
+
+ return self.send_command(test_id, test_cmd, test_args)
+
+ def blePublishService(self, id_, primary, type_, service_id):
+ """Publishes services specified by input args
+
+ Args:
+ id_: string, Identifier of service.
+ primary: bool, Flag of service.
+ type_: string, Canonical 8-4-4-4-12 uuid of service.
+ service_id: string, Unique identifier to specify where to publish the service.
+
+ Returns:
+ Dictionary, None if success, error if error.
+ """
+ test_cmd = "bluetooth.BlePublishService"
+ test_args = {
+ "id": id_,
+ "primary": primary,
+ "type": type_,
+ "local_service_id": service_id
+ }
+ test_id = self.build_id(self.test_counter)
+ self.test_counter += 1
+
+ return self.send_command(test_id, test_cmd, test_args)
diff --git a/acts/framework/acts/controllers/fuchsia_lib/bt/ble_lib.py b/acts/framework/acts/controllers/fuchsia_lib/bt/ble_lib.py
new file mode 100644
index 0000000..ae83dc3
--- /dev/null
+++ b/acts/framework/acts/controllers/fuchsia_lib/bt/ble_lib.py
@@ -0,0 +1,161 @@
+#!/usr/bin/env python3
+#
+# Copyright 2018 - The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import collections
+import json
+import logging
+import math
+import os
+import random
+import re
+import requests
+import socket
+import time
+
+from acts.controllers.fuchsia_lib.base_lib import BaseLib
+
+
+class FuchsiaBleLib(BaseLib):
+ def __init__(self, addr, tc, client_id):
+ self.address = addr
+ self.test_counter = tc
+ self.client_id = client_id
+
+ def bleStopBleAdvertising(self):
+ """BleStopAdvertising command
+
+ Returns:
+ Dictionary, None if success, error string if error.
+ """
+ test_cmd = "ble_advertise_facade.BleStopAdvertise"
+ test_args = {}
+ test_id = self.build_id(self.test_counter)
+ self.test_counter += 1
+
+ return self.send_command(test_id, test_cmd, test_args)
+
+ def bleStartBleAdvertising(self, advertising_data, interval):
+ """BleStartAdvertising command
+
+ Args:
+ advertising_data: dictionary, advertising data required for ble advertise.
+ interval: int, Advertising interval (in ms).
+
+ Returns:
+ Dictionary, None if success, error string if error.
+ """
+ test_cmd = "ble_advertise_facade.BleAdvertise"
+ test_args = {
+ "advertising_data": advertising_data,
+ "interval_ms": interval
+ }
+ test_id = self.build_id(self.test_counter)
+ self.test_counter += 1
+ return self.send_command(test_id, test_cmd, test_args)
+
+ def bleStartBleScan(self, scan_time_ms, scan_filter, scan_count):
+ """Starts a BLE scan
+
+ Args:
+ scan_time_ms: int, Amount of time to scan for.
+ scan_filter: dictionary, Device filter for a scan.
+ scan_count: int, Number of devices to scan for before termination.
+
+ Returns:
+ Dictionary, List of devices discovered, error string if error.
+ """
+ test_cmd = "bluetooth.BleScan"
+ test_args = {
+ "scan_time_ms": scan_time_ms,
+ "filter": scan_filter,
+ "scan_count": scan_count
+ }
+ test_id = self.build_id(self.test_counter)
+ self.test_counter += 1
+
+ return self.send_command(test_id, test_cmd, test_args)
+
+ def bleConnectToPeripheral(self, id):
+ """Connects to a peripheral specified by id.
+
+ Args:
+ id: string, Peripheral identifier to connect to.
+
+ Returns:
+ Dictionary, List of Service Info if success, error string if error.
+ """
+ test_cmd = "bluetooth.BleConnectPeripheral"
+ test_args = {"identifier": id}
+ test_id = self.build_id(self.test_counter)
+ self.test_counter += 1
+
+ return self.send_command(test_id, test_cmd, test_args)
+
+ def bleDisconnectPeripheral(self, id):
+ """Disconnects from a peripheral specified by id.
+
+ Args:
+ id: string, Peripheral identifier to disconnect from.
+
+ Returns:
+ Dictionary, None if success, error string if error.
+ """
+ test_cmd = "bluetooth.BleDisconnectPeripheral"
+ test_args = {"identifier": id}
+ test_id = self.build_id(self.test_counter)
+ self.test_counter += 1
+
+ return self.send_command(test_id, test_cmd, test_args)
+
+ def bleListServices(self, id):
+ """Lists services of a peripheral specified by id.
+
+ Args:
+ id: string, Peripheral identifier to list services.
+
+ Returns:
+ Dictionary, List of Service Info if success, error string if error.
+ """
+ test_cmd = "bluetooth.BleListServices"
+ test_args = {"identifier": id}
+ test_id = self.build_id(self.test_counter)
+ self.test_counter += 1
+
+ return self.send_command(test_id, test_cmd, test_args)
+
+ def blePublishService(self, id_, primary, type_, service_id):
+ """Publishes services specified by input args
+
+ Args:
+ id_: string, Identifier of service.
+ primary: bool, Flag of service.
+ type_: string, Canonical 8-4-4-4-12 uuid of service.
+ service_id: string, Unique identifier to specify where to publish the service.
+
+ Returns:
+ Dictionary, None if success, error if error.
+ """
+ test_cmd = "bluetooth.BlePublishService"
+ test_args = {
+ "id": id_,
+ "primary": primary,
+ "type": type_,
+ "local_service_id": service_id
+ }
+ test_id = self.build_id(self.test_counter)
+ self.test_counter += 1
+
+ return self.send_command(test_id, test_cmd, test_args)
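
Illustrative calls through the relocated bt/ble_lib (fd is a FuchsiaDevice; the advertising_data shape is an assumption):

    adv_data = {'name': 'fs_test'}
    fd.ble_lib.bleStartBleAdvertising(adv_data, 1000)  # 1000 ms interval
    fd.ble_lib.bleStopBleAdvertising()
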
diff --git a/acts/framework/acts/controllers/fuchsia_lib/bt/bta_lib.py b/acts/framework/acts/controllers/fuchsia_lib/bt/bta_lib.py
new file mode 100644
index 0000000..8737183
--- /dev/null
+++ b/acts/framework/acts/controllers/fuchsia_lib/bt/bta_lib.py
@@ -0,0 +1,36 @@
+#!/usr/bin/env python3
+#
+# Copyright 2018 - The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import collections
+import json
+import logging
+import math
+import os
+import random
+import re
+import requests
+import socket
+import time
+
+from acts.controllers.fuchsia_lib.base_lib import BaseLib
+
+# Placeholder for Bluetooth adapter commands
+
+class FuchsiaBtaLib(BaseLib):
+ def __init__(self, addr, tc, client_id):
+ self.address = addr
+ self.test_counter = tc
+ self.client_id = client_id
diff --git a/acts/framework/acts/controllers/packet_capture.py b/acts/framework/acts/controllers/packet_capture.py
new file mode 100755
index 0000000..0528aa2
--- /dev/null
+++ b/acts/framework/acts/controllers/packet_capture.py
@@ -0,0 +1,348 @@
+#!/usr/bin/env python3
+#
+# Copyright 2018 - Google, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from acts import logger
+from acts.controllers.ap_lib.hostapd_constants import AP_DEFAULT_CHANNEL_2G
+from acts.controllers.ap_lib.hostapd_constants import AP_DEFAULT_CHANNEL_5G
+from acts.controllers.utils_lib.ssh import connection
+from acts.controllers.utils_lib.ssh import settings
+
+import os
+import threading
+import time
+
+ACTS_CONTROLLER_CONFIG_NAME = 'PacketCapture'
+ACTS_CONTROLLER_REFERENCE_NAME = 'packet_capture'
+BSS = 'BSS'
+BSSID = 'BSSID'
+FREQ = 'freq'
+FREQUENCY = 'frequency'
+LEVEL = 'level'
+MON_2G = 'mon0'
+MON_5G = 'mon1'
+BAND_IFACE = {'2G' : MON_2G, '5G': MON_5G}
+SCAN_IFACE = 'wlan2'
+SCAN_TIMEOUT = 120
+SEP = ':'
+SIGNAL = 'signal'
+SSID = 'SSID'
+
+
+def create(configs):
+ return [PacketCapture(c) for c in configs]
+
+def destroy(pcaps):
+ for pcap in pcaps:
+ pcap.close()
+
+def get_info(pcaps):
+ return [pcap.ssh_settings.hostname for pcap in pcaps]
+
+
+class PcapProperties(object):
+ """Class to maintain packet capture properties after starting tcpdump.
+
+ Attributes:
+ pid: process id of tcpdump
+ pcap_dir: tmp dir location where pcap files are saved
+ pcap_file: pcap file name
+ pcap_thread: thread used to push files to logpath
+ """
+ def __init__(self, pid, pcap_dir, pcap_file, pcap_thread):
+ """Initialize object."""
+ self.pid = pid
+ self.pcap_dir = pcap_dir
+ self.pcap_file = pcap_file
+ self.pcap_thread = pcap_thread
+
+
+class PacketCaptureError(Exception):
+ """Error related to Packet capture."""
+
+
+class PacketCapture(object):
+ """Class representing packet capturer.
+
+ An instance of this class creates and configures two monitor mode
+ interfaces, 'mon0' for 2G and 'mon1' for 5G, and one dual band interface,
+ 'wlan2', for scanning for wifi networks.
+
+ Attributes:
+ pcap: dict that specifies packet capture properties for a band.
+ tmp_dirs: list of tmp directories created for pcap files.
+ """
+ def __init__(self, configs):
+ """Initialize objects.
+
+ Args:
+ configs: config for the packet capture.
+ """
+ self.ssh_settings = settings.from_config(configs['ssh_config'])
+ self.ssh = connection.SshConnection(self.ssh_settings)
+ self.log = logger.create_logger(lambda msg: '[%s|%s] %s' % (
+ ACTS_CONTROLLER_CONFIG_NAME, self.ssh_settings.hostname, msg))
+
+ self._create_interface(MON_2G, 'monitor')
+ self._create_interface(MON_5G, 'monitor')
+ self._create_interface(SCAN_IFACE, 'managed')
+
+ self.pcap_properties = dict()
+ self._pcap_stop_lock = threading.Lock()
+ self.tmp_dirs = []
+
+ def _create_interface(self, iface, mode):
+ """Create interface of monitor/managed mode.
+
+ Create mon0/mon1 for 2G/5G monitor mode and wlan2 for managed mode.
+ """
+ self.ssh.run('iw dev %s del' % iface, ignore_status=True)
+ self.ssh.run('iw phy%s interface add %s type %s'
+ % (iface[-1], iface, mode), ignore_status=True)
+ self.ssh.run('ip link set %s up' % iface, ignore_status=True)
+ result = self.ssh.run('iw dev %s info' % iface, ignore_status=True)
+ if result.stderr or iface not in result.stdout:
+ raise PacketCaptureError('Failed to configure interface %s' % iface)
+
+ def _cleanup_interface(self, iface):
+ """Clean up monitor mode interfaces."""
+ self.ssh.run('iw dev %s del' % iface, ignore_status=True)
+ result = self.ssh.run('iw dev %s info' % iface, ignore_status=True)
+ if not result.stderr or 'No such device' not in result.stderr:
+ raise PacketCaptureError('Failed to cleanup monitor mode for %s'
+ % iface)
+
+ def _parse_scan_results(self, scan_result):
+ """Parses the scan dump output and returns list of dictionaries.
+
+ Args:
+ scan_result: scan dump output from scan on mon interface.
+
+ Returns:
+ Dictionary of found network in the scan.
+ The attributes returned are
+ a.) SSID - SSID of the network.
+ b.) LEVEL - signal level.
+ c.) FREQUENCY - WiFi band the network is on.
+ d.) BSSID - BSSID of the network.
+ """
+ scan_networks = []
+ network = {}
+ for line in scan_result.splitlines():
+ if SEP not in line:
+ continue
+ if BSS in line:
+ network[BSSID] = line.split('(')[0].split()[-1]
+ field, value = line.lstrip().rstrip().split(SEP)[0:2]
+ value = value.lstrip()
+ if SIGNAL in line:
+ network[LEVEL] = int(float(value.split()[0]))
+ elif FREQ in line:
+ network[FREQUENCY] = int(value)
+ elif SSID in line:
+ network[SSID] = value
+ scan_networks.append(network)
+ network = {}
+ return scan_networks
+
+ def _check_if_tcpdump_started(self, pcap_log):
+ """Check if tcpdump started.
+
+ This method ensures that tcpdump has started successfully.
+ We look for 'listening on' from the stdout indicating that tcpdump
+ is started.
+
+ Args:
+ pcap_log: log file that has redirected output of starting tcpdump.
+
+ Returns:
+ True/False if tcpdump is started or not.
+ """
+ curr_time = time.time()
+ timeout = 3
+ find_str = 'listening on'
+ while time.time() < curr_time + timeout:
+ result = self.ssh.run('grep "%s" %s' % (find_str, pcap_log),
+ ignore_status=True)
+ if result.stdout and find_str in result.stdout:
+ return True
+ time.sleep(1)
+ return False
+
+ def _pull_pcap(self, band, pcap_file, log_path):
+ """Pulls pcap files to test log path from onhub.
+
+ Called by start_packet_capture(). This method moves a pcap file to log
+ path once it has reached 50MB.
+
+ Args:
+ band: band whose capture is being pulled; used to detect when the capture stops.
+ pcap_file: pcap file to move.
+ log_path: log path to move the pcap file to.
+ """
+ curr_no = 0
+ while True:
+ next_no = curr_no + 1
+ curr_fno = '%02i' % curr_no
+ next_fno = '%02i' % next_no
+ curr_file = '%s%s' % (pcap_file, curr_fno)
+ next_file = '%s%s' % (pcap_file, next_fno)
+
+ result = self.ssh.run('ls %s' % next_file, ignore_status=True)
+ if not result.stderr and next_file in result.stdout:
+ self.ssh.pull_file(log_path, curr_file)
+ self.ssh.run('rm -rf %s' % curr_file, ignore_status=True)
+ curr_no += 1
+ continue
+
+ with self._pcap_stop_lock:
+ if band not in self.pcap_properties:
+ self.ssh.pull_file(log_path, curr_file)
+ break
+ time.sleep(2) # wait before looking for file again
+
+ def get_wifi_scan_results(self):
+ """Starts a wifi scan on wlan2 interface.
+
+ Returns:
+ List of dictionaries each representing a found network.
+ """
+ result = self.ssh.run('iw dev %s scan' % SCAN_IFACE)
+ if result.stderr:
+ raise PacketCaptureError('Failed to get scan dump')
+ if not result.stdout:
+ return []
+ return self._parse_scan_results(result.stdout)
+
+ def start_scan_and_find_network(self, ssid):
+ """Start a wifi scan on wlan2 interface and find network.
+
+ Args:
+ ssid: SSID of the network.
+
+ Returns:
+ True if the network is found, False otherwise.
+ """
+ curr_time = time.time()
+ while time.time() < curr_time + SCAN_TIMEOUT:
+ found_networks = self.get_wifi_scan_results()
+ for network in found_networks:
+ if network[SSID] == ssid:
+ return True
+ time.sleep(3) # sleep before next scan
+ return False
+
+ def configure_monitor_mode(self, band, channel):
+ """Configure monitor mode.
+
+ Args:
+ band: band to configure monitor mode for.
+ channel: channel to set for the interface.
+
+ Returns:
+ True if the configuration was successful, False otherwise.
+ """
+ band = band.upper()
+ if band not in BAND_IFACE:
+ self.log.error('Invalid band. Must be 2g/2G or 5g/5G')
+ return False
+
+ iface = BAND_IFACE[band]
+ self.ssh.run('iw dev %s set channel %s' %
+ (iface, channel), ignore_status=True)
+ result = self.ssh.run('iw dev %s info' % iface, ignore_status=True)
+ if result.stderr or 'channel %s' % channel not in result.stdout:
+ self.log.error("Failed to configure monitor mode for %s" % band)
+ return False
+ return True
+
+ def start_packet_capture(self, band, log_path, pcap_file):
+ """Start packet capture for band.
+
+ band = 2G starts tcpdump on 'mon0' interface.
+ band = 5G starts tcpdump on 'mon1' interface.
+
+ This method splits the capture into 50MB pcap files, up to 100 files.
+ Since the capture can grow large, each completed split file is moved
+ to log_path as soon as a new file is generated. This prevents the
+ onhub router from running out of disk space.
+
+ Args:
+ band: '2g'/'2G' or '5g'/'5G'.
+ log_path: test log path to save the pcap file.
+ pcap_file: name of the pcap file.
+
+ Returns:
+ pid: process id of the tcpdump.
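+
+ Example (an illustrative sketch; 'pcap' is the PacketCapture instance and
+ the paths are hypothetical):
+
+ >>> pid = pcap.start_packet_capture('2G', '/tmp/logs', 'connect_test')
+ >>> # ... generate traffic to capture ...
+ >>> pcap.stop_packet_capture(pid)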
+ """
+ band = band.upper()
+ if band not in BAND_IFACE.keys() or band in self.pcap_properties:
+ self.log.error("Invalid band or packet capture already running")
+ return None
+
+ pcap_dir = self.ssh.run('mktemp -d', ignore_status=True).stdout.rstrip()
+ self.tmp_dirs.append(pcap_dir)
+ pcap_file = os.path.join(pcap_dir, "%s_%s.pcap" % (pcap_file, band))
+ pcap_log = os.path.join(pcap_dir, "%s.log" % pcap_file)
+
+ cmd = 'tcpdump -i %s -W 100 -C 50 -w %s > %s 2>&1 & echo $!' % (
+ BAND_IFACE[band], pcap_file, pcap_log)
+ result = self.ssh.run(cmd, ignore_status=True)
+ if not self._check_if_tcpdump_started(pcap_log):
+ self.log.error("Failed to start packet capture")
+ return None
+
+ pcap_thread = threading.Thread(target=self._pull_pcap,
+ args=(band, pcap_file, log_path))
+ pcap_thread.start()
+
+ pid = int(result.stdout)
+ self.pcap_properties[band] = PcapProperties(
+ pid, pcap_dir, pcap_file, pcap_thread)
+ return pid
+
+ def stop_packet_capture(self, pid):
+ """Stop the packet capture.
+
+ Args:
+ pid: process id of tcpdump to kill.
+ """
+ for key, val in self.pcap_properties.items():
+ if val.pid == pid:
+ break
+ else:
+ self.log.error("Failed to stop tcpdump. Invalid PID %s" % pid)
+ return
+
+ pcap_dir = val.pcap_dir
+ pcap_thread = val.pcap_thread
+ self.ssh.run('kill %s' % pid, ignore_status=True)
+ with self._pcap_stop_lock:
+ del self.pcap_properties[key]
+ pcap_thread.join()
+ self.ssh.run('rm -rf %s' % pcap_dir, ignore_status=True)
+ self.tmp_dirs.remove(pcap_dir)
+
+ def close(self):
+ """Cleanup.
+
+ Cleans up all the monitor mode interfaces and closes ssh connections.
+ """
+ self._cleanup_interface(MON_2G)
+ self._cleanup_interface(MON_5G)
+ for tmp_dir in self.tmp_dirs:
+ self.ssh.run('rm -rf %s' % tmp_dir, ignore_status=True)
+ self.ssh.close()
diff --git a/acts/framework/acts/controllers/relay_lib/relay_rig.py b/acts/framework/acts/controllers/relay_lib/relay_rig.py
index 463cc92..eccf465 100644
--- a/acts/framework/acts/controllers/relay_lib/relay_rig.py
+++ b/acts/framework/acts/controllers/relay_lib/relay_rig.py
@@ -24,6 +24,7 @@
from acts.controllers.relay_lib.logitech_headset import LogitechAudioReceiver
from acts.controllers.relay_lib.sony_xb2_speaker import SonyXB2Speaker
from acts.controllers.relay_lib.sony_xb20_speaker import SonyXB20Speaker
+from acts.controllers.relay_lib.tao_tronics_headset import TaoTronicsCarkit
from acts.controllers.relay_lib.ak_xb10_speaker import AkXB10Speaker
from acts.controllers.relay_lib.dongles import SingleButtonDongle
from acts.controllers.relay_lib.dongles import ThreeButtonDongle
@@ -66,6 +67,7 @@
"LogitechAudioReceiver" :lambda x, rig: LogitechAudioReceiver(x, rig),
'SonyXB2Speaker': lambda x, rig: SonyXB2Speaker(x, rig),
'SonyXB20Speaker': lambda x, rig: SonyXB20Speaker(x, rig),
+ 'TaoTronicsCarkit': lambda x, rig: TaoTronicsCarkit(x, rig),
'AkXB10Speaker': lambda x, rig: AkXB10Speaker(x, rig),
'SingleButtonDongle': lambda x, rig: SingleButtonDongle(x, rig),
'ThreeButtonDongle': lambda x, rig: ThreeButtonDongle(x, rig),
diff --git a/acts/framework/acts/controllers/relay_lib/sain_smart_8_channel_usb_relay_board.py b/acts/framework/acts/controllers/relay_lib/sain_smart_8_channel_usb_relay_board.py
index 7770672..7b3eb13 100644
--- a/acts/framework/acts/controllers/relay_lib/sain_smart_8_channel_usb_relay_board.py
+++ b/acts/framework/acts/controllers/relay_lib/sain_smart_8_channel_usb_relay_board.py
@@ -25,7 +25,7 @@
Additional setup steps:
Change out pip/pip3 and python2.7/3.4 based on python version
1. pip install pylibftdi
-2. pip install usblib1
+2. pip install libusb1
3. sudo apt-get install libftdi-dev
4. Make this file /etc/udev/rules.d/99-libftdi.rules with root and add the lines below:
SUBSYSTEMS=="usb", ATTRS{idVendor}=="0403", ATTRS{idProduct}=="6001", GROUP="dialout", MODE="0660"
diff --git a/acts/framework/acts/controllers/relay_lib/tao_tronics_headset.py b/acts/framework/acts/controllers/relay_lib/tao_tronics_headset.py
new file mode 100644
index 0000000..c802c50
--- /dev/null
+++ b/acts/framework/acts/controllers/relay_lib/tao_tronics_headset.py
@@ -0,0 +1,91 @@
+#!/usr/bin/env python3
+#
+# Copyright 2018 - The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import enum
+import time
+from acts.controllers.relay_lib.errors import RelayConfigError
+from acts.controllers.relay_lib.generic_relay_device import GenericRelayDevice
+from acts.controllers.relay_lib.helpers import validate_key
+
+WAIT_TIME = 0.05
+MISSING_RELAY_MSG = 'Relay config for TaoTronics carkit "%s" missing relay "%s".'
+
+
+class Buttons(enum.Enum):
+ NEXT = 'Next'
+ PREVIOUS = 'Previous'
+ PLAY_PAUSE = 'Play_pause'
+ VOLUME_UP = 'Volume_up'
+ VOLUME_DOWN = 'Volume_down'
+
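+# An example relay rig config entry for this device might look like the
+# following (an illustrative sketch; the exact config schema and the values
+# below are assumptions):
+#
+#     {
+#         "type": "TaoTronicsCarkit",
+#         "mac_address": "00:11:22:33:44:55",
+#         "relays": {"Next": 1, "Previous": 2, "Play_pause": 3,
+#                    "Volume_up": 4, "Volume_down": 5}
+#     }
+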
+
+class TaoTronicsCarkit(GenericRelayDevice):
+
+ def __init__(self, config, relay_rig):
+ GenericRelayDevice.__init__(self, config, relay_rig)
+ self.mac_address = validate_key('mac_address', config, str,
+ 'TaoTronicsCarkit')
+ for button in Buttons:
+ self.ensure_config_contains_relay(button.value)
+
+ def setup(self):
+ GenericRelayDevice.setup(self)
+
+ def ensure_config_contains_relay(self, relay_name):
+ """Raises an error if the relay does not exist.
+
+ Args:
+ relay_name: the relay name to be checked.
+ """
+ if relay_name not in self.relays:
+ raise RelayConfigError(MISSING_RELAY_MSG % (self.name, relay_name))
+
+ def press_play_pause(self):
+ """Toggles between play and pause.
+
+ Sets the relay to the play state if A2DP is not streaming, or to the
+ pause state if A2DP is streaming.
+ """
+ self.relays[Buttons.PLAY_PAUSE.value].set_no_for(WAIT_TIME)
+
+ def press_next(self):
+ """Skips to next song from relay_device."""
+ self.relays[Buttons.NEXT.value].set_no_for(WAIT_TIME)
+
+ def press_previous(self):
+ """Skips to previous song from relay_device."""
+ self.relays[Buttons.PREVIOUS.value].set_no_for(WAIT_TIME)
+
+ def press_volume_up(self):
+ """Increases volume from relay_device."""
+ self.relays[Buttons.VOLUME_UP.value].set_no_for(WAIT_TIME)
+
+ def press_volume_down(self):
+ """Decreases volume from relay_device."""
+ self.relays[Buttons.VOLUME_DOWN.value].set_no_for(WAIT_TIME)
+
+ def press_initiate_call(self):
+ """Initiate call from relay device."""
+ for i in range(0, 2):
+ self.press(Buttons.PLAY_PAUSE.value)
+ time.sleep(0.2)
+ return True
+
+ def press_accept_call(self):
+ """Accepts call from relay device."""
+ self.press(Buttons.PLAY_PAUSE.value)
+ return True
diff --git a/acts/framework/acts/controllers/sl4a_lib/rpc_client.py b/acts/framework/acts/controllers/sl4a_lib/rpc_client.py
index 72351c4..55633a2 100644
--- a/acts/framework/acts/controllers/sl4a_lib/rpc_client.py
+++ b/acts/framework/acts/controllers/sl4a_lib/rpc_client.py
@@ -14,13 +14,15 @@
# See the License for the specific language governing permissions and
# limitations under the License.
import json
+import socket
import threading
-
import time
from concurrent import futures
-from acts import logger
from acts import error
+from acts import logger
+
+# The default timeout value when no timeout is set.
SOCKET_TIMEOUT = 60
# The Session UID when a UID has not been received yet.
@@ -50,10 +52,14 @@
MISMATCHED_API_ID = 'Mismatched API id.'
-class MissingSl4AError(Sl4aException):
+class Sl4aNotInstalledError(Sl4aException):
"""An error raised when an Sl4aClient is created without SL4A installed."""
+class Sl4aRpcTimeoutError(Sl4aException):
+ """An error raised when an SL4A RPC has timed out."""
+
+
class RpcClient(object):
"""An RPC client capable of processing multiple RPCs concurrently.
@@ -169,8 +175,8 @@
self._working_connections.append(client)
return client
- client_count = (
- len(self._free_connections) + len(self._working_connections))
+ client_count = (len(self._free_connections) +
+ len(self._working_connections))
if client_count < self.max_connections:
with self._lock:
client_count = (len(self._free_connections) +
@@ -215,7 +221,7 @@
"""
connection = self._get_free_connection()
ticket = connection.get_new_ticket()
-
+ timed_out = False
if timeout:
connection.set_timeout(timeout)
data = {'id': ticket, 'method': method, 'params': args}
@@ -250,10 +256,23 @@
self._log.warning('The connection was killed during cleanup:')
self._log.warning(e)
raise Sl4aConnectionError(e)
+ except socket.timeout as err:
+ # If a socket connection has timed out, the socket can no longer be
+ # used. Close it out and remove the socket from the connection pool.
+ timed_out = True
+ self._log.warning('RPC "%s" (id: %s) timed out after %s seconds.',
+ method, ticket, timeout or SOCKET_TIMEOUT)
+ self._log.debug(
+ 'Closing timed out connection over %s' % connection.ports)
+ connection.close()
+ self._working_connections.remove(connection)
+ # Re-raise the error as an SL4A Error so end users can process it.
+ raise Sl4aRpcTimeoutError(err)
finally:
- if timeout:
- connection.set_timeout(SOCKET_TIMEOUT)
- self._release_working_connection(connection)
+ if not timed_out:
+ if timeout:
+ connection.set_timeout(SOCKET_TIMEOUT)
+ self._release_working_connection(connection)
result = json.loads(str(response, encoding='utf8'))
if result['error']:
diff --git a/acts/framework/acts/controllers/sl4a_lib/sl4a_manager.py b/acts/framework/acts/controllers/sl4a_lib/sl4a_manager.py
index 6c9c71f..99b5216 100644
--- a/acts/framework/acts/controllers/sl4a_lib/sl4a_manager.py
+++ b/acts/framework/acts/controllers/sl4a_lib/sl4a_manager.py
@@ -216,7 +216,7 @@
if not self._started:
self._started = True
if not self.is_sl4a_installed():
- raise rpc_client.MissingSl4AError(
+ raise rpc_client.Sl4aNotInstalledError(
'SL4A is not installed on device %s' % self.adb.serial)
if self.adb.shell(
'ps | grep "S com.googlecode.android_scripting"'):
diff --git a/acts/framework/acts/controllers/utils_lib/ssh/connection.py b/acts/framework/acts/controllers/utils_lib/ssh/connection.py
index b969e00..7426a8c 100644
--- a/acts/framework/acts/controllers/utils_lib/ssh/connection.py
+++ b/acts/framework/acts/controllers/utils_lib/ssh/connection.py
@@ -332,7 +332,7 @@
Returns:
the created tunnel process.
"""
- if local_port is None:
+ if not local_port:
local_port = host_utils.get_available_host_port()
else:
for tunnel in self._tunnels:
@@ -396,6 +396,16 @@
user_host = self._formatter.format_host_name(self._settings)
job.run('scp %s %s:%s' % (local_path, user_host, remote_path))
+ def pull_file(self, local_path, remote_path):
+ """Pulls a file from the remote host to the local host.
+
+ Args:
+ local_path: string path to copy the file to on the local host.
+ remote_path: string path of the file to copy from the remote host.
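+
+ Example (an illustrative sketch; 'conn' and the paths are hypothetical):
+
+ >>> conn.pull_file('/local/logs/trace.pcap', '/tmp/trace.pcap')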
+ """
+ user_host = self._formatter.format_host_name(self._settings)
+ job.run('scp %s:%s %s' % (user_host, remote_path, local_path))
+
def find_free_port(self, interface_name='localhost'):
"""Find a unused port on the remote host.
diff --git a/acts/framework/acts/error.py b/acts/framework/acts/error.py
index f11e337..ce50aba 100644
--- a/acts/framework/acts/error.py
+++ b/acts/framework/acts/error.py
@@ -10,7 +10,9 @@
class_name = self.__class__.__name__
self.message = self.__class__.__doc__
self.error_code = getattr(ActsErrorCode, class_name)
- self.extra = args
+ self.extra = kwargs
+ if len(args) > 0:
+ self.extra['details'] = args
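+ # For example (illustrative): passing args ('timed out',) and kwargs
+ # {'serial': 'A1B2'} yields
+ # extra == {'serial': 'A1B2', 'details': ('timed out',)}.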
def json_str(self):
"""Converts this error to a string in json format.
@@ -46,7 +48,8 @@
Sl4aApiError = 1002
Sl4aConnectionError = 1003
Sl4aProtocolError = 1004
- MissingSl4AError = 1005
+ Sl4aNotInstalledError = 1005
+ Sl4aRpcTimeoutError = 1006
# Util Errors 4000-9999
diff --git a/acts/framework/acts/event/decorators.py b/acts/framework/acts/event/decorators.py
index 0cefb7d..8fe7c7e 100644
--- a/acts/framework/acts/event/decorators.py
+++ b/acts/framework/acts/event/decorators.py
@@ -15,23 +15,56 @@
# limitations under the License.
from acts.event.subscription_handle import InstanceSubscriptionHandle
from acts.event.subscription_handle import StaticSubscriptionHandle
+from acts.event import subscription_bundle
def subscribe_static(event_type, event_filter=None, order=0):
+ """A decorator that subscribes a static or module-level function.
+
+ The returned handle's subscription must be registered manually.
+ """
class InnerSubscriptionHandle(StaticSubscriptionHandle):
def __init__(self, func):
super().__init__(event_type, func,
- _event_filter=event_filter,
+ event_filter=event_filter,
order=order)
return InnerSubscriptionHandle
def subscribe(event_type, event_filter=None, order=0):
+ """A decorator that subscribes an instance method."""
class InnerSubscriptionHandle(InstanceSubscriptionHandle):
def __init__(self, func):
- super().__init__(event_type, lambda event: func(self._owner, event),
- _event_filter=event_filter,
+ super().__init__(event_type, func,
+ event_filter=event_filter,
order=order)
return InnerSubscriptionHandle
+
+
+def register_static_subscriptions(decorated):
+ """Registers all static subscriptions in decorated's attributes.
+
+ Args:
+ decorated: The object being decorated.
+
+ Returns:
+ The decorated object.
+ """
+ subscription_bundle.create_from_static(decorated).register()
+
+ return decorated
+
+
+def register_instance_subscriptions(obj):
+ """A decorator that subscribes all instance subscriptions after object init.
+ """
+ old_init = obj.__init__
+
+ def init_replacement(self, *args, **kwargs):
+ old_init(self, *args, **kwargs)
+ subscription_bundle.create_from_instance(self).register()
+
+ obj.__init__ = init_replacement
+ return obj
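+
+
+# Example usage (an illustrative sketch; MyEvent is a hypothetical event type):
+#
+#     @register_instance_subscriptions
+#     class Listener(object):
+#         @subscribe(MyEvent)
+#         def _on_my_event(self, event):
+#             pass
+#
+# Functions decorated with @subscribe_static are registered manually, e.g.:
+#     event_bus.register_subscription(my_static_handler.subscription)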
diff --git a/acts/framework/acts/event/event.py b/acts/framework/acts/event/event.py
index 8e7c2bf..614aa6b 100644
--- a/acts/framework/acts/event/event.py
+++ b/acts/framework/acts/event/event.py
@@ -23,15 +23,15 @@
class TestEvent(Event):
"""The base class for test-related events."""
- def __init__(self, testbed):
- self.testbed = testbed
+ def __init__(self):
+ pass
class TestCaseEvent(TestEvent):
"""The base class for test-case-related events."""
- def __init__(self, testbed, test_class, test_case):
- super().__init__(testbed)
+ def __init__(self, test_class, test_case):
+ super().__init__()
self.test_class = test_class
self.test_case = test_case
@@ -41,18 +41,26 @@
@property
def test_class_name(self):
- return self.test_class.__name__
+ return self.test_class.__class__.__name__
class TestCaseSignalEvent(TestEvent):
"""The base class for test-case-signal-related events."""
- def __init__(self, testbed, test_class, test_case, test_signal):
- super().__init__(testbed)
+ def __init__(self, test_class, test_case, test_signal):
+ super().__init__()
self.test_class = test_class
self.test_case = test_case
self.test_signal = test_signal
+ @property
+ def test_case_name(self):
+ return self.test_case.__name__
+
+ @property
+ def test_class_name(self):
+ return self.test_class.__class__.__name__
+
class TestCaseBeginEvent(TestCaseEvent):
"""The event posted when a test case has begun."""
@@ -77,8 +85,8 @@
class TestClassEvent(TestEvent):
"""The base class for test-class-related events"""
- def __init__(self, testbed, test_class):
- super().__init__(testbed)
+ def __init__(self, test_class):
+ super().__init__()
self.test_class = test_class
@@ -89,6 +97,6 @@
class TestClassEndEvent(TestClassEvent):
"""The event posted when a test class has finished testing."""
- def __init__(self, testbed, test_class, summary):
- super().__init__(testbed, test_class)
- self.summary = summary
+ def __init__(self, test_class, result):
+ super().__init__(test_class)
+ self.result = result
\ No newline at end of file
diff --git a/acts/framework/acts/event/event_bus.py b/acts/framework/acts/event/event_bus.py
index 2bcb515..0bab180 100644
--- a/acts/framework/acts/event/event_bus.py
+++ b/acts/framework/acts/event/event_bus.py
@@ -19,6 +19,7 @@
from threading import RLock
from acts.event.event_subscription import EventSubscription
+from acts.event.subscription_handle import SubscriptionHandle
class _EventBus(object):
@@ -126,7 +127,10 @@
Args:
registration_id: the Subscription or registration_id to unsubscribe.
"""
- if type(registration_id) is EventSubscription:
+ if type(registration_id) is SubscriptionHandle:
+ subscription = registration_id.subscription
+ registration_id = id(registration_id.subscription)
+ elif type(registration_id) is EventSubscription:
subscription = registration_id
registration_id = id(registration_id)
elif registration_id in self._registration_id_map.keys():
diff --git a/acts/framework/acts/event/event_subscription.py b/acts/framework/acts/event/event_subscription.py
index e38f129..474f1dc 100644
--- a/acts/framework/acts/event/event_subscription.py
+++ b/acts/framework/acts/event/event_subscription.py
@@ -13,7 +13,6 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
-import threading
class EventSubscription(object):
@@ -24,6 +23,7 @@
_func: The subscribed function.
_event_filter: A lambda that returns True if an event should be passed
to the subscribed function.
+ order: The order value in which this subscription should be called.
"""
def __init__(self, event_type, func, event_filter=None, order=0):
self._event_type = event_type
diff --git a/acts/framework/acts/event/subscription_bundle.py b/acts/framework/acts/event/subscription_bundle.py
index e59a9ac..0225be9 100644
--- a/acts/framework/acts/event/subscription_bundle.py
+++ b/acts/framework/acts/event/subscription_bundle.py
@@ -110,19 +110,7 @@
Returns:
An unregistered SubscriptionBundle.
"""
- return _create_from_object(obj, StaticSubscriptionHandle)
-
-
-def create_from_object(obj):
- """Generates a SubscriptionBundle from @subscribe.* functions on obj.
-
- Args:
- obj: The object that contains @subscribe.* functions.
-
- Returns:
- An unregistered SubscriptionBundle.
- """
- return _create_from_object(obj, SubscriptionHandle)
+ return _create_from_object(obj, obj, StaticSubscriptionHandle)
def create_from_instance(instance):
@@ -134,14 +122,21 @@
Returns:
An unregistered SubscriptionBundle.
"""
- return _create_from_object(instance, InstanceSubscriptionHandle)
+ return _create_from_object(instance, instance.__class__,
+ InstanceSubscriptionHandle)
-def _create_from_object(obj, subscription_handle_type):
+def _create_from_object(obj, obj_to_search, subscription_handle_type):
"""Generates a SubscriptionBundle from an object's SubscriptionHandles.
+ Note that instance variables do not have the class's functions as direct
+ attributes. The attributes are resolved from the type of the object. Here,
+ we need to search through the instance's class to find the correct types,
+ and subscribe the instance-specific subscriptions.
+
Args:
- obj: The object that contains SubscriptionHandles
+ obj: The object that contains SubscriptionHandles.
+ obj_to_search: The class to search for SubscriptionHandles from.
subscription_handle_type: The type of the SubscriptionHandles to
capture.
@@ -149,9 +144,10 @@
An unregistered SubscriptionBundle.
"""
bundle = SubscriptionBundle()
- for attr_name in dir(obj):
- attr = getattr(obj, attr_name)
- if isinstance(attr, subscription_handle_type):
- # Adds self to the list of arguments
- bundle.add_subscription(attr.subscription)
+ for attr_name, attr_value in obj_to_search.__dict__.items():
+ if isinstance(attr_value, subscription_handle_type):
+ bundle.add_subscription(getattr(obj, attr_name).subscription)
+ if isinstance(attr_value, staticmethod):
+ if isinstance(getattr(obj, attr_name), subscription_handle_type):
+ bundle.add_subscription(getattr(obj, attr_name).subscription)
return bundle
diff --git a/acts/framework/acts/event/subscription_handle.py b/acts/framework/acts/event/subscription_handle.py
index 1f61201..bc74c6d 100644
--- a/acts/framework/acts/event/subscription_handle.py
+++ b/acts/framework/acts/event/subscription_handle.py
@@ -18,10 +18,11 @@
class SubscriptionHandle(object):
"""The object created by a method decorated with an event decorator."""
- def __init__(self, event_type, func, _event_filter=None, order=0):
+
+ def __init__(self, event_type, func, event_filter=None, order=0):
self._event_type = event_type
self._func = func
- self._event_filter = _event_filter
+ self._event_filter = event_filter
self.order = order
self._subscription = None
self._owner = None
@@ -35,11 +36,33 @@
return self._subscription
def __get__(self, instance, owner):
- self._owner = instance
- return self
+ # If our owner has been initialized, or we do not have an instance owner,
+ # return self.
+ if self._owner is not None or instance is None:
+ return self
- def __call__(self, event):
- return self._func(event)
+ # Otherwise, we create a new SubscriptionHandle that will only be used
+ # for the instance that owns this SubscriptionHandle.
+ ret = SubscriptionHandle(self._event_type, self._func,
+ self._event_filter, self.order)
+ ret._owner = instance
+ ret._func = ret._wrap_call(ret._func)
+ for attr, value in owner.__dict__.items():
+ if value is self:
+ setattr(instance, attr, ret)
+ break
+ return ret
+
+ def _wrap_call(self, func):
+ def _wrapped_call(*args, **kwargs):
+ if self._owner is None:
+ return func(*args, **kwargs)
+ else:
+ return func(self._owner, *args, **kwargs)
+ return _wrapped_call
+
+ def __call__(self, *args, **kwargs):
+ return self._func(*args, **kwargs)
class InstanceSubscriptionHandle(SubscriptionHandle):
diff --git a/acts/framework/acts/keys.py b/acts/framework/acts/keys.py
index 6f7819a..6c7d4ff 100644
--- a/acts/framework/acts/keys.py
+++ b/acts/framework/acts/keys.py
@@ -53,6 +53,7 @@
key_monsoon = "Monsoon"
key_sniffer = "Sniffer"
key_arduino_wifi_dongle = "ArduinoWifiDongle"
+ key_packet_capture = "PacketCapture"
# Internal keys, used internally, not exposed to user's config files.
ikey_user_param = "user_params"
ikey_testbed_name = "testbed_name"
@@ -73,6 +74,7 @@
m_key_packet_sender = "packet_sender"
m_key_sniffer = "sniffer"
m_key_arduino_wifi_dongle = "arduino_wifi_dongle"
+ m_key_packet_capture = "packet_capture"
# A list of keys whose values in configs should not be passed to test
# classes without unpacking first.
@@ -93,6 +95,7 @@
key_sniffer,
key_chameleon_device,
key_arduino_wifi_dongle,
+ key_packet_capture,
]
# Keys that are file or folder paths.
diff --git a/acts/framework/acts/libs/logging/__init__.py b/acts/framework/acts/libs/logging/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/acts/framework/acts/libs/logging/__init__.py
diff --git a/acts/framework/acts/libs/logging/log_stream.py b/acts/framework/acts/libs/logging/log_stream.py
new file mode 100644
index 0000000..f0e6121
--- /dev/null
+++ b/acts/framework/acts/libs/logging/log_stream.py
@@ -0,0 +1,338 @@
+#!/usr/bin/env python3
+#
+# Copyright 2018 - The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+import logging
+import os
+import sys
+from logging import FileHandler
+from logging import Handler
+from logging import StreamHandler
+from logging.handlers import RotatingFileHandler
+
+from acts.event import event_bus
+from acts.event.decorators import subscribe_static
+from acts.event.event import TestCaseBeginEvent
+from acts.event.event import TestCaseEndEvent
+
+
+# yapf: disable
+class LogStyles:
+ NONE = 0x00
+ LOG_DEBUG = 0x01
+ LOG_INFO = 0x02
+ LOG_WARNING = 0x04
+ LOG_ERROR = 0x08
+ LOG_CRITICAL = 0x10
+
+ DEFAULT_LEVELS = LOG_DEBUG + LOG_INFO + LOG_ERROR
+ ALL_LEVELS = LOG_DEBUG + LOG_INFO + LOG_WARNING + LOG_ERROR + LOG_CRITICAL
+
+ MONOLITH_LOG = 0x0100
+ TESTCASE_LOG = 0x0200
+ TO_STDOUT = 0x0400
+ TO_ACTS_LOG = 0x0800
+ ROTATE_LOGS = 0x1000
+
+ LEVEL_NAMES = {
+ LOG_DEBUG: 'debug',
+ LOG_INFO: 'info',
+ LOG_WARNING: 'warning',
+ LOG_ERROR: 'error',
+ LOG_CRITICAL: 'critical',
+ }
+
+ LOG_LEVELS = [
+ LOG_DEBUG,
+ LOG_INFO,
+ LOG_WARNING,
+ LOG_ERROR,
+ LOG_CRITICAL,
+ ]
+
+ LEVEL_TO_NO = {
+ LOG_DEBUG: logging.DEBUG,
+ LOG_INFO: logging.INFO,
+ LOG_WARNING: logging.WARNING,
+ LOG_ERROR: logging.ERROR,
+ LOG_CRITICAL: logging.CRITICAL,
+ }
+# yapf: enable
+
+
+_log_streams = dict()
+
+
+@subscribe_static(TestCaseBeginEvent)
+def _on_test_case_begin(event):
+ log_path = os.path.join(logging.log_path, event.test_case_name)
+ if not os.path.exists(log_path):
+ os.mkdir(log_path)
+
+ for log_stream in _log_streams.values():
+ log_stream.on_test_case_begin(event)
+
+
+@subscribe_static(TestCaseEndEvent)
+def _on_test_case_end(event):
+ for log_stream in _log_streams.values():
+ log_stream.on_test_case_end(event)
+
+
+def create_logger(name, log_styles=LogStyles.NONE):
+ """Creates a Python Logger object with the given attributes.
+
+ Creation through this method will automatically manage the logger in the
+ background for test-related events, such as TestCaseBegin and TestCaseEnd
+ Events.
+
+ Args:
+ name: The name of the LogStream and underlying logger.
+ log_styles: An integer or array of integers that are the sum of
+ corresponding flag values in LogStyles. Examples include:
+
+ >>> LogStyles.LOG_INFO + LogStyles.TESTCASE_LOG
+
+ >>> LogStyles.ALL_LEVELS + LogStyles.MONOLITH_LOG
+
+ >>> [LogStyles.DEFAULT_LEVELS + LogStyles.MONOLITH_LOG,
+ >>>  LogStyles.LOG_ERROR + LogStyles.TO_ACTS_LOG]
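+
+ A complete call might look like (an illustrative sketch; the logger name
+ 'my_lib' is hypothetical):
+
+ >>> log = create_logger('my_lib', LogStyles.LOG_INFO + LogStyles.MONOLITH_LOG)
+ >>> log.info('message')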
+ """
+ log_stream = _LogStream(name, log_styles)
+ _set_logger(log_stream)
+ return log_stream.logger
+
+
+def _set_logger(log_stream):
+ if log_stream.logger.name in _log_streams:
+ _log_streams[log_stream.logger.name].cleanup()
+ _log_streams[log_stream.logger.name] = log_stream
+ return log_stream
+
+
+event_bus.register_subscription(_on_test_case_begin.subscription)
+event_bus.register_subscription(_on_test_case_end.subscription)
+
+
+class AlsoToLogHandler(Handler):
+ """Logs a message at a given level also to another logger.
+
+ Used for logging messages at a high enough level to the main log, or another
+ logger.
+ """
+
+ def __init__(self, to_logger=None, *args, **kwargs):
+ super().__init__(*args, **kwargs)
+ self._log = logging.getLogger(to_logger)
+
+ def emit(self, record):
+ self._log.log(record.levelno, record.getMessage())
+
+
+class InvalidStyleSetError(Exception):
+ """Raised when the given LogStyles are an invalid set."""
+
+
+class _LogStream(object):
+ """A class that sets up a logging.Logger object.
+
+ The LogStream class creates a logging.Logger object. LogStream is also
+ responsible for managing the logger when events take place, such as
+ TestCaseBeginEvents and TestCaseEndEvents.
+
+ Attributes:
+ name: The name shared between this LogStream and its logger.
+ logger: The logger created by this LogStream.
+
+ _test_case_handler_descriptors: The list of HandlerDescriptors that are
+ used to create LogHandlers for each new test case.
+ _test_case_log_handlers: The list of current LogHandlers for the current
+ test case.
+ """
+
+ class HandlerDescriptor(object):
+ """An object that describes how to create a LogHandler.
+
+ Attributes:
+ _base_name: The name of the file generated by the FileLogHandler.
+ _creator: The callable that creates the FileLogHandler.
+ _level: The logging level (INFO, DEBUG, etc) for this handler.
+ """
+
+ def __init__(self, creator, level, name):
+ self._base_name = '%s_%s.txt' % (name, LogStyles.LEVEL_NAMES[level])
+ self._creator = creator
+ self._level = LogStyles.LEVEL_TO_NO[level]
+
+ def create(self, directory=''):
+ """Creates the FileLogHandler described by this HandlerDescriptor.
+
+ Args:
+ directory: The directory name for the file to be created under.
+ This name is relative to logging.log_path.
+ """
+ handler = self._creator(os.path.join(logging.log_path,
+ directory, self._base_name))
+ handler.setLevel(self._level)
+ return handler
+
+ def __init__(self, name, log_styles=LogStyles.NONE):
+ """Creates a LogStream.
+
+ Args:
+ name: The name of the LogStream and underlying logger.
+ log_styles: An integer or array of integers that are the sum of
+ corresponding flag values in LogStyles. Examples include:
+
+ >>> LogStyles.LOG_INFO + LogStyles.TESTCASE_LOG
+
+ >>> LogStyles.ALL_LEVELS + LogStyles.MONOLITH_LOG
+
+ >>> [LogStyles.DEFAULT_LEVELS + LogStyles.MONOLITH_LOG,
+ >>>  LogStyles.LOG_ERROR + LogStyles.TO_ACTS_LOG]
+ """
+ self.name = name
+ self.logger = logging.getLogger(name)
+ self.logger.propagate = False
+ self._test_case_handler_descriptors = []
+ self._test_case_log_handlers = []
+ if not isinstance(log_styles, list):
+ log_styles = [log_styles]
+ self.__validate_styles(log_styles)
+ for log_style in log_styles:
+ self.__handle_style(log_style)
+
+ @staticmethod
+ def __validate_styles(_log_styles_list):
+ """Determines if the given list of styles is valid.
+
+ Terminology:
+ Log-level: any of [DEBUG, INFO, WARNING, ERROR, CRITICAL].
+ Log Location: any of [MONOLITH, TESTCASE, TO_STDOUT, TO_ACTS_LOG].
+
+ Styles are invalid when any of the below criteria are met:
+ A log-level is not set within an element of the list.
+ A log location is not set within an element of the list.
+ A log-level, log location pair appears twice within the list.
+ ROTATE_LOGS is set without MONOLITH_LOG or TESTCASE_LOG.
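+
+ For example (illustrative), [LogStyles.LOG_INFO] is an invalid set because
+ no log location is given, and [LogStyles.LOG_INFO + LogStyles.TO_STDOUT,
+ LogStyles.LOG_INFO + LogStyles.TO_STDOUT] is invalid because the same
+ log-level/location pair appears twice.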
+
+ Raises:
+ InvalidStyleSetError if the given style cannot be achieved.
+ """
+
+ def invalid_style_error(message):
+ raise InvalidStyleSetError('{LogStyle Set: %s} %s' %
+ (_log_styles_list, message))
+
+ levels_dict = {}
+ log_locations = [LogStyles.TO_STDOUT, LogStyles.TO_ACTS_LOG,
+ LogStyles.MONOLITH_LOG, LogStyles.TESTCASE_LOG]
+ for log_style in _log_styles_list:
+ for level in LogStyles.LOG_LEVELS:
+ levels_dict[level] = levels_dict.get(level, [])
+ for log_location in log_locations:
+ if log_style & log_location:
+ if log_location in levels_dict[level]:
+ invalid_style_error(
+ 'The log location %s for log level %s has been '
+ 'set multiple times' % (log_location, level))
+ else:
+ levels_dict[level].append(log_location)
+ if log_style & LogStyles.ALL_LEVELS == 0:
+ invalid_style_error('LogStyle %s needs to set a log '
+ 'level.' % log_style)
+ if log_style & ~LogStyles.ALL_LEVELS == 0:
+ invalid_style_error('LogStyle %s needs to set a log '
+ 'location.' % log_style)
+ if log_style & LogStyles.ROTATE_LOGS and not log_style & (
+ LogStyles.MONOLITH_LOG | LogStyles.TESTCASE_LOG):
+ invalid_style_error('LogStyle %s has ROTATE_LOGS set, but does '
+ 'not specify a log type.' % log_style)
+
+ @staticmethod
+ def __create_rotating_file_handler(filename):
+ """Creates a RotatingFileHandler that rotates files at 10MB."""
+ # Magic number explanation: 10485760 == 10MB
+ return RotatingFileHandler(filename, maxBytes=10485760)
+
+ @staticmethod
+ def __get_file_handler_creator(log_style):
+ """Gets the callable to create the correct FileLogHandler."""
+ create_file_handler = FileHandler
+ if log_style & LogStyles.ROTATE_LOGS:
+ create_file_handler = _LogStream.__create_rotating_file_handler
+ return create_file_handler
+
+ @staticmethod
+ def __get_lowest_log_level(log_style):
+ """Returns the lowest log level's LogStyle for the given log_style."""
+ for log_level in LogStyles.LOG_LEVELS:
+ if log_level & log_style:
+ return log_level
+ return LogStyles.NONE
+
+ def __handle_style(self, log_style):
+ """Creates the handlers described in the given log_style."""
+ handler_creator = self.__get_file_handler_creator(log_style)
+
+ # Handle streaming logs to STDOUT or the ACTS Logger
+ if log_style & (LogStyles.TO_ACTS_LOG | LogStyles.TO_STDOUT):
+ lowest_log_level = self.__get_lowest_log_level(log_style)
+
+ if log_style & LogStyles.TO_ACTS_LOG:
+ handler = AlsoToLogHandler()
+ else: # LogStyles.TO_STDOUT:
+ handler = StreamHandler(sys.stdout)
+
+ handler.setLevel(LogStyles.LEVEL_TO_NO[lowest_log_level])
+ self.logger.addHandler(handler)
+
+ # Handle streaming logs to log-level files
+ for log_level in LogStyles.LOG_LEVELS:
+ if not log_style & log_level:
+ continue
+ descriptor = self.HandlerDescriptor(handler_creator, log_level,
+ self.name)
+ if log_style & LogStyles.TESTCASE_LOG:
+ self._test_case_handler_descriptors.append(descriptor)
+ if log_style & LogStyles.MONOLITH_LOG:
+ handler = descriptor.create()
+ self.logger.addHandler(handler)
+
+ def __remove_handler(self, handler):
+ """Removes a handler from the logger."""
+ handler.close()
+ self.logger.removeHandler(handler)
+
+ def on_test_case_end(self, _):
+ """Internal use only. To be called when a test case has ended."""
+ for log_handler in self._test_case_log_handlers:
+ self.__remove_handler(log_handler)
+ self._test_case_log_handlers.clear()
+
+ def on_test_case_begin(self, test_case_event):
+ """Internal use only. To be called when a test case has begun."""
+ # Close test case handlers from previous tests.
+ self.on_test_case_end(None)
+
+ # Create new handlers for this test case.
+ for descriptor in self._test_case_handler_descriptors:
+ handler = descriptor.create(test_case_event.test_case_name)
+ self.logger.addHandler(handler)
+ self._test_case_log_handlers.append(handler)
+
+ def cleanup(self):
+ """Removes all LogHandlers from the logger."""
+ for handler in self.logger.handlers:
+ self.__remove_handler(handler)
diff --git a/acts/framework/acts/libs/ota/ota_runners/ota_runner.py b/acts/framework/acts/libs/ota/ota_runners/ota_runner.py
index 6f31f39..290e5bb 100644
--- a/acts/framework/acts/libs/ota/ota_runners/ota_runner.py
+++ b/acts/framework/acts/libs/ota/ota_runners/ota_runner.py
@@ -74,6 +74,7 @@
break
log.info('Starting services.')
self.android_device.start_services()
+ self.android_device.update_sdk_api_level()
log.info('Services started. Running ota tool cleanup.')
self.ota_tool.cleanup(self)
log.info('Cleanup complete.')
diff --git a/acts/framework/acts/libs/proc/process.py b/acts/framework/acts/libs/proc/process.py
new file mode 100644
index 0000000..e1e7b71
--- /dev/null
+++ b/acts/framework/acts/libs/proc/process.py
@@ -0,0 +1,211 @@
+#!/usr/bin/env python3
+#
+# Copyright 2018 - The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+import logging
+import subprocess
+from threading import Thread
+
+import time
+
+
+class Process(object):
+ """A Process object used to run various commands.
+
+ Attributes:
+ _command: The initial command to run.
+ _subprocess_kwargs: The kwargs to send to Popen for more control over
+ execution.
+ _process: The subprocess.Popen object currently executing a process.
+ _listening_thread: The thread that is listening for the process to stop.
+ _redirection_thread: The thread that is redirecting process output.
+ _on_output_callback: The callback to call when output is received.
+ _on_terminate_callback: The callback to call when the process terminates
+ without stop() being called first.
+ _started: Whether or not the Process is in the running state.
+ _stopped: Whether or not stop() was called.
+ """
+
+ def __init__(self, command, **kwargs):
+ """Creates a Process object.
+
+ Note that this constructor does not begin the process. To start the
+ process, use Process.start().
+ """
+ self._command = command
+ self._subprocess_kwargs = kwargs
+ self._process = None
+
+ self._listening_thread = None
+ self._redirection_thread = None
+ self._on_output_callback = lambda *args, **kw: None
+ self._on_terminate_callback = lambda *args, **kw: ''
+
+ self._stopped = False
+
+ def set_on_output_callback(self, on_output_callback):
+ """Sets the on_output_callback function.
+
+ Args:
+ on_output_callback: The function to be called when output is sent to
+ the output. The output callback has the following signature:
+
+ >>> def on_output_callback(output_line):
+ >>>     return None
+
+ Returns:
+ self
+ """
+ self._on_output_callback = on_output_callback
+ return self
+
+ def set_on_terminate_callback(self, on_terminate_callback):
+ """Sets the on_self_terminate callback function.
+
+ Args:
+ on_terminate_callback: The function to be called when the process
+ has terminated on its own. The callback has the following
+ signature:
+
+ >>> def on_self_terminate_callback(popen_process):
+ >>> return 'command to run' or None
+
+ If a string is returned, the string returned will be the command
+ line used to run the command again. If None is returned, the
+ process will end without restarting.
+
+ Returns:
+ self
+ """
+ self._on_terminate_callback = on_terminate_callback
+ return self
+
+ def start(self):
+ """Starts the process's execution."""
+ self._process = None
+ self._stopped = False
+
+ self._listening_thread = Thread(target=self._exec_loop)
+ self._listening_thread.start()
+
+ time_up_at = time.time() + 1
+
+ while self._process is None:
+ if time.time() > time_up_at:
+ raise OSError('Unable to open process!')
+
+ @staticmethod
+ def _get_timeout_left(timeout, start_time):
+ return max(.1, timeout - (time.time() - start_time))
+
+ def wait(self, kill_timeout=60.0):
+ """Waits for the process to finish execution.
+
+ If the process has reached the kill_timeout, the process will be killed
+ instead.
+
+ Args:
+ kill_timeout: The amount of time to wait until killing the process.
+ """
+ start_time = time.time()
+
+ try:
+ self._process.wait(kill_timeout)
+ except subprocess.TimeoutExpired:
+ self._stopped = True
+ self._process.kill()
+
+ time_left = self._get_timeout_left(kill_timeout, start_time)
+
+ if self._listening_thread is not None:
+ self._listening_thread.join(timeout=time_left)
+ self._listening_thread = None
+
+ time_left = self._get_timeout_left(kill_timeout, start_time)
+
+ if self._redirection_thread is not None:
+ self._redirection_thread.join(timeout=time_left)
+ self._redirection_thread = None
+
+ def stop(self, timeout=60.0):
+ """Stops the process.
+
+ This command is effectively equivalent to kill, but gives time to clean
+ up any related work on the process, such as output redirection.
+
+ Note: the on_self_terminate callback will NOT be called when calling
+ this function.
+
+ Args:
+ timeout: The amount of time to wait for the program output to finish
+ being handled.
+ """
+ self._stopped = True
+
+ start_time = time.time()
+
+ if self._process is not None and self._process.poll() is None:
+ self._process.kill()
+ self.wait(self._get_timeout_left(timeout, start_time))
+
+ def _redirect_output(self):
+ """Redirects the output from the command into the on_output_callback."""
+ while True:
+ line = self._process.stdout.readline().decode('utf-8',
+ errors='replace')
+
+ if line == '':
+ return
+ else:
+ # Output the line without trailing \n and whitespace.
+ self._on_output_callback(line.rstrip())
+
+ @staticmethod
+ def __start_process(command, **kwargs):
+ """A convenient wrapper function for starting the process."""
+ acts_logger = logging.getLogger()
+ acts_logger.debug(
+ 'Starting command "%s" with kwargs %s', command, kwargs)
+ return subprocess.Popen(command, **kwargs)
+
+ def _exec_loop(self):
+ """Executes Popen in a loop.
+
+ When Popen terminates without stop() being called,
+ self._on_terminate_callback() will be called. The returned value from
+ _on_terminate_callback will then be used to determine if the loop should
+ continue and start up the process again. See set_on_terminate_callback()
+ for more information.
+ """
+ command = self._command
+ while True:
+ self._process = self.__start_process(command,
+ stdout=subprocess.PIPE,
+ stderr=subprocess.STDOUT,
+ bufsize=1,
+ **self._subprocess_kwargs)
+ self._redirection_thread = Thread(target=self._redirect_output)
+ self._redirection_thread.start()
+ self._process.wait()
+
+ if self._stopped:
+ break
+ else:
+ # Wait for all output to be processed before sending
+ # _on_terminate_callback()
+ self._redirection_thread.join()
+ retry_value = self._on_terminate_callback(self._process)
+ if retry_value:
+ command = retry_value
+ else:
+ break
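+
+
+# Example usage (an illustrative sketch; the command below is hypothetical):
+#
+#     proc = Process(['adb', 'logcat'])
+#     proc.set_on_output_callback(lambda line: print(line))
+#     proc.set_on_terminate_callback(lambda popen: None)  # None: no restart
+#     proc.start()
+#     ...
+#     proc.stop()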
diff --git a/acts/framework/acts/libs/proto/proto_utils.py b/acts/framework/acts/libs/proto/proto_utils.py
index da38082..3857a3e 100644
--- a/acts/framework/acts/libs/proto/proto_utils.py
+++ b/acts/framework/acts/libs/proto/proto_utils.py
@@ -43,7 +43,7 @@
return None
# Validate output py-proto path
if not os.path.exists(output_dir):
- os.mkdirs(output_dir)
+ os.makedirs(output_dir)
elif not os.path.isdir(output_dir):
logging.error("Output path is not a valid directory: %s" %
(output_dir))
@@ -65,12 +65,13 @@
def compile_import_proto(output_dir, proto_path):
- """
- Compile protobuf from PROTO_PATH and put the result in OUTPUT_DIR.
- Return the imported module to caller.
- :param output_dir: To store generated python proto library
- :param proto_path: Path to the .proto file that needs to be compiled
- :return: python proto module
+ """Compiles the given protobuf file and returns the module.
+
+ Args:
+ output_dir: The directory to put the compilation output.
+ proto_path: The path to the .proto file that needs to be compiled.
+ Returns:
+ The protobuf module.
"""
output_module_name = compile_proto(proto_path, output_dir)
if not output_module_name:
@@ -86,9 +87,24 @@
def parse_proto_to_ascii(binary_proto_msg):
- """
- Parse binary protobuf message to human readable ascii string
- :param binary_proto_msg:
- :return: ascii string of the message
+ """Parses a binary protobuf message to a human readable ascii string.
+
+ Args:
+ binary_proto_msg: The binary format of the proto message.
+ Returns:
+ The ascii format of the proto message.
"""
return protobuf.text_format.MessageToString(binary_proto_msg)
+
+
+def to_descriptor_proto(proto):
+ """Retrieves the descriptor proto for the given protobuf message.
+
+ Args:
+ proto: the original message.
+ Returns:
+ The descriptor proto for the input message.
+ """
+ descriptor_proto = protobuf.descriptor_pb2.DescriptorProto()
+ proto.DESCRIPTOR.CopyToProto(descriptor_proto)
+ return descriptor_proto
diff --git a/acts/framework/acts/libs/version_selector.py b/acts/framework/acts/libs/version_selector.py
new file mode 100644
index 0000000..28ea674
--- /dev/null
+++ b/acts/framework/acts/libs/version_selector.py
@@ -0,0 +1,275 @@
+#!/usr/bin/env python3
+#
+# Copyright 2018 - The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+import bisect
+import inspect
+import numbers
+
+
+def _fully_qualified_name(func):
+ """Returns the fully-qualified name of a function.
+
+ Note: __qualname__ is not the fully qualified name. It is the fully
+ qualified name without the module name.
+
+ See: https://www.python.org/dev/peps/pep-3155/#naming-choice
+ """
+ return '%s:%s' % (func.__module__, func.__qualname__)
+
+
+def set_version(get_version_func, min_version, max_version):
+ """Returns a decorator returning a VersionSelector containing all versions
+ of the decorated func.
+
+ Args:
+ get_version_func: The lambda that returns the version level based on the
+ arguments sent to versioned_func
+ min_version: The minimum API level for calling versioned_func.
+ max_version: The maximum API level for calling versioned_func.
+
+ Raises:
+ SyntaxError if get_version_func is different between versioned funcs.
+
+ Returns:
+ A VersionSelector containing all versioned calls to the decorated func.
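+
+ Example (an illustrative sketch; get_api_level and the 'ad' argument are
+ hypothetical):
+
+ >>> def get_api_level(ad):
+ >>>     return ad.sdk_api_level
+ >>>
+ >>> @set_version(get_api_level, 1, 27)
+ >>> def do_thing(ad):
+ >>>     pass
+ >>>
+ >>> @set_version(get_api_level, 28, 28)
+ >>> def do_thing(ad):
+ >>>     pass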
+ """
+ func_owner_variables = None
+ for frame_info in inspect.stack():
+ if frame_info.function == '<module>':
+ # We've reached the end of the most recently imported module in our
+ # stack without finding a class first. This indicates that the
+ # decorator is on a module-level function.
+ func_owner_variables = frame_info.frame.f_locals
+ break
+ elif '__qualname__' in frame_info.frame.f_locals:
+ # __qualname__ appears in stack frames of objects that have
+ # yet to be interpreted. Here we can guarantee that the object in
+ # question is the innermost class that contains the function.
+ func_owner_variables = frame_info.frame.f_locals
+ break
+
+ def decorator(func):
+ if isinstance(func, (staticmethod, classmethod)):
+ raise SyntaxError('@staticmethod and @classmethod decorators must '
+ 'be placed before the versioning decorator.')
+ func_name = func.__name__
+
+ if func_name in func_owner_variables:
+ # If the function already exists within the class/module, get it.
+ version_selector = func_owner_variables[func_name]
+ if isinstance(version_selector, (staticmethod, classmethod)):
+ # If the function was also decorated with @staticmethod or
+ # @classmethod, the version_selector will be stored in __func__.
+ version_selector = version_selector.__func__
+ if not isinstance(version_selector, _VersionSelector):
+ raise SyntaxError('The previously defined function "%s" is not '
+ 'decorated with a versioning decorator.' %
+ version_selector.__qualname__)
+ if (version_selector.comparison_func_name !=
+ _fully_qualified_name(get_version_func)):
+ raise SyntaxError('Functions of the same name must be decorated'
+ ' with the same versioning decorator.')
+ else:
+ version_selector = _VersionSelector(get_version_func)
+
+ version_selector.add_fn(func, min_version, max_version)
+ return version_selector
+
+ return decorator
+
+
+class _VersionSelector(object):
+ """A class that maps API levels to versioned functions for that API level.
+
+ Attributes:
+ entry_list: A sorted list of Entries that define which functions to call
+ for a given API level.
+ """
+
+ class ListWrap(object):
+ """This class wraps a list of VersionSelector.Entry objects.
+
+ This is required to make the bisect functions work, since their
+ underlying implementation is not written in Python and does not use
+ __cmp__, __lt__, __gt__, etc.
+
+ See: https://docs.python.org/3/library/bisect.html#other-examples
+ """
+
+ def __init__(self, entry_list):
+ self.list = entry_list
+
+ def __len__(self):
+ return len(self.list)
+
+ def __getitem__(self, index):
+ return self.list[index].level
+
+ class Entry(object):
+ def __init__(self, level, func, direction):
+ """Creates an Entry object.
+
+ Args:
+ level: The API level for this point.
+ func: The function to call.
+ direction: (-1, 0 or 1) the direction the ray from this level
+ points towards.
+ """
+ self.level = level
+ self.func = func
+ self.direction = direction
+
+ def __init__(self, version_func):
+ """Creates a VersionSelector object.
+
+ Args:
+ version_func: The function that converts the arguments into an
+ integer that represents the API level.
+ """
+ self.entry_list = list()
+ self.get_version = version_func
+ self.instance = None
+ self.comparison_func_name = _fully_qualified_name(version_func)
+
+ def __name__(self):
+ if len(self.entry_list) > 0:
+ return self.entry_list[0].func.__name__
+ return '%s<%s>' % (self.__class__.__name__, self.get_version.__name__)
+
+ def print_ranges(self):
+ """Returns all ranges as a string.
+
+ The string is formatted as '[min_a, max_a], [min_b, max_b], ...'
+ """
+ ranges = []
+ min_boundary = None
+ for entry in self.entry_list:
+ if entry.direction == 1:
+ min_boundary = entry.level
+ elif entry.direction == 0:
+ ranges.append(str([entry.level, entry.level]))
+ else:
+ ranges.append(str([min_boundary, entry.level]))
+ return ', '.join(ranges)
+
+ def add_fn(self, fn, min_version, max_version):
+ """Adds a function to the VersionSelector for the given API range.
+
+ Args:
+ fn: The function to call when the API level is met.
+ min_version: The minimum version level for calling this function.
+ max_version: The maximum version level for calling this function.
+
+ Raises:
+ ValueError if min_version > max_version or another versioned
+ function overlaps this new range.
+ """
+ if min_version > max_version:
+ raise ValueError('The minimum API level must not be greater than '
+ 'the maximum API level.')
+ insertion_index = bisect.bisect_left(
+ _VersionSelector.ListWrap(self.entry_list), min_version)
+ if insertion_index != len(self.entry_list):
+ right_neighbor = self.entry_list[insertion_index]
+ if not (min_version <= max_version < right_neighbor.level and
+ right_neighbor.direction != -1):
+ raise ValueError('New range overlaps another API level. '
+ 'New range: %s, Existing ranges: %s' %
+ ([min_version, max_version],
+ self.print_ranges()))
+ if min_version == max_version:
+ new_entry = _VersionSelector.Entry(min_version, fn, direction=0)
+ self.entry_list.insert(insertion_index, new_entry)
+ else:
+ # Inserts the 2 entries into the entry list at insertion_index.
+ self.entry_list[insertion_index:insertion_index] = [
+ _VersionSelector.Entry(min_version, fn, direction=1),
+ _VersionSelector.Entry(max_version, fn, direction=-1)]
+
+ def __call__(self, *args, **kwargs):
+ """Calls the proper versioned function for the given API level.
+
+ This is a magic python function that gets called whenever parentheses
+ immediately follow the attribute access (e.g. obj.version_selector()).
+
+ Args:
+ *args, **kwargs: The arguments passed into this call. These
+ arguments are intended for the decorated function.
+
+ Returns:
+ The result of the called function.
+ """
+ if self.instance is not None:
+ # When the versioned function is a classmethod, the class is passed
+ # into __call__ as the first argument.
+ level = self.get_version(self.instance, *args, **kwargs)
+ else:
+ level = self.get_version(*args, **kwargs)
+ if not isinstance(level, numbers.Number):
+ kwargs_out = []
+ for key, value in kwargs.items():
+ kwargs_out.append('%s=%s' % (key, str(value)))
+ args_out = str(list(args))[1:-1]
+ kwargs_out = ', '.join(kwargs_out)
+ raise ValueError(
+ 'The API level function %s returned %s for the arguments '
+ '(%s). This function must return a number.' %
+ (self.get_version.__qualname__, repr(level),
+ ', '.join(i for i in [args_out, kwargs_out] if i)))
+
+ index = bisect.bisect_left(_VersionSelector.ListWrap(self.entry_list),
+ level)
+
+ # Check to make sure the function being called is within the API range
+ if index == len(self.entry_list):
+ raise NotImplementedError('No function %s exists for API level %s'
+ % (self.entry_list[0].func.__qualname__,
+ level))
+ closest_entry = self.entry_list[index]
+ if (closest_entry.direction == 0 and closest_entry.level != level or
+ closest_entry.direction == 1 and closest_entry.level > level or
+ closest_entry.direction == -1 and closest_entry.level < level):
+ raise NotImplementedError('No function %s exists for API level %s'
+ % (self.entry_list[0].func.__qualname__,
+ level))
+
+ func = self.entry_list[index].func
+ if self.instance is None:
+ # __get__ was not called, so the function is module-level.
+ return func(*args, **kwargs)
+
+ return func(self.instance, *args, **kwargs)
+
+ def __get__(self, instance, owner):
+ """Gets the instance and owner whenever this function is obtained.
+
+ These arguments will be used to pass in the self to instance methods.
+ If the function is marked with @staticmethod or @classmethod, those
+ decorators will handle removing self or getting the class, respectively.
+
+ Note that this function will NOT be called on module-level functions.
+
+ Args:
+ instance: The instance of the object this function is being called
+ from. If this function is static or a classmethod,
+ instance will be None.
+ owner: The object that owns this function. This is the class object
+ that defines the function.
+
+ Returns:
+ self, this VersionSelector instance.
+ """
+ self.instance = instance
+ return self
diff --git a/acts/framework/acts/logger.py b/acts/framework/acts/logger.py
index 6bdef0c..eb6bc57 100755
--- a/acts/framework/acts/logger.py
+++ b/acts/framework/acts/logger.py
@@ -29,9 +29,9 @@
# The micro seconds are added by the format string above,
# so the time format does not include ms.
log_line_time_format = "%Y-%m-%d %H:%M:%S"
-log_line_timestamp_len = 18
+log_line_timestamp_len = 23
-logline_timestamp_re = re.compile("\d\d-\d\d \d\d:\d\d:\d\d.\d\d\d")
+logline_timestamp_re = re.compile("\d\d\d\d-\d\d-\d\d \d\d:\d\d:\d\d.\d\d\d")
def _parse_logline_timestamp(t):
diff --git a/acts/framework/acts/metrics/__init__.py b/acts/framework/acts/metrics/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/acts/framework/acts/metrics/__init__.py
diff --git a/acts/framework/acts/metrics/context.py b/acts/framework/acts/metrics/context.py
new file mode 100644
index 0000000..2eb63c0
--- /dev/null
+++ b/acts/framework/acts/metrics/context.py
@@ -0,0 +1,229 @@
+#!/usr/bin/env python3
+#
+# Copyright 2018 - The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+import logging
+
+from acts.event.event import TestCaseEvent
+from acts.event.event import TestClassEvent
+
+
+def get_context_for_event(event):
+ if isinstance(event, TestCaseEvent):
+ return _get_context_for_test_case_event(event)
+ if isinstance(event, TestClassEvent):
+ return _get_context_for_test_class_event(event)
+ raise TypeError('Unrecognized event type: %s %s' % (event, event.__class__))
+
+def _get_context_for_test_case_event(event):
+ return TestCaseContext(event.test_class, event.test_case)
+
+
+def _get_context_for_test_class_event(event):
+ return TestClassContext(event.test_class)
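+
+
+# Example usage (an illustrative sketch):
+#
+#     context = get_context_for_event(event)
+#     output_path = context.get_full_output_path()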
+
+
+class TestContext(object):
+ """An object representing the current context in which a test is executing.
+
+ The context encodes the current state of the test runner with respect to a
+ particular scenario in which code is being executed. For example, if some
+ code is being executed as part of a test case, then the context should
+ encode information about that test case such as its name or enclosing
+ class.
+
+ Attributes:
+ _base_output_path_override: an override of the base output path to use.
+ _output_dir_override: an override of the output directory specific to
+ the represented context.
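+
+    Example:
+        A minimal sketch of how paths compose; 'event' stands for a
+        TestCaseEvent posted by the test runner and the paths are
+        hypothetical:
+
+            context = get_context_for_event(event)
+            context.set_base_output_path('/tmp/acts_logs')
+            # e.g. '/tmp/acts_logs/MyTestClass/test_foo'
+            full_path = context.get_full_output_path()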
+ """
+
+ def __init__(self):
+ self._base_output_path_override = None
+ self._output_dir_override = None
+
+ def get_base_output_path(self):
+ """Gets the base output path for this context.
+
+ The base output path is interpreted as the reporting root for the
+ entire test runner.
+
+ If a path has been set by set_base_output_path, it is returned.
+ Otherwise, a default is determined by _get_default_base_output_path().
+
+ Returns:
+ The output path.
+ """
+ if self._base_output_path_override:
+ return self._base_output_path_override
+ return self._get_default_base_output_path()
+
+ def set_base_output_path(self, base_output_path):
+ """Sets the base output path for this context.
+
+ The base output path is interpreted as the reporting root for the
+ entire test runner. However, setting this value here will not affect
+ the test runner itself in any way, only the interpretation of this
+ context object.
+
+ Args:
+ base_output_path: The path to set.
+ """
+ self._base_output_path_override = base_output_path
+
+ def get_output_dir(self):
+ """Gets the output directory for this context.
+
+ This represents the directory for all outputs specific to this context.
+ This directory will be interpreted as being relative to the base output
+ path as determined by get_base_output_path.
+
+ Returns:
+ The output directory.
+ """
+ if self._output_dir_override:
+ return self._output_dir_override
+ return self._get_default_output_dir()
+
+ def set_output_dir(self, output_dir):
+ """Sets the output directory for this context.
+
+ This represents the directory for all outputs specific to this context.
+ This directory will be interpreted as being relative to the base output
+ path as determined by get_base_output_path.
+
+ Args:
+ output_dir: The directory to set.
+ """
+ self._output_dir_override = output_dir
+
+ def get_full_output_path(self):
+ """Gets the full output path for this context.
+
+ This is the absolute path to the context specific output directory
+ provided by get_output_dir().
+
+ Returns:
+ The output path.
+ """
+ return os.path.join(
+ self.get_base_output_path(),
+ self.get_output_dir())
+
+ @property
+ def identifier(self):
+ raise NotImplementedError()
+
+ def _get_default_base_output_path(self):
+ """Gets the default base output path.
+
+ This will attempt to use the ACTS logging path set up in the global
+ logger.
+
+ Returns:
+ The logging path.
+
+ Raises:
+ EnvironmentError: If the ACTS logger has not been initialized.
+ """
+ try:
+ return logging.log_path
+ except AttributeError as e:
+ raise EnvironmentError(
+ 'The ACTS logger has not been set up and'
+ ' "base_output_path" has not been set.') from e
+
+ def _get_default_output_dir(self):
+ """Gets the default output directory for this context."""
+ raise NotImplementedError()
+
+
+class TestClassContext(TestContext):
+ """A TestContext that represents a test class.
+
+ Attributes:
+ test_class: The test class instance that this context represents.
+ """
+
+ def __init__(self, test_class):
+ """Initializes a TestClassContext for the given test class.
+
+ Args:
+ test_class: A test class object. Must be an instance of the test
+ class, not the class object itself.
+ """
+ super().__init__()
+ self.test_class = test_class
+
+ @property
+ def test_class_name(self):
+ return self.test_class.__class__.__name__
+
+ @property
+ def identifier(self):
+ return self.test_class_name
+
+ def _get_default_output_dir(self):
+ """Gets the default output directory for this context.
+
+ For TestClassContexts, this will be the name of the test class. This is
+ in line with the ACTS logger itself.
+ """
+ return self.test_class_name
+
+
+class TestCaseContext(TestContext):
+ """A TestContext that represents a test case.
+
+ Attributes:
+        test_case: The method object of the test case.
+        test_case_name: The name of the test case method.
+ test_class: The test class instance enclosing the test case.
+ """
+
+ def __init__(self, test_class, test_case):
+ """Initializes a TestCaseContext for the given test case.
+
+ Args:
+ test_class: A test class object. Must be an instance of the test
+ class, not the class object itself.
+            test_case: The method object of the test case.
+ """
+ super().__init__()
+ self.test_class = test_class
+ self.test_case = test_case
+
+ @property
+ def test_case_name(self):
+ return self.test_case.__name__
+
+ @property
+ def test_class_name(self):
+ return self.test_class.__class__.__name__
+
+ @property
+ def identifier(self):
+ return '%s.%s' % (self.test_class_name, self.test_case_name)
+
+ def _get_default_output_dir(self):
+ """Gets the default output directory for this context.
+
+ For TestCaseContexts, this will be the name of the test class followed
+ by the name of the test case. This is in line with the ACTS logger
+ itself.
+ """
+ return os.path.join(
+ self.test_class_name,
+ self.test_case_name)
\ No newline at end of file
diff --git a/acts/framework/acts/metrics/core.py b/acts/framework/acts/metrics/core.py
new file mode 100644
index 0000000..52e7820
--- /dev/null
+++ b/acts/framework/acts/metrics/core.py
@@ -0,0 +1,236 @@
+#!/usr/bin/env python3
+#
+# Copyright 2018 - The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+
+from acts.libs.proto.proto_utils import parse_proto_to_ascii
+from acts.libs.proto.proto_utils import to_descriptor_proto
+from acts.utils import dump_string_to_file
+
+
+class ProtoMetric(object):
+ """A wrapper around a protobuf containing metrics data.
+
+ This is the primary data structure used as the output of MetricLoggers. It
+ is generally intended to be used as-is as a simple wrapper structure, but
+ can be extended to provide self-populating APIs.
+
+ Attributes:
+ name: The name of the metric.
+ data: The data of the metric.
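+
+    Example:
+        A usage sketch; 'my_metrics_pb2' stands in for a compiled proto
+        module and is hypothetical:
+
+            result = my_metrics_pb2.MyResult()
+            result.value = 42
+            metric = ProtoMetric(name='my_result', data=result)
+            ascii_text = metric.get_ascii()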
+ """
+
+ def __init__(self, name=None, data=None):
+ """Initializes a metric with given name and data.
+
+ Args:
+ name: The name of the metric. Used in identifiers such as filename.
+ data: The data of the metric. Should be a protobuf object.
+ """
+ self.name = name
+ if not data:
+ raise ValueError("Parameter 'data' cannot be None.")
+ self.data = data
+
+ def get_binary(self):
+ """Gets the binary representation of the protobuf data."""
+ return self.data.SerializeToString()
+
+ def get_ascii(self):
+ """Gets the ascii representation of the protobuf data."""
+ return parse_proto_to_ascii(self.data)
+
+ def get_descriptor_binary(self):
+ """Gets the binary representation of the descriptor protobuf."""
+ return to_descriptor_proto(self.data).SerializeToString()
+
+ def get_descriptor_ascii(self):
+ """Gets the ascii representation of the descriptor protobuf."""
+ return parse_proto_to_ascii(to_descriptor_proto(self.data))
+
+
+class MetricPublisher(object):
+ """A helper object for publishing metric data.
+
+ This is a base class intended to be implemented to accommodate specific
+ metric types and output formats.
+
+ Attributes:
+ context: The context in which the metrics are being published.
+ """
+
+ def __init__(self, context):
+ """Initializes a publisher for the give context.
+
+ Args:
+ context: The context in which the metrics are being published.
+ Typically matches that of a containing MetricLogger.
+ """
+ if not context:
+ raise ValueError("Parameter 'context' cannot be None.")
+ self.context = context
+
+ def publish(self, metrics):
+ """Publishes a list of metrics.
+
+ Args:
+            metrics: A list of metrics to publish. The required object type
+                of these metrics is up to the implementing class.
+ """
+ raise NotImplementedError()
+
+
+class ProtoMetricPublisher(MetricPublisher):
+ """A MetricPublisher that will publish ProtoMetrics to files.
+
+ Attributes:
+ publishes_binary: Whether to publish the binary proto.
+ publishes_ascii: Whether to publish the ascii proto.
+ publishes_descriptor_binary: Whether to publish the binary descriptor.
+ publishes_descriptor_ascii: Whether to publish the ascii descriptor.
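+
+    Example:
+        With the default settings, publishing a ProtoMetric named
+        'my_metric' writes the following files under
+        <full_output_path>/metrics/ (the name is illustrative):
+
+            my_metric.proto
+            my_metric.proto.bin
+            my_metric.proto.desc
+            my_metric.proto.desc.bin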
+ """
+
+ ASCII_EXTENSION = 'proto'
+ BINARY_EXTENSION = 'proto.bin'
+ ASCII_DESCRIPTOR_EXTENSION = 'proto.desc'
+ BINARY_DESCRIPTOR_EXTENSION = 'proto.desc.bin'
+
+ METRICS_DIR = 'metrics'
+
+ def __init__(self,
+ context,
+ publishes_binary=True,
+ publishes_ascii=True,
+ publishes_descriptor_binary=True,
+ publishes_descriptor_ascii=True):
+ """Initializes a ProtoMetricPublisher.
+
+ Args:
+ context: The context in which the metrics are being published.
+ publishes_binary: Whether to publish the binary proto.
+ publishes_ascii: Whether to publish the ascii proto.
+ publishes_descriptor_binary: Whether to publish the binary
+ descriptor.
+ publishes_descriptor_ascii: Whether to publish the ascii
+ descriptor.
+ """
+ super().__init__(context)
+ self.publishes_binary = publishes_binary
+ self.publishes_ascii = publishes_ascii
+ self.publishes_descriptor_binary = publishes_descriptor_binary
+ self.publishes_descriptor_ascii = publishes_descriptor_ascii
+
+ def get_output_path(self):
+ """Gets the output directory path of the metrics."""
+ return os.path.join(self.context.get_full_output_path(),
+ self.METRICS_DIR)
+
+ def publish(self, metrics):
+ """Publishes the given list of metrics.
+
+        Based on the publishes_* attributes of this class, this will publish
+ the varying data formats provided by the metric object. Data is written
+ to files on disk named according to the name of the metric.
+
+ Args:
+            metrics: The list of metrics to publish. Assumed to be a list of
+                ProtoMetric objects. A single ProtoMetric is also accepted.
+ """
+ if isinstance(metrics, list):
+ for metric in metrics:
+ self._publish_single(metric)
+ else:
+ self._publish_single(metrics)
+
+ def _publish_single(self, metric):
+ """Publishes a single metric.
+
+        Based on the publishes_* attributes of this class, this will publish
+ the varying data formats provided by the metric object. Data is written
+ to files on disk named according to the name of the metric.
+
+ Args:
+ metric: The metric to publish. Assumed to be a ProtoMetric object.
+ """
+ output_path = self.get_output_path()
+
+ os.makedirs(output_path, exist_ok=True)
+
+ if self.publishes_binary:
+ self.write_binary(metric, output_path)
+ if self.publishes_ascii:
+ self.write_ascii(metric, output_path)
+ if self.publishes_descriptor_binary:
+ self.write_descriptor_binary(metric, output_path)
+ if self.publishes_descriptor_ascii:
+ self.write_descriptor_ascii(metric, output_path)
+
+ def write_binary(self, metric, output_path):
+ """Writes the binary format of the protobuf to file.
+
+ Args:
+ metric: The metric to write.
+ output_path: The output directory path to write the file to.
+ """
+ filename = self._get_output_file(
+ output_path, metric.name, self.BINARY_EXTENSION)
+ dump_string_to_file(metric.get_binary(), filename, mode='wb')
+
+ def write_ascii(self, metric, output_path):
+ """Writes the ascii format of the protobuf to file.
+
+ Args:
+ metric: The metric to write.
+ output_path: The output directory path to write the file to.
+ """
+ filename = self._get_output_file(
+ output_path, metric.name, self.ASCII_EXTENSION)
+ dump_string_to_file(metric.get_ascii(), filename)
+
+ def write_descriptor_binary(self, metric, output_path):
+ """Writes the binary format of the protobuf descriptor to file.
+
+ Args:
+ metric: The metric to write.
+ output_path: The output directory path to write the file to.
+ """
+ filename = self._get_output_file(
+ output_path, metric.name, self.BINARY_DESCRIPTOR_EXTENSION)
+ dump_string_to_file(metric.get_descriptor_binary(), filename, mode='wb')
+
+ def write_descriptor_ascii(self, metric, output_path):
+ """Writes the ascii format of the protobuf descriptor to file.
+
+ Args:
+ metric: The metric to write.
+ output_path: The output directory path to write the file to.
+ """
+ filename = self._get_output_file(
+ output_path, metric.name, self.ASCII_DESCRIPTOR_EXTENSION)
+ dump_string_to_file(metric.get_descriptor_ascii(), filename)
+
+ def _get_output_file(self, output_path, filename, extension):
+ """Gets the full output file path.
+
+ Args:
+ output_path: The output directory path.
+ filename: The base filename of the file.
+ extension: The extension of the file, without the leading '.'
+
+ Returns:
+ The full file path.
+ """
+ return os.path.join(output_path, "%s.%s" % (filename, extension))
diff --git a/acts/framework/acts/metrics/logger.py b/acts/framework/acts/metrics/logger.py
new file mode 100644
index 0000000..dc2c9d9
--- /dev/null
+++ b/acts/framework/acts/metrics/logger.py
@@ -0,0 +1,352 @@
+#!/usr/bin/env python3
+#
+# Copyright 2018 - The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import inspect
+import os
+import tempfile
+
+from acts.event.decorators import subscribe
+from acts.event.event import TestCaseBeginEvent
+from acts.event.event import TestCaseEndEvent
+from acts.event.event import TestClassBeginEvent
+from acts.event.event import TestClassEndEvent
+from acts.event import subscription_bundle
+from acts.event import event_bus
+from acts.libs.proto.proto_utils import compile_import_proto
+from acts.metrics.context import get_context_for_event
+from acts.metrics.core import ProtoMetricPublisher
+
+
+class MetricLogger(object):
+ """The base class for a logger object that records metric data.
+
+ This is the central component to the ACTS metrics framework. Users should
+ extend this class with the functionality needed to log their specific
+ metric.
+
+ The public API for this class contains only a start() and end() method,
+ intended to bookend the logging process for a particular metric. The timing
+ of when those methods are called depends on how the logger is subscribed.
+ The canonical use for this class is to use the class methods to
+ automatically subscribe the logger to certain test events.
+
+ Example:
+        class MyTestClass(BaseTestClass):
+ def __init__(self):
+ self.my_metric_logger = MyMetricLogger.for_test_case()
+
+ This would subscribe the logger to test case begin and end events. For each
+ test case in MyTestClass, a new MyMetricLogger instance will be created,
+ and start() and end() will be called at the before and after the test case,
+ respectively.
+
+ The self.my_metric_logger object will be a proxy object that points to
+ whatever MyMetricLogger is being used in the current context. This means
+ that test code can access this logger without worrying about managing
+ separate instances for each test case.
+
+ Example:
+        class MyMetricLogger(MetricLogger):
+ def store_data(self, data):
+ # store data
+
+ def end(self, event):
+ # write out stored data
+
+        class MyTestClass(BaseTestClass):
+ def __init__(self):
+ self.my_metric_logger = MyMetricLogger.for_test_case()
+
+ def test_case_a(self):
+ # do some test stuff
+ self.my_metric_logger.store_data(data)
+ # more test stuff
+
+ def test_case_b(self):
+ # do some test stuff
+ self.my_metric_logger.store_data(data)
+ # more test stuff
+
+ In the above example, test_case_a and test_case_b both record data to
+ self.my_metric_logger. However, because the MyMetricLogger was subscribed
+ to test cases, the proxy object would point to a new instance for each
+ test case.
+
+    Attributes:
+        context: A TestContext object describing metadata about how the
+ logger is being run. For example, on a test case metric
+ logger, the context should contain the test class and test
+ case name.
+ publisher: A MetricPublisher object that provides an API for publishing
+ metric data, typically to a file.
+ """
+
+ @classmethod
+ def for_test_case(cls, *args, **kwargs):
+ """Registers the logger class for each test case.
+
+ Creates a proxy logger that will instantiate this method's logger class
+ for each test case. Any arguments passed to this method will be
+ forwarded to the underlying MetricLogger construction by the proxy.
+
+ Returns:
+ The proxy logger.
+ """
+ return TestCaseLoggerProxy(cls, args, kwargs)
+
+ @classmethod
+ def for_test_class(cls, *args, **kwargs):
+ """Registers the logger class for each test class.
+
+ Creates a proxy logger that will instantiate this method's logger class
+ for each test class. Any arguments passed to this method will be
+ forwarded to the underlying MetricLogger construction by the proxy.
+
+ Returns:
+ The proxy logger.
+ """
+ return TestClassLoggerProxy(cls, args, kwargs)
+
+ @classmethod
+ def _compile_proto(cls, proto_path, compiler_out=None):
+ """Compile and return a proto file into a module.
+
+ Args:
+ proto_path: the path to the proto file. Can be either relative to
+ the logger class file or absolute.
+ compiler_out: the directory in which to write the result of the
+                compilation.
+
+        Returns:
+            The compiled and imported proto module.
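+
+        Example:
+            Assuming a 'protos/my_metric.proto' file next to the logger
+            class (the path is hypothetical):
+
+                module = cls._compile_proto('protos/my_metric.proto')
+                result = module.MyMetricResult()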
+ """
+ if not compiler_out:
+ compiler_out = tempfile.mkdtemp()
+
+ if os.path.isabs(proto_path):
+ abs_proto_path = proto_path
+ else:
+ classfile = inspect.getfile(cls)
+ base_dir = os.path.dirname(os.path.realpath(classfile))
+ abs_proto_path = os.path.normpath(os.path.join(base_dir, proto_path))
+
+ return compile_import_proto(compiler_out, abs_proto_path)
+
+ def __init__(self, context=None, publisher=None, event=None):
+ """Initializes a MetricLogger.
+
+ If context or publisher are passed, they are set as attributes to the
+ logger. Otherwise, they will be initialized later by an event.
+
+ If event is passed, it is used immediately to populate the context and
+ publisher (unless they are explicitly passed as well).
+
+ Args:
+            context: the TestContext in which this logger has been created
+ publisher: the MetricPublisher to use
+ event: an event triggering the creation of this logger, used to
+ populate context and publisher
+ """
+ self.context = context
+ self.publisher = publisher
+ if event:
+ self._init_for_event(event)
+
+ def start(self, event):
+ """Start the logging process.
+
+ Args:
+ event: the event that is triggering this start
+ """
+ pass
+
+ def end(self, event):
+ """End the logging process.
+
+ Args:
+ event: the event that is triggering this start
+ """
+ pass
+
+ def _init_for_event(self, event):
+ """Populate unset attributes with default values."""
+ if not self.context:
+ self.context = self._get_default_context(event)
+ if not self.publisher:
+ self.publisher = self._get_default_publisher(event)
+
+ def _get_default_context(self, event):
+ """Get the default context for the given event."""
+ return get_context_for_event(event)
+
+ def _get_default_publisher(self, event):
+ """Get the default publisher for the given event."""
+ return ProtoMetricPublisher(self.context)
+
+
+class LoggerProxy(object):
+ """A proxy object to manage and forward calls to an underlying logger.
+
+ The proxy is intended to respond to certain framework events and
+ create/discard the underlying logger as appropriate. It should be treated
+ as an abstract class, with subclasses specifying what actions to be taken
+ based on certain events.
+
+ There is no global registry of proxies, so implementations should be
+ inherently self-managing. In particular, they should unregister any
+ subscriptions they have once they are finished.
+
+ Attributes:
+ _logger_cls: the class object for the underlying logger
+ _logger_args: the position args for the logger constructor
+ _logger_kwargs: the keyword args for the logger constructor. Note that
+                        the triggering event is always passed as a keyword
+                        arg.
+ __initialized: Whether the class attributes have been initialized. Used
+ by __getattr__ and __setattr__ to prevent infinite
+ recursion.
+ """
+
+ def __init__(self, logger_cls, logger_args, logger_kwargs):
+ """Constructs a proxy for the given logger class.
+
+ The logger class will later be constructed using the triggering event,
+ along with the args and kwargs passed here.
+
+ This will also register any methods decorated with event subscriptions
+ that may have been defined in a subclass. It is the subclass's
+ responsibility to unregister them once the logger is finished.
+
+ Args:
+ logger_cls: The class object for the underlying logger.
+ logger_args: The position args for the logger constructor.
+ logger_kwargs: The keyword args for the logger constructor.
+ """
+ self._logger_cls = logger_cls
+ self._logger_args = logger_args
+ self._logger_kwargs = logger_kwargs
+ self._logger = None
+ bundle = subscription_bundle.create_from_instance(self)
+ bundle.register()
+ self.__initialized = True
+
+ def _setup_proxy(self, event):
+ """Creates and starts the underlying logger based on the triggering event.
+
+ Args:
+ event: The triggering event.
+ """
+        self._logger = self._logger_cls(
+            event=event, *self._logger_args, **self._logger_kwargs)
+ self._logger.start(event)
+
+ def _teardown_proxy(self, event):
+ """Ends and removes the underlying logger.
+
+ If the underlying logger does not exist, no action is taken. We avoid
+ raising an error in this case with the implicit assumption that
+ _setup_proxy would have raised one already if logger creation failed.
+
+ Args:
+ event: The triggering event.
+ """
+ if self._logger:
+ self._logger.end(event)
+ self._logger = None
+
+ def __getattr__(self, attr):
+ """Forwards attribute access to the underlying logger.
+
+ Args:
+ attr: The name of the attribute to retrieve.
+
+ Returns:
+ The attribute with name attr from the underlying logger.
+
+ Throws:
+ ValueError: If the underlying logger is not set.
+ """
+ logger = getattr(self, '_logger', None)
+ if not logger:
+ raise ValueError('Underlying logger is not initialized.')
+ return getattr(logger, attr)
+
+ def __setattr__(self, attr, value):
+ """Forwards attribute access to the underlying logger.
+
+ Args:
+ attr: The name of the attribute to set.
+ value: The value of the attribute to set.
+
+ Throws:
+ ValueError: If the underlying logger is not set.
+ """
+ if not self.__dict__.get('_LoggerProxy__initialized', False):
+ return super().__setattr__(attr, value)
+ if attr == '_logger':
+ return super().__setattr__(attr, value)
+ logger = getattr(self, '_logger', None)
+ if not logger:
+ raise ValueError('Underlying logger is not initialized.')
+ return setattr(logger, attr, value)
+
+
+class TestCaseLoggerProxy(LoggerProxy):
+ """A LoggerProxy implementation to subscribe to test case events.
+
+ The underlying logger will be created and destroyed on test case begin and
+ end events respectively. The proxy will unregister itself from the event
+ bus at the end of the test class.
+ """
+
+ def __init__(self, logger_cls, logger_args, logger_kwargs):
+ super().__init__(logger_cls, logger_args, logger_kwargs)
+
+ @subscribe(TestCaseBeginEvent)
+ def __on_test_case_begin(self, event):
+ """Sets up the proxy for a test case."""
+ self._setup_proxy(event)
+
+ @subscribe(TestCaseEndEvent)
+ def __on_test_case_end(self, event):
+ """Tears down the proxy for a test case."""
+ self._teardown_proxy(event)
+
+ @subscribe(TestClassEndEvent)
+ def __on_test_class_end(self, event):
+ """Cleans up the subscriptions at the end of a class."""
+ event_bus.unregister(self.__on_test_case_begin)
+ event_bus.unregister(self.__on_test_case_end)
+ event_bus.unregister(self.__on_test_class_end)
+
+
+class TestClassLoggerProxy(LoggerProxy):
+ """A LoggerProxy implementation to subscribe to test class events.
+
+ The underlying logger will be created and destroyed on test class begin and
+ end events respectively. The proxy will also unregister itself from the
+ event bus at the end of the test class.
+ """
+ def __init__(self, logger_cls, logger_args, logger_kwargs):
+ super().__init__(logger_cls, logger_args, logger_kwargs)
+
+ @subscribe(TestClassBeginEvent)
+ def __on_test_class_begin(self, event):
+ """Sets up the proxy for a test class."""
+ self._setup_proxy(event)
+
+ @subscribe(TestClassEndEvent)
+ def __on_test_class_end(self, event):
+ """Tears down the proxy for a test class and removes subscriptions."""
+ self._teardown_proxy(event)
+ event_bus.unregister(self.__on_test_class_begin)
+ event_bus.unregister(self.__on_test_class_end)
diff --git a/acts/framework/acts/metrics/loggers/__init__.py b/acts/framework/acts/metrics/loggers/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/acts/framework/acts/metrics/loggers/__init__.py
diff --git a/acts/framework/acts/metrics/loggers/blackbox.py b/acts/framework/acts/metrics/loggers/blackbox.py
new file mode 100644
index 0000000..70b2990
--- /dev/null
+++ b/acts/framework/acts/metrics/loggers/blackbox.py
@@ -0,0 +1,102 @@
+#!/usr/bin/env python3
+#
+# Copyright 2016 - The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from acts.metrics.core import ProtoMetric
+from acts.metrics.logger import MetricLogger
+
+
+class BlackboxMetricLogger(MetricLogger):
+ """A MetricLogger for logging and publishing Blackbox metrics.
+
+ The logger will publish an ActsBlackboxMetricResult message, containing
+ data intended to be uploaded to Blackbox. The message itself contains only
+ minimal information specific to the metric, with the intention being that
+ all other metadata is extracted from the test_run_summary.json.
+
+ This logger will extract an attribute from the test class as the metric
+ result. The metric key will be either the context's identifier or a custom
+ value assigned to this class.
+
+ Attributes:
+ proto_module: The proto module for ActsBlackboxMetricResult.
+ metric_name: The name of the metric, used to determine output filename.
+ result_attr: The name of the attribute of the test class where the
+ result is stored.
+ metric_key: The metric key to use. If unset, the logger will use the
+ context's identifier.
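+
+    Example:
+        A usage sketch ('MyPowerTest' and its value are hypothetical):
+
+            class MyPowerTest(base_test.BaseTestClass):
+                def __init__(self, controllers):
+                    super().__init__(controllers)
+                    BlackboxMetricLogger.for_test_case(
+                        metric_name='avg_current')
+
+                def test_power(self):
+                    self.result = 32.5  # published as metric_value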
+ """
+
+ PROTO_FILE = 'protos/acts_blackbox.proto'
+
+ def __init__(self,
+ metric_name,
+ result_attr='result',
+ metric_key=None,
+ event=None):
+ """Initializes a logger for Blackbox metrics.
+
+ Args:
+ metric_name: The name of the metric.
+ result_attr: The name of the attribute of the test class where the
+ result is stored.
+ metric_key: The metric key to use. If unset, the logger will use
+ the context's identifier.
+ event: The event triggering the creation of this logger.
+ """
+ super().__init__(event=event)
+ self.proto_module = self._compile_proto(self.PROTO_FILE)
+ if not metric_name:
+ raise ValueError("metric_name must be supplied.")
+ self.metric_name = metric_name
+ self.result_attr = result_attr
+ self.metric_key = metric_key
+
+ def _get_metric_value(self):
+ """Extracts the metric value from the current context."""
+ return getattr(self.context.test_class, self.result_attr)
+
+ def _get_metric_key(self):
+ """Gets the metric key to use.
+
+ If the metric_key is explicitly set, returns that value. Otherwise,
+ extracts an identifier from the context.
+ """
+ if self.metric_key:
+ return self.metric_key
+ return self.context.identifier
+
+ def _get_file_name(self):
+ """Gets the base file name to publish to."""
+ return 'blackbox_%s' % self.metric_name
+
+ def end(self, event):
+ """Creates and publishes a ProtoMetric with blackbox data.
+
+ Builds an ActsBlackboxMetricResult message based on the result
+ generated, and passes it off to the publisher.
+
+ Args:
+ event: The triggering event.
+ """
+ result = self.proto_module.ActsBlackboxMetricResult()
+ result.test_identifier = self.context.identifier
+ result.metric_key = self._get_metric_key()
+ result.metric_value = self._get_metric_value()
+
+ metric = ProtoMetric(
+ name=self._get_file_name(),
+ data=result)
+ return self.publisher.publish(metric)
diff --git a/acts/framework/acts/metrics/loggers/protos/acts_blackbox.proto b/acts/framework/acts/metrics/loggers/protos/acts_blackbox.proto
new file mode 100644
index 0000000..3062d94
--- /dev/null
+++ b/acts/framework/acts/metrics/loggers/protos/acts_blackbox.proto
@@ -0,0 +1,31 @@
+/*
+ * Copyright (C) 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+syntax = "proto2";
+
+package acts.metrics.blackbox;
+
+option java_package = "com.android.acts.metrics.blackbox";
+
+message ActsBlackboxMetricResult {
+
+ optional string test_identifier = 1;
+
+ required string metric_key = 3;
+
+ required double metric_value = 4;
+
+}
\ No newline at end of file
diff --git a/acts/framework/acts/test_utils/bt/bt_contacts_utils.py b/acts/framework/acts/test_utils/bt/bt_contacts_utils.py
index 466a345..41de7c2 100644
--- a/acts/framework/acts/test_utils/bt/bt_contacts_utils.py
+++ b/acts/framework/acts/test_utils/bt/bt_contacts_utils.py
@@ -425,3 +425,4 @@
log.info(pce_call_log)
return call_logs_match
+
diff --git a/acts/framework/acts/test_utils/bt/bt_test_utils.py b/acts/framework/acts/test_utils/bt/bt_test_utils.py
index 8bf6279..0ef0243 100644
--- a/acts/framework/acts/test_utils/bt/bt_test_utils.py
+++ b/acts/framework/acts/test_utils/bt/bt_test_utils.py
@@ -422,7 +422,7 @@
regex = "(" + adv_succ.format(
advertise_callback) + "|" + adv_fail.format(
- advertise_callback) + ")"
+ advertise_callback) + ")"
# wait for either success or failure event
evt = android_device.ed.pop_events(regex, bt_default_timeout,
small_timeout)
@@ -475,7 +475,7 @@
else:
max_advertisements = determine_max_advertisements(a)
max_tries = 3
- #Retry to calculate max advertisements
+ # Retry to calculate max advertisements
while max_advertisements == -1 and max_tries > 0:
a.log.info(
"Attempts left to determine max advertisements: {}".format(
@@ -498,7 +498,7 @@
def generate_id_by_size(
size,
chars=(
- string.ascii_lowercase + string.ascii_uppercase + string.digits)):
+ string.ascii_lowercase + string.ascii_uppercase + string.digits)):
"""Generate random ascii characters of input size and input char types
Args:
@@ -538,7 +538,7 @@
except Exception as err:
adv_android_device.log.debug(
"Failed to stop LE advertisement... reseting Bluetooth. Error {}".
- format(err))
+ format(err))
reset_bluetooth([adv_android_device])
@@ -903,7 +903,7 @@
paired = False
for paired_device in pri_ad.droid.bluetoothGetBondedDevices():
if paired_device['address'] == \
- sec_ad.droid.bluetoothGetLocalAddress():
+ sec_ad.droid.bluetoothGetLocalAddress():
paired = True
break
@@ -961,7 +961,7 @@
device_addr = profile_event['data']['addr']
if state == bt_profile_states['connected'] and \
- device_addr == sec_ad.droid.bluetoothGetLocalAddress():
+ device_addr == sec_ad.droid.bluetoothGetLocalAddress():
profile_connected.add(profile)
pri_ad.log.info(
"Profiles connected until now {}".format(profile_connected))
@@ -1021,7 +1021,7 @@
device_addr = profile_event['data']['addr']
if state == bt_profile_states['disconnected'] and \
- device_addr == sec_ad.droid.bluetoothGetLocalAddress():
+ device_addr == sec_ad.droid.bluetoothGetLocalAddress():
profile_disconnected.add(profile)
pri_ad.log.info(
"Profiles disconnected so far {}".format(profile_disconnected))
@@ -1253,6 +1253,11 @@
if not toggle_airplane_mode_by_adb(log, panu_dut, True):
panu_dut.log.error("Failed to toggle airplane mode on")
return False
+ if not toggle_airplane_mode_by_adb(log, panu_dut, False):
+ pan_dut.log.error("Failed to toggle airplane mode off")
+ return False
+ pan_dut.droid.bluetoothStartConnectionStateChangeMonitor("")
+ panu_dut.droid.bluetoothStartConnectionStateChangeMonitor("")
if not bluetooth_enabled_check(panu_dut):
return False
if not bluetooth_enabled_check(pan_dut):
@@ -1388,6 +1393,26 @@
hid_keyboard_report("00"))
+def is_a2dp_connected(sink, source):
+ """
+    Convenience function to check whether the two devices are connected over
+    A2DP.
+ Args:
+ sink: Audio Sink
+ source: Audio Source
+ Returns:
+        True if connected, False otherwise.
+ """
+
+ devices = sink.droid.bluetoothA2dpSinkGetConnectedDevices()
+ for device in devices:
+ sink.log.info("A2dp Connected device {}".format(device["name"]))
+ if (device["address"] == source.droid.bluetoothGetLocalAddress()):
+ return True
+ return False
+
+
def get_device_selector_dictionary(android_device_list):
"""Create a dictionary of Bluetooth features vs Android devices.
diff --git a/acts/framework/acts/test_utils/car/car_media_utils.py b/acts/framework/acts/test_utils/car/car_media_utils.py
index 6c7bc3d..3fb1e6c 100644
--- a/acts/framework/acts/test_utils/car/car_media_utils.py
+++ b/acts/framework/acts/test_utils/car/car_media_utils.py
@@ -84,26 +84,6 @@
return verifyEventReceived(log, toDevice, expctEvent, timeout)
-def is_a2dp_connected(log, sink, source):
- """
- Convenience Function to see if the 2 devices are connected on
- A2dp.
- ToDo: Move to bt_test_utils if used in more places.
- Args:
- sink: Audio Sink
- source: Audio Source
- Returns:
- True if Connected
- False if Not connected
- """
- devices = sink.droid.bluetoothA2dpSinkGetConnectedDevices()
- for device in devices:
- log.info("A2dp Connected device {}".format(device["name"]))
- if (device["address"] == source.droid.bluetoothGetLocalAddress()):
- return True
- return False
-
-
def log_metadata(log, metadata):
"""
Log the Metadata to the console.
@@ -154,7 +134,7 @@
return False
if not (metadata1[MEDIA_KEY_NUM_TRACKS] == metadata2[MEDIA_KEY_NUM_TRACKS]
- ):
+ ):
log.info("Song Num Tracks do not match")
return False
diff --git a/acts/framework/acts/test_utils/power/IperfHelper.py b/acts/framework/acts/test_utils/power/IperfHelper.py
new file mode 100644
index 0000000..c65db1e
--- /dev/null
+++ b/acts/framework/acts/test_utils/power/IperfHelper.py
@@ -0,0 +1,101 @@
+#!/usr/bin/env python3.4
+#
+# Copyright 2018 - The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the 'License');
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an 'AS IS' BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+import math
+
+import acts.controllers.iperf_server as ipf
+
+
+class IperfHelper(object):
+ """ Helps with iperf config and processing the results
+
+ This class can be used to process the results of multiple iperf servers
+ (for example, dual traffic scenarios). It also helps in setting the
+ correct arguments for when using the phone as an iperf server
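+
+    Example:
+        A typical config dictionary (all values are illustrative):
+
+            config = {
+                'traffic_type': 'UDP',
+                'traffic_direction': 'DL',
+                'duration': 60,
+                'port': 5201,
+                'server_idx': 0,
+                'bandwidth': 10,
+            }
+            iph = IperfHelper(config)
+            # iph.iperf_args can then be passed to the iperf client on the
+            # phone.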
+ """
+ IPERF_CLIENT_RESULT_FILE_LOC_PHONE = '/sdcard/Download/'
+
+ def __init__(self, config):
+ self.traffic_type = config['traffic_type']
+ self.traffic_direction = config['traffic_direction']
+ self.duration = config['duration']
+ self.port = config['port']
+ self.server_idx = config['server_idx']
+ self.use_client_output = False
+ if 'bandwidth' in config:
+ self.bandwidth = config['bandwidth']
+ else:
+ self.bandwidth = None
+ if 'start_meas_time' in config:
+ self.start_meas_time = config['start_meas_time']
+ else:
+ self.start_meas_time = 0
+
+ iperf_args = '-i 1 -t {} -p {} -J'.format(self.duration, self.port)
+
+ if self.traffic_type == "UDP":
+ iperf_args = iperf_args + ' -u'
+ if self.traffic_direction == "DL":
+ iperf_args = iperf_args + ' -R'
+ self.use_client_output = True
+ # Set bandwidth in Mbit/s
+ if self.bandwidth is not None:
+ iperf_args = iperf_args + ' -b {}M'.format(self.bandwidth)
+
+        # Redirect the client side output to a file saved on the phone
+ self.results_filename_phone = self.IPERF_CLIENT_RESULT_FILE_LOC_PHONE \
+ + 'iperf_client_port_{}_{}.log'.format( \
+ self.port, self.traffic_direction)
+ iperf_args = iperf_args + ' > %s' % self.results_filename_phone
+
+ self.iperf_args = iperf_args
+
+ def process_iperf_results(self, dut, log, iperf_servers, test_name):
+ """Gets the iperf results from the phone and computes the average rate
+
+ Returns:
+ throughput: the average throughput (Mbit/s).
+ """
+        # Pull the iperf client results from the phone
+ RESULTS_DESTINATION = os.path.join(
+ iperf_servers[self.server_idx].log_path,
+ 'iperf_client_output_{}.log'.format(test_name))
+
+ PULL_FILE = '{} {}'.format(self.results_filename_phone,
+ RESULTS_DESTINATION)
+ dut.adb.pull(PULL_FILE)
+
+ # Calculate the average throughput
+ if self.use_client_output:
+ iperf_file = RESULTS_DESTINATION
+ else:
+ iperf_file = iperf_servers[self.server_idx].log_files[-1]
+ try:
+ iperf_result = ipf.IPerfResult(iperf_file)
+
+ # Compute the throughput in Mbit/s
+ throughput = (math.fsum(
+ iperf_result.instantaneous_rates[self.start_meas_time:-1]
+ ) / len(iperf_result.instantaneous_rates[self.start_meas_time:-1])
+ ) * 8 * (1.024**2)
+
+ log.info('The average throughput is {}'.format(throughput))
+
+        except Exception:
+            log.warning('Cannot get iperf result. Setting throughput to 0.')
+ throughput = 0
+ return throughput
diff --git a/acts/framework/acts/test_utils/power/PowerBaseTest.py b/acts/framework/acts/test_utils/power/PowerBaseTest.py
index 78c5090..59aab6e 100644
--- a/acts/framework/acts/test_utils/power/PowerBaseTest.py
+++ b/acts/framework/acts/test_utils/power/PowerBaseTest.py
@@ -24,12 +24,14 @@
from acts import base_test
from acts import utils
from acts.controllers import monsoon
+from acts.metrics.loggers.blackbox import BlackboxMetricLogger
from acts.test_utils.wifi import wifi_test_utils as wutils
from acts.test_utils.wifi import wifi_power_test_utils as wputils
SETTINGS_PAGE = 'am start -n com.android.settings/.Settings'
SCROLL_BOTTOM = 'input swipe 0 2000 0 0'
UNLOCK_SCREEN = 'input keyevent 82'
+SET_BATTERY_LEVEL = 'dumpsys battery set level 100'
SCREENON_USB_DISABLE = 'dumpsys battery unplug'
RESET_BATTERY_STATS = 'dumpsys batterystats --reset'
AOD_OFF = 'settings put secure doze_always_on 0'
@@ -93,6 +95,8 @@
def __init__(self, controllers):
base_test.BaseTestClass.__init__(self, controllers)
+ BlackboxMetricLogger.for_test_case(metric_name='avg_current',
+ result_attr='test_result')
def setup_class(self):
@@ -252,6 +256,7 @@
self.dut.adb.shell(ASSIST_GESTURE)
self.dut.adb.shell(ASSIST_GESTURE_ALERT)
self.dut.adb.shell(ASSIST_GESTURE_WAKE)
+ self.dut.adb.shell(SET_BATTERY_LEVEL)
self.dut.adb.shell(SCREENON_USB_DISABLE)
self.dut.adb.shell(UNLOCK_SCREEN)
self.dut.adb.shell(SETTINGS_PAGE)
@@ -494,3 +499,4 @@
self.dut.reboot()
self.dut.adb.root()
self.dut.adb.remount()
+ self.dut.adb.shell(SET_BATTERY_LEVEL)
diff --git a/acts/framework/acts/test_utils/power/PowerCellularLabBaseTest.py b/acts/framework/acts/test_utils/power/PowerCellularLabBaseTest.py
new file mode 100644
index 0000000..bc7f46f
--- /dev/null
+++ b/acts/framework/acts/test_utils/power/PowerCellularLabBaseTest.py
@@ -0,0 +1,212 @@
+#!/usr/bin/env python3.4
+#
+# Copyright 2018 - The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the 'License');
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an 'AS IS' BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import acts.test_utils.power.PowerBaseTest as PBT
+from acts.controllers.anritsu_lib._anritsu_utils import AnritsuError
+from acts.controllers.anritsu_lib.md8475a import MD8475A
+from acts.test_utils.power.tel_simulations.GsmSimulation import GsmSimulation
+from acts.test_utils.power.tel_simulations.LteSimulation import LteSimulation
+from acts.test_utils.power.tel_simulations.UmtsSimulation import UmtsSimulation
+
+
+class PowerCellularLabBaseTest(PBT.PowerBaseTest):
+ """ Base class for Cellular power related tests.
+
+    Inherits from PowerBaseTest so it has methods to collect power
+    measurements. On top of that, it provides functions to set up and
+    control the Anritsu simulation.
+
+ """
+
+ # List of test name keywords that indicate the RAT to be used
+
+ PARAM_SIM_TYPE_LTE = "lte"
+ PARAM_SIM_TYPE_UMTS = "umts"
+ PARAM_SIM_TYPE_GSM = "gsm"
+
+ def __init__(self, controllers):
+ """ Class initialization.
+
+ Sets class attributes to None.
+ """
+
+ super().__init__(controllers)
+
+ # Tests are sorted alphabetically so all the tests in the same band are grouped together
+ self.tests = sorted(self.tests)
+
+ self.simulation = None
+ self.anritsu = None
+
+ def setup_class(self):
+ """ Executed before any test case is started.
+
+ Sets the device to rockbottom and connects to the anritsu callbox.
+
+ Returns:
+ False if connecting to the callbox fails.
+ """
+
+ super().setup_class()
+ if hasattr(self, 'network_file'):
+ self.networks = self.unpack_custom_file(self.network_file, False)
+ self.main_network = self.networks['main_network']
+ self.aux_network = self.networks['aux_network']
+ if hasattr(self, 'packet_senders'):
+ self.pkt_sender = self.packet_senders[0]
+
+ # Set DUT to rockbottom
+ self.dut_rockbottom()
+
+ # Establish connection to Anritsu Callbox
+ return self.connect_to_anritsu()
+
+ def connect_to_anritsu(self):
+ """ Connects to Anritsu Callbox and gets handle object.
+
+ Returns:
+ False if a connection with the callbox could not be started
+ """
+
+ try:
+ self.anritsu = MD8475A(self.md8475a_ip_address, self.log,
+ self.wlan_option)
+ return True
+ except AnritsuError:
+ self.log.error('Error in connecting to Anritsu Callbox')
+ return False
+
+ def setup_test(self):
+ """ Executed before every test case.
+
+ Parses parameters from the test name and sets a simulation up according to those values.
+ Also takes care of attaching the phone to the base station. Because starting new simulations
+ and recalibrating takes some time, the same simulation object is kept between tests and is only
+ destroyed and re instantiated in case the RAT is different from the previous tests.
+
+ Children classes need to call the parent method first. This method will create the list self.parameters
+ with the keywords separated by underscores in the test name and will remove the ones that were consumed
+ for the simulation config. The setup_test methods in the children classes can then consume the remaining
+ values.
+ """
+
+ # Get list of parameters from the test name
+ self.parameters = self.current_test_name.split('_')
+
+ # Remove the 'test' keyword
+ self.parameters.remove('test')
+
+ # Changing cell parameters requires the phone to be detached
+ if self.simulation:
+ self.simulation.stop()
+
+ # Decide what type of simulation and instantiate it if needed
+ if self.consume_parameter(self.PARAM_SIM_TYPE_LTE):
+ self.init_simulation(self.PARAM_SIM_TYPE_LTE)
+ elif self.consume_parameter(self.PARAM_SIM_TYPE_UMTS):
+ self.init_simulation(self.PARAM_SIM_TYPE_UMTS)
+ elif self.consume_parameter(self.PARAM_SIM_TYPE_GSM):
+ self.init_simulation(self.PARAM_SIM_TYPE_GSM)
+ else:
+ self.log.error("Simulation type needs to be indicated in the test name.")
+ return False
+
+ # Parse simulation parameters
+ if not self.simulation.parse_parameters(self.parameters):
+ return False
+
+ # Attach the phone to the basestation
+ self.simulation.start()
+
+ # Make the device go to sleep
+ self.dut.droid.goToSleepNow()
+
+ return True
+
+ def consume_parameter(self, parameter_name, num_values=0):
+ """ Parses a parameter from the test name.
+
+        Allows the test to get parameters from its name. Deletes parameters
+        from the list after consuming them to ensure they are not used twice.
+
+ Args:
+ parameter_name: keyword to look up in the test name
+            num_values: number of arguments following the parameter name in
+                the test name
+        Returns:
+            A list containing the parameter name and the following num_values
+            arguments.
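+
+        Example:
+            For a hypothetical test named 'test_lte_band_13_pul_low':
+
+                self.consume_parameter('band', num_values=1)
+                # -> ['band', '13'], and both entries are removed from
+                #    self.parameters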
+ """
+
+ try:
+ i = self.parameters.index(parameter_name)
+ except ValueError:
+ # parameter_name is not set
+ return []
+
+ return_list = []
+
+ try:
+ for j in range(num_values+1):
+ return_list.append(self.parameters.pop(i))
+ except IndexError:
+ self.log.error("Parameter {} has to be followed by {} values.".format(parameter_name, num_values))
+ raise ValueError()
+
+ return return_list
+
+ def teardown_class(self):
+ """Clean up the test class after tests finish running.
+
+ Stop the simulation and then disconnect from the Anritsu Callbox.
+
+ """
+ super().teardown_class()
+
+ if self.anritsu:
+ self.anritsu.stop_simulation()
+ self.anritsu.disconnect()
+
+ def init_simulation(self, sim_type):
+ """ Starts a new simulation only if needed.
+
+ Only starts a new simulation if type is different from the one running before.
+
+ Args:
+            sim_type: defines the type of simulation to be started.
+ """
+
+ if sim_type == self.PARAM_SIM_TYPE_LTE:
+
+ if self.simulation and type(self.simulation) is LteSimulation:
+ # The simulation object we already have is enough.
+ return
+
+ # Instantiate a new simulation
+ self.simulation = LteSimulation(self.anritsu, self.log, self.dut)
+
+ elif sim_type == self.PARAM_SIM_TYPE_UMTS:
+
+ if self.simulation and type(self.simulation) is UmtsSimulation:
+ return
+
+ self.simulation = UmtsSimulation(self.anritsu, self.log, self.dut)
+
+ elif sim_type == self.PARAM_SIM_TYPE_GSM:
+
+ if self.simulation and type(self.simulation) is GsmSimulation:
+ return
+
+ self.simulation = GsmSimulation(self.anritsu, self.log, self.dut)
+
diff --git a/acts/framework/acts/test_utils/power/tel_simulations/BaseSimulation.py b/acts/framework/acts/test_utils/power/tel_simulations/BaseSimulation.py
new file mode 100644
index 0000000..01491e3
--- /dev/null
+++ b/acts/framework/acts/test_utils/power/tel_simulations/BaseSimulation.py
@@ -0,0 +1,444 @@
+#!/usr/bin/env python3.4
+#
+# Copyright 2018 - The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the 'License');
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an 'AS IS' BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import time
+from enum import Enum
+
+import numpy as np
+
+from acts.controllers.anritsu_lib._anritsu_utils import AnritsuError
+from acts.controllers.anritsu_lib.md8475a import BtsNumber
+from acts.test_utils.tel.tel_test_utils import toggle_airplane_mode
+from acts.test_utils.tel.tel_test_utils import get_telephony_signal_strength, set_preferred_apn_by_adb
+
+
+class BaseSimulation(object):
+ """ Base class for an Anritsu Simulation abstraction.
+
+ Classes that inherit from this base class implement different simulation
+ setups. The base class contains methods that are common to all simulation
+ configurations.
+
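+    Example:
+        A typical flow driven by a test class (names are illustrative):
+
+            simulation = LteSimulation(self.anritsu, self.log, self.dut)
+            simulation.parse_parameters(['band', '13', 'pul', 'low'])
+            simulation.start()  # attach the UE and apply signal levels
+            # ... run the measurement ...
+            simulation.stop()
+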
+ """
+
+ NUM_UPLINK_CAL_READS = 3
+ NUM_DOWNLINK_CAL_READS = 5
+ DOWNLINK_CAL_TARGET_POWER_DBM = -15
+ MAX_BTS_INPUT_POWER_DBM = 30
+ MAX_PHONE_OUTPUT_POWER_DBM = 23
+
+ # Time in seconds to wait for the phone to settle
+ # after attaching to the base station.
+ SETTLING_TIME = 10
+
+ def __init__(self, anritsu, log, dut):
+ """ Initializes the Simulation object.
+
+ Keeps a reference to the callbox, log and dut handlers and
+ initializes the class attributes.
+
+ Args:
+ anritsu: the Anritsu callbox controller
+ log: a logger handle
+ dut: the android device handler
+ """
+
+ self.anritsu = anritsu
+ self.log = log
+ self.dut = dut
+
+ # Gets BTS1 since this sim only has 1 BTS
+ self.bts1 = self.anritsu.get_BTS(BtsNumber.BTS1)
+
+ # Path loss measured during calibration
+ self.dl_path_loss = None
+ self.ul_path_loss = None
+
+ # Target signal levels obtained during configuration
+ self.sim_dl_power = None
+ self.sim_ul_power = None
+
+ def start(self):
+ """ Start simulation and attach the DUT to the basestation
+
+ Starts the simulation in the Anritsu Callbox and waits for the
+ UE to attach.
+
+ """
+
+ # Start simulation if it wasn't started
+ self.anritsu.start_simulation()
+
+ # Turn on airplane mode
+ toggle_airplane_mode(self.log, self.dut, True)
+
+ # Provide a good signal power for the phone to attach easily
+ self.bts1.input_level = -10
+ self.bts1.output_level = -30
+
+ # Turn off airplane mode
+ toggle_airplane_mode(self.log, self.dut, False)
+
+ # Wait until the phone is attached
+ self.anritsu.wait_for_registration_state()
+ time.sleep(self.SETTLING_TIME)
+ self.log.info("UE attached to the callbox.")
+
+ # Set signal levels obtained from the test parameters
+ if self.sim_dl_power:
+ self.set_downlink_rx_power(self.sim_dl_power)
+ if self.sim_ul_power:
+ self.set_uplink_tx_power(self.sim_ul_power)
+
+ def stop(self):
+ """ Detach phone from the basestation by stopping the simulation.
+
+ Send stop command to anritsu and turn on airplane mode.
+
+ """
+
+ self.anritsu.stop_simulation()
+
+ toggle_airplane_mode(self.log, self.dut, True)
+
+ def parse_parameters(self, parameters):
+ """ Configures simulation using a list of parameters.
+
+        Consumes parameters from a list. Child classes need to call this
+        method first.
+
+ Args:
+ parameters: list of parameters
+ Returns:
+ False if there was an error while parsing parameters
+ """
+
+ return True
+
+ def consume_parameter(self, parameters, parameter_name, num_values=0):
+ """ Parses a parameter from a list.
+
+        Allows the simulation to get parameters from a list. Deletes
+        parameters from the list after consuming them to ensure they are not
+        used twice.
+
+ Args:
+ parameters: list of parameters
+ parameter_name: keyword to look up in the list
+            num_values: number of arguments following the parameter name in
+                the list
+        Returns:
+            A list containing the parameter name and the following num_values
+            arguments.
+ """
+
+ try:
+ i = parameters.index(parameter_name)
+ except ValueError:
+ # parameter_name is not set
+ return []
+
+ return_list = []
+
+ try:
+ for j in range(num_values+1):
+ return_list.append(parameters.pop(i))
+ except IndexError:
+ self.log.error("Parameter {} has to be followed by {} values.".format(parameter_name, num_values))
+ raise ValueError()
+
+ return return_list
+
+ def set_downlink_rx_power(self, signal_level):
+ """ Sets downlink rx power using calibration if available
+
+ Args:
+            signal_level: desired downlink received power; can be either an
+                Enum member holding the power value, an int or a float
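+
+        Example:
+            With a measured dl_path_loss of 20 dB (a hypothetical value),
+            requesting a signal_level of -85 dBm sets the callbox
+            output_level to -65 dBm.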
+ """
+
+        # Obtain the power value if signal_level is an Enum member
+ if isinstance(signal_level, Enum):
+ power = signal_level.value
+ else:
+ power = signal_level
+
+        # Try to use the measured path loss value. If it was not set, adding
+        # None will raise a TypeError.
+        try:
+            calibrated_power = round(power + self.dl_path_loss)
+            self.log.info(
+                "Requested DL Rx power of {} dBm, setting callbox Tx power "
+                "at {} dBm".format(power, calibrated_power))
+            self.bts1.output_level = calibrated_power
+            # Power has to be an integer, so calibration won't be exact.
+            # Inform the actual received power after rounding.
+            self.log.info("Downlink received power is {} dBm".format(
+                calibrated_power - self.dl_path_loss))
+        except TypeError:
+            self.bts1.output_level = round(power)
+            self.log.info(
+                "Downlink received power set to {} dBm (link is "
+                "uncalibrated).".format(round(power)))
+
+ def set_uplink_tx_power(self, signal_level):
+ """ Sets uplink tx power using calibration if available
+
+ Args:
+            signal_level: desired uplink transmitted power; can be either an
+                Enum member holding the power value, an int or a float
+ """
+
+        # Obtain the power value if signal_level is an Enum member
+ if isinstance(signal_level, Enum):
+ power = signal_level.value
+ else:
+ power = signal_level
+
+        # Try to use the measured path loss value. If it was not set,
+        # subtracting None will raise a TypeError.
+        try:
+            calibrated_power = round(power - self.ul_path_loss)
+            self.log.info(
+                "Requested UL Tx power of {} dBm, setting callbox Rx power "
+                "at {} dBm".format(power, calibrated_power))
+            self.bts1.input_level = calibrated_power
+            # Power has to be an integer, so calibration won't be exact.
+            # Inform the actual transmitted power after rounding.
+            self.log.info("Uplink transmitted power is {} dBm".format(
+                calibrated_power + self.ul_path_loss))
+        except TypeError:
+            self.bts1.input_level = round(power)
+            self.log.info(
+                "Uplink transmitted power set to {} dBm (link is "
+                "uncalibrated).".format(round(power)))
+
+ def calibrate(self):
+ """ Calculates UL and DL path loss if it wasn't done before.
+
+ """
+
+ if self.dl_path_loss and self.ul_path_loss:
+ self.log.info("Measurements are already calibrated.")
+
+ # Start simulation if needed
+ self.start()
+
+ # If downlink or uplink were not yet calibrated, do it now
+ if not self.dl_path_loss:
+ self.dl_path_loss = self.downlink_calibration(self.bts1)
+ if not self.ul_path_loss:
+ self.ul_path_loss = self.uplink_calibration(self.bts1)
+
+ # Stop simulation after calibrating
+ self.stop()
+
+    def downlink_calibration(self, bts, rat=None,
+                             power_units_conversion_func=None):
+ """ Computes downlink path loss and returns the calibration value
+
+ The bts needs to be set at the desired config (bandwidth, mode, etc)
+ before running the calibration. The phone also needs to be attached
+        to the desired base station for calibration.
+
+ Args:
+ bts: basestation handle
+            rat: desired RAT to calibrate (matching the label reported by
+                the phone)
+            power_units_conversion_func: a function to convert the units
+                reported by the phone to dBm. Needs to take two arguments:
+                the reported signal level and bts. Use None if no conversion
+                is needed.
+ Returns:
+            The downlink calibration value, i.e. the measured path loss in dB.
+ """
+
+        # Check if this parameter was set. Child classes may need to override
+        # this method, passing the necessary parameters.
+ if not rat:
+ raise ValueError("The parameter 'rat' has to indicate the RAT being used as reported by the phone.")
+
+        # Save the current settings and set the BTS output to the target
+        # calibration power
+ init_output_level = bts.output_level
+ initial_screen_timeout = self.dut.droid.getScreenTimeout()
+ bts.output_level = self.DOWNLINK_CAL_TARGET_POWER_DBM
+ time.sleep(3)
+
+        # Extend the screen timeout and turn the screen off
+ self.dut.droid.setScreenTimeout(1800)
+ self.dut.droid.goToSleepNow()
+
+        # Start the IP traffic (UDP) first, always using APN 1
+ try:
+ cmd = 'OPERATEIPTRAFFIC START,1'
+ self.anritsu.send_command(cmd)
+ except AnritsuError as inst:
+ self.log.warning("{}\n".format(inst)) # Typically RUNNING already
+ time.sleep(4)
+
+ down_power_measured = []
+ for i in range(0, self.NUM_DOWNLINK_CAL_READS):
+ # For some reason, the RSRP gets updated on Screen ON event
+ self.dut.droid.wakeUpNow()
+ time.sleep(4)
+ signal_strength = get_telephony_signal_strength(self.dut)
+ down_power_measured.append(signal_strength[rat])
+ self.dut.droid.goToSleepNow()
+ time.sleep(4)
+
+ # Stop the IP traffic (UDP)
+ try:
+ cmd = 'OPERATEIPTRAFFIC STOP,1'
+ self.anritsu.send_command(cmd)
+ except AnritsuError as inst:
+ self.log.warning("{}\n".format(inst)) # Typically STOPPED already
+ time.sleep(1.5)
+
+ # Reset phone and bts to original settings
+ self.dut.droid.goToSleepNow()
+ self.dut.droid.setScreenTimeout(initial_screen_timeout)
+ bts.output_level = init_output_level
+
+ # Calculate the mean of the measurements
+ reported_asu_power = np.nanmean(down_power_measured)
+
+        # Convert from the phone's reported units to dBm if a conversion function was provided
+ if power_units_conversion_func:
+ avg_down_power = power_units_conversion_func(reported_asu_power, bts)
+ else:
+ avg_down_power = reported_asu_power
+
+ # Calculate Path Loss
+ down_call_path_loss = self.DOWNLINK_CAL_TARGET_POWER_DBM - avg_down_power
+
+ self.log.info("Measured downlink path loss: {} dB".format(down_call_path_loss))
+
+ return down_call_path_loss
+
+
+ def uplink_calibration(self, bts):
+ """ Computes uplink path loss and returns the calibration value
+
+ The bts needs to be set at the desired config (bandwidth, mode, etc)
+        before running the calibration. The phone also needs to be attached
+        to the desired basestation for calibration.
+
+ Args:
+ bts: basestation handle
+
+ Returns:
+            Uplink calibration value (path loss) in dB.
+ """
+
+ # Set BTS1 to maximum input allowed in order to do uplink calibration
+ target_power = self.MAX_PHONE_OUTPUT_POWER_DBM
+ initial_input_level = bts.input_level
+ initial_screen_timeout = self.dut.droid.getScreenTimeout()
+ bts.input_level = self.MAX_BTS_INPUT_POWER_DBM
+ time.sleep(3)
+
+        # Extend the screen timeout and keep the phone awake during the measurement
+ self.dut.droid.setScreenTimeout(1800)
+ self.dut.droid.wakeUpNow()
+
+        # Start the IP traffic (UDP), always using APN 1
+ try:
+ cmd = 'OPERATEIPTRAFFIC START,1'
+ self.anritsu.send_command(cmd)
+ except AnritsuError as inst:
+ self.log.warning("{}\n".format(inst)) # Typically RUNNING already
+ time.sleep(4)
+
+ up_power_per_chain = []
+ # Get the number of chains
+ cmd = 'MONITOR? UL_PUSCH'
+ uplink_meas_power = self.anritsu.send_query(cmd)
+ str_power_chain = uplink_meas_power.split(',')
+ num_chains = len(str_power_chain)
+ for ichain in range(0, num_chains):
+ up_power_per_chain.append([])
+
+ for i in range(0, self.NUM_UPLINK_CAL_READS):
+ uplink_meas_power = self.anritsu.send_query(cmd)
+ str_power_chain = uplink_meas_power.split(',')
+
+ for ichain in range(0, num_chains):
+ if (str_power_chain[ichain] == 'DEACTIVE'):
+ up_power_per_chain[ichain].append(float('nan'))
+ else:
+ up_power_per_chain[ichain].append(
+ float(str_power_chain[ichain]))
+
+ time.sleep(2)
+
+ # Stop the IP traffic (UDP)
+ try:
+ cmd = 'OPERATEIPTRAFFIC STOP,1'
+ self.anritsu.send_command(cmd)
+ except AnritsuError as inst:
+ self.log.warning("{}\n".format(inst)) # Typically STOPPED already
+ time.sleep(1.5)
+
+ # Reset phone and bts to original settings
+ self.dut.droid.goToSleepNow()
+ self.dut.droid.setScreenTimeout(initial_screen_timeout)
+ bts.input_level = initial_input_level
+
+ # Phone only supports 1x1 Uplink so always chain 0
+ avg_up_power = np.nanmean(up_power_per_chain[0])
+ if np.isnan(avg_up_power):
+            raise ValueError("Calibration failed because the callbox reported the chain as DEACTIVE.")
+
+ up_call_path_loss = target_power - avg_up_power
+
+ self.up_call_path_loss = up_call_path_loss
+ self.up_call_power_per_chain = up_power_per_chain
+
+ self.log.info("Measured uplink path loss: {} dB".format(up_call_path_loss))
+
+ return up_call_path_loss
+
+
+ def set_band(self, bts, band, calibrate_if_necessary=False):
+ """ Sets the band used for communication.
+
+        When moving to a new band, the stored calibration is invalidated and, if requested, redone.
+
+ Args:
+ bts: basestation handle
+ band: desired band
+            calibrate_if_necessary: if True, run the calibration procedure when no valid calibration is available
+ """
+
+ current_band = bts.band
+
+ # Change band only if it is needed
+ if current_band != band:
+ bts.band = band
+
+ # If band is being changed, then invalidate calibration
+ self.dl_path_loss = None
+ self.ul_path_loss = None
+
+ # self.dl_path_loss and self.ul_path_loss may be None if calibration was never done or if it was invalidated
+ # in the previous lines.
+ if calibrate_if_necessary and (not self.dl_path_loss or not self.ul_path_loss):
+ self.calibrate()
+
+ def maximum_downlink_throughput(self):
+ """ Calculates maximum achievable downlink throughput in the current simulation state.
+
+        Because throughput depends on the RAT, this method needs to be implemented
+        by child classes.
+
+ Returns:
+ Maximum throughput in mbps
+ """
+ raise NotImplementedError()
+
+ def maximum_uplink_throughput(self):
+        """ Calculates maximum achievable uplink throughput in the current simulation state.
+
+        Because throughput depends on the RAT, this method needs to be implemented
+        by child classes.
+
+ Returns:
+ Maximum throughput in mbps
+ """
+ raise NotImplementedError()
+
+
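
For reference, the simulation classes below encode their configuration in the ACTS test name and extract it with BaseSimulation.consume_parameter(). Judging from the calls in the subclasses, consume_parameter() returns a list of the form [keyword, value1, ..., valueN] when the keyword is present and something falsy otherwise; whether it also removes the consumed tokens is an assumption of this sketch. A minimal, illustrative sketch of that behavior (the helper name is hypothetical and not part of the patch):

def consume_parameter_sketch(parameters, keyword, num_values=0):
    """Returns [keyword, value1, ...] and removes those tokens from parameters."""
    if keyword not in parameters:
        return []
    start = parameters.index(keyword)
    consumed = parameters[start:start + 1 + num_values]
    # Drop the consumed tokens so later parsers do not see them again.
    del parameters[start:start + 1 + num_values]
    return consumed

# A test name such as 'test_lte_band_13_bw_10' would be tokenized into
# ['band', '13', 'bw', '10'] before being handed to parse_parameters().
params = ['band', '13', 'bw', '10']
assert consume_parameter_sketch(params, 'band', 1) == ['band', '13']
assert params == ['bw', '10']
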
diff --git a/acts/framework/acts/test_utils/power/tel_simulations/GsmSimulation.py b/acts/framework/acts/test_utils/power/tel_simulations/GsmSimulation.py
new file mode 100644
index 0000000..652658c
--- /dev/null
+++ b/acts/framework/acts/test_utils/power/tel_simulations/GsmSimulation.py
@@ -0,0 +1,136 @@
+#!/usr/bin/env python3.4
+#
+# Copyright 2018 - The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the 'License');
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an 'AS IS' BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from acts.controllers.anritsu_lib.md8475a import BtsGprsMode
+from acts.test_utils.power.tel_simulations.BaseSimulation import BaseSimulation
+from acts.test_utils.tel.anritsu_utils import GSM_BAND_DCS1800
+from acts.test_utils.tel.anritsu_utils import GSM_BAND_EGSM900
+from acts.test_utils.tel.anritsu_utils import GSM_BAND_GSM850
+from acts.test_utils.tel.anritsu_utils import GSM_BAND_RGSM900
+from acts.test_utils.tel.tel_defines import NETWORK_MODE_GSM_ONLY
+
+
+class GsmSimulation(BaseSimulation):
+ """ Simple GSM simulation with only one basestation.
+
+ """
+
+ # Simulation config files in the callbox computer.
+ # These should be replaced in the future by setting up
+ # the same configuration manually.
+
+ GSM_BASIC_SIM_FILE = ('C:\\Users\MD8475A\Documents\DAN_configs\\'
+ 'SIM_default_GSM.wnssp')
+
+ GSM_CELL_FILE = ('C:\\Users\MD8475A\Documents\\DAN_configs\\'
+ 'CELL_GSM_config.wnscp')
+
+ # Test name parameters
+
+ PARAM_BAND = "band"
+ PARAM_GPRS = "gprs"
+ PARAM_EGPRS = "edge"
+ PARAM_NO_GPRS = "nogprs"
+ PARAM_SLOTS = "slots"
+
+ bands_parameter_mapping = {
+ '850': GSM_BAND_GSM850,
+ '900': GSM_BAND_EGSM900,
+ '1800': GSM_BAND_DCS1800,
+ '1900': GSM_BAND_RGSM900
+ }
+
+ def __init__(self, anritsu, log, dut):
+        """ Configures Anritsu system for GSM simulation with 1 basestation
+
+        Loads a simple GSM simulation environment with 1 basestation. It also
+ creates the BTS handle so we can change the parameters as desired.
+
+ Args:
+ anritsu: the Anritsu callbox controller
+ log: a logger handle
+ dut: the android device handler
+
+ """
+
+ super().__init__(anritsu, log, dut)
+
+ anritsu.load_simulation_paramfile(self.GSM_BASIC_SIM_FILE)
+ self.anritsu.load_cell_paramfile(self.GSM_CELL_FILE)
+
+ if not dut.droid.telephonySetPreferredNetworkTypesForSubscription(NETWORK_MODE_GSM_ONLY,
+ dut.droid.subscriptionGetDefaultSubId()):
+            log.error("Could not set preferred network type.")
+ else:
+ log.info("Preferred network type set.")
+
+ def parse_parameters(self, parameters):
+        """ Configures a GSM simulation using a list of parameters.
+
+ Calls the parent method first, then consumes parameters specific to GSM.
+
+ Args:
+ parameters: list of parameters
+ Returns:
+ False if there was an error while parsing the config
+ """
+
+ if not super().parse_parameters(parameters):
+ return False
+
+ # Setup band
+
+ try:
+ values = self.consume_parameter(parameters, self.PARAM_BAND, 1)
+ band = self.bands_parameter_mapping[values[1]]
+ except:
+            self.log.error("The test name needs to include parameter {} followed by the required band.".format(self.PARAM_BAND))
+ return False
+ else:
+ self.set_band(self.bts1, band, calibrate_if_necessary=False)
+
+ # Setup GPRS mode
+
+ if self.consume_parameter(parameters, self.PARAM_GPRS):
+ self.bts1.gsm_gprs_mode = BtsGprsMode.GPRS
+ elif self.consume_parameter(parameters, self.PARAM_EGPRS):
+ self.bts1.gsm_gprs_mode = BtsGprsMode.EGPRS
+ elif self.consume_parameter(parameters, self.PARAM_NO_GPRS):
+ self.bts1.gsm_gprs_mode = BtsGprsMode.NO_GPRS
+ else:
+ self.log.error("GPRS mode needs to be indicated in the test name with either {}, {} or {}.".format(
+ self.PARAM_GPRS,
+ self.PARAM_EGPRS,
+ self.PARAM_NO_GPRS
+ ))
+ return False
+
+ # Setup slot allocation
+
+ try:
+ values = self.consume_parameter(parameters, self.PARAM_SLOTS, 2)
+ dl = int(values[1])
+ ul = int(values[2])
+ except:
+ self.log.error("The test name needs to include parameter {} followed by two int values indicating DL and UL slots.".format(self.PARAM_SLOTS))
+ return False
+ else:
+ self.bts1.gsm_slots = (dl, ul)
+
+ # No errors were found
+ return True
+
+
+
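
To make the test-name convention above concrete, here is a hypothetical GSM test name and the configuration GsmSimulation.parse_parameters() would derive from it (illustrative only; the name and its tokenization are assumptions, not part of the patch):

# 'test_gsm_band_900_edge_slots_4_1' would be tokenized into:
parameters = ['band', '900', 'edge', 'slots', '4', '1']
# parse_parameters() then maps '900' to GSM_BAND_EGSM900 through
# bands_parameter_mapping, sets bts1.gsm_gprs_mode to BtsGprsMode.EGPRS
# (the 'edge' keyword) and sets bts1.gsm_slots to (4, 1), i.e. 4 downlink
# slots and 1 uplink slot.
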
diff --git a/acts/framework/acts/test_utils/power/tel_simulations/LteSimulation.py b/acts/framework/acts/test_utils/power/tel_simulations/LteSimulation.py
new file mode 100644
index 0000000..fb02de8
--- /dev/null
+++ b/acts/framework/acts/test_utils/power/tel_simulations/LteSimulation.py
@@ -0,0 +1,443 @@
+#!/usr/bin/env python3.4
+#
+# Copyright 2018 - The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the 'License');
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an 'AS IS' BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from enum import Enum
+
+from acts.controllers.anritsu_lib.md8475a import BtsBandwidth
+from acts.controllers.anritsu_lib.md8475a import BtsPacketRate
+from acts.test_utils.power.tel_simulations.BaseSimulation import BaseSimulation
+from acts.test_utils.tel.tel_defines import NETWORK_MODE_LTE_ONLY
+from acts.test_utils.tel.tel_test_utils import set_preferred_apn_by_adb
+
+
+class LteSimulation(BaseSimulation):
+ """ Simple LTE simulation with only one basestation.
+
+ """
+
+ # Simulation config files in the callbox computer.
+ # These should be replaced in the future by setting up
+ # the same configuration manually.
+
+ LTE_BASIC_SIM_FILE = ('C:\\Users\MD8475A\Documents\DAN_configs\\'
+ 'SIM_default_LTE.wnssp')
+ LTE_BASIC_CELL_FILE = ('C:\\Users\MD8475A\Documents\\DAN_configs\\'
+ 'CELL_LTE_config.wnscp')
+
+ # Simulation config keywords contained in the test name
+
+ PARAM_BW = "bw"
+ PARAM_SCHEDULING = "scheduling"
+ PARAM_TM = "tm"
+ PARAM_UL_PW = 'pul'
+ PARAM_DL_PW = 'pdl'
+ PARAM_BAND = "band"
+
+ class TransmissionMode(Enum):
+ ''' Transmission modes for LTE (e.g., TM1, TM4, ..)
+
+ '''
+ TM1 = "TM1"
+ TM2 = "TM2"
+ TM3 = "TM3"
+ TM4 = "TM4"
+
+ class SchedulingMode(Enum):
+ ''' Traffic scheduling modes (e.g., STATIC, DYNAMIC)
+
+ '''
+ DYNAMIC = 0
+ STATIC = 1
+
+    # RSRP signal level thresholds (as reported by Android). Units are dBm/15 kHz
+
+ downlink_rsrp_dictionary = {
+ 'excellent': -60,
+ 'high': -110,
+ 'medium': -115,
+ 'weak': -120
+ }
+
+ # Transmitted output power for the phone
+ # Units are dBm
+
+ uplink_signal_level_dictionary = {
+ 'max': 23,
+ 'high': 13,
+ 'medium': 3,
+ 'low': -20
+ }
+
+ def __init__(self, anritsu, log, dut):
+        """ Configures Anritsu system for LTE simulation with 1 basestation
+
+        Loads a simple LTE simulation environment with 1 basestation.
+
+ Args:
+ anritsu: the Anritsu callbox controller
+ log: a logger handle
+ dut: the android device handler
+
+ """
+
+ super().__init__(anritsu, log, dut)
+
+ anritsu.load_simulation_paramfile(self.LTE_BASIC_SIM_FILE)
+ anritsu.load_cell_paramfile(self.LTE_BASIC_CELL_FILE)
+
+ if not dut.droid.telephonySetPreferredNetworkTypesForSubscription(NETWORK_MODE_LTE_ONLY,
+ dut.droid.subscriptionGetDefaultSubId()):
+            log.error("Could not set preferred network type.")
+ else:
+ log.info("Preferred network type set.")
+
+ set_preferred_apn_by_adb(self.dut, "anritsu1.com")
+        log.info("Preferred APN set to anritsu1.com")
+
+ def parse_parameters(self, parameters):
+        """ Configures an LTE simulation using a list of parameters.
+
+ Calls the parent method first, then consumes parameters specific to LTE.
+
+ Args:
+ parameters: list of parameters
+ Returns:
+ False if there was an error while parsing the config
+ """
+
+ if not super().parse_parameters(parameters):
+ return False
+
+ # Setup band
+
+ try:
+ values = self.consume_parameter(parameters, self.PARAM_BAND, 1)
+ band = values[1]
+ except:
+            self.log.error("The test name needs to include parameter {} followed by the required band.".format(self.PARAM_BAND))
+ return False
+ else:
+ self.set_band(self.bts1, band, calibrate_if_necessary=True)
+
+ # Setup bandwidth
+
+ try:
+ values = self.consume_parameter(parameters, self.PARAM_BW, 1)
+
+ bw = float(values[1])
+
+ if bw == 14:
+ bw = 1.4
+
+ except:
+ self.log.error("The test name needs to include parameter {} followed by an int value "
+ "(to indicate 1.4 MHz use 14).".format(self.PARAM_BW))
+ return False
+ else:
+ self.set_channel_bandwidth(self.bts1, bw)
+
+ # Setup transmission mode
+
+ try:
+ values = self.consume_parameter(parameters, self.PARAM_TM, 1)
+
+ if values[1] == "1":
+ tm = LteSimulation.TransmissionMode.TM1
+ elif values[1] == "2":
+ tm = LteSimulation.TransmissionMode.TM2
+ elif values[1] == "3":
+ tm = LteSimulation.TransmissionMode.TM3
+ elif values[1] == "4":
+ tm = LteSimulation.TransmissionMode.TM4
+ else:
+ raise ValueError()
+
+ except:
+ self.log.error("The test name needs to include parameter {} followed by an int value from 1 to 4 indicating"
+ " transmission mode.".format(self.PARAM_TM))
+ return False
+ else:
+ self.set_transmission_mode(self.bts1, tm)
+
+ # Setup scheduling mode
+
+ try:
+ values = self.consume_parameter(parameters, self.PARAM_SCHEDULING, 1)
+
+ if values[1] == "dynamic":
+ scheduling = LteSimulation.SchedulingMode.DYNAMIC
+            elif values[1] == "static":
+                scheduling = LteSimulation.SchedulingMode.STATIC
+            else:
+                raise ValueError()
+
+ except:
+ self.log.error(
+ "The test name needs to include parameter {} followed by either "
+ "dynamic or static.".format(self.PARAM_SCHEDULING))
+ return False
+ else:
+ self.set_scheduling_mode(self.bts1, scheduling)
+
+ # Setup uplink power
+
+ try:
+ values = self.consume_parameter(parameters, self.PARAM_UL_PW, 1)
+
+ if values[1] not in self.uplink_signal_level_dictionary:
+ raise ValueError("Invalid signal level value.")
+ else:
+ power = self.uplink_signal_level_dictionary[values[1]]
+
+ except:
+ self.log.error(
+                "The test name needs to include parameter {} followed by one of the following values: {}.".format(
+ self.PARAM_UL_PW,
+ ["\n" + val for val in self.uplink_signal_level_dictionary.keys()]
+ ))
+ return False
+ else:
+ # Power is not set on the callbox until after the simulation is started. Will save this value in
+            # a variable and use it later
+ self.sim_ul_power = power
+
+ # Setup downlink power
+
+ try:
+ values = self.consume_parameter(parameters, self.PARAM_DL_PW, 1)
+
+ if values[1] not in self.downlink_rsrp_dictionary:
+ raise ValueError("Invalid signal level value.")
+ else:
+ power = self.downlink_rsrp_dictionary[values[1]]
+
+ except:
+ self.log.error(
+                "The test name needs to include parameter {} followed by one of the following values: {}.".format(
+ self.PARAM_DL_PW,
+ ["\n" + val for val in self.downlink_rsrp_dictionary.keys()]
+ ))
+ return False
+ else:
+ # Power is not set on the callbox until after the simulation is started. Will save this value in
+ # a variable and use it later
+ self.sim_dl_power = power
+
+ # No errors were found
+ return True
+
+
+ def set_downlink_rx_power(self, rsrp):
+ """ Sets downlink rx power in RSRP using calibration
+
+        LTE simulation overrides this method so that it can convert from
+ RSRP to total signal power transmitted from the basestation.
+
+ Args:
+            rsrp: desired RSRP level in dBm
+ """
+
+ power = self.rsrp_to_signal_power(rsrp, self.bts1)
+
+ self.log.info("Setting downlink signal level to {} RSRP ({} dBm)".format(rsrp, power))
+
+ # Use parent method to set signal level
+ super().set_downlink_rx_power(power)
+
+
+    def downlink_calibration(self, bts, rat=None, power_units_conversion_func=None):
+ """ Computes downlink path loss and returns the calibration value
+
+ The bts needs to be set at the desired config (bandwidth, mode, etc)
+ before running the calibration. The phone also needs to be attached
+        to the desired basestation for calibration.
+
+ Args:
+ bts: basestation handle
+ rat: ignored, replaced by 'lteRsrp'
+ power_units_conversion_func: ignored, replaced by self.rsrp_to_signal_power
+
+ Returns:
+            Downlink calibration value (path loss) in dB. Note that the
+            phone only reports RSRP of the primary chain.
+ """
+
+ return super().downlink_calibration(bts, rat='lteRsrp', power_units_conversion_func=self.rsrp_to_signal_power)
+
+ def rsrp_to_signal_power(self, rsrp, bts):
+ """ Converts rsrp to signal power
+
+ RSRP is measured per subcarrier, so linear power needs to be multiplied
+ by the number of subcarriers in the channel.
+
+ Args:
+ rsrp: desired rsrp in dBm
+ bts: basestation handler for which the unit conversion is done
+
+ Returns:
+ Transmitted signal power in dBm
+ """
+
+ bandwidth = bts.bandwidth
+
+        if bandwidth == BtsBandwidth.LTE_BANDWIDTH_20MHz.value:
+            # 100 RBs, 1200 subcarriers: offset = 10 * log10(1200)
+            power = rsrp + 30.79
+        elif bandwidth == BtsBandwidth.LTE_BANDWIDTH_15MHz.value:
+            # 75 RBs
+            power = rsrp + 29.54
+        elif bandwidth == BtsBandwidth.LTE_BANDWIDTH_10MHz.value:
+            # 50 RBs
+            power = rsrp + 27.78
+        elif bandwidth == BtsBandwidth.LTE_BANDWIDTH_5MHz.value:
+            # 25 RBs
+            power = rsrp + 24.77
+        elif bandwidth == BtsBandwidth.LTE_BANDWIDTH_3MHz.value:
+            # 15 RBs
+            power = rsrp + 22.55
+        elif bandwidth == BtsBandwidth.LTE_BANDWIDTH_1dot4MHz.value:
+            # 6 RBs
+            power = rsrp + 18.57
+        else:
+            raise ValueError("Invalid bandwidth value.")
+
+ return power
+
+ def maximum_downlink_throughput(self):
+ """ Calculates maximum achievable downlink throughput in the current simulation state.
+
+ Returns:
+ Maximum throughput in mbps.
+
+ """
+
+ bandwidth = self.bts1.bandwidth
+ chains = float(self.bts1.dl_antenna)
+
+ if bandwidth == BtsBandwidth.LTE_BANDWIDTH_20MHz.value:
+ return 71.11 * chains
+ elif bandwidth == BtsBandwidth.LTE_BANDWIDTH_15MHz.value:
+ return 52.75 * chains
+ elif bandwidth == BtsBandwidth.LTE_BANDWIDTH_10MHz.value:
+ return 29.88 * chains
+ elif bandwidth == BtsBandwidth.LTE_BANDWIDTH_5MHz.value:
+ return 14.11 * chains
+ elif bandwidth == BtsBandwidth.LTE_BANDWIDTH_3MHz.value:
+ return 5.34 * chains
+ elif bandwidth == BtsBandwidth.LTE_BANDWIDTH_1dot4MHz.value:
+ return 0.842 * chains
+ else:
+ raise ValueError("Invalid bandwidth value.")
+
+ def maximum_uplink_throughput(self):
+ """ Calculates maximum achievable uplink throughput in the current simulation state.
+
+ Returns:
+ Maximum throughput in mbps.
+
+ """
+
+ bandwidth = self.bts1.bandwidth
+
+ if bandwidth == BtsBandwidth.LTE_BANDWIDTH_20MHz.value:
+ return 51.02
+ elif bandwidth == BtsBandwidth.LTE_BANDWIDTH_15MHz.value:
+ return 37.88
+ elif bandwidth == BtsBandwidth.LTE_BANDWIDTH_10MHz.value:
+ return 25.45
+ elif bandwidth == BtsBandwidth.LTE_BANDWIDTH_5MHz.value:
+ return 17.57
+ elif bandwidth == BtsBandwidth.LTE_BANDWIDTH_3MHz.value:
+ return 7.99
+ elif bandwidth == BtsBandwidth.LTE_BANDWIDTH_1dot4MHz.value:
+ return 2.98
+ else:
+ raise ValueError("Invalid bandwidth value.")
+
+
+ def set_transmission_mode(self, bts, tmode):
+        """ Sets the transmission mode for the LTE basestation
+
+ Args:
+ bts: basestation handle
+            tmode: a TransmissionMode enum value (only TM1 and TM4 are supported)
+ """
+
+ if tmode == self.TransmissionMode.TM1:
+ bts.dl_antenna = 1
+ bts.transmode = "TM1"
+ elif tmode == self.TransmissionMode.TM4:
+ bts.dl_antenna = 2
+ bts.transmode = "TM4"
+ else:
+ msg = "TM = {} is not valid for LTE".format(tmode)
+ self.log.error(msg)
+ raise ValueError(msg)
+
+ def set_scheduling_mode(self,
+ bts,
+ scheduling,
+ packet_rate=BtsPacketRate.LTE_BESTEFFORT,
+ mcs_dl=0,
+ mcs_ul=0,
+ nrb_dl=5,
+ nrb_ul=5):
+ """ Sets the scheduling mode for LTE
+
+ Args:
+ bts: basestation handle
+            scheduling: DYNAMIC or STATIC scheduling (a SchedulingMode value)
+            packet_rate: packet rate used when scheduling is STATIC
+            mcs_dl: Downlink MCS (only for STATIC scheduling)
+ mcs_ul: Uplink MCS (only for STATIC scheduling)
+ nrb_dl: Number of RBs for downlink (only for STATIC scheduling)
+ nrb_ul: Number of RBs for uplink (only for STATIC scheduling)
+ """
+
+ if scheduling == self.SchedulingMode.DYNAMIC:
+ bts.lte_scheduling_mode = "DYNAMIC"
+ else:
+ bts.lte_scheduling_mode = "STATIC"
+ bts.packet_rate = packet_rate
+ cmd = "TBSPATTERN OFF, " + bts._bts_number
+ self.anritsu.send_command(cmd)
+ if packet_rate == BtsPacketRate.LTE_MANUAL:
+ bts.lte_mcs_dl = mcs_dl
+ bts.lte_mcs_ul = mcs_ul
+ bts.nrb_dl = nrb_dl
+ bts.nrb_ul = nrb_ul
+
+ def set_channel_bandwidth(self, bts, bandwidth):
+ """ Sets the LTE channel bandwidth (MHz)
+
+ Args:
+ bts: basestation handle
+ bandwidth: desired bandwidth (MHz)
+ """
+ if bandwidth == 20:
+ bts.bandwidth = BtsBandwidth.LTE_BANDWIDTH_20MHz
+ elif bandwidth == 15:
+ bts.bandwidth = BtsBandwidth.LTE_BANDWIDTH_15MHz
+ elif bandwidth == 10:
+ bts.bandwidth = BtsBandwidth.LTE_BANDWIDTH_10MHz
+ elif bandwidth == 5:
+ bts.bandwidth = BtsBandwidth.LTE_BANDWIDTH_5MHz
+ elif bandwidth == 3:
+ bts.bandwidth = BtsBandwidth.LTE_BANDWIDTH_3MHz
+ elif bandwidth == 1.4:
+ bts.bandwidth = BtsBandwidth.LTE_BANDWIDTH_1dot4MHz
+ else:
+ msg = "Bandwidth = {} MHz is not valid for LTE".format(bandwidth)
+            self.log.error(msg)
+ raise ValueError(msg)
+
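
The fixed offsets in rsrp_to_signal_power() above follow from treating RSRP as per-subcarrier power: with 12 subcarriers per resource block, total channel power is RSRP + 10 * log10(12 * N_RB), under the assumption that the callbox output level represents total channel power. A short, illustrative check (not part of the patch):

import math

def rsrp_offset_db(num_rbs):
    """dB offset between per-subcarrier RSRP and total channel power."""
    return 10 * math.log10(12 * num_rbs)

# 100 RBs (20 MHz) -> ~30.79 dB and 6 RBs (1.4 MHz) -> ~18.57 dB,
# matching the constants used in rsrp_to_signal_power().
for rbs in (100, 75, 50, 25, 15, 6):
    print(rbs, round(rsrp_offset_db(rbs), 2))
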
diff --git a/acts/framework/acts/test_utils/power/tel_simulations/UmtsSimulation.py b/acts/framework/acts/test_utils/power/tel_simulations/UmtsSimulation.py
new file mode 100644
index 0000000..c129abb
--- /dev/null
+++ b/acts/framework/acts/test_utils/power/tel_simulations/UmtsSimulation.py
@@ -0,0 +1,208 @@
+#!/usr/bin/env python3.4
+#
+# Copyright 2018 - The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the 'License');
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an 'AS IS' BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from acts.controllers.anritsu_lib.md8475a import BtsPacketRate
+from acts.test_utils.power.tel_simulations.BaseSimulation import BaseSimulation
+from acts.test_utils.tel.tel_defines import NETWORK_MODE_WCDMA_ONLY
+
+
+class UmtsSimulation(BaseSimulation):
+ """ Simple UMTS simulation with only one basestation.
+
+ """
+
+ # Simulation config files in the callbox computer.
+ # These should be replaced in the future by setting up
+ # the same configuration manually.
+
+ UMTS_BASIC_SIM_FILE = ('C:\\Users\MD8475A\Documents\DAN_configs\\'
+ 'SIM_default_WCDMA.wnssp')
+
+ UMTS_R99_CELL_FILE = ('C:\\Users\MD8475A\Documents\\DAN_configs\\'
+ 'CELL_WCDMA_R99_config.wnscp')
+
+ UMTS_R7_CELL_FILE = ('C:\\Users\MD8475A\Documents\\DAN_configs\\'
+ 'CELL_WCDMA_R7_config.wnscp')
+
+ UMTS_R8_CELL_FILE = ('C:\\Users\MD8475A\Documents\\DAN_configs\\'
+ 'CELL_WCDMA_R8_config.wnscp')
+
+ # Test name parameters
+ PARAM_RELEASE_VERSION = "r"
+ PARAM_RELEASE_VERSION_99 = "99"
+ PARAM_RELEASE_VERSION_8 = "8"
+ PARAM_RELEASE_VERSION_7 = "7"
+ PARAM_UL_PW = 'pul'
+ PARAM_DL_PW = 'pdl'
+ PARAM_BAND = "band"
+
+    # RSCP signal level thresholds (as reported by Android). Units are dBm
+    # Using LTE thresholds + 24 dB to have an equivalent power spectral density (PSD)
+    # 24 dB comes from 10 * log10(3.84 MHz / 15 kHz)
+
+ downlink_rscp_dictionary = {
+ 'excellent': -51,
+ 'high': -76,
+ 'medium': -86,
+ 'weak': -96
+ }
+
+ # Transmitted output power for the phone
+    # A higher required Tx power corresponds to a weaker link (e.g. cell edge)
+ # Units are dBm
+
+ uplink_signal_level_dictionary = {
+ 'excellent': -20,
+ 'high': 2,
+ 'medium': 8,
+ 'weak': 15,
+ 'edge': 23
+ }
+
+ def __init__(self, anritsu, log, dut):
+        """ Configures Anritsu system for UMTS simulation with 1 basestation
+
+        Loads a simple UMTS simulation environment with 1 basestation. It also
+ creates the BTS handle so we can change the parameters as desired.
+
+ Args:
+ anritsu: the Anritsu callbox controller
+ log: a logger handle
+ dut: the android device handler
+
+ """
+
+ super().__init__(anritsu, log, dut)
+
+ anritsu.load_simulation_paramfile(self.UMTS_BASIC_SIM_FILE)
+
+ if not dut.droid.telephonySetPreferredNetworkTypesForSubscription(NETWORK_MODE_WCDMA_ONLY,
+ dut.droid.subscriptionGetDefaultSubId()):
+            log.error("Could not set preferred network type.")
+ else:
+ log.info("Preferred network type set.")
+
+ def parse_parameters(self, parameters):
+        """ Configures a UMTS simulation using a list of parameters.
+
+ Calls the parent method first, then consumes parameters specific to UMTS.
+
+ Args:
+ parameters: list of parameters
+ Returns:
+ False if there was an error while parsing the config
+ """
+
+ if not super().parse_parameters(parameters):
+ return False
+
+ # Setup band
+
+ try:
+ values = self.consume_parameter(parameters, self.PARAM_BAND, 1)
+ band = values[1]
+ except:
+            self.log.error("The test name needs to include parameter {} followed by the required band.".format(self.PARAM_BAND))
+ return False
+ else:
+ self.set_band(self.bts1, band, calibrate_if_necessary=False)
+
+ # Setup release version
+
+ try:
+ values = self.consume_parameter(parameters, self.PARAM_RELEASE_VERSION, 1)
+
+ if values[1] in [self.PARAM_RELEASE_VERSION_7, self.PARAM_RELEASE_VERSION_8, self.PARAM_RELEASE_VERSION_99]:
+ release_version = values[1]
+ else:
+ raise ValueError()
+
+ except:
+            self.log.error("The test name needs to include the parameter {} followed by a valid release version."
+ .format(self.PARAM_RELEASE_VERSION))
+ return False
+ else:
+ self.set_release_version(self.bts1, release_version)
+
+ # Setup uplink power
+
+ try:
+ values = self.consume_parameter(parameters, self.PARAM_UL_PW, 1)
+
+ if values[1] not in self.uplink_signal_level_dictionary:
+ raise ValueError("Invalid signal level value.")
+ else:
+ power = self.uplink_signal_level_dictionary[values[1]]
+
+ except:
+ self.log.error(
+                "The test name needs to include parameter {} followed by one of the following values: {}.".format(
+ self.PARAM_UL_PW,
+ ["\n" + val for val in self.uplink_signal_level_dictionary.keys()]
+ ))
+ return False
+ else:
+ # Power is not set on the callbox until after the simulation is started. Will save this value in
+            # a variable and use it later
+ self.sim_ul_power = power
+
+ # Setup downlink power
+
+ try:
+ values = self.consume_parameter(parameters, self.PARAM_DL_PW, 1)
+
+ if values[1] not in self.downlink_rscp_dictionary:
+ raise ValueError("Invalid signal level value.")
+ else:
+ power = self.downlink_rscp_dictionary[values[1]]
+
+ except:
+ self.log.error(
+                "The test name needs to include parameter {} followed by one of the following values: {}.".format(
+ self.PARAM_DL_PW,
+ ["\n" + val for val in self.downlink_rscp_dictionary.keys()]
+ ))
+ return False
+ else:
+ # Power is not set on the callbox until after the simulation is started. Will save this value in
+ # a variable and use it later
+ self.sim_dl_power = power
+
+ # No errors were found
+ return True
+
+
+    def set_release_version(self, bts, release_version):
+        """ Sets the UMTS release version (R99, R7 or R8) by loading the
+        corresponding cell parameter file and setting the packet rate. """
+
+ if release_version == self.PARAM_RELEASE_VERSION_99:
+
+ cell_parameter_file = self.UMTS_R99_CELL_FILE
+ packet_rate = BtsPacketRate.WCDMA_DL384K_UL64K
+
+ elif release_version == self.PARAM_RELEASE_VERSION_7:
+
+ cell_parameter_file = self.UMTS_R7_CELL_FILE
+ packet_rate = BtsPacketRate.WCDMA_DL21_6M_UL5_76M
+
+ elif release_version == self.PARAM_RELEASE_VERSION_8:
+
+ cell_parameter_file = self.UMTS_R8_CELL_FILE
+ packet_rate = BtsPacketRate.WCDMA_DL43_2M_UL5_76M
+
+ else:
+ raise ValueError("Invalid UMTS release version number.")
+
+ self.anritsu.load_cell_paramfile(cell_parameter_file)
+ bts.packet_rate = packet_rate
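
The 24 dB offset mentioned in the RSCP threshold comment above is the bandwidth ratio between the WCDMA chip rate and a single LTE subcarrier. A one-line check (illustrative only, not part of the patch):

import math
print(round(10 * math.log10(3.84e6 / 15e3), 2))  # ~24.08 dB
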
diff --git a/acts/framework/acts/test_utils/tel/tel_test_utils.py b/acts/framework/acts/test_utils/tel/tel_test_utils.py
index 3e8e6e6..3489b8e 100644
--- a/acts/framework/acts/test_utils/tel/tel_test_utils.py
+++ b/acts/framework/acts/test_utils/tel/tel_test_utils.py
@@ -2441,7 +2441,6 @@
if ad.droid.connectivityNetworkIsConnected() != expected_state:
ad.log.info("NetworkIsConnected = %s, expecting %s",
not expected_state, expected_state)
- return False
if verify_internet_connection_by_ping(
log, ad, retries=retries, expected_state=expected_state):
return True
@@ -2552,6 +2551,7 @@
try:
ad.log.info("Download %s to %s by adb shell command %s", url,
file_path, curl_cmd)
+
ad.adb.shell(curl_cmd, timeout=timeout)
if _check_file_existance(ad, file_path, expected_file_size):
ad.log.info("%s is downloaded to %s successfully", url, file_path)
@@ -2561,6 +2561,13 @@
return False
except Exception as e:
ad.log.warning("Download %s failed with exception %s", url, e)
+ for cmd in ("ls -lh /data/local/tmp/tcpdump/",
+ "ls -lh /sdcard/Download/",
+ "ls -lh /data/vendor/radio/diag_logs/logs/",
+ "df -h",
+ "du -d 4 -h /data"):
+ out = ad.adb.shell(cmd)
+ ad.log.debug("%s", out)
return False
finally:
if remove_file_after_check:
@@ -2718,6 +2725,13 @@
ad.data_droid.httpDownloadFile(url, file_path, timeout=timeout)
except Exception as e:
ad.log.warning("SL4A file download error: %s", e)
+ for cmd in ("ls -lh /data/local/tmp/tcpdump/",
+ "ls -lh /sdcard/Download/",
+ "ls -lh /data/vendor/radio/diag_logs/logs/",
+ "df -h",
+ "du -d 4 -h /data"):
+ out = ad.adb.shell(cmd)
+ ad.log.debug("%s", out)
ad.data_droid.terminate()
return False
if _check_file_existance(ad, file_path, expected_file_size):
@@ -2837,6 +2851,9 @@
def trigger_modem_crash_by_modem(ad, timeout=120):
begin_time = get_device_epoch_time(ad)
ad.adb.shell(
+ "setprop persist.vendor.sys.modem.diag.mdlog false", ignore_status=True)
+ # Legacy pixels use persist.sys.modem.diag.mdlog.
+ ad.adb.shell(
"setprop persist.sys.modem.diag.mdlog false", ignore_status=True)
disable_qxdm_logger(ad)
cmd = ('am instrument -w -e request "4b 25 03 00" '
@@ -3915,6 +3932,7 @@
result = operator_name_from_network_name(result)
except Exception:
result = CARRIER_UNKNOWN
+ ad.log.info("Operator Name is %s", result)
return result
@@ -5959,6 +5977,8 @@
else:
ad.adb.shell(
"find /data/local/tmp/tcpdump -type f -not -mtime -1800s -delete")
+ ad.adb.shell(
+ "find /data/local/tmp/tcpdump -type f -size +5G -delete")
if not begin_time:
begin_time = get_current_epoch_time()
@@ -6233,7 +6253,8 @@
def get_sim_state(ad):
try:
state = ad.droid.telephonyGetSimState()
- except:
+ except Exception as e:
+ ad.log.error(e)
state = ad.adb.getprop("gsm.sim.state")
return state
diff --git a/acts/framework/acts/test_utils/wifi/WifiBaseTest.py b/acts/framework/acts/test_utils/wifi/WifiBaseTest.py
index 1f69270..fb7b496 100755
--- a/acts/framework/acts/test_utils/wifi/WifiBaseTest.py
+++ b/acts/framework/acts/test_utils/wifi/WifiBaseTest.py
@@ -44,12 +44,13 @@
for attenuator in self.attenuators:
attenuator.set_atten(0)
- def get_wpa2_network(
+ def get_psk_network(
self,
mirror_ap,
reference_networks,
hidden=False,
same_ssid=False,
+ security_mode=hostapd_constants.WPA2_STRING,
ssid_length_2g=hostapd_constants.AP_SSID_LENGTH_2G,
ssid_length_5g=hostapd_constants.AP_SSID_LENGTH_5G,
passphrase_length_2g=hostapd_constants.AP_PASSPHRASE_LENGTH_2G,
@@ -73,8 +74,8 @@
"""
network_dict_2g = {}
network_dict_5g = {}
- ref_5g_security = hostapd_constants.WPA2_STRING
- ref_2g_security = hostapd_constants.WPA2_STRING
+ ref_5g_security = security_mode
+ ref_2g_security = security_mode
if same_ssid:
ref_2g_ssid = 'xg_%s' % utils.rand_ascii_str(ssid_length_2g)
@@ -90,32 +91,19 @@
ref_5g_ssid = '5g_%s' % utils.rand_ascii_str(ssid_length_5g)
ref_5g_passphrase = utils.rand_ascii_str(passphrase_length_5g)
- if hidden:
- network_dict_2g = {
- "SSID": ref_2g_ssid,
- "security": ref_2g_security,
- "password": ref_2g_passphrase,
- "hiddenSSID": True
- }
+ network_dict_2g = {
+ "SSID": ref_2g_ssid,
+ "security": ref_2g_security,
+ "password": ref_2g_passphrase,
+ "hiddenSSID": hidden
+ }
- network_dict_5g = {
- "SSID": ref_5g_ssid,
- "security": ref_5g_security,
- "password": ref_5g_passphrase,
- "hiddenSSID": True
- }
- else:
- network_dict_2g = {
- "SSID": ref_2g_ssid,
- "security": ref_2g_security,
- "password": ref_2g_passphrase
- }
-
- network_dict_5g = {
- "SSID": ref_5g_ssid,
- "security": ref_5g_security,
- "password": ref_5g_passphrase
- }
+ network_dict_5g = {
+ "SSID": ref_5g_ssid,
+ "security": ref_5g_security,
+ "password": ref_5g_passphrase,
+ "hiddenSSID": hidden
+ }
ap = 0
for ap in range(MAX_AP_COUNT):
@@ -159,28 +147,17 @@
open_2g_ssid = '2g_%s' % utils.rand_ascii_str(ssid_length_2g)
open_5g_ssid = '5g_%s' % utils.rand_ascii_str(ssid_length_5g)
- if hidden:
- network_dict_2g = {
+ network_dict_2g = {
"SSID": open_2g_ssid,
"security": 'none',
- "hiddenSSID": True
- }
+ "hiddenSSID": hidden
+ }
- network_dict_5g = {
+ network_dict_5g = {
"SSID": open_5g_ssid,
"security": 'none',
- "hiddenSSID": True
- }
- else:
- network_dict_2g = {
- "SSID": open_2g_ssid,
- "security": 'none'
- }
-
- network_dict_5g = {
- "SSID": open_5g_ssid,
- "security": 'none'
- }
+ "hiddenSSID": hidden
+ }
ap = 0
for ap in range(MAX_AP_COUNT):
@@ -192,6 +169,76 @@
break
return {"2g": network_dict_2g, "5g": network_dict_5g}
+ def get_wep_network(
+ self,
+ mirror_ap,
+ networks,
+ hidden=False,
+ same_ssid=False,
+ ssid_length_2g=hostapd_constants.AP_SSID_LENGTH_2G,
+ ssid_length_5g=hostapd_constants.AP_SSID_LENGTH_5G,
+ passphrase_length_2g=hostapd_constants.AP_PASSPHRASE_LENGTH_2G,
+ passphrase_length_5g=hostapd_constants.AP_PASSPHRASE_LENGTH_5G):
+        """Generates SSID and passphrase for a WEP network using a random
+ generator.
+
+ Args:
+ mirror_ap: Boolean, determines if both APs use the same hostapd
+ config or different configs.
+ networks: List of WEP networks.
+ same_ssid: Boolean, determines if both bands on AP use the same
+ SSID.
+            ssid_length_2g: Int, number of characters to use for 2G SSID.
+ ssid_length_5g: Int, number of characters to use for 5G SSID.
+ passphrase_length_2g: Int, length of password for 2G network.
+ passphrase_length_5g: Int, length of password for 5G network.
+
+ Returns: A dict of 2G and 5G network lists for hostapd configuration.
+
+ """
+ network_dict_2g = {}
+ network_dict_5g = {}
+ ref_5g_security = hostapd_constants.WEP_STRING
+ ref_2g_security = hostapd_constants.WEP_STRING
+
+ if same_ssid:
+ ref_2g_ssid = 'xg_%s' % utils.rand_ascii_str(ssid_length_2g)
+ ref_5g_ssid = ref_2g_ssid
+
+ ref_2g_passphrase = utils.rand_hex_str(passphrase_length_2g)
+ ref_5g_passphrase = ref_2g_passphrase
+
+ else:
+ ref_2g_ssid = '2g_%s' % utils.rand_ascii_str(ssid_length_2g)
+ ref_2g_passphrase = utils.rand_hex_str(passphrase_length_2g)
+
+ ref_5g_ssid = '5g_%s' % utils.rand_ascii_str(ssid_length_5g)
+ ref_5g_passphrase = utils.rand_hex_str(passphrase_length_5g)
+
+ network_dict_2g = {
+ "SSID": ref_2g_ssid,
+ "security": ref_2g_security,
+ "wepKeys": [ref_2g_passphrase] * 4,
+ "hiddenSSID": hidden
+ }
+
+ network_dict_5g = {
+ "SSID": ref_5g_ssid,
+ "security": ref_5g_security,
+            "wepKeys": [ref_5g_passphrase] * 4,
+ "hiddenSSID": hidden
+ }
+
+ ap = 0
+ for ap in range(MAX_AP_COUNT):
+ networks.append({
+ "2g": copy.copy(network_dict_2g),
+ "5g": copy.copy(network_dict_5g)
+ })
+ if not mirror_ap:
+ break
+ return {"2g": network_dict_2g, "5g": network_dict_5g}
+
def update_bssid(self, ap_instance, ap, network, band):
"""Get bssid and update network dictionary.
@@ -208,7 +255,10 @@
# TODO:(bamahadev) Change all occurances of reference_networks
# to wpa_networks.
self.reference_networks[ap_instance][band]["bssid"] = bssid
-
+ if network["security"] == hostapd_constants.WPA_STRING:
+ self.wpa_networks[ap_instance][band]["bssid"] = bssid
+ if network["security"] == hostapd_constants.WEP_STRING:
+ self.wep_networks[ap_instance][band]["bssid"] = bssid
if network["security"] == 'none':
self.open_network[ap_instance][band]["bssid"] = bssid
@@ -251,6 +301,8 @@
hidden=False,
same_ssid=False,
mirror_ap=True,
+ wpa_network=False,
+ wep_network=False,
ap_count=1):
asserts.assert_true(
len(self.user_params["AccessPoint"]) == 2,
@@ -271,6 +323,10 @@
self.user_params["reference_networks"] = []
self.user_params["open_network"] = []
+ if wpa_network:
+ self.user_params["wpa_networks"] = []
+ if wep_network:
+ self.user_params["wep_networks"] = []
for count in range(config_count):
@@ -283,7 +339,7 @@
network_list_2g.append({"channel": channel_2g})
network_list_5g.append({"channel": channel_5g})
- networks_dict = self.get_wpa2_network(
+ networks_dict = self.get_psk_network(
mirror_ap,
self.user_params["reference_networks"],
hidden=hidden,
@@ -309,6 +365,29 @@
network_list_2g.append(networks_dict["2g"])
network_list_5g.append(networks_dict["5g"])
+ if wpa_network:
+ networks_dict = self.get_psk_network(
+ mirror_ap,
+ self.user_params["wpa_networks"],
+ hidden=hidden,
+ same_ssid=same_ssid,
+ security_mode=hostapd_constants.WPA_STRING)
+ self.wpa_networks = self.user_params["wpa_networks"]
+
+ network_list_2g.append(networks_dict["2g"])
+ network_list_5g.append(networks_dict["5g"])
+
+ if wep_network:
+ networks_dict = self.get_wep_network(
+ mirror_ap,
+ self.user_params["wep_networks"],
+ hidden=hidden,
+ same_ssid=same_ssid)
+ self.wep_networks = self.user_params["wep_networks"]
+
+ network_list_2g.append(networks_dict["2g"])
+ network_list_5g.append(networks_dict["5g"])
+
orig_network_list_5g = copy.copy(network_list_5g)
orig_network_list_2g = copy.copy(network_list_2g)
@@ -338,47 +417,77 @@
# build config based on the bss_Settings alone.
hostapd_config_settings = network_list.pop(0)
for network in network_list:
- if "password" in network and "hiddenSSID" in network:
+ if "password" in network:
bss_settings.append(
hostapd_bss_settings.BssSettings(
name=network["SSID"],
ssid=network["SSID"],
- hidden=True,
+ hidden=network["hiddenSSID"],
security=hostapd_security.Security(
security_mode=network["security"],
password=network["password"])))
- elif "password" in network and not "hiddenSSID" in network:
+ elif "wepKeys" in network:
bss_settings.append(
hostapd_bss_settings.BssSettings(
name=network["SSID"],
ssid=network["SSID"],
+ hidden=network["hiddenSSID"],
security=hostapd_security.Security(
security_mode=network["security"],
- password=network["password"])))
- elif not "password" in network and "hiddenSSID" in network:
+ password=network["wepKeys"][0])))
+ else:
bss_settings.append(
hostapd_bss_settings.BssSettings(
name=network["SSID"],
ssid=network["SSID"],
- hidden=True))
- elif not "password" in network and not "hiddenSSID" in network:
- bss_settings.append(
- hostapd_bss_settings.BssSettings(
- name=network["SSID"],
- ssid=network["SSID"]))
+ hidden=network["hiddenSSID"]))
+
if "password" in hostapd_config_settings:
config = hostapd_ap_preset.create_ap_preset(
channel=ap_settings["channel"],
ssid=hostapd_config_settings["SSID"],
+ hidden=hostapd_config_settings["hiddenSSID"],
security=hostapd_security.Security(
security_mode=hostapd_config_settings["security"],
password=hostapd_config_settings["password"]),
bss_settings=bss_settings,
profile_name='whirlwind')
+ elif "wepKeys" in hostapd_config_settings:
+ config = hostapd_ap_preset.create_ap_preset(
+ channel=ap_settings["channel"],
+ ssid=hostapd_config_settings["SSID"],
+ hidden=hostapd_config_settings["hiddenSSID"],
+ security=hostapd_security.Security(
+ security_mode=hostapd_config_settings["security"],
+ password=hostapd_config_settings["wepKeys"][0]),
+ bss_settings=bss_settings,
+ profile_name='whirlwind')
else:
config = hostapd_ap_preset.create_ap_preset(
channel=ap_settings["channel"],
ssid=hostapd_config_settings["SSID"],
+ hidden=hostapd_config_settings["hiddenSSID"],
bss_settings=bss_settings,
profile_name='whirlwind')
return config
+
+ def configure_packet_capture(
+ self,
+ channel_5g=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+ channel_2g=hostapd_constants.AP_DEFAULT_CHANNEL_2G):
+ """Configure packet capture for 2G and 5G bands.
+
+ Args:
+ channel_5g: Channel to set the monitor mode to for 5G band.
+ channel_2g: Channel to set the monitor mode to for 2G band.
+ """
+ self.packet_capture = self.packet_capture[0]
+ result = self.packet_capture.configure_monitor_mode(
+ hostapd_constants.BAND_2G, channel_2g)
+ if not result:
+ raise ValueError("Failed to configure channel for 2G band")
+
+ result = self.packet_capture.configure_monitor_mode(
+ hostapd_constants.BAND_5G, channel_5g)
+ if not result:
+ raise ValueError("Failed to configure channel for 5G band.")
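
A minimal sketch of how a test might request WEP networks with the helper added above; the example test class, its setup flow and the surrounding plumbing are assumptions, not part of this patch:

from acts.test_utils.wifi.WifiBaseTest import WifiBaseTest

class ExampleWepTest(WifiBaseTest):  # hypothetical test class
    def setup_class(self):
        self.user_params["wep_networks"] = []
        networks_dict = self.get_wep_network(
            mirror_ap=True,
            networks=self.user_params["wep_networks"],
            hidden=False,
            same_ssid=False)
        # networks_dict["2g"]["wepKeys"] holds four copies of the generated
        # hex key, and one {"2g": ..., "5g": ...} entry is appended to
        # wep_networks for each configured AP.
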
diff --git a/acts/framework/acts/test_utils/wifi/aware/AwareBaseTest.py b/acts/framework/acts/test_utils/wifi/aware/AwareBaseTest.py
index dec26a2..9ca84d3 100644
--- a/acts/framework/acts/test_utils/wifi/aware/AwareBaseTest.py
+++ b/acts/framework/acts/test_utils/wifi/aware/AwareBaseTest.py
@@ -15,6 +15,7 @@
# limitations under the License.
from acts import asserts
+from acts import utils
from acts.base_test import BaseTestClass
from acts.test_utils.wifi import wifi_test_utils as wutils
from acts.test_utils.wifi.aware import aware_const as aconsts
@@ -23,7 +24,7 @@
class AwareBaseTest(BaseTestClass):
def __init__(self, controllers):
- BaseTestClass.__init__(self, controllers)
+ super(AwareBaseTest, self).__init__(controllers)
# message ID counter to make sure all uses are unique
msg_id = 0
@@ -43,6 +44,7 @@
"Device under test does not support Wi-Fi Aware - skipping test")
wutils.wifi_toggle_state(ad, True)
ad.droid.wifiP2pClose()
+ utils.set_location_service(ad, True)
aware_avail = ad.droid.wifiIsAwareAvailable()
if not aware_avail:
self.log.info('Aware not available. Waiting ...')
@@ -52,7 +54,11 @@
self.reset_device_parameters(ad)
self.reset_device_statistics(ad)
self.set_power_mode_parameters(ad)
-
+ ad.droid.wifiSetCountryCode(wutils.WifiEnums.CountryCode.US)
+ autils.configure_ndp_allow_any_override(ad, True)
+ # set randomization interval to 0 (disable) to reduce likelihood of
+ # interference in tests
+ autils.configure_mac_random_interval(ad, 0)
def teardown_test(self):
for ad in self.android_devices:
@@ -85,9 +91,9 @@
"""Set the power configuration DW parameters for the device based on any
configuration overrides (if provided)"""
if self.aware_default_power_mode == "INTERACTIVE":
- autils.config_dw_high_power(ad)
+ autils.config_settings_high_power(ad)
elif self.aware_default_power_mode == "NON_INTERACTIVE":
- autils.config_dw_low_power(ad)
+ autils.config_settings_low_power(ad)
else:
asserts.assert_false(
"The 'aware_default_power_mode' configuration must be INTERACTIVE or "
@@ -102,3 +108,8 @@
"""
self.msg_id = self.msg_id + 1
return self.msg_id
+
+ def on_fail(self, test_name, begin_time):
+ for ad in self.android_devices:
+ ad.take_bug_report(test_name, begin_time)
+ ad.cat_adb_log(test_name, begin_time)
diff --git a/acts/framework/acts/test_utils/wifi/aware/aware_const.py b/acts/framework/acts/test_utils/wifi/aware/aware_const.py
index 608c5c7..36c469e 100644
--- a/acts/framework/acts/test_utils/wifi/aware/aware_const.py
+++ b/acts/framework/acts/test_utils/wifi/aware/aware_const.py
@@ -15,14 +15,21 @@
# limitations under the License.
######################################################
-# Aware DW (Discovery Window) power mode values
+# Aware power settings values for interactive (high power) and
+# non-interactive (low power) modes
######################################################
-DW_24_INTERACTIVE = 1
-DW_5_INTERACTIVE = 1
+POWER_DW_24_INTERACTIVE = 1
+POWER_DW_5_INTERACTIVE = 1
+POWER_DISC_BEACON_INTERVAL_INTERACTIVE = 0
+POWER_NUM_SS_IN_DISC_INTERACTIVE = 0
+POWER_ENABLE_DW_EARLY_TERM_INTERACTIVE = 0
-DW_24_NON_INTERACTIVE = 4
-DW_5_NON_INTERACTIVE = 0
+POWER_DW_24_NON_INTERACTIVE = 4
+POWER_DW_5_NON_INTERACTIVE = 0
+POWER_DISC_BEACON_INTERVAL_NON_INTERACTIVE = 0
+POWER_NUM_SS_IN_DISC_NON_INTERACTIVE = 0
+POWER_ENABLE_DW_EARLY_TERM_NON_INTERACTIVE = 0
######################################################
# Broadcast events
@@ -51,6 +58,9 @@
DISCOVERY_KEY_DISCOVERY_TYPE = "DiscoveryType"
DISCOVERY_KEY_TTL = "TtlSec"
DISCOVERY_KEY_TERM_CB_ENABLED = "TerminateNotificationEnabled"
+DISCOVERY_KEY_RANGING_ENABLED = "RangingEnabled"
+DISCOVERY_KEY_MIN_DISTANCE_MM = "MinDistanceMm"
+DISCOVERY_KEY_MAX_DISTANCE_MM = "MaxDistanceMm"
PUBLISH_TYPE_UNSOLICITED = 0
PUBLISH_TYPE_SOLICITED = 1
@@ -101,6 +111,7 @@
SESSION_CB_KEY_MESSAGE_AS_STRING = "messageAsString"
SESSION_CB_KEY_LATENCY_MS = "latencyMs"
SESSION_CB_KEY_TIMESTAMP_MS = "timestampMs"
+SESSION_CB_KEY_DISTANCE_MM = "distanceMm"
######################################################
# WifiAwareRangingListener events (RttManager.RttListener)
diff --git a/acts/framework/acts/test_utils/wifi/aware/aware_test_utils.py b/acts/framework/acts/test_utils/wifi/aware/aware_test_utils.py
index 092df2b..ca15db5 100644
--- a/acts/framework/acts/test_utils/wifi/aware/aware_test_utils.py
+++ b/acts/framework/acts/test_utils/wifi/aware/aware_test_utils.py
@@ -310,21 +310,78 @@
data_min = min(data)
data_max = max(data)
data_mean = statistics.mean(data)
+ data_cdf = extract_cdf(data)
+ data_cdf_decile = extract_cdf_decile(data_cdf)
results['%smin' % key_prefix] = data_min
results['%smax' % key_prefix] = data_max
results['%smean' % key_prefix] = data_mean
+ results['%scdf' % key_prefix] = data_cdf
+ results['%scdf_decile' % key_prefix] = data_cdf_decile
results['%sraw_data' % key_prefix] = data
if num_samples > 1:
data_stdev = statistics.stdev(data)
results['%sstdev' % key_prefix] = data_stdev
- ad.log.info('%s: num_samples=%d, min=%.2f, max=%.2f, mean=%.2f, stdev=%.2f',
- log_prefix, num_samples, data_min, data_max, data_mean,
- data_stdev)
+ ad.log.info(
+ '%s: num_samples=%d, min=%.2f, max=%.2f, mean=%.2f, stdev=%.2f, cdf_decile=%s',
+ log_prefix, num_samples, data_min, data_max, data_mean, data_stdev,
+ data_cdf_decile)
else:
- ad.log.info('%s: num_samples=%d, min=%.2f, max=%.2f, mean=%.2f', log_prefix,
- num_samples, data_min, data_max, data_mean)
+ ad.log.info(
+ '%s: num_samples=%d, min=%.2f, max=%.2f, mean=%.2f, cdf_decile=%s',
+ log_prefix, num_samples, data_min, data_max, data_mean, data_cdf_decile)
+
+def extract_cdf_decile(cdf):
+ """Extracts the 10%, 20%, ..., 90% points from the CDF and returns their
+ value (a list of 9 values).
+
+  Since the CDF may not contain an exact x% point, the first value whose CDF is >= x% is used.
+
+ Args:
+ cdf: a list of 2 lists, the X and Y of the CDF.
+ """
+ decades = []
+ next_decade = 10
+ for x, y in zip(cdf[0], cdf[1]):
+    while next_decade <= 90 and 100 * y >= next_decade:
+ decades.append(x)
+ next_decade = next_decade + 10
+ if next_decade == 100:
+ break
+ return decades
+
+def extract_cdf(data):
+ """Calculates the Cumulative Distribution Function (CDF) of the data.
+
+ Args:
+ data: A list containing data (does not have to be sorted).
+
+ Returns: a list of 2 lists: the X and Y axis of the CDF.
+ """
+ x = []
+ cdf = []
+ if not data:
+ return (x, cdf)
+
+ all_values = sorted(data)
+ for val in all_values:
+ if not x:
+ x.append(val)
+ cdf.append(1)
+ else:
+ if x[-1] == val:
+ cdf[-1] += 1
+ else:
+ x.append(val)
+ cdf.append(cdf[-1] + 1)
+
+ scale = 1.0 / len(all_values)
+ for i in range(len(cdf)):
+ cdf[i] = cdf[i] * scale
+
+ return (x, cdf)
+
def get_mac_addr(device, interface):
"""Get the MAC address of the specified interface. Uses ifconfig and parses
@@ -392,74 +449,134 @@
return dut.droid.wifiAwareCreateNetworkSpecifierOob(
id, dev_type, peer_mac, None, sec)
-def configure_dw(device, is_default, is_24_band, value):
- """Use the command-line API to configure the DW (discovery window) setting
+def configure_power_setting(device, mode, name, value):
+ """Use the command-line API to configure the power setting
Args:
device: Device on which to perform configuration
- is_default: True for the default setting, False for the non-interactive
- setting
- is_24_band: True for 2.4GHz band, False for 5GHz band
- value: An integer 0 to 5
+ mode: The power mode being set, should be "default", "inactive", or "idle"
+ name: One of the power settings from 'wifiaware set-power'.
+ value: An integer.
"""
- variable = 'dw_%s_%sghz' % ('default' if is_default else 'on_inactive', '24'
- if is_24_band else '5')
- device.adb.shell("cmd wifiaware native_api set %s %d" % (variable, value))
+ device.adb.shell(
+ "cmd wifiaware native_api set-power %s %s %d" % (mode, name, value))
-def config_dw_high_power(device):
- """Configure device's discovery window (DW) values to high power mode -
+def configure_mac_random_interval(device, interval_sec):
+ """Use the command-line API to configure the MAC address randomization
+ interval.
+
+ Args:
+ device: Device on which to perform configuration
+ interval_sec: The MAC randomization interval in seconds. A value of 0
+ disables all randomization.
+ """
+ device.adb.shell(
+ "cmd wifiaware native_api set mac_random_interval_sec %d" % interval_sec)
+
+def configure_ndp_allow_any_override(device, override_api_check):
+ """Use the command-line API to configure whether an NDP Responder may be
+ configured to accept an NDP request from ANY peer.
+
+ By default the target API level of the requesting app determines whether such
+ configuration is permitted. This allows overriding the API check and allowing
+ it.
+
+ Args:
+ device: Device on which to perform configuration.
+    override_api_check: True to allow a Responder to accept an ANY-peer configuration, False to
+ perform the API level check.
+ """
+ device.adb.shell("cmd wifiaware state_mgr allow_ndp_any %s" % (
+ "true" if override_api_check else "false"))
+
+def config_settings_high_power(device):
+ """Configure device's power settings values to high power mode -
whether device is in interactive or non-interactive modes"""
- configure_dw(
- device, is_default=True, is_24_band=True, value=aconsts.DW_24_INTERACTIVE)
- configure_dw(
- device, is_default=True, is_24_band=False, value=aconsts.DW_5_INTERACTIVE)
- configure_dw(
- device,
- is_default=False,
- is_24_band=True,
- value=aconsts.DW_24_INTERACTIVE)
- configure_dw(
- device,
- is_default=False,
- is_24_band=False,
- value=aconsts.DW_5_INTERACTIVE)
+ configure_power_setting(device, "default", "dw_24ghz",
+ aconsts.POWER_DW_24_INTERACTIVE)
+ configure_power_setting(device, "default", "dw_5ghz",
+ aconsts.POWER_DW_5_INTERACTIVE)
+ configure_power_setting(device, "default", "disc_beacon_interval_ms",
+ aconsts.POWER_DISC_BEACON_INTERVAL_INTERACTIVE)
+ configure_power_setting(device, "default", "num_ss_in_discovery",
+ aconsts.POWER_NUM_SS_IN_DISC_INTERACTIVE)
+ configure_power_setting(device, "default", "enable_dw_early_term",
+ aconsts.POWER_ENABLE_DW_EARLY_TERM_INTERACTIVE)
-def config_dw_low_power(device):
- """Configure device's discovery window (DW) values to low power mode - whether
+ configure_power_setting(device, "inactive", "dw_24ghz",
+ aconsts.POWER_DW_24_INTERACTIVE)
+ configure_power_setting(device, "inactive", "dw_5ghz",
+ aconsts.POWER_DW_5_INTERACTIVE)
+ configure_power_setting(device, "inactive", "disc_beacon_interval_ms",
+ aconsts.POWER_DISC_BEACON_INTERVAL_INTERACTIVE)
+ configure_power_setting(device, "inactive", "num_ss_in_discovery",
+ aconsts.POWER_NUM_SS_IN_DISC_INTERACTIVE)
+ configure_power_setting(device, "inactive", "enable_dw_early_term",
+ aconsts.POWER_ENABLE_DW_EARLY_TERM_INTERACTIVE)
+
+def config_settings_low_power(device):
+ """Configure device's power settings values to low power mode - whether
device is in interactive or non-interactive modes"""
- configure_dw(
- device,
- is_default=True,
- is_24_band=True,
- value=aconsts.DW_24_NON_INTERACTIVE)
- configure_dw(
- device,
- is_default=True,
- is_24_band=False,
- value=aconsts.DW_5_NON_INTERACTIVE)
- configure_dw(
- device,
- is_default=False,
- is_24_band=True,
- value=aconsts.DW_24_NON_INTERACTIVE)
- configure_dw(
- device,
- is_default=False,
- is_24_band=False,
- value=aconsts.DW_5_NON_INTERACTIVE)
+ configure_power_setting(device, "default", "dw_24ghz",
+ aconsts.POWER_DW_24_NON_INTERACTIVE)
+ configure_power_setting(device, "default", "dw_5ghz",
+ aconsts.POWER_DW_5_NON_INTERACTIVE)
+ configure_power_setting(device, "default", "disc_beacon_interval_ms",
+ aconsts.POWER_DISC_BEACON_INTERVAL_NON_INTERACTIVE)
+ configure_power_setting(device, "default", "num_ss_in_discovery",
+ aconsts.POWER_NUM_SS_IN_DISC_NON_INTERACTIVE)
+ configure_power_setting(device, "default", "enable_dw_early_term",
+ aconsts.POWER_ENABLE_DW_EARLY_TERM_NON_INTERACTIVE)
-def config_dw_all_modes(device, dw_24ghz, dw_5ghz):
+ configure_power_setting(device, "inactive", "dw_24ghz",
+ aconsts.POWER_DW_24_NON_INTERACTIVE)
+ configure_power_setting(device, "inactive", "dw_5ghz",
+ aconsts.POWER_DW_5_NON_INTERACTIVE)
+ configure_power_setting(device, "inactive", "disc_beacon_interval_ms",
+ aconsts.POWER_DISC_BEACON_INTERVAL_NON_INTERACTIVE)
+ configure_power_setting(device, "inactive", "num_ss_in_discovery",
+ aconsts.POWER_NUM_SS_IN_DISC_NON_INTERACTIVE)
+ configure_power_setting(device, "inactive", "enable_dw_early_term",
+ aconsts.POWER_ENABLE_DW_EARLY_TERM_NON_INTERACTIVE)
+
+
+def config_power_settings(device, dw_24ghz, dw_5ghz, disc_beacon_interval=None,
+ num_ss_in_disc=None, enable_dw_early_term=None):
"""Configure device's discovery window (DW) values to the specified values -
whether the device is in interactive or non-interactive mode.
Args:
dw_24ghz: DW interval in the 2.4GHz band.
dw_5ghz: DW interval in the 5GHz band.
+ disc_beacon_interval: The discovery beacon interval (in ms). If None then
+ not set.
+ num_ss_in_disc: Number of spatial streams to use for discovery. If None then
+ not set.
+ enable_dw_early_term: If True then enable early termination of the DW. If
+ None then not set.
"""
- configure_dw(device, is_default=True, is_24_band=True, value=dw_24ghz)
- configure_dw(device, is_default=True, is_24_band=False, value=dw_5ghz)
- configure_dw(device, is_default=False, is_24_band=True, value=dw_24ghz)
- configure_dw(device, is_default=False, is_24_band=False, value=dw_5ghz)
+ configure_power_setting(device, "default", "dw_24ghz", dw_24ghz)
+ configure_power_setting(device, "default", "dw_5ghz", dw_5ghz)
+ configure_power_setting(device, "inactive", "dw_24ghz", dw_24ghz)
+ configure_power_setting(device, "inactive", "dw_5ghz", dw_5ghz)
+
+ if disc_beacon_interval is not None:
+ configure_power_setting(device, "default", "disc_beacon_interval_ms",
+ disc_beacon_interval)
+ configure_power_setting(device, "inactive", "disc_beacon_interval_ms",
+ disc_beacon_interval)
+
+ if num_ss_in_disc is not None:
+ configure_power_setting(device, "default", "num_ss_in_discovery",
+ num_ss_in_disc)
+ configure_power_setting(device, "inactive", "num_ss_in_discovery",
+ num_ss_in_disc)
+
+ if enable_dw_early_term is not None:
+ configure_power_setting(device, "default", "enable_dw_early_term",
+ enable_dw_early_term)
+ configure_power_setting(device, "inactive", "enable_dw_early_term",
+ enable_dw_early_term)
def create_discovery_config(service_name,
d_type,
@@ -495,6 +612,36 @@
config[aconsts.DISCOVERY_KEY_TERM_CB_ENABLED] = term_cb_enable
return config
+def add_ranging_to_pub(p_config, enable_ranging):
+ """Add ranging enabled configuration to a publish configuration (only relevant
+ for publish configuration).
+
+ Args:
+ p_config: The Publish discovery configuration.
+ enable_ranging: True to enable ranging, False to disable.
+ Returns:
+ The modified publish configuration.
+ """
+ p_config[aconsts.DISCOVERY_KEY_RANGING_ENABLED] = enable_ranging
+ return p_config
+
+def add_ranging_to_sub(s_config, min_distance_mm, max_distance_mm):
+ """Add ranging distance configuration to a subscribe configuration (only
+ relevant to a subscribe configuration).
+
+ Args:
+ s_config: The Subscribe discovery configuration.
+ min_distance_mm, max_distance_mm: The min and max distance specification.
+ Used if not None.
+ Returns:
+ The modified subscribe configuration.
+ """
+ if min_distance_mm is not None:
+ s_config[aconsts.DISCOVERY_KEY_MIN_DISTANCE_MM] = min_distance_mm
+ if max_distance_mm is not None:
+ s_config[aconsts.DISCOVERY_KEY_MAX_DISTANCE_MM] = max_distance_mm
+ return s_config
+
def attach_with_identity(dut):
"""Start an Aware session (attach) and wait for confirmation and identity
information (mac address).
diff --git a/acts/framework/acts/test_utils/wifi/rpm_controller_utils.py b/acts/framework/acts/test_utils/wifi/rpm_controller_utils.py
new file mode 100644
index 0000000..6aa8e3e
--- /dev/null
+++ b/acts/framework/acts/test_utils/wifi/rpm_controller_utils.py
@@ -0,0 +1,183 @@
+#!/usr/bin/env python3
+#
+# Copyright 2018 Google, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+from acts.controllers.attenuator_lib._tnhelper import _ascii_string
+
+import logging
+import telnetlib
+
+ID = '.A'
+LOGIN_PWD = 'admn'
+ON = 'On'
+OFF = 'Off'
+PASSWORD = 'Password: '
+PORT = 23
+RPM_PROMPT = 'Switched CDU: '
+SEPARATOR = '\n'
+TIMEOUT = 3
+USERNAME = 'Username: '
+
+
+class RpmControllerError(Exception):
+ """Error related to RPM switch."""
+
+class RpmController(object):
+ """Class representing telnet to RPM switch.
+
+ Each object represents a telnet connection to the RPM switch's IP.
+
+ Attributes:
+ tn: represents a connection to RPM switch.
+ host: IP address of the RPM controller.
+ """
+ def __init__(self, host):
+ """Initializes the RPM controller object.
+
+ Establishes a telnet connection and login to the switch.
+ """
+ self.host = host
+ logging.info('RPM IP: %s' % self.host)
+
+ self.tn = telnetlib.Telnet(self.host)
+ self.tn.open(self.host, PORT, TIMEOUT)
+ self.run(USERNAME, LOGIN_PWD)
+ result = self.run(PASSWORD, LOGIN_PWD)
+ if RPM_PROMPT not in result:
+ raise RpmControllerError('Failed to login to rpm controller %s'
+ % self.host)
+
+ def run(self, prompt, cmd_str):
+ """Method to run commands on the RPM.
+
+ This method simply runs a command and returns output in decoded format.
+ The calling methods should take care of parsing the expected result
+ from this output.
+
+ Args:
+ prompt: Expected prompt before running a command.
+ cmd_str: Command to run on RPM.
+
+ Returns:
+ Decoded text returned by the command.
+ """
+ cmd_str = '%s%s' % (cmd_str, SEPARATOR)
+ res = self.tn.read_until(_ascii_string(prompt), TIMEOUT)
+
+ self.tn.write(_ascii_string(cmd_str))
+ idx, val, txt = self.tn.expect(
+ [_ascii_string('\S+%s' % SEPARATOR)], TIMEOUT)
+
+ return txt.decode()
+
+ def set_rpm_port_state(self, rpm_port, state):
+ """Method to turn on/off rpm port.
+
+ Args:
+ rpm_port: port number of the switch to turn on.
+ state: 'on' or 'off'
+
+ Returns:
+ True: if the state is set to the expected value
+ """
+ port = '%s%s' % (ID, rpm_port)
+ logging.info('Turning %s port: %s' % (state, port))
+ self.run(RPM_PROMPT, '%s %s' % (state.lower(), port))
+ result = self.run(RPM_PROMPT, 'status %s' % port)
+ if port not in result:
+ raise RpmControllerError('Port %s doesn\'t exist' % port)
+ return state in result
+
+ def turn_on(self, rpm_port):
+ """Method to turn on a port on the RPM switch.
+
+ Args:
+ rpm_port: port number of the switch to turn on.
+
+ Returns:
+ True if the port is turned on.
+ False if not turned on.
+ """
+ return self.set_rpm_port_state(rpm_port, ON)
+
+ def turn_off(self, rpm_port):
+ """Method to turn off a port on the RPM switch.
+
+ Args:
+ rpm_port: port number of the switch to turn off.
+
+ Returns:
+ True if the port is turned off.
+ False if not turned off.
+ """
+ return self.set_rpm_port_state(rpm_port, OFF)
+
+ def __del__(self):
+ """Close the telnet connection. """
+ self.tn.close()
+
+
+def create_telnet_session(ip):
+ """Returns telnet connection object to RPM's IP."""
+ return RpmController(ip)
+
+def turn_on_ap(pcap, ssid, rpm_port, rpm_ip=None, rpm=None):
+ """Turn on the AP.
+
+    This method turns on the RPM port the AP is connected to and verifies
+    that the SSID of the AP is found in the scan results collected through
+    the packet capturer.
+
+ Either IP addr of the RPM switch or the existing telnet connection
+ to the RPM is required. Multiple APs might be connected to the same RPM
+ switch. Instead of connecting/terminating telnet for each AP, the test
+ can maintain a single telnet connection for all the APs.
+
+ Args:
+ pcap: packet capture object.
+ ssid: SSID of the wifi network.
+ rpm_port: Port number on the RPM switch the AP is connected to.
+ rpm_ip: IP address of the RPM switch.
+ rpm: telnet connection object to the RPM switch.
+ """
+ if not rpm and not rpm_ip:
+ logging.error("Failed to turn on AP. Need telnet object or RPM IP")
+ return False
+ elif not rpm:
+ rpm = create_telnet_session(rpm_ip)
+
+ return rpm.turn_on(rpm_port) and pcap.start_scan_and_find_network(ssid)
+
+def turn_off_ap(rpm_port, rpm_ip=None, rpm=None):
+ """ Turn off AP.
+
+ This method turns off the RPM port the AP is connected to.
+
+ Either IP addr of the RPM switch or the existing telnet connection
+ to the RPM is required.
+
+ Args:
+ rpm_port: Port number on the RPM switch the AP is connected to.
+ rpm_ip: IP address of the RPM switch.
+ rpm: telnet connection object to the RPM switch.
+ """
+ if not rpm and not rpm_ip:
+ logging.error("Failed to turn off AP. Need telnet object or RPM IP")
+ return False
+ elif not rpm:
+ rpm = create_telnet_session(rpm_ip)
+
+ return rpm.turn_off(rpm_port)
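+
+
+# Example usage (illustrative; the IP address, port number and 'pcap' packet
+# capture object are placeholders for a real lab setup):
+#
+#   rpm = create_telnet_session('192.168.1.50')
+#   turn_on_ap(pcap, 'test-ssid', 3, rpm=rpm)
+#   ...
+#   turn_off_ap(3, rpm=rpm)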
diff --git a/acts/framework/acts/test_utils/wifi/rtt/RttBaseTest.py b/acts/framework/acts/test_utils/wifi/rtt/RttBaseTest.py
new file mode 100644
index 0000000..2182780
--- /dev/null
+++ b/acts/framework/acts/test_utils/wifi/rtt/RttBaseTest.py
@@ -0,0 +1,70 @@
+#!/usr/bin/env python3.4
+#
+# Copyright 2017 - Google
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from acts import asserts
+from acts import utils
+from acts.base_test import BaseTestClass
+from acts.test_utils.wifi import wifi_test_utils as wutils
+from acts.test_utils.wifi.rtt import rtt_const as rconsts
+from acts.test_utils.wifi.rtt import rtt_test_utils as rutils
+
+
+class RttBaseTest(BaseTestClass):
+
+ def __init__(self, controllers):
+ super(RttBaseTest, self).__init__(controllers)
+
+ def setup_test(self):
+ required_params = ("lci_reference", "lcr_reference",
+ "rtt_reference_distance_mm",
+ "stress_test_min_iteration_count",
+ "stress_test_target_run_time_sec")
+ self.unpack_userparams(required_params)
+
+ # can be moved to JSON config file
+ self.rtt_reference_distance_margin_mm = 1000
+ self.rtt_max_failure_rate_two_sided_rtt_percentage = 10
+ self.rtt_max_failure_rate_one_sided_rtt_percentage = 50
+ self.rtt_max_margin_exceeded_rate_two_sided_rtt_percentage = 10
+ self.rtt_max_margin_exceeded_rate_one_sided_rtt_percentage = 50
+ self.rtt_min_expected_rssi_dbm = -100
+
+ for ad in self.android_devices:
+ utils.set_location_service(ad, True)
+ asserts.skip_if(
+ not ad.droid.doesDeviceSupportWifiRttFeature(),
+ "Device under test does not support Wi-Fi RTT - skipping test")
+ wutils.wifi_toggle_state(ad, True)
+ rtt_avail = ad.droid.wifiIsRttAvailable()
+ if not rtt_avail:
+ self.log.info('RTT not available. Waiting ...')
+ rutils.wait_for_event(ad, rconsts.BROADCAST_WIFI_RTT_AVAILABLE)
+ ad.ed.clear_all_events()
+ rutils.config_privilege_override(ad, False)
+ ad.droid.wifiSetCountryCode(wutils.WifiEnums.CountryCode.US)
+
+ def teardown_test(self):
+ for ad in self.android_devices:
+ if not ad.droid.doesDeviceSupportWifiRttFeature():
+ return
+
+ # clean-up queue from the System Service UID
+ ad.droid.wifiRttCancelRanging([1000])
+
+ def on_fail(self, test_name, begin_time):
+ for ad in self.android_devices:
+ ad.take_bug_report(test_name, begin_time)
+ ad.cat_adb_log(test_name, begin_time)
diff --git a/acts/framework/acts/test_utils/wifi/rtt/__init__.py b/acts/framework/acts/test_utils/wifi/rtt/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/acts/framework/acts/test_utils/wifi/rtt/__init__.py
diff --git a/acts/framework/acts/test_utils/wifi/rtt/rtt_const.py b/acts/framework/acts/test_utils/wifi/rtt/rtt_const.py
new file mode 100644
index 0000000..ddf29e5
--- /dev/null
+++ b/acts/framework/acts/test_utils/wifi/rtt/rtt_const.py
@@ -0,0 +1,59 @@
+#!/usr/bin/env python3.4
+#
+# Copyright 2017 - Google
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+######################################################
+# Broadcast events
+######################################################
+BROADCAST_WIFI_RTT_AVAILABLE = "WifiRttAvailable"
+BROADCAST_WIFI_RTT_NOT_AVAILABLE = "WifiRttNotAvailable"
+
+######################################################
+# RangingResultCallback events
+######################################################
+EVENT_CB_RANGING_ON_FAIL = "WifiRttRangingFailure"
+EVENT_CB_RANGING_ON_RESULT = "WifiRttRangingResults"
+
+EVENT_CB_RANGING_KEY_RESULTS = "Results"
+
+EVENT_CB_RANGING_KEY_STATUS = "status"
+EVENT_CB_RANGING_KEY_DISTANCE_MM = "distanceMm"
+EVENT_CB_RANGING_KEY_DISTANCE_STD_DEV_MM = "distanceStdDevMm"
+EVENT_CB_RANGING_KEY_RSSI = "rssi"
+EVENT_CB_RANGING_KEY_NUM_ATTEMPTED_MEASUREMENTS = "numAttemptedMeasurements"
+EVENT_CB_RANGING_KEY_NUM_SUCCESSFUL_MEASUREMENTS = "numSuccessfulMeasurements"
+EVENT_CB_RANGING_KEY_LCI = "lci"
+EVENT_CB_RANGING_KEY_LCR = "lcr"
+EVENT_CB_RANGING_KEY_TIMESTAMP = "timestamp"
+EVENT_CB_RANGING_KEY_MAC = "mac"
+EVENT_CB_RANGING_KEY_PEER_ID = "peerId"
+EVENT_CB_RANGING_KEY_MAC_AS_STRING = "macAsString"
+
+EVENT_CB_RANGING_STATUS_SUCCESS = 0
+EVENT_CB_RANGING_STATUS_FAIL = 1
+EVENT_CB_RANGING_STATUS_RESPONDER_DOES_NOT_SUPPORT_IEEE80211MC = 2
+
+######################################################
+# status codes
+######################################################
+
+RANGING_FAIL_CODE_GENERIC = 1
+RANGING_FAIL_CODE_RTT_NOT_AVAILABLE = 2
+
+######################################################
+# ScanResults keys
+######################################################
+
+SCAN_RESULT_KEY_RTT_RESPONDER = "is80211McRTTResponder"
\ No newline at end of file
diff --git a/acts/framework/acts/test_utils/wifi/rtt/rtt_test_utils.py b/acts/framework/acts/test_utils/wifi/rtt/rtt_test_utils.py
new file mode 100644
index 0000000..c24b406
--- /dev/null
+++ b/acts/framework/acts/test_utils/wifi/rtt/rtt_test_utils.py
@@ -0,0 +1,463 @@
+#!/usr/bin/env python3.4
+#
+# Copyright 2017 - Google
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import queue
+import statistics
+import time
+
+from acts import asserts
+from acts.test_utils.wifi import wifi_test_utils as wutils
+from acts.test_utils.wifi.rtt import rtt_const as rconsts
+
+# arbitrary timeout for events
+EVENT_TIMEOUT = 10
+
+
+def decorate_event(event_name, id):
+ return '%s_%d' % (event_name, id)
+
+
+def wait_for_event(ad, event_name, timeout=EVENT_TIMEOUT):
+ """Wait for the specified event or timeout.
+
+ Args:
+ ad: The android device
+ event_name: The event to wait on
+ timeout: Number of seconds to wait
+ Returns:
+ The event (if available)
+ """
+ prefix = ''
+ if hasattr(ad, 'pretty_name'):
+ prefix = '[%s] ' % ad.pretty_name
+ try:
+ event = ad.ed.pop_event(event_name, timeout)
+ ad.log.info('%s%s: %s', prefix, event_name, event['data'])
+ return event
+ except queue.Empty:
+ ad.log.info('%sTimed out while waiting for %s', prefix, event_name)
+ asserts.fail(event_name)
+
+def fail_on_event(ad, event_name, timeout=EVENT_TIMEOUT):
+ """Wait for a timeout period and looks for the specified event - fails if it
+ is observed.
+
+ Args:
+ ad: The android device
+ event_name: The event to wait for (and fail on its appearance)
+ """
+ prefix = ''
+ if hasattr(ad, 'pretty_name'):
+ prefix = '[%s] ' % ad.pretty_name
+ try:
+ event = ad.ed.pop_event(event_name, timeout)
+ ad.log.info('%sReceived unwanted %s: %s', prefix, event_name, event['data'])
+ asserts.fail(event_name, extras=event)
+ except queue.Empty:
+ ad.log.info('%s%s not seen (as expected)', prefix, event_name)
+ return
+
+
+def config_privilege_override(dut, override_to_no_privilege):
+ """Configure the device to override the permission check and to disallow any
+ privileged RTT operations, e.g. disallow one-sided RTT to Responders (APs)
+ which do not support IEEE 802.11mc.
+
+ Args:
+ dut: Device to configure.
+ override_to_no_privilege: True to indicate no privileged ops, False for
+ default (which will allow privileged ops).
+ """
+ dut.adb.shell("cmd wifirtt set override_assume_no_privilege %d" % (
+ 1 if override_to_no_privilege else 0))
+
+
+def get_rtt_constrained_results(scanned_networks, support_rtt):
+ """Filter the input list and only return those networks which either support
+ or do not support RTT (IEEE 802.11mc.)
+
+ Args:
+ scanned_networks: A list of networks from scan results.
+ support_rtt: True - only return those APs which support RTT, False - only
+ return those APs which do not support RTT.
+
+ Returns: a sub-set of the scanned_networks per support_rtt constraint.
+ """
+ matching_networks = []
+ for network in scanned_networks:
+ if support_rtt:
+ if (rconsts.SCAN_RESULT_KEY_RTT_RESPONDER in network and
+ network[rconsts.SCAN_RESULT_KEY_RTT_RESPONDER]):
+ matching_networks.append(network)
+ else:
+ if (rconsts.SCAN_RESULT_KEY_RTT_RESPONDER not in network or
+ not network[rconsts.SCAN_RESULT_KEY_RTT_RESPONDER]):
+ matching_networks.append(network)
+
+ return matching_networks
+
+
+def scan_networks(dut):
+ """Perform a scan and return scan results.
+
+ Args:
+ dut: Device under test.
+
+ Returns: an array of scan results.
+ """
+ wutils.start_wifi_connection_scan(dut)
+ return dut.droid.wifiGetScanResults()
+
+
+def scan_with_rtt_support_constraint(dut, support_rtt, repeat=0):
+ """Perform a scan and return scan results of APs: only those that support or
+ do not support RTT (IEEE 802.11mc) - per the support_rtt parameter.
+
+ Args:
+ dut: Device under test.
+ support_rtt: True - only return those APs which support RTT, False - only
+ return those APs which do not support RTT.
+ repeat: Re-scan this many times to find an RTT supporting network.
+
+ Returns: an array of scan results.
+ """
+ for i in range(repeat + 1):
+ scan_results = scan_networks(dut)
+ aps = get_rtt_constrained_results(scan_results, support_rtt)
+ if len(aps) != 0:
+ return aps
+
+ return []
+
+
+def select_best_scan_results(scans, select_count, lowest_rssi=-80):
+ """Select the strongest 'select_count' scans in the input list based on
+  highest RSSI. Exclude all very weak signals, even if this results in a
+  shorter list.
+
+ Args:
+ scans: List of scan results.
+ select_count: An integer specifying how many scans to return at most.
+ lowest_rssi: The lowest RSSI to accept into the output.
+ Returns: a list of the strongest 'select_count' scan results from the scans
+ list.
+ """
+ def takeRssi(element):
+ return element['level']
+
+ result = []
+ scans.sort(key=takeRssi, reverse=True)
+ for scan in scans:
+ if len(result) == select_count:
+ break
+ if scan['level'] < lowest_rssi:
+ break # rest are lower since we're sorted
+ result.append(scan)
+
+ return result
+
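+# Example usage (illustrative; 'dut' is assumed to be an AndroidDevice): scan
+# for 802.11mc-capable APs and keep the two strongest results above -70 dBm.
+#
+#   rtt_aps = scan_with_rtt_support_constraint(dut, support_rtt=True, repeat=3)
+#   best_aps = select_best_scan_results(rtt_aps, select_count=2,
+#                                       lowest_rssi=-70)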
+
+def validate_ap_result(scan_result, range_result):
+ """Validate the range results:
+  - Successful if the AP (per the scan result) supports 802.11mc (allowed to
+    fail otherwise)
+ - MAC of result matches the BSSID
+
+ Args:
+ scan_result: Scan result for the AP
+ range_result: Range result returned by the RTT API
+ """
+ asserts.assert_equal(scan_result[wutils.WifiEnums.BSSID_KEY], range_result[
+ rconsts.EVENT_CB_RANGING_KEY_MAC_AS_STRING_BSSID], 'MAC/BSSID mismatch')
+ if (rconsts.SCAN_RESULT_KEY_RTT_RESPONDER in scan_result and
+ scan_result[rconsts.SCAN_RESULT_KEY_RTT_RESPONDER]):
+ asserts.assert_true(range_result[rconsts.EVENT_CB_RANGING_KEY_STATUS] ==
+ rconsts.EVENT_CB_RANGING_STATUS_SUCCESS,
+ 'Ranging failed for an AP which supports 802.11mc!')
+
+
+def validate_ap_results(scan_results, range_results):
+ """Validate an array of ranging results against the scan results used to
+ trigger the range. The assumption is that the results are returned in the
+ same order as the request (which were the scan results).
+
+ Args:
+ scan_results: Scans results used to trigger the range request
+ range_results: Range results returned by the RTT API
+ """
+ asserts.assert_equal(
+ len(scan_results),
+ len(range_results),
+ 'Mismatch in length of scan results and range results')
+
+ # sort first based on BSSID/MAC
+ scan_results.sort(key=lambda x: x[wutils.WifiEnums.BSSID_KEY])
+ range_results.sort(
+ key=lambda x: x[rconsts.EVENT_CB_RANGING_KEY_MAC_AS_STRING_BSSID])
+
+ for i in range(len(scan_results)):
+ validate_ap_result(scan_results[i], range_results[i])
+
+
+def validate_aware_mac_result(range_result, mac, description):
+ """Validate the range result for an Aware peer specified with a MAC address:
+ - Correct MAC address.
+
+ The MAC addresses may contain ":" (which are ignored for the comparison) and
+ may be in any case (which is ignored for the comparison).
+
+ Args:
+ range_result: Range result returned by the RTT API
+ mac: MAC address of the peer
+ description: Additional content to print on failure
+ """
+ mac1 = mac.replace(':', '').lower()
+ mac2 = range_result[rconsts.EVENT_CB_RANGING_KEY_MAC_AS_STRING].replace(':',
+ '').lower()
+ asserts.assert_equal(mac1, mac2,
+ '%s: MAC mismatch' % description)
+
+def validate_aware_peer_id_result(range_result, peer_id, description):
+ """Validate the range result for An Aware peer specified with a Peer ID:
+ - Correct Peer ID
+ - MAC address information not available
+
+ Args:
+ range_result: Range result returned by the RTT API
+ peer_id: Peer ID of the peer
+ description: Additional content to print on failure
+ """
+ asserts.assert_equal(peer_id,
+ range_result[rconsts.EVENT_CB_RANGING_KEY_PEER_ID],
+ '%s: Peer Id mismatch' % description)
+ asserts.assert_false(rconsts.EVENT_CB_RANGING_KEY_MAC in range_result,
+ '%s: MAC Address not empty!' % description)
+
+
+def extract_stats(results, range_reference_mm, range_margin_mm, min_rssi,
+ reference_lci=[], reference_lcr=[], summary_only=False):
+ """Extract statistics from a list of RTT results. Returns a dictionary
+ with results:
+ - num_results (success or fails)
+ - num_success_results
+ - num_no_results (e.g. timeout)
+ - num_failures
+ - num_range_out_of_margin (only for successes)
+ - num_invalid_rssi (only for successes)
+ - distances: extracted list of distances
+ - distance_std_devs: extracted list of distance standard-deviations
+ - rssis: extracted list of RSSI
+ - distance_mean
+ - distance_std_dev (based on distance - ignoring the individual std-devs)
+ - rssi_mean
+ - rssi_std_dev
+ - status_codes
+ - lcis: extracted list of all of the individual LCI
+ - lcrs: extracted list of all of the individual LCR
+  - any_lci_mismatch: True/False - True if any LCI result differs from the
+    reference LCI.
+  - any_lcr_mismatch: True/False - True if any LCR result differs from the
+    reference LCR.
+ - num_attempted_measurements: extracted list of all of the individual
+ number of attempted measurements.
+ - num_successful_measurements: extracted list of all of the individual
+ number of successful measurements.
+  - invalid_num_attempted: True/False - True if any successful result reports
+    zero attempted measurements.
+  - invalid_num_successful: True/False - True if any successful result reports
+    zero successful measurements.
+
+ Args:
+ results: List of RTT results.
+ range_reference_mm: Reference value for the distance (in mm)
+ range_margin_mm: Acceptable absolute margin for distance (in mm)
+ min_rssi: Acceptable minimum RSSI value.
+ reference_lci, reference_lcr: Reference values for LCI and LCR.
+ summary_only: Only include summary keys (reduce size).
+
+ Returns: A dictionary of stats.
+ """
+ stats = {}
+ stats['num_results'] = 0
+ stats['num_success_results'] = 0
+ stats['num_no_results'] = 0
+ stats['num_failures'] = 0
+ stats['num_range_out_of_margin'] = 0
+ stats['num_invalid_rssi'] = 0
+ stats['any_lci_mismatch'] = False
+ stats['any_lcr_mismatch'] = False
+ stats['invalid_num_attempted'] = False
+ stats['invalid_num_successful'] = False
+
+ range_max_mm = range_reference_mm + range_margin_mm
+ range_min_mm = range_reference_mm - range_margin_mm
+
+ distances = []
+ distance_std_devs = []
+ rssis = []
+ num_attempted_measurements = []
+ num_successful_measurements = []
+ status_codes = []
+ lcis = []
+ lcrs = []
+
+ for i in range(len(results)):
+ result = results[i]
+
+ if result is None: # None -> timeout waiting for RTT result
+ stats['num_no_results'] = stats['num_no_results'] + 1
+ continue
+ stats['num_results'] = stats['num_results'] + 1
+
+ status_codes.append(result[rconsts.EVENT_CB_RANGING_KEY_STATUS])
+ if status_codes[-1] != rconsts.EVENT_CB_RANGING_STATUS_SUCCESS:
+ stats['num_failures'] = stats['num_failures'] + 1
+ continue
+ stats['num_success_results'] = stats['num_success_results'] + 1
+
+ distance_mm = result[rconsts.EVENT_CB_RANGING_KEY_DISTANCE_MM]
+ distances.append(distance_mm)
+ if not range_min_mm <= distance_mm <= range_max_mm:
+ stats['num_range_out_of_margin'] = stats['num_range_out_of_margin'] + 1
+ distance_std_devs.append(
+ result[rconsts.EVENT_CB_RANGING_KEY_DISTANCE_STD_DEV_MM])
+
+ rssi = result[rconsts.EVENT_CB_RANGING_KEY_RSSI]
+ rssis.append(rssi)
+ if not min_rssi <= rssi <= 0:
+ stats['num_invalid_rssi'] = stats['num_invalid_rssi'] + 1
+
+ num_attempted = result[
+ rconsts.EVENT_CB_RANGING_KEY_NUM_ATTEMPTED_MEASUREMENTS]
+ num_attempted_measurements.append(num_attempted)
+ if num_attempted == 0:
+ stats['invalid_num_attempted'] = True
+
+ num_successful = result[
+ rconsts.EVENT_CB_RANGING_KEY_NUM_SUCCESSFUL_MEASUREMENTS]
+ num_successful_measurements.append(num_successful)
+ if num_successful == 0:
+ stats['invalid_num_successful'] = True
+
+ lcis.append(result[rconsts.EVENT_CB_RANGING_KEY_LCI])
+ if (result[rconsts.EVENT_CB_RANGING_KEY_LCI] != reference_lci):
+ stats['any_lci_mismatch'] = True
+ lcrs.append(result[rconsts.EVENT_CB_RANGING_KEY_LCR])
+ if (result[rconsts.EVENT_CB_RANGING_KEY_LCR] != reference_lcr):
+ stats['any_lcr_mismatch'] = True
+
+ if len(distances) > 0:
+ stats['distance_mean'] = statistics.mean(distances)
+ if len(distances) > 1:
+ stats['distance_std_dev'] = statistics.stdev(distances)
+ if len(rssis) > 0:
+ stats['rssi_mean'] = statistics.mean(rssis)
+ if len(rssis) > 1:
+ stats['rssi_std_dev'] = statistics.stdev(rssis)
+ if not summary_only:
+ stats['distances'] = distances
+ stats['distance_std_devs'] = distance_std_devs
+ stats['rssis'] = rssis
+ stats['num_attempted_measurements'] = num_attempted_measurements
+ stats['num_successful_measurements'] = num_successful_measurements
+ stats['status_codes'] = status_codes
+ stats['lcis'] = lcis
+ stats['lcrs'] = lcrs
+
+ return stats
+
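+# Example usage (illustrative; 'results' is assumed to be a list of RTT result
+# events for one peer, e.g. one of the per-BSSID lists built by run_ranging()
+# below, and the reference values are samples):
+#
+#   stats = extract_stats(results, range_reference_mm=10000,
+#                         range_margin_mm=1000, min_rssi=-100)
+#   print(stats['num_success_results'], stats.get('distance_mean'))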
+
+def run_ranging(dut, aps, iter_count, time_between_iterations,
+ target_run_time_sec=0):
+ """Executing ranging to the set of APs.
+
+ Will execute a minimum of 'iter_count' iterations. Will continue to run
+ until execution time (just) exceeds 'target_run_time_sec'.
+
+ Args:
+ dut: Device under test
+ aps: A list of APs (Access Points) to range to.
+ iter_count: (Minimum) Number of measurements to perform.
+ time_between_iterations: Number of seconds to wait between iterations.
+ target_run_time_sec: The target run time in seconds.
+
+  Returns: a dictionary of BSSID to a list of the events containing the RTT
+    results (or None entries for failed measurements).
+ """
+ max_peers = dut.droid.wifiRttMaxPeersInRequest()
+
+ asserts.assert_true(len(aps) > 0, "Need at least one AP!")
+ if len(aps) > max_peers:
+ aps = aps[0:max_peers]
+
+ events = {} # need to keep track per BSSID!
+ for ap in aps:
+ events[ap["BSSID"]] = []
+
+ start_clock = time.time()
+ iterations_done = 0
+ run_time = 0
+ while iterations_done < iter_count or (
+ target_run_time_sec != 0 and run_time < target_run_time_sec):
+ if iterations_done != 0 and time_between_iterations != 0:
+ time.sleep(time_between_iterations)
+
+ id = dut.droid.wifiRttStartRangingToAccessPoints(aps)
+ try:
+ event = dut.ed.pop_event(
+ decorate_event(rconsts.EVENT_CB_RANGING_ON_RESULT, id), EVENT_TIMEOUT)
+ range_results = event["data"][rconsts.EVENT_CB_RANGING_KEY_RESULTS]
+ asserts.assert_equal(
+ len(aps),
+ len(range_results),
+ 'Mismatch in length of scan results and range results')
+ for result in range_results:
+ bssid = result[rconsts.EVENT_CB_RANGING_KEY_MAC_AS_STRING]
+ asserts.assert_true(bssid in events,
+ "Result BSSID %s not in requested AP!?" % bssid)
+ asserts.assert_equal(len(events[bssid]), iterations_done,
+ "Duplicate results for BSSID %s!?" % bssid)
+ events[bssid].append(result)
+ except queue.Empty:
+ for ap in aps:
+ events[ap["BSSID"]].append(None)
+
+ iterations_done = iterations_done + 1
+ run_time = time.time() - start_clock
+
+ return events
+
+
+def analyze_results(all_aps_events, rtt_reference_distance_mm,
+ distance_margin_mm, min_expected_rssi, lci_reference, lcr_reference,
+ summary_only=False):
+ """Verifies the results of the RTT experiment.
+
+ Args:
+ all_aps_events: Dictionary of APs, each a list of RTT result events.
+ rtt_reference_distance_mm: Expected distance to the AP (source of truth).
+    distance_margin_mm: Accepted error margin in the distance measurement.
+ min_expected_rssi: Minimum acceptable RSSI value
+ lci_reference, lcr_reference: Expected LCI/LCR values (arrays of bytes).
+ summary_only: Only include summary keys (reduce size).
+ """
+ all_stats = {}
+ for bssid, events in all_aps_events.items():
+ stats = extract_stats(events, rtt_reference_distance_mm,
+ distance_margin_mm, min_expected_rssi,
+ lci_reference, lcr_reference, summary_only)
+ all_stats[bssid] = stats
+ return all_stats
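+
+
+# Example end-to-end flow (illustrative; 'dut' is assumed to be an
+# AndroidDevice and the reference distance/margin are sample values):
+#
+#   aps = select_best_scan_results(
+#       scan_with_rtt_support_constraint(dut, support_rtt=True),
+#       select_count=2)
+#   events = run_ranging(dut, aps, iter_count=10, time_between_iterations=1)
+#   stats = analyze_results(events, rtt_reference_distance_mm=10000,
+#                           distance_margin_mm=1000, min_expected_rssi=-100,
+#                           lci_reference=[], lcr_reference=[])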
diff --git a/acts/framework/acts/test_utils/wifi/wifi_constants.py b/acts/framework/acts/test_utils/wifi/wifi_constants.py
index 97f342a..dd252c6 100644
--- a/acts/framework/acts/test_utils/wifi/wifi_constants.py
+++ b/acts/framework/acts/test_utils/wifi/wifi_constants.py
@@ -29,3 +29,8 @@
AP_MAIN = "main_AP"
AP_AUX = "aux_AP"
SSID = "SSID"
+
+# cnss_diag property related constants
+DEVICES_USING_LEGACY_PROP = ["sailfish", "marlin", "walleye", "taimen", "muskie"]
+CNSS_DIAG_PROP = "persist.vendor.sys.cnss.diag_txt"
+LEGACY_CNSS_DIAG_PROP = "persist.sys.cnss.diag_txt"
diff --git a/acts/framework/acts/test_utils/wifi/wifi_datastore_utils.py b/acts/framework/acts/test_utils/wifi/wifi_datastore_utils.py
new file mode 100755
index 0000000..3c045db
--- /dev/null
+++ b/acts/framework/acts/test_utils/wifi/wifi_datastore_utils.py
@@ -0,0 +1,149 @@
+#!/usr/bin/env python3
+#
+# Copyright 2018 Google, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import json
+import logging
+import pprint
+import requests
+import time
+
+from acts import asserts
+from acts import signals
+from acts import utils
+from acts.test_utils.wifi import wifi_constants
+
+"""This file consists of all the helper methods needed to interact with the
+ Datastore @ https://chaos-188802.appspot.com/ used for Android Interop
+ testing.
+"""
+
+DATASTORE_HOST = "https://chaos-188802.appspot.com"
+
+# The Datastore defines the following paths for operating methods.
+ADD_DEVICE = "devices/new"
+REMOVE_DEVICE = "devices/delete"
+LOCK_DEVICE = "devices/lock"
+UNLOCK_DEVICE = "devices/unlock"
+SHOW_DEVICE = "devices/"
+GET_DEVICES = "devices/"
+
+# HTTP content type. JSON encoded with UTF-8 character encoding.
+HTTP_HEADER = {'content-type': 'application/json'}
+
+def add_device(name, ap_label, lab_label):
+ """Add a device(AP or Packet Capturer) in datastore.
+
+ Args:
+ name: string, hostname of the device.
+ ap_label: string, AP brand name.
+ lab_label: string, lab label for AP.
+ Returns:
+        True if the device was added successfully; False otherwise.
+ """
+ request = DATASTORE_HOST + '/' + ADD_DEVICE
+ logging.debug("Request = %s" % request)
+ response = requests.post(request,
+ headers=HTTP_HEADER,
+ data=json.dumps({"hostname":name,
+ "ap_label":ap_label,
+ "lab_label":lab_label}))
+ if response.json()['result'] == 'success':
+ logging.info("Added device %s to datastore" % name)
+ return True
+ return False
+
+def remove_device(name):
+ """Delete a device(AP or Packet Capturer) in datastore.
+
+ Args:
+ name: string, hostname of the device to delete.
+ Returns:
+        True if the device was deleted successfully; False otherwise.
+ """
+ request = DATASTORE_HOST + '/' + REMOVE_DEVICE
+ logging.debug("Request = %s" % request)
+ response = requests.put(request,
+ headers=HTTP_HEADER,
+ data=json.dumps({"hostname":name}))
+ result_str = "%s deleted." % name
+ if result_str in response.text:
+ logging.info("Removed device %s from datastore" % name)
+ return True
+ return False
+
+def lock_device(name):
+ """Lock a device(AP or Packet Capturer) in datastore.
+
+ Args:
+ name: string, hostname of the device in datastore.
+ Returns:
+        True if the operation was successful; False otherwise.
+ """
+ request = DATASTORE_HOST + '/' + LOCK_DEVICE
+ logging.debug("Request = %s" % request)
+ response = requests.put(request,
+ headers=HTTP_HEADER,
+ data=json.dumps({"hostname":name, "locked_by":"admin"}))
+ if response.json()['result']:
+ logging.info("Locked device %s in datastore" % name)
+ return True
+ return False
+
+def unlock_device(name):
+ """Un-lock a device(AP or Packet Capturer) in datastore.
+
+ Args:
+ name: string, hostname of the device in datastore.
+ Returns:
+        True if the operation was successful; False otherwise.
+ """
+ request = DATASTORE_HOST + '/' + UNLOCK_DEVICE
+ logging.debug("Request = %s" % request)
+ response = requests.put(request,
+ headers=HTTP_HEADER,
+ data=json.dumps({"hostname":name}))
+ if response.json()['result']:
+ logging.info("Finished un-locking AP %s in datastore" % name)
+ return True
+ return False
+
+def show_device(name):
+ """Show device properties for a given device(AP or Packet Capturer).
+
+ Args:
+ name: string, hostname of the device in datastore to fetch info.
+ Returns: dict of device name:value properties if successful;
+ None otherwise.
+ """
+ request = DATASTORE_HOST + '/' + SHOW_DEVICE + name
+ logging.debug("Request = %s" % request)
+ response = requests.get(request)
+ if 'null' in response.text:
+ return None
+ return response.json()
+
+def get_devices():
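+
+# Example usage (illustrative; 'dut' is assumed to be an AndroidDevice and the
+# values below are sample settings, not recommendations):
+#
+#   config_power_settings(dut, dw_24ghz=1, dw_5ghz=1,
+#                         disc_beacon_interval=None, num_ss_in_disc=None,
+#                         enable_dw_early_term=None)
+#
+# Optional parameters left as None are simply not configured on the device.
+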
+ """Get a list of all devices in the datastore.
+
+ Returns: dict of all devices' name:value properties if successful;
+ None otherwise.
+ """
+ request = DATASTORE_HOST + '/' + GET_DEVICES
+ logging.debug("Request = %s" % request)
+ response = requests.get(request)
+ if 'error' in response.text:
+ return None
+ return response.json()
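+
+
+# Example usage (illustrative; the hostname and labels are placeholders):
+#
+#   if add_device('chaos-ap-001', 'SampleBrand', 'lab-1'):
+#       lock_device('chaos-ap-001')
+#       info = show_device('chaos-ap-001')
+#       unlock_device('chaos-ap-001')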
diff --git a/acts/framework/acts/test_utils/wifi/wifi_power_test_utils.py b/acts/framework/acts/test_utils/wifi/wifi_power_test_utils.py
index f253498..b77f11b 100644
--- a/acts/framework/acts/test_utils/wifi/wifi_power_test_utils.py
+++ b/acts/framework/acts/test_utils/wifi/wifi_power_test_utils.py
@@ -206,6 +206,7 @@
ad.reboot()
# Wait for auto-wifi feature to start
time.sleep(20)
+ ad.adb.shell('dumpsys battery set level 100')
ad.log.info('DTIM updated and device back from reboot')
return 1
diff --git a/acts/framework/acts/test_utils/wifi/wifi_test_utils.py b/acts/framework/acts/test_utils/wifi/wifi_test_utils.py
index b6510ae..c8a3195 100755
--- a/acts/framework/acts/test_utils/wifi/wifi_test_utils.py
+++ b/acts/framework/acts/test_utils/wifi/wifi_test_utils.py
@@ -15,8 +15,9 @@
# limitations under the License.
import logging
-import time
+import os
import pprint
+import time
from enum import IntEnum
from queue import Empty
@@ -25,6 +26,8 @@
from acts import signals
from acts import utils
from acts.controllers import attenuator
+from acts.controllers.ap_lib.hostapd_constants import BAND_2G
+from acts.controllers.ap_lib.hostapd_constants import BAND_5G
from acts.test_utils.wifi import wifi_constants
from acts.test_utils.tel import tel_defines
@@ -234,6 +237,10 @@
REPORT_EVENT_AFTER_EACH_SCAN = 1
REPORT_EVENT_FULL_SCAN_RESULT = 2
+ SCAN_TYPE_LOW_LATENCY = 0
+ SCAN_TYPE_LOW_POWER = 1
+ SCAN_TYPE_HIGH_ACCURACY = 2
+
# US Wifi frequencies
ALL_2G_FREQUENCIES = [2412, 2417, 2422, 2427, 2432, 2437, 2442, 2447, 2452,
2457, 2462]
@@ -1700,3 +1707,94 @@
WifiEnums.PWD_KEY: ap_password,
}
return config
+
+def start_pcap(pcap, wifi_band, log_path, test_name):
+ """Start packet capture in monitor mode.
+
+ Args:
+ pcap: packet capture object
+ wifi_band: '2g' or '5g' or 'dual'
+ log_path: current test log path
+ test_name: test name to be used for pcap file name
+
+ Returns:
+        Dictionary with the pid of the tcpdump process as key and the log
+        file path as the value
+ """
+ log_dir = os.path.join(log_path, test_name)
+ utils.create_dir(log_dir)
+ if wifi_band == 'dual':
+ bands = [BAND_2G, BAND_5G]
+ else:
+ bands = [wifi_band]
+ pids = {}
+ for band in bands:
+ pid = pcap.start_packet_capture(band, log_dir, test_name)
+ pids[pid] = os.path.join(log_dir, test_name)
+ return pids
+
+def stop_pcap(pcap, pids, test_status=None):
+ """Stop packet capture in monitor mode.
+
+    Since the pcap logs in monitor mode can be very large, they are deleted
+    when they are not required: if 'test_status' is True the pcap files are
+    deleted, and if False they are kept.
+
+ Args:
+ pcap: packet capture object
+ pids: dictionary returned by start_pcap
+ test_status: status of the test case
+ """
+ for pid, fname in pids.items():
+ pcap.stop_packet_capture(pid)
+
+ if test_status:
+ os.system('rm -rf %s' % os.path.dirname(fname))
+
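+# Example usage (illustrative; 'pcap' is assumed to be a packet_capture
+# controller object and 'self' a test class providing log_path and the current
+# test name):
+#
+#   pids = start_pcap(pcap, 'dual', self.log_path, self.current_test_name)
+#   # ... run the test ...
+#   stop_pcap(pcap, pids, test_status=True)  # True deletes the capture files
+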
+def start_cnss_diags(ads):
+ for ad in ads:
+ start_cnss_diag(ad)
+
+def start_cnss_diag(ad):
+ """Start cnss_diag to record extra wifi logs
+
+ Args:
+ ad: android device object.
+ """
+ if ad.model in wifi_constants.DEVICES_USING_LEGACY_PROP:
+ prop = wifi_constants.LEGACY_CNSS_DIAG_PROP
+ else:
+ prop = wifi_constants.CNSS_DIAG_PROP
+ if ad.adb.getprop(prop) != 'true':
+ ad.adb.shell("find /data/vendor/wifi/cnss_diag/wlan_logs/ -type f -delete")
+ ad.adb.shell("setprop %s true" % prop, ignore_status=True)
+
+def stop_cnss_diags(ads):
+ for ad in ads:
+ stop_cnss_diag(ad)
+
+def stop_cnss_diag(ad):
+ """Stops cnss_diag
+
+ Args:
+ ad: android device object.
+ """
+ if ad.model in wifi_constants.DEVICES_USING_LEGACY_PROP:
+ prop = wifi_constants.LEGACY_CNSS_DIAG_PROP
+ else:
+ prop = wifi_constants.CNSS_DIAG_PROP
+ ad.adb.shell("setprop %s false" % prop, ignore_status=True)
+
+def get_cnss_diag_log(ad, test_name=""):
+ """Pulls the cnss_diag logs in the wlan_logs dir
+ Args:
+ ad: android device object.
+ test_name: test case name
+ """
+ logs = ad.get_file_names("/data/vendor/wifi/cnss_diag/wlan_logs/")
+ if logs:
+ ad.log.info("Pulling cnss_diag logs %s", logs)
+ log_path = os.path.join(ad.log_path, test_name,
+ "CNSS_DIAG_%s" % ad.serial)
+ utils.create_dir(log_path)
+ ad.pull_files(logs, log_path)
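+
+
+# Example usage (illustrative; 'ads' is assumed to be the test's list of
+# AndroidDevice objects):
+#
+#   start_cnss_diags(ads)
+#   # ... run the test ...
+#   stop_cnss_diags(ads)
+#   for ad in ads:
+#       get_cnss_diag_log(ad, test_name='test_wifi_connect')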
diff --git a/acts/framework/acts/utils.py b/acts/framework/acts/utils.py
index d89e1d5..d225df4 100755
--- a/acts/framework/acts/utils.py
+++ b/acts/framework/acts/utils.py
@@ -326,6 +326,19 @@
return ''.join(letters)
+def rand_hex_str(length):
+ """Generates a random string of specified length, composed of hex digits
+
+ Args:
+ length: The number of characters in the string.
+
+ Returns:
+ The random string generated.
+ """
+ letters = [random.choice(string.hexdigits) for i in range(length)]
+ return ''.join(letters)
+
+
# Thead/Process related functions.
def concurrent_exec(func, param_list):
"""Executes a function with different parameters pseudo-concurrently.
@@ -496,6 +509,8 @@
Args:
ad: The android device to sync time on.
"""
+ ad.adb.shell("settings global put auto_time 0", ignore_status=True)
+ ad.adb.shell("settings global put auto_time_zone 0", ignore_status=True)
droid = ad.droid
droid.setTimeZone(get_timezone_olson_id())
droid.setTime(get_current_epoch_time())
@@ -738,6 +753,12 @@
If new_state is False, turn off location service.
If new_state if True, set location service to "High accuracy".
"""
+ ad.adb.shell("content insert --uri "
+ " content://com.google.settings/partner --bind "
+ "name:s:network_location_opt_in --bind value:s:1")
+ ad.adb.shell("content insert --uri "
+ " content://com.google.settings/partner --bind "
+ "name:s:use_location_for_services --bind value:s:1")
if new_state:
ad.adb.shell("settings put secure location_providers_allowed +gps")
ad.adb.shell("settings put secure location_providers_allowed +network")
@@ -759,16 +780,6 @@
1 if new_state else 0))
-def set_regulatory_domain(ad, domain):
- """Set the Wi-Fi regulatory domain
-
- Args:
- ad: android device object.
- domain: regulatory domain
- """
- ad.adb.shell("iw reg set %s" % domain)
-
-
def bypass_setup_wizard(ad, bypass_wait_time=3):
"""Bypass the setup wizard on an input Android device
@@ -847,7 +858,7 @@
packet_xmit = int(result.group(1))
packet_rcvd = int(result.group(2))
min_packet_xmit_rcvd = (100 - loss_tolerance) * 0.01
- if (packet_loss >= loss_tolerance
+ if (packet_loss > loss_tolerance
or packet_xmit < count * min_packet_xmit_rcvd
or packet_rcvd < count * min_packet_xmit_rcvd):
ad.log.error("%s, ping failed with loss more than tolerance %s%%",
diff --git a/acts/framework/setup.py b/acts/framework/setup.py
index 43d0c6e..422d431 100755
--- a/acts/framework/setup.py
+++ b/acts/framework/setup.py
@@ -32,10 +32,11 @@
'pyserial',
'shellescape>=3.4.1',
'protobuf',
+ 'requests',
'roman',
'scapy-python3',
'pylibftdi',
- 'xlsxwriter'
+ 'xlsxwriter',
]
if sys.version_info < (3, ):
diff --git a/acts/framework/tests/acts_android_device_test.py b/acts/framework/tests/acts_android_device_test.py
index 2052281..9c284cb 100755
--- a/acts/framework/tests/acts_android_device_test.py
+++ b/acts/framework/tests/acts_android_device_test.py
@@ -28,9 +28,10 @@
MOCK_LOG_PATH = "/tmp/logs/MockTest/xx-xx-xx_xx-xx-xx/"
# Mock start and end time of the adb cat.
-MOCK_ADB_LOGCAT_BEGIN_TIME = "1970-01-02 21:03:20.123"
-MOCK_ADB_LOGCAT_END_TIME = "1970-01-02 21:22:02.000"
MOCK_ADB_EPOCH_BEGIN_TIME = 191000123
+MOCK_ADB_LOGCAT_BEGIN_TIME = logger.normalize_log_line_timestamp(
+ logger.epoch_to_log_line_timestamp(MOCK_ADB_EPOCH_BEGIN_TIME))
+MOCK_ADB_LOGCAT_END_TIME = "1970-01-02 21:22:02.000"
MOCK_SERIAL = 1
MOCK_RELEASE_BUILD_ID = "ABC1.123456.007"
@@ -181,13 +182,13 @@
def test_create_with_empty_config(self):
expected_msg = android_device.ANDROID_DEVICE_EMPTY_CONFIG_MSG
- with self.assertRaisesRegex(android_device.AndroidDeviceError,
+ with self.assertRaisesRegex(android_device.AndroidDeviceConfigError,
expected_msg):
android_device.create([])
def test_create_with_not_list_config(self):
expected_msg = android_device.ANDROID_DEVICE_NOT_LIST_CONFIG_MSG
- with self.assertRaisesRegex(android_device.AndroidDeviceError,
+ with self.assertRaisesRegex(android_device.AndroidDeviceConfigError,
expected_msg):
android_device.create("HAHA")
@@ -211,16 +212,14 @@
ads = get_mock_ads(5)
expected_msg = ("Could not find a target device that matches condition"
": {'serial': 5}.")
- with self.assertRaisesRegex(android_device.AndroidDeviceError,
- expected_msg):
+ with self.assertRaisesRegex(ValueError, expected_msg):
ad = android_device.get_device(ads, serial=len(ads))
def test_get_device_too_many_matches(self):
ads = get_mock_ads(5)
target_serial = ads[1].serial = ads[0].serial
expected_msg = "More than one device matched: \[0, 0\]"
- with self.assertRaisesRegex(android_device.AndroidDeviceError,
- expected_msg):
+ with self.assertRaisesRegex(ValueError, expected_msg):
ad = android_device.get_device(ads, serial=target_serial)
def test_start_services_on_ads(self):
@@ -384,12 +383,11 @@
the calls.
"""
ad = android_device.AndroidDevice(serial=MOCK_SERIAL)
- expected_msg = ("Android device .* does not have an ongoing adb logcat"
- " collection.")
- # Expect error if stop is called before start.
- with self.assertRaisesRegex(android_device.AndroidDeviceError,
- expected_msg):
+ expected_msg = ("Android device .* does not have an ongoing adb logcat")
+ # Expect warning msg if stop is called before start.
+ with self.assertLogs(level='WARNING') as log:
ad.stop_adb_logcat()
+ self.assertRegex(log.output[0], expected_msg)
ad.start_adb_logcat()
# Verify start did the correct operations.
self.assertTrue(ad.adb_logcat_process)
@@ -398,15 +396,15 @@
"adblog,fakemodel,%s.txt" % ad.serial)
creat_dir_mock.assert_called_with(os.path.dirname(expected_log_path))
adb_cmd = 'adb -s %s logcat -T 1 -v year -b all >> %s'
- start_proc_mock.assert_called_with(adb_cmd % (ad.serial,
- expected_log_path))
+ start_proc_mock.assert_called_with(
+ adb_cmd % (ad.serial, expected_log_path))
self.assertEqual(ad.adb_logcat_file_path, expected_log_path)
- expected_msg = ("Android device .* already has an adb logcat thread "
- "going on. Cannot start another one.")
- # Expect error if start is called back to back.
- with self.assertRaisesRegex(android_device.AndroidDeviceError,
- expected_msg):
+ expected_msg = ("Android device .* already has a running adb logcat")
+ # Expect warning msg if start is called back to back.
+ with self.assertLogs(level='WARNING') as log:
ad.start_adb_logcat()
+ self.assertRegex(log.output[0], expected_msg)
+
# Verify stop did the correct operations.
ad.stop_adb_logcat()
stop_proc_mock.assert_called_with("process")
@@ -432,12 +430,11 @@
"""
ad = android_device.AndroidDevice(serial=MOCK_SERIAL)
ad.adb_logcat_param = "-b radio"
- expected_msg = ("Android device .* does not have an ongoing adb logcat"
- " collection.")
- # Expect error if stop is called before start.
- with self.assertRaisesRegex(android_device.AndroidDeviceError,
- expected_msg):
+ expected_msg = ("Android device .* does not have an ongoing adb logcat")
+ # Expect warning msg if stop is called before start.
+ with self.assertLogs(level='WARNING') as log:
ad.stop_adb_logcat()
+ self.assertRegex(log.output[0], expected_msg)
ad.start_adb_logcat()
# Verify start did the correct operations.
self.assertTrue(ad.adb_logcat_process)
@@ -446,8 +443,8 @@
"adblog,fakemodel,%s.txt" % ad.serial)
creat_dir_mock.assert_called_with(os.path.dirname(expected_log_path))
adb_cmd = 'adb -s %s logcat -T 1 -v year -b radio >> %s'
- start_proc_mock.assert_called_with(adb_cmd % (ad.serial,
- expected_log_path))
+ start_proc_mock.assert_called_with(
+ adb_cmd % (ad.serial, expected_log_path))
self.assertEqual(ad.adb_logcat_file_path, expected_log_path)
@mock.patch(
diff --git a/acts/framework/tests/acts_error_test.py b/acts/framework/tests/acts_error_test.py
index 2527b54..ba1b69a 100755
--- a/acts/framework/tests/acts_error_test.py
+++ b/acts/framework/tests/acts_error_test.py
@@ -19,18 +19,28 @@
class ActsErrorTest(unittest.TestCase):
- def test_error_without_args(self):
+
+ def test_assert_key_pulled_from_acts_error_code(self):
e = error.ActsError()
self.assertEqual(e.error_code, 100)
+
+ def test_assert_description_pulled_from_docstring(self):
+ e = error.ActsError()
self.assertEqual(e.message, 'Base Acts Error')
- self.assertEqual(e.extra, ())
+
+ def test_error_without_args(self):
+ e = error.ActsError()
+ self.assertNotIn('details', e.extra)
def test_error_with_args(self):
- args = 'hello'
- e = error.ActsError(args)
- self.assertEqual(e.error_code, 100)
- self.assertEqual(e.message, 'Base Acts Error')
- self.assertEqual(e.extra, ('hello',))
+ args = ('hello', )
+ e = error.ActsError(*args)
+ self.assertEqual(e.extra['details'], args)
+
+ def test_error_with_kwargs(self):
+ e = error.ActsError(key='value')
+ self.assertTrue('key' in e.extra.keys())
+ self.assertTrue('value' in e.extra['key'])
if __name__ == '__main__':
diff --git a/acts/framework/tests/controllers/android_lib/__init__.py b/acts/framework/tests/controllers/android_lib/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/acts/framework/tests/controllers/android_lib/__init__.py
diff --git a/acts/framework/tests/controllers/android_lib/android_lib_unittest_bundle.py b/acts/framework/tests/controllers/android_lib/android_lib_unittest_bundle.py
new file mode 100755
index 0000000..3a40d31
--- /dev/null
+++ b/acts/framework/tests/controllers/android_lib/android_lib_unittest_bundle.py
@@ -0,0 +1,32 @@
+#!/usr/bin/env python3
+#
+# Copyright 2018 - The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import sys
+import unittest
+
+
+def main():
+ suite = unittest.TestLoader().discover(
+ start_dir='./acts/framework/tests/controllers/android_lib',
+ pattern='*_test.py')
+ return suite
+
+
+if __name__ == '__main__':
+ test_suite = main()
+ runner = unittest.TextTestRunner()
+ test_run = runner.run(test_suite)
+ sys.exit(not test_run.wasSuccessful())
diff --git a/acts/framework/tests/controllers/android_lib/logcat_test.py b/acts/framework/tests/controllers/android_lib/logcat_test.py
new file mode 100644
index 0000000..ab6f8f4
--- /dev/null
+++ b/acts/framework/tests/controllers/android_lib/logcat_test.py
@@ -0,0 +1,174 @@
+#!/usr/bin/env python3
+#
+# Copyright 2018 - The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+import logging
+import unittest
+
+import mock
+from acts.controllers.android_lib import logcat
+from acts.controllers.android_lib.logcat import TimestampTracker
+
+BASE_TIMESTAMP = '2000-01-01 12:34:56.789 123 75348 '
+
+
+class LogcatTest(unittest.TestCase):
+ """Tests acts.controllers.android_lib.logcat"""
+
+ @staticmethod
+ def patch(patched):
+ return mock.patch('acts.controllers.android_lib.logcat.%s' % patched)
+
+ def setUp(self):
+ self._get_log_level = logcat._get_log_level
+
+ def tearDown(self):
+ logcat._get_log_level = self._get_log_level
+
+ # TimestampTracker
+
+ def test_read_output_sets_last_timestamp_if_found(self):
+ tracker = TimestampTracker()
+ tracker.read_output(BASE_TIMESTAMP + 'D message')
+
+ self.assertEqual(tracker.last_timestamp, '2000-01-01 12:34:56.789')
+
+ def test_read_output_keeps_last_timestamp_if_no_new_stamp_is_found(self):
+ tracker = TimestampTracker()
+ tracker.read_output(BASE_TIMESTAMP + 'D message')
+ tracker.read_output('--------- beginning of main')
+
+ self.assertEqual(tracker.last_timestamp, '2000-01-01 12:34:56.789')
+
+ def test_read_output_updates_timestamp_to_first_in_results(self):
+ tracker = TimestampTracker()
+ tracker.read_output(BASE_TIMESTAMP + 'D 9999-99-99 12:34:56.789')
+
+ self.assertEqual(tracker.last_timestamp, '2000-01-01 12:34:56.789')
+
+ # _get_log_level
+
+ def test_get_log_level_verbose(self):
+ """Tests that Logcat's verbose logs make it to the debug level."""
+ level = logcat._get_log_level(BASE_TIMESTAMP + 'V')
+
+ self.assertEqual(level, logging.DEBUG)
+
+ def test_get_log_level_debug(self):
+ """Tests that Logcat's debug logs make it to the debug level."""
+ level = logcat._get_log_level(BASE_TIMESTAMP + 'D')
+
+ self.assertEqual(level, logging.DEBUG)
+
+ def test_get_log_level_info(self):
+ """Tests that Logcat's info logs make it to the info level."""
+ level = logcat._get_log_level(BASE_TIMESTAMP + 'I')
+
+ self.assertEqual(level, logging.INFO)
+
+ def test_get_log_level_warning(self):
+ """Tests that Logcat's warning logs make it to the warning level."""
+ level = logcat._get_log_level(BASE_TIMESTAMP + 'W')
+
+ self.assertEqual(level, logging.WARNING)
+
+ def test_get_log_level_error(self):
+ """Tests that Logcat's error logs make it to the error level."""
+ level = logcat._get_log_level(BASE_TIMESTAMP + 'E')
+
+ self.assertEqual(level, logging.ERROR)
+
+ def test_get_log_level_markers(self):
+ """Tests that Logcat's marker logs make it to the error level."""
+ level = logcat._get_log_level('--------- beginning of main')
+
+ self.assertEqual(level, logging.ERROR)
+
+ # _log_line_func
+
+ def test_log_line_func_returns_func_that_logs_to_given_logger(self):
+ logcat._get_log_level = lambda message: logging.INFO
+ tracker = mock.Mock()
+ log = mock.Mock()
+ message = 'MESSAGE'
+
+ logcat._log_line_func(log, tracker)(message)
+
+ self.assertEqual(log.log.called, True)
+ log.log.assert_called_once_with(logging.INFO, message)
+
+ def test_log_line_func_returns_func_that_updates_the_timestamp(self):
+ logcat._get_log_level = lambda message: logging.INFO
+ tracker = mock.Mock()
+ log = mock.Mock()
+ message = 'MESSAGE'
+
+ logcat._log_line_func(log, tracker)(message)
+
+ self.assertEqual(tracker.read_output.called, True)
+ tracker.read_output.assert_called_once_with(message)
+
+ # _on_retry
+
+ def test_on_retry_returns_func_that_formats_with_last_timestamp(self):
+ tracker = TimestampTracker()
+ tracker.read_output(BASE_TIMESTAMP)
+ new_command = logcat._on_retry('S3R14L', 'extra_params', tracker)(None)
+
+ self.assertIn('-T "%s"' % tracker.last_timestamp, new_command)
+
+ def test_on_retry_func_returns_string_that_contains_the_given_serial(self):
+ tracker = TimestampTracker()
+ tracker.read_output(BASE_TIMESTAMP)
+ new_command = logcat._on_retry('S3R14L', 'extra_params', tracker)(None)
+
+ self.assertTrue('-s S3R14L' in new_command)
+
+ def test_on_retry_func_returns_string_that_contains_any_extra_params(self):
+ tracker = TimestampTracker()
+ tracker.read_output(BASE_TIMESTAMP)
+ new_command = logcat._on_retry('S3R14L', 'extra_params', tracker)(None)
+
+ self.assertTrue('extra_params' in new_command)
+
+ # create_logcat_keepalive_process
+
+ def test_create_logcat_keepalive_process_creates_a_new_logger(self):
+ with self.patch('log_stream') as log_stream, self.patch('Process'):
+ logcat.create_logcat_keepalive_process('S3R14L')
+
+ self.assertEqual(log_stream.create_logger.call_args[0][0],
+ 'AndroidDeviceS3R14L')
+
+ def test_create_logcat_keepalive_process_creates_a_new_process(self):
+ with self.patch('log_stream'), self.patch('Process') as process:
+ logcat.create_logcat_keepalive_process('S3R14L')
+
+ self.assertIn('S3R14L', process.call_args[0][0])
+
+ def test_create_logcat_keepalive_process_sets_output_callback(self):
+ with self.patch('log_stream'), self.patch('Process'):
+ process = logcat.create_logcat_keepalive_process('S3R14L')
+
+ self.assertEqual(process.set_on_output_callback.called, True)
+
+ def test_create_logcat_keepalive_process_sets_on_terminate_callback(self):
+ with self.patch('log_stream'), self.patch('Process'):
+ process = logcat.create_logcat_keepalive_process('S3R14L')
+
+ self.assertEqual(process.set_on_terminate_callback.called, True)
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/acts/framework/tests/controllers/sl4a_lib/sl4a_manager_test.py b/acts/framework/tests/controllers/sl4a_lib/sl4a_manager_test.py
index f6e751e..99053e7 100755
--- a/acts/framework/tests/controllers/sl4a_lib/sl4a_manager_test.py
+++ b/acts/framework/tests/controllers/sl4a_lib/sl4a_manager_test.py
@@ -309,7 +309,7 @@
try:
manager.start_sl4a_service()
self.fail('An error should have been thrown.')
- except rpc_client.MissingSl4AError:
+ except rpc_client.Sl4aNotInstalledError:
pass
def test_start_sl4a_starts_sl4a_if_not_running(self):
@@ -324,7 +324,7 @@
manager.is_sl4a_installed = lambda: True
try:
manager.start_sl4a_service()
- except rpc_client.MissingSl4AError:
+ except rpc_client.Sl4aNotInstalledError:
self.fail('An error should not have been thrown.')
adb.shell.assert_called_with(sl4a_manager._SL4A_START_SERVICE_CMD)
diff --git a/acts/framework/tests/event/decorators_test.py b/acts/framework/tests/event/decorators_test.py
index 37a4ff8..8a6b0df 100755
--- a/acts/framework/tests/event/decorators_test.py
+++ b/acts/framework/tests/event/decorators_test.py
@@ -16,14 +16,19 @@
import unittest
from unittest import TestCase
-from mock import Mock
-
-from acts.event.decorators import subscribe_static, subscribe
+from acts.event import event_bus
+from acts.event.decorators import register_instance_subscriptions
+from acts.event.decorators import register_static_subscriptions
+from acts.event.decorators import subscribe
+from acts.event.decorators import subscribe_static
+from acts.event.event import Event
from acts.event.subscription_handle import SubscriptionHandle
+from mock import Mock
class DecoratorsTest(TestCase):
"""Tests the decorators found in acts.event.decorators."""
+
def test_subscribe_static_return_type(self):
"""Tests that the subscribe_static is the correct type."""
mock = Mock()
@@ -63,5 +68,67 @@
self.assertEqual(dummy_class.test(''), dummy_class.mock)
+class DummyEvent(Event):
+ """A dummy event used for testing registered functions."""
+
+
+class RegisterStaticSubscriptionsTest(TestCase):
+
+ def test_register_static_subscriptions_returns_passed_in_object(self):
+ obj = Mock()
+ returned_value = register_static_subscriptions(obj)
+ self.assertEqual(obj, returned_value,
+ 'register_static_subscriptions returned a value other'
+ 'than the object passed in.')
+
+ def test_register_static_subscriptions_registers_properly(self):
+ @register_static_subscriptions
+ class RegisterStaticSubscriptionsClass(object):
+ captured_event = None
+
+ @staticmethod
+ @subscribe_static(DummyEvent)
+ def on_static_event(evt):
+ RegisterStaticSubscriptionsClass.captured_event = evt
+
+ event = DummyEvent()
+ event_bus.post(event)
+
+ self.assertEqual(event, RegisterStaticSubscriptionsClass.captured_event,
+ 'register_static_subscriptions did not subscribe '
+ 'RegisterStaticSubscriptionsClass.on_static_event.')
+
+
+class RegisterInstanceSubscriptionsTest(TestCase):
+
+ def test_register_instance_subscriptions_returns_passed_in_object(self):
+ class SomeClass(object):
+ pass
+
+ returned_value = register_instance_subscriptions(SomeClass)
+ self.assertEqual(SomeClass, returned_value,
+ 'register_instance_subscriptions returned a value '
+ 'other than the object passed in.')
+
+ def test_register_instance_subscriptions_registers_properly(self):
+ @register_instance_subscriptions
+ class RegisterInstanceSubscriptionsClass(object):
+ def __init__(self):
+ self.captured_event = None
+
+ @subscribe(DummyEvent)
+ def on_instance_event(self, evt):
+ self.captured_event = evt
+
+ instance = RegisterInstanceSubscriptionsClass()
+ event = DummyEvent()
+ event_bus.post(event)
+
+ self.assertEqual(
+ event, instance.captured_event,
+ 'register_instance_subscriptions did not subscribe the instance '
+ 'function RegisterInstanceSubscriptionsClass.on_instance_event.')
+
+
if __name__ == '__main__':
unittest.main()
diff --git a/acts/framework/tests/event/event_bus_integration_test.py b/acts/framework/tests/event/event_bus_integration_test.py
index 1e3bddb..7dadf40 100755
--- a/acts/framework/tests/event/event_bus_integration_test.py
+++ b/acts/framework/tests/event/event_bus_integration_test.py
@@ -69,7 +69,7 @@
"""Tests that TestClasses have their subscribed functions called."""
TestRunner(self.config, [('TestClass', [])]).run(TestClass)
- self.assertEqual(len(TestClass.instance_event_received), 1)
+ self.assertGreaterEqual(len(TestClass.instance_event_received), 1)
self.assertEqual(len(TestClass.static_event_received), 0)
def test_subscribe_static_bundles(self):
@@ -82,17 +82,6 @@
self.assertEqual(len(TestClass.instance_event_received), 0)
self.assertEqual(len(TestClass.static_event_received), 1)
- def test_subscribe_object_bundles(self):
- """Tests that @subscribe* bundles register all listeners."""
- test_object = TestClass({})
- bundle = subscription_bundle.create_from_object(test_object)
- bundle.register()
-
- event_bus.post(Event())
-
- self.assertEqual(len(TestClass.instance_event_received), 1)
- self.assertEqual(len(TestClass.static_event_received), 1)
-
def test_subscribe_instance_bundles(self):
"""Tests that @subscribe bundles register only instance listeners."""
test_object = TestClass({})
diff --git a/acts/framework/tests/event/subscription_bundle_test.py b/acts/framework/tests/event/subscription_bundle_test.py
index 06c1049..f0631e3 100755
--- a/acts/framework/tests/event/subscription_bundle_test.py
+++ b/acts/framework/tests/event/subscription_bundle_test.py
@@ -13,17 +13,17 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
-import sys
import unittest
from unittest import TestCase
-from mock import Mock, patch
-
+import sys
from acts.event import subscription_bundle
+from acts.event.decorators import subscribe
+from acts.event.decorators import subscribe_static
from acts.event.event import Event
from acts.event.subscription_bundle import SubscriptionBundle
-from acts.event.subscription_handle import InstanceSubscriptionHandle
-from acts.event.subscription_handle import StaticSubscriptionHandle
+from mock import Mock
+from mock import patch
class SubscriptionBundleTest(TestCase):
@@ -134,24 +134,33 @@
class SubscriptionBundleStaticFunctions(TestCase):
"""Tests the static functions found in subscription_bundle.py"""
- static_listener_1 = StaticSubscriptionHandle(Event, lambda _: None)
+ @staticmethod
+ @subscribe_static(Event)
+ def static_listener_1():
+ pass
- static_listener_2 = StaticSubscriptionHandle(Event, lambda _: None)
+ @staticmethod
+ @subscribe_static(Event)
+ def static_listener_2():
+ pass
- def setUp(self):
- self.instance_listener_1 = InstanceSubscriptionHandle(Event,
- lambda _: None)
- self.instance_listener_2 = InstanceSubscriptionHandle(Event,
- lambda _: None)
+ @subscribe(Event)
+ def instance_listener_1(self):
+ pass
+
+ @subscribe(Event)
+ def instance_listener_2(self):
+ pass
def test_create_from_static(self):
"""Tests create_from_static gets all StaticSubscriptionHandles."""
- bundle = subscription_bundle.create_from_static(self.__class__)
+ cls = self.__class__
+ bundle = subscription_bundle.create_from_static(cls)
self.assertEqual(len(bundle.subscriptions), 2)
keys = bundle.subscriptions.keys()
- self.assertTrue(self.static_listener_1.subscription in keys)
- self.assertTrue(self.static_listener_2.subscription in keys)
+ self.assertIn(cls.static_listener_1.subscription, keys)
+ self.assertIn(cls.static_listener_2.subscription, keys)
def test_create_from_instance(self):
"""Tests create_from_instance gets all InstanceSubscriptionHandles."""
@@ -159,19 +168,24 @@
self.assertEqual(len(bundle.subscriptions), 2)
keys = bundle.subscriptions.keys()
- self.assertTrue(self.instance_listener_1.subscription in keys)
- self.assertTrue(self.instance_listener_2.subscription in keys)
+ self.assertIn(self.instance_listener_1.subscription, keys)
+ self.assertIn(self.instance_listener_2.subscription, keys)
- def test_create_from_object(self):
- """Tests create_from_object gets all SubscriptionHandles."""
- bundle = subscription_bundle.create_from_object(self)
- self.assertEqual(len(bundle.subscriptions), 4)
+@subscribe_static(Event)
+def static_listener_1():
+ pass
+
+
+class SubscribeStaticModuleLevelTest(TestCase):
+ def test_create_from_static(self):
+ """Tests create_from_static gets all StaticSubscriptionHandles."""
+ bundle = subscription_bundle.create_from_static(
+ sys.modules[self.__module__])
+
+ self.assertEqual(len(bundle.subscriptions), 1)
keys = bundle.subscriptions.keys()
- self.assertTrue(self.static_listener_1.subscription in keys)
- self.assertTrue(self.static_listener_2.subscription in keys)
- self.assertTrue(self.instance_listener_1.subscription in keys)
- self.assertTrue(self.instance_listener_2.subscription in keys)
+ self.assertIn(static_listener_1.subscription, keys)
if __name__ == '__main__':
diff --git a/acts/framework/tests/libs/logging/__init__.py b/acts/framework/tests/libs/logging/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/acts/framework/tests/libs/logging/__init__.py
diff --git a/acts/framework/tests/libs/logging/log_stream_test.py b/acts/framework/tests/libs/logging/log_stream_test.py
new file mode 100755
index 0000000..278272d
--- /dev/null
+++ b/acts/framework/tests/libs/logging/log_stream_test.py
@@ -0,0 +1,407 @@
+#!/usr/bin/env python3
+#
+# Copyright 2018 - The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+import logging
+import unittest
+
+import mock
+import os
+from acts.event.event import TestCaseBeginEvent
+
+from acts.libs.logging import log_stream
+from acts.libs.logging.log_stream import AlsoToLogHandler
+from acts.libs.logging.log_stream import _LogStream
+from acts.libs.logging.log_stream import InvalidStyleSetError
+from acts.libs.logging.log_stream import LogStyles
+
+
+class TestClass(object):
+ """Dummy class for TestEvents"""
+
+ def test_case(self):
+ """Dummy test case for test events."""
+
+
+class LogStreamTest(unittest.TestCase):
+ """Tests the _LogStream class in acts.libs.logging.log_stream."""
+
+ @staticmethod
+ def patch(imported_name, *args, **kwargs):
+ return mock.patch('acts.libs.logging.log_stream.%s' % imported_name,
+ *args, **kwargs)
+
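+    # The patch() helper above targets names as they are imported inside
+    # log_stream; for example, self.patch('FileHandler') is shorthand for
+    # something like:
+    #
+    #   mock.patch('acts.libs.logging.log_stream.FileHandler')
+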
+ @classmethod
+ def setUpClass(cls):
+ # logging.log_path only exists if logger._setup_test_logger is called.
+ # Here we set it to a value that is likely to not exist so file IO is
+ # not executed (an error is raised instead of creating the file).
+ logging.log_path = '/f/a/i/l/p/a/t/h'
+
+ def setUp(self):
+ log_stream._log_streams = dict()
+
+ # __validate_style
+
+ def test_validate_styles_raises_when_same_location_set_multiple_times(
+ self, *_):
+ """Tests that a style is invalid if it sets the same handler twice.
+
+ If the error is NOT raised, then a LogStream can create a Logger that
+ has multiple LogHandlers trying to write to the same file.
+ """
+ with self.assertRaises(InvalidStyleSetError) as catch:
+ log_stream.create_logger(
+ self._testMethodName,
+ [LogStyles.LOG_DEBUG | LogStyles.MONOLITH_LOG,
+ LogStyles.LOG_DEBUG | LogStyles.MONOLITH_LOG])
+ self.assertTrue(
+ 'has been set' in catch.exception.args[0],
+ msg='__validate_styles did not raise the expected error message')
+
+ def test_validate_styles_raises_when_no_level_exists(self):
+ """Tests that a style is invalid if it does not contain a log level.
+
+ If the style does not contain a log level, then there is no way to
+ pass the information coming from the logger to the correct file.
+ """
+ with self.assertRaises(InvalidStyleSetError) as catch:
+ log_stream.create_logger(self._testMethodName,
+ [LogStyles.MONOLITH_LOG])
+
+ self.assertTrue(
+ 'log level' in catch.exception.args[0],
+ msg='__validate_styles did not raise the expected error message')
+
+ def test_validate_styles_raises_when_no_location_exists(self):
+ """Tests that a style is invalid if it does not contain a log level.
+
+ If the style does not contain a log level, then there is no way to
+ pass the information coming from the logger to the correct file.
+ """
+ with self.assertRaises(InvalidStyleSetError) as catch:
+ log_stream.create_logger(self._testMethodName,
+ [LogStyles.LOG_INFO])
+
+ self.assertTrue(
+ 'log location' in catch.exception.args[0],
+ msg='__validate_styles did not raise the expected error message')
+
+ def test_validate_styles_raises_when_rotate_logs_no_file_handler(self):
+ """Tests that a LogStyle cannot set ROTATE_LOGS without *_LOG flag.
+
+ If the LogStyle contains ROTATE_LOGS, it must be associated with a log
+ that is rotatable. TO_ACTS_LOG and TO_STDOUT are not rotatable logs,
+ since those are both controlled by another object/process. The user
+        must specify MONOLITH_LOG or TESTCASE_LOG.
+ """
+ with self.assertRaises(InvalidStyleSetError) as catch:
+ log_stream.create_logger(
+ self._testMethodName,
+ # Added LOG_DEBUG here to prevent the no_level_exists raise from
+ # occurring.
+ [LogStyles.LOG_DEBUG + LogStyles.ROTATE_LOGS])
+
+ self.assertTrue(
+ 'log type' in catch.exception.args[0],
+ msg='__validate_styles did not raise the expected error message')
+
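+    # For reference, a style that satisfies all of the validation checks
+    # above (a log level, a log location, and a rotatable log type) is the
+    # combination already used further down in this file:
+    #
+    #   LogStyles.LOG_INFO + LogStyles.ROTATE_LOGS + LogStyles.MONOLITH_LOG
+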
+ # __handle_style
+
+ def test_handle_style_create_test_case_descriptors(self):
+ """Tests that handle_style creates the correct test case descriptors.
+
+ Testcase descriptors are only created on TESTCASE_LOG logstyles.
+ """
+ info_testcase_log = LogStyles.LOG_INFO + LogStyles.TESTCASE_LOG
+
+ with self.patch('FileHandler'):
+ log_stream.create_logger(self._testMethodName, info_testcase_log)
+
+ created_stream = log_stream._log_streams[self._testMethodName]
+ handler_descriptors = created_stream._test_case_handler_descriptors
+
+ self.assertEqual(len(handler_descriptors), 1,
+ 'There should be exactly 1 testcase handler created.')
+ self.assertEqual(handler_descriptors[0]._level, logging.INFO)
+
+ def test_handle_style_does_not_create_test_case_descriptors(self):
+ """Tests that handle_style does not create testcase descriptors without
+ LogStyle.TESTCASE_LOG.
+ """
+ info_monolith_log = LogStyles.LOG_INFO + LogStyles.MONOLITH_LOG
+
+ with self.patch('FileHandler'):
+ log_stream.create_logger(self._testMethodName, info_monolith_log)
+
+ created_stream = log_stream._log_streams[self._testMethodName]
+ handler_descriptors = created_stream._test_case_handler_descriptors
+
+ self.assertEqual(len(handler_descriptors), 0,
+ 'Testcase handlers should not be created without a '
+ 'TESTCASE_LOG LogStyle.')
+
+ def test_handle_style_to_acts_log_creates_handler(self):
+ """Tests that using the flag TO_ACTS_LOG creates an AlsoToLogHandler."""
+ info_acts_log = LogStyles.LOG_INFO + LogStyles.TO_ACTS_LOG
+
+ log = log_stream.create_logger(self._testMethodName, info_acts_log)
+
+ self.assertTrue(isinstance(log.handlers[0], AlsoToLogHandler))
+
+ def test_handle_style_to_acts_log_creates_handler_is_lowest_level(self):
+ """Tests that using the flag TO_ACTS_LOG creates an AlsoToLogHandler
+ that is set to the lowest LogStyles level."""
+ info_acts_log = (LogStyles.LOG_DEBUG + LogStyles.LOG_INFO +
+ LogStyles.TO_ACTS_LOG)
+
+ log = log_stream.create_logger(self._testMethodName, info_acts_log)
+
+ self.assertTrue(isinstance(log.handlers[0], AlsoToLogHandler))
+ self.assertEqual(log.handlers[0].level, logging.DEBUG)
+
+ def test_handle_style_to_stdout_creates_stream_handler(self):
+ """Tests that using the flag TO_STDOUT creates a StreamHandler."""
+ info_acts_log = LogStyles.LOG_INFO + LogStyles.TO_STDOUT
+
+ log = log_stream.create_logger(self._testMethodName, info_acts_log)
+
+ self.assertTrue(isinstance(log.handlers[0], logging.StreamHandler))
+
+ def test_handle_style_creates_file_handler(self, *_):
+ """Tests handle_style creates a FileHandler for the MONOLITH_LOG."""
+ info_acts_log = LogStyles.LOG_INFO + LogStyles.MONOLITH_LOG
+
+ expected = mock.MagicMock()
+ with self.patch('FileHandler', return_value=expected):
+ log = log_stream.create_logger(self._testMethodName, info_acts_log)
+
+ self.assertEqual(log.handlers[0], expected)
+
+ def test_handle_style_creates_rotating_file_handler(self):
+ """Tests handle_style creates a FileHandler for the ROTATE_LOGS."""
+ info_acts_log = (LogStyles.LOG_INFO + LogStyles.ROTATE_LOGS +
+ LogStyles.MONOLITH_LOG)
+
+ expected = mock.MagicMock()
+ with self.patch('RotatingFileHandler', return_value=expected):
+ log = log_stream.create_logger(self._testMethodName, info_acts_log)
+
+ self.assertEqual(log.handlers[0], expected)
+
+ # __create_rotating_file_handler
+
+ def test_create_rotating_file_handler_does_what_it_says_it_does(self):
+ """Tests that __create_rotating_file_handler does exactly that."""
+ expected = mock.MagicMock()
+
+ with self.patch('RotatingFileHandler', return_value=expected):
+ # Through name-mangling, this function is automatically renamed. See
+ # https://docs.python.org/3/tutorial/classes.html#private-variables
+ fh = _LogStream._LogStream__create_rotating_file_handler('')
+
+ self.assertEqual(expected, fh,
+ 'The function did not return a RotatingFileHandler.')
+
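+    # A minimal illustration of the name mangling used in the tests below
+    # (plain Python behavior, nothing specific to log_stream):
+    #
+    #   class _LogStream:
+    #       def __create(self): ...    # stored as _LogStream__create
+    #
+    #   _LogStream._LogStream__create  # how the tests reach the method
+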
+ # __get_file_handler_creator
+
+ def test_get_file_handler_creator_returns_rotating_file_handler(self):
+ """Tests the function returns a RotatingFileHandler when the log_style
+ has LogStyle.ROTATE_LOGS."""
+ expected = mock.MagicMock()
+
+ with self.patch('_LogStream._LogStream__create_rotating_file_handler',
+ return_value=expected):
+ # Through name-mangling, this function is automatically renamed. See
+ # https://docs.python.org/3/tutorial/classes.html#private-variables
+ fh_creator = _LogStream._LogStream__get_file_handler_creator(
+ LogStyles.ROTATE_LOGS)
+
+ self.assertEqual(expected, fh_creator('/d/u/m/m/y/p/a/t/h'),
+ 'The function did not return a RotatingFileHandler.')
+
+ def test_get_file_handler_creator_returns_file_handler(self):
+ """Tests the function returns a FileHandler when the log_style does NOT
+ have LogStyle.ROTATE_LOGS."""
+ expected = mock.MagicMock()
+
+ with self.patch('FileHandler', return_value=expected):
+ # Through name-mangling, this function is automatically renamed. See
+ # https://docs.python.org/3/tutorial/classes.html#private-variables
+ handler = _LogStream._LogStream__get_file_handler_creator(
+ LogStyles.NONE)()
+
+ self.assertTrue(isinstance(handler, mock.Mock))
+
+ # __get_lowest_log_level
+
+ def test_get_lowest_level_gets_lowest_level(self):
+ """Tests __get_lowest_level returns the lowest LogStyle level given."""
+ level = _LogStream._LogStream__get_lowest_log_level(
+ LogStyles.ALL_LEVELS)
+ self.assertEqual(level, LogStyles.LOG_DEBUG)
+
+ # __remove_handler
+
+ def test_remove_handler_removes_a_handler(self):
+ dummy_obj = mock.Mock()
+ dummy_obj.logger = mock.Mock()
+ handler = mock.Mock()
+ _LogStream._LogStream__remove_handler(dummy_obj, handler)
+
+ self.assertTrue(dummy_obj.logger.removeHandler.called)
+ self.assertTrue(handler.close.called)
+
+ # on_test_case_end
+
+ def test_on_test_case_end_removes_all_handlers(self):
+ info_testcase_log = LogStyles.LOG_INFO + LogStyles.TESTCASE_LOG
+ with self.patch('FileHandler'):
+ log_stream.create_logger(self._testMethodName, info_testcase_log)
+
+ created_log_stream = log_stream._log_streams[self._testMethodName]
+ created_log_stream.on_test_case_end('')
+
+ self.assertEqual(len(created_log_stream._test_case_log_handlers), 0,
+ 'The test case log handlers were not cleared.')
+
+ # on_test_case_begin
+
+ def test_on_test_case_begin_creates_new_handlers(self):
+ info_testcase_log = LogStyles.LOG_INFO + LogStyles.TESTCASE_LOG
+ with self.patch('FileHandler'):
+ log_stream.create_logger(self._testMethodName, info_testcase_log)
+
+ created_log_stream = log_stream._log_streams[self._testMethodName]
+ created_log_stream.on_test_case_begin(
+ TestCaseBeginEvent(TestClass, TestClass.test_case))
+
+ self.assertEqual(len(created_log_stream._test_case_log_handlers), 1)
+
+ # cleanup
+
+ def test_cleanup_removes_all_handlers(self):
+ info_testcase_log = LogStyles.LOG_INFO + LogStyles.MONOLITH_LOG
+ with self.patch('FileHandler'):
+ log_stream.create_logger(self._testMethodName, info_testcase_log)
+
+ created_log_stream = log_stream._log_streams[self._testMethodName]
+ created_log_stream.cleanup()
+
+ self.assertEqual(len(created_log_stream.logger.handlers), 0)
+
+
+class LogStreamModuleTests(unittest.TestCase):
+ @classmethod
+ def setUpClass(cls):
+ # logging.log_path only exists if logger._setup_test_logger is called.
+ # Here we set it to a value that is likely to not exist so file IO is
+ # not executed (an error is raised instead of creating the file).
+ logging.log_path = '/f/a/i/l/p/a/t/h'
+
+ def setUp(self):
+ log_stream._log_streams = {}
+
+ # _on_test_case_begin
+
+ @staticmethod
+ def create_test_case_event():
+ return TestCaseBeginEvent(TestClass, TestClass.test_case)
+
+ @mock.patch('os.path.exists')
+ @mock.patch('os.mkdir')
+ def test_on_test_case_begin_makes_directory_if_not_exists(self, mkdir,
+ exists):
+ """Tests on_test_case_begin calls on_test_case_begin on each log_stream.
+
+ Note that we mock os.mkdir to prevent file IO.
+ """
+ exists.return_value = False
+
+ log_stream._on_test_case_begin(self.create_test_case_event())
+
+ self.assertTrue(mkdir.called)
+ self.assertEqual(mkdir.call_args[0][0], os.path.join(logging.log_path,
+ 'test_case'))
+
+ @mock.patch('os.path.exists')
+ @mock.patch('os.mkdir')
+ def test_on_test_case_begin_does_not_create_dir_if_it_exists(self, mkdir,
+ exists):
+ """Tests on_test_case_begin calls on_test_case_begin on each log_stream.
+
+ Note that we mock os.mkdir to prevent file IO.
+ """
+ exists.return_value = True
+
+ log_stream._on_test_case_begin(self.create_test_case_event())
+
+ self.assertTrue(exists.called)
+ self.assertFalse(mkdir.called)
+
+ @mock.patch('os.path.exists')
+ @mock.patch('os.mkdir')
+ def test_on_test_case_begin_delegates_calls_to_log_streams(self, *_):
+ """Tests on_test_case_begin calls on_test_case_begin on each log_stream.
+
+ Note that we mock os.mkdir to prevent file IO.
+ """
+ log_stream._log_streams = {
+ 'a': mock.Mock(),
+ 'b': mock.Mock()
+ }
+
+ log_stream._on_test_case_begin(self.create_test_case_event())
+
+ self.assertTrue(log_stream._log_streams['a'].on_test_case_begin.called)
+ self.assertTrue(log_stream._log_streams['b'].on_test_case_begin.called)
+
+ # _on_test_case_end
+
+ @mock.patch('os.path.exists')
+ @mock.patch('os.mkdir')
+ def test_on_test_case_end_delegates_calls_to_log_streams(self, *_):
+ """Tests on_test_case_begin calls on_test_case_begin on each log_stream.
+
+ Note that we mock os.mkdir to prevent file IO.
+ """
+ log_stream._log_streams = {
+ 'a': mock.Mock(),
+ 'b': mock.Mock()
+ }
+
+ log_stream._on_test_case_end(self.create_test_case_event())
+
+ self.assertTrue(log_stream._log_streams['a'].on_test_case_end.called)
+ self.assertTrue(log_stream._log_streams['b'].on_test_case_end.called)
+
+ # _set_logger
+
+ def test_set_logger_overwrites_previous_logger(self):
+ """Tests that calling set_logger overwrites the previous logger within
+ log_stream._log_streams.
+ """
+ previous = mock.Mock()
+ log_stream._log_streams = {
+ 'a': previous
+ }
+ expected = mock.Mock()
+ expected.logger.name = 'a'
+ log_stream._set_logger(expected)
+
+ self.assertEqual(log_stream._log_streams['a'], expected)
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/acts/framework/tests/libs/logging/logging_unittest_bundle.py b/acts/framework/tests/libs/logging/logging_unittest_bundle.py
new file mode 100755
index 0000000..6f384ca
--- /dev/null
+++ b/acts/framework/tests/libs/logging/logging_unittest_bundle.py
@@ -0,0 +1,31 @@
+#!/usr/bin/env python3
+#
+# Copyright 2018 - The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import sys
+import unittest
+
+
+def main():
+ suite = unittest.TestLoader().discover(
+ start_dir='./acts/framework/tests/libs/logging', pattern='*_test.py')
+ return suite
+
+
+if __name__ == '__main__':
+ test_suite = main()
+ runner = unittest.TextTestRunner()
+ test_run = runner.run(test_suite)
+ sys.exit(not test_run.wasSuccessful())
diff --git a/acts/framework/tests/libs/proc/__init__.py b/acts/framework/tests/libs/proc/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/acts/framework/tests/libs/proc/__init__.py
diff --git a/acts/framework/tests/libs/proc/proc_unittest_bundle.py b/acts/framework/tests/libs/proc/proc_unittest_bundle.py
new file mode 100755
index 0000000..2a25587
--- /dev/null
+++ b/acts/framework/tests/libs/proc/proc_unittest_bundle.py
@@ -0,0 +1,31 @@
+#!/usr/bin/env python3
+#
+# Copyright 2018 - The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import sys
+import unittest
+
+
+def main():
+ suite = unittest.TestLoader().discover(
+ start_dir='./acts/framework/tests/libs/proc', pattern='*_test.py')
+ return suite
+
+
+if __name__ == '__main__':
+ test_suite = main()
+ runner = unittest.TextTestRunner()
+ test_run = runner.run(test_suite)
+ sys.exit(not test_run.wasSuccessful())
diff --git a/acts/framework/tests/libs/proc/process_test.py b/acts/framework/tests/libs/proc/process_test.py
new file mode 100644
index 0000000..5ed2dc9
--- /dev/null
+++ b/acts/framework/tests/libs/proc/process_test.py
@@ -0,0 +1,309 @@
+#!/usr/bin/env python3
+#
+# Copyright 2018 - The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+import unittest
+
+import mock
+import subprocess
+from acts.libs.proc.process import Process
+
+
+class FakeThread(object):
+ def __init__(self, target=None):
+ self.target = target
+ self.alive = False
+
+ def _on_start(self):
+ pass
+
+ def start(self):
+ self.alive = True
+ if self._on_start:
+ self._on_start()
+
+ def stop(self):
+ self.alive = False
+
+ def join(self):
+ pass
+
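+# The tests below substitute FakeThread for the Thread class imported inside
+# acts.libs.proc.process so that no real threads are spawned. A rough sketch
+# of the pattern used via ProcessTest.patch:
+#
+#   with mock.patch('acts.libs.proc.process.Thread', FakeThread):
+#       ...  # code under test "starts" threads that never actually run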
+
+class ProcessTest(unittest.TestCase):
+ """Tests the acts.libs.proc.process.Process class."""
+
+ def setUp(self):
+ self._Process__start_process = Process._Process__start_process
+
+ def tearDown(self):
+ Process._Process__start_process = self._Process__start_process
+
+ @staticmethod
+ def patch(imported_name, *args, **kwargs):
+ return mock.patch('acts.libs.proc.process.%s' % imported_name,
+ *args, **kwargs)
+
+ # set_on_output_callback
+
+ def test_set_on_output_callback(self):
+ """Tests that set_on_output_callback sets on_output_callback."""
+ callback = mock.Mock()
+
+ process = Process('cmd').set_on_output_callback(callback)
+ process._on_output_callback()
+
+ self.assertTrue(callback.called)
+
+ # set_on_terminate_callback
+
+ def test_set_on_terminate_callback(self):
+ """Tests that set_on_terminate_callback sets _on_terminate_callback."""
+ callback = mock.Mock()
+
+ process = Process('cmd').set_on_terminate_callback(callback)
+ process._on_terminate_callback()
+
+ self.assertTrue(callback.called)
+
+ # start
+
+ def test_start_starts_listening_thread(self):
+ """Tests that start starts the _exec_popen_loop function."""
+ process = Process('cmd')
+
+ # Here we need the thread to start the process object.
+ class FakeThreadImpl(FakeThread):
+ def _on_start(self):
+ process._process = mock.Mock()
+
+ with self.patch('Thread', FakeThreadImpl):
+ process.start()
+
+ self.assertTrue(process._listening_thread.alive)
+ self.assertEqual(process._listening_thread.target, process._exec_loop)
+
+ # wait
+
+ def test_wait_kills_after_timeout(self):
+ """Tests that if a TimeoutExpired error is thrown during wait, the
+ process is killed."""
+ process = Process('cmd')
+ process._process = mock.Mock()
+ process._process.wait.side_effect = subprocess.TimeoutExpired('', '')
+
+ process.wait(0)
+
+ self.assertEqual(process._process.kill.called, True)
+
+ def test_wait_sets_stopped_to_true_before_process_kill(self):
+ """Tests that stop() sets the _stopped attribute to True.
+
+ This order is required to prevent the _exec_loop from calling
+ _on_terminate_callback when the user has killed the process.
+ """
+ verifier = mock.Mock()
+ verifier.passed = False
+
+ def test_call_order():
+ self.assertTrue(process._stopped)
+ verifier.passed = True
+
+ process = Process('cmd')
+ process._process = mock.Mock()
+ process._process.poll.return_value = None
+ process._process.wait.side_effect = subprocess.TimeoutExpired('', '')
+ process._process.kill = test_call_order
+
+ process.wait()
+
+ self.assertEqual(verifier.passed, True)
+
+ def test_wait_joins_listening_thread_if_it_exists(self):
+ """Tests wait() joins _listening_thread if it exists."""
+ process = Process('cmd')
+ process._process = mock.Mock()
+ mocked_thread = mock.Mock()
+ process._listening_thread = mocked_thread
+
+ process.wait(0)
+
+ self.assertEqual(mocked_thread.join.called, True)
+
+ def test_wait_clears_listening_thread_if_it_exists(self):
+ """Tests wait() joins _listening_thread if it exists.
+
+ Threads can only be started once, so after wait has been called, we
+ want to make sure we clear the listening thread.
+ """
+ process = Process('cmd')
+ process._process = mock.Mock()
+ process._listening_thread = mock.Mock()
+
+ process.wait(0)
+
+ self.assertEqual(process._listening_thread, None)
+
+ def test_wait_joins_redirection_thread_if_it_exists(self):
+ """Tests wait() joins _listening_thread if it exists."""
+ process = Process('cmd')
+ process._process = mock.Mock()
+ mocked_thread = mock.Mock()
+ process._redirection_thread = mocked_thread
+
+ process.wait(0)
+
+ self.assertEqual(mocked_thread.join.called, True)
+
+ def test_wait_clears_redirection_thread_if_it_exists(self):
+ """Tests wait() joins _listening_thread if it exists.
+
+ Threads can only be started once, so after wait has been called, we
+ want to make sure we clear the listening thread.
+ """
+ process = Process('cmd')
+ process._process = mock.Mock()
+ process._redirection_thread = mock.Mock()
+
+ process.wait(0)
+
+ self.assertEqual(process._redirection_thread, None)
+
+ # stop
+
+ def test_stop_sets_stopped_to_true(self):
+ """Tests that stop() sets the _stopped attribute to True."""
+ process = Process('cmd')
+ process._process = mock.Mock()
+
+ process.stop()
+
+ self.assertTrue(process._stopped)
+
+ def test_stop_sets_stopped_to_true_before_process_kill(self):
+ """Tests that stop() sets the _stopped attribute to True.
+
+ This order is required to prevent the _exec_loop from calling
+ _on_terminate_callback when the user has killed the process.
+ """
+ verifier = mock.Mock()
+ verifier.passed = False
+
+ def test_call_order():
+ self.assertTrue(process._stopped)
+ verifier.passed = True
+
+ process = Process('cmd')
+ process._process = mock.Mock()
+ process._process.poll.return_value = None
+ process._process.kill = test_call_order
+
+ process.stop()
+
+ self.assertEqual(verifier.passed, True)
+
+ def test_stop_calls_wait(self):
+ """Tests that stop() also has the functionality of wait()."""
+ process = Process('cmd')
+ process._process = mock.Mock()
+ process.wait = mock.Mock()
+
+ process.stop()
+
+ self.assertEqual(process.wait.called, True)
+
+ # _redirect_output
+
+ def test_redirect_output_feeds_all_lines_to_on_output_callback(self):
+ """Tests that _redirect_output loops until all lines are parsed."""
+ received_list = []
+
+ def appender(line):
+ received_list.append(line)
+
+ process = Process('cmd')
+ process.set_on_output_callback(appender)
+ process._process = mock.Mock()
+ process._process.stdout.readline.side_effect = [b'a\n', b'b\n', b'']
+
+ process._redirect_output()
+
+ self.assertEqual(received_list[0], 'a')
+ self.assertEqual(received_list[1], 'b')
+ self.assertEqual(len(received_list), 2)
+
+ # __start_process
+
+ def test_start_process_returns_a_popen_object(self):
+ """Tests that a Popen object is returned by __start_process."""
+ with self.patch('subprocess.Popen', return_value='verification'):
+ self.assertEqual(Process._Process__start_process('cmd'),
+ 'verification')
+
+ # _exec_loop
+
+ def test_exec_loop_redirections_output(self):
+ """Tests that the _exec_loop function calls to redirect the output."""
+ process = Process('cmd')
+ Process._Process__start_process = mock.Mock()
+
+ with self.patch('Thread', FakeThread):
+ process._exec_loop()
+
+ self.assertEqual(process._redirection_thread.target,
+ process._redirect_output)
+ self.assertEqual(process._redirection_thread.alive, True)
+
+ def test_exec_loop_waits_for_process(self):
+ """Tests that the _exec_loop waits for the process to complete before
+ returning."""
+ process = Process('cmd')
+ Process._Process__start_process = mock.Mock()
+
+ with self.patch('Thread', FakeThread):
+ process._exec_loop()
+
+ self.assertEqual(process._process.wait.called, True)
+
+ def test_exec_loop_loops_if_not_stopped(self):
+ process = Process('1st')
+ Process._Process__start_process = mock.Mock()
+ process._on_terminate_callback = mock.Mock(side_effect=['2nd', None])
+
+ with self.patch('Thread', FakeThread):
+ process._exec_loop()
+
+ self.assertEqual(Process._Process__start_process.call_count, 2)
+ self.assertEqual(Process._Process__start_process.call_args_list[0][0],
+ ('1st', ))
+ self.assertEqual(Process._Process__start_process.call_args_list[1][0],
+ ('2nd', ))
+
+ def test_exec_loop_does_not_loop_if_stopped(self):
+ process = Process('1st')
+ Process._Process__start_process = mock.Mock()
+ process._on_terminate_callback = mock.Mock(
+ side_effect=['2nd', None])
+ process._stopped = True
+
+ with self.patch('Thread', FakeThread):
+ process._exec_loop()
+
+ self.assertEqual(Process._Process__start_process.call_count, 1)
+ self.assertEqual(
+ Process._Process__start_process.call_args_list[0][0],
+ ('1st',))
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/acts/framework/tests/libs/version_selector_test.py b/acts/framework/tests/libs/version_selector_test.py
new file mode 100755
index 0000000..54aa78b
--- /dev/null
+++ b/acts/framework/tests/libs/version_selector_test.py
@@ -0,0 +1,291 @@
+#!/usr/bin/env python3
+#
+# Copyright 2018 - The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from __future__ import absolute_import
+import sys
+import os
+# A temporary hack to prevent tests/libs/logging from being selected as the
+# python default logging module.
+sys.path[0] = os.path.join(sys.path[0], '../')
+import unittest
+import logging
+
+from acts import base_test
+from acts.libs import version_selector
+from acts.test_decorators import test_tracker_info
+
+
+def versioning_decorator(min_sdk, max_sdk):
+ return version_selector.set_version(lambda ret, *_, **__: ret, min_sdk,
+ max_sdk)
+
+
+def versioning_decorator2(min_sdk, max_sdk):
+ return version_selector.set_version(lambda ret, *_, **__: ret, min_sdk,
+ max_sdk)
+
+
+def test_versioning(min_sdk, max_sdk):
+ return version_selector.set_version(lambda *_, **__: 1, min_sdk, max_sdk)
+
+
+@versioning_decorator(1, 10)
+def versioned_func(arg1, arg2):
+ return 'function 1', arg1, arg2
+
+
+@versioning_decorator(11, 11)
+def versioned_func(arg1, arg2):
+ return 'function 2', arg1, arg2
+
+
+@versioning_decorator(12, 20)
+def versioned_func(arg1, arg2):
+ return 'function 3', arg1, arg2
+
+
+@versioning_decorator(1, 20)
+def versioned_func_with_kwargs(_, asdf='jkl'):
+ return asdf
+
+
+def class_versioning_decorator(min_sdk, max_sdk):
+ return version_selector.set_version(lambda _, ret, *__, **___: ret, min_sdk,
+ max_sdk)
+
+
+class VersionedClass(object):
+ @classmethod
+ @class_versioning_decorator(1, 99999999)
+ def class_func(cls, arg1):
+ return cls, arg1
+
+ @staticmethod
+ @versioning_decorator(1, 99999999)
+ def static_func(arg1):
+ return arg1
+
+ @class_versioning_decorator(1, 99999999)
+ def instance_func(self, arg1):
+ return self, arg1
+
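+# Expected dispatch for the module-level versioned_func defined above; this
+# is only a summary of the assertions made further down in this file:
+#
+#   versioned_func(1, 'x')   -> ('function 1', 1, 'x')   # min bound inclusive
+#   versioned_func(11, 'x')  -> ('function 2', 11, 'x')  # min == max
+#   versioned_func(16, 'x')  -> ('function 3', 16, 'x')  # mid-range value
+#
+# The first positional argument acts as the version because the decorators
+# above pass `lambda ret, *_, **__: ret` as the version getter.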
+
+class VersionedTestClass(base_test.BaseTestClass):
+ @test_tracker_info('UUID_1')
+ @test_versioning(1, 1)
+ def test_1(self):
+ pass
+
+ @test_versioning(1, 1)
+ @test_tracker_info('UUID_2')
+ def test_2(self):
+ pass
+
+
+class VersionSelectorIntegrationTest(unittest.TestCase):
+ """Tests the acts.libs.version_selector module."""
+
+ def test_versioned_test_class_calls_both_functions(self):
+ """Tests that VersionedTestClass (above) can be called with
+ test_tracker_info."""
+ test_class = VersionedTestClass({'log': logging.getLogger(),
+ 'cli_args': []})
+ test_class.run(['test_1', 'test_2'], 1)
+
+ self.assertIn('Executed 2', test_class.results.summary_str(),
+ 'One or more of the test cases did not execute.')
+ self.assertEqual(
+ 'UUID_1',
+ test_class.results.executed[0].extras['test_tracker_uuid'],
+ 'The test_tracker_uuid was not found for test_1.')
+ self.assertEqual(
+ 'UUID_2',
+ test_class.results.executed[1].extras['test_tracker_uuid'],
+ 'The test_tracker_uuid was not found for test_2.')
+
+ def test_raises_syntax_error_if_decorated_with_staticmethod_first(self):
+ try:
+ class SomeClass(object):
+ @versioning_decorator(1, 1)
+ @staticmethod
+ def test_1():
+ pass
+ except SyntaxError:
+ pass
+ else:
+ self.fail('Placing the @staticmethod decorator after the '
+ 'versioning decorator should cause a SyntaxError.')
+
+ def test_raises_syntax_error_if_decorated_with_classmethod_first(self):
+ try:
+ class SomeClass(object):
+ @versioning_decorator(1, 1)
+ @classmethod
+ def test_1(cls):
+ pass
+ except SyntaxError:
+ pass
+ else:
+ self.fail('Placing the @classmethod decorator after the '
+ 'versioning decorator should cause a SyntaxError.')
+
+ def test_overriding_an_undecorated_func_raises_a_syntax_error(self):
+ try:
+ class SomeClass(object):
+ def test_1(self):
+ pass
+
+ @versioning_decorator(1, 1)
+ def test_1(self):
+ pass
+ except SyntaxError:
+ pass
+ else:
+ self.fail('Overwriting a function that already exists without a '
+ 'versioning decorator should raise a SyntaxError.')
+
+ def test_func_decorated_with_2_different_versioning_decorators_causes_error(
+ self):
+ try:
+ class SomeClass(object):
+ @versioning_decorator(1, 1)
+ def test_1(self):
+ pass
+
+ @versioning_decorator2(2, 2)
+ def test_1(self):
+ pass
+ except SyntaxError:
+ pass
+ else:
+ self.fail('Using two different versioning decorators to version a '
+ 'single function should raise a SyntaxError.')
+
+ def test_func_decorated_with_overlapping_ranges_causes_value_error(self):
+ try:
+ class SomeClass(object):
+ @versioning_decorator(1, 2)
+ def test_1(self):
+ pass
+
+ @versioning_decorator(2, 2)
+ def test_1(self):
+ pass
+ except ValueError:
+ pass
+ else:
+ self.fail('Decorated functions with overlapping version ranges '
+ 'should raise a ValueError.')
+
+ def test_func_decorated_with_min_gt_max_causes_value_error(self):
+ try:
+ class SomeClass(object):
+ @versioning_decorator(2, 1)
+ def test_1(self):
+ pass
+ except ValueError:
+ pass
+ else:
+ self.fail('If the min_version level is higher than the max_version '
+ 'level, a ValueError should be raised.')
+
+ def test_calling_versioned_func_on_min_version_level_is_inclusive(self):
+ """Tests that calling some versioned function with the minimum version
+ level of the decorated function will call that function."""
+ ret = versioned_func(1, 'some_value')
+ self.assertEqual(ret, ('function 1', 1, 'some_value'),
+ 'Calling versioned_func(1, ...) did not return the '
+ 'versioned function for the correct range.')
+
+ def test_calling_versioned_func_on_middle_level_works(self):
+ """Tests that calling some versioned function a version value within the
+ range of the decorated function will call that function."""
+ ret = versioned_func(16, 'some_value')
+ self.assertEqual(ret, ('function 3', 16, 'some_value'),
+ 'Calling versioned_func(16, ...) did not return the '
+ 'versioned function for the correct range.')
+
+ def test_calling_versioned_func_on_max_version_level_is_inclusive(self):
+ """Tests that calling some versioned function with the maximum version
+ level of the decorated function will call that function."""
+ ret = versioned_func(10, 'some_value')
+ self.assertEqual(ret, ('function 1', 10, 'some_value'),
+ 'Calling versioned_func(10, ...) did not return the '
+ 'versioned function for the correct range.')
+
+ def test_calling_versioned_func_on_min_equals_max_level_works(self):
+ """Tests that calling some versioned function with the maximum version
+ level of the decorated function will call that function."""
+ ret = versioned_func(11, 'some_value')
+ self.assertEqual(ret, ('function 2', 11, 'some_value'),
+                         'Calling versioned_func(11, ...) did not return the '
+ 'versioned function for the correct range.')
+
+ def test_sending_kwargs_through_decorated_functions_works(self):
+ """Tests that calling some versioned function with the maximum version
+ level of the decorated function will call that function."""
+ ret = versioned_func_with_kwargs(1, asdf='some_value')
+ self.assertEqual(ret, 'some_value',
+                         'Calling versioned_func_with_kwargs(1, ...) did not '
+ 'return the kwarg value properly.')
+
+ def test_kwargs_can_default_through_decorated_functions(self):
+ """Tests that calling some versioned function with the maximum version
+ level of the decorated function will call that function."""
+ ret = versioned_func_with_kwargs(1)
+ self.assertEqual(ret, 'jkl',
+                         'Calling versioned_func_with_kwargs(1) did not '
+ 'return the default kwarg value properly.')
+
+ def test_staticmethod_can_be_called_properly(self):
+ """Tests that decorating a staticmethod will properly send the arguments
+ in the correct order.
+
+ i.e., we want to make sure self or cls do not get sent as the first
+ argument to the decorated staticmethod.
+ """
+ versioned_class = VersionedClass()
+ ret = versioned_class.static_func(123456)
+ self.assertEqual(ret, 123456,
+ 'The first argument was not set properly for calling '
+ 'a staticmethod.')
+
+ def test_instance_method_can_be_called_properly(self):
+ """Tests that decorating a method will properly send the arguments
+ in the correct order.
+
+ i.e., we want to make sure self is the first argument returned.
+ """
+ versioned_class = VersionedClass()
+ ret = versioned_class.instance_func(123456)
+ self.assertEqual(ret, (versioned_class, 123456),
+ 'The arguments were not set properly for an instance '
+ 'method.')
+
+ def test_classmethod_can_be_called_properly(self):
+ """Tests that decorating a classmethod will properly send the arguments
+ in the correct order.
+
+ i.e., we want to make sure cls is the first argument returned.
+ """
+ versioned_class = VersionedClass()
+ ret = versioned_class.class_func(123456)
+ self.assertEqual(ret, (VersionedClass, 123456),
+ 'The arguments were not set properly for a '
+ 'classmethod.')
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/acts/framework/tests/metrics/__init__.py b/acts/framework/tests/metrics/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/acts/framework/tests/metrics/__init__.py
diff --git a/acts/framework/tests/metrics/context_test.py b/acts/framework/tests/metrics/context_test.py
new file mode 100644
index 0000000..63413fc
--- /dev/null
+++ b/acts/framework/tests/metrics/context_test.py
@@ -0,0 +1,185 @@
+#!/usr/bin/env python3
+#
+# Copyright 2018 - The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from functools import partial
+from mock import Mock
+from mock import patch
+import unittest
+from unittest import TestCase
+from acts.event.event import TestCaseEvent
+from acts.event.event import TestClassEvent
+from acts.metrics.context import get_context_for_event
+from acts.metrics.context import TestContext
+from acts.metrics.context import TestCaseContext
+from acts.metrics.context import TestClassContext
+
+LOGGING = 'acts.metrics.context.logging'
+
+
+class ModuleTest(TestCase):
+ """Unit tests for the context module."""
+
+ def test_get_context_for_event_for_test_case(self):
+ event = Mock(spec=TestCaseEvent)
+ event.test_class = Mock()
+ event.test_case = Mock()
+ context = get_context_for_event(event)
+
+ self.assertIsInstance(context, TestCaseContext)
+ self.assertEqual(context.test_class, event.test_class)
+ self.assertEqual(context.test_case, event.test_case)
+
+ def test_get_context_for_event_for_test_class(self):
+ event = Mock(spec=TestClassEvent)
+ event.test_class = Mock()
+ context = get_context_for_event(event)
+
+ self.assertIsInstance(context, TestClassContext)
+ self.assertEqual(context.test_class, event.test_class)
+
+ def test_get_context_for_unknown_event_type(self):
+ event = Mock()
+
+ self.assertRaises(TypeError, partial(get_context_for_event, event))
+
+
+class TestContextTest(TestCase):
+ """Unit tests for the TestContext class."""
+
+ @patch(LOGGING)
+ def test_get_base_output_path_uses_default(self, logging):
+ context = TestContext()
+
+ self.assertEqual(context.get_base_output_path(), logging.log_path)
+
+ def test_set_base_output_path_overrides_default(self):
+ context = TestContext()
+ mock_path = Mock()
+
+ context.set_base_output_path(mock_path)
+
+ self.assertEqual(context.get_base_output_path(), mock_path)
+
+ def test_get_output_dir_attempts_to_use_default(self):
+ context = TestContext()
+
+ self.assertRaises(NotImplementedError, context.get_output_dir)
+
+ def test_set_output_dir_overrides_default(self):
+ context = TestContext()
+ mock_dir = Mock()
+
+ context.set_output_dir(mock_dir)
+
+ self.assertEqual(context.get_output_dir(), mock_dir)
+
+ def test_get_full_output_path(self):
+ context = TestContext()
+ path = 'base/path'
+ dir = 'output/dir'
+ context.set_base_output_path(path)
+ context.set_output_dir(dir)
+
+ full_path = 'base/path/output/dir'
+ self.assertEqual(context.get_full_output_path(), full_path)
+
+ def test_identifier_not_implemented(self):
+ context = TestContext()
+
+ self.assertRaises(NotImplementedError, lambda: context.identifier)
+
+
+class TestClassContextTest(TestCase):
+ """Unit tests for the TestClassContext class."""
+
+ def test_init_attributes(self):
+ test_class = Mock()
+ context = TestClassContext(test_class)
+
+ self.assertEqual(context.test_class, test_class)
+
+ def test_get_class_name(self):
+ class TestClass:
+ pass
+ test_class = TestClass()
+ context = TestClassContext(test_class)
+
+ self.assertEqual(context.test_class_name, TestClass.__name__)
+
+ def test_get_output_dir_is_class_name(self):
+ class TestClass:
+ pass
+ test_class = TestClass()
+ context = TestClassContext(test_class)
+
+ self.assertEqual(context.get_output_dir(), TestClass.__name__)
+
+ def test_identifier_is_class_name(self):
+ class TestClass:
+ pass
+ test_class = TestClass()
+ context = TestClassContext(test_class)
+
+ self.assertEqual(context.identifier, TestClass.__name__)
+
+
+class TestCaseContextTest(TestCase):
+ """Unit tests for the TestCaseContext class."""
+
+ def test_init_attributes(self):
+ test_class = Mock()
+ test_case = Mock()
+ test_case.__name__ = 'test_case_name'
+ context = TestCaseContext(test_class, test_case)
+
+ self.assertEqual(context.test_class, test_class)
+ self.assertEqual(context.test_case, test_case)
+ self.assertEqual(context.test_case_name, test_case.__name__)
+
+ def test_get_class_name(self):
+ class TestClass:
+ pass
+ test_class = TestClass()
+ test_case_name = Mock()
+ context = TestCaseContext(test_class, test_case_name)
+
+ self.assertEqual(context.test_class_name, TestClass.__name__)
+
+ def test_get_output_dir_is_class_and_test_case_name(self):
+ class TestClass:
+ def test_case(self):
+ pass
+ test_class = TestClass()
+ test_case = TestClass.test_case
+ context = TestCaseContext(test_class, test_case)
+
+ output_dir = TestClass.__name__ + '/' + test_case.__name__
+ self.assertEqual(context.get_output_dir(), output_dir)
+
+ def test_identifier_is_class_and_test_case_name(self):
+ class TestClass:
+ def test_case(self):
+ pass
+ test_class = TestClass()
+ test_case = TestClass.test_case
+ context = TestCaseContext(test_class, test_case)
+
+ identifier = TestClass.__name__ + '.' + test_case.__name__
+ self.assertEqual(context.identifier, identifier)
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/acts/framework/tests/metrics/core_test.py b/acts/framework/tests/metrics/core_test.py
new file mode 100644
index 0000000..ec9b6c1
--- /dev/null
+++ b/acts/framework/tests/metrics/core_test.py
@@ -0,0 +1,330 @@
+#!/usr/bin/env python3
+#
+# Copyright 2018 - The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from functools import partial
+from mock import call
+from mock import Mock
+from mock import patch
+import unittest
+from unittest import TestCase
+from acts.metrics.core import MetricPublisher
+from acts.metrics.core import ProtoMetric
+from acts.metrics.core import ProtoMetricPublisher
+
+PARSE_PROTO_TO_ASCII = 'acts.metrics.core.parse_proto_to_ascii'
+TO_DESCRIPTOR_PROTO = 'acts.metrics.core.to_descriptor_proto'
+DUMP_STRING_TO_FILE = 'acts.metrics.core.dump_string_to_file'
+MAKEDIRS = 'acts.metrics.core.os.makedirs'
+
+class ProtoMetricTest(TestCase):
+ """Unit tests for the ProtoMetric class."""
+
+ TEST_NAME = 'metric name'
+
+ def setUp(self):
+ self.data = Mock()
+
+ def test_default_init_attributes(self):
+        metric = ProtoMetric(name=self.TEST_NAME, data=self.data)
+ self.assertEqual(metric.name, self.TEST_NAME)
+ self.assertEqual(metric.data, self.data)
+
+ def test_no_data_init_raises_error(self):
+ self.assertRaises(ValueError, lambda: ProtoMetric(name=self.TEST_NAME))
+
+ def test_get_binary(self):
+ metric = ProtoMetric(name=self.TEST_NAME, data=self.data)
+ self.data.SerializeToString = Mock()
+
+ metric.get_binary()
+
+ self.data.SerializeToString.assert_called_once_with()
+
+ @patch(PARSE_PROTO_TO_ASCII)
+ def test_get_ascii(self, parse_proto_to_ascii):
+ metric = ProtoMetric(name=self.TEST_NAME, data=self.data)
+
+ metric.get_ascii()
+
+ parse_proto_to_ascii.assert_called_once_with(self.data)
+
+ @patch(TO_DESCRIPTOR_PROTO)
+ def test_get_descriptor_binary(self, to_descriptor_proto):
+ metric = ProtoMetric(name=self.TEST_NAME, data=self.data)
+ descriptor_proto = Mock()
+ descriptor_proto.SerializeToString = Mock()
+ to_descriptor_proto.return_value = descriptor_proto
+
+ metric.get_descriptor_binary()
+
+ to_descriptor_proto.assert_called_once_with(self.data)
+ descriptor_proto.SerializeToString.assert_called_once_with()
+
+ @patch(PARSE_PROTO_TO_ASCII)
+ @patch(TO_DESCRIPTOR_PROTO)
+ def test_get_descriptor_ascii(self, to_desc_proto, parse_proto):
+ metric = ProtoMetric(name=self.TEST_NAME, data=self.data)
+ descriptor_proto = Mock()
+ to_desc_proto.return_value = descriptor_proto
+
+ metric.get_descriptor_ascii()
+
+ to_desc_proto.assert_called_once_with(self.data)
+ parse_proto.assert_called_once_with(descriptor_proto)
+
+
+class MetricPublisherTest(TestCase):
+ """Unit tests for the MetricPublisher class."""
+
+ def test_default_init_attributes(self):
+ context = Mock()
+ publisher = MetricPublisher(context)
+
+ self.assertEqual(publisher.context, context)
+
+ def test_none_init_raises(self):
+ self.assertRaises(ValueError, lambda: MetricPublisher(None))
+
+ def test_publish_not_implemented(self):
+ context = Mock()
+ metrics = Mock()
+ publisher = MetricPublisher(context)
+
+ self.assertRaises(NotImplementedError, lambda: publisher.publish(metrics))
+
+
+class ProtoMetricPublisherTest(TestCase):
+ """Unit tests for the ProtoMetricPublisher class"""
+
+ def test_init_attributes(self):
+ context = Mock()
+ context.get_full_output_path.return_value = 'output/path'
+ publishes_binary = Mock()
+ publishes_ascii = Mock()
+ publishes_descriptor_binary = Mock()
+ publishes_descriptor_ascii = Mock()
+
+ publisher = ProtoMetricPublisher(
+ context,
+ publishes_binary=publishes_binary,
+ publishes_ascii=publishes_ascii,
+ publishes_descriptor_binary=publishes_descriptor_binary,
+ publishes_descriptor_ascii=publishes_descriptor_ascii)
+
+ self.assertEqual(publisher.context, context)
+ self.assertEqual(publisher.publishes_binary, publishes_binary)
+ self.assertEqual(publisher.publishes_ascii, publishes_ascii)
+ self.assertEqual(publisher.publishes_descriptor_binary,
+ publishes_descriptor_binary)
+ self.assertEqual(publisher.publishes_descriptor_ascii,
+ publishes_descriptor_ascii)
+
+ def test_default_init_publishes_everything(self):
+ context = Mock()
+ context.get_full_output_path.return_value = 'output/path'
+ publisher = ProtoMetricPublisher(context)
+
+ self.assertEqual(publisher.publishes_binary, True)
+ self.assertEqual(publisher.publishes_ascii, True)
+ self.assertEqual(publisher.publishes_descriptor_binary, True)
+ self.assertEqual(publisher.publishes_descriptor_ascii, True)
+
+ def test_get_output_path(self):
+ context = Mock()
+ context.get_full_output_path.return_value = 'output/path'
+
+ publisher = ProtoMetricPublisher(context)
+ output_path = publisher.get_output_path()
+
+ metrics_output_path = 'output/path/metrics'
+ self.assertEqual(output_path, metrics_output_path)
+ context.get_full_output_path.assert_called_once_with()
+
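+    # The publish tests below pin down the file layout implied by their
+    # assertions (a reading of the expectations, not extra documentation):
+    # each metric is written to
+    #
+    #   <full output path>/metrics/<metric name>.<extension>
+    #
+    # with binary payloads passed to dump_string_to_file using mode='wb'.
+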
+ @patch(MAKEDIRS)
+ @patch(DUMP_STRING_TO_FILE)
+ def test_publish_all_disabled(self, dump_string_to_file, makedirs):
+ del makedirs
+ context = Mock()
+ context.get_full_output_path.return_value = 'output/path'
+ metrics = [Mock()]
+ publisher = ProtoMetricPublisher(
+ context,
+ publishes_binary=False,
+ publishes_ascii=False,
+ publishes_descriptor_binary=False,
+ publishes_descriptor_ascii=False)
+
+ publisher.publish(metrics)
+
+ assert not dump_string_to_file.called
+
+ @patch(MAKEDIRS)
+ @patch(DUMP_STRING_TO_FILE)
+ def test_publish_makes_dirs(self, dump_string_to_file, makedirs):
+ del dump_string_to_file
+ context = Mock()
+ context.get_full_output_path.return_value = 'output/path'
+ metrics = [Mock()]
+ publisher = ProtoMetricPublisher(
+ context,
+ publishes_binary=False,
+ publishes_ascii=False,
+ publishes_descriptor_binary=False,
+ publishes_descriptor_ascii=False)
+
+ publisher.publish(metrics)
+
+        metrics_output_path = 'output/path/metrics'
+ makedirs.assert_called_once_with(metrics_output_path, exist_ok=True)
+
+ @patch(MAKEDIRS)
+ @patch(DUMP_STRING_TO_FILE)
+ def test_publish_binary(self, dump_string_to_file, makedirs):
+ del makedirs
+ context = Mock()
+ context.get_full_output_path.return_value = 'output/path'
+ metric = Mock()
+ binary = Mock()
+ metric.get_binary = Mock(return_value=binary)
+ metric.name = 'metric'
+ publisher = ProtoMetricPublisher(
+ context,
+ publishes_binary=True,
+ publishes_ascii=False,
+ publishes_descriptor_binary=False,
+ publishes_descriptor_ascii=False)
+
+ publisher.publish([metric])
+
+ file_path = ('output/path/metrics/metric.' +
+ ProtoMetricPublisher.BINARY_EXTENSION)
+ dump_string_to_file.assert_called_once_with(
+ binary,
+ file_path,
+ mode='wb')
+
+ @patch(MAKEDIRS)
+ @patch(DUMP_STRING_TO_FILE)
+ def test_publish_ascii(self, dump_string_to_file, makedirs):
+ del makedirs
+ context = Mock()
+ context.get_full_output_path.return_value = 'output/path'
+ metric = Mock()
+ ascii = Mock()
+ metric.get_ascii = Mock(return_value=ascii)
+ metric.name = 'metric'
+ publisher = ProtoMetricPublisher(
+ context,
+ publishes_binary=False,
+ publishes_ascii=True,
+ publishes_descriptor_binary=False,
+ publishes_descriptor_ascii=False)
+
+ publisher.publish([metric])
+
+ file_path = ('output/path/metrics/metric.' +
+ ProtoMetricPublisher.ASCII_EXTENSION)
+ dump_string_to_file.assert_called_once_with(
+ ascii,
+ file_path)
+
+ @patch(MAKEDIRS)
+ @patch(DUMP_STRING_TO_FILE)
+ def test_publish_descriptor_binary(self, dump_string_to_file, makedirs):
+ del makedirs
+ context = Mock()
+ context.get_full_output_path.return_value = 'output/path'
+ metric = Mock()
+ descriptor_binary = Mock()
+ metric.get_descriptor_binary = Mock(return_value=descriptor_binary)
+ metric.name = 'metric'
+ publisher = ProtoMetricPublisher(
+ context,
+ publishes_binary=False,
+ publishes_ascii=False,
+ publishes_descriptor_binary=True,
+ publishes_descriptor_ascii=False)
+
+ publisher.publish([metric])
+
+ file_path = ('output/path/metrics/metric.' +
+ ProtoMetricPublisher.BINARY_DESCRIPTOR_EXTENSION)
+ dump_string_to_file.assert_called_once_with(
+ descriptor_binary,
+ file_path,
+ mode='wb')
+
+ @patch(MAKEDIRS)
+ @patch(DUMP_STRING_TO_FILE)
+ def test_publish_ascii_descriptor(self, dump_string_to_file, makedirs):
+ del makedirs
+ context = Mock()
+ context.get_full_output_path.return_value = 'output/path'
+ metric = Mock()
+ descriptor_ascii = Mock()
+ metric.get_ascii = Mock(return_value=descriptor_ascii)
+ metric.name = 'metric'
+ publisher = ProtoMetricPublisher(
+ context,
+ publishes_binary=False,
+ publishes_ascii=True,
+ publishes_descriptor_binary=False,
+ publishes_descriptor_ascii=False)
+
+ publisher.publish([metric])
+
+ file_path = ('output/path/metrics/metric.' +
+ ProtoMetricPublisher.ASCII_EXTENSION)
+ dump_string_to_file.assert_called_once_with(
+ descriptor_ascii,
+ file_path)
+
+ @patch(MAKEDIRS)
+ @patch(DUMP_STRING_TO_FILE)
+ def test_publish_multiple_binaries(self, dump_string_to_file, makedirs):
+ del makedirs
+ context = Mock()
+ context.get_full_output_path.return_value = 'output/path'
+ metric_1 = Mock()
+ metric_2 = Mock()
+ binary_1 = Mock()
+ binary_2 = Mock()
+ metric_1.get_binary = Mock(return_value=binary_1)
+ metric_2.get_binary = Mock(return_value=binary_2)
+ metric_1.name = 'metric_1'
+ metric_2.name = 'metric_2'
+ publisher = ProtoMetricPublisher(
+ context,
+ publishes_binary=True,
+ publishes_ascii=False,
+ publishes_descriptor_binary=False,
+ publishes_descriptor_ascii=False)
+
+ publisher.publish([metric_1, metric_2])
+
+ file_path_1 = ('output/path/metrics/metric_1.' +
+ ProtoMetricPublisher.BINARY_EXTENSION)
+ file_path_2 = ('output/path/metrics/metric_2.' +
+ ProtoMetricPublisher.BINARY_EXTENSION)
+
+ call_1 = call(binary_1, file_path_1, mode='wb')
+ call_2 = call(binary_2, file_path_2, mode='wb')
+ dump_string_to_file.assert_has_calls([call_1, call_2])
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/acts/framework/tests/metrics/logger_test.py b/acts/framework/tests/metrics/logger_test.py
new file mode 100644
index 0000000..1101c37
--- /dev/null
+++ b/acts/framework/tests/metrics/logger_test.py
@@ -0,0 +1,276 @@
+#!/usr/bin/env python3
+#
+# Copyright 2018 - The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from mock import Mock
+from mock import patch
+import unittest
+from unittest import TestCase
+from acts.metrics.logger import LoggerProxy
+from acts.metrics.logger import MetricLogger
+
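+# Fully qualified names of the collaborators patched out in the tests below.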
+COMPILE_IMPORT_PROTO = 'acts.metrics.logger.compile_import_proto'
+CREATE_FROM_INSTANCE = (
+ 'acts.metrics.logger.subscription_bundle.create_from_instance')
+GET_CONTEXT_FOR_EVENT = 'acts.metrics.logger.get_context_for_event'
+GET_FILE = 'acts.metrics.logger.inspect.getfile'
+MKDTEMP = 'acts.metrics.logger.tempfile.mkdtemp'
+PROTO_METRIC_PUBLISHER = 'acts.metrics.logger.ProtoMetricPublisher'
+TEST_CASE_LOGGER_PROXY = 'acts.metrics.logger.TestCaseLoggerProxy'
+TEST_CLASS_LOGGER_PROXY = 'acts.metrics.logger.TestClassLoggerProxy'
+
+
+class MetricLoggerTest(TestCase):
+ """Unit tests for the MetricLogger class."""
+
+ @patch(TEST_CASE_LOGGER_PROXY)
+ def test_for_test_case_returns_test_case_proxy(self, proxy_cls):
+ args = (Mock(), )
+ kwargs = {'mock': Mock()}
+ logger = MetricLogger.for_test_case(*args, **kwargs)
+
+ proxy_cls.assert_called_once_with(MetricLogger, args, kwargs)
+
+ @patch(TEST_CLASS_LOGGER_PROXY)
+ def test_for_test_class_returns_test_class_proxy(self, proxy_cls):
+ args = (Mock(),)
+ kwargs = {'mock': Mock()}
+ logger = MetricLogger.for_test_class(*args, **kwargs)
+
+ proxy_cls.assert_called_once_with(MetricLogger, args, kwargs)
+
+ @patch(TEST_CASE_LOGGER_PROXY)
+ def test_for_test_case_works_on_subclasses(self, proxy_cls):
+ class TestLogger(MetricLogger):
+ pass
+ args = (Mock(),)
+ kwargs = {'mock': Mock()}
+ logger = TestLogger.for_test_case(*args, **kwargs)
+
+ proxy_cls.assert_called_once_with(TestLogger, args, kwargs)
+
+ @patch(TEST_CLASS_LOGGER_PROXY)
+ def test_for_test_class_works_on_subclasses(self, proxy_cls):
+ class TestLogger(MetricLogger):
+ pass
+ args = (Mock(),)
+ kwargs = {'mock': Mock()}
+ logger = TestLogger.for_test_class(*args, **kwargs)
+
+ proxy_cls.assert_called_once_with(TestLogger, args, kwargs)
+
+ @patch(COMPILE_IMPORT_PROTO)
+ @patch(GET_FILE)
+ def test_compile_proto_relative_path(self, getfile, compile_import_proto):
+ getfile.return_value = '/path/to/class/file.py'
+ proto_path = 'dir/my_proto.proto'
+ compiler_out = Mock()
+ MetricLogger._compile_proto(proto_path, compiler_out=compiler_out)
+
+ full_proto_path = '/path/to/class/dir/my_proto.proto'
+ compile_import_proto.assert_called_once_with(
+ compiler_out, full_proto_path)
+
+ @patch(COMPILE_IMPORT_PROTO)
+ @patch(GET_FILE)
+ def test_compile_proto_absolute_path(self, getfile, compile_import_proto):
+ proto_path = '/abs/path/to/my_proto.proto'
+ compiler_out = Mock()
+ MetricLogger._compile_proto(proto_path, compiler_out=compiler_out)
+
+ compile_import_proto.assert_called_once_with(compiler_out, proto_path)
+ getfile.assert_not_called()
+
+ @patch(COMPILE_IMPORT_PROTO)
+ @patch(GET_FILE)
+ @patch(MKDTEMP)
+ def test_compile_proto_default_compiler_out(self,
+ mkdtemp,
+ getfile,
+ compile_import_proto):
+ compiler_out = Mock()
+ mkdtemp.return_value = compiler_out
+ proto_path = '/abs/path/to/my_proto.proto'
+ MetricLogger._compile_proto(proto_path)
+
+ compile_import_proto.assert_called_once_with(compiler_out, proto_path)
+
+ def test_init_empty(self):
+ logger = MetricLogger()
+
+ self.assertIsNone(logger.context)
+ self.assertIsNone(logger.publisher)
+
+ def test_init_with_context_and_publisher(self):
+ context = Mock()
+ publisher = Mock()
+
+ logger = MetricLogger(context=context, publisher=publisher)
+
+ self.assertEqual(logger.context, context)
+ self.assertEqual(logger.publisher, publisher)
+
+ @patch(PROTO_METRIC_PUBLISHER)
+ @patch(GET_CONTEXT_FOR_EVENT)
+ def test_init_with_event(self, get_context, publisher_cls):
+ context = Mock()
+ publisher = Mock()
+ get_context.return_value = context
+ publisher_cls.return_value = publisher
+ event = Mock()
+
+ logger = MetricLogger(event=event)
+
+ get_context.assert_called_once_with(event)
+ publisher_cls.assert_called_once_with(context)
+ self.assertEqual(logger.context, context)
+ self.assertEqual(logger.publisher, publisher)
+
+ def test_start_has_default_impl(self):
+ logger = MetricLogger()
+ logger.start(Mock())
+
+ def test_end_has_default_impl(self):
+ logger = MetricLogger()
+ logger.end(Mock())
+
+ def test_compile_proto(self):
+ logger = MetricLogger()
+
+
+class LoggerProxyTest(TestCase):
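+ """Unit tests for the LoggerProxy class."""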
+
+ @patch(CREATE_FROM_INSTANCE)
+ def test_init(self, create_from_instance):
+ logger_cls = Mock()
+ logger_args = Mock()
+ logger_kwargs = Mock()
+ bundle = Mock()
+ create_from_instance.return_value = bundle
+ proxy = LoggerProxy(logger_cls,
+ logger_args,
+ logger_kwargs)
+
+ self.assertEqual(proxy._logger_cls, logger_cls)
+ self.assertEqual(proxy._logger_args, logger_args)
+ self.assertEqual(proxy._logger_kwargs, logger_kwargs)
+ self.assertIsNone(proxy._logger)
+ create_from_instance.assert_called_once_with(proxy)
+ bundle.register.assert_called_once_with()
+
+ @patch(CREATE_FROM_INSTANCE)
+ def test_setup_proxy(self, create_from_instance):
+ logger_cls = Mock()
+ logger_args = (Mock(), )
+ logger_kwargs = {'mock': Mock()}
+ bundle = Mock()
+ event = Mock()
+ create_from_instance.return_value = bundle
+ logger = Mock()
+ logger_cls.return_value = logger
+
+ proxy = LoggerProxy(logger_cls,
+ logger_args,
+ logger_kwargs)
+ proxy._setup_proxy(event)
+
+ logger_cls.assert_called_once_with(event=event,
+ *logger_args,
+ **logger_kwargs)
+ logger.start.assert_called_once_with(event)
+
+ @patch(CREATE_FROM_INSTANCE)
+ def test_teardown_proxy(self, create_from_instance):
+ logger_cls = Mock()
+ logger_args = (Mock(),)
+ logger_kwargs = {'mock': Mock()}
+ bundle = Mock()
+ event = Mock()
+ create_from_instance.return_value = bundle
+ logger = Mock()
+ logger_cls.return_value = logger
+
+ proxy = LoggerProxy(logger_cls,
+ logger_args,
+ logger_kwargs)
+ proxy._setup_proxy(event)
+ proxy._teardown_proxy(event)
+
+ logger.end.assert_called_once_with(event)
+ self.assertIsNone(proxy._logger)
+
+ @patch(CREATE_FROM_INSTANCE)
+ def test_getattr_forwards_to_logger(self, create_from_instance):
+ logger_cls = Mock()
+ logger_args = (Mock(),)
+ logger_kwargs = {'mock': Mock()}
+ bundle = Mock()
+ event = Mock()
+ create_from_instance.return_value = bundle
+ logger = Mock()
+ logger_cls.return_value = logger
+
+ proxy = LoggerProxy(logger_cls,
+ logger_args,
+ logger_kwargs)
+ proxy._setup_proxy(event)
+
+ self.assertEqual(proxy.some_attr, logger.some_attr)
+
+ @patch(CREATE_FROM_INSTANCE)
+ def test_getattr_with_no_logger_raises(self, create_from_instance):
+ bundle = Mock()
+ create_from_instance.return_value = bundle
+
+ proxy = LoggerProxy(Mock(), Mock(), Mock())
+
+ self.assertRaises(ValueError, lambda: proxy.some_attr)
+
+ @patch(CREATE_FROM_INSTANCE)
+ def test_setattr_forwards_to_logger(self, create_from_instance):
+ logger_cls = Mock()
+ logger_args = (Mock(),)
+ logger_kwargs = {'mock': Mock()}
+ bundle = Mock()
+ event = Mock()
+ create_from_instance.return_value = bundle
+ logger = Mock()
+ logger_cls.return_value = logger
+ value = Mock()
+
+ proxy = LoggerProxy(logger_cls,
+ logger_args,
+ logger_kwargs)
+ proxy._setup_proxy(event)
+ proxy.some_attr = value
+
+ self.assertEqual(logger.some_attr, value)
+
+ @patch(CREATE_FROM_INSTANCE)
+ def test_setattr_with_no_logger_raises(self, create_from_instance):
+ bundle = Mock()
+ create_from_instance.return_value = bundle
+ value = Mock()
+
+ proxy = LoggerProxy(Mock(), Mock(), Mock())
+
+ def try_set():
+ proxy.some_attr = value
+ self.assertRaises(ValueError, try_set)
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/acts/framework/tests/metrics/loggers/__init__.py b/acts/framework/tests/metrics/loggers/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/acts/framework/tests/metrics/loggers/__init__.py
diff --git a/acts/framework/tests/metrics/loggers/blackbox_test.py b/acts/framework/tests/metrics/loggers/blackbox_test.py
new file mode 100644
index 0000000..5910d32
--- /dev/null
+++ b/acts/framework/tests/metrics/loggers/blackbox_test.py
@@ -0,0 +1,312 @@
+#!/usr/bin/env python3
+#
+# Copyright 2016 - The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from mock import Mock
+from mock import patch
+import tempfile
+import unittest
+from unittest import TestCase
+from acts.base_test import BaseTestClass
+from acts.metrics.loggers.blackbox import BlackboxMetricLogger
+from acts.test_runner import TestRunner
+
+COMPILE_IMPORT_PROTO = 'acts.metrics.logger.compile_import_proto'
+GET_CONTEXT_FOR_EVENT = 'acts.metrics.logger.get_context_for_event'
+PROTO_METRIC_PUBLISHER = 'acts.metrics.logger.ProtoMetricPublisher'
+
+
+class BlackboxMetricLoggerTest(TestCase):
+ """Unit tests for BlackboxMetricLogger."""
+
+ TEST_METRIC_NAME = "metric_name"
+ TEST_FILE_NAME = "blackbox_metric_name"
+
+ def setUp(self):
+ self.proto_module = Mock()
+ self.event = Mock()
+ self.context = Mock()
+ self.publisher = Mock()
+
+ @patch(COMPILE_IMPORT_PROTO)
+ def test_default_init_attributes(self, compile_import_proto):
+ metric_name = Mock()
+ compile_import_proto.return_value = self.proto_module
+
+ logger = BlackboxMetricLogger(metric_name)
+
+ self.assertEqual(logger.metric_name, metric_name)
+ self.assertEqual(logger.proto_module, self.proto_module)
+ self.assertEqual(logger.result_attr, 'result')
+ self.assertIsNone(logger.metric_key)
+
+ @patch(COMPILE_IMPORT_PROTO)
+ def test_init_with_params(self, compile_import_proto):
+ metric_name = Mock()
+ result_attr = Mock()
+ metric_key = Mock()
+
+ logger = BlackboxMetricLogger(metric_name,
+ result_attr=result_attr,
+ metric_key=metric_key)
+
+ self.assertEqual(logger.result_attr, result_attr)
+ self.assertEqual(logger.metric_key, metric_key)
+
+ @patch(PROTO_METRIC_PUBLISHER)
+ @patch(GET_CONTEXT_FOR_EVENT)
+ @patch(COMPILE_IMPORT_PROTO)
+ def test_init_with_event(self,
+ compile_import_proto,
+ get_context,
+ publisher_cls):
+ metric_name = Mock()
+
+ logger = BlackboxMetricLogger(metric_name, event=self.event)
+
+ self.assertIsNotNone(logger.context)
+ self.assertIsNotNone(logger.publisher)
+
+ @patch(COMPILE_IMPORT_PROTO)
+ def test_end_populates_result(self, compile_import_proto):
+ result = Mock()
+ compile_import_proto.return_value = self.proto_module
+ self.proto_module.ActsBlackboxMetricResult.return_value = result
+
+ logger = BlackboxMetricLogger(self.TEST_METRIC_NAME)
+ logger.context = self.context
+ logger.publisher = self.publisher
+
+ logger.end(self.event)
+
+ self.assertEqual(result.test_identifier, self.context.identifier)
+ self.assertEqual(result.metric_key, self.context.identifier)
+ self.assertEqual(result.metric_value, self.context.test_class.result)
+
+ @patch(COMPILE_IMPORT_PROTO)
+ def test_end_uses_custom_result_attr(self, compile_import_proto):
+ result = Mock()
+ compile_import_proto.return_value = self.proto_module
+ self.proto_module.ActsBlackboxMetricResult.return_value = result
+ result_attr = 'result_attr'
+
+ logger = BlackboxMetricLogger(self.TEST_METRIC_NAME,
+ result_attr=result_attr)
+ logger.context = self.context
+ logger.publisher = self.publisher
+
+ logger.end(self.event)
+
+ self.assertEqual(result.metric_value,
+ getattr(self.context.test_class, result_attr))
+
+ @patch(COMPILE_IMPORT_PROTO)
+ def test_end_uses_custom_metric_key(self, compile_import_proto):
+ result = Mock()
+ compile_import_proto.return_value = self.proto_module
+ self.proto_module.ActsBlackboxMetricResult.return_value = result
+ metric_key = 'metric_key'
+
+ logger = BlackboxMetricLogger(self.TEST_METRIC_NAME,
+ metric_key=metric_key)
+ logger.context = self.context
+ logger.publisher = self.publisher
+
+ logger.end(self.event)
+
+ self.assertEqual(result.metric_key, metric_key)
+
+ @patch('acts.metrics.loggers.blackbox.ProtoMetric')
+ @patch(COMPILE_IMPORT_PROTO)
+ def test_end_does_publish(self, compile_import_proto, proto_metric_cls):
+ result = Mock()
+ compile_import_proto.return_value = self.proto_module
+ self.proto_module.ActsBlackboxMetricResult.return_value = result
+ metric_key = 'metric_key'
+
+ logger = BlackboxMetricLogger(self.TEST_METRIC_NAME,
+ metric_key=metric_key)
+ logger.context = self.context
+ logger.publisher = self.publisher
+
+ logger.end(self.event)
+
+ proto_metric_cls.assert_called_once_with(name=self.TEST_FILE_NAME,
+ data=result)
+ self.publisher.publish.assert_called_once_with(
+ proto_metric_cls.return_value)
+
+
+class BlackboxMetricLoggerIntegrationTest(TestCase):
+ """Integration tests for BlackboxMetricLogger."""
+
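+ # Runs an ACTS TestRunner on the given test class with module discovery and
+ # system interactions mocked out; returns the runner for inspection.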
+ @patch('acts.test_runner.sys')
+ @patch('acts.test_runner.utils')
+ @patch('acts.test_runner.importlib')
+ def run_acts_test(self, test_class, importlib, utils, sys):
+ config = {
+ "testbed": {
+ "name": "SampleTestBed",
+ },
+ "logpath": tempfile.mkdtemp(),
+ "cli_args": None,
+ "testpaths": ["./"],
+ }
+ mockModule = Mock()
+ setattr(mockModule, test_class.__name__, test_class)
+ utils.find_files.return_value = [(None, None, None)]
+ importlib.import_module.return_value = mockModule
+ runner = TestRunner(config, [(test_class.__name__, None, )])
+
+ runner.run()
+ runner.stop()
+ return runner
+
+ @patch('acts.metrics.logger.ProtoMetricPublisher')
+ def test_test_case_metric(self, publisher_cls):
+ result = 5.0
+
+ class MyTest(BaseTestClass):
+ def __init__(self, controllers):
+ BaseTestClass.__init__(self, controllers)
+ self.tests = ('test_case', )
+ BlackboxMetricLogger.for_test_case('my_metric')
+
+ def test_case(self):
+ self.result = result
+
+ self.run_acts_test(MyTest)
+
+ args_list = publisher_cls().publish.call_args_list
+ self.assertEqual(len(args_list), 1)
+ metric = self.__get_only_arg(args_list[0])
+ self.assertEqual(metric.name, 'blackbox_my_metric')
+ self.assertEqual(metric.data.test_identifier, 'MyTest.test_case')
+ self.assertEqual(metric.data.metric_key, 'MyTest.test_case')
+ self.assertEqual(metric.data.metric_value, result)
+
+ @patch('acts.metrics.logger.ProtoMetricPublisher')
+ def test_multiple_test_case_metrics(self, publisher_cls):
+ result = 5.0
+
+ class MyTest(BaseTestClass):
+ def __init__(self, controllers):
+ BaseTestClass.__init__(self, controllers)
+ self.tests = ('test_case',)
+ BlackboxMetricLogger.for_test_case('my_metric_1')
+ BlackboxMetricLogger.for_test_case('my_metric_2')
+
+ def test_case(self):
+ self.result = result
+
+ self.run_acts_test(MyTest)
+
+ args_list = publisher_cls().publish.call_args_list
+ self.assertEqual(len(args_list), 2)
+ metrics = [self.__get_only_arg(args) for args in args_list]
+ self.assertEqual(
+ {metric.name for metric in metrics},
+ {'blackbox_my_metric_1', 'blackbox_my_metric_2'})
+ self.assertEqual(
+ {metric.data.test_identifier for metric in metrics},
+ {'MyTest.test_case'})
+ self.assertEqual(
+ {metric.data.metric_key for metric in metrics},
+ {'MyTest.test_case'})
+ self.assertEqual(
+ {metric.data.metric_value for metric in metrics},
+ {result})
+
+ @patch('acts.metrics.logger.ProtoMetricPublisher')
+ def test_test_case_metric_with_custom_key(self, publisher_cls):
+ result = 5.0
+
+ class MyTest(BaseTestClass):
+ def __init__(self, controllers):
+ BaseTestClass.__init__(self, controllers)
+ self.tests = ('test_case',)
+ BlackboxMetricLogger.for_test_case('my_metric',
+ metric_key='my_metric_key')
+
+ def test_case(self):
+ self.result = result
+
+ self.run_acts_test(MyTest)
+
+ args_list = publisher_cls().publish.call_args_list
+ self.assertEqual(len(args_list), 1)
+ metric = self.__get_only_arg(args_list[0])
+ self.assertEqual(metric.data.metric_key, 'my_metric_key')
+
+ @patch('acts.metrics.logger.ProtoMetricPublisher')
+ def test_test_case_metric_with_custom_result_attr(self, publisher_cls):
+ true_result = 5.0
+ other_result = 10.0
+
+ class MyTest(BaseTestClass):
+ def __init__(self, controllers):
+ BaseTestClass.__init__(self, controllers)
+ self.tests = ('test_case',)
+ BlackboxMetricLogger.for_test_case('my_metric',
+ result_attr='true_result')
+
+ def test_case(self):
+ self.true_result = true_result
+ self.result = other_result
+
+ self.run_acts_test(MyTest)
+
+ args_list = publisher_cls().publish.call_args_list
+ self.assertEqual(len(args_list), 1)
+ metric = self.__get_only_arg(args_list[0])
+ self.assertEqual(metric.data.metric_value, true_result)
+
+ @patch('acts.metrics.logger.ProtoMetricPublisher')
+ def test_test_class_metric(self, publisher_cls):
+ publisher_cls().publish = Mock()
+ result_1 = 5.0
+ result_2 = 8.0
+
+ class MyTest(BaseTestClass):
+ def __init__(self, controllers):
+ BaseTestClass.__init__(self, controllers)
+ self.tests = ('test_case_1', 'test_case_2', )
+ BlackboxMetricLogger.for_test_class('my_metric')
+ self.result = 0
+
+ def test_case_1(self):
+ self.result += result_1
+
+ def test_case_2(self):
+ self.result += result_2
+
+ self.run_acts_test(MyTest)
+
+ args_list = publisher_cls().publish.call_args_list
+ self.assertEqual(len(args_list), 1)
+ metric = self.__get_only_arg(args_list[0])
+ self.assertEqual(metric.data.metric_value, result_1 + result_2)
+ self.assertEqual(metric.data.test_identifier, MyTest.__name__)
+
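+ # Asserts that the mock call carried exactly one argument and returns it,
+ # whether it was passed positionally or by keyword.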
+ def __get_only_arg(self, call_args):
+ self.assertEqual(len(call_args[0]) + len(call_args[1]), 1)
+ if len(call_args[0]) == 1:
+ return call_args[0][0]
+ return next(iter(call_args[1].values()))
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/acts/tests/google/ble/fuchsia_tests/BleFuchsiaAndroidTest.py b/acts/tests/google/ble/fuchsia_tests/BleFuchsiaAndroidTest.py
new file mode 100644
index 0000000..10f4d1b
--- /dev/null
+++ b/acts/tests/google/ble/fuchsia_tests/BleFuchsiaAndroidTest.py
@@ -0,0 +1,205 @@
+#!/usr/bin/env python3
+#
+# Copyright (C) 2018 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not
+# use this file except in compliance with the License. You may obtain a copy of
+# the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations under
+# the License.
+"""This script shows simple examples of how to get started with bluetooth
+ low energy testing in acts.
+"""
+
+import pprint
+import random
+import time
+
+from acts.controllers import android_device
+from acts.test_utils.bt.BluetoothBaseTest import BluetoothBaseTest
+from acts.test_utils.bt.bt_constants import adv_succ
+from acts.test_utils.bt.bt_constants import ble_scan_settings_modes
+from acts.test_utils.bt.bt_constants import scan_result
+from acts.test_utils.bt.bt_test_utils import cleanup_scanners_and_advertisers
+from acts.test_utils.bt.bt_test_utils import reset_bluetooth
+
+
+class BleFuchsiaAndroidTest(BluetoothBaseTest):
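+ """BLE interop tests between an Android device and a Fuchsia device."""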
+ default_timeout = 10
+ active_adv_callback_list = []
+ droid = None
+
+ def __init__(self, controllers):
+ BluetoothBaseTest.__init__(self, controllers)
+
+ # Android device under test
+ self.ad = self.android_devices[0]
+ # Fuchsia device under test
+ self.fd = self.fuchsia_devices[0]
+ self.log.info("There are: {} fuchsia and {} android devices.".format(
+ len(self.fuchsia_devices), len(self.android_devices)))
+
+ def teardown_test(self):
+ self.fd.clean_up()
+
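+ # Starts LE advertising on the Android device with the device name included
+ # in the advertise data, and waits for the advertise-success event.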
+ def _start_generic_advertisement_include_device_name(self):
+ self.ad.droid.bleSetAdvertiseDataIncludeDeviceName(True)
+ advertise_data = self.ad.droid.bleBuildAdvertiseData()
+ advertise_settings = self.ad.droid.bleBuildAdvertiseSettings()
+ advertise_callback = self.ad.droid.bleGenBleAdvertiseCallback()
+ self.ad.droid.bleStartBleAdvertising(
+ advertise_callback, advertise_data, advertise_settings)
+ self.ad.ed.pop_event(
+ adv_succ.format(advertise_callback), self.default_timeout)
+ self.active_adv_callback_list.append(advertise_callback)
+ return advertise_callback
+
+ # Basic test for android device as advertiser and fuchsia device as scanner
+ # Returns True if scan result has an entry corresponding to sample_android_name
+ @BluetoothBaseTest.bt_test_wrap
+ def test_fuchsia_scan_android_adv(self):
+ sample_android_name = "Pixel1234"
+ self.ad.droid.bluetoothSetLocalName(sample_android_name)
+ adv_callback = self._start_generic_advertisement_include_device_name()
+ droid_name = self.ad.droid.bluetoothGetLocalName()
+ self.log.info("Android device name: {}".format(droid_name))
+
+ # Generate input params for command
+ scan_time = 30000
+ scan_filter = {"name_substring": "Pixel"}
+ scan_count = 1
+ scan_res = self.fd.ble_lib.bleStartBleScan(scan_time, scan_filter,
+ scan_count)
+
+ # Get the result and validate
+ self.log.info("Scan res: {}".format(scan_res))
+
+ try:
+ scan_res = scan_res["result"]
+ #Validate result
+ res = False
+ for device in scan_res:
+ name, did, connectable = device["name"], device["id"], device[
+ "connectable"]
+ if (name):
+ self.log.info(
+ "Discovered device with name: {}".format(name))
+ if (name == droid_name):
+ self.log.info(
+ "Successfully found android device advertising! name, id: {}, {}"
+ .format(name, did))
+ res = True
+
+ except Exception:
+ self.log.error("Failed to discover android device")
+ res = False
+
+ #Print clients to validate results are saved
+ self.fd.print_clients()
+
+ #Stop android advertising
+ self.ad.droid.bleStopBleAdvertising(adv_callback)
+
+ return res
+
+ # Test for fuchsia device attempting to connect to android device (peripheral)
+ # Also tests list_services and disconnect on the peripheral
+ @BluetoothBaseTest.bt_test_wrap
+ def test_fuchsia_connect_android_periph(self):
+ sample_android_name = "Pixel1234"
+ self.ad.droid.bluetoothStartPairingHelper()
+ self.ad.droid.bluetoothSetLocalName(sample_android_name)
+ adv_callback = self._start_generic_advertisement_include_device_name()
+ droid_name = self.ad.droid.bluetoothGetLocalName()
+ self.log.info("Android device name: {}".format(droid_name))
+
+ # Generate input params for command
+ # Set scan time for 30 seconds (30,000 ms) and filter by android name
+ # Resolve scan after device is found (scan_count = 1)
+ scan_time_ms = 30000
+ scan_filter = {"name_substring": droid_name}
+ scan_count = 1
+ scan_res = self.fd.ble_lib.bleStartBleScan(scan_time_ms, scan_filter,
+ scan_count)
+
+ # Get the result and validate
+ self.log.info("Scan res: {}".format(scan_res))
+
+ try:
+ scan_res = scan_res["result"]
+ #Validate result
+ res = False
+ for device in scan_res:
+ name, did, connectable = device["name"], device["id"], device[
+ "connectable"]
+ if (name):
+ self.log.info(
+ "Discovered device with name: {}".format(name))
+ if (name == droid_name):
+ self.log.info(
+ "Successfully found android device advertising! name, id: {}, {}"
+ .format(name, did))
+ res = True
+
+ except Exception:
+ self.log.error("Failed to discover Android device")
+ res = False
+
+ connect = self.fd.ble_lib.bleConnectToPeripheral(did)
+ self.log.info("Connecting returned status: {}".format(connect))
+
+ services = self.fd.ble_lib.bleListServices(did)
+ self.log.info("Listing services returned: {}".format(services))
+
+ dconnect = self.fd.ble_lib.bleDisconnectPeripheral(did)
+ self.log.info("Disconnect status: {}".format(dconnect))
+
+ #Print clients to validate results are saved
+ self.fd.print_clients()
+
+ #Stop android advertising + cleanup sl4f
+ self.ad.droid.bleStopBleAdvertising(adv_callback)
+
+ return res
+
+ # Currently, this test doesn't work. The android device does not scan
+ # TODO(): Debug android scan
+ @BluetoothBaseTest.bt_test_wrap
+ def test_fuchsia_adv_android_scan(self):
+ #Initialize advertising on fuchsia device with name and interval
+ fuchsia_name = "testADV123"
+ adv_data = {"name": fuchsia_name}
+ interval = 1000
+
+ #Start advertising
+ self.fd.ble_lib.bleStartBleAdvertising(adv_data, interval)
+
+ # Initialize scan on android device with scan settings + callback
+ filter_list = self.ad.droid.bleGenFilterList()
+ self.ad.droid.bleSetScanFilterDeviceName(fuchsia_name)
+ self.ad.droid.bleSetScanSettingsScanMode(
+ ble_scan_settings_modes['low_latency'])
+ scan_settings = self.ad.droid.bleBuildScanSetting()
+ scan_callback = self.ad.droid.bleGenScanCallback()
+ self.ad.droid.bleBuildScanFilter(filter_list)
+ self.ad.droid.bleStartBleScan(filter_list, scan_settings,
+ scan_callback)
+ event_name = scan_result.format(scan_callback)
+ try:
+ event = self.ad.ed.pop_event(event_name, self.default_timeout)
+ self.log.info("Found scan result: {}".format(
+ pprint.pformat(event)))
+ except Exception:
+ self.log.error("Didn't find any scan results.")
+ return False
+ finally:
+ self.fd.ble_lib.bleStopBleAdvertising()
+ self.ad.droid.bleStopBleScan(scan_callback)
+ # TODO(): Validate result
+ return True
diff --git a/acts/tests/google/ble/fuchsia_tests/BleFuchsiaTest.py b/acts/tests/google/ble/fuchsia_tests/BleFuchsiaTest.py
index b7752a3..021a8bc 100644
--- a/acts/tests/google/ble/fuchsia_tests/BleFuchsiaTest.py
+++ b/acts/tests/google/ble/fuchsia_tests/BleFuchsiaTest.py
@@ -13,11 +13,12 @@
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
-"""This script shows simple examples of how to get started with bluetooth
+"""This script shows simple examples of how to get started with bluetooth
low energy testing in acts.
"""
import pprint
+import random
import time
from acts.controllers import android_device
@@ -36,49 +37,48 @@
def __init__(self, controllers):
BluetoothBaseTest.__init__(self, controllers)
- self.droid, self.droid_ed = (self.android_devices[0].droid,
- self.android_devices[0].ed)
- self.fuchsia = self.fuchsia_devices[0]
- self.log.info("There are: %d fuchsia and %d android devices. " % (len(
- self.fuchsia_devices), len(self.android_devices)))
- # An optional function. This overrides the default
- # on_exception in base_test. If the test throws an
- # unexpected exception, you can customise it.
- def on_exception(self, test_name, begin_time):
- self.log.debug(
- "Test {} failed. Gathering bugreport and btsnoop logs".format(
- test_name))
- android_devices.take_bug_reports(self.android_devices, test_name,
- begin_time)
+ if (len(self.fuchsia_devices) < 2):
+ self.log.error("BleFuchsiaTest Init: Not enough fuchsia devices.")
+ self.log.info("Running testbed setup with two fuchsia devices")
+ self.fuchsia_adv = self.fuchsia_devices[0]
+ self.fuchsia_scan = self.fuchsia_devices[1]
- def _start_generic_advertisement_include_device_name(self):
- self.droid.bleSetAdvertiseDataIncludeDeviceName(True)
- advertise_data = self.droid.bleBuildAdvertiseData()
- advertise_settings = self.droid.bleBuildAdvertiseSettings()
- advertise_callback = self.droid.bleGenBleAdvertiseCallback()
- self.droid.bleStartBleAdvertising(advertise_callback, advertise_data,
- advertise_settings)
- self.droid_ed.pop_event(
- adv_succ.format(advertise_callback), self.default_timeout)
- self.active_adv_callback_list.append(advertise_callback)
- return advertise_callback
+ def teardown_test(self):
+ self.fuchsia_adv.clean_up()
+ self.fuchsia_scan.clean_up()
- # Basic test for android device as advertiser and fuchsia device as scanner
- # Returns True if scan result has an entry corresponding to sample_android_name
@BluetoothBaseTest.bt_test_wrap
- def test_fuchsia_scan_android_adv(self):
- sample_android_name = "Pixel1234"
- self.droid.bluetoothSetLocalName(sample_android_name)
- adv_callback = self._start_generic_advertisement_include_device_name()
- droid_name = self.droid.bluetoothGetLocalName()
- self.log.info("Android device name: {}".format(droid_name))
+ def test_fuchsia_publish_service(self):
+ service_id = 0
+ service_primary = True
+ # Random uuid
+ service_type = "0000180f-0000-1000-8000-00805fffffff"
- # Generate input params for command
- scan_time = 30000
- scan_filter = {"name_substring": "Pixel"}
- scan_count = None
- scan_res = self.fuchsia.ble_lib.bleStartBleScan(
+ # Generate a random key for sl4f storage of proxy key
+ service_proxy_key = "SProxy" + str(random.randint(0, 1000000))
+ res = self.fuchsia.ble_lib.blePublishService(
+ service_id, service_primary, service_type, service_proxy_key)
+ self.log.info("Publish result: {}".format(res))
+
+ return True
+
+ @BluetoothBaseTest.bt_test_wrap
+ def test_fuchsia_scan_fuchsia_adv(self):
+ # Initialize advertising on fuchsia device with name and interval
+ fuchsia_name = "testADV1234"
+ adv_data = {"name": fuchsia_name}
+ interval = 1000
+
+ # Start advertising
+ self.fuchsia_adv.ble_lib.bleStartBleAdvertising(adv_data, interval)
+ self.log.info("Fuchsia advertising name: {}".format(fuchsia_name))
+
+ # Create the scan filter (based on advertising name) for scan and run scan for 30 seconds
+ scan_time = 30000 # in ms
+ scan_filter = {"name_substring": fuchsia_name}
+ scan_count = 1
+ scan_res = self.fuchsia_scan.ble_lib.bleStartBleScan(
scan_time, scan_filter, scan_count)
# Get the result and validate
@@ -94,54 +94,84 @@
if (name):
self.log.info(
"Discovered device with name: {}".format(name))
- if (name == droid_name):
+ if (name == fuchsia_name):
self.log.info(
- "Successfully found android device advertising! {}".
- format(name))
+ "Successfully found Fuchsia device advertising! name, id: {}, {}".
+ format(name, did))
res = True
+
except:
- self.log.error("Failed to discovered android device")
+ self.log.error("Failed to discovered fuchsia device")
res = False
- #Stop android advertising + cleanup sl4f
- self.droid.bleStopBleAdvertising(adv_callback)
- self.fuchsia.clean_up()
+ # Stop advertising
+ self.fuchsia_adv.ble_lib.bleStopBleAdvertising()
return res
- # Currently, this test doesn't work. The android device does not scan
- # TODO(aniramakri): Debug android scan
@BluetoothBaseTest.bt_test_wrap
- def test_fuchsia_adv_android_scan(self):
- #Initialize advertising on fuchsia device with name and interval
- fuchsia_name = "testADV123"
+ def test_fuchsia_gatt_fuchsia_periph(self):
+ # Create random service with id, primary, and uuid
+ service_id = 3
+ service_primary = True
+ # Random uuid
+ service_type = "0000180f-0000-1000-8000-00805fffffff"
+
+ # Generate a random key for sl4f storage of proxy key
+ service_proxy_key = "SProxy" + str(random.randint(0, 1000000))
+ res = self.fuchsia_adv.ble_lib.blePublishService(
+ service_id, service_primary, service_type, service_proxy_key)
+ self.log.info("Publish result: {}".format(res))
+
+ # Initialize advertising on fuchsia device with name and interval
+ fuchsia_name = "testADV1234"
adv_data = {"name": fuchsia_name}
interval = 1000
- #Start advertising
- self.fuchsia.ble_lib.bleStartBleAdvertising(adv_data, interval)
+ # Start advertising
+ self.fuchsia_adv.ble_lib.bleStartBleAdvertising(adv_data, interval)
+ self.log.info("Fuchsia advertising name: {}".format(fuchsia_name))
- # Initialize scan on android device which scan settings + callback
- filter_list = self.droid.bleGenFilterList()
- self.droid.bleSetScanFilterDeviceName(fuchsia_name)
- scan_settings = self.droid.bleBuildScanSetting()
- scan_callback = self.droid.bleGenScanCallback()
- self.droid.bleBuildScanFilter(filter_list)
- self.droid.bleStartBleScan(filter_list, scan_settings, scan_callback)
- self.active_scan_callback_list.append(scan_callback)
- event_name = scan_result.format(scan_callback)
+ # Create the scan filter (based on advertising name) for scan and run scan for 30 seconds
+ scan_time = 30000 # in ms
+ scan_filter = {"name_substring": fuchsia_name}
+ scan_count = 1
+ scan_res = self.fuchsia_scan.ble_lib.bleStartBleScan(
+ scan_time, scan_filter, scan_count)
+
+ # Get the result and validate
+ self.log.info("Scan res: {}".format(scan_res))
+
try:
- event = self.droid.ed.pop_event(event_name, self.default_timeout)
- self.log.info("Found scan result: {}".format(
- pprint.pformat(event)))
+ scan_res = scan_res["result"]
+ #Validate result
+ res = False
+ for device in scan_res:
+ name, did, connectable = device["name"], device["id"], device[
+ "connectable"]
+ if (name):
+ self.log.info(
+ "Discovered device with name: {}".format(name))
+ if (name == fuchsia_name):
+ self.log.info(
+ "Successfully found fuchsia device advertising! name, id: {}, {}".
+ format(name, did))
+ res = True
- # Stop fuchsia advertise
- self.fuchsia.ble_lib.bleStopBleAdvertising()
- except Exception:
- self.log.error("Didn't find any scan results.")
- # Stop fuchsia advertise
- self.fuchsia.ble_lib.bleStopBleAdvertising()
- return False
+ except:
+ self.log.error("Failed to discovered fuchsia device")
+ res = False
- # TODO(aniramakri): Validate result
- return True
+ connect = self.fuchsia_scan.ble_lib.bleConnectToPeripheral(did)
+ self.log.info("Connecting returned status: {}".format(connect))
+
+ services = self.fuchsia_scan.ble_lib.bleListServices(did)
+ self.log.info("Listing services returned: {}".format(services))
+
+ dconnect = self.fuchsia_scan.ble_lib.bleDisconnectPeripheral(did)
+ self.log.info("Disconnect status: {}".format(dconnect))
+
+ # Stop fuchsia advertising
+ self.fuchsia_adv.ble_lib.bleStopBleAdvertising()
+
+ return res
diff --git a/acts/tests/google/bt/car_bt/BtCarMapMceTest.py b/acts/tests/google/bt/car_bt/BtCarMapMceTest.py
index 97414d6..93dbab8 100644
--- a/acts/tests/google/bt/car_bt/BtCarMapMceTest.py
+++ b/acts/tests/google/bt/car_bt/BtCarMapMceTest.py
@@ -55,12 +55,19 @@
return True
def setup_test(self):
+ for dut in self.android_devices:
+ toggle_airplane_mode_by_adb(self.log, dut, False)
+
if not bt_test_utils.connect_pri_to_sec(
self.MCE, self.MSE, set([BtEnum.BluetoothProfile.MAP_MCE.value])):
return False
# Grace time for connection to complete.
time.sleep(3)
+ def teardown_class(self):
+ for dut in self.android_devices:
+ toggle_airplane_mode_by_adb(self.log, dut, False)
+
def message_delivered(self, device):
try:
self.MCE.ed.pop_event(EventSmsDeliverSuccess, 15)
diff --git a/acts/tests/google/bt/car_bt/BtCarMediaConnectionTest.py b/acts/tests/google/bt/car_bt/BtCarMediaConnectionTest.py
index b54c646..3f6e495 100644
--- a/acts/tests/google/bt/car_bt/BtCarMediaConnectionTest.py
+++ b/acts/tests/google/bt/car_bt/BtCarMediaConnectionTest.py
@@ -23,8 +23,8 @@
from acts.test_utils.bt.BluetoothBaseTest import BluetoothBaseTest
from acts.test_utils.bt import bt_test_utils
from acts.test_utils.car import car_bt_utils
-from acts.test_utils.car import car_media_utils
from acts.test_utils.bt import BtEnum
+from acts.test_utils.bt.bt_test_utils import is_a2dp_connected
class BtCarMediaConnectionTest(BluetoothBaseTest):
@@ -58,26 +58,6 @@
self.SNK, self.SRC, [BtEnum.BluetoothProfile.A2DP_SINK],
BtEnum.BluetoothPriorityLevel.PRIORITY_ON)
- def is_a2dp_connected(self, device1, device2):
- """
- Convenience Function to see if the 2 devices are connected on
- A2dp.
- ToDo: Move to bt_test_utils if used in more places.
- Args:
- device1: Device 1
- device2: Device 2
- Returns:
- True if Connected
- False if Not connected
- """
- devices = device1.droid.bluetoothA2dpSinkGetConnectedDevices()
- for device in devices:
- self.device1.log.info("A2dp Connected device {}".format(device[
- "name"]))
- if (device["address"] == device2.droid.bluetoothGetLocalAddress()):
- return True
- return False
-
@test_tracker_info(uuid='1934c0d5-3fa3-43e5-a91f-2c8a4424f5cd')
@BluetoothBaseTest.bt_test_wrap
def test_a2dp_connect_disconnect_from_src(self):
@@ -99,7 +79,7 @@
Priority: 0
"""
- if (car_media_utils.is_a2dp_connected(self.log, self.SNK, self.SRC)):
+ if (is_a2dp_connected(self.SNK, self.SRC)):
self.log.info("Already Connected")
else:
if (not bt_test_utils.connect_pri_to_sec(
@@ -120,7 +100,7 @@
return False
# Logging if we connected right back, since that happens sometimes
# Not failing the test if it did though
- if (car_media_utils.is_a2dp_connected(self.log, self.SNK, self.SRC)):
+ if (is_a2dp_connected(self.SNK, self.SRC)):
self.SNK.log.error("Still connected after a disconnect")
return True
@@ -147,7 +127,7 @@
Priority: 0
"""
# Connect
- if car_media_utils.is_a2dp_connected(self.log, self.SNK, self.SRC):
+ if is_a2dp_connected(self.SNK, self.SRC):
self.log.info("Already Connected")
else:
if (not bt_test_utils.connect_pri_to_sec(
@@ -169,6 +149,6 @@
return False
# Logging if we connected right back, since that happens sometimes
# Not failing the test if it did though
- if car_media_utils.is_a2dp_connected(self.log, self.SNK, self.SRC):
+ if is_a2dp_connected(self.SNK, self.SRC):
self.SNK.log.error("Still connected after a disconnect")
return True
diff --git a/acts/tests/google/bt/car_bt/BtCarMediaPassthroughTest.py b/acts/tests/google/bt/car_bt/BtCarMediaPassthroughTest.py
index cb4c3ec..031834d 100644
--- a/acts/tests/google/bt/car_bt/BtCarMediaPassthroughTest.py
+++ b/acts/tests/google/bt/car_bt/BtCarMediaPassthroughTest.py
@@ -25,8 +25,9 @@
from acts.test_utils.bt import bt_test_utils
from acts.test_utils.bt import BtEnum
from acts.test_utils.car import car_media_utils
-from acts.utils import exe_cmd
-from acts.controllers import adb
+from acts.test_utils.bt.bt_test_utils import is_a2dp_connected
+from acts.keys import Config
+
DEFAULT_WAIT_TIME = 1.0
DEFAULT_EVENT_TIMEOUT = 1.0
@@ -126,8 +127,7 @@
if not super(BtCarMediaPassthroughTest, self).teardown_test():
return False
# If A2dp connection was disconnected as part of the test, connect it back
- if not (car_media_utils.is_a2dp_connected(self.log, self.SNK,
- self.SRC)):
+ if not (is_a2dp_connected(self.SNK, self.SRC)):
result = bt_test_utils.connect_pri_to_sec(
self.SRC, self.SNK, set([BtEnum.BluetoothProfile.A2DP.value]))
if not result:
@@ -235,8 +235,7 @@
Priority: 0
"""
- if not (car_media_utils.is_a2dp_connected(self.log, self.SNK,
- self.SRC)):
+ if not (is_a2dp_connected(self.SNK, self.SRC)):
self.SNK.log.error('No A2dp Connection')
return False
diff --git a/acts/tests/google/bt/car_bt/BtCarPbapTest.py b/acts/tests/google/bt/car_bt/BtCarPbapTest.py
index e598b96..ab605a2 100644
--- a/acts/tests/google/bt/car_bt/BtCarPbapTest.py
+++ b/acts/tests/google/bt/car_bt/BtCarPbapTest.py
@@ -485,3 +485,4 @@
bt_contacts_utils.erase_contacts(self.pse)
bt_contacts_utils.erase_contacts(self.pse2)
return pse1_matches and pse2_matches and pse1andpse2_matches
+
diff --git a/acts/tests/google/bt/pts/cmd_input.py b/acts/tests/google/bt/pts/cmd_input.py
index 5a86ea2..cdb25a7 100644
--- a/acts/tests/google/bt/pts/cmd_input.py
+++ b/acts/tests/google/bt/pts/cmd_input.py
@@ -1058,6 +1058,14 @@
except Exception as err:
self.log.info(FAILURE.format(cmd, err))
+ def do_hid_set_protocol_mode(self, line):
+ """HID set protocol mode (0 == report, 1 == boot, 225 == unsupported)"""
+ cmd = "Set protocol mode (0 == report, 1 == boot, 225 == unsupported)"
+ try:
+ self.pri_dut.droid.bluetoothHidSetProtocolMode(self.mac_addr, int(line))
+ except Exception as err:
+ self.log.info(FAILURE.format(cmd, err))
+
"""End HID wrappers"""
"""Begin carkit test wrappers"""
diff --git a/acts/tests/google/net/ApfCountersTest.py b/acts/tests/google/net/ApfCountersTest.py
index 3f637f3..abee138 100755
--- a/acts/tests/google/net/ApfCountersTest.py
+++ b/acts/tests/google/net/ApfCountersTest.py
@@ -13,11 +13,6 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-import os
-
-import acts.base_test
-import acts.test_utils.wifi.wifi_test_utils as wutils
-
from acts import asserts
from acts.test_decorators import test_tracker_info
from acts.test_utils.wifi.WifiBaseTest import WifiBaseTest
@@ -25,6 +20,13 @@
from acts.test_utils.tel.tel_test_utils import stop_adb_tcpdump
from acts.test_utils.wifi import wifi_test_utils as wutils
+import acts.base_test
+import acts.test_utils.wifi.wifi_test_utils as wutils
+
+import copy
+import os
+import random
+import time
WifiEnums = wutils.WifiEnums
@@ -41,6 +43,8 @@
def __init__(self, controllers):
WifiBaseTest.__init__(self, controllers)
+ self.tests = ("test_IPv6_RA_packets",
+ "test_IPv6_RA_with_RTT", )
def setup_class(self):
self.dut = self.android_devices[0]
@@ -121,9 +125,12 @@
"""
# get mac address of the dut
ap = self.access_points[0]
- wutils.connect_to_wifi_network(self.dut, self.wpapsk_5g)
+ wifi_network = copy.deepcopy(self.wpapsk_5g)
+ wifi_network['meteredOverride'] = 1
+ wutils.connect_to_wifi_network(self.dut, wifi_network)
mac_addr = self.dut.droid.wifiGetConnectionInfo()['mac_address']
self.log.info("mac_addr %s" % mac_addr)
+ time.sleep(30) # wait 30 sec before sending RAs
# get the current ra count
ra_count = self._get_icmp6intype134()
@@ -158,3 +165,38 @@
asserts.assert_true(ra_count_latest == ra_count + 1,
"Device did not accept new RA after 1/6th time "
"interval. Device dropped a valid RA in sequence.")
+
+ @test_tracker_info(uuid="d2a0aff0-048c-475f-9bba-d90d8d9ebae3")
+ def test_IPv6_RA_with_RTT(self):
+ """Test if the device filters IPv6 RA packets with different re-trans time
+
+ Steps:
+ 1. Get the current RA count
+ 2. Send 400 packets with different re-trans time
+ 3. Verify that RA count increased by 400
+ 4. Verify internet connectivity
+ """
+ pkt_num = 400
+
+ # get mac address of the dut
+ ap = self.access_points[0]
+ wutils.connect_to_wifi_network(self.dut, self.wpapsk_5g)
+ mac_addr = self.dut.droid.wifiGetConnectionInfo()['mac_address']
+ self.log.info("mac_addr %s" % mac_addr)
+ time.sleep(30) # wait 30 sec before sending RAs
+
+ # get the current ra count
+ ra_count = self._get_icmp6intype134()
+
+ # send RAs with different re-trans times
+ for _ in range(pkt_num):
+ rtt = random.randint(10, 10000)
+ ap.send_ra('wlan1', mac_addr, 0, 1, rtt=rtt)
+
+ # get the new RA count
+ new_ra_count = self._get_icmp6intype134()
+ asserts.assert_true(new_ra_count == ra_count + pkt_num,
+ "Device did not accept all RAs")
+
+ # verify if internet connectivity works after sending RA packets
+ wutils.validate_connection(self.dut)
diff --git a/acts/tests/google/net/DhcpServerTest.py b/acts/tests/google/net/DhcpServerTest.py
index 4cc2b91..0072958 100644
--- a/acts/tests/google/net/DhcpServerTest.py
+++ b/acts/tests/google/net/DhcpServerTest.py
@@ -6,12 +6,15 @@
from scapy.all import *
from threading import Event
from threading import Thread
+import random
import time
import warnings
+CLIENT_PORT = 68
SERVER_PORT = 67
BROADCAST_MAC = 'ff:ff:ff:ff:ff:ff'
+INET4_ANY = '0.0.0.0'
NETADDR_PREFIX = '192.168.42.'
OTHER_NETADDR_PREFIX = '192.168.43.'
NETADDR_BROADCAST = '255.255.255.255'
@@ -23,6 +26,7 @@
ACK = 5
NAK = 6
+
pmc_base_cmd = (
"am broadcast -a com.android.pmc.action.AUTOPOWER --es PowerAction ")
start_pmc_cmd = (
@@ -41,7 +45,7 @@
conf.checkIPaddr = 0
conf.checkIPsrc = 0
# Allow using non-67 server ports as long as client uses 68
- bind_layers(UDP, BOOTP, dport=68)
+ bind_layers(UDP, BOOTP, dport=CLIENT_PORT)
self.dut.adb.shell(start_pmc_cmd)
self.dut.adb.shell("setprop log.tag.PMC VERBOSE")
@@ -54,12 +58,13 @@
thread = Thread(target=self._sniff_arp, args=(self.stop_arp,))
thread.start()
- # Discover server IP
+ # Discover server IP and device hwaddr
hwaddr = self._next_hwaddr()
- resp = self._get_response(make_discover(hwaddr))
+ resp = self._get_response(self._make_discover(hwaddr))
asserts.assert_false(None == resp,
"Device did not reply to first DHCP discover")
self.server_addr = getopt(resp, 'server_id')
+ self.dut_hwaddr = resp.getlayer(Ether).src
asserts.assert_false(None == self.server_addr,
"DHCP server did not specify server identifier")
# Ensure that we don't depend on assigned route/gateway on the host
@@ -94,6 +99,7 @@
self.log.info("Starting USB Tethering")
dut.stop_services()
dut.adb.shell(pmc_start_usb_tethering_cmd)
+ self._wait_for_device(self.dut)
self.USB_TETHERED = True
def _stop_usb_tethering(self, dut):
@@ -151,88 +157,301 @@
@test_tracker_info(uuid="a8712769-977a-4ee1-902f-90b3ba30b40c")
def test_config_assumptions(self):
- resp = self._get_response(make_discover(self.hwaddr))
+ resp = self._get_response(self._make_discover(self.hwaddr))
asserts.assert_false(None == resp, "Device did not reply to discover")
asserts.assert_true(get_yiaddr(resp).startswith(NETADDR_PREFIX),
"Server does not use expected prefix")
+ @test_tracker_info(uuid="e3761689-7d64-46b1-97ce-15f315eaf568")
+ def test_discover_broadcastbit(self):
+ resp = self._get_response(
+ self._make_discover(self.hwaddr, bcastbit=True))
+ self._assert_offer(resp)
+ self._assert_broadcast(resp)
+
+ @test_tracker_info(uuid="30a7ea7c-c20f-4c46-aaf2-96f19d8f8191")
+ def test_discover_bootpfields(self):
+ discover = self._make_discover(self.hwaddr)
+ resp = self._get_response(discover)
+ self._assert_offer(resp)
+ self._assert_unicast(resp)
+ bootp = assert_bootp_response(resp, discover)
+ asserts.assert_equal(INET4_ANY, bootp.ciaddr)
+ asserts.assert_equal(self.server_addr, bootp.siaddr)
+ asserts.assert_equal(INET4_ANY, bootp.giaddr)
+ asserts.assert_equal(self.hwaddr, get_chaddr(bootp))
+
+ @test_tracker_info(uuid="593f4051-516d-44fa-8834-7d485362f182")
+ def test_discover_relayed_broadcastbit(self):
+ giaddr = NETADDR_PREFIX + '123'
+ resp = self._get_response(
+ self._make_discover(self.hwaddr, giaddr=giaddr, bcastbit=True))
+ self._assert_offer(resp)
+ self._assert_relayed(resp, giaddr)
+ self._assert_broadcastbit(resp)
+
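+ # Sends a DISCOVER carrying a parameter request list and checks that the
+ # OFFER echoes the requested options in order and omits the unwanted ones.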
+ def _run_discover_paramrequestlist(self, params, unwanted_params):
+ params_opt = make_paramrequestlist_opt(params)
+ resp = self._get_response(
+ self._make_discover(self.hwaddr, options=[params_opt]))
+
+ self._assert_offer(resp)
+ # List of requested params in response order
+ resp_opts = get_opt_labels(resp)
+ resp_requested_opts = [opt for opt in resp_opts if opt in params]
+ # All above params should be supported, order should be conserved
+ asserts.assert_equal(params, resp_requested_opts)
+ asserts.assert_equal(0, len(set(resp_opts) & set(unwanted_params)))
+ return resp
+
+ @test_tracker_info(uuid="00a8a3f6-f143-47ff-a79b-482c607fb5b8")
+ def test_discover_paramrequestlist(self):
+ resp = self._run_discover_paramrequestlist(
+ ['subnet_mask', 'broadcast_address', 'router', 'name_server'],
+ unwanted_params=[])
+ for opt in ['broadcast_address', 'router', 'name_server']:
+ asserts.assert_true(getopt(resp, opt).startswith(NETADDR_PREFIX),
+ opt + ' does not start with ' + NETADDR_PREFIX)
+
+ subnet_mask = getopt(resp, 'subnet_mask')
+ asserts.assert_true(subnet_mask.startswith('255.255.'),
+ 'Unexpected subnet mask for /16+: ' + subnet_mask)
+
+ @test_tracker_info(uuid="d1aad4a3-9eab-4900-aa6a-5b82a4a64f46")
+ def test_discover_paramrequestlist_rev(self):
+ # RFC2132 #9.8: "The DHCP server is not required to return the options
+ # in the requested order, but MUST try to insert the requested options
+ # in the order requested"
+ asserts.skip('legacy behavior not compliant: fixed order used')
+ self._run_discover_paramrequestlist(
+ ['name_server', 'router', 'broadcast_address', 'subnet_mask'],
+ unwanted_params=[])
+
+ @test_tracker_info(uuid="e3ae6335-8cc7-4bf1-bb58-67646b727f2b")
+ def test_discover_paramrequestlist_unwanted(self):
+ asserts.skip('legacy behavior always sends all parameters')
+ self._run_discover_paramrequestlist(['router', 'name_server'],
+ unwanted_params=['broadcast_address', 'subnet_mask'])
+
+ def _assert_renews(self, request, addr, exp_time, resp_type=ACK):
+ # Sleep to test lease time renewal
+ time.sleep(3)
+ resp = self._get_response(request)
+ self._assert_type(resp, resp_type)
+ asserts.assert_equal(addr, get_yiaddr(resp))
+ remaining_lease = getopt(resp, 'lease_time')
+ # Lease renewed: waited for 3s, lease time not decreased by more than 2
+ asserts.assert_true(remaining_lease >= exp_time - 2,
+ 'Lease not renewed')
+ # Lease times should be consistent across offers/renewals
+ asserts.assert_true(remaining_lease <= exp_time + 2,
+ 'Lease time inconsistent')
+ return resp
+
@test_tracker_info(uuid="d6b598b7-f443-4b5a-ba80-4af5d211cade")
def test_discover_assigned_ownaddress(self):
addr, siaddr, resp = self._request_address(self.hwaddr)
lease_time = getopt(resp, 'lease_time')
server_id = getopt(resp, 'server_id')
- asserts.assert_true(lease_time > 10, "Lease time is unreasonably short")
- asserts.assert_false(addr == '0.0.0.0', "Assigned address is empty")
+ asserts.assert_true(lease_time >= 60, "Lease time is too short")
+ asserts.assert_false(addr == INET4_ANY, "Assigned address is empty")
# Wait to test lease expiration time change
- time.sleep(2)
+ time.sleep(3)
# New discover, same address
- resp = self._get_response(make_discover(self.hwaddr))
- # Lease time renewed: exptime not decreased
- asserts.assert_equal(lease_time, getopt(resp, 'lease_time'))
- asserts.assert_equal(addr, get_yiaddr(resp))
+ resp = self._assert_renews(self._make_discover(self.hwaddr),
+ addr, lease_time, resp_type=OFFER)
+ self._assert_unicast(resp, get_yiaddr(resp))
+ self._assert_broadcastbit(resp, isset=False)
@test_tracker_info(uuid="cbb07d77-912b-4269-bbbc-adba99779587")
def test_discover_assigned_otherhost(self):
addr, siaddr, _ = self._request_address(self.hwaddr)
# New discover, same address, different client
- resp = self._get_response(make_discover(self.other_hwaddr,
+ resp = self._get_response(self._make_discover(self.other_hwaddr,
[('requested_addr', addr)]))
self._assert_offer(resp)
asserts.assert_false(get_yiaddr(resp) == addr,
"Already assigned address offered")
+ self._assert_unicast(resp, get_yiaddr(resp))
+ self._assert_broadcastbit(resp, isset=False)
@test_tracker_info(uuid="3d2b3d2f-eb5f-498f-b887-3b4638cebf14")
def test_discover_requestaddress(self):
addr = NETADDR_PREFIX + '200'
- resp = self._get_response(make_discover(self.hwaddr,
+ resp = self._get_response(self._make_discover(self.hwaddr,
[('requested_addr', addr)]))
self._assert_offer(resp)
asserts.assert_equal(get_yiaddr(resp), addr)
# Lease not committed: can request again
- resp = self._get_response(make_discover(self.other_hwaddr,
+ resp = self._get_response(self._make_discover(self.other_hwaddr,
[('requested_addr', addr)]))
self._assert_offer(resp)
asserts.assert_equal(get_yiaddr(resp), addr)
- def _assert_renews(self, request, addr, expTime):
- time.sleep(2)
- resp = self._get_response(request)
- self._assert_ack(resp)
- asserts.assert_equal(addr, get_yiaddr(resp))
- # Lease time renewed
- asserts.assert_equal(expTime, getopt(resp, 'lease_time'))
+ @test_tracker_info(uuid="5ffd9d25-304e-434b-bedb-56ccf27dcebd")
+ def test_discover_requestaddress_wrongsubnet(self):
+ addr = OTHER_NETADDR_PREFIX + '200'
+ resp = self._get_response(
+ self._make_discover(self.hwaddr, [('requested_addr', addr)]))
+ self._assert_offer(resp)
+ self._assert_unicast(resp)
+ asserts.assert_false(get_yiaddr(resp) == addr,
+ 'Server offered invalid address')
- @test_tracker_info(uuid="ce42ba57-07be-427b-9cbd-5535c62b0120")
- def test_request_wrongnet(self):
- resp = self._get_response(make_request(self.hwaddr,
- OTHER_NETADDR_PREFIX + '1', None))
- self._assert_nak(resp)
+ @test_tracker_info(uuid="f7d6a92f-9386-4b65-b6c1-d0a3f11213bf")
+ def test_discover_giaddr_outside_subnet(self):
+ giaddr = OTHER_NETADDR_PREFIX + '201'
+ resp = self._get_response(
+ self._make_discover(self.hwaddr, giaddr=giaddr))
+ asserts.assert_equal(resp, None)
+
+ @test_tracker_info(uuid="1348c79a-9203-4bb8-b33b-af80bacd17b1")
+ def test_discover_srcaddr_outside_subnet(self):
+ srcaddr = OTHER_NETADDR_PREFIX + '200'
+ resp = self._get_response(
+ self._make_discover(self.hwaddr, ip_src=srcaddr))
+ self._assert_offer(resp)
+ asserts.assert_false(srcaddr == get_yiaddr(resp),
+ 'Server offered invalid address')
+
+ @test_tracker_info(uuid="a03bb783-8665-4c66-9c0c-1bb02ddca07e")
+ def test_discover_requestaddress_giaddr_outside_subnet(self):
+ addr = NETADDR_PREFIX + '200'
+ giaddr = OTHER_NETADDR_PREFIX + '201'
+ req = self._make_discover(self.hwaddr, [('requested_addr', addr)],
+ ip_src=giaddr, giaddr=giaddr)
+ resp = self._get_response(req)
+ asserts.assert_equal(resp, None)
+
+ @test_tracker_info(uuid="725956af-71e2-45d8-b8b3-402d21bfc7db")
+ def test_discover_knownaddress_giaddr_outside_subnet(self):
+ addr, siaddr, _ = self._request_address(self.hwaddr)
+
+ # New discover, same client, through relay in invalid subnet
+ giaddr = OTHER_NETADDR_PREFIX + '200'
+ resp = self._get_response(
+ self._make_discover(self.hwaddr, giaddr=giaddr))
+ asserts.assert_equal(resp, None)
+
+ @test_tracker_info(uuid="2ee9d5b1-c15d-40c4-98e9-63202d1f1557")
+ def test_discover_knownaddress_giaddr_valid_subnet(self):
+ addr, siaddr, _ = self._request_address(self.hwaddr)
+
+ # New discover, same client, through relay in valid subnet
+ giaddr = NETADDR_PREFIX + '200'
+ resp = self._get_response(
+ self._make_discover(self.hwaddr, giaddr=giaddr))
+ self._assert_offer(resp)
+ self._assert_unicast(resp, giaddr)
+ self._assert_broadcastbit(resp, isset=False)
+
+ @test_tracker_info(uuid="f43105a5-633a-417a-8a07-39bc36c493e7")
+ def test_request_unicast(self):
+ addr, siaddr, resp = self._request_address(self.hwaddr, bcast=False)
+ self._assert_unicast(resp, addr)
+
+ @test_tracker_info(uuid="09f3c1c4-1202-4f85-a965-4d86aee069f3")
+ def test_request_bootpfields(self):
+ req_addr = NETADDR_PREFIX + '200'
+ req = self._make_request(self.hwaddr, req_addr, self.server_addr)
+ resp = self._get_response(req)
+ self._assert_ack(resp)
+ bootp = assert_bootp_response(resp, req)
+ asserts.assert_equal(INET4_ANY, bootp.ciaddr)
+ asserts.assert_equal(self.server_addr, bootp.siaddr)
+ asserts.assert_equal(INET4_ANY, bootp.giaddr)
+ asserts.assert_equal(self.hwaddr, get_chaddr(bootp))
@test_tracker_info(uuid="ec00d268-80cb-4be5-9771-2292cc7d2e18")
- def test_request_inuse(self):
+ def test_request_selecting_inuse(self):
addr, siaddr, _ = self._request_address(self.hwaddr)
- res = self._get_response(make_request(self.other_hwaddr, addr, None))
- self._assert_nak(res)
+ new_req = self._make_request(self.other_hwaddr, addr, siaddr)
+ resp = self._get_response(new_req)
+ self._assert_nak(resp)
+ self._assert_broadcast(resp)
+ bootp = assert_bootp_response(resp, new_req)
+ asserts.assert_equal(INET4_ANY, bootp.ciaddr)
+ asserts.assert_equal(INET4_ANY, bootp.yiaddr)
+ asserts.assert_equal(INET4_ANY, bootp.siaddr)
+ asserts.assert_equal(INET4_ANY, bootp.giaddr)
+ asserts.assert_equal(self.other_hwaddr, get_chaddr(bootp))
+ asserts.assert_equal(
+ ['message-type', 'server_id', 56, 'end'], # 56 is "message" opt
+ get_opt_labels(bootp))
+ asserts.assert_equal(self.server_addr, getopt(bootp, 'server_id'))
- @test_tracker_info(uuid="263c91b9-cfe9-4f21-985d-b7046df80528")
- def test_request_initreboot(self):
+ @test_tracker_info(uuid="0643c179-3542-4297-9b06-8d86ff785e9c")
+ def test_request_selecting_wrongsiaddr(self):
+ addr = NETADDR_PREFIX + '200'
+ wrong_siaddr = NETADDR_PREFIX + '201'
+ asserts.assert_false(wrong_siaddr == self.server_addr,
+ 'Test assumption not met: server addr is ' + wrong_siaddr)
+ resp = self._get_response(
+ self._make_request(self.hwaddr, addr, siaddr=wrong_siaddr))
+ asserts.assert_true(resp == None,
+ 'Received response for request with incorrect siaddr')
+
+ @test_tracker_info(uuid="676beab2-4af8-4bf0-a4ad-c7626ae5987f")
+ def test_request_selecting_giaddr_outside_subnet(self):
+ addr = NETADDR_PREFIX + '200'
+ giaddr = OTHER_NETADDR_PREFIX + '201'
+ resp = self._get_response(
+ self._make_request(self.hwaddr, addr, siaddr=self.server_addr,
+ giaddr=giaddr))
+ asserts.assert_equal(resp, None)
+
+ @test_tracker_info(uuid="fe17df0c-2f41-416f-bb76-d75b74b63c0f")
+ def test_request_selecting_hostnameupdate(self):
+ addr = NETADDR_PREFIX + '123'
+ hostname1 = b'testhostname1'
+ hostname2 = b'testhostname2'
+ req = self._make_request(self.hwaddr, None, None,
+ options=[
+ ('requested_addr', addr),
+ ('server_id', self.server_addr),
+ ('hostname', hostname1)])
+ resp = self._get_response(req)
+ self._assert_ack(resp)
+ self._assert_unicast(resp, addr)
+ asserts.assert_equal(hostname1, getopt(req, 'hostname'))
+
+ # Re-request with different hostname
+ setopt(req, 'hostname', hostname2)
+ resp = self._get_response(req)
+ self._assert_ack(resp)
+ self._assert_unicast(resp, addr)
+ asserts.assert_equal(hostname2, getopt(req, 'hostname'))
+
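+    # RFC 2131 section 4.3.2 distinguishes DHCPREQUEST client states by the
+    # fields they carry: INIT-REBOOT fills requested_addr but no server_id and
+    # leaves ciaddr empty, while RENEWING/REBINDING put the leased address in
+    # ciaddr and omit both server_id and requested_addr. The helpers below
+    # build requests for those states.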
+ def _run_initreboot(self, bcastbit):
addr, siaddr, resp = self._request_address(self.hwaddr)
exp = getopt(resp, 'lease_time')
- # siaddr NONE: init-reboot client state
- self._assert_renews(make_request(self.hwaddr, addr, None), addr, exp)
+ # init-reboot: siaddr is None
+ return self._assert_renews(self._make_request(
+ self.hwaddr, addr, siaddr=None, bcastbit=bcastbit), addr, exp)
+
+ @test_tracker_info(uuid="263c91b9-cfe9-4f21-985d-b7046df80528")
+ def test_request_initreboot(self):
+ resp = self._run_initreboot(bcastbit=False)
+ self._assert_unicast(resp)
+ self._assert_broadcastbit(resp, isset=False)
+
+ @test_tracker_info(uuid="f05dd60f-03dd-4e2b-8e58-80f4d752ad51")
+ def test_request_initreboot_broadcastbit(self):
+ resp = self._run_initreboot(bcastbit=True)
+ self._assert_broadcast(resp)
@test_tracker_info(uuid="5563c616-2136-47f6-9151-4e28cbfe797c")
def test_request_initreboot_nolease(self):
# RFC2131 #4.3.2
- asserts.skip("dnsmasq not compliant if --dhcp-authoritative set.")
+ asserts.skip("legacy behavior not compliant")
addr = NETADDR_PREFIX + '123'
- resp = self._get_response(make_request(self.hwaddr, addr, None))
+ resp = self._get_response(self._make_request(self.hwaddr, addr, None))
asserts.assert_equal(resp, None)
@test_tracker_info(uuid="da5c5537-cb38-4a2e-828f-44bc97976fe5")
@@ -242,24 +461,51 @@
asserts.assert_false(addr == otheraddr,
"Test assumption not met: server assigned " + otheraddr)
- resp = self._get_response(make_request(self.hwaddr, otheraddr, None))
+ resp = self._get_response(
+ self._make_request(self.hwaddr, otheraddr, siaddr=None))
self._assert_nak(resp)
+ self._assert_broadcast(resp)
- @test_tracker_info(uuid="68bfcb25-5873-41ad-ad0a-bf22781534ca")
- def test_request_rebinding(self):
+ @test_tracker_info(uuid="ce42ba57-07be-427b-9cbd-5535c62b0120")
+ def test_request_initreboot_wrongnet(self):
+ resp = self._get_response(self._make_request(self.hwaddr,
+ OTHER_NETADDR_PREFIX + '1', siaddr=None))
+ self._assert_nak(resp)
+ self._assert_broadcast(resp)
+
+ def _run_rebinding(self, bcastbit, giaddr=INET4_ANY):
addr, siaddr, resp = self._request_address(self.hwaddr)
exp = getopt(resp, 'lease_time')
- self._assert_renews(make_request(self.hwaddr, None, None, ciaddr=addr),
+ # Rebinding: no siaddr or reqaddr
+ resp = self._assert_renews(
+ self._make_request(self.hwaddr, reqaddr=None, siaddr=None,
+ ciaddr=addr, giaddr=giaddr, ip_src=addr,
+ ip_dst=NETADDR_BROADCAST, bcastbit=bcastbit),
addr, exp)
+ return resp, addr
+
+ @test_tracker_info(uuid="68bfcb25-5873-41ad-ad0a-bf22781534ca")
+ def test_request_rebinding(self):
+ resp, addr = self._run_rebinding(bcastbit=False)
+ self._assert_unicast(resp, addr)
+ self._assert_broadcastbit(resp, isset=False)
+
+ @test_tracker_info(uuid="4c591536-8062-40ec-ae12-1ebe7dcad8e2")
+ def test_request_rebinding_relayed(self):
+ giaddr = NETADDR_PREFIX + '123'
+ resp, _ = self._run_rebinding(bcastbit=False, giaddr=giaddr)
+ self._assert_relayed(resp, giaddr)
+ self._assert_broadcastbit(resp, isset=False)
@test_tracker_info(uuid="cee2668b-bd79-47d7-b358-8f9387d715b1")
def test_request_rebinding_inuse(self):
addr, siaddr, _ = self._request_address(self.hwaddr)
- resp = self._get_response(make_request(self.other_hwaddr, None, None,
- ciaddr=addr))
+ resp = self._get_response(self._make_request(
+ self.other_hwaddr, reqaddr=None, siaddr=None, ciaddr=addr))
self._assert_nak(resp)
+ self._assert_broadcast(resp)
@test_tracker_info(uuid="d95d69b5-ab9a-42f5-8dd0-b9b6a6d960cc")
def test_request_rebinding_wrongaddr(self):
@@ -268,8 +514,8 @@
asserts.assert_false(addr == otheraddr,
"Test assumption not met: server assigned " + otheraddr)
- resp = self._get_response(make_request(self.hwaddr, None, None,
- ciaddr=otheraddr))
+ resp = self._get_response(self._make_request(
+ self.hwaddr, reqaddr=None, siaddr=siaddr, ciaddr=otheraddr))
self._assert_nak(resp)
self._assert_broadcast(resp)
@@ -283,58 +529,360 @@
asserts.assert_false(addr == relayaddr,
"Test assumption not met: server assigned " + relayaddr)
- req = make_request(self.hwaddr, None, None, ciaddr=otheraddr)
- req.getlayer(BOOTP).giaddr = relayaddr
+ req = self._make_request(self.hwaddr, reqaddr=None, siaddr=None,
+ ciaddr=otheraddr, giaddr=relayaddr)
resp = self._get_response(req)
self._assert_nak(resp)
- self._assert_unicast(resp, relayaddr)
+ self._assert_relayed(resp, relayaddr)
+ self._assert_broadcastbit(resp)
@test_tracker_info(uuid="6ff1fab4-009a-4758-9153-0d9db63423da")
def test_release(self):
addr, siaddr, _ = self._request_address(self.hwaddr)
# Re-requesting fails
- resp = self._get_response(make_request(self.other_hwaddr, addr, siaddr))
+ resp = self._get_response(
+ self._make_request(self.other_hwaddr, addr, siaddr))
self._assert_nak(resp)
+ self._assert_broadcast(resp)
# Succeeds after release
- self._send(make_release(self.hwaddr, addr, siaddr))
- time.sleep(1)
- resp = self._get_response(make_request(self.other_hwaddr, addr, siaddr))
+ self._send(self._make_release(self.hwaddr, addr, siaddr))
+ resp = self._get_response(
+ self._make_request(self.other_hwaddr, addr, siaddr))
self._assert_ack(resp)
@test_tracker_info(uuid="abb1a53e-6b6c-468f-88b9-ace9ca4d6593")
def test_release_noserverid(self):
addr, siaddr, _ = self._request_address(self.hwaddr)
- # Release without server_id opt ignored
- release = make_release(self.hwaddr, addr, siaddr)
+ # Release without server_id opt is ignored
+ release = self._make_release(self.hwaddr, addr, siaddr)
removeopt(release, 'server_id')
self._send(release)
# Not released: request fails
- resp = self._get_response(make_request(self.other_hwaddr, addr, siaddr))
+ resp = self._get_response(
+ self._make_request(self.other_hwaddr, addr, siaddr))
self._assert_nak(resp)
+ self._assert_broadcast(resp)
@test_tracker_info(uuid="8415b69e-ae61-4474-8495-d783ba6818d1")
def test_release_wrongserverid(self):
addr, siaddr, _ = self._request_address(self.hwaddr)
# Release with wrong server id
- release = make_release(self.hwaddr, addr, siaddr)
+ release = self._make_release(self.hwaddr, addr, siaddr)
setopt(release, 'server_id', addr)
self._send(release)
# Not released: request fails
- resp = self._get_response(make_request(self.other_hwaddr, addr, siaddr))
+ resp = self._get_response(
+ self._make_request(self.other_hwaddr, addr, siaddr))
self._assert_nak(resp)
+ self._assert_broadcast(resp)
- def _request_address(self, hwaddr):
- resp = self._get_response(make_discover(self.hwaddr))
+ @test_tracker_info(uuid="0858f678-3db2-4c12-a21b-6e16c5d7e7ce")
+ def test_unicast_l2l3(self):
+        req_addr = NETADDR_PREFIX + '124'
+        resp = self._get_response(self._make_request(
+            self.hwaddr, req_addr, siaddr=None))
+        self._assert_unicast(resp)
+        str_hwaddr = format_hwaddr(self.hwaddr)
+        asserts.assert_equal(str_hwaddr, resp.getlayer(Ether).dst)
+        asserts.assert_equal(req_addr, resp.getlayer(IP).dst)
+ asserts.assert_equal(CLIENT_PORT, resp.getlayer(UDP).dport)
+
+ def _make_macos_10_13_3_paramrequestlist(self):
+ return make_paramrequestlist_opt([
+ 'subnet_mask',
+ 121, # Classless Static Route
+ 'router',
+ 'name_server',
+ 'domain',
+ 119, # Domain Search
+ 252, # Private/Proxy autodiscovery
+ 95, # LDAP
+ 44, # NetBIOS over TCP/IP Name Server
+ 46, # NetBIOS over TCP/IP Node Type
+ ])
+
+ @test_tracker_info(uuid="bf05efe9-ee5b-46ba-9b3c-5a4441c13798")
+ def test_macos_10_13_3_discover(self):
+ req = self._make_discover(self.hwaddr,
+ options=[
+ self._make_macos_10_13_3_paramrequestlist(),
+ ('max_dhcp_size', 1500),
+ # HW type Ethernet (0x01)
+ ('client_id', b'\x01' + self.hwaddr),
+ ('lease_time', 7776000),
+ ('hostname', b'test12-macbookpro'),
+ ], opts_padding=bytes(6))
+ req.getlayer(BOOTP).secs = 2
+ resp = self._get_response(req)
+ self._assert_offer(resp)
+ self._assert_standard_offer_or_ack(resp)
+
+ @test_tracker_info(uuid="7acc796b-c4f1-46cc-8ffb-0a0efb05ae86")
+ def test_macos_10_13_3_request_selecting(self):
+ req = self._make_request(self.hwaddr, None, None,
+ options=[
+ self._make_macos_10_13_3_paramrequestlist(),
+ ('max_dhcp_size', 1500),
+ # HW type Ethernet (0x01)
+ ('client_id', b'\x01' + self.hwaddr),
+ ('requested_addr', NETADDR_PREFIX + '109'),
+ ('server_id', self.server_addr),
+ ('hostname', b'test12-macbookpro'),
+ ])
+ req.getlayer(BOOTP).secs = 5
+ resp = self._get_response(req)
+ self._assert_ack(resp)
+ self._assert_standard_offer_or_ack(resp)
+
+ # Note: macOS does not seem to do any rebinding (straight to discover)
+ @test_tracker_info(uuid="e8f0b60c-9ea3-4184-8426-151a395bff5b")
+ def test_macos_10_13_3_request_renewing(self):
+ req_ip = NETADDR_PREFIX + '109'
+ req = self._make_request(self.hwaddr, None, None,
+ ciaddr=req_ip, ip_src=req_ip, ip_dst=self.server_addr, options=[
+ self._make_macos_10_13_3_paramrequestlist(),
+ ('max_dhcp_size', 1500),
+ # HW type Ethernet (0x01)
+ ('client_id', b'\x01' + self.hwaddr),
+ ('lease_time', 7776000),
+ ('hostname', b'test12-macbookpro'),
+ ], opts_padding=bytes(6))
+ resp = self._get_response(req)
+ self._assert_ack(resp)
+ self._assert_standard_offer_or_ack(resp, renewing=True)
+
+ def _make_win10_paramrequestlist(self):
+ return make_paramrequestlist_opt([
+ 'subnet_mask',
+ 'router',
+ 'name_server',
+ 'domain',
+ 31, # Perform Router Discover
+ 33, # Static Route
+ 'vendor_specific',
+ 44, # NetBIOS over TCP/IP Name Server
+ 46, # NetBIOS over TCP/IP Node Type
+ 47, # NetBIOS over TCP/IP Scope
+ 121, # Classless Static Route
+ 249, # Private/Classless Static Route (MS)
+ 252, # Private/Proxy autodiscovery
+ ])
+
+ @test_tracker_info(uuid="11b3db9c-4cd7-4088-99dc-881f25ce4e76")
+ def test_win10_discover(self):
+ req = self._make_discover(self.hwaddr, bcastbit=True,
+ options=[
+ # HW type Ethernet (0x01)
+ ('client_id', b'\x01' + self.hwaddr),
+ ('hostname', b'test120-w'),
+ ('vendor_class_id', b'MSFT 5.0'),
+ self._make_win10_paramrequestlist(),
+ ], opts_padding=bytes(11))
+ req.getlayer(BOOTP).secs = 2
+ resp = self._get_response(req)
+ self._assert_offer(resp)
+ self._assert_standard_offer_or_ack(resp, bcast=True)
+
+ @test_tracker_info(uuid="4fe04e7f-c643-4a19-b15c-cf417b2c9410")
+ def test_win10_request_selecting(self):
+ req = self._make_request(self.hwaddr, None, None, bcastbit=True,
+ options=[
+ ('max_dhcp_size', 1500),
+ # HW type Ethernet (0x01)
+ ('client_id', b'\x01' + self.hwaddr),
+ ('requested_addr', NETADDR_PREFIX + '109'),
+ ('server_id', self.server_addr),
+ ('hostname', b'test120-w'),
+ # Client Fully Qualified Domain Name
+ (81, b'\x00\x00\x00test120-w.ad.tst.example.com'),
+ ('vendor_class_id', b'MSFT 5.0'),
+ self._make_win10_paramrequestlist(),
+ ])
+ resp = self._get_response(req)
+ self._assert_ack(resp)
+ self._assert_standard_offer_or_ack(resp, bcast=True)
+
+ def _run_win10_request_renewing(self, bcast):
+ req_ip = NETADDR_PREFIX + '109'
+ req = self._make_request(self.hwaddr, None, None, bcastbit=bcast,
+ ciaddr=req_ip, ip_src=req_ip,
+ ip_dst=NETADDR_BROADCAST if bcast else self.server_addr,
+ options=[
+ ('max_dhcp_size', 1500),
+ # HW type Ethernet (0x01)
+ ('client_id', b'\x01' + self.hwaddr),
+ ('hostname', b'test120-w'),
+ # Client Fully Qualified Domain Name
+ (81, b'\x00\x00\x00test120-w.ad.tst.example.com'),
+ ('vendor_class_id', b'MSFT 5.0'),
+ self._make_win10_paramrequestlist(),
+ ])
+ resp = self._get_response(req)
+ self._assert_ack(resp)
+ self._assert_standard_offer_or_ack(resp, renewing=True, bcast=bcast)
+
+ @test_tracker_info(uuid="1b23c9c7-cc94-42d0-83a6-f1b2bc125fb9")
+ def test_win10_request_renewing(self):
+ self._run_win10_request_renewing(bcast=False)
+
+ @test_tracker_info(uuid="c846bd14-71fb-4492-a4d3-0aa5c2c35751")
+ def test_win10_request_rebinding(self):
+ self._run_win10_request_renewing(bcast=True)
+
+ def _make_debian_paramrequestlist(self):
+ return make_paramrequestlist_opt([
+ 'subnet_mask',
+ 'broadcast_address',
+ 'router',
+ 'name_server',
+ 119, # Domain Search
+ 'hostname',
+ 101, # TCode
+            'domain', # Domain Name
+            'vendor_specific', # Vendor-Specific Information
+ 121, # Classless Static Route
+ 249, # Private/Classless Static Route (MS)
+ 33, # Static Route
+ 252, # Private/Proxy autodiscovery
+ 'NTP_server',
+ ])
+
+ @test_tracker_info(uuid="b0bb6ae7-07e6-4ecb-9a2f-db9c8146a3d5")
+ def test_debian_dhclient_4_3_5_discover(self):
+ req_ip = NETADDR_PREFIX + '109'
+ req = self._make_discover(self.hwaddr,
+ options=[
+ ('requested_addr', req_ip),
+ ('hostname', b'test12'),
+ self._make_debian_paramrequestlist(),
+ ], opts_padding=bytes(26))
+ resp = self._get_response(req)
+ self._assert_offer(resp)
+        # Don't test for the hostname option: the previous implementation did
+        # not set it in the offer, which was inconsistent with the ack
+ self._assert_standard_offer_or_ack(resp, ignore_hostname=True)
+ asserts.assert_equal(req_ip, get_yiaddr(resp))
+
+ @test_tracker_info(uuid="d70bc043-84cb-4735-9123-c46c6d1ce5ac")
+ def test_debian_dhclient_4_3_5_request_selecting(self):
+ req = self._make_request(self.hwaddr, None, None,
+ options=[
+ ('server_id', self.server_addr),
+ ('requested_addr', NETADDR_PREFIX + '109'),
+ ('hostname', b'test12'),
+ self._make_debian_paramrequestlist(),
+ ], opts_padding=bytes(20))
+ resp = self._get_response(req)
+ self._assert_ack(resp)
+ self._assert_standard_offer_or_ack(resp, with_hostname=True)
+
+ def _run_debian_renewing(self, bcast):
+ req_ip = NETADDR_PREFIX + '109'
+ req = self._make_request(self.hwaddr, None, None,
+ ciaddr=req_ip, ip_src=req_ip,
+ ip_dst=NETADDR_BROADCAST if bcast else self.server_addr,
+ options=[
+ ('hostname', b'test12'),
+ self._make_debian_paramrequestlist(),
+ ],
+ opts_padding=bytes(32))
+ resp = self._get_response(req)
+ self._assert_ack(resp)
+ self._assert_standard_offer_or_ack(resp, renewing=True,
+ with_hostname=True)
+
+ @test_tracker_info(uuid="5e1e817d-9972-46ca-8d44-1e120bf1bafc")
+ def test_debian_dhclient_4_3_5_request_renewing(self):
+ self._run_debian_renewing(bcast=False)
+
+ @test_tracker_info(uuid="b179a36d-910e-4006-a79a-11cc561b69db")
+ def test_debian_dhclient_4_3_5_request_rebinding(self):
+ self._run_debian_renewing(bcast=True)
+
+ def _assert_standard_offer_or_ack(self, resp, renewing=False, bcast=False,
+ ignore_hostname=False, with_hostname=False):
+        # Responses to renew/rebind are always unicast to ciaddr even with the
+        # broadcast flag set (the RFC does not mandate a behavior here, but
+        # unicast is more efficient and matches the previous implementation).
+ if bcast and not renewing:
+ self._assert_broadcast(resp)
+ self._assert_broadcastbit(resp, isset=True)
+ else:
+            # The previous implementation set the broadcast flag but still
+            # sent a unicast reply when (bcast and renewing); the new
+            # implementation consistently clears the flag. The flag value is
+            # not checked here so that both behaviors pass.
+ self._assert_unicast(resp)
+
+ bootp_resp = resp.getlayer(BOOTP)
+ asserts.assert_equal(0, bootp_resp.hops)
+ if renewing:
+ asserts.assert_true(bootp_resp.ciaddr.startswith(NETADDR_PREFIX),
+ 'ciaddr does not start with expected prefix')
+ else:
+ asserts.assert_equal(INET4_ANY, bootp_resp.ciaddr)
+ asserts.assert_true(bootp_resp.yiaddr.startswith(NETADDR_PREFIX),
+ 'yiaddr does not start with expected prefix')
+ asserts.assert_true(bootp_resp.siaddr.startswith(NETADDR_PREFIX),
+ 'siaddr does not start with expected prefix')
+ asserts.assert_equal(INET4_ANY, bootp_resp.giaddr)
+
+ opt_labels = get_opt_labels(bootp_resp)
+ # FQDN option 81 is not supported in new behavior
+ opt_labels = [opt for opt in opt_labels if opt != 81]
+
+ # Expect exactly these options in this order
+ expected_opts = [
+ 'message-type', 'server_id', 'lease_time', 'renewal_time',
+ 'rebinding_time', 'subnet_mask', 'broadcast_address', 'router',
+ 'name_server']
+ if ignore_hostname:
+ opt_labels = [opt for opt in opt_labels if opt != 'hostname']
+ elif with_hostname:
+ expected_opts.append('hostname')
+ expected_opts.extend(['vendor_specific', 'end'])
+ asserts.assert_equal(expected_opts, opt_labels)
+
+ def _request_address(self, hwaddr, bcast=True):
+ resp = self._get_response(self._make_discover(hwaddr))
self._assert_offer(resp)
addr = get_yiaddr(resp)
siaddr = getopt(resp, 'server_id')
- resp = self._get_response(make_request(self.hwaddr, addr, siaddr))
+ resp = self._get_response(self._make_request(hwaddr, addr, siaddr,
+ ip_dst=(INET4_ANY if bcast else siaddr)))
self._assert_ack(resp)
return addr, siaddr, resp
@@ -343,7 +891,7 @@
bootp_resp = (resp or None) and resp.getlayer(BOOTP)
if bootp_resp != None and get_mess_type(bootp_resp) == ACK:
# Note down corresponding release for this request
- release = make_release(bootp_resp.chaddr, bootp_resp.yiaddr,
+ release = self._make_release(bootp_resp.chaddr, bootp_resp.yiaddr,
getopt(bootp_resp, 'server_id'))
self.cleanup_releases.append(release)
return resp
@@ -368,7 +916,12 @@
asserts.assert_false(None == packet, "No packet")
asserts.assert_equal(packet.getlayer(Ether).dst, BROADCAST_MAC)
asserts.assert_equal(packet.getlayer(IP).dst, NETADDR_BROADCAST)
- asserts.assert_equal(packet.getlayer(BOOTP).flags, 0x8000)
+ self._assert_broadcastbit(packet)
+
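+    # The most significant bit of the BOOTP 'flags' field (0x8000) is the
+    # broadcast flag (RFC 1542): when set, the server or relay agent should
+    # broadcast its reply. The helper below masks the field so tests can
+    # assert just that bit.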
+ def _assert_broadcastbit(self, packet, isset=True):
+ mask = 0x8000
+ flag = packet.getlayer(BOOTP).flags
+ asserts.assert_equal(flag & mask, mask if isset else 0)
def _assert_unicast(self, packet, ipAddr=None):
asserts.assert_false(None == packet, "No packet")
@@ -377,15 +930,97 @@
if ipAddr:
asserts.assert_equal(packet.getlayer(IP).dst, ipAddr)
+ def _assert_relayed(self, packet, giaddr):
+ self._assert_unicast(packet, giaddr)
+ asserts.assert_equal(giaddr, packet.getlayer(BOOTP).giaddr,
+ 'Relayed response has invalid giaddr field')
+
def _next_hwaddr(self):
addr = make_hwaddr(self.next_hwaddr_index)
self.next_hwaddr_index = self.next_hwaddr_index + 1
return addr
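+
+    # Packet builders. Each returns a complete Ether/IP/UDP/BOOTP/DHCP scapy
+    # frame; a fresh random xid is generated per packet so responses can be
+    # matched to the request that triggered them.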
+ def _make_dhcp(self, src_hwaddr, options, ciaddr=INET4_ANY,
+ ip_src=INET4_ANY, ip_dst=NETADDR_BROADCAST, giaddr=INET4_ANY,
+ bcastbit=False):
+ broadcast = (ip_dst == NETADDR_BROADCAST)
+ ethernet = Ether(dst=(BROADCAST_MAC if broadcast else self.dut_hwaddr))
+ ip = IP(src=ip_src, dst=ip_dst)
+ udp = UDP(sport=68, dport=SERVER_PORT)
+ bootp = BOOTP(chaddr=src_hwaddr, ciaddr=ciaddr, giaddr=giaddr,
+ flags=(0x8000 if bcastbit else 0), xid=random.randrange(0, 2**32))
+ dhcp = DHCP(options=options)
+ return ethernet / ip / udp / bootp / dhcp
+
+    def _make_discover(self, src_hwaddr, options=[], giaddr=INET4_ANY,
+            bcastbit=False, opts_padding=None, ip_src=INET4_ANY):
+        opts = [('message-type', 'discover')]
+        opts.extend(options)
+        opts.append('end')
+        if opts_padding:
+            opts.append(opts_padding)
+ return self._make_dhcp(src_hwaddr, options=opts, giaddr=giaddr,
+ ip_dst=NETADDR_BROADCAST, bcastbit=bcastbit, ip_src=ip_src)
+
+ def _make_request(self, src_hwaddr, reqaddr, siaddr, ciaddr=INET4_ANY,
+ ip_dst=None, ip_src=None, giaddr=INET4_ANY, bcastbit=False,
+ options=[], opts_padding=None):
+ if not ip_dst:
+ ip_dst = siaddr or INET4_ANY
+
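+        # Choose a source address consistent with the simulated client state:
+        # keep INET4_ANY when the destination is INET4_ANY, otherwise prefer
+        # the relay address (giaddr), then the bound address (ciaddr), then
+        # the requested address, since a unicast request needs a nonzero
+        # source (asserted below).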
+ if not ip_src and ip_dst == INET4_ANY:
+ ip_src = INET4_ANY
+ elif not ip_src:
+ ip_src = (giaddr if not isempty(giaddr)
+ else ciaddr if not isempty(ciaddr)
+ else reqaddr)
+ # Kernel will not receive unicast UDP packets with empty ip_src
+ asserts.assert_false(ip_dst != INET4_ANY and isempty(ip_src),
+ "Unicast ip_src cannot be zero")
+ opts = [('message-type', 'request')]
+ if options:
+ opts.extend(options)
+ else:
+ if siaddr:
+ opts.append(('server_id', siaddr))
+ if reqaddr:
+ opts.append(('requested_addr', reqaddr))
+ opts.append('end')
+ if opts_padding:
+ opts.append(opts_padding)
+ return self._make_dhcp(src_hwaddr, options=opts, ciaddr=ciaddr,
+ ip_src=ip_src, ip_dst=ip_dst, giaddr=giaddr, bcastbit=bcastbit)
+
+ def _make_release(self, src_hwaddr, addr, server_id):
+ opts = [('message-type', 'release'), ('server_id', server_id), 'end']
+ return self._make_dhcp(src_hwaddr, opts, ciaddr=addr, ip_src=addr,
+ ip_dst=server_id)
+
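+
+# Sanity checks shared by all tests on a BOOTP reply: op must be 2 (BOOTREPLY),
+# htype/hlen must describe an Ethernet hardware address (1/6), hops must be 0
+# for a directly answered request, and the xid must echo the request's
+# transaction id (RFC 951 / RFC 2131).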
+def assert_bootp_response(resp, req):
+ bootp = resp.getlayer(BOOTP)
+ asserts.assert_equal(2, bootp.op, 'Invalid BOOTP op')
+ asserts.assert_equal(1, bootp.htype, 'Invalid BOOTP htype')
+ asserts.assert_equal(6, bootp.hlen, 'Invalid BOOTP hlen')
+ asserts.assert_equal(0, bootp.hops, 'Invalid BOOTP hops')
+ asserts.assert_equal(req.getlayer(BOOTP).xid, bootp.xid, 'Invalid XID')
+ return bootp
+
+
+def make_paramrequestlist_opt(params):
+ param_indexes = [DHCPRevOptions[opt][0] if isinstance(opt, str) else opt
+ for opt in params]
+ return tuple(['param_req_list'] + [
+ opt.to_bytes(1, byteorder='big') if isinstance(opt, int) else opt
+ for opt in param_indexes])
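+
+# Illustrative example: with scapy's DHCPRevOptions mapping ('subnet_mask' is
+# option 1, 'router' is option 3),
+#     make_paramrequestlist_opt(['subnet_mask', 121, 'router'])
+# evaluates to ('param_req_list', b'\x01', b'\x79', b'\x03').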
+
+
+def isempty(addr):
+ return not addr or addr == INET4_ANY
+
def setopt(packet, optname, val):
dhcp = packet.getlayer(DHCP)
- if optname in [opt[0] for opt in dhcp.options]:
+ if optname in get_opt_labels(dhcp):
dhcp.options = [(optname, val) if opt[0] == optname else opt
for opt in dhcp.options]
else:
@@ -403,55 +1038,31 @@
dhcp.options = [opt for opt in dhcp.options if opt[0] != key]
+def get_opt_labels(packet):
+ dhcp_resp = packet.getlayer(DHCP)
+ # end option is a single string, not a tuple.
+ return [opt if isinstance(opt, str) else opt[0]
+ for opt in dhcp_resp.options if opt != 'pad']
+
+
def get_yiaddr(packet):
return packet.getlayer(BOOTP).yiaddr
+def get_chaddr(packet):
+ # We use Ethernet addresses. Ignore address padding
+ return packet.getlayer(BOOTP).chaddr[:6]
+
+
def get_mess_type(packet):
return getopt(packet, 'message-type')
-def make_dhcp(src_hwaddr, options, ciaddr='0.0.0.0', ipSrc='0.0.0.0',
- ipDst=NETADDR_BROADCAST):
- broadcast = (ipDst == NETADDR_BROADCAST)
- ethernet = Ether(dst=BROADCAST_MAC) if broadcast else Ether()
- ip = IP(src=ipSrc, dst=ipDst)
- udp = UDP(sport=68, dport=SERVER_PORT)
- bootp = BOOTP(chaddr=src_hwaddr, ciaddr=ciaddr,
- flags=(0x8000 if broadcast else 0), xid=RandInt())
- dhcp = DHCP(options=options)
- return ethernet / ip / udp / bootp / dhcp
-
-
-def make_discover(src_hwaddr, options = []):
- opts = [('message-type','discover')]
- opts.extend(options)
- opts.append('end')
- return make_dhcp(src_hwaddr, options=opts)
-
-
-def make_request(src_hwaddr, reqaddr, siaddr, ciaddr='0.0.0.0', ipSrc=None):
- if ipSrc == None:
- ipSrc = ciaddr
- opts = [('message-type', 'request')]
- if siaddr:
- opts.append(('server_id', siaddr))
- if reqaddr:
- opts.append(('requested_addr', reqaddr))
- opts.append('end')
- return make_dhcp(src_hwaddr, options=opts, ciaddr=ciaddr, ipSrc=ciaddr)
-
-
-def make_release(src_hwaddr, addr, server_id):
- opts = [('message-type', 'release'), ('server_id', server_id), 'end']
- return make_dhcp(src_hwaddr, opts, ciaddr=addr, ipSrc=addr, ipDst=server_id)
-
-
def make_hwaddr(index):
if index > 0xffff:
raise ValueError("Address index out of range")
- return '\x44\x85\x00\x00{}{}'.format(chr(index >> 8), chr(index & 0xff))
+ return b'\x44\x85\x00\x00' + bytes([index >> 8, index & 0xff])
def format_hwaddr(addr):
- return ':'.join(['%02x' % ord(c) for c in addr])
+ return ':'.join(['%02x' % c for c in addr])
diff --git a/acts/tests/google/net/sendra.py b/acts/tests/google/net/sendra.py
index 226b7e4..b547fa6 100755
--- a/acts/tests/google/net/sendra.py
+++ b/acts/tests/google/net/sendra.py
@@ -6,7 +6,7 @@
from scapy import all as scapy
-def send(dstmac, interval, count, lifetime, iface):
+def send(dstmac, interval, count, lifetime, iface, rtt):
"""Generate IPv6 Router Advertisement and send to destination.
Args:
@@ -15,12 +15,13 @@
3. count: int Number of packets to be sent.
4. lifetime: Router lifetime value for the original RA.
5. iface: string Router's WiFi interface to send packets over.
+        6. rtt: int Retrans Timer value, in milliseconds, for the RA packets.
"""
while count:
ra = (scapy.Ether(dst=dstmac) /
scapy.IPv6() /
- scapy.ICMPv6ND_RA(routerlifetime=lifetime))
+ scapy.ICMPv6ND_RA(routerlifetime=lifetime, retranstimer=rtt))
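+        # Note: retranstimer populates the RA's Retrans Timer field, which
+        # RFC 4861 specifies in milliseconds.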
scapy.sendp(ra, iface=iface)
count = count - 1
time.sleep(interval)
@@ -39,6 +40,8 @@
type=int, help='Lifetime in seconds for the first RA')
parser.add_argument('-in', '--wifi-interface', action='store', default=None,
help='The wifi interface to send packets over.')
+    parser.add_argument('-rtt', '--retrans-timer', action='store', default=None,
+                        type=int,
+                        help='Retrans Timer value for the RA, in milliseconds.')
args = parser.parse_args()
send(args.mac_address, args.t_interval, args.pkt_count, args.life_time,
- args.wifi_interface)
+ args.wifi_interface, args.retrans_timer)
diff --git a/acts/tests/google/power/tel/lab/PowerTelTest.py b/acts/tests/google/power/tel/lab/PowerTelTest.py
deleted file mode 100644
index ebda5b2..0000000
--- a/acts/tests/google/power/tel/lab/PowerTelTest.py
+++ /dev/null
@@ -1,623 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2016 - The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""
-Power tests for cellular connectivity.
-"""
-
-import time
-import json
-import logging
-import os
-import scapy.all as scapy
-
-import acts.controllers.iperf_server as ipf
-
-from acts import base_test
-from acts.test_decorators import test_tracker_info
-from acts.controllers.anritsu_lib._anritsu_utils import AnritsuError
-from acts.controllers.anritsu_lib.md8475a import MD8475A
-from acts.controllers.anritsu_lib.md8475a import BtsBandwidth
-from acts.controllers.anritsu_lib.md8475a import BtsPacketRate
-from acts.controllers.anritsu_lib.md8475a import VirtualPhoneStatus
-from acts.test_utils.tel.anritsu_utils import cb_serial_number
-from acts.test_utils.tel.anritsu_utils import set_system_model_1x
-from acts.test_utils.tel.anritsu_utils import set_system_model_gsm
-from acts.test_utils.tel.anritsu_utils import load_system_model_from_config_files
-from acts.test_utils.tel.anritsu_utils import load_system_model_from_config_files_ca
-from acts.test_utils.tel.anritsu_utils import set_system_model_lte
-from acts.test_utils.tel.anritsu_utils import set_system_model_lte_wcdma
-from acts.test_utils.tel.anritsu_utils import set_system_model_wcdma
-from acts.test_utils.tel.anritsu_utils import sms_mo_send
-from acts.test_utils.tel.anritsu_utils import sms_mt_receive_verify
-from acts.test_utils.tel.anritsu_utils import set_usim_parameters
-from acts.test_utils.tel.anritsu_utils import set_post_sim_params
-from acts.test_utils.tel.tel_defines import DIRECTION_MOBILE_ORIGINATED
-from acts.test_utils.tel.tel_defines import DIRECTION_MOBILE_TERMINATED
-from acts.test_utils.tel.tel_defines import NETWORK_MODE_CDMA
-from acts.test_utils.tel.tel_defines import NETWORK_MODE_GSM_ONLY
-from acts.test_utils.tel.tel_defines import NETWORK_MODE_GSM_UMTS
-from acts.test_utils.tel.tel_defines import NETWORK_MODE_LTE_GSM_WCDMA
-from acts.test_utils.tel.tel_defines import NETWORK_MODE_LTE_CDMA_EVDO
-from acts.test_utils.tel.tel_defines import RAT_1XRTT
-from acts.test_utils.tel.tel_defines import RAT_GSM
-from acts.test_utils.tel.tel_defines import RAT_LTE
-from acts.test_utils.tel.tel_defines import RAT_WCDMA
-from acts.test_utils.tel.tel_defines import RAT_FAMILY_CDMA2000
-from acts.test_utils.tel.tel_defines import RAT_FAMILY_GSM
-from acts.test_utils.tel.tel_defines import RAT_FAMILY_LTE
-from acts.test_utils.tel.tel_defines import RAT_FAMILY_UMTS
-from acts.test_utils.tel.tel_defines import NETWORK_SERVICE_DATA
-from acts.test_utils.tel.tel_defines import GEN_4G
-from acts.test_utils.tel.tel_test_utils import ensure_network_rat
-from acts.test_utils.tel.tel_test_utils import ensure_phones_idle
-from acts.test_utils.tel.tel_test_utils import ensure_network_generation
-from acts.test_utils.tel.tel_test_utils import toggle_airplane_mode
-from acts.test_utils.tel.tel_test_utils import iperf_test_by_adb
-from acts.test_utils.wifi import wifi_power_test_utils as wputils
-
-from acts.utils import adb_shell_ping
-from acts.utils import rand_ascii_str
-from acts.controllers import iperf_server
-from acts.utils import exe_cmd
-
-DEFAULT_PING_DURATION = 30
-
-
-SCHEDULING_DYNAMIC = 0
-SCHEDULING_MIN_MCS = 1
-SCHEDULING_MAX_MCS = 2
-
-DIRECTION_UPLINK = 0
-DIRECTION_DOWNLINK = 1
-
-TM1 = 1
-TM4 = 4
-
-class PowerTelTest(base_test.BaseTestClass):
-
- SETTLING_TIME = 10
-
-
- def __init__(self, controllers):
- base_test.BaseTestClass.__init__(self, controllers)
-
- self.ad = self.android_devices[0]
- self.iperf_server = self.iperf_servers[0]
- self.port_num = self.iperf_server.port
- self.log.info("Iperf Port is %s", self.port_num)
- self.ad.sim_card = getattr(self.ad, "sim_card", None)
- self.log.info("SIM Card is %s", self.ad.sim_card)
- self.md8475a_ip_address = self.user_params[
- "anritsu_md8475a_ip_address"]
- self.wlan_option = self.user_params.get("anritsu_wlan_option", False)
-
- # Load power level values
- self.big_step = self.user_params.get("big_step", 10)
- self.small_step_range = self.user_params.get("small_step_range", [])
- self.small_step = self.user_params.get("small_step", 3)
-
- # Load power levels
- self.uplink_power_levels = self.get_power_levels(
- small_step_range = self.user_params.get("uplink_small_step_range", []),
- big_step_range = self.user_params.get("uplink_big_step_range", [])
- )
-
- self.downlink_power_levels = self.get_power_levels(
- small_step_range = self.user_params.get("downlink_small_step_range", []),
- big_step_range = self.user_params.get("downlink_big_step_range", [])
- )
-
- # Setup sampling durations
- self.mon_offset = self.user_params.get("monsoon_offset", 15)
- self.mon_duration = self.user_params.get("monsoon_sampling_time", 10)
- self.iperf_offset = self.user_params.get("iperf_offset", 5)
- self.iperf_duration = self.mon_duration + self.iperf_offset + self.mon_offset
-
- # Setup monsoon
- self.mon_freq = 5000
- self.mon_data_path = os.path.join(self.log_path, 'Monsoon')
- self.mon = self.monsoons[0]
- self.mon.set_max_current(8.0)
- self.mon.set_voltage(4.2)
- self.mon.attach_device(self.ad)
- self.mon_info = wputils.create_monsoon_info(self)
-
- # Fetch IP address of the host machine
- self.ip = scapy.get_if_addr(self.user_params.get("interface", "eno1"))
- self.log.info("Dest IP is %s", self.ip)
-
- def setup_class(self):
- try:
- self.anritsu = MD8475A(self.md8475a_ip_address, self.log,
- self.wlan_option)
- except AnritsuError:
- self.log.error("Error in connecting to Anritsu Simulator")
- return False
- return True
-
- def setup_test(self):
- ensure_phones_idle(self.log, self.android_devices)
- wputils.dut_rockbottom(self.ad)
- return True
-
- def teardown_test(self):
- self.log.info("Stopping Simulation")
- self.anritsu.stop_simulation()
- toggle_airplane_mode(self.log, self.ad, True)
- return True
-
- def teardown_class(self):
- self.anritsu.disconnect()
- return True
-
- def get_power_levels(self, small_step_range, big_step_range):
-
- power_levels = []
- if len(big_step_range) != 0:
- if len(big_step_range) != 2:
- self.log.error("big_step_range should contain an array with a min and max value for that part of the sweeping range.")
- else:
- if big_step_range[0] > big_step_range[1]:
- aux = big_step_range[0]
- big_step_range[0] = big_step_range[1]
- big_step_range[1] = aux
- power_levels.extend(range(big_step_range[1], big_step_range[0], -self.big_step))
- power_levels.append(big_step_range[0])
- if len(small_step_range) != 0:
- if len(small_step_range) != 2:
- self.log.error("small_step_range should contain an array with a min and max value for that part of the sweeping range.")
- else:
- if small_step_range[0] > small_step_range[1]:
- aux = small_step_range[0]
- small_step_range[0] = small_step_range[1]
- small_step_range[1] = aux
- power_levels.extend(range(small_step_range[1], small_step_range[0], -self.small_step))
- power_levels.append(small_step_range[0])
- print(str(power_levels))
- return power_levels
-
- def start_sitmulation(self, set_simulation_func, band, scheduling, bandwidth, transmission_mode):
-
- [self.bts1] = set_simulation_func(self.anritsu, self.user_params,
- self.ad.sim_card)
-
- self.bts1.band = band
-
- if bandwidth == 20:
- self.bts1.bandwidth = BtsBandwidth.LTE_BANDWIDTH_20MHz
- elif bandwidth == 15:
- self.bts1.bandwidth = BtsBandwidth.LTE_BANDWIDTH_15MHz
- elif bandwidth == 10:
- self.bts1.bandwidth = BtsBandwidth.LTE_BANDWIDTH_10MHz
- elif bandwidth == 5:
- self.bts1.bandwidth = BtsBandwidth.LTE_BANDWIDTH_5MHz
- elif bandwidth == 3:
- self.bts1.bandwidth = BtsBandwidth.LTE_BANDWIDTH_3MHz
- elif bandwidth == 1.4:
- self.bts1.bandwidth = BtsBandwidth.LTE_BANDWIDTH_1dot4MHz
-
- if scheduling == SCHEDULING_DYNAMIC:
- self.bts1.lte_scheduling_mode = "DYNAMIC"
- else:
-
- self.bts1.lte_scheduling_mode = "STATIC"
- self.bts1.packet_rate = BtsPacketRate.LTE_MANUAL
- self.anritsu.send_command("TBSPATTERN OFF, BTS1")
- self.bts1.lte_mcs_dl = 0
- self.bts1.nrb_dl = 5 * bandwidth
- self.bts1.nrb_ul = 5 * bandwidth
-
- if scheduling == SCHEDULING_MIN_MCS:
- self.bts1.lte_mcs_ul = 0
- else:
- self.bts1.lte_mcs_ul = 23
-
- if transmission_mode == TM1:
- self.bts1.dl_antenna = 1
- self.bts1.transmode = "TM1"
- elif transmission_mode == TM4:
- self.bts1.dl_antenna = 2
- self.bts1.transmode = "TM4"
-
- self.anritsu.start_simulation()
-
- def start_sitmulation_ca(self, set_simulation_func):
-
- [self.bts1] = set_simulation_func(self.anritsu, self.user_params,
- self.ad.sim_card)
-
- self.anritsu.start_simulation()
-
- def measure_throughput_and_power(self, direction):
-
- # Start iperf locally
- self.log.info("Starting iperf server.")
- self.iperf_server.start()
-
- self.log.info("Starting iperf client on the phone.")
- iperf_args = '-i 1 -t %d' % self.iperf_duration
- if direction == DIRECTION_DOWNLINK:
- iperf_args = iperf_args + ' -R'
- iperf_args = iperf_args + ' > /dev/null'
-
- wputils.run_iperf_client_nonblocking(
- self.ad, self.ip, iperf_args)
-
- # Collect power data
- self.log.info("Starting sampling with monsoon.")
- file_path, current = wputils.monsoon_data_collect_save(
- self.ad, self.mon_info, self.current_test_name, bug_report=0)
-
- # Collect iperf data
-
- # Give some time for iperf to finish
- time.sleep(self.iperf_offset)
-
- self.iperf_server.stop()
-
- throughput = 0
- try:
- iperf_result = ipf.IPerfResult(self.iperf_server.log_files[-1])
-
- if direction == DIRECTION_DOWNLINK:
- if iperf_result.avg_send_rate is not None:
- throughput = iperf_result.avg_send_rate * 8
- elif direction == DIRECTION_UPLINK:
- if iperf_result.avg_receive_rate is not None:
- throughput = iperf_result.avg_receive_rate * 8
- except:
- pass
-
- self.log.info("Average receive rate: %sMbps", throughput)
-
- return [throughput, current]
-
- def sweep(self, power_levels, direction):
-
- if direction == DIRECTION_DOWNLINK:
- self.bts1.input_level = -40
-
-
- results_throughput = []
- results_power = []
-
- for power in power_levels:
-
- self.log.info("------- Measuring with power level %d dBm -------", power)
-
- if direction == DIRECTION_DOWNLINK:
- self.bts1.output_level = power
- elif direction == DIRECTION_UPLINK:
- self.bts1.input_level = power
-
- self.log.info("Current Power Level is %s dBm", power)
-
-
- throughput, current = self.measure_throughput_and_power(direction)
- results_throughput.append(throughput)
- results_power.append(current)
-
-
- return [results_throughput, results_power]
-
- def set_to_rockbottom_and_attach(self):
-
- self.bts1.input_power = -10
- self.bts1.output_power = -30
-
- # Set device to rockbottom
- self.ad.droid.goToSleepNow()
-
- # Turn of airplane mode and wait until the phone attaches
- toggle_airplane_mode(self.log, self.ad, False)
- time.sleep(2)
- self.anritsu.wait_for_registration_state()
- time.sleep(self.SETTLING_TIME)
- self.log.info("UE attached to the callbox.")
-
- def save_results(self, results_throughput, results_power, power_levels, file_name = ""):
-
- if file_name == "":
- file_name = self.current_test_name
-
- self.logpath = os.path.join(logging.log_path, self.current_test_name + ".csv")
- with open(self.logpath, "a") as tput_file:
- tput_file.write("# rf_power, current, throughput")
- tput_file.write("\n")
- for i in range(0, len(results_power)):
- tput_file.write(str(power_levels[i]) + ", " + str(results_power[i]) + ", " + str(results_throughput[i]))
- tput_file.write("\n")
-
- def do_test(self, direction, band, scheduling, bandwidth, transmission_mode, ca_band2 = 0):
-
- if direction == DIRECTION_DOWNLINK:
- power_levels = self.downlink_power_levels
- elif direction == DIRECTION_UPLINK:
- power_levels = self.uplink_power_levels
-
- if ca_band2 == 0:
- self.start_sitmulation(load_system_model_from_config_files, band, scheduling, bandwidth, transmission_mode)
- else:
- self.start_sitmulation_ca(load_system_model_from_config_files_ca, band, ca_band2, scheduling, bandwidth)
-
- self.set_to_rockbottom_and_attach()
- results_throughput, results_power = self.sweep(power_levels, direction)
- self.save_results(results_throughput, results_power, power_levels)
-
- def do_test_ca(self, direction):
-
- if direction == DIRECTION_DOWNLINK:
- power_levels = self.downlink_power_levels
- elif direction == DIRECTION_UPLINK:
- power_levels = self.uplink_power_levels
-
- self.start_sitmulation_ca(load_system_model_from_config_files_ca)
-
- self.set_to_rockbottom_and_attach()
- results_throughput, results_power = self.sweep(power_levels, direction)
- self.save_results(results_throughput, results_power, power_levels)
-
-
-
- """ Tests Begin """
-
- def test_downlink_tm1_band4_14MHz_dynamic(self):
-
- self.do_test(direction = DIRECTION_DOWNLINK, band = 4, scheduling = SCHEDULING_DYNAMIC, bandwidth = 1.4, transmission_mode = TM1, ca_band2 = 0)
-
- def test_uplink_tm1_band4_14MHz_min_mcs(self):
-
- self.do_test(direction = DIRECTION_UPLINK, band = 4, scheduling = SCHEDULING_MIN_MCS, bandwidth = 1.4, transmission_mode = TM1, ca_band2 = 0)
-
- def test_uplink_tm1_band4_14MHz_max_mcs(self):
-
- self.do_test(direction = DIRECTION_UPLINK, band = 4, scheduling = SCHEDULING_MAX_MCS, bandwidth = 1.4, transmission_mode = TM1, ca_band2 = 0)
-
- def test_downlink_tm4_band4_14MHz_dynamic(self):
-
- self.do_test(direction = DIRECTION_DOWNLINK, band = 4, scheduling = SCHEDULING_DYNAMIC, bandwidth = 1.4, transmission_mode = TM4, ca_band2 = 0)
-
- def test_uplink_tm4_band4_14MHz_min_mcs(self):
-
- self.do_test(direction = DIRECTION_UPLINK, band = 4, scheduling = SCHEDULING_MIN_MCS, bandwidth = 1.4, transmission_mode = TM4, ca_band2 = 0)
-
- def test_uplink_tm4_band4_14MHz_max_mcs(self):
-
- self.do_test(direction = DIRECTION_UPLINK, band = 4, scheduling = SCHEDULING_MAX_MCS, bandwidth = 1.4, transmission_mode = TM4, ca_band2 = 0)
-
- def test_downlink_tm1_band4_3MHz_dynamic(self):
-
- self.do_test(direction = DIRECTION_DOWNLINK, band = 4, scheduling = SCHEDULING_DYNAMIC, bandwidth = 3, transmission_mode = TM1, ca_band2 = 0)
-
- def test_uplink_tm1_band4_3MHz_min_mcs(self):
-
- self.do_test(direction = DIRECTION_UPLINK, band = 4, scheduling = SCHEDULING_MIN_MCS, bandwidth = 3, transmission_mode = TM1, ca_band2 = 0)
-
- def test_uplink_tm1_band4_3MHz_max_mcs(self):
-
- self.do_test(direction = DIRECTION_UPLINK, band = 4, scheduling = SCHEDULING_MAX_MCS, bandwidth = 3, transmission_mode = TM1, ca_band2 = 0)
-
- def test_downlink_tm4_band4_3MHz_dynamic(self):
-
- self.do_test(direction = DIRECTION_DOWNLINK, band = 4, scheduling = SCHEDULING_DYNAMIC, bandwidth = 3, transmission_mode = TM4, ca_band2 = 0)
-
- def test_uplink_tm4_band4_3MHz_min_mcs(self):
-
- self.do_test(direction = DIRECTION_UPLINK, band = 4, scheduling = SCHEDULING_MIN_MCS, bandwidth = 3, transmission_mode = TM4, ca_band2 = 0)
-
- def test_uplink_tm4_band4_3MHz_max_mcs(self):
-
- self.do_test(direction = DIRECTION_UPLINK, band = 4, scheduling = SCHEDULING_MAX_MCS, bandwidth = 3, transmission_mode = TM4, ca_band2 = 0)
-
- def test_downlink_tm1_band4_5MHz_dynamic(self):
-
- self.do_test(direction = DIRECTION_DOWNLINK, band = 4, scheduling = SCHEDULING_DYNAMIC, bandwidth = 5, transmission_mode = TM1, ca_band2 = 0)
-
- def test_uplink_tm1_band4_5MHz_min_mcs(self):
-
- self.do_test(direction = DIRECTION_UPLINK, band = 4, scheduling = SCHEDULING_MIN_MCS, bandwidth = 5, transmission_mode = TM1, ca_band2 = 0)
-
- def test_uplink_tm1_band4_5MHz_max_mcs(self):
-
- self.do_test(direction = DIRECTION_UPLINK, band = 4, scheduling = SCHEDULING_MAX_MCS, bandwidth = 5, transmission_mode = TM1, ca_band2 = 0)
-
- def test_downlink_tm4_band4_5MHz_dynamic(self):
-
- self.do_test(direction = DIRECTION_DOWNLINK, band = 4, scheduling = SCHEDULING_DYNAMIC, bandwidth = 5, transmission_mode = TM4, ca_band2 = 0)
-
- def test_uplink_tm4_band4_5MHz_min_mcs(self):
-
- self.do_test(direction = DIRECTION_UPLINK, band = 4, scheduling = SCHEDULING_MIN_MCS, bandwidth = 5, transmission_mode = TM4, ca_band2 = 0)
-
- def test_uplink_tm4_band4_5MHz_max_mcs(self):
-
- self.do_test(direction = DIRECTION_UPLINK, band = 4, scheduling = SCHEDULING_MAX_MCS, bandwidth = 5, transmission_mode = TM4, ca_band2 = 0)
-
- def test_downlink_tm1_band4_10MHz_dynamic(self):
-
- self.do_test(direction = DIRECTION_DOWNLINK, band = 4, scheduling = SCHEDULING_DYNAMIC, bandwidth = 10, transmission_mode = TM1, ca_band2 = 0)
-
- def test_uplink_tm1_band4_10MHz_min_mcs(self):
-
- self.do_test(direction = DIRECTION_UPLINK, band = 4, scheduling = SCHEDULING_MIN_MCS, bandwidth = 10, transmission_mode = TM1, ca_band2 = 0)
-
- def test_uplink_tm1_band4_10MHz_max_mcs(self):
-
- self.do_test(direction = DIRECTION_UPLINK, band = 4, scheduling = SCHEDULING_MAX_MCS, bandwidth = 10, transmission_mode = TM1, ca_band2 = 0)
-
- def test_downlink_tm4_band4_10MHz_dynamic(self):
-
- self.do_test(direction = DIRECTION_DOWNLINK, band = 4, scheduling = SCHEDULING_DYNAMIC, bandwidth = 10, transmission_mode = TM4, ca_band2 = 0)
-
- def test_uplink_tm4_band4_10MHz_min_mcs(self):
-
- self.do_test(direction = DIRECTION_UPLINK, band = 4, scheduling = SCHEDULING_MIN_MCS, bandwidth = 10, transmission_mode = TM4, ca_band2 = 0)
-
- def test_uplink_tm4_band4_10MHz_max_mcs(self):
-
- self.do_test(direction = DIRECTION_UPLINK, band = 4, scheduling = SCHEDULING_MAX_MCS, bandwidth = 10, transmission_mode = TM4, ca_band2 = 0)
-
- def test_downlink_tm1_band4_20MHz_dynamic(self):
-
- self.do_test(direction = DIRECTION_DOWNLINK, band = 4, scheduling = SCHEDULING_DYNAMIC, bandwidth = 20, transmission_mode = TM1, ca_band2 = 0)
-
- def test_uplink_tm1_band4_20MHz_min_mcs(self):
-
- self.do_test(direction = DIRECTION_UPLINK, band = 4, scheduling = SCHEDULING_MIN_MCS, bandwidth = 20, transmission_mode = TM1, ca_band2 = 0)
-
- def test_uplink_tm1_band4_20MHz_max_mcs(self):
-
- self.do_test(direction = DIRECTION_UPLINK, band = 4, scheduling = SCHEDULING_MAX_MCS, bandwidth = 20, transmission_mode = TM1, ca_band2 = 0)
-
- def test_downlink_tm4_band4_20MHz_dynamic(self):
-
- self.do_test(direction = DIRECTION_DOWNLINK, band = 4, scheduling = SCHEDULING_DYNAMIC, bandwidth = 20, transmission_mode = TM4, ca_band2 = 0)
-
- def test_uplink_tm4_band4_20MHz_min_mcs(self):
-
- self.do_test(direction = DIRECTION_UPLINK, band = 4, scheduling = SCHEDULING_MIN_MCS, bandwidth = 20, transmission_mode = TM4, ca_band2 = 0)
-
- def test_uplink_tm4_band4_20MHz_max_mcs(self):
-
- self.do_test(direction = DIRECTION_UPLINK, band = 4, scheduling = SCHEDULING_MAX_MCS, bandwidth = 20, transmission_mode = TM4, ca_band2 = 0)
-
- def test_downlink_tm1_band7_5MHz_dynamic(self):
-
- self.do_test(direction = DIRECTION_DOWNLINK, band = 7, scheduling = SCHEDULING_DYNAMIC, bandwidth = 5, transmission_mode = TM1, ca_band2 = 0)
-
- def test_uplink_tm1_band7_5MHz_min_mcs(self):
-
- self.do_test(direction = DIRECTION_UPLINK, band = 7, scheduling = SCHEDULING_MIN_MCS, bandwidth = 5, transmission_mode = TM1, ca_band2 = 0)
-
- def test_uplink_tm1_band7_5MHz_max_mcs(self):
-
- self.do_test(direction = DIRECTION_UPLINK, band = 7, scheduling = SCHEDULING_MAX_MCS, bandwidth = 5, transmission_mode = TM1, ca_band2 = 0)
-
- def test_downlink_tm4_band7_5MHz_dynamic(self):
-
- self.do_test(direction = DIRECTION_DOWNLINK, band = 7, scheduling = SCHEDULING_DYNAMIC, bandwidth = 5, transmission_mode = TM4, ca_band2 = 0)
-
- def test_uplink_tm4_band7_5MHz_min_mcs(self):
-
- self.do_test(direction = DIRECTION_UPLINK, band = 7, scheduling = SCHEDULING_MIN_MCS, bandwidth = 5, transmission_mode = TM4, ca_band2 = 0)
-
- def test_uplink_tm4_band7_5MHz_max_mcs(self):
-
- self.do_test(direction = DIRECTION_UPLINK, band = 7, scheduling = SCHEDULING_MAX_MCS, bandwidth = 5, transmission_mode = TM4, ca_band2 = 0)
-
- def test_downlink_tm1_band7_10MHz_dynamic(self):
-
- self.do_test(direction = DIRECTION_DOWNLINK, band = 7, scheduling = SCHEDULING_DYNAMIC, bandwidth = 10, transmission_mode = TM1, ca_band2 = 0)
-
- def test_uplink_tm1_band7_10MHz_min_mcs(self):
-
- self.do_test(direction = DIRECTION_UPLINK, band = 7, scheduling = SCHEDULING_MIN_MCS, bandwidth = 10, transmission_mode = TM1, ca_band2 = 0)
-
- def test_uplink_tm1_band7_10MHz_max_mcs(self):
-
- self.do_test(direction = DIRECTION_UPLINK, band = 7, scheduling = SCHEDULING_MAX_MCS, bandwidth = 10, transmission_mode = TM1, ca_band2 = 0)
-
- def test_downlink_tm4_band7_10MHz_dynamic(self):
-
- self.do_test(direction = DIRECTION_DOWNLINK, band = 7, scheduling = SCHEDULING_DYNAMIC, bandwidth = 10, transmission_mode = TM4, ca_band2 = 0)
-
- def test_uplink_tm4_band7_10MHz_min_mcs(self):
-
- self.do_test(direction = DIRECTION_UPLINK, band = 7, scheduling = SCHEDULING_MIN_MCS, bandwidth = 10, transmission_mode = TM4, ca_band2 = 0)
-
- def test_uplink_tm4_band7_10MHz_max_mcs(self):
-
- self.do_test(direction = DIRECTION_UPLINK, band = 7, scheduling = SCHEDULING_MAX_MCS, bandwidth = 10, transmission_mode = TM4, ca_band2 = 0)
-
- def test_downlink_tm1_band7_20MHz_dynamic(self):
-
- self.do_test(direction = DIRECTION_DOWNLINK, band = 7, scheduling = SCHEDULING_DYNAMIC, bandwidth = 20, transmission_mode = TM1, ca_band2 = 0)
-
- def test_uplink_tm1_band7_20MHz_min_mcs(self):
-
- self.do_test(direction = DIRECTION_UPLINK, band = 7, scheduling = SCHEDULING_MIN_MCS, bandwidth = 20, transmission_mode = TM1, ca_band2 = 0)
-
- def test_uplink_tm1_band7_20MHz_max_mcs(self):
-
- self.do_test(direction = DIRECTION_UPLINK, band = 7, scheduling = SCHEDULING_MAX_MCS, bandwidth = 20, transmission_mode = TM1, ca_band2 = 0)
-
- def test_downlink_tm4_band7_20MHz_dynamic(self):
-
- self.do_test(direction = DIRECTION_DOWNLINK, band = 7, scheduling = SCHEDULING_DYNAMIC, bandwidth = 20, transmission_mode = TM4, ca_band2 = 0)
-
- def test_uplink_tm4_band7_20MHz_min_mcs(self):
-
- self.do_test(direction = DIRECTION_UPLINK, band = 7, scheduling = SCHEDULING_MIN_MCS, bandwidth = 20, transmission_mode = TM4, ca_band2 = 0)
-
- def test_uplink_tm4_band7_20MHz_max_mcs(self):
-
- self.do_test(direction = DIRECTION_UPLINK, band = 7, scheduling = SCHEDULING_MAX_MCS, bandwidth = 20, transmission_mode = TM4, ca_band2 = 0)
-
- def test_downlink_tm1_band13_5MHz_dynamic(self):
-
- self.do_test(direction = DIRECTION_DOWNLINK, band = 13, scheduling = SCHEDULING_DYNAMIC, bandwidth = 5, transmission_mode = TM1, ca_band2 = 0)
-
- def test_uplink_tm1_band13_5MHz_min_mcs(self):
-
- self.do_test(direction = DIRECTION_UPLINK, band = 13, scheduling = SCHEDULING_MIN_MCS, bandwidth = 5, transmission_mode = TM1, ca_band2 = 0)
-
- def test_uplink_tm1_band13_5MHz_max_mcs(self):
-
- self.do_test(direction = DIRECTION_UPLINK, band = 13, scheduling = SCHEDULING_MAX_MCS, bandwidth = 5, transmission_mode = TM1, ca_band2 = 0)
-
- def test_downlink_tm4_band13_5MHz_dynamic(self):
-
- self.do_test(direction = DIRECTION_DOWNLINK, band = 13, scheduling = SCHEDULING_DYNAMIC, bandwidth = 5, transmission_mode = TM4, ca_band2 = 0)
-
- def test_uplink_tm4_band13_5MHz_min_mcs(self):
-
- self.do_test(direction = DIRECTION_UPLINK, band = 13, scheduling = SCHEDULING_MIN_MCS, bandwidth = 5, transmission_mode = TM4, ca_band2 = 0)
-
- def test_uplink_tm4_band13_5MHz_max_mcs(self):
-
- self.do_test(direction = DIRECTION_UPLINK, band = 13, scheduling = SCHEDULING_MAX_MCS, bandwidth = 5, transmission_mode = TM4, ca_band2 = 0)
-
- def test_downlink_tm1_band13_10MHz_dynamic(self):
-
- self.do_test(direction = DIRECTION_DOWNLINK, band = 13, scheduling = SCHEDULING_DYNAMIC, bandwidth = 10, transmission_mode = TM1, ca_band2 = 0)
-
- def test_uplink_tm1_band13_10MHz_min_mcs(self):
-
- self.do_test(direction = DIRECTION_UPLINK, band = 13, scheduling = SCHEDULING_MIN_MCS, bandwidth = 10, transmission_mode = TM1, ca_band2 = 0)
-
- def test_uplink_tm1_band13_10MHz_max_mcs(self):
-
- self.do_test(direction = DIRECTION_UPLINK, band = 13, scheduling = SCHEDULING_MAX_MCS, bandwidth = 10, transmission_mode = TM1, ca_band2 = 0)
-
- def test_downlink_tm4_band13_10MHz_dynamic(self):
-
- self.do_test(direction = DIRECTION_DOWNLINK, band = 13, scheduling = SCHEDULING_DYNAMIC, bandwidth = 10, transmission_mode = TM4, ca_band2 = 0)
-
- def test_uplink_tm4_band13_10MHz_min_mcs(self):
-
- self.do_test(direction = DIRECTION_UPLINK, band = 13, scheduling = SCHEDULING_MIN_MCS, bandwidth = 10, transmission_mode = TM4, ca_band2 = 0)
-
- def test_uplink_tm4_band13_10MHz_max_mcs(self):
-
- self.do_test(direction = DIRECTION_UPLINK, band = 13, scheduling = SCHEDULING_MAX_MCS, bandwidth = 10, transmission_mode = TM4, ca_band2 = 0)
-
- def test_downlink_ca_20MHz(self):
- self.do_test_ca(DIRECTION_DOWNLINK)
-
- def test_uplink_ca_20MHz(self):
- self.do_test_ca(DIRECTION_UPLINK)
-
- """ Tests End """
diff --git a/acts/tests/google/power/tel/lab/PowerTelTrafficTest.py b/acts/tests/google/power/tel/lab/PowerTelTrafficTest.py
new file mode 100644
index 0000000..7beae2b
--- /dev/null
+++ b/acts/tests/google/power/tel/lab/PowerTelTrafficTest.py
@@ -0,0 +1,364 @@
+#!/usr/bin/env python3.4
+#
+# Copyright 2018 - The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the 'License');
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an 'AS IS' BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import scapy.all as scapy
+import time
+from acts.test_utils.power import IperfHelper as IPH
+from acts.test_utils.power import PowerCellularLabBaseTest as PWCEL
+from acts.test_utils.tel.tel_test_utils import WIFI_CONFIG_APBAND_2G
+from acts.test_utils.tel.tel_test_utils import WIFI_CONFIG_APBAND_5G
+from acts.test_utils.wifi import wifi_power_test_utils as wputils
+from acts.test_utils.wifi import wifi_test_utils as wutils
+
+
+class PowerTelTrafficTest(PWCEL.PowerCellularLabBaseTest):
+ """ Cellular traffic power test.
+
+    Inherits from PowerCellularLabBaseTest. Parses configuration specific
+    to this kind of test and contains methods to start data traffic
+    between a local iPerf instance and one running on the DUT.
+
+ """
+
+ # Keywords for test name parameters
+ PARAM_TRAFFIC_PATTERN = 'pattern'
+
+    # Extra seconds to keep iperf running beyond the power measurement window
+ IPERF_MARGIN = 10
+
+ def __init__(self, controllers):
+ """ Class initialization.
+
+ Sets test parameters to initial values.
+ """
+
+ super().__init__(controllers)
+
+ # Traffic pattern variables. Values are set later
+ # when reading config from test name.
+ self.traffic_pattern_dl = 0
+ self.traffic_pattern_ul = 0
+
+ def setup_test(self):
+ """ Executed before every test case.
+
+ Parses test configuration from the test name and prepares
+ the simulation for measurement.
+ """
+
+ # Call parent method first to setup simulation
+ if not super().setup_test():
+ return False
+
+ try:
+ values = self.consume_parameter(self.PARAM_TRAFFIC_PATTERN, 2)
+ self.traffic_pattern_dl = int(values[1])
+ self.traffic_pattern_ul = int(values[2])
+        except Exception:
+            self.log.error(
+                "The test name has to include the parameter {} followed by "
+                "two int values separated by underscores, indicating the DL "
+                "and UL traffic percentages.".format(self.PARAM_TRAFFIC_PATTERN))
+ return False
+
+ return True
+
+ def teardown_test(self):
+ """Tear down necessary objects after test case is finished.
+
+ """
+
+ for ips in self.iperf_servers:
+ ips.stop()
+
+ def power_tel_traffic_test(self):
+ """ Measures power and throughput during data transmission.
+
+        Measurement step in this test. Starts the iPerf client on the DUT and
+        then initiates the power measurement. After that, the DUT is
+        reconnected and the iPerf results are collected. Pass or fail is
+        decided against a threshold value.
+ """
+
+ # Start data traffic
+ client_iperf_helper = self.start_tel_traffic(self.dut)
+
+ # Measure power
+ self.collect_power_data()
+
+ # Wait for iPerf to finish
+ time.sleep(self.IPERF_MARGIN + 2)
+
+ # Collect throughput measurement
+ throughput = []
+ for iph in client_iperf_helper:
+            self.log.info('Setting: {}'.format(iph.iperf_args))
+ throughput.append(iph.process_iperf_results(self.dut, self.log, self.iperf_servers, self.test_name))
+
+ # Check if power measurement is below the required value
+ # self.pass_fail_check()
+
+ return self.test_result, throughput
+
+ def start_tel_traffic(self, client_host):
+ """ Starts iPerf in the indicated device and initiates traffic.
+
+ Starts the required iperf clients and servers according to the traffic
+ pattern config in the current test.
+
+ Args:
+ client_host: Android device handler in which to start the iperf client.
+
+ Returns:
+ A list of iperf helpers.
+ """
+
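+        # Illustrative example (assuming the usual test-name convention): a
+        # test name containing 'pattern_75_25' yields traffic_pattern_dl=75
+        # and traffic_pattern_ul=25, so one DL and one UL iperf stream are
+        # started, each shaped to its share of the simulated maximum
+        # throughput.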
+ # The iPerf server is hosted in this computer
+ self.iperf_server_address = scapy.get_if_addr(self.pkt_sender.interface)
+
+ # Start iPerf traffic
+ iperf_helpers = []
+
+ if self.traffic_pattern_ul > 0 and self.traffic_pattern_dl > 0:
+ # Bidirectional traffic
+ # Calculate traffic limit to do traffic shaping
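+            # Scale to ~98% of the simulator's theoretical maximum (leaving a
+            # little headroom so the target rate stays reachable), then by the
+            # DL/UL percentages parsed from the test name.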
+ max_t_ul = self.simulation.maximum_uplink_throughput() * 0.98 * self.traffic_pattern_ul / 100
+ max_t_dl = self.simulation.maximum_downlink_throughput() * 0.98 * self.traffic_pattern_dl / 100
+ # Initiate traffic
+ iperf_helpers.append(self.start_iperf_traffic(client_host, server_idx=len(iperf_helpers), traffic_direction='UL', bandwidth=max_t_ul))
+ iperf_helpers.append(self.start_iperf_traffic(client_host, server_idx=len(iperf_helpers), traffic_direction='DL', bandwidth=max_t_dl))
+ elif self.traffic_pattern_ul > 0:
+ # Uplink traffic
+ iperf_helpers.append(self.start_iperf_traffic(client_host, server_idx=len(iperf_helpers), traffic_direction='UL'))
+ elif self.traffic_pattern_dl > 0:
+ # Downlink traffic
+ iperf_helpers.append(self.start_iperf_traffic(client_host, server_idx=len(iperf_helpers), traffic_direction='DL'))
+
+ return iperf_helpers
+
+ def start_iperf_traffic(self, client_host, server_idx, traffic_direction, bandwidth=0):
+ """Starts iPerf data traffic.
+
+ Starts an iperf client in an android device and a server locally.
+
+ Args:
+ client_host: android device handler in which to start the iperf client
+ server_idx: id of the iperf server to connect to
+ traffic_direction: has to be either 'UL' or 'DL'
+ bandwidth: bandwidth limit for data traffic
+
+ Returns:
+ An IperfHelper object for the started client/server pair.
+ """
+
+ config = {
+ 'traffic_type': 'TCP',
+ 'duration': self.mon_duration + self.mon_offset + self.IPERF_MARGIN,
+ 'start_meas_time': 4,
+ 'server_idx': server_idx,
+ 'port': self.iperf_servers[server_idx].port,
+ 'traffic_direction': traffic_direction
+ }
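+ # Note on the duration (illustrative values, not from the config):
+ # with mon_duration = 60 s and mon_offset = 10 s, iPerf runs for
+ # 60 + 10 + 10 = 80 s, so traffic is expected to outlive the power
+ # measurement window by IPERF_MARGIN seconds.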
+
+ # If bandwidth is equal to zero, no bandwidth requirement is set
+ if bandwidth > 0:
+ config['bandwidth'] = bandwidth
+
+ iph = IPH.IperfHelper(config)
+
+ # Start the server locally
+ self.iperf_servers[server_idx].start()
+
+ # Start the client on the Android device
+ wputils.run_iperf_client_nonblocking(client_host, self.iperf_server_address, iph.iperf_args)
+
+ return iph
+
+
+class PowerTelRvRTest(PowerTelTrafficTest):
+ """ Obtains rate versus range (RvR) curves while measuring power consumption.
+
+ Uses PowerTelTrafficTest as a base class.
+ """
+
+ # Test name configuration keywords
+ PARAM_SWEEP = "sweep"
+ PARAM_SWEEP_UPLINK = "uplink"
+ PARAM_SWEEP_DOWNLINK = "downlink"
+
+ # Sweep values. These need to be set by the test function or a
+ # child class before the test starts.
+ downlink_power_sweep = None
+ uplink_power_sweep = None
+
+ def setup_test(self):
+ """ Executed before every test case.
+
+ Parses test configuration from the test name and prepares
+ the simulation for measurement.
+ """
+
+ # Call parent method first to setup simulation
+ if not super().setup_test():
+ return False
+
+
+ # Get which power value to sweep from config
+
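+ # Illustrative (hypothetical) example: a test name containing
+ # 'sweep_uplink' selects the uplink TX power sweep, while
+ # 'sweep_downlink' selects the downlink RX power sweep.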
+ try:
+ values = self.consume_parameter(self.PARAM_SWEEP, 1)
+
+ if values[1] == self.PARAM_SWEEP_UPLINK:
+ self.sweep = self.PARAM_SWEEP_UPLINK
+ elif values[1] == self.PARAM_SWEEP_DOWNLINK:
+ self.sweep = self.PARAM_SWEEP_DOWNLINK
+ else:
+ raise ValueError()
+ except (IndexError, ValueError):
+ self.log.error("The test name has to include parameter {} followed by either {} or {}.".format(
+ self.PARAM_SWEEP,
+ self.PARAM_SWEEP_DOWNLINK,
+ self.PARAM_SWEEP_UPLINK)
+ )
+ return False
+
+ return True
+
+ def power_tel_rvr_test(self):
+ """ Main function for the RvR test.
+
+ Produces the RvR curve according to the indicated sweep values.
+ """
+
+ if self.sweep == self.PARAM_SWEEP_DOWNLINK:
+ sweep_range = self.downlink_power_sweep
+ elif self.sweep == self.PARAM_SWEEP_UPLINK:
+ sweep_range = self.uplink_power_sweep
+
+ current = []
+ throughput = []
+
+ for pw in sweep_range:
+
+ if self.sweep == self.PARAM_SWEEP_DOWNLINK:
+ self.simulation.set_downlink_rx_power(pw)
+ elif self.sweep == self.PARAM_SWEEP_UPLINK:
+ self.simulation.set_uplink_tx_power(pw)
+
+ i, t = self.power_tel_traffic_test()
+ self.log.info("---------------------")
+ self.log.info("{} -- {} --".format(self.sweep, pw))
+ self.log.info("{} ----- {}".format(i, t[0]))
+ self.log.info("---------------------")
+
+ current.append(i)
+ throughput.append(t[0])
+
+ self.log.info("Sweep range: {}".format(sweep_range))
+ self.log.info("Current: {}".format(current))
+ self.log.info("Throughput: {}".format(throughput))
+
+
+class PowerTelTetheringTest(PowerTelTrafficTest):
+ """ Cellular traffic over WiFi tethering power test.
+
+ Treated as a different case of data traffic. Inherits from PowerTelTrafficTest
+ and only needs to make a change in the measurement step.
+ """
+
+ # Test name configuration keywords
+ PARAM_WIFI_BAND = "wifiband"
+ PARAM_2G_BAND = "2g"
+ PARAM_5G_BAND = "5g"
+
+ def __init__(self, controllers):
+ """ Class initialization
+
+ Set attributes to default values.
+ """
+
+ super().__init__(controllers)
+
+ self.wifi_band = WIFI_CONFIG_APBAND_2G
+
+ def power_tel_tethering_test(self):
+ """ Measure power and throughput during data transmission.
+
+ Starts WiFi tethering on the DUT and connects a second device. The
+ iPerf client is then hosted on the second Android device.
+
+ """
+
+ # Setup tethering
+
+ # The second device needs to have a country code to be able to
+ # use the 5GHz band
+ self.android_devices[1].droid.wifiSetCountryCode('US')
+
+ self.network = {"SSID": "Pixel_1030", "password": "1234567890"}
+
+ wutils.start_wifi_tethering(self.dut,
+ self.network[wutils.WifiEnums.SSID_KEY],
+ self.network[wutils.WifiEnums.PWD_KEY],
+ self.wifi_band)
+
+ wutils.wifi_connect(self.android_devices[1], self.network, check_connectivity=False)
+
+ # Start data traffic
+ client_iperf_helper = self.start_tel_traffic(self.android_devices[1])
+
+ # Measure power
+ self.collect_power_data()
+
+ # Wait for iPerf to finish
+ time.sleep(self.IPERF_MARGIN + 2)
+
+ # Collect throughput measurement
+ for iph in client_iperf_helper:
+ self.log.info('Setting: {}'.format(iph.iperf_args))
+ iph.process_iperf_results(self.android_devices[1], self.log, self.iperf_servers, self.test_name)
+
+ # Checks if power is below the required threshold.
+ self.pass_fail_check()
+
+
+ def setup_test(self):
+ """ Executed before every test case.
+
+ Parses test configuration from the test name and prepares
+ the simulation for measurement.
+ """
+
+ # Call parent method first to setup simulation
+ if not super().setup_test():
+ return False
+
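+ # Illustrative (hypothetical) example: a test name containing
+ # 'wifiband_5g' makes the tethering hotspot use the 5 GHz band.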
+ try:
+ values = self.consume_parameter(self.PARAM_WIFI_BAND, 1)
+
+ if values[1] == self.PARAM_2G_BAND:
+ self.wifi_band = WIFI_CONFIG_APBAND_2G
+ elif values[1] == self.PARAM_5G_BAND:
+ self.wifi_band = WIFI_CONFIG_APBAND_5G
+ else:
+ raise ValueError()
+ except (IndexError, ValueError):
+ self.log.error("The test name has to include parameter {} followed by either {} or {}.".format(
+ self.PARAM_WIFI_BAND,
+ self.PARAM_2G_BAND,
+ self.PARAM_5G_BAND)
+ )
+ return False
+
+ return True
diff --git a/acts/tests/google/power/tel/lab/genTests.py b/acts/tests/google/power/tel/lab/genTests.py
deleted file mode 100644
index d88df11..0000000
--- a/acts/tests/google/power/tel/lab/genTests.py
+++ /dev/null
@@ -1,38 +0,0 @@
-
-for b in [4, 7, 13]:
- if b == 4:
- bwl = [1.4, 3, 5, 10, 20]
- elif b == 7:
- bwl = [5, 10, 20]
- elif b == 13:
- bwl = [5, 10]
-
- for bw in bwl:
-
- if bw == 1.4:
- sbw = "14"
- else:
- sbw = str(bw)
-
- for tm in ['tm1', 'tm4']:
-
- for direction in ['downlink', 'uplink']:
-
- if direction == 'downlink':
- schedulingl = ['dynamic']
- sdirection = 'DIRECTION_DOWNLINK'
- elif direction == 'uplink':
- schedulingl = ['min_mcs', 'max_mcs']
- sdirection = 'DIRECTION_UPLINK'
-
- for scheduling in schedulingl:
-
- print(" def test_" + direction + "_" + tm + "_band" + str(b) + "_" + sbw + "MHz_" + scheduling + "(self):")
- if scheduling == 'dynamic':
- sscheduling = "SCHEDULING_DYNAMIC"
- elif scheduling == 'min_mcs':
- sscheduling = "SCHEDULING_MIN_MCS"
- elif scheduling == 'max_mcs':
- sscheduling = "SCHEDULING_MAX_MCS"
-
- print("\n self.do_test(direction = " + sdirection + ", band = " + str(b) + ", scheduling = " + sscheduling + ", bandwidth = " + str(bw) + ", transmission_mode = " + str(tm).upper() + ", ca_band2 = 0)\n")
diff --git a/acts/tests/google/tel/live/TelLiveDataTest.py b/acts/tests/google/tel/live/TelLiveDataTest.py
index d3a92bc..5f7d1dc 100644
--- a/acts/tests/google/tel/live/TelLiveDataTest.py
+++ b/acts/tests/google/tel/live/TelLiveDataTest.py
@@ -765,6 +765,7 @@
stop_wifi_tethering(self.log, self.provider)
self.provider.log.info("Provider disable bluetooth")
disable_bluetooth(self.provider.droid)
+ time.sleep(10)
for ad in self.clients:
ad.log.info(
@@ -781,6 +782,7 @@
"Provider failed to enable data connection.")
return False
+ time.sleep(10)
self.log.info("Verify internet")
if not self._test_internet_connection(
client_status=False, provider_status=True):
@@ -937,7 +939,7 @@
self.log.info("===== Toggling provider data connection =====")
self.provider.log.info("Disable provider data connection")
self.provider.droid.telephonyToggleDataConnection(False)
-
+ time.sleep(10)
if not self._test_internet_connection(False, False):
return False
self.provider.log.info("Enable provider data connection")
diff --git a/acts/tests/google/tel/live/TelLiveImsSettingsTest.py b/acts/tests/google/tel/live/TelLiveImsSettingsTest.py
index 8520d3b..d9309b7 100644
--- a/acts/tests/google/tel/live/TelLiveImsSettingsTest.py
+++ b/acts/tests/google/tel/live/TelLiveImsSettingsTest.py
@@ -279,7 +279,8 @@
else:
self.dut.log.info("WFC mode is %s as expected",
default_wfc_mode)
- if self.default_wfc_enabled:
+ if self.default_wfc_enabled and \
+ default_wfc_mode == WFC_MODE_WIFI_PREFERRED:
if not self.check_call_in_wfc():
result = False
elif not airplane_mode:
diff --git a/acts/tests/google/tel/live/TelLiveSmsTest.py b/acts/tests/google/tel/live/TelLiveSmsTest.py
index 78894e8..3fac9af 100644
--- a/acts/tests/google/tel/live/TelLiveSmsTest.py
+++ b/acts/tests/google/tel/live/TelLiveSmsTest.py
@@ -2867,8 +2867,9 @@
"""
ads = self.android_devices
expected_result = False
- if get_operator_name(self.log, ads[0]) == "vzw":
+ if get_operator_name(self.log, ads[0]) in ["vzw", "Verizon"]:
expected_result = True
+ ads[0].log.info("Expected Result is %s", expected_result)
try:
tasks = [(phone_setup_voice_general, (self.log, ads[0])),
@@ -2906,7 +2907,7 @@
"""
ads = self.android_devices
expected_result = False
- if get_operator_name(self.log, ads[0]) == "vzw":
+ if get_operator_name(self.log, ads[0]) in ["vzw", "Verizon"]:
expected_result = True
try:
tasks = [(phone_setup_voice_general, (self.log, ads[0])),
diff --git a/acts/tests/google/tel/live/TelLiveVoiceTest.py b/acts/tests/google/tel/live/TelLiveVoiceTest.py
index 6141df6..79df719 100644
--- a/acts/tests/google/tel/live/TelLiveVoiceTest.py
+++ b/acts/tests/google/tel/live/TelLiveVoiceTest.py
@@ -3408,8 +3408,7 @@
ad_download = ads[0]
if not start_youtube_video(ad_download):
- ad_download.log.error("Fail to bring up youtube video")
- return False
+ ad_download.log.warning("Fail to bring up youtube video")
if not call_setup_teardown(self.log, ad_caller, ad_callee, ad_caller,
None, None, 30):
diff --git a/acts/tests/google/tel/live/TelWifiDataTest.py b/acts/tests/google/tel/live/TelWifiDataTest.py
index 45b1491..85ce8d7 100644
--- a/acts/tests/google/tel/live/TelWifiDataTest.py
+++ b/acts/tests/google/tel/live/TelWifiDataTest.py
@@ -440,6 +440,51 @@
return False
return True
+ @test_tracker_info(uuid="ba183bde-6763-411a-ad29-7f1e96479950")
+ @TelephonyBaseTest.tel_test_wrap
+ def test_lte_oos_lte_camping(self):
+ """Test for Out Of Service Scenarios
+
+ Steps:
+ 1. Set WiFi and Cell available
+ 2. Setup Attenuator as No Service Scenario
+ 3. Verify there is no LTE or WiFi Signal
+ 4. Wait for 2 mins
+ 5. Setup Attenuator as Cellular only service
+ 6. Verify Data Connection
+
+ Expected Results:
+ 1. Device should camp back on LTE after OOS
+ 2. Data should be in working state
+
+ Returns:
+ True if Pass. False if fail.
+ """
+ ad = self.android_devices[0]
+ if not self._basic_connectivity_check():
+ self.log.error("Basic Connectivity Check Failed")
+ return False
+ self._atten_setup_no_service()
+ ad.log.info("Waiting for 1 min")
+ time.sleep(60)
+ if (wait_for_cell_data_connection(self.log, ad, True) or
+ wait_for_wifi_data_connection(self.log, ad, True)):
+ ad.log.error("Data is available, Expecting no Cellular/WiFi Signal")
+ get_telephony_signal_strength(ad)
+ get_wifi_signal_strength(ad)
+ return False
+ ad.log.info("Waiting for 2 mins")
+ time.sleep(120)
+ self._atten_setup_lte_only()
+ ad.on_mobile_data = True
+ if (not wait_for_cell_data_connection(self.log, ad, True)
+ or not verify_internet_connection(self.log, ad)):
+ ad.log.error("Data not on LTE")
+ get_telephony_signal_strength(ad)
+ get_wifi_signal_strength(ad)
+ return False
+ return True
+
@test_tracker_info(uuid="c5581e04-4589-4f32-b1f9-76f0b16666ce")
@TelephonyBaseTest.tel_test_wrap
def test_modem_power_poor_coverage(self):
diff --git a/acts/tests/google/wifi/WifiAutoUpdateTest.py b/acts/tests/google/wifi/WifiAutoUpdateTest.py
new file mode 100755
index 0000000..f9e5caa
--- /dev/null
+++ b/acts/tests/google/wifi/WifiAutoUpdateTest.py
@@ -0,0 +1,241 @@
+#!/usr/bin/env python3.4
+#
+# Copyright 2017 - The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import itertools
+import pprint
+import queue
+import time
+
+import acts.base_test
+import acts.signals as signals
+import acts.test_utils.wifi.wifi_test_utils as wutils
+import acts.utils
+
+from acts import asserts
+from acts.libs.ota import ota_updater
+from acts.test_decorators import test_tracker_info
+from acts.test_utils.wifi.WifiBaseTest import WifiBaseTest
+
+WifiEnums = wutils.WifiEnums
+# Default timeout used for reboot, toggle WiFi and Airplane mode,
+# for the system to settle down after the operation.
+DEFAULT_TIMEOUT = 10
+BAND_2GHZ = 0
+BAND_5GHZ = 1
+
+
+class WifiAutoUpdateTest(WifiBaseTest):
+ """Tests for APIs in Android's WifiManager class.
+
+ Test Bed Requirement:
+ * One Android device
+ * Several Wi-Fi networks visible to the device, including an open Wi-Fi
+ network.
+ """
+
+ def __init__(self, controllers):
+ WifiBaseTest.__init__(self, controllers)
+ self.tests = (
+ "test_check_wifi_state_after_au",
+ "test_verify_networks_after_au",
+ "test_all_networks_connectable_after_au",
+ "test_connection_to_new_networks",
+ "test_check_wifi_toggling_after_au",
+ "test_reset_wifi_after_au")
+
+ def setup_class(self):
+ super(WifiAutoUpdateTest, self).setup_class()
+ ota_updater.initialize(self.user_params, self.android_devices)
+ self.dut = self.android_devices[0]
+ wutils.wifi_test_device_init(self.dut)
+ req_params = []
+ opt_param = [
+ "open_network", "reference_networks", "iperf_server_address"
+ ]
+ self.unpack_userparams(
+ req_param_names=req_params, opt_param_names=opt_param)
+
+ if "AccessPoint" in self.user_params:
+ self.legacy_configure_ap_and_start()
+
+ asserts.assert_true(
+ len(self.reference_networks) > 0,
+ "Need at least one reference network with psk.")
+ asserts.assert_true(
+ len(self.open_network) > 0,
+ "Need at least one open network.")
+ wutils.wifi_toggle_state(self.dut, True)
+
+ self.wifi_config_list = []
+
+ # Set up WiFi and add a few open and WPA networks before OTA.
+ self.add_network_and_enable(self.open_network[0]['2g'])
+ self.add_network_and_enable(self.reference_networks[0]['5g'])
+
+ # Add a few dummy networks to the list.
+ self.add_and_enable_dummy_networks()
+
+ # Run OTA below; if the OTA fails, abort all tests.
+ try:
+ ota_updater.update(self.dut)
+ except Exception as err:
+ raise signals.TestSkipClass(
+ "Failed to apply OTA update. Aborting tests: %s" % err)
+
+ def setup_test(self):
+ self.dut.droid.wakeLockAcquireBright()
+ self.dut.droid.wakeUpNow()
+
+ def teardown_test(self):
+ self.dut.droid.wakeLockRelease()
+ self.dut.droid.goToSleepNow()
+
+ def on_fail(self, test_name, begin_time):
+ self.dut.take_bug_report(test_name, begin_time)
+ self.dut.cat_adb_log(test_name, begin_time)
+
+ def teardown_class(self):
+ if "AccessPoint" in self.user_params:
+ del self.user_params["reference_networks"]
+ del self.user_params["open_network"]
+
+ """Helper Functions"""
+
+ def add_network_and_enable(self, network):
+ """Add a network and enable it.
+
+ Args:
+ network : Network details for the network to be added.
+
+ """
+ ret = self.dut.droid.wifiAddNetwork(network)
+ asserts.assert_true(ret != -1, "Add network %r failed" % network)
+ self.wifi_config_list.append({
+ WifiEnums.SSID_KEY: network[WifiEnums.SSID_KEY],
+ WifiEnums.NETID_KEY: ret})
+ self.dut.droid.wifiEnableNetwork(ret, 0)
+
+ def add_and_enable_dummy_networks(self, num_networks=5):
+ """Add some dummy networks to the device and enable them.
+
+ Args:
+ num_networks: Number of networks to add.
+ """
+ ssid_name_base = "dummy_network_"
+ for i in range(0, num_networks):
+ network = {}
+ network[WifiEnums.SSID_KEY] = ssid_name_base + str(i)
+ network[WifiEnums.PWD_KEY] = "dummynet_password"
+ self.add_network_and_enable(network)
+
+ def check_networks_after_autoupdate(self, networks):
+ """Verify that all previously configured networks are persistent after
+ reboot.
+
+ Args:
+ networks: List of network dicts.
+
+ Return:
+ None. Raises TestFailure.
+
+ """
+ network_info = self.dut.droid.wifiGetConfiguredNetworks()
+ if len(network_info) != len(networks):
+ msg = (
+ "Number of configured networks before and after Auto-update "
+ "don't match. \nBefore reboot = %s \n After reboot = %s" %
+ (networks, network_info))
+ raise signals.TestFailure(msg)
+ current_count = 0
+ # For each network, check if it exists in configured list after Auto-
+ # update.
+ for network in networks:
+ exists = wutils.match_networks({
+ WifiEnums.SSID_KEY: network[WifiEnums.SSID_KEY]
+ }, network_info)
+ if not len(exists):
+ raise signals.TestFailure("%s network is not present in the"
+ " configured list after Auto-update" %
+ network[WifiEnums.SSID_KEY])
+ # Get the new network id for each network after reboot.
+ network[WifiEnums.NETID_KEY] = exists[0]['networkId']
+
+ """Tests"""
+
+ @test_tracker_info(uuid="9ff1f01e-e5ff-408b-9a95-29e87a2df2d8")
+ def test_check_wifi_state_after_au(self):
+ """Check if the state of WiFi is enabled after Auto-update."""
+ if not self.dut.droid.wifiCheckState():
+ raise signals.TestFailure("WiFi is disabled after Auto-update!!!")
+
+ @test_tracker_info(uuid="e3ebdbba-71dd-4281-aef8-5b3d42b88770")
+ def test_verify_networks_after_au(self):
+ """Check if the previously added networks are intact.
+
+ Steps:
+ The number of networks should be the same, and each network should match.
+
+ """
+ self.check_networks_after_autoupdate(self.wifi_config_list)
+
+ @test_tracker_info(uuid="b8e47a4f-62fe-4a0e-b999-27ae1ebf4d19")
+ def test_connection_to_new_networks(self):
+ """Check if we can connect to new networks after Auto-update.
+
+ Steps:
+ 1. Connect to a PSK network.
+ 2. Connect to an open network.
+ 3. Forget the networks added in 1 & 2.
+ TODO: (@bmahadev) Add WEP network once it's ready.
+
+ """
+ wutils.connect_to_wifi_network(self.dut, self.open_network[0]['5g'])
+ wutils.connect_to_wifi_network(self.dut, self.reference_networks[0]['2g'])
+ wutils.wifi_forget_network(self.dut,
+ self.reference_networks[0]['2g'][WifiEnums.SSID_KEY])
+ wutils.wifi_forget_network(self.dut,
+ self.open_network[0]['5g'][WifiEnums.SSID_KEY])
+
+ @test_tracker_info(uuid="1d8309e4-d5a2-4f48-ba3b-895a58c9bf3a")
+ def test_all_networks_connectable_after_au(self):
+ """Check if previously added networks are connectable.
+
+ Steps:
+ 1. Connect to previously added PSK network using network id.
+ 2. Connect to previously added open network using network id.
+ TODO: (@bmahadev) Add WEP network once it's ready.
+
+ """
+ for network in self.wifi_config_list:
+ if 'dummy' not in network[WifiEnums.SSID_KEY]:
+ if not wutils.connect_to_wifi_network_with_id(self.dut,
+ network[WifiEnums.NETID_KEY],
+ network[WifiEnums.SSID_KEY]):
+ raise signals.TestFailure("Failed to connect to %s after "
+ "Auto-update" % network[WifiEnums.SSID_KEY])
+
+ @test_tracker_info(uuid="05671859-38b1-4dbf-930c-18048971d075")
+ def test_check_wifi_toggling_after_au(self):
+ """Check if WiFi can be toggled ON/OFF after auto-update."""
+ self.log.debug("Going from on to off.")
+ wutils.wifi_toggle_state(self.dut, False)
+ self.log.debug("Going from off to on.")
+ wutils.wifi_toggle_state(self.dut, True)
+
+ @test_tracker_info(uuid="440edf32-4b00-42b0-9811-9f2bc4a83efb")
+ def test_reset_wifi_after_au(self):
+ """Check if WiFi can be reset after auto-update."""
+ wutils.reset_wifi(self.dut)
diff --git a/acts/tests/google/wifi/WifiChaosTest.py b/acts/tests/google/wifi/WifiChaosTest.py
new file mode 100755
index 0000000..9936f12
--- /dev/null
+++ b/acts/tests/google/wifi/WifiChaosTest.py
@@ -0,0 +1,239 @@
+#!/usr/bin/env python3.4
+#
+# Copyright 2018 - The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import sys
+import time
+
+import acts.controllers.packet_capture as packet_capture
+import acts.signals as signals
+import acts.test_utils.wifi.rpm_controller_utils as rutils
+import acts.test_utils.wifi.wifi_datastore_utils as dutils
+import acts.test_utils.wifi.wifi_test_utils as wutils
+
+from acts import asserts
+from acts.base_test import BaseTestClass
+from acts.controllers.ap_lib import hostapd_constants
+from acts.test_decorators import test_tracker_info
+from acts.test_utils.wifi.WifiBaseTest import WifiBaseTest
+
+WifiEnums = wutils.WifiEnums
+
+WAIT_BEFORE_CONNECTION = 1
+SINGLE_BAND = 1
+DUAL_BAND = 2
+
+TIMEOUT = 1
+PING_ADDR = 'www.google.com'
+
+
+class WifiChaosTest(WifiBaseTest):
+ """ Tests for WiFi IoT interoperability.
+
+ Test Bed Requirement:
+ * One Android device
+ * Wi-Fi IOT networks visible to the device
+ """
+
+ def __init__(self, configs):
+ BaseTestClass.__init__(self, configs)
+ self.generate_interop_tests()
+
+ def generate_interop_testcase(self, base_test, testcase_name, ssid_dict):
+ """Generates a single test case from the given data.
+
+ Args:
+ base_test: The base test case function to run.
+ testcase_name: The name of the test case.
+ ssid_dict: The information about the network under test.
+ """
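+ # Assumed shape of an 'interop_ssid' config entry, based on how it is
+ # read here (keys and values are placeholders):
+ # {'SomeVendorAP_2g': {'uuid': '<test tracker uuid>',
+ # 'host': '<datastore hostname>'}}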
+ ssid = testcase_name
+ test_tracker_uuid = ssid_dict[testcase_name]['uuid']
+ hostname = ssid_dict[testcase_name]['host']
+ if not testcase_name.startswith('test_'):
+ testcase_name = 'test_%s' % testcase_name
+ test_case = test_tracker_info(uuid=test_tracker_uuid)(
+ lambda: base_test(ssid, hostname))
+ setattr(self, testcase_name, test_case)
+ self.tests.append(testcase_name)
+
+ def generate_interop_tests(self):
+ for ssid_dict in self.user_params['interop_ssid']:
+ testcase_name = list(ssid_dict)[0]
+ self.generate_interop_testcase(self.interop_base_test,
+ testcase_name, ssid_dict)
+
+ def setup_class(self):
+ self.dut = self.android_devices[0]
+ wutils.wifi_test_device_init(self.dut)
+
+ asserts.assert_true(
+ self.lock_pcap(),
+ "Could not lock a Packet Capture. Aborting Interop test.")
+
+ wutils.wifi_toggle_state(self.dut, True)
+
+ def lock_pcap(self):
+ """Lock a Packet Capture device to use for the test."""
+
+ # Get list of devices from the datastore.
+ locked_pcap = False
+ devices = dutils.get_devices()
+
+ for device in devices:
+
+ device_name = device['hostname']
+ device_type = device['ap_label']
+ if device_type == 'PCAP' and dutils.lock_device(device_name):
+ host = device['ip_address']
+ self.log.info("Locked Packet Capture device: %s" % device_name)
+ locked_pcap = True
+ break
+
+ elif device_type == 'PCAP':
+ self.log.warning("Failed to lock %s PCAP." % device_name)
+
+ if not locked_pcap:
+ return False
+
+ pcap_config = {'ssh_config': {'user': 'root'}}
+ pcap_config['ssh_config']['host'] = host
+
+ self.pcap = packet_capture.PacketCapture(pcap_config)
+ return True
+
+ def setup_test(self):
+ self.dut.droid.wakeLockAcquireBright()
+ self.dut.droid.wakeUpNow()
+
+ def teardown_test(self):
+ self.dut.droid.wakeLockRelease()
+ self.dut.droid.goToSleepNow()
+ wutils.reset_wifi(self.dut)
+
+
+ """Helper Functions"""
+
+ def scan_and_connect_by_id(self, network, net_id):
+ """Scan for network and connect using network id.
+
+ Args:
+ network: dict containing the network configuration, including its SSID.
+ net_id: Integer specifying the network id of the network.
+
+ """
+ ssid = network[WifiEnums.SSID_KEY]
+ wutils.start_wifi_connection_scan_and_ensure_network_found(self.dut,
+ ssid)
+ wutils.wifi_connect_by_id(self.dut, net_id)
+
+ def run_ping(self, sec):
+ """Run ping for given number of seconds.
+
+ Args:
+ sec: Time in seconds to run the ping traffic.
+
+ """
+ self.log.info("Running ping for %d seconds" % sec)
+ result = self.dut.adb.shell("ping -w %d %s" % (sec, PING_ADDR),
+ timeout=sec + 1)
+ self.log.debug("Ping Result = %s" % result)
+ if "100% packet loss" in result:
+ raise signals.TestFailure("100% packet loss during ping")
+
+ def run_connect_disconnect(self, network):
+ """Run connect/disconnect to a given network in loop.
+
+ Args:
+ network: dict, network information.
+
+ Raises: TestFailure if the network connection fails.
+
+ """
+ for attempt in range(1):
+ # TODO:(bmahadev) Change it to 5 or more attempts later.
+ try:
+ begin_time = time.time()
+ ssid = network[WifiEnums.SSID_KEY]
+ net_id = self.dut.droid.wifiAddNetwork(network)
+ asserts.assert_true(net_id != -1, "Add network %s failed" % network)
+ self.log.info("Connecting to %s" % ssid)
+ self.scan_and_connect_by_id(network, net_id)
+ self.run_ping(1)
+ wutils.wifi_forget_network(self.dut, ssid)
+ time.sleep(WAIT_BEFORE_CONNECTION)
+ except:
+ self.log.error("Connection to %s network failed on "
+ "attempt %d." % (ssid, attempt))
+ # TODO:(bmahadev) Uncomment after scan issue is fixed.
+ # self.dut.take_bug_report(ssid, begin_time)
+ # self.dut.cat_adb_log(ssid, begin_time)
+ raise signals.TestFailure("Failed to connect to %s" % ssid)
+
+ def interop_base_test(self, ssid, hostname):
+ """Base test for all the connect-disconnect interop tests.
+
+ Args:
+ ssid: string, SSID of the network to connect to.
+ hostname: string, hostname of the AP.
+
+ Steps:
+ 1. Lock AP in datastore.
+ 2. Turn on AP on the rpm switch.
+ 3. Run connect-disconnect in loop.
+ 4. Turn off AP on the rpm switch.
+ 5. Unlock AP in datastore.
+
+ """
+ network = {}
+ network['password'] = 'password'
+ network['SSID'] = ssid
+ wutils.reset_wifi(self.dut)
+
+ # Lock AP in datastore.
+ self.log.info("Lock AP in datastore")
+ if not dutils.lock_device(hostname):
+ self.log.warning("Failed to lock %s AP. Unlock AP in datastore"
+ " and try again." % hostname)
+ raise signals.TestFailure("Failed to lock AP")
+
+ ap_info = dutils.show_device(hostname)
+
+ band = SINGLE_BAND
+ if ('ssid_2g' in ap_info) and ('ssid_5g' in ap_info):
+ band = DUAL_BAND
+
+ # Get AP RPM attributes and Turn ON AP.
+ rpm_ip = ap_info['rpm_ip']
+ rpm_port = ap_info['rpm_port']
+
+ rutils.turn_on_ap(self.pcap, ssid, rpm_port, rpm_ip=rpm_ip)
+ self.log.info("Finished turning ON AP.")
+ # Experimental to check if 2G connects better.
+ time.sleep(30)
+
+ self.run_connect_disconnect(network)
+
+ # Un-lock only if it's a single band AP or we are running the last band.
+ if (band == SINGLE_BAND) or (
+ band == DUAL_BAND and hostapd_constants.BAND_5G in \
+ sys._getframe().f_code.co_name):
+
+ # Un-Lock AP in datastore.
+ self.log.debug("Un-lock AP in datastore")
+ if not dutils.unlock_device(hostname):
+ self.log.warning("Failed to unlock %s AP. Check AP in datastore." % hostname)
+
+ # Turn OFF AP from the RPM port.
+ rutils.turn_off_ap(rpm_port, rpm_ip)
diff --git a/acts/tests/google/wifi/WifiDiagnosticsTest.py b/acts/tests/google/wifi/WifiDiagnosticsTest.py
new file mode 100644
index 0000000..79fb082
--- /dev/null
+++ b/acts/tests/google/wifi/WifiDiagnosticsTest.py
@@ -0,0 +1,102 @@
+#!/usr/bin/env python3.4
+#
+# Copyright 2018 - The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import itertools
+import pprint
+import queue
+import time
+
+import acts.base_test
+import acts.signals as signals
+import acts.test_utils.wifi.wifi_test_utils as wutils
+import acts.utils
+
+from acts import asserts
+from acts.test_decorators import test_tracker_info
+from acts.test_utils.wifi.WifiBaseTest import WifiBaseTest
+
+WifiEnums = wutils.WifiEnums
+
+DEFAULT_WAIT_TIME = 2
+
+
+class WifiDiagnosticsTest(WifiBaseTest):
+ """
+ Test Bed Requirement:
+ * One Android device
+ * An open Wi-Fi network.
+ * Verbose logging is on.
+ """
+
+ def __init__(self, controllers):
+ WifiBaseTest.__init__(self, controllers)
+
+ def setup_class(self):
+ self.dut = self.android_devices[0]
+ wutils.wifi_test_device_init(self.dut)
+ req_params = []
+ opt_param = ["open_network"]
+ self.unpack_userparams(
+ req_param_names=req_params, opt_param_names=opt_param)
+
+ if "AccessPoint" in self.user_params:
+ self.legacy_configure_ap_and_start()
+ wutils.wifi_toggle_state(self.dut, True)
+ asserts.assert_true(
+ len(self.open_network) > 0,
+ "Need at least one open network.")
+ self.open_network = self.open_network[0]["2g"]
+
+ def setup_test(self):
+ self.dut.droid.wakeLockAcquireBright()
+ self.dut.droid.wakeUpNow()
+
+ def teardown_test(self):
+ self.dut.droid.wakeLockRelease()
+ self.dut.droid.goToSleepNow()
+ wutils.reset_wifi(self.dut)
+
+
+ def on_fail(self, test_name, begin_time):
+ self.dut.take_bug_report(test_name, begin_time)
+ self.dut.cat_adb_log(test_name, begin_time)
+
+ def teardown_class(self):
+ if "AccessPoint" in self.user_params:
+ del self.user_params["open_network"]
+
+ """Tests"""
+
+ @test_tracker_info(uuid="d6f1661b-6732-4939-8c28-f20917774ec0")
+ def test_ringbuffers_are_dumped_during_lsdebug(self):
+ """Steps:
+ 1. Connect to an open network.
+ 2. Delete old files under data/vendor/tombstones/wifi
+ 3. Call lshal debug on wifi hal component
+ 4. Verify that files are created under data/vendor/tombstones/wifi
+ """
+ wutils.connect_to_wifi_network(self.dut, self.open_network)
+ time.sleep(DEFAULT_WAIT_TIME)
+ self.dut.adb.shell("rm data/vendor/tombstones/wifi/*")
+ try:
+ self.dut.adb.shell("lshal debug android.hardware.wifi@1.2::IWifi")
+ except UnicodeDecodeError:
+ # adb.shell tries to decode the output to a string, but the
+ # ringbuffer dumps should already have been generated.
+ self.log.info("Unicode decode error occurred, but this is ok")
+ file_count_plus_one = self.dut.adb.shell("ls -l data/vendor/tombstones/wifi | wc -l")
+ if int(file_count_plus_one) <= 1:
+ raise signals.TestFailure("Failed to create ringbuffer debug files.")
\ No newline at end of file
diff --git a/acts/tests/google/wifi/WifiIOTTest.py b/acts/tests/google/wifi/WifiIOTTest.py
index 6e68d70..ec0a314 100755
--- a/acts/tests/google/wifi/WifiIOTTest.py
+++ b/acts/tests/google/wifi/WifiIOTTest.py
@@ -37,7 +37,6 @@
"""
def __init__(self, controllers):
- self.attenuators = None
WifiBaseTest.__init__(self, controllers)
def setup_class(self):
diff --git a/acts/tests/google/wifi/WifiManagerTest.py b/acts/tests/google/wifi/WifiManagerTest.py
index 41f65c4..9fc7961 100755
--- a/acts/tests/google/wifi/WifiManagerTest.py
+++ b/acts/tests/google/wifi/WifiManagerTest.py
@@ -53,13 +53,14 @@
wutils.wifi_test_device_init(self.dut)
req_params = []
opt_param = [
- "open_network", "reference_networks", "iperf_server_address"
+ "open_network", "reference_networks", "iperf_server_address",
+ "wpa_networks", "wep_networks"
]
self.unpack_userparams(
req_param_names=req_params, opt_param_names=opt_param)
if "AccessPoint" in self.user_params:
- self.legacy_configure_ap_and_start()
+ self.legacy_configure_ap_and_start(wpa_network=True, wep_network=True)
asserts.assert_true(
len(self.reference_networks) > 0,
@@ -69,7 +70,8 @@
self.iperf_server = self.iperf_servers[0]
self.wpapsk_2g = self.reference_networks[0]["2g"]
self.wpapsk_5g = self.reference_networks[0]["5g"]
- self.open_network = self.open_network[0]["2g"]
+ self.open_network_2g = self.open_network[0]["2g"]
+ self.open_network_5g = self.open_network[0]["5g"]
if hasattr(self, 'iperf_server'):
self.iperf_server.start()
@@ -494,12 +496,15 @@
"""Tests"""
@test_tracker_info(uuid="525fc5e3-afba-4bfd-9a02-5834119e3c66")
- def test_toggle_state(self):
+ def test_toggle_wifi_state_and_get_startupTime(self):
"""Test toggling wifi"""
self.log.debug("Going from on to off.")
wutils.wifi_toggle_state(self.dut, False)
self.log.debug("Going from off to on.")
+ startTime = time.time()
wutils.wifi_toggle_state(self.dut, True)
+ startup_time = time.time() - startTime
+ self.log.debug("WiFi was enabled on the device in %s s." % startup_time)
@test_tracker_info(uuid="e9d11563-2bbe-4c96-87eb-ec919b51435b")
def test_toggle_with_screen(self):
@@ -523,9 +528,12 @@
@test_tracker_info(uuid="71556e06-7fb1-4e2b-9338-b01f1f8e286e")
def test_scan(self):
"""Test wifi connection scan can start and find expected networks."""
- ssid = self.open_network[WifiEnums.SSID_KEY]
+ ssid = self.open_network_2g[WifiEnums.SSID_KEY]
wutils.start_wifi_connection_scan_and_ensure_network_found(
- self.dut, ssid);
+ self.dut, ssid)
+ ssid = self.open_network_5g[WifiEnums.SSID_KEY]
+ wutils.start_wifi_connection_scan_and_ensure_network_found(
+ self.dut, ssid)
@test_tracker_info(uuid="3ea09efb-6921-429e-afb1-705ef5a09afa")
def test_scan_with_wifi_off_and_location_scan_on(self):
@@ -534,9 +542,12 @@
wutils.wifi_toggle_state(self.dut, False)
"""Test wifi connection scan can start and find expected networks."""
- ssid = self.open_network[WifiEnums.SSID_KEY]
+ ssid = self.open_network_2g[WifiEnums.SSID_KEY]
wutils.start_wifi_connection_scan_and_ensure_network_found(
- self.dut, ssid);
+ self.dut, ssid)
+ ssid = self.open_network_5g[WifiEnums.SSID_KEY]
+ wutils.start_wifi_connection_scan_and_ensure_network_found(
+ self.dut, ssid)
@test_tracker_info(uuid="770caebe-bcb1-43ac-95b6-5dd52dd90e80")
def test_scan_with_wifi_off_and_location_scan_off(self):
@@ -556,8 +567,8 @@
@test_tracker_info(uuid="a4ad9930-a8fa-4868-81ed-a79c7483e502")
def test_add_network(self):
"""Test wifi connection scan."""
- ssid = self.open_network[WifiEnums.SSID_KEY]
- nId = self.dut.droid.wifiAddNetwork(self.open_network)
+ ssid = self.open_network_2g[WifiEnums.SSID_KEY]
+ nId = self.dut.droid.wifiAddNetwork(self.open_network_2g)
asserts.assert_true(nId > -1, "Failed to add network.")
configured_networks = self.dut.droid.wifiGetConfiguredNetworks()
self.log.debug(
@@ -568,8 +579,8 @@
@test_tracker_info(uuid="aca85551-10ba-4007-90d9-08bcdeb16a60")
def test_forget_network(self):
- ssid = self.open_network[WifiEnums.SSID_KEY]
- nId = self.dut.droid.wifiAddNetwork(self.open_network)
+ ssid = self.open_network_2g[WifiEnums.SSID_KEY]
+ nId = self.dut.droid.wifiAddNetwork(self.open_network_2g)
asserts.assert_true(nId > -1, "Failed to add network.")
configured_networks = self.dut.droid.wifiGetConfiguredNetworks()
self.log.debug(
@@ -834,5 +845,45 @@
Connect to a wifi network, then the same as test_energy_info.
"""
- wutils.wifi_connect(self.dut, self.open_network)
+ wutils.wifi_connect(self.dut, self.open_network_2g)
self.get_energy_info()
+
+ @test_tracker_info(uuid="2622c253-defc-4a35-93a6-ca9d29a8238c")
+ def test_connect_to_wep_2g(self):
+ """Verify DUT can connect to 2GHz WEP network
+
+ Steps:
+ 1. Ensure the 2GHz WEP network is visible in scan result.
+ 2. Connect to the network and validate internet connection.
+ """
+ wutils.connect_to_wifi_network(self.dut, self.wep_networks[0]["2g"])
+
+ @test_tracker_info(uuid="1f2d17a2-e92d-43af-966b-3421c0db8620")
+ def test_connect_to_wep_5g(self):
+ """Verify DUT can connect to 5GHz WEP network
+
+ Steps:
+ 1. Ensure the 5GHz WEP network is visible in scan result.
+ 2. Connect to the network and validate internet connection.
+ """
+ wutils.connect_to_wifi_network(self.dut, self.wep_networks[0]["5g"])
+
+ @test_tracker_info(uuid="4a957952-289d-4657-9882-e1475274a7ff")
+ def test_connect_to_wpa_2g(self):
+ """Verify DUT can connect to 2GHz WPA-PSK network
+
+ Steps:
+ 1. Ensure the 2GHz WPA-PSK network is visible in scan result.
+ 2. Connect to the network and validate internet connection.
+ """
+ wutils.connect_to_wifi_network(self.dut, self.wpa_networks[0]["2g"])
+
+ @test_tracker_info(uuid="612c3c31-a4c5-4014-9a2d-3f4bcc20c0d7")
+ def test_connect_to_wpa_5g(self):
+ """Verify DUT can connect to 5GHz WPA-PSK network
+
+ Steps:
+ 1. Ensure the 5GHz WPA-PSK network is visible in scan result.
+ 2. Connect to the network and validate internet connection.
+ """
+ wutils.connect_to_wifi_network(self.dut, self.wpa_networks[0]["5g"])
diff --git a/acts/tests/google/wifi/WifiNetworkSelectorTest.py b/acts/tests/google/wifi/WifiNetworkSelectorTest.py
index 948f961..ffeb6b5 100644
--- a/acts/tests/google/wifi/WifiNetworkSelectorTest.py
+++ b/acts/tests/google/wifi/WifiNetworkSelectorTest.py
@@ -56,9 +56,12 @@
self.unpack_userparams(
req_param_names=req_params, opt_param_names=opt_param)
- if "AccessPoint" in self.user_params:
+ if hasattr(self, 'access_points'):
self.legacy_configure_ap_and_start(ap_count=2)
+ if hasattr(self, 'packet_capture'):
+ self.configure_packet_capture()
+
def setup_test(self):
#reset and clear all saved networks on the DUT
wutils.reset_wifi(self.dut)
@@ -70,12 +73,22 @@
self.dut.droid.wakeUpNow()
self.dut.ed.clear_all_events()
+ if hasattr(self, 'packet_capture'):
+ self.pcap_pids = wutils.start_pcap(
+ self.packet_capture, 'dual', self.log_path, self.test_name)
+
def teardown_test(self):
#turn off the screen
self.dut.droid.wakeLockRelease()
self.dut.droid.goToSleepNow()
+ def on_pass(self, test_name, begin_time):
+ if hasattr(self, 'packet_capture'):
+ wutils.stop_pcap(self.packet_capture, self.pcap_pids, True)
+
def on_fail(self, test_name, begin_time):
+ if hasattr(self, 'packet_capture'):
+ wutils.stop_pcap(self.packet_capture, self.pcap_pids, False)
self.dut.take_bug_report(test_name, begin_time)
self.dut.cat_adb_log(test_name, begin_time)
diff --git a/acts/tests/google/wifi/WifiPingTest.py b/acts/tests/google/wifi/WifiPingTest.py
new file mode 100644
index 0000000..808730f
--- /dev/null
+++ b/acts/tests/google/wifi/WifiPingTest.py
@@ -0,0 +1,494 @@
+#!/usr/bin/env python3.4
+#
+# Copyright 2017 - The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import json
+import logging
+import os
+import statistics
+import time
+from acts import asserts
+from acts import base_test
+from acts import utils
+from acts.test_utils.wifi import wifi_power_test_utils as wputils
+from acts.test_utils.wifi import wifi_retail_ap as retail_ap
+from acts.test_utils.wifi import wifi_test_utils as wutils
+
+
+class WifiPingTest(base_test.BaseTestClass):
+ """Class for ping-based Wifi performance tests.
+
+ This class implements WiFi ping performance tests such as range and RTT.
+ The class sets up the AP in the desired configurations, configures
+ and connects the phone to the AP, and runs the ping tests while
+ sweeping attenuation. For an example config file to run this test
+ class, see example_connectivity_performance_ap_sta.json.
+ """
+
+ TEST_TIMEOUT = 10
+ SHORT_SLEEP = 1
+ MED_SLEEP = 5
+
+ def __init__(self, controllers):
+ base_test.BaseTestClass.__init__(self, controllers)
+ self.tests = (
+ "test_ping_range_ch1_VHT20", "test_fast_ping_rtt_ch1_VHT20",
+ "test_slow_ping_rtt_ch1_VHT20", "test_ping_range_ch6_VHT20",
+ "test_fast_ping_rtt_ch6_VHT20", "test_slow_ping_rtt_ch6_VHT20",
+ "test_ping_range_ch11_VHT20", "test_fast_ping_rtt_ch11_VHT20",
+ "test_slow_ping_rtt_ch11_VHT20", "test_ping_range_ch36_VHT20",
+ "test_fast_ping_rtt_ch36_VHT20", "test_slow_ping_rtt_ch36_VHT20",
+ "test_ping_range_ch36_VHT40", "test_fast_ping_rtt_ch36_VHT40",
+ "test_slow_ping_rtt_ch36_VHT40", "test_ping_range_ch36_VHT80",
+ "test_fast_ping_rtt_ch36_VHT80", "test_slow_ping_rtt_ch36_VHT80",
+ "test_ping_range_ch40_VHT20", "test_ping_range_ch44_VHT20",
+ "test_ping_range_ch44_VHT40", "test_ping_range_ch48_VHT20",
+ "test_ping_range_ch149_VHT20", "test_fast_ping_rtt_ch149_VHT20",
+ "test_slow_ping_rtt_ch149_VHT20", "test_ping_range_ch149_VHT40",
+ "test_fast_ping_rtt_ch149_VHT40", "test_slow_ping_rtt_ch149_VHT40",
+ "test_ping_range_ch149_VHT80", "test_fast_ping_rtt_ch149_VHT80",
+ "test_slow_ping_rtt_ch149_VHT80", "test_ping_range_ch153_VHT20",
+ "test_ping_range_ch157_VHT20", "test_ping_range_ch157_VHT40",
+ "test_ping_range_ch161_VHT20")
+
+ def setup_class(self):
+ self.client_dut = self.android_devices[-1]
+ req_params = ["ping_test_params", "testbed_params"]
+ opt_params = [
+ "main_network", "RetailAccessPoints", "golden_files_list"
+ ]
+ self.unpack_userparams(req_params, opt_params)
+ self.test_params = self.ping_test_params
+ self.num_atten = self.attenuators[0].instrument.num_atten
+ # iperf server doubles as ping server to reduce config parameters
+ self.iperf_server = self.iperf_servers[0]
+ if hasattr(self, "RetailAccessPoints"):
+ self.access_points = retail_ap.create(self.RetailAccessPoints)
+ self.access_point = self.access_points[0]
+ self.log.info("Access Point Configuration: {}".format(
+ self.access_point.ap_settings))
+ self.log_path = os.path.join(logging.log_path, "results")
+ utils.create_dir(self.log_path)
+ if not hasattr(self, "golden_files_list"):
+ self.golden_files_list = [
+ os.path.join(self.testbed_params["golden_results_path"],
+ file) for file in os.listdir(
+ self.testbed_params["golden_results_path"])
+ ]
+ self.testclass_results = []
+
+ # Turn WiFi ON
+ for dev in self.android_devices:
+ wutils.wifi_toggle_state(dev, True)
+
+ def pass_fail_check_ping_rtt(self, ping_range_result):
+ """Check the test result and decide if it passed or failed.
+
+ The function computes RTT statistics and fails any tests in which the
+ tail of the ping latency results exceeds the threshold defined in the
+ configuration file.
+
+ Args:
+ ping_range_result: dict containing ping results and other meta data
+ """
+ ignored_fraction = self.test_params[
+ "rtt_ignored_interval"] / self.test_params["rtt_ping_duration"]
+ sorted_rtt = [
+ sorted(x["rtt"][round(ignored_fraction * len(x["rtt"])):])
+ for x in ping_range_result["ping_results"]
+ ]
+ mean_rtt = [statistics.mean(x) for x in sorted_rtt]
+ std_rtt = [statistics.stdev(x) for x in sorted_rtt]
+ rtt_at_test_percentile = [
+ x[int(
+ len(x) *
+ ((100 - self.test_params["rtt_test_percentile"]) / 100))]
+ for x in sorted_rtt
+ ]
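+ # Illustrative example (assumed config): with rtt_test_percentile = 10
+ # and 1000 retained samples, the index is int(1000 * 0.9) = 900, i.e.
+ # the 90th-percentile RTT of the ascending-sorted samples.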
+
+ test_failed = False
+ for idx, rtt in enumerate(rtt_at_test_percentile):
+ if rtt > self.test_params["rtt_threshold"] * 1000:
+ test_failed = True
+ self.log.info(
+ "RTT Failed. Test %ile RTT = {}ms. Mean = {}ms. Stdev = {}".
+ format(rtt, mean_rtt[idx], std_rtt[idx]))
+ if test_failed:
+ asserts.fail("RTT above threshold")
+ else:
+ asserts.explicit_pass(
+ "Test Passed. RTTs at test percentile = {}".format(
+ rtt_at_test_percentile))
+
+ def pass_fail_check_ping_range(self, ping_range_result):
+ """Check the test result and decide if it passed or failed.
+
+ Checks whether the attenuation at which ping packet losses begin to
+ exceed the threshold matches the range derived from golden
+ rate-vs-range result files. The test fails if the ping range is
+ range_gap_threshold worse than the RvR range.
+
+ Args:
+ ping_range_result: dict containing ping results and meta data
+ """
+ try:
+ rvr_range = self.get_range_from_rvr()
+ except:
+ rvr_range = float("nan")
+
+ ping_loss_over_att = [
+ x["packet_loss_percentage"]
+ for x in ping_range_result["ping_results"]
+ ]
+ ping_loss_below_threshold = [
+ int(x < self.test_params["range_ping_loss_threshold"])
+ for x in ping_loss_over_att
+ ]
+ attenuation_at_range = self.atten_range[ping_loss_below_threshold.index(
+ 0) - 1] + ping_range_result["fixed_attenuation"]
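+ # Illustrative example (assumed sweep): with atten_range starting at
+ # 0 dB in 10 dB steps, losses first exceeding the threshold at the
+ # fifth step give index(0) = 4, so attenuation_at_range =
+ # atten_range[3] + fixed_attenuation = 30 dB plus the fixed loss.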
+ if attenuation_at_range - rvr_range < -self.test_params["range_gap_threshold"]:
+ asserts.fail(
+ "Attenuation at range is {}dB. Golden range is {}dB".format(
+ attenuation_at_range, rvr_range))
+ else:
+ asserts.explicit_pass(
+ "Attenuation at range is {}dB. Golden range is {}dB".format(
+ attenuation_at_range, rvr_range))
+
+ def post_process_ping_results(self, ping_range_result):
+ """Saves and plots ping results.
+
+ Args:
+ ping_range_result: dict containing ping results and metadata
+ """
+ results_file_path = "{}/{}.json".format(self.log_path,
+ self.current_test_name)
+ with open(results_file_path, 'w') as results_file:
+ json.dump(ping_range_result, results_file, indent=4)
+
+ x_data = [
+ list(range(len(x["rtt"])))
+ for x in ping_range_result["ping_results"] if len(x["rtt"]) > 1
+ ]
+ rtt_data = [
+ x["rtt"] for x in ping_range_result["ping_results"]
+ if len(x["rtt"]) > 1
+ ]
+ #legend = ["Round Trip Time" for x in ping_range_result["ping_results"]]
+ legend = [
+ "RTT @ {}dB".format(att)
+ for att in ping_range_result["attenuation"]
+ ]
+
+ data_sets = [x_data, rtt_data]
+ fig_property = {
+ "title": self.current_test_name,
+ "x_label": 'Sample Index',
+ "y_label": 'Round Trip Time (ms)',
+ "linewidth": 3,
+ "markersize": 0
+ }
+ output_file_path = "{}/{}.html".format(self.log_path,
+ self.current_test_name)
+ wputils.bokeh_plot(
+ data_sets,
+ legend,
+ fig_property,
+ shaded_region=None,
+ output_file_path=output_file_path)
+
+ def get_range_from_rvr(self):
+ """Function gets range from RvR golden results
+
+ The function fetches the attenuation at which the RvR throughput goes
+ to zero.
+
+ Returns:
+ range: range derived from looking at rvr curves
+ """
+ # Fetch the golden RvR results
+ test_name = self.current_test_name
+ rvr_golden_file_name = "test_rvr_TCP_DL_" + "_".join(
+ test_name.split("_")[3:])
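+ # Example from this class: for 'test_ping_range_ch36_VHT80' the tokens
+ # from index 3 onward give 'ch36_VHT80', so the golden file searched
+ # for is 'test_rvr_TCP_DL_ch36_VHT80'.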
+ golden_path = [
+ file_name for file_name in self.golden_files_list
+ if rvr_golden_file_name in file_name
+ ]
+ with open(golden_path[0], 'r') as golden_file:
+ golden_results = json.load(golden_file)
+ # Find the attenuation at which throughput drops to 0 Mbps and back off by one step
+ atten_idx = golden_results["throughput_receive"].index(0)
+ rvr_range = golden_results["attenuation"][atten_idx -
+ 1] + golden_results["fixed_attenuation"]
+ return rvr_range
+
+ def get_ping_stats(self, ping_from_dut, ping_duration, ping_interval,
+ ping_size):
+ """Run ping to or from the DUT.
+
+ The function either pings the DUT from the remote server or pings a
+ remote address from the DUT.
+
+ Args:
+ ping_from_dut: boolean set to true if pinging from the DUT
+ ping_duration: timeout to set on the ping process (in seconds)
+ ping_interval: time between pings (in seconds)
+ ping_size: size of ping packet payload
+ Returns:
+ ping_result: dict containing ping results and other meta data
+ """
+ ping_cmd = "ping -w {} -i {} -s {}".format(
+ ping_duration,
+ ping_interval,
+ ping_size,
+ )
+ if ping_from_dut:
+ ping_cmd = "{} {}".format(
+ ping_cmd, self.testbed_params["outgoing_ping_address"])
+ ping_output = self.client_dut.adb.shell(
+ ping_cmd,
+ timeout=ping_duration + self.TEST_TIMEOUT,
+ ignore_status=True)
+ else:
+ ping_cmd = "sudo {} {}".format(ping_cmd, self.dut_ip)
+ ping_output = self.iperf_server.ssh_session.run(
+ ping_cmd, ignore_status=True).stdout
+ ping_output = ping_output.splitlines()
+
+ if len(ping_output) == 1:
+ ping_result = {"connected": 0}
+ else:
+ packet_loss_line = [line for line in ping_output if "loss" in line]
+ packet_loss_percentage = int(
+ packet_loss_line[0].split("%")[0].split(" ")[-1])
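+ # Example: a typical ping summary line such as
+ # '10 packets transmitted, 8 received, 20% packet loss, time 9012ms'
+ # parses to packet_loss_percentage = 20.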
+ if packet_loss_percentage == 100:
+ rtt = [float("nan")]
+ else:
+ rtt = [
+ line.split("time=")[1] for line in ping_output
+ if "time=" in line
+ ]
+ rtt = [float(line.split(" ")[0]) for line in rtt]
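+ # e.g. an echo line containing 'time=23.4 ms' yields an RTT
+ # sample of 23.4 here.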
+ ping_result = {
+ "connected": 1,
+ "rtt": rtt,
+ "packet_loss_percentage": packet_loss_percentage
+ }
+ return ping_result
+
+ def ping_test(self, channel, mode, atten_levels, ping_duration,
+ ping_interval, ping_size):
+ """Main function to test ping.
+
+ The function sets up the AP in the correct channel and mode
+ configuration and calls get_ping_stats while sweeping attenuation
+
+ Args:
+ channel: Specifies AP's channel
+ mode: Specifies AP's bandwidth/mode (11g, VHT20, VHT40, VHT80)
+ atten_levels: array of attenuation levels to run ping test at
+ ping_duration: timeout to set on the ping process (in seconds)
+ ping_interval: time between pings (in seconds)
+ ping_size: size of ping packet payload
+ Returns:
+ test_result: dict containing ping results and other meta data
+ """
+ band = self.access_point.band_lookup_by_channel(channel)
+ if "2G" in band:
+ frequency = wutils.WifiEnums.channel_2G_to_freq[channel]
+ else:
+ frequency = wutils.WifiEnums.channel_5G_to_freq[channel]
+ if frequency in wutils.WifiEnums.DFS_5G_FREQUENCIES:
+ self.access_point.set_region(self.testbed_params["DFS_region"])
+ else:
+ self.access_point.set_region(self.testbed_params["default_region"])
+ self.access_point.set_channel(band, channel)
+ self.access_point.set_bandwidth(band, mode)
+ self.log.info("Access Point Configuration: {}".format(
+ self.access_point.ap_settings))
+
+ # Set attenuator to 0 dB
+ [self.attenuators[i].set_atten(0) for i in range(self.num_atten)]
+
+ # Reset, configure, and connect DUT
+ wutils.reset_wifi(self.client_dut)
+ self.client_dut.droid.wifiSetCountryCode(
+ self.test_params["country_code"])
+ self.main_network[band]["channel"] = channel
+ wutils.wifi_connect(
+ self.client_dut, self.main_network[band], num_of_tries=5)
+ self.dut_ip = self.client_dut.droid.connectivityGetIPv4Addresses(
+ 'wlan0')[0]
+ time.sleep(self.MED_SLEEP)
+
+ test_result = {"ping_results": []}
+ test_result["test_name"] = self.current_test_name
+ test_result["ap_config"] = self.access_point.ap_settings.copy()
+ test_result["attenuation"] = atten_levels
+ test_result["fixed_attenuation"] = self.testbed_params[
+ "fixed_attenuation"][str(channel)]
+ for atten in atten_levels:
+ [
+ self.attenuators[i].set_atten(atten)
+ for i in range(self.num_atten)
+ ]
+ time.sleep(self.SHORT_SLEEP)
+ current_ping_stats = self.get_ping_stats(0, ping_duration,
+ ping_interval, ping_size)
+ if current_ping_stats["connected"]:
+ self.log.info(
+ "Attenuation = {0}dB Packet Loss Rate = {1}%. Avg Ping RTT = {2:.2f}ms".
+ format(atten, current_ping_stats["packet_loss_percentage"],
+ statistics.mean(current_ping_stats["rtt"])))
+ else:
+ self.log.info(
+ "Attenuation = {}dB. Disconnected.".format(atten))
+ test_result["ping_results"].append(current_ping_stats)
+ return test_result
+
+ def _test_ping_rtt(self):
+ """ Function that gets called for each RTT test case
+
+ The function gets called in each RTT test case. The function customizes
+ the RTT test based on the test name of the test that called it
+ """
+ test_params = self.current_test_name.split("_")
+ self.channel = int(test_params[4][2:])
+ self.mode = test_params[5]
+ self.atten_range = self.test_params["rtt_test_attenuation"]
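+ # Example from this class: 'test_fast_ping_rtt_ch36_VHT80' splits so
+ # that test_params[4][2:] = '36' (channel), test_params[5] = 'VHT80'
+ # (mode), and test_params[1] = 'fast' selects the ping interval.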
+ ping_range_result = self.ping_test(
+ self.channel, self.mode, self.atten_range,
+ self.test_params["rtt_ping_duration"],
+ self.test_params["rtt_ping_interval"][test_params[1]],
+ self.test_params["ping_size"])
+ self.post_process_ping_results(ping_range_result)
+ self.pass_fail_check_ping_rtt(ping_range_result)
+
+ def _test_ping_range(self):
+ """ Function that gets called for each range test case
+
+ The function gets called in each range test case. It customizes the
+ range test based on the test name of the test that called it
+ """
+ test_params = self.current_test_name.split("_")
+ self.channel = int(test_params[3][2:])
+ self.mode = test_params[4]
+ num_atten_steps = int((self.test_params["range_atten_stop"] -
+ self.test_params["range_atten_start"]) /
+ self.test_params["range_atten_step"])
+ self.atten_range = [
+ self.test_params["range_atten_start"] +
+ x * self.test_params["range_atten_step"]
+ for x in range(0, num_atten_steps)
+ ]
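+ # Example from this class: 'test_ping_range_ch11_VHT20' gives channel
+ # 11 and mode VHT20. With assumed config values range_atten_start = 20,
+ # range_atten_stop = 70 and range_atten_step = 5, atten_range becomes
+ # [20, 25, ..., 65] (the stop value itself is excluded).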
+ ping_range_result = self.ping_test(
+ self.channel, self.mode, self.atten_range,
+ self.test_params["range_ping_duration"],
+ self.test_params["range_ping_interval"],
+ self.test_params["ping_size"])
+ self.post_process_ping_results(ping_range_result)
+ self.pass_fail_check_ping_range(ping_range_result)
+
+ def test_ping_range_ch1_VHT20(self):
+ self._test_ping_range()
+
+ def test_ping_range_ch6_VHT20(self):
+ self._test_ping_range()
+
+ def test_ping_range_ch11_VHT20(self):
+ self._test_ping_range()
+
+ def test_ping_range_ch36_VHT20(self):
+ self._test_ping_range()
+
+ def test_ping_range_ch36_VHT40(self):
+ self._test_ping_range()
+
+ def test_ping_range_ch36_VHT80(self):
+ self._test_ping_range()
+
+ def test_ping_range_ch40_VHT20(self):
+ self._test_ping_range()
+
+ def test_ping_range_ch44_VHT20(self):
+ self._test_ping_range()
+
+ def test_ping_range_ch44_VHT40(self):
+ self._test_ping_range()
+
+ def test_ping_range_ch48_VHT20(self):
+ self._test_ping_range()
+
+ def test_ping_range_ch149_VHT20(self):
+ self._test_ping_range()
+
+ def test_ping_range_ch149_VHT40(self):
+ self._test_ping_range()
+
+ def test_ping_range_ch149_VHT80(self):
+ self._test_ping_range()
+
+ def test_ping_range_ch153_VHT20(self):
+ self._test_ping_range()
+
+ def test_ping_range_ch157_VHT20(self):
+ self._test_ping_range()
+
+ def test_ping_range_ch157_VHT40(self):
+ self._test_ping_range()
+
+ def test_ping_range_ch161_VHT20(self):
+ self._test_ping_range()
+
+ def test_fast_ping_rtt_ch1_VHT20(self):
+ self._test_ping_rtt()
+
+ def test_slow_ping_rtt_ch1_VHT20(self):
+ self._test_ping_rtt()
+
+ def test_fast_ping_rtt_ch36_VHT20(self):
+ self._test_ping_rtt()
+
+ def test_slow_ping_rtt_ch36_VHT20(self):
+ self._test_ping_rtt()
+
+ def test_fast_ping_rtt_ch36_VHT40(self):
+ self._test_ping_rtt()
+
+ def test_slow_ping_rtt_ch36_VHT40(self):
+ self._test_ping_rtt()
+
+ def test_fast_ping_rtt_ch36_VHT80(self):
+ self._test_ping_rtt()
+
+ def test_slow_ping_rtt_ch36_VHT80(self):
+ self._test_ping_rtt()
+
+ def test_fast_ping_rtt_ch149_VHT20(self):
+ self._test_ping_rtt()
+
+ def test_slow_ping_rtt_ch149_VHT20(self):
+ self._test_ping_rtt()
+
+ def test_fast_ping_rtt_ch149_VHT40(self):
+ self._test_ping_rtt()
+
+ def test_slow_ping_rtt_ch149_VHT40(self):
+ self._test_ping_rtt()
+
+ def test_fast_ping_rtt_ch149_VHT80(self):
+ self._test_ping_rtt()
+
+ def test_slow_ping_rtt_ch149_VHT80(self):
+ self._test_ping_rtt()
\ No newline at end of file
diff --git a/acts/tests/google/wifi/WifiPnoTest.py b/acts/tests/google/wifi/WifiPnoTest.py
index b8f85c0..1282929 100644
--- a/acts/tests/google/wifi/WifiPnoTest.py
+++ b/acts/tests/google/wifi/WifiPnoTest.py
@@ -144,7 +144,7 @@
""" Tests Begin """
@test_tracker_info(uuid="33d3cae4-5fa7-4e90-b9e2-5d3747bba64c")
- def test_simple_pno_connection_2g_to_5g(self):
+ def test_simple_pno_connection_to_2g(self):
"""Test PNO triggered autoconnect to a network.
Steps:
@@ -152,16 +152,13 @@
2. Save 2 valid network configurations (a & b) in the device.
3. Attenuate 5Ghz network and wait for a few seconds to trigger PNO.
4. Check the device connected to 2Ghz network automatically.
- 5. Attenuate 2Ghz network and wait for a few seconds to trigger PNO.
- 6. Check the device connected to 5Ghz network automatically.
"""
self.add_network_and_enable(self.pno_network_a)
self.add_network_and_enable(self.pno_network_b)
self.trigger_pno_and_assert_connect("a_on_b_off", self.pno_network_a)
- self.trigger_pno_and_assert_connect("b_on_a_off", self.pno_network_b)
@test_tracker_info(uuid="39b945a1-830f-4f11-9e6a-9e9641066a96")
- def test_simple_pno_connection_5g_to_2g(self):
+ def test_simple_pno_connection_to_5g(self):
"""Test PNO triggered autoconnect to a network.
Steps:
@@ -169,15 +166,11 @@
2. Save 2 valid network configurations (a & b) in the device.
3. Attenuate 2Ghz network and wait for a few seconds to trigger PNO.
4. Check the device connected to 5Ghz network automatically.
- 5. Attenuate 5Ghz network and wait for a few seconds to trigger PNO.
- 6. Check the device connected to 2Ghz network automatically.
"""
self.add_network_and_enable(self.pno_network_a)
self.add_network_and_enable(self.pno_network_b)
self.trigger_pno_and_assert_connect("b_on_a_off", self.pno_network_b)
- self.trigger_pno_and_assert_connect("a_on_b_off", self.pno_network_a)
-
@test_tracker_info(uuid="844b15be-ff45-4b09-a11b-0b2b4bb13b22")
def test_pno_connection_with_multiple_saved_networks(self):
@@ -195,7 +188,8 @@
self.add_and_enable_dummy_networks(16)
self.add_network_and_enable(self.pno_network_a)
self.add_network_and_enable(self.pno_network_b)
+ # Force single scan so that both networks become preferred before PNO.
+ wutils.start_wifi_connection_scan(self.dut)
self.trigger_pno_and_assert_connect("a_on_b_off", self.pno_network_a)
- self.trigger_pno_and_assert_connect("b_on_a_off", self.pno_network_b)
""" Tests End """
diff --git a/acts/tests/google/wifi/WifiRssiTest.py b/acts/tests/google/wifi/WifiRssiTest.py
index 4247344..1ea75b5 100644
--- a/acts/tests/google/wifi/WifiRssiTest.py
+++ b/acts/tests/google/wifi/WifiRssiTest.py
@@ -40,6 +40,16 @@
class WifiRssiTest(base_test.BaseTestClass):
+ """Class to test WiFi RSSI reporting.
+
+ This class tests RSSI reporting on android devices. The class tests RSSI
+ accuracy by checking RSSI over a large attenuation range, checks for RSSI
+    stability over time when attenuation is fixed, and checks that RSSI reacts
+    quickly to changes in attenuation by checking RSSI trajectories over
+    configurable attenuation waveforms. For an example config file to run this
+ test class see example_connectivity_performance_ap_sta.json.
+ """
+
def __init__(self, controllers):
base_test.BaseTestClass.__init__(self, controllers)
@@ -572,6 +582,7 @@
wutils.wifi_toggle_state(self.dut, True)
wutils.reset_wifi(self.dut)
self.main_network[band]["channel"] = self.channel
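+            # The country code below is read from this class's test params; a
+            # minimal sketch of the assumed config entry (illustrative value,
+            # see the example json referenced in the class docstring):
+            #     "country_code": "US"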
+ self.dut.droid.wifiSetCountryCode(self.test_params["country_code"])
wutils.wifi_connect(self.dut, self.main_network[band], num_of_tries=5)
time.sleep(MED_SLEEP)
# Run RvR and log result
diff --git a/acts/tests/google/wifi/WifiRvrTest.py b/acts/tests/google/wifi/WifiRvrTest.py
index 9c45525..5da2e93 100644
--- a/acts/tests/google/wifi/WifiRvrTest.py
+++ b/acts/tests/google/wifi/WifiRvrTest.py
@@ -30,6 +30,15 @@
class WifiRvrTest(base_test.BaseTestClass):
+ """Class to test WiFi rate versus range.
+
+ This class implements WiFi rate versus range tests on single AP single STA
+    links. The class sets up the AP in the desired configurations, configures
+    and connects the phone to the AP, and runs an iperf throughput test while
+ sweeping attenuation. For an example config file to run this test class see
+ example_connectivity_performance_ap_sta.json.
+ """
+
TEST_TIMEOUT = 10
SHORT_SLEEP = 1
MED_SLEEP = 5
@@ -357,8 +366,9 @@
self.access_point.ap_settings))
# Set attenuator to 0 dB
[self.attenuators[i].set_atten(0) for i in range(self.num_atten)]
- # Connect DUT to Network
+        # Reset, configure, and connect DUT
wutils.reset_wifi(self.client_dut)
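+        # Assumes the test config supplies a "country_code" entry (e.g. "US")
+        # under this class's test params.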
+ self.client_dut.droid.wifiSetCountryCode(self.test_params["country_code"])
self.main_network[band]["channel"] = channel
wutils.wifi_connect(
self.client_dut, self.main_network[band], num_of_tries=5)
diff --git a/acts/tests/google/wifi/WifiScannerMultiScanTest.py b/acts/tests/google/wifi/WifiScannerMultiScanTest.py
index 0ff3574..1b33e57 100755
--- a/acts/tests/google/wifi/WifiScannerMultiScanTest.py
+++ b/acts/tests/google/wifi/WifiScannerMultiScanTest.py
@@ -149,15 +149,12 @@
'numUsage': 0,
'SSID': '"wh_ap1_2g"',
'timestamp': 4280078660,
- 'numConnection': 0,
'BSSID': '30:b5:c2:33:f9:05',
'frequency': 2412,
- 'numIpConfigFailures': 0,
'distanceSdCm': 0,
'distanceCm': 0,
'centerFreq1': 0,
'centerFreq0': 0,
- 'blackListTimestamp': 0,
'venueName': '',
'seen': 0,
'operatorFriendlyName': '',
diff --git a/acts/tests/google/wifi/WifiScannerScanTest.py b/acts/tests/google/wifi/WifiScannerScanTest.py
index 9eb6d38..b0d73de 100755
--- a/acts/tests/google/wifi/WifiScannerScanTest.py
+++ b/acts/tests/google/wifi/WifiScannerScanTest.py
@@ -75,12 +75,15 @@
"test_single_scan_while_pno",
"test_wifi_connection_and_pno_while_batch_scan",
"test_wifi_scanner_single_scan_in_isolated",
- "test_wifi_scanner_with_invalid_numBssidsPerScan")
+ "test_wifi_scanner_with_invalid_numBssidsPerScan",
+ "test_wifi_scanner_dual_radio_low_latency",
+ "test_wifi_scanner_dual_radio_low_power",
+ "test_wifi_scanner_dual_radio_high_accuracy")
def setup_class(self):
self.dut = self.android_devices[0]
wutils.wifi_test_device_init(self.dut)
- req_params = ("run_extended_test", "ping_addr", "max_bugreports")
+ req_params = ("run_extended_test", "ping_addr", "max_bugreports", "dbs_supported_models")
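+        # "dbs_supported_models" is assumed to be a list of device model names
+        # that support dual radio (DBS) scanning, e.g. in the ACTS config:
+        #     "dbs_supported_models": ["<model name>"]
+        # The dual radio test cases below skip on any other model.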
opt_param = ["reference_networks"]
self.unpack_userparams(
req_param_names=req_params, opt_param_names=opt_param)
@@ -108,6 +111,7 @@
self.attenuators = wutils.group_attenuators(self.attenuators)
self.attenuators[0].set_atten(0)
self.attenuators[1].set_atten(0)
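+        # Disable the framework's connectivity manager so automatic scans and
+        # connections do not interfere with the scanner tests; it is re-enabled
+        # in teardown_class.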
+ self.dut.droid.wifiEnableWifiConnectivityManager(False)
def teardown_test(self):
base_test.BaseTestClass.teardown_test(self)
@@ -121,6 +125,7 @@
self.dut.cat_adb_log(test_name, begin_time)
def teardown_class(self):
+ self.dut.droid.wifiEnableWifiConnectivityManager(True)
if "AccessPoint" in self.user_params:
del self.user_params["reference_networks"]
del self.user_params["open_network"]
@@ -317,6 +322,8 @@
parameter.
3. Pop all full scan result events occurred earlier.
4. Verify that full scan results match with normal scan results.
+           5. If the scan type is included in scan_setting, verify the
+              radioChainInfos length of each scan result.
Args:
scan_setting: The parameters for the single scan.
@@ -348,12 +355,27 @@
asserts.assert_true(
len(results) >= bssids,
"Full single shot result don't match {}".format(len(results)))
+ if 'type' in scan_setting.keys():
+ for item in results:
+ self.verify_radio_chain_length(scan_setting['type'], item)
except queue.Empty as error:
raise AssertionError(
"Event did not triggered for single shot {}".format(error))
finally:
self.dut.droid.wifiScannerStopScan(idx)
+ def verify_radio_chain_length(self, scan_setting_type, scan_result):
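+        """Verify the radioChainInfos length of a scan result.
+
+        Low latency and low power scans are expected to report a single radio
+        chain; any other requested scan type (e.g. high accuracy) is expected
+        to report two radio chains.
+
+        Args:
+            scan_setting_type: The scan type requested in the scan settings.
+            scan_result: The scan result to check (the radioChainInfos field
+                of its first entry is inspected).
+        """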
+ llen = len(scan_result[0]["radioChainInfos"])
+ if scan_setting_type == wutils.WifiEnums.SCAN_TYPE_LOW_LATENCY \
+ or scan_setting_type == wutils.WifiEnums.SCAN_TYPE_LOW_POWER:
+ asserts.assert_true(llen == 1,
+ "radioChainInfos len expected:{} "
+ "actual:{}".format(1, llen))
+ else:
+ asserts.assert_true(llen == 2,
+ "radioChainInfos len expected:{} "
+ "actual:{}".format(2, llen))
+
def wifi_scanner_batch_scan_full(self, scan_setting):
"""Common logic for batch scan test case for full scan result.
@@ -953,6 +975,63 @@
wutils.WifiEnums.REPORT_EVENT_AFTER_EACH_SCAN}
self.wifi_scanner_single_scan(scan_setting)
+ @test_tracker_info(uuid="7c8da0c4-dec7-4d04-abd4-f8ea467a5c6d")
+ def test_wifi_scanner_dual_radio_low_latency(self):
+ """Test WiFi scanner single scan for mix channel with default setting
+ parameters.
+
+ 1. Start WifiScanner single scan for type = SCAN_TYPE_LOW_LATENCY.
+ 2. Verify that scan results match with respective scan settings.
+ """
+ if self.dut.model not in self.dbs_supported_models:
+ asserts.skip(
+ ("Device %s does not support dual radio scanning.")
+ % self.dut.model)
+ scan_setting = {"channels": self.wifi_chs.MIX_CHANNEL_SCAN,
+ "periodInMs": SCANTIME,
+ "reportEvents":
+ wutils.WifiEnums.REPORT_EVENT_FULL_SCAN_RESULT,
+ "type": wutils.WifiEnums.SCAN_TYPE_LOW_LATENCY}
+ self.wifi_scanner_single_scan_full(scan_setting)
+
+ @test_tracker_info(uuid="58b49b01-851b-4e45-b218-9fd27c0be921")
+ def test_wifi_scanner_dual_radio_low_power(self):
+ """Test WiFi scanner single scan for mix channel with default setting
+ parameters.
+
+ 1. Start WifiScanner single scan for type = SCAN_TYPE_LOW_POWER.
+ 2. Verify that scan results match with respective scan settings.
+ """
+ if self.dut.model not in self.dbs_supported_models:
+ asserts.skip(
+ ("Device %s does not support dual radio scanning.")
+ % self.dut.model)
+ scan_setting = {"channels": self.wifi_chs.MIX_CHANNEL_SCAN,
+ "periodInMs": SCANTIME,
+ "reportEvents":
+ wutils.WifiEnums.REPORT_EVENT_FULL_SCAN_RESULT,
+ "type": wutils.WifiEnums.SCAN_TYPE_LOW_POWER}
+ self.wifi_scanner_single_scan_full(scan_setting)
+
+ @test_tracker_info(uuid="3e7288bc-45e4-497c-bf3a-977eec4e896e")
+ def test_wifi_scanner_dual_radio_high_accuracy(self):
+ """Test WiFi scanner single scan for mix channel with default setting
+ parameters.
+
+ 1. Start WifiScanner single scan for type = SCAN_TYPE_HIGH_ACCURACY.
+ 2. Verify that scan results match with respective scan settings.
+ """
+ if self.dut.model not in self.dbs_supported_models:
+ asserts.skip(
+ ("Device %s does not support dual radio scanning.")
+ % self.dut.model)
+ scan_setting = {"channels": self.wifi_chs.MIX_CHANNEL_SCAN,
+ "periodInMs": SCANTIME,
+ "reportEvents":
+ wutils.WifiEnums.REPORT_EVENT_FULL_SCAN_RESULT,
+ "type": wutils.WifiEnums.SCAN_TYPE_HIGH_ACCURACY}
+ self.wifi_scanner_single_scan_full(scan_setting)
+
@test_tracker_info(uuid="e9f3aaad-4af3-4c54-9829-65dc1d6d4987")
def test_wifi_scanner_batch_scan_channel_sanity(self):
"""Test WiFi scanner batch scan for mix channel with default setting
diff --git a/acts/tests/google/wifi/WifiSoftApTest.py b/acts/tests/google/wifi/WifiSoftApTest.py
index 987b14d..2a03eb1 100644
--- a/acts/tests/google/wifi/WifiSoftApTest.py
+++ b/acts/tests/google/wifi/WifiSoftApTest.py
@@ -16,11 +16,14 @@
import logging
import queue
+import random
import time
from acts import asserts
from acts import utils
from acts.test_decorators import test_tracker_info
+from acts.test_utils.net import arduino_test_utils as dutils
+from acts.test_utils.net import socket_test_utils as sutils
from acts.test_utils.tel import tel_defines
from acts.test_utils.tel import tel_test_utils as tel_utils
from acts.test_utils.tel.tel_test_utils import WIFI_CONFIG_APBAND_2G
@@ -55,7 +58,6 @@
utils.require_sl4a((self.dut, self.dut_client))
utils.sync_device_time(self.dut)
utils.sync_device_time(self.dut_client)
-
# Set country code explicitly to "US".
self.dut.droid.wifiSetCountryCode(wutils.WifiEnums.CountryCode.US)
self.dut_client.droid.wifiSetCountryCode(wutils.WifiEnums.CountryCode.US)
@@ -107,6 +109,31 @@
wutils.start_wifi_connection_scan_and_ensure_network_not_found(
self.dut_client, ap_ssid);
+ def validate_traffic_between_softap_clients(self, config):
+ """Send traffic between softAp clients.
+
+        Connect SoftAp clients to the wifi hotspot: one android
+        device and the other an arduino wifi controller. Send UDP traffic
+ between the clients and verify that expected messages are received.
+
+ Args:
+ config: wifi network config with SSID, password
+ """
+ ad = self.dut_client
+ wd = self.arduino_wifi_dongles[0]
+ wutils.wifi_connect(ad, config, check_connectivity=False)
+ dutils.connect_wifi(wd, config)
+ local_ip = ad.droid.connectivityGetIPv4Addresses('wlan0')[0]
+ remote_ip = wd.ip_address()
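+        # Pick an arbitrary UDP port in the 8000-9000 range for the test sockets.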
+ port = random.randint(8000, 9000)
+ self.log.info("IP addr on android device: %s" % local_ip)
+ self.log.info("IP addr on arduino device: %s" % remote_ip)
+
+ socket = sutils.open_datagram_socket(ad, local_ip, port)
+ sutils.send_recv_data_datagram_sockets(
+ ad, ad, socket, socket, remote_ip, port)
+ sutils.close_datagram_socket(ad, socket)
+
def check_cell_data_and_enable(self):
"""Make sure that cell data is enabled if there is a sim present.
@@ -122,7 +149,8 @@
asserts.assert_true(self.dut.droid.telephonyIsDataEnabled(),
"Failed to enable cell data for softap dut.")
- def validate_full_tether_startup(self, band=None, hidden=None):
+ def validate_full_tether_startup(self, band=None, hidden=None,
+ test_clients=None):
"""Test full startup of wifi tethering
1. Report current state.
@@ -152,6 +180,8 @@
asserts.assert_true(ret != -1, "Add network %r failed" % config)
self.dut_client.droid.wifiEnableNetwork(ret, 0)
self.confirm_softap_in_scan_results(config[wutils.WifiEnums.SSID_KEY])
+ if test_clients:
+ self.validate_traffic_between_softap_clients(config)
wutils.stop_wifi_tethering(self.dut)
asserts.assert_false(self.dut.droid.wifiIsApEnabled(),
"SoftAp is still reported as running")
@@ -290,6 +320,24 @@
wutils.stop_wifi_tethering(self.dut)
wutils.wait_for_disconnect(self.dut_client)
+ @test_tracker_info(uuid="05c6f929-7754-477f-a9cd-f77e850b818b")
+ def test_full_tether_startup_2G_multiple_clients(self):
+ """Test full startup of wifi tethering in 2G band, connect clients
+ to softAp and send traffic between them.
+
+ 1. Report current state.
+ 2. Switch to AP mode.
+ 3. verify SoftAP active.
+ 4. Connect clients to softAp.
+ 5. Send and recv UDP traffic between them.
+ 6. Shutdown wifi tethering.
+ 7. verify back to previous mode.
+ """
+ asserts.skip_if(not hasattr(self, 'arduino_wifi_dongles'),
+ "No wifi dongles connected. Skipping test")
+ self.validate_full_tether_startup(WIFI_CONFIG_APBAND_2G,
+ test_clients=True)
+
""" Tests End """
diff --git a/acts/tests/google/wifi/WifiStaApConcurrencyTest.py b/acts/tests/google/wifi/WifiStaApConcurrencyTest.py
index 6f0beba..6211263 100755
--- a/acts/tests/google/wifi/WifiStaApConcurrencyTest.py
+++ b/acts/tests/google/wifi/WifiStaApConcurrencyTest.py
@@ -38,7 +38,7 @@
WIFI_NETWORK_AP_CHANNEL_5G_DFS = 132
class WifiStaApConcurrencyTest(WifiBaseTest):
- """Tests for STA + AP concurrency scenarions.
+ """Tests for STA + AP concurrency scenarios.
Test Bed Requirement:
* Two Android devices (For AP)
@@ -129,7 +129,7 @@
self.legacy_configure_ap_and_start(channel_2g=channel_2g)
else:
self.legacy_configure_ap_and_start(channel_2g=channel_2g,
- channel_5g=chanel_5g)
+ channel_5g=channel_5g)
self.wpapsk_2g = self.reference_networks[0]["2g"]
self.wpapsk_5g = self.reference_networks[0]["5g"]
@@ -174,21 +174,21 @@
params: A tuple of network info and AndroidDevice object.
"""
network, ad = params
- droid = ad.droid
- ed = ad.ed
SSID = network[WifiEnums.SSID_KEY]
wutils.start_wifi_connection_scan_and_ensure_network_found(
- ad, SSID);
+ ad, SSID)
wutils.wifi_connect(ad, network, num_of_tries=3)
- def confirm_softap_in_scan_results(self, ap_ssid):
+ def confirm_softap_can_be_connected(self, network):
"""Confirm the ap started by wifi tethering is seen in scan results.
Args:
- ap_ssid: SSID of the ap we are looking for.
+ network: config of the ap we are looking for.
"""
+ SSID = network[WifiEnums.SSID_KEY]
wutils.start_wifi_connection_scan_and_ensure_network_found(
- self.dut_client, ap_ssid);
+ self.dut_client, SSID)
+ wutils.wifi_connect(self.dut_client, network, check_connectivity=False)
def create_softap_config(self):
"""Create a softap config with ssid and password."""
@@ -202,18 +202,19 @@
def start_softap_and_verify(self, band):
"""Test startup of softap
- 1. Brinup AP mode.
+ 1. Bring up AP mode.
2. Verify SoftAP active using the client device.
"""
config = self.create_softap_config()
wutils.start_wifi_tethering(self.dut,
config[wutils.WifiEnums.SSID_KEY],
config[wutils.WifiEnums.PWD_KEY], band)
- self.confirm_softap_in_scan_results(config[wutils.WifiEnums.SSID_KEY])
+ self.confirm_softap_can_be_connected(config)
+ return config
def connect_to_wifi_network_and_start_softap(self, nw_params, softap_band):
- """Test concurrenct wifi connection and softap.
- This helper method first makes a wifi conenction and then starts SoftAp.
+ """Test concurrent wifi connection and softap.
+ This helper method first makes a wifi connection and then starts SoftAp.
Args:
nw_params: Params for network STA connection.
@@ -221,33 +222,35 @@
1. Bring up wifi.
2. Establish connection to a network.
- 3. Bring up softap and verify AP is seen on a client device.
- 4. Run iperf on the wifi connection to the network.
+ 3. Bring up softap and verify AP can be connected by a client device.
+ 4. Run iperf on the wifi/softap connection to the network.
"""
wutils.wifi_toggle_state(self.dut, True)
self.connect_to_wifi_network_and_verify((nw_params, self.dut))
- self.start_softap_and_verify(softap_band)
+ softap_config = self.start_softap_and_verify(softap_band)
self.run_iperf_client((nw_params, self.dut))
+ self.run_iperf_client((softap_config, self.dut_client))
# Verify that both softap & wifi is enabled concurrently.
self.verify_wifi_and_softap_enabled()
def start_softap_and_connect_to_wifi_network(self, nw_params, softap_band):
- """Test concurrenct wifi connection and softap.
- This helper method first starts SoftAp and then makes a wifi conenction.
+ """Test concurrent wifi connection and softap.
+ This helper method first starts SoftAp and then makes a wifi connection.
Args:
nw_params: Params for network STA connection.
softap_band: Band for the AP.
- 1. Bring up softap and verify AP is seen on a client device.
+ 1. Bring up softap and verify AP can be connected by a client device.
2. Bring up wifi.
3. Establish connection to a network.
- 4. Run iperf on the wifi connection to the network.
+ 4. Run iperf on the wifi/softap connection to the network.
"""
- self.start_softap_and_verify(softap_band)
+ softap_config = self.start_softap_and_verify(softap_band)
wutils.wifi_toggle_state(self.dut, True)
self.connect_to_wifi_network_and_verify((nw_params, self.dut))
self.run_iperf_client((nw_params, self.dut))
+ self.run_iperf_client((softap_config, self.dut_client))
# Verify that both softap & wifi is enabled concurrently.
self.verify_wifi_and_softap_enabled()
@@ -255,7 +258,7 @@
"""Helper to verify both wifi and softap is enabled
"""
asserts.assert_true(self.dut.droid.wifiCheckState(),
- "Wifi is not reported as running");
+ "Wifi is not reported as running")
asserts.assert_true(self.dut.droid.wifiIsApEnabled(),
"SoftAp is not reported as running")
@@ -346,9 +349,9 @@
def test_softap_5G_wifi_connection_2G(self):
"""Tests bringing up SoftAp on 5G followed by connection to 2G network.
"""
- self.configure_ap(channel_5g=WIFI_NETWORK_AP_CHANNEL_5G)
+ self.configure_ap(channel_2g=WIFI_NETWORK_AP_CHANNEL_2G)
self.start_softap_and_connect_to_wifi_network(
- self.wpapsk_5g, WIFI_CONFIG_APBAND_2G)
+ self.wpapsk_2g, WIFI_CONFIG_APBAND_5G)
@test_tracker_info(uuid="a2c62bc6-9ccd-4bc4-8a23-9a1b5d0b4b5c")
def test_softap_2G_wifi_connection_5G(self):
@@ -356,7 +359,7 @@
"""
self.configure_ap(channel_5g=WIFI_NETWORK_AP_CHANNEL_5G)
self.start_softap_and_connect_to_wifi_network(
- self.wpapsk_5g, WIFI_CONFIG_APBAND_5G)
+ self.wpapsk_5g, WIFI_CONFIG_APBAND_2G)
@test_tracker_info(uuid="a2c62bc6-9ccd-4bc4-8a23-9a1b5d0b4b5c")
def test_softap_2G_wifi_connection_5G_DFS(self):
@@ -364,14 +367,14 @@
"""
self.configure_ap(channel_5g=WIFI_NETWORK_AP_CHANNEL_5G_DFS)
self.start_softap_and_connect_to_wifi_network(
- self.wpapsk_5g, WIFI_CONFIG_APBAND_5G)
+ self.wpapsk_5g, WIFI_CONFIG_APBAND_2G)
@test_tracker_info(uuid="aa23a3fc-31a1-4d5c-8cf5-2eb9fdf9e7ce")
def test_softap_5G_wifi_connection_2G_with_location_scan_on(self):
"""Tests bringing up SoftAp on 5G followed by connection to 2G network
with location scans turned on.
"""
- self.configure_ap(channel_5g=WIFI_NETWORK_AP_CHANNEL_5G)
+ self.configure_ap(channel_2g=WIFI_NETWORK_AP_CHANNEL_2G)
self.turn_location_on_and_scan_toggle_on()
self.start_softap_and_connect_to_wifi_network(
- self.wpapsk_5g, WIFI_CONFIG_APBAND_2G)
+ self.wpapsk_2g, WIFI_CONFIG_APBAND_5G)
diff --git a/acts/tests/google/wifi/WifiStressTest.py b/acts/tests/google/wifi/WifiStressTest.py
index 01d9a6f..aee3d3b 100755
--- a/acts/tests/google/wifi/WifiStressTest.py
+++ b/acts/tests/google/wifi/WifiStressTest.py
@@ -32,7 +32,7 @@
WAIT_BEFORE_CONNECTION = 30
TIMEOUT = 1
-
+PING_ADDR = 'www.google.com'
class WifiStressTest(WifiBaseTest):
"""WiFi Stress test class.
@@ -80,11 +80,11 @@
def teardown_test(self):
self.dut.droid.wakeLockRelease()
self.dut.droid.goToSleepNow()
+ wutils.reset_wifi(self.dut)
def on_fail(self, test_name, begin_time):
self.dut.take_bug_report(test_name, begin_time)
self.dut.cat_adb_log(test_name, begin_time)
- pass
def teardown_class(self):
wutils.reset_wifi(self.dut)
@@ -120,6 +120,19 @@
ssid)
wutils.wifi_connect_by_id(self.dut, net_id)
+ def run_ping(self, sec):
+ """Run ping for given number of seconds.
+
+ Args:
+            sec: Time in seconds to run the ping traffic.
+
+ """
+ self.log.info("Running ping for %d seconds" % sec)
+ result = self.dut.adb.shell("ping -w %d %s" %(sec, PING_ADDR),
+ timeout=sec+1)
+ self.log.debug("Ping Result = %s" % result)
+ if "100% packet loss" in result:
+ raise signals.TestFailure("100% packet loss during ping")
"""Tests"""
@@ -128,13 +141,20 @@
"""Toggle WiFi state ON and OFF for N times."""
for count in range(self.stress_count):
"""Test toggling wifi"""
- self.log.debug("Going from on to off.")
- wutils.wifi_toggle_state(self.dut, False)
- self.log.debug("Going from off to on.")
- startTime = time.time()
- wutils.wifi_toggle_state(self.dut, True)
- startup_time = time.time() - startTime
- self.log.debug("WiFi was enabled on the device in %s s." % startup_time)
+ try:
+ self.log.debug("Going from on to off.")
+ wutils.wifi_toggle_state(self.dut, False)
+ self.log.debug("Going from off to on.")
+ startTime = time.time()
+ wutils.wifi_toggle_state(self.dut, True)
+ startup_time = time.time() - startTime
+ self.log.debug("WiFi was enabled on the device in %s s." %
+ startup_time)
+ except:
+                raise signals.TestFailure(details="", extras={"Iterations":"%d" %
+                    self.stress_count, "Pass":"%d" % count})
+ raise signals.TestPass(details="", extras={"Iterations":"%d" %
+ self.stress_count, "Pass":"%d" %(count+1)})
@test_tracker_info(uuid="49e3916a-9580-4bf7-a60d-a0f2545dcdde")
def test_stress_connect_traffic_disconnect_5g(self):
@@ -148,21 +168,26 @@
"""
for count in range(self.stress_count):
- net_id = self.dut.droid.wifiAddNetwork(self.wpa_5g)
- asserts.assert_true(net_id != -1, "Add network %r failed" % self.wpa_5g)
- self.dut.droid.wifiEnableNetwork(net_id, 0)
- self.scan_and_connect_by_id(self.wpa_5g, net_id)
- # Start IPerf traffic from phone to server.
- # Upload data for 10s.
- args = "-p {} -t {}".format(self.iperf_server.port, 10)
- self.log.info("Running iperf client {}".format(args))
- result, data = self.dut.run_iperf_client(self.iperf_server_address, args)
- if not result:
- self.log.debug("Error occurred in iPerf traffic.")
- raise signals.TestFailure("Error occurred in iPerf traffic. Current"
- " WiFi state = %d" % self.dut.droid.wifiCheckState())
- wutils.wifi_forget_network(self.dut,self.wpa_5g[WifiEnums.SSID_KEY])
- time.sleep(WAIT_BEFORE_CONNECTION)
+ try:
+ net_id = self.dut.droid.wifiAddNetwork(self.wpa_5g)
+ asserts.assert_true(net_id != -1, "Add network %r failed" % self.wpa_5g)
+ self.scan_and_connect_by_id(self.wpa_5g, net_id)
+ # Start IPerf traffic from phone to server.
+ # Upload data for 10s.
+ args = "-p {} -t {}".format(self.iperf_server.port, 10)
+ self.log.info("Running iperf client {}".format(args))
+ result, data = self.dut.run_iperf_client(self.iperf_server_address, args)
+ if not result:
+ self.log.debug("Error occurred in iPerf traffic.")
+ self.run_ping(10)
+ wutils.wifi_forget_network(self.dut,self.wpa_5g[WifiEnums.SSID_KEY])
+ time.sleep(WAIT_BEFORE_CONNECTION)
+ except:
+ raise signals.TestFailure("Network connect-disconnect failed."
+ "Look at logs", extras={"Iterations":"%d" %
+ self.stress_count, "Pass":"%d" %count})
+ raise signals.TestPass(details="", extras={"Iterations":"%d" %
+ self.stress_count, "Pass":"%d" %(count+1)})
@test_tracker_info(uuid="e9827dff-0755-43ec-8b50-1f9756958460")
def test_stress_connect_long_traffic_5g(self):
@@ -174,19 +199,24 @@
3. Verify no WiFi disconnects/data interruption.
"""
- self.scan_and_connect_by_ssid(self.wpa_5g)
- # Start IPerf traffic from server to phone.
- # Download data for 5 hours.
- sec = self.stress_hours * 60 * 60
- args = "-p {} -t {} -R".format(self.iperf_server.port, sec)
- self.log.info("Running iperf client {}".format(args))
- result, data = self.dut.run_iperf_client(self.iperf_server_address,
- args, timeout=sec+1)
- self.dut.droid.wifiDisconnect()
- if not result:
- self.log.debug("Error occurred in iPerf traffic.")
- raise signals.TestFailure("Error occurred in iPerf traffic. Current"
- " WiFi state = %d" % self.dut.droid.wifiCheckState())
+ try:
+ self.scan_and_connect_by_ssid(self.wpa_5g)
+ # Start IPerf traffic from server to phone.
+ # Download data for 5 hours.
+ sec = self.stress_hours * 60 * 60
+ args = "-p {} -t {} -R".format(self.iperf_server.port, sec)
+ self.log.info("Running iperf client {}".format(args))
+ result, data = self.dut.run_iperf_client(self.iperf_server_address,
+ args, timeout=sec+1)
+ if not result:
+ self.log.debug("Error occurred in iPerf traffic.")
+ self.run_ping(sec)
+ except:
+ raise signals.TestFailure("Network long-connect failed."
+ "Look at logs", extras={"Total Hours":"%d" %self.stress_hours,
+ "Seconds Run":"UNKNOWN"})
+ raise signals.TestPass(details="", extras={"Total Hours":"%d" %
+ self.stress_hours, "Seconds":"%d" %sec})
@test_tracker_info(uuid="d367c83e-5b00-4028-9ed8-f7b875997d13")
def test_stress_wifi_failover(self):
@@ -211,24 +241,28 @@
time.sleep(WAIT_FOR_AUTO_CONNECT)
cur_network = self.dut.droid.wifiGetConnectionInfo()
cur_ssid = cur_network[WifiEnums.SSID_KEY]
- self.log.debug("Cur_ssid = %s" % cur_ssid)
- for count in range(0,len(self.networks)):
+ self.log.info("Cur_ssid = %s" % cur_ssid)
+ for i in range(0,len(self.networks)):
self.log.debug("Forget network %s" % cur_ssid)
wutils.wifi_forget_network(self.dut, cur_ssid)
time.sleep(WAIT_FOR_AUTO_CONNECT)
cur_network = self.dut.droid.wifiGetConnectionInfo()
cur_ssid = cur_network[WifiEnums.SSID_KEY]
- self.log.debug("Cur_ssid = %s" % cur_ssid)
- if count == len(self.networks) - 1:
+ self.log.info("Cur_ssid = %s" % cur_ssid)
+ if i == len(self.networks) - 1:
break
if cur_ssid not in ssids:
raise signals.TestFailure("Device did not failover to the "
"expected network. SSID = %s" % cur_ssid)
network_config = self.dut.droid.wifiGetConfiguredNetworks()
- self.log.debug("Network Config = %s" % network_config)
+ self.log.info("Network Config = %s" % network_config)
if len(network_config):
raise signals.TestFailure("All the network configurations were not "
- "removed. Configured networks = %s" % network_config)
+ "removed. Configured networks = %s" % network_config,
+ extras={"Iterations":"%d" % self.stress_count,
+ "Pass":"%d" %(count*4)})
+ raise signals.TestPass(details="", extras={"Iterations":"%d" %
+            self.stress_count, "Pass":"%d" % ((i+1)*4)})
@test_tracker_info(uuid="2c19e8d1-ac16-4d7e-b309-795144e6b956")
def test_stress_softAP_startup_and_stop_5g(self):
@@ -241,14 +275,14 @@
4. Verify softAP is turned down and WiFi is up.
"""
+ # Set country code explicitly to "US".
+ self.dut.droid.wifiSetCountryCode(wutils.WifiEnums.CountryCode.US)
+ self.dut_client.droid.wifiSetCountryCode(wutils.WifiEnums.CountryCode.US)
ap_ssid = "softap_" + utils.rand_ascii_str(8)
ap_password = utils.rand_ascii_str(8)
self.dut.log.info("softap setup: %s %s", ap_ssid, ap_password)
config = {wutils.WifiEnums.SSID_KEY: ap_ssid}
config[wutils.WifiEnums.PWD_KEY] = ap_password
- # Set country code explicitly to "US".
- self.dut.droid.wifiSetCountryCode(wutils.WifiEnums.CountryCode.US)
- self.dut_client.droid.wifiSetCountryCode(wutils.WifiEnums.CountryCode.US)
for count in range(self.stress_count):
initial_wifi_state = self.dut.droid.wifiCheckState()
wutils.start_wifi_tethering(self.dut,
@@ -257,31 +291,40 @@
WifiEnums.WIFI_CONFIG_APBAND_5G)
wutils.start_wifi_connection_scan_and_ensure_network_found(
self.dut_client, ap_ssid)
- # Toggle WiFi ON, which inturn calls softAP teardown.
- wutils.wifi_toggle_state(self.dut, True)
- time.sleep(TIMEOUT)
+ wutils.stop_wifi_tethering(self.dut)
asserts.assert_false(self.dut.droid.wifiIsApEnabled(),
"SoftAp failed to shutdown!")
time.sleep(TIMEOUT)
cur_wifi_state = self.dut.droid.wifiCheckState()
if initial_wifi_state != cur_wifi_state:
- raise signals.TestFailure("Wifi state was %d before softAP and %d now!" %
- (initial_wifi_state, cur_wifi_state))
+ raise signals.TestFailure("Wifi state was %d before softAP and %d now!" %
+ (initial_wifi_state, cur_wifi_state),
+                                      extras={"Iterations":"%d" % self.stress_count,
+ "Pass":"%d" %count})
+ raise signals.TestPass(details="", extras={"Iterations":"%d" %
+ self.stress_count, "Pass":"%d" %(count+1)})
@test_tracker_info(uuid="eb22e26b-95d1-4580-8c76-85dfe6a42a0f")
def test_stress_wifi_roaming(self):
AP1_network = self.reference_networks[0]["5g"]
AP2_network = self.reference_networks[1]["5g"]
wutils.set_attns(self.attenuators, "AP1_on_AP2_off")
- wutils.wifi_connect(self.dut, AP1_network)
+ self.scan_and_connect_by_ssid(AP1_network)
# Reduce iteration to half because each iteration does two roams.
- for count in range(self.stress_count/2):
+ for count in range(int(self.stress_count/2)):
self.log.info("Roaming iteration %d, from %s to %s", count,
AP1_network, AP2_network)
- wutils.trigger_roaming_and_validate(self.dut, self.attenuators,
- "AP1_off_AP2_on", AP2_network)
- self.log.info("Roaming iteration %d, from %s to %s", count,
- AP2_network, AP1_network)
- wutils.trigger_roaming_and_validate(self.dut, self.attenuators,
- "AP1_on_AP2_off", AP1_network)
+ try:
+ wutils.trigger_roaming_and_validate(self.dut, self.attenuators,
+ "AP1_off_AP2_on", AP2_network)
+ self.log.info("Roaming iteration %d, from %s to %s", count,
+ AP2_network, AP1_network)
+ wutils.trigger_roaming_and_validate(self.dut, self.attenuators,
+ "AP1_on_AP2_off", AP1_network)
+ except:
+ raise signals.TestFailure("Roaming failed. Look at logs",
+ extras={"Iterations":"%d" %self.stress_count, "Pass":"%d" %
+ (count*2)})
+ raise signals.TestPass(details="", extras={"Iterations":"%d" %
+ self.stress_count, "Pass":"%d" %((count+1)*2)})
diff --git a/acts/tests/google/wifi/WifiTeleCoexTest.py b/acts/tests/google/wifi/WifiTeleCoexTest.py
index f5d4c7f..3d30640 100644
--- a/acts/tests/google/wifi/WifiTeleCoexTest.py
+++ b/acts/tests/google/wifi/WifiTeleCoexTest.py
@@ -127,9 +127,11 @@
self.log.debug("Toggling wifi ON")
wifi_utils.wifi_toggle_state(self.dut, True)
+ # Sleep for 1s before getting new WiFi state.
+ time.sleep(1)
if not self.dut.droid.wifiGetisWifiEnabled():
raise signals.TestFailure("WiFi did not turn on after turning ON"
- "Airplane mode")
+ " Airplane mode")
asserts.assert_true(
acts.utils.force_airplane_mode(self.dut, False),
"Can not turn on airplane mode on: %s" % self.dut.serial)
@@ -159,6 +161,8 @@
3. Make a short sequence voice call between Phone A and B.
"""
+ # Sleep for 5s before getting new WiFi state.
+ time.sleep(5)
wifi_info = self.dut.droid.wifiGetConnectionInfo()
if wifi_info[WifiEnums.SSID_KEY] != self.wifi_network_ssid:
raise signals.TestFailure("Phone failed to connect to %s network on"
diff --git a/acts/tests/google/wifi/WifiThroughputStabilityTest.py b/acts/tests/google/wifi/WifiThroughputStabilityTest.py
index 326e8e8..17613fa 100644
--- a/acts/tests/google/wifi/WifiThroughputStabilityTest.py
+++ b/acts/tests/google/wifi/WifiThroughputStabilityTest.py
@@ -34,6 +34,15 @@
class WifiThroughputStabilityTest(base_test.BaseTestClass):
+ """Class to test WiFi throughput stability.
+
+ This class tests throughput stability and identifies cases where throughput
+    fluctuates over time. The class sets up the AP, configures and connects
+    the phone, and runs an iperf throughput test at several attenuations. For an
+ example config file to run this test class see
+ example_connectivity_performance_ap_sta.json.
+ """
+
def __init__(self, controllers):
base_test.BaseTestClass.__init__(self, controllers)
self.tests = ("test_tput_stability_high_TCP_DL_ch6_VHT20",
@@ -253,6 +262,7 @@
wutils.wifi_toggle_state(self.dut, True)
wutils.reset_wifi(self.dut)
self.main_network[band]["channel"] = channel
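+            # The country code comes from the test params; the config file is
+            # expected to define it (e.g. "country_code": "US").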
+ self.dut.droid.wifiSetCountryCode(self.test_params["country_code"])
wutils.wifi_connect(self.dut, self.main_network[band], num_of_tries=5)
time.sleep(MED_SLEEP)
# Run test and log result
diff --git a/acts/tests/google/wifi/aware/functional/AttachTest.py b/acts/tests/google/wifi/aware/functional/AttachTest.py
index 598cca6..37f07e0 100644
--- a/acts/tests/google/wifi/aware/functional/AttachTest.py
+++ b/acts/tests/google/wifi/aware/functional/AttachTest.py
@@ -16,12 +16,13 @@
import time
+from acts import asserts
+from acts import utils
from acts.test_decorators import test_tracker_info
from acts.test_utils.wifi import wifi_test_utils as wutils
from acts.test_utils.wifi.aware import aware_const as aconsts
from acts.test_utils.wifi.aware import aware_test_utils as autils
from acts.test_utils.wifi.aware.AwareBaseTest import AwareBaseTest
-from acts.utils import force_airplane_mode
class AttachTest(AwareBaseTest):
@@ -99,8 +100,8 @@
"""Function test case / Attach test cases / attempt to attach with wifi off
Validates that if trying to attach with Wi-Fi disabled will receive the
- expected failure callback. As a side-effect also validates that the broadcast
- for Aware unavailable is received.
+ expected failure callback. As a side-effect also validates that the
+ broadcast for Aware unavailable is received.
"""
dut = self.android_devices[0]
wutils.wifi_toggle_state(dut, False)
@@ -108,6 +109,39 @@
dut.droid.wifiAwareAttach()
autils.wait_for_event(dut, aconsts.EVENT_CB_ON_ATTACH_FAILED)
+ @test_tracker_info(uuid="7dcc4530-c936-4447-9d22-a7c5b315e2ce")
+ def test_attach_with_doze(self):
+ """Function test case / Attach test cases / attempt to attach with doze on
+
+    Validates that trying to attach with the device in doze mode will receive the
+ expected failure callback. As a side-effect also validates that the
+ broadcast for Aware unavailable is received.
+ """
+ dut = self.android_devices[0]
+ asserts.assert_true(utils.enable_doze(dut), "Can't enable doze")
+ autils.wait_for_event(dut, aconsts.BROADCAST_WIFI_AWARE_NOT_AVAILABLE)
+ dut.droid.wifiAwareAttach()
+ autils.wait_for_event(dut, aconsts.EVENT_CB_ON_ATTACH_FAILED)
+ asserts.assert_true(utils.disable_doze(dut), "Can't disable doze")
+ autils.wait_for_event(dut, aconsts.BROADCAST_WIFI_AWARE_AVAILABLE)
+
+ @test_tracker_info(uuid="2574fd01-8974-4dd0-aeb8-a7194461140e")
+ def test_attach_with_location_off(self):
+ """Function test case / Attach test cases / attempt to attach with location
+ mode off.
+
+    Validates that trying to attach with device location mode off will
+ receive the expected failure callback. As a side-effect also validates that
+ the broadcast for Aware unavailable is received.
+ """
+ dut = self.android_devices[0]
+ utils.set_location_service(dut, False)
+ autils.wait_for_event(dut, aconsts.BROADCAST_WIFI_AWARE_NOT_AVAILABLE)
+ dut.droid.wifiAwareAttach()
+ autils.wait_for_event(dut, aconsts.EVENT_CB_ON_ATTACH_FAILED)
+ utils.set_location_service(dut, True)
+ autils.wait_for_event(dut, aconsts.BROADCAST_WIFI_AWARE_AVAILABLE)
+
@test_tracker_info(uuid="7ffde8e7-a010-4b77-97f5-959f263b5249")
def test_attach_apm_toggle_attach_again(self):
"""Validates that enabling Airplane mode while Aware is on resets it
@@ -120,12 +154,12 @@
autils.wait_for_event(dut, aconsts.EVENT_CB_ON_ATTACHED)
# enable airplane mode
- force_airplane_mode(dut, True)
+ utils.force_airplane_mode(dut, True)
autils.wait_for_event(dut, aconsts.BROADCAST_WIFI_AWARE_NOT_AVAILABLE)
# wait a few seconds and disable airplane mode
time.sleep(10)
- force_airplane_mode(dut, False)
+ utils.force_airplane_mode(dut, False)
autils.wait_for_event(dut, aconsts.BROADCAST_WIFI_AWARE_AVAILABLE)
# try enabling Aware again (attach)
diff --git a/acts/tests/google/wifi/aware/functional/DataPathTest.py b/acts/tests/google/wifi/aware/functional/DataPathTest.py
index 66ec715..8afb2a4 100644
--- a/acts/tests/google/wifi/aware/functional/DataPathTest.py
+++ b/acts/tests/google/wifi/aware/functional/DataPathTest.py
@@ -19,6 +19,7 @@
from acts import asserts
from acts.test_decorators import test_tracker_info
from acts.test_utils.net import connectivity_const as cconsts
+from acts.test_utils.wifi import wifi_test_utils as wutils
from acts.test_utils.wifi.aware import aware_const as aconsts
from acts.test_utils.wifi.aware import aware_test_utils as autils
from acts.test_utils.wifi.aware.AwareBaseTest import AwareBaseTest
@@ -148,7 +149,8 @@
use_peer_id,
passphrase_to_use=None,
pub_on_both=False,
- pub_on_both_same=True):
+ pub_on_both_same=True,
+ expect_failure=False):
"""Runs the in-band data-path tests.
Args:
@@ -163,6 +165,8 @@
publisher isn't used (existing to test use-case).
pub_on_both_same: If True then the second publish uses an identical
service name, otherwise a different service name.
+ expect_failure: If True then don't expect NDP formation, otherwise expect
+ NDP setup to succeed.
"""
(p_dut, s_dut, p_id, s_id, p_disc_id, s_disc_id, peer_id_on_sub,
peer_id_on_pub) = self.set_up_discovery(ptype, stype, use_peer_id,
@@ -189,51 +193,59 @@
s_dut.droid.wifiAwareCreateNetworkSpecifier(s_disc_id, peer_id_on_sub,
passphrase, pmk))
- # Publisher & Subscriber: wait for network formation
- p_net_event = autils.wait_for_event_with_keys(
- p_dut, cconsts.EVENT_NETWORK_CALLBACK,
- autils.EVENT_NDP_TIMEOUT,
- (cconsts.NETWORK_CB_KEY_EVENT,
- cconsts.NETWORK_CB_LINK_PROPERTIES_CHANGED),
- (cconsts.NETWORK_CB_KEY_ID, p_req_key))
- s_net_event = autils.wait_for_event_with_keys(
- s_dut, cconsts.EVENT_NETWORK_CALLBACK,
- autils.EVENT_NDP_TIMEOUT,
- (cconsts.NETWORK_CB_KEY_EVENT,
- cconsts.NETWORK_CB_LINK_PROPERTIES_CHANGED),
- (cconsts.NETWORK_CB_KEY_ID, s_req_key))
+ if expect_failure:
+ # Publisher & Subscriber: fail on network formation
+ time.sleep(autils.EVENT_NDP_TIMEOUT)
+ autils.fail_on_event_with_keys(p_dut, cconsts.EVENT_NETWORK_CALLBACK, 0,
+ (cconsts.NETWORK_CB_KEY_ID, p_req_key))
+ autils.fail_on_event_with_keys(s_dut, cconsts.EVENT_NETWORK_CALLBACK, 0,
+ (cconsts.NETWORK_CB_KEY_ID, s_req_key))
+ else:
+ # Publisher & Subscriber: wait for network formation
+ p_net_event = autils.wait_for_event_with_keys(
+ p_dut, cconsts.EVENT_NETWORK_CALLBACK,
+ autils.EVENT_NDP_TIMEOUT,
+ (cconsts.NETWORK_CB_KEY_EVENT,
+ cconsts.NETWORK_CB_LINK_PROPERTIES_CHANGED),
+ (cconsts.NETWORK_CB_KEY_ID, p_req_key))
+ s_net_event = autils.wait_for_event_with_keys(
+ s_dut, cconsts.EVENT_NETWORK_CALLBACK,
+ autils.EVENT_NDP_TIMEOUT,
+ (cconsts.NETWORK_CB_KEY_EVENT,
+ cconsts.NETWORK_CB_LINK_PROPERTIES_CHANGED),
+ (cconsts.NETWORK_CB_KEY_ID, s_req_key))
- p_aware_if = p_net_event["data"][cconsts.NETWORK_CB_KEY_INTERFACE_NAME]
- s_aware_if = s_net_event["data"][cconsts.NETWORK_CB_KEY_INTERFACE_NAME]
- self.log.info("Interface names: p=%s, s=%s", p_aware_if, s_aware_if)
+ p_aware_if = p_net_event["data"][cconsts.NETWORK_CB_KEY_INTERFACE_NAME]
+ s_aware_if = s_net_event["data"][cconsts.NETWORK_CB_KEY_INTERFACE_NAME]
+ self.log.info("Interface names: p=%s, s=%s", p_aware_if, s_aware_if)
- p_ipv6 = p_dut.droid.connectivityGetLinkLocalIpv6Address(p_aware_if).split(
- "%")[0]
- s_ipv6 = s_dut.droid.connectivityGetLinkLocalIpv6Address(s_aware_if).split(
- "%")[0]
- self.log.info("Interface addresses (IPv6): p=%s, s=%s", p_ipv6, s_ipv6)
+ p_ipv6 = \
+ p_dut.droid.connectivityGetLinkLocalIpv6Address(p_aware_if).split("%")[0]
+ s_ipv6 = \
+ s_dut.droid.connectivityGetLinkLocalIpv6Address(s_aware_if).split("%")[0]
+ self.log.info("Interface addresses (IPv6): p=%s, s=%s", p_ipv6, s_ipv6)
- # TODO: possibly send messages back and forth, prefer to use netcat/nc
+ # TODO: possibly send messages back and forth, prefer to use netcat/nc
- # terminate sessions and wait for ON_LOST callbacks
- p_dut.droid.wifiAwareDestroy(p_id)
- s_dut.droid.wifiAwareDestroy(s_id)
+ # terminate sessions and wait for ON_LOST callbacks
+ p_dut.droid.wifiAwareDestroy(p_id)
+ s_dut.droid.wifiAwareDestroy(s_id)
- autils.wait_for_event_with_keys(
- p_dut, cconsts.EVENT_NETWORK_CALLBACK, autils.EVENT_NDP_TIMEOUT,
- (cconsts.NETWORK_CB_KEY_EVENT,
- cconsts.NETWORK_CB_LOST), (cconsts.NETWORK_CB_KEY_ID, p_req_key))
- autils.wait_for_event_with_keys(
- s_dut, cconsts.EVENT_NETWORK_CALLBACK, autils.EVENT_NDP_TIMEOUT,
- (cconsts.NETWORK_CB_KEY_EVENT,
- cconsts.NETWORK_CB_LOST), (cconsts.NETWORK_CB_KEY_ID, s_req_key))
+ autils.wait_for_event_with_keys(
+ p_dut, cconsts.EVENT_NETWORK_CALLBACK, autils.EVENT_NDP_TIMEOUT,
+ (cconsts.NETWORK_CB_KEY_EVENT,
+ cconsts.NETWORK_CB_LOST), (cconsts.NETWORK_CB_KEY_ID, p_req_key))
+ autils.wait_for_event_with_keys(
+ s_dut, cconsts.EVENT_NETWORK_CALLBACK, autils.EVENT_NDP_TIMEOUT,
+ (cconsts.NETWORK_CB_KEY_EVENT,
+ cconsts.NETWORK_CB_LOST), (cconsts.NETWORK_CB_KEY_ID, s_req_key))
# clean-up
p_dut.droid.connectivityUnregisterNetworkCallback(p_req_key)
s_dut.droid.connectivityUnregisterNetworkCallback(s_req_key)
def run_oob_data_path_test(self, encr_type, use_peer_id,
- setup_discovery_sessions=False):
+ setup_discovery_sessions=False, expect_failure=False):
"""Runs the out-of-band data-path tests.
Args:
@@ -243,6 +255,8 @@
setup_discovery_sessions: If True also set up a (spurious) discovery
session (pub on both sides, sub on Responder side). Validates a corner
case.
+ expect_failure: If True then don't expect NDP formation, otherwise expect
+ NDP setup to succeed.
"""
init_dut = self.android_devices[0]
init_dut.pretty_name = "Initiator"
@@ -299,47 +313,57 @@
init_dut.droid.wifiAwareCreateNetworkSpecifierOob(
init_id, aconsts.DATA_PATH_INITIATOR, resp_mac, passphrase, pmk))
- # Initiator & Responder: wait for network formation
- init_net_event = autils.wait_for_event_with_keys(
- init_dut, cconsts.EVENT_NETWORK_CALLBACK,
- autils.EVENT_NDP_TIMEOUT,
- (cconsts.NETWORK_CB_KEY_EVENT,
- cconsts.NETWORK_CB_LINK_PROPERTIES_CHANGED),
- (cconsts.NETWORK_CB_KEY_ID, init_req_key))
- resp_net_event = autils.wait_for_event_with_keys(
- resp_dut, cconsts.EVENT_NETWORK_CALLBACK,
- autils.EVENT_NDP_TIMEOUT,
- (cconsts.NETWORK_CB_KEY_EVENT,
- cconsts.NETWORK_CB_LINK_PROPERTIES_CHANGED),
- (cconsts.NETWORK_CB_KEY_ID, resp_req_key))
+ if expect_failure:
+ # Initiator & Responder: fail on network formation
+ time.sleep(autils.EVENT_NDP_TIMEOUT)
+ autils.fail_on_event_with_keys(resp_dut, cconsts.EVENT_NETWORK_CALLBACK,
+ 0,
+ (cconsts.NETWORK_CB_KEY_ID, resp_req_key))
+ autils.fail_on_event_with_keys(init_dut, cconsts.EVENT_NETWORK_CALLBACK,
+ 0,
+ (cconsts.NETWORK_CB_KEY_ID, init_req_key))
+ else:
+ # Initiator & Responder: wait for network formation
+ init_net_event = autils.wait_for_event_with_keys(
+ init_dut, cconsts.EVENT_NETWORK_CALLBACK,
+ autils.EVENT_NDP_TIMEOUT,
+ (cconsts.NETWORK_CB_KEY_EVENT,
+ cconsts.NETWORK_CB_LINK_PROPERTIES_CHANGED),
+ (cconsts.NETWORK_CB_KEY_ID, init_req_key))
+ resp_net_event = autils.wait_for_event_with_keys(
+ resp_dut, cconsts.EVENT_NETWORK_CALLBACK,
+ autils.EVENT_NDP_TIMEOUT,
+ (cconsts.NETWORK_CB_KEY_EVENT,
+ cconsts.NETWORK_CB_LINK_PROPERTIES_CHANGED),
+ (cconsts.NETWORK_CB_KEY_ID, resp_req_key))
- init_aware_if = init_net_event["data"][
- cconsts.NETWORK_CB_KEY_INTERFACE_NAME]
- resp_aware_if = resp_net_event["data"][
- cconsts.NETWORK_CB_KEY_INTERFACE_NAME]
- self.log.info("Interface names: I=%s, R=%s", init_aware_if, resp_aware_if)
+ init_aware_if = init_net_event["data"][
+ cconsts.NETWORK_CB_KEY_INTERFACE_NAME]
+ resp_aware_if = resp_net_event["data"][
+ cconsts.NETWORK_CB_KEY_INTERFACE_NAME]
+ self.log.info("Interface names: I=%s, R=%s", init_aware_if, resp_aware_if)
- init_ipv6 = init_dut.droid.connectivityGetLinkLocalIpv6Address(
- init_aware_if).split("%")[0]
- resp_ipv6 = resp_dut.droid.connectivityGetLinkLocalIpv6Address(
- resp_aware_if).split("%")[0]
- self.log.info("Interface addresses (IPv6): I=%s, R=%s", init_ipv6,
- resp_ipv6)
+ init_ipv6 = init_dut.droid.connectivityGetLinkLocalIpv6Address(
+ init_aware_if).split("%")[0]
+ resp_ipv6 = resp_dut.droid.connectivityGetLinkLocalIpv6Address(
+ resp_aware_if).split("%")[0]
+ self.log.info("Interface addresses (IPv6): I=%s, R=%s", init_ipv6,
+ resp_ipv6)
- # TODO: possibly send messages back and forth, prefer to use netcat/nc
+ # TODO: possibly send messages back and forth, prefer to use netcat/nc
- # terminate sessions and wait for ON_LOST callbacks
- init_dut.droid.wifiAwareDestroy(init_id)
- resp_dut.droid.wifiAwareDestroy(resp_id)
+ # terminate sessions and wait for ON_LOST callbacks
+ init_dut.droid.wifiAwareDestroy(init_id)
+ resp_dut.droid.wifiAwareDestroy(resp_id)
- autils.wait_for_event_with_keys(
- init_dut, cconsts.EVENT_NETWORK_CALLBACK, autils.EVENT_NDP_TIMEOUT,
- (cconsts.NETWORK_CB_KEY_EVENT,
- cconsts.NETWORK_CB_LOST), (cconsts.NETWORK_CB_KEY_ID, init_req_key))
- autils.wait_for_event_with_keys(
- resp_dut, cconsts.EVENT_NETWORK_CALLBACK, autils.EVENT_NDP_TIMEOUT,
- (cconsts.NETWORK_CB_KEY_EVENT,
- cconsts.NETWORK_CB_LOST), (cconsts.NETWORK_CB_KEY_ID, resp_req_key))
+ autils.wait_for_event_with_keys(
+ init_dut, cconsts.EVENT_NETWORK_CALLBACK, autils.EVENT_NDP_TIMEOUT,
+ (cconsts.NETWORK_CB_KEY_EVENT,
+ cconsts.NETWORK_CB_LOST), (cconsts.NETWORK_CB_KEY_ID, init_req_key))
+ autils.wait_for_event_with_keys(
+ resp_dut, cconsts.EVENT_NETWORK_CALLBACK, autils.EVENT_NDP_TIMEOUT,
+ (cconsts.NETWORK_CB_KEY_EVENT,
+ cconsts.NETWORK_CB_LOST), (cconsts.NETWORK_CB_KEY_ID, resp_req_key))
# clean-up
resp_dut.droid.connectivityUnregisterNetworkCallback(resp_req_key)
@@ -638,13 +662,14 @@
# or different from the primary session.
# pub_type: Type of publish discovery session: unsolicited or solicited.
# sub_type: Type of subscribe discovery session: passive or active.
- # encr_type: Encription type: open, passphrase
+ # encr_type: Encryption type: open, passphrase
# peer_spec: Peer specification method: any or specific
#
# Note: In-Band means using Wi-Fi Aware for discovery and referring to the
# peer using the Aware-provided peer handle (as opposed to a MAC address).
#######################################
+ @test_tracker_info(uuid="e855dd81-45c8-4bb2-a204-7687c48ff843")
def test_ib_extra_pub_same_unsolicited_passive_open_specific(self):
"""Data-path: in-band, unsolicited/passive, open encryption, specific peer.
@@ -661,6 +686,7 @@
pub_on_both=True,
pub_on_both_same=True)
+ @test_tracker_info(uuid="57fc9d53-32ae-470f-a8b1-2fe37893687d")
def test_ib_extra_pub_same_unsolicited_passive_open_any(self):
"""Data-path: in-band, unsolicited/passive, open encryption, any peer.
@@ -677,6 +703,7 @@
pub_on_both=True,
pub_on_both_same=True)
+ @test_tracker_info(uuid="7a32f439-d745-4716-a75e-b54109aaaf82")
def test_ib_extra_pub_diff_unsolicited_passive_open_specific(self):
"""Data-path: in-band, unsolicited/passive, open encryption, specific peer.
@@ -693,6 +720,7 @@
pub_on_both=True,
pub_on_both_same=False)
+ @test_tracker_info(uuid="a14ddc66-88fd-4b49-ab37-225533867c63")
def test_ib_extra_pub_diff_unsolicited_passive_open_any(self):
"""Data-path: in-band, unsolicited/passive, open encryption, any peer.
@@ -715,7 +743,7 @@
# names is: test_oob_<encr_type>_<peer_spec>
# where:
#
- # encr_type: Encription type: open, passphrase
+ # encr_type: Encryption type: open, passphrase
# peer_spec: Peer specification method: any or specific
#
# Optionally set up an extra discovery session to test coexistence. If so
@@ -785,6 +813,7 @@
encr_type=self.ENCR_TYPE_PMK,
use_peer_id=False)
+ @test_tracker_info(uuid="dd464f24-b404-4eea-955c-d10c9e8adefc")
def test_oob_ib_coex_open_specific(self):
"""Data-path: out-of-band, open encryption, specific peer - in-band coex:
set up a concurrent discovery session to verify no impact. The session
@@ -797,6 +826,7 @@
use_peer_id=True,
setup_discovery_sessions=True)
+ @test_tracker_info(uuid="088fcd3a-b015-4179-a9a5-91f782b03e3b")
def test_oob_ib_coex_open_any(self):
"""Data-path: out-of-band, open encryption, any peer - in-band coex:
set up a concurrent discovery session to verify no impact. The session
@@ -928,7 +958,8 @@
"""
num_events = 0
while num_events != len(req_keys):
- event = autils.wait_for_event(dut, cconsts.EVENT_NETWORK_CALLBACK)
+ event = autils.wait_for_event(dut, cconsts.EVENT_NETWORK_CALLBACK,
+ timeout=autils.EVENT_NDP_TIMEOUT)
if (event["data"][cconsts.NETWORK_CB_KEY_EVENT] ==
cconsts.NETWORK_CB_LINK_PROPERTIES_CHANGED):
if event["data"][cconsts.NETWORK_CB_KEY_ID] in req_keys:
@@ -1204,6 +1235,8 @@
dut1_req_keys = []
dut2_aware_ifs = []
dut1_aware_ifs = []
+ dut2_aware_ipv6 = []
+ dut1_aware_ipv6 = []
dut2_type = aconsts.DATA_PATH_RESPONDER
dut1_type = aconsts.DATA_PATH_INITIATOR
@@ -1244,20 +1277,24 @@
# Wait for network
dut1_net_event = autils.wait_for_event_with_keys(
- dut1, cconsts.EVENT_NETWORK_CALLBACK, autils.EVENT_TIMEOUT,
+ dut1, cconsts.EVENT_NETWORK_CALLBACK, autils.EVENT_NDP_TIMEOUT,
(cconsts.NETWORK_CB_KEY_EVENT,
cconsts.NETWORK_CB_LINK_PROPERTIES_CHANGED),
(cconsts.NETWORK_CB_KEY_ID, dut1_req_key))
dut2_net_event = autils.wait_for_event_with_keys(
- dut2, cconsts.EVENT_NETWORK_CALLBACK, autils.EVENT_TIMEOUT,
+ dut2, cconsts.EVENT_NETWORK_CALLBACK, autils.EVENT_NDP_TIMEOUT,
(cconsts.NETWORK_CB_KEY_EVENT,
cconsts.NETWORK_CB_LINK_PROPERTIES_CHANGED),
(cconsts.NETWORK_CB_KEY_ID, dut2_req_key))
- dut2_aware_ifs.append(
- dut2_net_event["data"][cconsts.NETWORK_CB_KEY_INTERFACE_NAME])
- dut1_aware_ifs.append(
- dut1_net_event["data"][cconsts.NETWORK_CB_KEY_INTERFACE_NAME])
+ dut2_aware_if = dut2_net_event["data"][
+ cconsts.NETWORK_CB_KEY_INTERFACE_NAME]
+ dut1_aware_if = dut1_net_event["data"][
+ cconsts.NETWORK_CB_KEY_INTERFACE_NAME]
+ dut2_aware_ifs.append(dut2_aware_if)
+ dut1_aware_ifs.append(dut1_aware_if)
+ dut2_aware_ipv6.append(autils.get_ipv6_addr(dut2, dut2_aware_if))
+ dut1_aware_ipv6.append(autils.get_ipv6_addr(dut1, dut1_aware_if))
if flip_init_resp:
if dut2_is_responder:
@@ -1268,12 +1305,16 @@
dut1_type = aconsts.DATA_PATH_INITIATOR
dut2_is_responder = not dut2_is_responder
- # check that we are using 2 NDIs
+ # check that we are using 2 NDIs & that they have unique IPv6 addresses
dut1_aware_ifs = list(set(dut1_aware_ifs))
dut2_aware_ifs = list(set(dut2_aware_ifs))
+ dut1_aware_ipv6 = list(set(dut1_aware_ipv6))
+ dut2_aware_ipv6 = list(set(dut2_aware_ipv6))
self.log.info("Interface names: DUT1=%s, DUT2=%s", dut1_aware_ifs,
dut2_aware_ifs)
+ self.log.info("IPv6 addresses: DUT1=%s, DUT2=%s", dut1_aware_ipv6,
+ dut2_aware_ipv6)
self.log.info("DUT1 requests: %s", dut1_req_keys)
self.log.info("DUT2 requests: %s", dut2_req_keys)
@@ -1281,6 +1322,10 @@
len(dut1_aware_ifs), len(sec_configs), "Multiple DUT1 interfaces")
asserts.assert_equal(
len(dut2_aware_ifs), len(sec_configs), "Multiple DUT2 interfaces")
+ asserts.assert_equal(
+ len(dut1_aware_ipv6), len(sec_configs), "Multiple DUT1 IPv6 addresses")
+ asserts.assert_equal(
+ len(dut2_aware_ipv6), len(sec_configs), "Multiple DUT2 IPv6 addresses")
for i in range(len(sec_configs)):
if_name = "%s%d" % (aconsts.AWARE_NDI_PREFIX, i)
@@ -1302,41 +1347,42 @@
@test_tracker_info(uuid="2d728163-11cc-46ba-a973-c8e1e71397fc")
def test_multiple_ndi_open_passphrase(self):
- """Verify that can between 2 DUTs can create 2 NDPs with different security
+ """Verify that between 2 DUTs can create 2 NDPs with different security
configuration (one open, one using passphrase). The result should use two
different NDIs"""
self.run_multiple_ndi([None, self.PASSPHRASE])
@test_tracker_info(uuid="5f2c32aa-20b2-41f0-8b1e-d0b68df73ada")
def test_multiple_ndi_open_pmk(self):
- """Verify that can between 2 DUTs can create 2 NDPs with different security
+ """Verify that between 2 DUTs can create 2 NDPs with different security
configuration (one open, one using pmk). The result should use two
different NDIs"""
self.run_multiple_ndi([None, self.PMK])
@test_tracker_info(uuid="34467659-bcfb-40cd-ba25-7e50560fca63")
def test_multiple_ndi_passphrase_pmk(self):
- """Verify that can between 2 DUTs can create 2 NDPs with different security
+ """Verify that between 2 DUTs can create 2 NDPs with different security
configuration (one using passphrase, one using pmk). The result should use
two different NDIs"""
self.run_multiple_ndi([self.PASSPHRASE, self.PMK])
@test_tracker_info(uuid="d9194ce6-45b6-41b1-9cc8-ada79968966d")
def test_multiple_ndi_passphrases(self):
- """Verify that can between 2 DUTs can create 2 NDPs with different security
+ """Verify that between 2 DUTs can create 2 NDPs with different security
configuration (using different passphrases). The result should use two
different NDIs"""
self.run_multiple_ndi([self.PASSPHRASE, self.PASSPHRASE2])
@test_tracker_info(uuid="879df795-62d2-40d4-a862-bd46d8f7e67f")
def test_multiple_ndi_pmks(self):
- """Verify that can between 2 DUTs can create 2 NDPs with different security
+ """Verify that between 2 DUTs can create 2 NDPs with different security
configuration (using different PMKS). The result should use two different
NDIs"""
self.run_multiple_ndi([self.PMK, self.PMK2])
+ @test_tracker_info(uuid="397d380a-8e41-466e-9ccb-cf8f413d83ba")
def test_multiple_ndi_open_passphrase_flip(self):
- """Verify that can between 2 DUTs can create 2 NDPs with different security
+ """Verify that between 2 DUTs can create 2 NDPs with different security
configuration (one open, one using passphrase). The result should use two
different NDIs.
@@ -1344,8 +1390,9 @@
"""
self.run_multiple_ndi([None, self.PASSPHRASE], flip_init_resp=True)
+ @test_tracker_info(uuid="b3a4300b-1514-4cb8-a814-9c2baa449700")
def test_multiple_ndi_open_pmk_flip(self):
- """Verify that can between 2 DUTs can create 2 NDPs with different security
+ """Verify that between 2 DUTs can create 2 NDPs with different security
configuration (one open, one using pmk). The result should use two
different NDIs
@@ -1353,8 +1400,9 @@
"""
self.run_multiple_ndi([None, self.PMK], flip_init_resp=True)
+ @test_tracker_info(uuid="0bfea9e4-e57d-417f-8db4-245741e9bbd5")
def test_multiple_ndi_passphrase_pmk_flip(self):
- """Verify that can between 2 DUTs can create 2 NDPs with different security
+ """Verify that between 2 DUTs can create 2 NDPs with different security
configuration (one using passphrase, one using pmk). The result should use
two different NDIs
@@ -1362,8 +1410,9 @@
"""
self.run_multiple_ndi([self.PASSPHRASE, self.PMK], flip_init_resp=True)
+ @test_tracker_info(uuid="74023483-5417-431b-a362-991ad4a03ab8")
def test_multiple_ndi_passphrases_flip(self):
- """Verify that can between 2 DUTs can create 2 NDPs with different security
+ """Verify that between 2 DUTs can create 2 NDPs with different security
configuration (using different passphrases). The result should use two
different NDIs
@@ -1372,11 +1421,505 @@
self.run_multiple_ndi([self.PASSPHRASE, self.PASSPHRASE2],
flip_init_resp=True)
+ @test_tracker_info(uuid="873b2d91-28a1-403f-ae9c-d756bb2f59ee")
def test_multiple_ndi_pmks_flip(self):
- """Verify that can between 2 DUTs can create 2 NDPs with different security
+ """Verify that between 2 DUTs can create 2 NDPs with different security
configuration (using different PMKS). The result should use two different
NDIs
Flip Initiator and Responder roles.
"""
self.run_multiple_ndi([self.PMK, self.PMK2], flip_init_resp=True)
+
+ #######################################
+
+ @test_tracker_info(uuid="2f10a9df-7fbd-490d-a238-3523f47ab54c")
+ def test_ib_responder_any_usage(self):
+ """Verify that configuring an in-band (Aware discovery) Responder to receive
+ an NDP request from any peer is not permitted by current API level. Override
+    API check to validate that it is possible (i.e. that failure at current API level
+ is due to an API check and not some underlying failure).
+ """
+
+ # configure all devices to override API check and allow a Responder from ANY
+ for ad in self.android_devices:
+ autils.configure_ndp_allow_any_override(ad, True)
+ self.run_ib_data_path_test(
+ ptype=aconsts.PUBLISH_TYPE_UNSOLICITED,
+ stype=aconsts.SUBSCRIBE_TYPE_PASSIVE,
+ encr_type=self.ENCR_TYPE_OPEN,
+ use_peer_id=False)
+
+ # configure all devices to respect API check - i.e. disallow a Responder
+ # from ANY
+ for ad in self.android_devices:
+ autils.configure_ndp_allow_any_override(ad, False)
+ self.run_ib_data_path_test(
+ ptype=aconsts.PUBLISH_TYPE_UNSOLICITED,
+ stype=aconsts.SUBSCRIBE_TYPE_PASSIVE,
+ encr_type=self.ENCR_TYPE_OPEN,
+ use_peer_id=False,
+ expect_failure=True)
+
+ @test_tracker_info(uuid="5889cd41-0a72-4b7b-ab82-5b9168b9b5b8")
+ def test_oob_responder_any_usage(self):
+ """Verify that configuring an out-of-band (Aware discovery) Responder to
+ receive an NDP request from any peer is not permitted by current API level.
+    Override API check to validate that it is possible (i.e. that failure at current
+ API level is due to an API check and not some underlying failure).
+ """
+
+ # configure all devices to override API check and allow a Responder from ANY
+ for ad in self.android_devices:
+ autils.configure_ndp_allow_any_override(ad, True)
+ self.run_oob_data_path_test(
+ encr_type=self.ENCR_TYPE_OPEN,
+ use_peer_id=False)
+
+ # configure all devices to respect API check - i.e. disallow a Responder
+ # from ANY
+ for ad in self.android_devices:
+ autils.configure_ndp_allow_any_override(ad, False)
+ self.run_oob_data_path_test(
+ encr_type=self.ENCR_TYPE_OPEN,
+ use_peer_id=False,
+ expect_failure=True)
+
+ #######################################
+
+ def run_multiple_regulatory_domains(self, use_ib, init_domain, resp_domain):
+ """Verify that a data-path setup with two conflicting regulatory domains
+    works (the resulting NDP should run on Channel 6 - but that is not tested).
+
+ Args:
+ use_ib: True to use in-band discovery, False to use out-of-band discovery.
+ init_domain: The regulatory domain of the Initiator/Subscriber.
+ resp_domain: The regulatory domain of the Responder/Publisher.
+ """
+ init_dut = self.android_devices[0]
+ resp_dut = self.android_devices[1]
+
+ init_dut.droid.wifiSetCountryCode(init_domain)
+ resp_dut.droid.wifiSetCountryCode(resp_domain)
+
+ if use_ib:
+ (resp_req_key, init_req_key, resp_aware_if, init_aware_if, resp_ipv6,
+ init_ipv6) = autils.create_ib_ndp(resp_dut, init_dut,
+ autils.create_discovery_config(
+ "GoogleTestXyz",
+ aconsts.PUBLISH_TYPE_UNSOLICITED),
+ autils.create_discovery_config(
+ "GoogleTestXyz",
+ aconsts.SUBSCRIBE_TYPE_PASSIVE),
+ self.device_startup_offset)
+ else:
+ (init_req_key, resp_req_key, init_aware_if, resp_aware_if, init_ipv6,
+ resp_ipv6) = autils.create_oob_ndp(init_dut, resp_dut)
+
+ self.log.info("Interface names: I=%s, R=%s", init_aware_if, resp_aware_if)
+ self.log.info("Interface addresses (IPv6): I=%s, R=%s", init_ipv6,
+ resp_ipv6)
+
+ # clean-up
+ resp_dut.droid.connectivityUnregisterNetworkCallback(resp_req_key)
+ init_dut.droid.connectivityUnregisterNetworkCallback(init_req_key)
+
+ @test_tracker_info(uuid="eff53739-35c5-47a6-81f0-d70b51d89c3b")
+ def test_multiple_regulator_domains_ib_us_jp(self):
+ """Verify data-path setup across multiple regulator domains.
+
+ - Uses in-band discovery
+ - Subscriber=US, Publisher=JP
+ """
+ self.run_multiple_regulatory_domains(
+ use_ib=True,
+ init_domain=wutils.WifiEnums.CountryCode.US,
+ resp_domain=wutils.WifiEnums.CountryCode.JAPAN)
+
+ @test_tracker_info(uuid="19af47cc-3204-40ef-b50f-14cf7b89cf4a")
+ def test_multiple_regulator_domains_ib_jp_us(self):
+ """Verify data-path setup across multiple regulator domains.
+
+ - Uses in-band discovery
+ - Subscriber=JP, Publisher=US
+ """
+ self.run_multiple_regulatory_domains(
+ use_ib=True,
+ init_domain=wutils.WifiEnums.CountryCode.JAPAN,
+ resp_domain=wutils.WifiEnums.CountryCode.US)
+
+ @test_tracker_info(uuid="65285ab3-977f-4dbd-b663-d5a02f4fc663")
+ def test_multiple_regulator_domains_oob_us_jp(self):
+ """Verify data-path setup across multiple regulator domains.
+
+ - Uses out-of-band discovery
+ - Initiator=US, Responder=JP
+ """
+ self.run_multiple_regulatory_domains(
+ use_ib=False,
+ init_domain=wutils.WifiEnums.CountryCode.US,
+ resp_domain=wutils.WifiEnums.CountryCode.JAPAN)
+
+ @test_tracker_info(uuid="8a417e24-aaf6-44b9-a089-a07c3ba8d954")
+ def test_multiple_regulator_domains_oob_jp_us(self):
+ """Verify data-path setup across multiple regulator domains.
+
+ - Uses out-of-band discovery
+ - Initiator=JP, Responder=US
+ """
+ self.run_multiple_regulatory_domains(
+ use_ib=False,
+ init_domain=wutils.WifiEnums.CountryCode.JAPAN,
+ resp_domain=wutils.WifiEnums.CountryCode.US)
+
+ ########################################################################
+
+ def run_mix_ib_oob(self, same_request, ib_first, inits_on_same_dut):
+ """Validate that multiple network requests issued using both in-band and
+ out-of-band discovery behave as expected.
+
+ The same_request parameter controls whether a single identical NDP is
+ expected (if True) or whether multiple NDPs on different NDIs are expected
+ (if False).
+
+ Args:
+ same_request: If True, issue canonically identical requests (same NMI peer,
+ same passphrase); if False, use different passphrases.
+ ib_first: If True, the in-band network is requested first; otherwise the
+ out-of-band network is requested first.
+ inits_on_same_dut: If True, the Initiators are run on the same device;
+ otherwise they are run on different devices.
+ Note that Subscriber == Initiator.
+ """
+ if not same_request:
+ asserts.skip_if(self.android_devices[0].aware_capabilities[
+ aconsts.CAP_MAX_NDI_INTERFACES] < 2 or
+ self.android_devices[1].aware_capabilities[
+ aconsts.CAP_MAX_NDI_INTERFACES] < 2,
+ "DUTs do not support enough NDIs")
+
+ (p_dut, s_dut, p_id, s_id, p_disc_id, s_disc_id, peer_id_on_sub,
+ peer_id_on_pub_null) = self.set_up_discovery(
+ aconsts.PUBLISH_TYPE_UNSOLICITED, aconsts.SUBSCRIBE_TYPE_PASSIVE, False)
+
+ p_id2, p_mac = autils.attach_with_identity(p_dut)
+ s_id2, s_mac = autils.attach_with_identity(s_dut)
+
+ if inits_on_same_dut:
+ resp_dut = p_dut
+ resp_id = p_id2
+ resp_mac = p_mac
+
+ init_dut = s_dut
+ init_id = s_id2
+ init_mac = s_mac
+ else:
+ resp_dut = s_dut
+ resp_id = s_id2
+ resp_mac = s_mac
+
+ init_dut = p_dut
+ init_id = p_id2
+ init_mac = p_mac
+
+ passphrase = None if same_request else self.PASSPHRASE
+
+ if ib_first:
+ # request in-band network (to completion)
+ p_req_key = self.request_network(
+ p_dut,
+ p_dut.droid.wifiAwareCreateNetworkSpecifier(p_disc_id, None))
+ s_req_key = self.request_network(
+ s_dut,
+ s_dut.droid.wifiAwareCreateNetworkSpecifier(s_disc_id,
+ peer_id_on_sub))
+
+ # Publisher & Subscriber: wait for network formation
+ p_net_event = autils.wait_for_event_with_keys(
+ p_dut, cconsts.EVENT_NETWORK_CALLBACK,
+ autils.EVENT_NDP_TIMEOUT,
+ (cconsts.NETWORK_CB_KEY_EVENT,
+ cconsts.NETWORK_CB_LINK_PROPERTIES_CHANGED),
+ (cconsts.NETWORK_CB_KEY_ID, p_req_key))
+ s_net_event = autils.wait_for_event_with_keys(
+ s_dut, cconsts.EVENT_NETWORK_CALLBACK,
+ autils.EVENT_NDP_TIMEOUT,
+ (cconsts.NETWORK_CB_KEY_EVENT,
+ cconsts.NETWORK_CB_LINK_PROPERTIES_CHANGED),
+ (cconsts.NETWORK_CB_KEY_ID, s_req_key))
+
+ # request out-of-band network
+ resp_req_key = autils.request_network(resp_dut,
+ resp_dut.droid.wifiAwareCreateNetworkSpecifierOob(
+ resp_id, aconsts.DATA_PATH_RESPONDER, init_mac, passphrase))
+ init_req_key = autils.request_network(init_dut,
+ init_dut.droid.wifiAwareCreateNetworkSpecifierOob(
+ init_id, aconsts.DATA_PATH_INITIATOR, resp_mac, passphrase))
+
+ resp_net_event = autils.wait_for_event_with_keys(
+ resp_dut, cconsts.EVENT_NETWORK_CALLBACK,
+ autils.EVENT_NDP_TIMEOUT,
+ (cconsts.NETWORK_CB_KEY_EVENT,
+ cconsts.NETWORK_CB_LINK_PROPERTIES_CHANGED),
+ (cconsts.NETWORK_CB_KEY_ID, resp_req_key))
+ init_net_event = autils.wait_for_event_with_keys(
+ init_dut, cconsts.EVENT_NETWORK_CALLBACK,
+ autils.EVENT_NDP_TIMEOUT,
+ (cconsts.NETWORK_CB_KEY_EVENT,
+ cconsts.NETWORK_CB_LINK_PROPERTIES_CHANGED),
+ (cconsts.NETWORK_CB_KEY_ID, init_req_key))
+
+ if not ib_first:
+ # request in-band network (to completion)
+ p_req_key = self.request_network(
+ p_dut,
+ p_dut.droid.wifiAwareCreateNetworkSpecifier(p_disc_id, None))
+ s_req_key = self.request_network(
+ s_dut,
+ s_dut.droid.wifiAwareCreateNetworkSpecifier(s_disc_id,
+ peer_id_on_sub))
+
+ # Publisher & Subscriber: wait for network formation
+ p_net_event = autils.wait_for_event_with_keys(
+ p_dut, cconsts.EVENT_NETWORK_CALLBACK,
+ autils.EVENT_NDP_TIMEOUT,
+ (cconsts.NETWORK_CB_KEY_EVENT,
+ cconsts.NETWORK_CB_LINK_PROPERTIES_CHANGED),
+ (cconsts.NETWORK_CB_KEY_ID, p_req_key))
+ s_net_event = autils.wait_for_event_with_keys(
+ s_dut, cconsts.EVENT_NETWORK_CALLBACK,
+ autils.EVENT_NDP_TIMEOUT,
+ (cconsts.NETWORK_CB_KEY_EVENT,
+ cconsts.NETWORK_CB_LINK_PROPERTIES_CHANGED),
+ (cconsts.NETWORK_CB_KEY_ID, s_req_key))
+
+ # extract net info
+ pub_interface = p_net_event["data"][cconsts.NETWORK_CB_KEY_INTERFACE_NAME]
+ sub_interface = s_net_event["data"][cconsts.NETWORK_CB_KEY_INTERFACE_NAME]
+ resp_interface = resp_net_event["data"][
+ cconsts.NETWORK_CB_KEY_INTERFACE_NAME]
+ init_interface = init_net_event["data"][
+ cconsts.NETWORK_CB_KEY_INTERFACE_NAME]
+
+ self.log.info(
+ "Interface names: Pub=%s, Sub=%s, Resp=%s, Init=%s", pub_interface,
+ sub_interface, resp_interface, init_interface)
+
+ pub_ipv6 = \
+ p_dut.droid.connectivityGetLinkLocalIpv6Address(pub_interface).split("%")[0]
+ sub_ipv6 = \
+ s_dut.droid.connectivityGetLinkLocalIpv6Address(sub_interface).split("%")[0]
+ resp_ipv6 = \
+ resp_dut.droid.connectivityGetLinkLocalIpv6Address(resp_interface).split(
+ "%")[0]
+ init_ipv6 = \
+ init_dut.droid.connectivityGetLinkLocalIpv6Address(init_interface).split(
+ "%")[0]
+
+ self.log.info(
+ "Interface addresses (IPv6): Pub=%s, Sub=%s, Resp=%s, Init=%s", pub_ipv6,
+ sub_ipv6, resp_ipv6, init_ipv6)
+
+ # validate NDP/NDI conditions (using interface names & ipv6)
+ if same_request:
+ asserts.assert_equal(pub_interface,
+ resp_interface if inits_on_same_dut else init_interface,
+ "NDP interfaces don't match on Pub/other")
+ asserts.assert_equal(sub_interface,
+ init_interface if inits_on_same_dut else resp_interface,
+ "NDP interfaces don't match on Sub/other")
+
+ asserts.assert_equal(pub_ipv6,
+ resp_ipv6 if inits_on_same_dut else init_ipv6,
+ "NDP IPv6 don't match on Pub/other")
+ asserts.assert_equal(sub_ipv6,
+ init_ipv6 if inits_on_same_dut else resp_ipv6,
+ "NDP IPv6 don't match on Sub/other")
+ else:
+ asserts.assert_false(pub_interface == (
+ resp_interface if inits_on_same_dut else init_interface),
+ "NDP interfaces match on Pub/other")
+ asserts.assert_false(sub_interface == (
+ init_interface if inits_on_same_dut else resp_interface),
+ "NDP interfaces match on Sub/other")
+
+ asserts.assert_false(pub_ipv6 ==
+ (resp_ipv6 if inits_on_same_dut else init_ipv6),
+ "NDP IPv6 match on Pub/other")
+ asserts.assert_false(sub_ipv6 ==
+ (init_ipv6 if inits_on_same_dut else resp_ipv6),
+ "NDP IPv6 match on Sub/other")
+
+ # release requests
+ p_dut.droid.connectivityUnregisterNetworkCallback(p_req_key)
+ s_dut.droid.connectivityUnregisterNetworkCallback(s_req_key)
+ resp_dut.droid.connectivityUnregisterNetworkCallback(resp_req_key)
+ init_dut.droid.connectivityUnregisterNetworkCallback(init_req_key)
+
+ @test_tracker_info(uuid="d8a0839d-4ba0-43f2-af93-3cf1382f9f16")
+ def test_identical_ndps_mix_ib_oob_ib_first_same_polarity(self):
+ """Validate that a single NDP is created for multiple identical requests
+ which are issued through either in-band (ib) or out-of-band (oob) APIs.
+
+ The in-band request is issued first. Both Initiators (Sub == Initiator) are
+ run on the same device.
+ """
+ self.run_mix_ib_oob(same_request=True,
+ ib_first=True,
+ inits_on_same_dut=True)
+
+ @test_tracker_info(uuid="70bbb811-0bed-4a19-96b3-f2446e777c8a")
+ def test_identical_ndps_mix_ib_oob_oob_first_same_polarity(self):
+ """Validate that a single NDP is created for multiple identical requests
+ which are issued through either in-band (ib) or out-of-band (oob) APIs.
+
+ The out-of-band request is issued first. Both Initiators (Sub == Initiator)
+ are run on the same device.
+ """
+ self.run_mix_ib_oob(same_request=True,
+ ib_first=False,
+ inits_on_same_dut=True)
+
+ @test_tracker_info(uuid="d9796da5-f96a-4a51-be0f-89d6f5bfe3ad")
+ def test_identical_ndps_mix_ib_oob_ib_first_diff_polarity(self):
+ """Validate that a single NDP is created for multiple identical requests
+ which are issued through either in-band (ib) or out-of-band (oob) APIs.
+
+ The in-band request is issued first. Initiators (Sub == Initiator) are
+ run on different devices.
+ """
+ self.run_mix_ib_oob(same_request=True,
+ ib_first=True,
+ inits_on_same_dut=False)
+
+ @test_tracker_info(uuid="72b16cbf-53ad-4f98-8dcf-a8cc5fa812e3")
+ def test_identical_ndps_mix_ib_oob_oob_first_diff_polarity(self):
+ """Validate that a single NDP is created for multiple identical requests
+ which are issued through either in-band (ib) or out-of-band (oob) APIs.
+
+ The out-of-band request is issued first. Initiators (Sub == Initiator) are
+ run on different devices.
+ """
+ self.run_mix_ib_oob(same_request=True,
+ ib_first=False,
+ inits_on_same_dut=False)
+
+ @test_tracker_info(uuid="51f9581e-c5ee-48a7-84d2-adff4876c3d7")
+ def test_multiple_ndis_mix_ib_oob_ib_first_same_polarity(self):
+ """Validate that multiple NDIs are created for NDPs which are requested with
+ different security configurations. Use a mix of in-band and out-of-band APIs
+ to request the different NDPs.
+
+ The in-band request is issued first. Initiators (Sub == Initiator) are
+ run on the same device.
+ """
+ self.run_mix_ib_oob(same_request=False,
+ ib_first=True,
+ inits_on_same_dut=True)
+
+ @test_tracker_info(uuid="b1e3070e-4d38-4b31-862d-39b82e0f2853")
+ def test_multiple_ndis_mix_ib_oob_oob_first_same_polarity(self):
+ """Validate that multiple NDIs are created for NDPs which are requested with
+ different security configurations. Use a mix of in-band and out-of-band APIs
+ to request the different NDPs.
+
+ The out-of-band request is issued first. Initiators (Sub == Initiator) are
+ run on the same device.
+ """
+ self.run_mix_ib_oob(same_request=False,
+ ib_first=False,
+ inits_on_same_dut=True)
+
+ @test_tracker_info(uuid="b1e3070e-4d38-4b31-862d-39b82e0f2853")
+ def test_multiple_ndis_mix_ib_oob_ib_first_diff_polarity(self):
+ """Validate that multiple NDIs are created for NDPs which are requested with
+ different security configurations. Use a mix of in-band and out-of-band APIs
+ to request the different NDPs.
+
+ The in-band request is issued first. Initiators (Sub == Initiator) are
+ run on different devices.
+ """
+ self.run_mix_ib_oob(same_request=False,
+ ib_first=True,
+ inits_on_same_dut=False)
+
+ @test_tracker_info(uuid="596caadf-028e-494b-bbce-8304ccec2cbb")
+ def test_multiple_ndis_mix_ib_oob_oob_first_diff_polarity(self):
+ """Validate that multiple NDIs are created for NDPs which are requested with
+ different security configurations. Use a mix of in-band and out-of-band APIs
+ to request the different NDPs.
+
+ The out-of-band request is issued first. Initiators (Sub == Initiator) are
+ run on different devices.
+ """
+ self.run_mix_ib_oob(same_request=False,
+ ib_first=False,
+ inits_on_same_dut=False)
+
+ ########################################################################
+
+ def test_ndp_loop(self):
+ """Validate that can create a loop (chain) of N NDPs between N devices,
+ where N >= 3, e.g.
+
+ A - B
+ B - C
+ C - A
+
+ The NDPs are all OPEN (no encryption).
+ """
+ asserts.assert_true(len(self.android_devices) >= 3,
+ 'A minimum of 3 devices is needed to run the test, have %d' %
+ len(self.android_devices))
+
+ duts = self.android_devices
+ loop_len = len(duts)
+ ids = []
+ macs = []
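+ # per-device accumulators - one slot per device in the loop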
+ reqs = [[] for _ in range(loop_len)]
+ ifs = [[] for _ in range(loop_len)]
+ ipv6s = [[] for _ in range(loop_len)]
+
+ for i in range(loop_len):
+ duts[i].pretty_name = chr(ord("A") + i)
+
+ # start-up 3 devices (attach w/ identity)
+ for i in range(loop_len):
+ ids.append(duts[i].droid.wifiAwareAttach(True))
+ autils.wait_for_event(duts[i], aconsts.EVENT_CB_ON_ATTACHED)
+ ident_event = autils.wait_for_event(duts[i],
+ aconsts.EVENT_CB_ON_IDENTITY_CHANGED)
+ macs.append(ident_event['data']['mac'])
+
+ # wait for devices to synchronize with each other - there are no other
+ # mechanisms to make sure this happens for OOB discovery (except retrying
+ # to execute the data-path request)
+ time.sleep(autils.WAIT_FOR_CLUSTER)
+
+ # create the N NDPs: i to (i+1) % N
+ for i in range(loop_len):
+ peer_device = (i + 1) % loop_len
+
+ (init_req_key, resp_req_key, init_aware_if,
+ resp_aware_if, init_ipv6, resp_ipv6) = autils.create_oob_ndp_on_sessions(
+ duts[i], duts[peer_device],
+ ids[i], macs[i], ids[peer_device], macs[peer_device])
+
+ reqs[i].append(init_req_key)
+ reqs[peer_device].append(resp_req_key)
+ ifs[i].append(init_aware_if)
+ ifs[peer_device].append(resp_aware_if)
+ ipv6s[i].append(init_ipv6)
+ ipv6s[peer_device].append(resp_ipv6)
+
+ # clean-up
+ for i in range(loop_len):
+ for req in reqs[i]:
+ duts[i].droid.connectivityUnregisterNetworkCallback(req)
+
+ # info
+ self.log.info("MACs: %s", macs)
+ self.log.info("Interface names: %s", ifs)
+ self.log.info("IPv6 addresses: %s", ipv6s)
+ asserts.explicit_pass("NDP loop test",
+ extras={"macs": macs, "ifs": ifs, "ipv6s": ipv6s})
diff --git a/acts/tests/google/wifi/aware/functional/DiscoveryTest.py b/acts/tests/google/wifi/aware/functional/DiscoveryTest.py
index 1784d12..c6f75b0 100644
--- a/acts/tests/google/wifi/aware/functional/DiscoveryTest.py
+++ b/acts/tests/google/wifi/aware/functional/DiscoveryTest.py
@@ -831,7 +831,7 @@
s_mf_1="goodbye there string")
#######################################
- # Multiple concurrent services key
+ # Multiple concurrent services
#######################################
def run_multiple_concurrent_services(self, type_x, type_y):
@@ -957,6 +957,7 @@
event["data"][aconsts.SESSION_CB_KEY_MESSAGE_AS_STRING], y_msg,
"Message on service Y from DUT2 to DUT1 not received correctly")
+ @test_tracker_info(uuid="eef80cf3-1fd2-4526-969b-6af2dce785d7")
def test_multiple_concurrent_services_both_unsolicited_passive(self):
"""Validate multiple concurrent discovery sessions running on both devices.
- DUT1 & DUT2 running Publish for X
@@ -974,6 +975,7 @@
type_x=[aconsts.PUBLISH_TYPE_UNSOLICITED, aconsts.SUBSCRIBE_TYPE_PASSIVE],
type_y=[aconsts.PUBLISH_TYPE_UNSOLICITED, aconsts.SUBSCRIBE_TYPE_PASSIVE])
+ @test_tracker_info(uuid="46739f04-ab2b-4556-b1a4-9aa2774869b5")
def test_multiple_concurrent_services_both_solicited_active(self):
"""Validate multiple concurrent discovery sessions running on both devices.
- DUT1 & DUT2 running Publish for X
@@ -991,6 +993,7 @@
type_x=[aconsts.PUBLISH_TYPE_SOLICITED, aconsts.SUBSCRIBE_TYPE_ACTIVE],
type_y=[aconsts.PUBLISH_TYPE_SOLICITED, aconsts.SUBSCRIBE_TYPE_ACTIVE])
+ @test_tracker_info(uuid="5f8f7fd2-4a0e-4cca-8cbb-6d54353f2baa")
def test_multiple_concurrent_services_mix_unsolicited_solicited(self):
"""Validate multiple concurrent discovery sessions running on both devices.
- DUT1 & DUT2 running Publish for X
@@ -1008,3 +1011,25 @@
self.run_multiple_concurrent_services(
type_x=[aconsts.PUBLISH_TYPE_UNSOLICITED, aconsts.SUBSCRIBE_TYPE_PASSIVE],
type_y=[aconsts.PUBLISH_TYPE_SOLICITED, aconsts.SUBSCRIBE_TYPE_ACTIVE])
+
+ #########################################################
+
+ @test_tracker_info(uuid="908ec896-fc7a-4ee4-b633-a2f042b74448")
+ def test_upper_lower_service_name_equivalence(self):
+ """Validate that Service Name is case-insensitive. Publish a service name
+ with mixed case, subscribe to the same service name with alternative case
+ and verify that discovery happens."""
+ p_dut = self.android_devices[0]
+ s_dut = self.android_devices[1]
+
+ pub_service_name = "GoogleAbCdEf"
+ sub_service_name = "GoogleaBcDeF"
+
+ autils.create_discovery_pair(p_dut, s_dut,
+ p_config=autils.create_discovery_config(
+ pub_service_name,
+ aconsts.PUBLISH_TYPE_UNSOLICITED),
+ s_config=autils.create_discovery_config(
+ sub_service_name,
+ aconsts.SUBSCRIBE_TYPE_PASSIVE),
+ device_startup_offset=self.device_startup_offset)
diff --git a/acts/tests/google/wifi/aware/functional/MacRandomTest.py b/acts/tests/google/wifi/aware/functional/MacRandomTest.py
index 329ead4..af1503b 100644
--- a/acts/tests/google/wifi/aware/functional/MacRandomTest.py
+++ b/acts/tests/google/wifi/aware/functional/MacRandomTest.py
@@ -57,6 +57,10 @@
(NAN data-interface) on each enable/disable cycle"""
dut = self.android_devices[0]
+ # re-enable randomization interval (since if disabled it may also disable
+ # the 'randomize on enable' feature).
+ autils.configure_mac_random_interval(dut, 1800)
+
# DUT: attach and wait for confirmation & identity 10 times
mac_addresses = {}
for i in range(self.NUM_ITERATIONS):
@@ -108,9 +112,8 @@
dut = self.android_devices[0]
- # set randomization interval to 5 seconds
- dut.adb.shell("cmd wifiaware native_api set mac_random_interval_sec %d" %
- RANDOM_INTERVAL)
+ # set randomization interval to 120 seconds
+ autils.configure_mac_random_interval(dut, RANDOM_INTERVAL)
# attach and wait for first identity
id = dut.droid.wifiAwareAttach(True)
diff --git a/acts/tests/google/wifi/aware/performance/LatencyTest.py b/acts/tests/google/wifi/aware/performance/LatencyTest.py
index bde9ff4..bfadebc 100644
--- a/acts/tests/google/wifi/aware/performance/LatencyTest.py
+++ b/acts/tests/google/wifi/aware/performance/LatencyTest.py
@@ -92,8 +92,8 @@
s_dut.pretty_name = "Subscriber"
# override the default DW configuration
- autils.config_dw_all_modes(p_dut, dw_24ghz, dw_5ghz)
- autils.config_dw_all_modes(s_dut, dw_24ghz, dw_5ghz)
+ autils.config_power_settings(p_dut, dw_24ghz, dw_5ghz)
+ autils.config_power_settings(s_dut, dw_24ghz, dw_5ghz)
latencies = []
failed_discoveries = 0
@@ -174,8 +174,8 @@
s_dut.pretty_name = "Subscriber"
# override the default DW configuration
- autils.config_dw_all_modes(p_dut, dw_24ghz, dw_5ghz)
- autils.config_dw_all_modes(s_dut, dw_24ghz, dw_5ghz)
+ autils.config_power_settings(p_dut, dw_24ghz, dw_5ghz)
+ autils.config_power_settings(s_dut, dw_24ghz, dw_5ghz)
# Publisher+Subscriber: attach and wait for confirmation
p_id = p_dut.droid.wifiAwareAttach(False)
@@ -253,8 +253,8 @@
s_dut = self.android_devices[1]
# override the default DW configuration
- autils.config_dw_all_modes(p_dut, dw_24ghz, dw_5ghz)
- autils.config_dw_all_modes(s_dut, dw_24ghz, dw_5ghz)
+ autils.config_power_settings(p_dut, dw_24ghz, dw_5ghz)
+ autils.config_power_settings(s_dut, dw_24ghz, dw_5ghz)
# Start up a discovery session
(p_id, s_id, p_disc_id, s_disc_id,
@@ -341,8 +341,8 @@
resp_dut.pretty_name = 'Responder'
# override the default DW configuration
- autils.config_dw_all_modes(init_dut, dw_24ghz, dw_5ghz)
- autils.config_dw_all_modes(resp_dut, dw_24ghz, dw_5ghz)
+ autils.config_power_settings(init_dut, dw_24ghz, dw_5ghz)
+ autils.config_power_settings(resp_dut, dw_24ghz, dw_5ghz)
# Initiator+Responder: attach and wait for confirmation & identity
init_id = init_dut.droid.wifiAwareAttach(True)
@@ -427,6 +427,209 @@
dw_5ghz))
results[key_avail]["ndp_setup_failures"] = ndp_setup_failures
+ def run_end_to_end_latency(self, results, dw_24ghz, dw_5ghz, num_iterations,
+ startup_offset, include_setup):
+ """Measure the latency for end-to-end communication link setup:
+ - Start Aware
+ - Discovery
+ - Message from Sub -> Pub
+ - Message from Pub -> Sub
+ - NDP setup
+
+ Args:
+ results: Result dictionary to be populated - results are added (not erased).
+ dw_24ghz: DW interval in the 2.4GHz band.
+ dw_5ghz: DW interval in the 5GHz band.
+ num_iterations: Number of iterations (samples) to run.
+ startup_offset: The start-up gap (in seconds) between the two devices.
+ include_setup: True to include the cluster setup in the latency
+ measurements.
+ """
+ key = "dw24_%d_dw5_%d" % (dw_24ghz, dw_5ghz)
+ results[key] = {}
+ results[key]["num_iterations"] = num_iterations
+
+ p_dut = self.android_devices[0]
+ p_dut.pretty_name = "Publisher"
+ s_dut = self.android_devices[1]
+ s_dut.pretty_name = "Subscriber"
+
+ # override the default DW configuration
+ autils.config_power_settings(p_dut, dw_24ghz, dw_5ghz)
+ autils.config_power_settings(s_dut, dw_24ghz, dw_5ghz)
+
+ latencies = []
+
+ # allow for failures here since running lots of samples and would like to
+ # get the partial data even in the presence of errors
+ failures = 0
+
+ if not include_setup:
+ # Publisher+Subscriber: attach and wait for confirmation
+ p_id = p_dut.droid.wifiAwareAttach(False)
+ autils.wait_for_event(p_dut, aconsts.EVENT_CB_ON_ATTACHED)
+ time.sleep(startup_offset)
+ s_id = s_dut.droid.wifiAwareAttach(False)
+ autils.wait_for_event(s_dut, aconsts.EVENT_CB_ON_ATTACHED)
+
+ for i in range(num_iterations):
+ while (True): # for pseudo-goto/finalize
+ timestamp_start = time.perf_counter()
+
+ if include_setup:
+ # Publisher+Subscriber: attach and wait for confirmation
+ p_id = p_dut.droid.wifiAwareAttach(False)
+ autils.wait_for_event(p_dut, aconsts.EVENT_CB_ON_ATTACHED)
+ time.sleep(startup_offset)
+ s_id = s_dut.droid.wifiAwareAttach(False)
+ autils.wait_for_event(s_dut, aconsts.EVENT_CB_ON_ATTACHED)
+
+ # start publish
+ p_disc_id, p_disc_event = self.start_discovery_session(
+ p_dut, p_id, True, aconsts.PUBLISH_TYPE_UNSOLICITED)
+
+ # start subscribe
+ s_disc_id, s_session_event = self.start_discovery_session(
+ s_dut, s_id, False, aconsts.SUBSCRIBE_TYPE_PASSIVE)
+
+ # wait for discovery (allow for failures here since running lots of
+ # samples and would like to get the partial data even in the presence of
+ # errors)
+ try:
+ event = s_dut.ed.pop_event(aconsts.SESSION_CB_ON_SERVICE_DISCOVERED,
+ autils.EVENT_TIMEOUT)
+ s_dut.log.info("[Subscriber] SESSION_CB_ON_SERVICE_DISCOVERED: %s",
+ event["data"])
+ peer_id_on_sub = event['data'][aconsts.SESSION_CB_KEY_PEER_ID]
+ except queue.Empty:
+ s_dut.log.info("[Subscriber] Timed out while waiting for "
+ "SESSION_CB_ON_SERVICE_DISCOVERED")
+ failures = failures + 1
+ break
+
+ # message from Sub -> Pub
+ msg_s2p = "Message Subscriber -> Publisher #%d" % i
+ next_msg_id = self.get_next_msg_id()
+ s_dut.droid.wifiAwareSendMessage(s_disc_id, peer_id_on_sub, next_msg_id,
+ msg_s2p, 0)
+
+ # wait for Tx confirmation
+ try:
+ s_dut.ed.pop_event(aconsts.SESSION_CB_ON_MESSAGE_SENT,
+ autils.EVENT_TIMEOUT)
+ except queue.Empty:
+ s_dut.log.info("[Subscriber] Timed out while waiting for "
+ "SESSION_CB_ON_MESSAGE_SENT")
+ failures = failures + 1
+ break
+
+ # wait for Rx confirmation (and validate contents)
+ try:
+ event = p_dut.ed.pop_event(aconsts.SESSION_CB_ON_MESSAGE_RECEIVED,
+ autils.EVENT_TIMEOUT)
+ peer_id_on_pub = event['data'][aconsts.SESSION_CB_KEY_PEER_ID]
+ if (event["data"][
+ aconsts.SESSION_CB_KEY_MESSAGE_AS_STRING] != msg_s2p):
+ p_dut.log.info("[Publisher] Corrupted input message - %s", event)
+ failures = failures + 1
+ break
+ except queue.Empty:
+ p_dut.log.info("[Publisher] Timed out while waiting for "
+ "SESSION_CB_ON_MESSAGE_RECEIVED")
+ failures = failures + 1
+ break
+
+ # message from Pub -> Sub
+ msg_p2s = "Message Publisher -> Subscriber #%d" % i
+ next_msg_id = self.get_next_msg_id()
+ p_dut.droid.wifiAwareSendMessage(p_disc_id, peer_id_on_pub, next_msg_id,
+ msg_p2s, 0)
+
+ # wait for Tx confirmation
+ try:
+ p_dut.ed.pop_event(aconsts.SESSION_CB_ON_MESSAGE_SENT,
+ autils.EVENT_TIMEOUT)
+ except queue.Empty:
+ p_dut.log.info("[Publisher] Timed out while waiting for "
+ "SESSION_CB_ON_MESSAGE_SENT")
+ failures = failures + 1
+ break
+
+ # wait for Rx confirmation (and validate contents)
+ try:
+ event = s_dut.ed.pop_event(aconsts.SESSION_CB_ON_MESSAGE_RECEIVED,
+ autils.EVENT_TIMEOUT)
+ if (event["data"][
+ aconsts.SESSION_CB_KEY_MESSAGE_AS_STRING] != msg_p2s):
+ s_dut.log.info("[Subscriber] Corrupted input message - %s", event)
+ failures = failures + 1
+ break
+ except queue.Empty:
+ s_dut.log.info("[Subscriber] Timed out while waiting for "
+ "SESSION_CB_ON_MESSAGE_RECEIVED")
+ failures = failures + 1
+ break
+
+ # create NDP
+
+ # Publisher: request network
+ p_req_key = autils.request_network(
+ p_dut,
+ p_dut.droid.wifiAwareCreateNetworkSpecifier(p_disc_id,
+ peer_id_on_pub, None))
+
+ # Subscriber: request network
+ s_req_key = autils.request_network(
+ s_dut,
+ s_dut.droid.wifiAwareCreateNetworkSpecifier(s_disc_id,
+ peer_id_on_sub, None))
+
+ # Publisher & Subscriber: wait for network formation
+ try:
+ p_net_event = autils.wait_for_event_with_keys(
+ p_dut, cconsts.EVENT_NETWORK_CALLBACK, autils.EVENT_TIMEOUT, (
+ cconsts.NETWORK_CB_KEY_EVENT,
+ cconsts.NETWORK_CB_LINK_PROPERTIES_CHANGED),
+ (cconsts.NETWORK_CB_KEY_ID, p_req_key))
+ s_net_event = autils.wait_for_event_with_keys(
+ s_dut, cconsts.EVENT_NETWORK_CALLBACK, autils.EVENT_TIMEOUT, (
+ cconsts.NETWORK_CB_KEY_EVENT,
+ cconsts.NETWORK_CB_LINK_PROPERTIES_CHANGED),
+ (cconsts.NETWORK_CB_KEY_ID, s_req_key))
+ except:
+ failures = failures + 1
+ break
+
+ p_aware_if = p_net_event["data"][cconsts.NETWORK_CB_KEY_INTERFACE_NAME]
+ s_aware_if = s_net_event["data"][cconsts.NETWORK_CB_KEY_INTERFACE_NAME]
+
+ p_ipv6 = \
+ p_dut.droid.connectivityGetLinkLocalIpv6Address(p_aware_if).split("%")[
+ 0]
+ s_ipv6 = \
+ s_dut.droid.connectivityGetLinkLocalIpv6Address(s_aware_if).split("%")[
+ 0]
+
+ p_dut.log.info("[Publisher] IF=%s, IPv6=%s", p_aware_if, p_ipv6)
+ s_dut.log.info("[Subscriber] IF=%s, IPv6=%s", s_aware_if, s_ipv6)
+
+ latencies.append(time.perf_counter() - timestamp_start)
+ break
+
+ # destroy sessions
+ p_dut.droid.wifiAwareDestroyDiscoverySession(p_disc_id)
+ s_dut.droid.wifiAwareDestroyDiscoverySession(s_disc_id)
+ if include_setup:
+ p_dut.droid.wifiAwareDestroy(p_id)
+ s_dut.droid.wifiAwareDestroy(s_id)
+
+ autils.extract_stats(
+ p_dut,
+ data=latencies,
+ results=results[key],
+ key_prefix="",
+ log_prefix="End-to-End(dw24=%d, dw5=%d)" % (dw_24ghz, dw_5ghz))
+ results[key]["failures"] = failures
+
########################################################################
@@ -438,8 +641,8 @@
self.run_synchronization_latency(
results=results,
do_unsolicited_passive=True,
- dw_24ghz=aconsts.DW_24_INTERACTIVE,
- dw_5ghz=aconsts.DW_5_INTERACTIVE,
+ dw_24ghz=aconsts.POWER_DW_24_INTERACTIVE,
+ dw_5ghz=aconsts.POWER_DW_5_INTERACTIVE,
num_iterations=10,
startup_offset=startup_offset,
timeout_period=20)
@@ -454,8 +657,8 @@
self.run_synchronization_latency(
results=results,
do_unsolicited_passive=True,
- dw_24ghz=aconsts.DW_24_NON_INTERACTIVE,
- dw_5ghz=aconsts.DW_5_NON_INTERACTIVE,
+ dw_24ghz=aconsts.POWER_DW_24_NON_INTERACTIVE,
+ dw_5ghz=aconsts.POWER_DW_5_NON_INTERACTIVE,
num_iterations=10,
startup_offset=startup_offset,
timeout_period=20)
@@ -469,8 +672,8 @@
self.run_discovery_latency(
results=results,
do_unsolicited_passive=True,
- dw_24ghz=aconsts.DW_24_INTERACTIVE,
- dw_5ghz=aconsts.DW_5_INTERACTIVE,
+ dw_24ghz=aconsts.POWER_DW_24_INTERACTIVE,
+ dw_5ghz=aconsts.POWER_DW_5_INTERACTIVE,
num_iterations=100)
asserts.explicit_pass(
"test_discovery_latency_default_parameters finished", extras=results)
@@ -482,8 +685,8 @@
self.run_discovery_latency(
results=results,
do_unsolicited_passive=True,
- dw_24ghz=aconsts.DW_24_NON_INTERACTIVE,
- dw_5ghz=aconsts.DW_5_NON_INTERACTIVE,
+ dw_24ghz=aconsts.POWER_DW_24_NON_INTERACTIVE,
+ dw_5ghz=aconsts.POWER_DW_5_NON_INTERACTIVE,
num_iterations=100)
asserts.explicit_pass(
"test_discovery_latency_non_interactive_dws finished", extras=results)
@@ -510,8 +713,8 @@
results = {}
self.run_message_latency(
results=results,
- dw_24ghz=aconsts.DW_24_INTERACTIVE,
- dw_5ghz=aconsts.DW_5_INTERACTIVE,
+ dw_24ghz=aconsts.POWER_DW_24_INTERACTIVE,
+ dw_5ghz=aconsts.POWER_DW_5_INTERACTIVE,
num_iterations=100)
asserts.explicit_pass(
"test_message_latency_default_dws finished", extras=results)
@@ -524,8 +727,8 @@
results = {}
self.run_message_latency(
results=results,
- dw_24ghz=aconsts.DW_24_NON_INTERACTIVE,
- dw_5ghz=aconsts.DW_5_NON_INTERACTIVE,
+ dw_24ghz=aconsts.POWER_DW_24_NON_INTERACTIVE,
+ dw_5ghz=aconsts.POWER_DW_5_NON_INTERACTIVE,
num_iterations=100)
asserts.explicit_pass(
"test_message_latency_non_interactive_dws finished", extras=results)
@@ -536,8 +739,8 @@
results = {}
self.run_ndp_oob_latency(
results=results,
- dw_24ghz=aconsts.DW_24_INTERACTIVE,
- dw_5ghz=aconsts.DW_5_INTERACTIVE,
+ dw_24ghz=aconsts.POWER_DW_24_INTERACTIVE,
+ dw_5ghz=aconsts.POWER_DW_5_INTERACTIVE,
num_iterations=100)
asserts.explicit_pass(
"test_ndp_setup_latency_default_dws finished", extras=results)
@@ -549,8 +752,49 @@
results = {}
self.run_ndp_oob_latency(
results=results,
- dw_24ghz=aconsts.DW_24_NON_INTERACTIVE,
- dw_5ghz=aconsts.DW_5_NON_INTERACTIVE,
+ dw_24ghz=aconsts.POWER_DW_24_NON_INTERACTIVE,
+ dw_5ghz=aconsts.POWER_DW_5_NON_INTERACTIVE,
num_iterations=100)
asserts.explicit_pass(
"test_ndp_setup_latency_non_interactive_dws finished", extras=results)
+
+ def test_end_to_end_latency_default_dws(self):
+ """Measure the latency for end-to-end communication link setup:
+ - Start Aware
+ - Discovery
+ - Message from Sub -> Pub
+ - Message from Pub -> Sub
+ - NDP setup
+ """
+ results = {}
+ self.run_end_to_end_latency(
+ results,
+ dw_24ghz=aconsts.POWER_DW_24_INTERACTIVE,
+ dw_5ghz=aconsts.POWER_DW_5_INTERACTIVE,
+ num_iterations=10,
+ startup_offset=0,
+ include_setup=True)
+ asserts.explicit_pass(
+ "test_end_to_end_latency_default_dws finished", extras=results)
+
+ def test_end_to_end_latency_post_attach_default_dws(self):
+ """Measure the latency for end-to-end communication link setup without
+ the initial synchronization:
+ - Start Aware & synchronize initially
+ - Loop:
+ - Discovery
+ - Message from Sub -> Pub
+ - Message from Pub -> Sub
+ - NDP setup
+ """
+ results = {}
+ self.run_end_to_end_latency(
+ results,
+ dw_24ghz=aconsts.POWER_DW_24_INTERACTIVE,
+ dw_5ghz=aconsts.POWER_DW_5_INTERACTIVE,
+ num_iterations=10,
+ startup_offset=0,
+ include_setup=False)
+ asserts.explicit_pass(
+ "test_end_to_end_latency_post_attach_default_dws finished",
+ extras=results)
diff --git a/acts/tests/google/wifi/aware/performance/ThroughputTest.py b/acts/tests/google/wifi/aware/performance/ThroughputTest.py
index 6cf1046..ddb6d15 100644
--- a/acts/tests/google/wifi/aware/performance/ThroughputTest.py
+++ b/acts/tests/google/wifi/aware/performance/ThroughputTest.py
@@ -36,7 +36,7 @@
PASSPHRASE2 = "This is some random passphrase - very very secure - but diff!!"
def __init__(self, controllers):
- AwareBaseTest.__init__(self, controllers)
+ super(ThroughputTest, self).__init__(controllers)
def request_network(self, dut, ns):
"""Request a Wi-Fi Aware network.
@@ -302,12 +302,12 @@
# Wait for network
init_net_event = autils.wait_for_event_with_keys(
- init_dut, cconsts.EVENT_NETWORK_CALLBACK, autils.EVENT_TIMEOUT,
+ init_dut, cconsts.EVENT_NETWORK_CALLBACK, autils.EVENT_NDP_TIMEOUT,
(cconsts.NETWORK_CB_KEY_EVENT,
cconsts.NETWORK_CB_LINK_PROPERTIES_CHANGED),
(cconsts.NETWORK_CB_KEY_ID, init_req_key))
resp_net_event = autils.wait_for_event_with_keys(
- resp_dut, cconsts.EVENT_NETWORK_CALLBACK, autils.EVENT_TIMEOUT,
+ resp_dut, cconsts.EVENT_NETWORK_CALLBACK, autils.EVENT_NDP_TIMEOUT,
(cconsts.NETWORK_CB_KEY_EVENT,
cconsts.NETWORK_CB_LINK_PROPERTIES_CHANGED),
(cconsts.NETWORK_CB_KEY_ID, resp_req_key))
diff --git a/acts/tests/google/wifi/aware/stress/DataPathStressTest.py b/acts/tests/google/wifi/aware/stress/DataPathStressTest.py
index 9a862cb..f718007 100644
--- a/acts/tests/google/wifi/aware/stress/DataPathStressTest.py
+++ b/acts/tests/google/wifi/aware/stress/DataPathStressTest.py
@@ -18,6 +18,7 @@
import time
from acts import asserts
+from acts.test_decorators import test_tracker_info
from acts.test_utils.net import connectivity_const as cconsts
from acts.test_utils.wifi.aware import aware_const as aconsts
from acts.test_utils.wifi.aware import aware_test_utils as autils
@@ -30,16 +31,29 @@
ATTACH_ITERATIONS = 2
# Number of iterations on create/destroy NDP in each discovery session.
- NDP_ITERATIONS = 20
+ NDP_ITERATIONS = 50
+
+ # Maximum percentage of NDP setup failures over all iterations
+ MAX_FAILURE_PERCENTAGE = 1
def __init__(self, controllers):
AwareBaseTest.__init__(self, controllers)
################################################################
- def test_oob_ndp_stress(self):
+ def run_oob_ndp_stress(self, attach_iterations, ndp_iterations,
+ trigger_failure_on_index=None):
"""Run NDP (NAN data-path) stress test creating and destroying Aware
- attach sessions, discovery sessions, and NDPs."""
+ attach sessions, discovery sessions, and NDPs.
+
+ Args:
+ attach_iterations: Number of attach sessions.
+ ndp_iterations: Number of NDP to be attempted per attach session.
+ trigger_failure_on_index: Trigger a failure on this NDP iteration (the
+ mechanism is to request the NDP on the Initiator
+ before issuing the request on the Responder).
+ If None then no artificial failure is triggered.
+ """
init_dut = self.android_devices[0]
init_dut.pretty_name = 'Initiator'
resp_dut = self.android_devices[1]
@@ -50,7 +64,7 @@
ndp_resp_setup_success = 0
ndp_resp_setup_failures = 0
- for attach_iter in range(self.ATTACH_ITERATIONS):
+ for attach_iter in range(attach_iterations):
init_id = init_dut.droid.wifiAwareAttach(True)
autils.wait_for_event(init_dut, aconsts.EVENT_CB_ON_ATTACHED)
init_ident_event = autils.wait_for_event(
@@ -68,18 +82,41 @@
# to execute the data-path request)
time.sleep(autils.WAIT_FOR_CLUSTER)
- for ndp_iteration in range(self.NDP_ITERATIONS):
- # Responder: request network
- resp_req_key = autils.request_network(
- resp_dut,
- resp_dut.droid.wifiAwareCreateNetworkSpecifierOob(
- resp_id, aconsts.DATA_PATH_RESPONDER, init_mac, None))
+ for ndp_iteration in range(ndp_iterations):
+ if trigger_failure_on_index != ndp_iteration:
+ # Responder: request network
+ resp_req_key = autils.request_network(
+ resp_dut,
+ resp_dut.droid.wifiAwareCreateNetworkSpecifierOob(
+ resp_id, aconsts.DATA_PATH_RESPONDER, init_mac, None))
- # Initiator: request network
- init_req_key = autils.request_network(
- init_dut,
- init_dut.droid.wifiAwareCreateNetworkSpecifierOob(
- init_id, aconsts.DATA_PATH_INITIATOR, resp_mac, None))
+ # Wait a minimal amount of time to let the Responder configure itself
+ # and be ready for the request. While calling it first may be
+ # sufficient, there is no guarantee that a glitch won't slow the
+ # Responder down enough to invert the setup order.
+ time.sleep(1)
+
+ # Initiator: request network
+ init_req_key = autils.request_network(
+ init_dut,
+ init_dut.droid.wifiAwareCreateNetworkSpecifierOob(
+ init_id, aconsts.DATA_PATH_INITIATOR, resp_mac, None))
+ else:
+ # Initiator: request network
+ init_req_key = autils.request_network(
+ init_dut,
+ init_dut.droid.wifiAwareCreateNetworkSpecifierOob(
+ init_id, aconsts.DATA_PATH_INITIATOR, resp_mac, None))
+
+ # Wait a minimal amount of time to let the Initiator configure itself
+ # to guarantee failure!
+ time.sleep(2)
+
+ # Responder: request network
+ resp_req_key = autils.request_network(
+ resp_dut,
+ resp_dut.droid.wifiAwareCreateNetworkSpecifierOob(
+ resp_id, aconsts.DATA_PATH_RESPONDER, init_mac, None))
# Initiator: wait for network formation
got_on_available = False
@@ -138,9 +175,31 @@
results['ndp_init_setup_failures'] = ndp_init_setup_failures
results['ndp_resp_setup_success'] = ndp_resp_setup_success
results['ndp_resp_setup_failures'] = ndp_resp_setup_failures
- asserts.assert_equal(
- ndp_init_setup_failures + ndp_resp_setup_failures,
- 0,
- 'test_oob_ndp_stress finished',
- extras=results)
- asserts.explicit_pass("test_oob_ndp_stress done", extras=results)
+ max_failures = (
+ self.MAX_FAILURE_PERCENTAGE * attach_iterations * ndp_iterations / 100)
+ if max_failures == 0:
+ max_failures = 1
+ if trigger_failure_on_index is not None:
+ max_failures = max_failures + 1 # for the triggered failure
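+ # failures are counted on both the Initiator and the Responder, hence the 2x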
+ asserts.assert_true(
+ (ndp_init_setup_failures + ndp_resp_setup_failures) < (2 * max_failures),
+ 'NDP setup failure rate exceeds threshold', extras=results)
+ asserts.explicit_pass("test_oob_ndp_stress* done", extras=results)
+
+ @test_tracker_info(uuid="a20a96ba-e71f-4d31-b850-b88a75381981")
+ def test_oob_ndp_stress(self):
+ """Run NDP (NAN data-path) stress test creating and destroying Aware
+ attach sessions, discovery sessions, and NDPs."""
+ self.run_oob_ndp_stress(self.ATTACH_ITERATIONS, self.NDP_ITERATIONS)
+
+ @test_tracker_info(uuid="1fb4a383-bf1a-411a-a904-489dd9e29c6a")
+ def test_oob_ndp_stress_failure_case(self):
+ """Run NDP (NAN data-path) stress test creating and destroying Aware
+ attach sessions, discovery sessions, and NDPs.
+
+ Verify recovery from failure by triggering an artificial failure and
+ verifying that all subsequent iterations succeed.
+ """
+ self.run_oob_ndp_stress(attach_iterations=1,
+ ndp_iterations=10,
+ trigger_failure_on_index=3)
diff --git a/acts/tests/google/wifi/aware/stress/DiscoveryStressTest.py b/acts/tests/google/wifi/aware/stress/DiscoveryStressTest.py
index 8b3d925..eaa5d19 100644
--- a/acts/tests/google/wifi/aware/stress/DiscoveryStressTest.py
+++ b/acts/tests/google/wifi/aware/stress/DiscoveryStressTest.py
@@ -15,9 +15,9 @@
# limitations under the License.
import queue
-import time
from acts import asserts
+from acts.test_decorators import test_tracker_info
from acts.test_utils.wifi.aware import aware_const as aconsts
from acts.test_utils.wifi.aware import aware_test_utils as autils
from acts.test_utils.wifi.aware.AwareBaseTest import AwareBaseTest
@@ -37,6 +37,7 @@
####################################################################
+ @test_tracker_info(uuid="783791e5-7726-44e0-ac5b-98c1dbf493cb")
def test_discovery_stress(self):
"""Create and destroy a random array of discovery sessions, up to the
limit of capabilities."""
@@ -101,9 +102,6 @@
results = {}
results['discovery_setup_success'] = discovery_setup_success
results['discovery_setup_fail'] = discovery_setup_fail
- asserts.assert_equal(
- discovery_setup_fail,
- 0,
- 'test_discovery_stress finished',
- extras=results)
+ asserts.assert_equal(discovery_setup_fail, 0,
+ 'Discovery setup failures', extras=results)
asserts.explicit_pass('test_discovery_stress done', extras=results)
diff --git a/acts/tests/google/wifi/aware/stress/MessagesStressTest.py b/acts/tests/google/wifi/aware/stress/MessagesStressTest.py
index 5871d61..34827f1 100644
--- a/acts/tests/google/wifi/aware/stress/MessagesStressTest.py
+++ b/acts/tests/google/wifi/aware/stress/MessagesStressTest.py
@@ -17,6 +17,7 @@
import queue
from acts import asserts
+from acts.test_decorators import test_tracker_info
from acts.test_utils.wifi.aware import aware_const as aconsts
from acts.test_utils.wifi.aware import aware_test_utils as autils
from acts.test_utils.wifi.aware.AwareBaseTest import AwareBaseTest
@@ -29,7 +30,19 @@
class MessagesStressTest(AwareBaseTest):
"""Set of stress tests for Wi-Fi Aware L2 (layer 2) message exchanges."""
+
+ # Number of iterations in the stress test (number of messages)
NUM_ITERATIONS = 100
+
+ # Maximum permitted percentage of messages which fail to be transmitted
+ # correctly
+ MAX_TX_FAILURE_PERCENTAGE = 2
+
+ # Maximum permitted percentage of messages which are received more than once
+ # (indicating, most likely, that the ACK wasn't received and the message was
+ # retransmitted)
+ MAX_DUPLICATE_RX_PERCENTAGE = 2
+
SERVICE_NAME = "GoogleTestServiceXY"
def __init__(self, controllers):
@@ -184,6 +197,7 @@
#######################################################################
+ @test_tracker_info(uuid="e88c060f-4ca7-41c1-935a-d3d62878ec0b")
def test_stress_message(self):
"""Stress test for bi-directional message transmission and reception."""
p_dut = self.android_devices[0]
@@ -250,6 +264,9 @@
# clear errors
asserts.assert_equal(results["tx_unknown_ids"], 0, "Message ID corruption",
results)
+ asserts.assert_equal(results["tx_count_neither"], 0,
+ "Tx message with no success or fail indication",
+ results)
asserts.assert_equal(results["tx_count_duplicate_fail"], 0,
"Duplicate Tx fail messages", results)
asserts.assert_equal(results["tx_count_duplicate_success"], 0,
@@ -266,4 +283,14 @@
asserts.assert_equal(results["rx_count_fail_tx_indication"], 0,
"Message received but Tx didn't get ACK", results)
- asserts.explicit_pass("test_stress_message done", extras=results)
+ # permissible failures based on thresholds
+ asserts.assert_true(results["tx_count_fail"] <= (
+ self.MAX_TX_FAILURE_PERCENTAGE * self.NUM_ITERATIONS / 100),
+ "Number of Tx failures exceeds threshold",
+ extras=results)
+ asserts.assert_true(results["rx_count_duplicate"] <= (
+ self.MAX_DUPLICATE_RX_PERCENTAGE * self.NUM_ITERATIONS / 100),
+ "Number of duplicate Rx exceeds threshold",
+ extras=results)
+
+ asserts.explicit_pass("test_stress_message done", extras=results)
\ No newline at end of file
diff --git a/acts/tests/google/wifi/example_config.json b/acts/tests/google/wifi/example_config.json
new file mode 100644
index 0000000..42b0be7
--- /dev/null
+++ b/acts/tests/google/wifi/example_config.json
@@ -0,0 +1,38 @@
+{
+ "_description": "This and example IOT WiFi testbed.",
+ "testbed": [
+ {
+ "_description": "WiFi testbed with 1 devices",
+ "name": "<test station name>",
+ "AndroidDevice": [
+ "<device serial>"
+ ],
+ "IPerfServer": [
+ 5005
+ ]
+ }
+ ],
+ "logpath": "/tmp/ACTS_logs",
+ "testpaths": [
+ "<path to acts root>/tools/test/connectivity/acts/tests/google/wifi"
+ ],
+ "iot_networks": [
+ {
+ "SSID": "<your SSID 2G>",
+ "password": "<your password>"
+ },
+ {
+ "SSID": "<your SSID 5G>",
+ "password": "<your password>"
+ },
+ {
+ "SSID": "<your SSID 2G 2>",
+ "password": "<your password>"
+ },
+ {
+ "SSID": "<your SSID 5G 2>",
+ "password": "<your password>"
+ }
+ ],
+ "iperf_server_address": "<your IP address>"
+}
\ No newline at end of file
diff --git a/acts/tests/google/wifi/example_connectivity_performance_ap_sta.json b/acts/tests/google/wifi/example_connectivity_performance_ap_sta.json
new file mode 100644
index 0000000..234df4a
--- /dev/null
+++ b/acts/tests/google/wifi/example_connectivity_performance_ap_sta.json
@@ -0,0 +1,89 @@
+{
+ "testbed": [{
+ "name": "<your testbed name>",
+ "AndroidDevice": ["<your device serial number>"],
+ "bug_report": 1,
+ "RetailAccessPoints": ["<your ap configuration. see class definition in wifi_retail_ap.py>"],
+ "Attenuator": ["<your attenuator configuration. see attenuator class definition>"],
+ "main_network": {
+ "<your network name>": {
+ "SSID": "<your SSID>",
+ "password": "<your key>",
+ "BSSID": "<your BSSID>"
+ },
+ "<your other network names>": {
+ "SSID": "<your SSID>",
+ "password": "<your key>",
+ "BSSID": "<your BSSID>"
+ }
+ },
+ "IPerfServer": ["<your iperf server configuation. see class definition in iperf_server>"],
+ "testbed_params": {
+ "default_region": "<default access point region to run tests in. This will be used for all non DFS channels>",
+ "DFS_region": "<access point region to run DFS tests in>",
+ "iperf_server_address": "<ip address of iperf server generating or accepting test traffic>",
+ "fixed_attenuation": {"<your channel number 1>": "<your testbed attenuation on this channel>", "<your channel number 2>": "<your testbed attenuation on this channel>"},
+ "dut_front_end_loss": {"<your channel number 1>": "<your DUT front end loss on this channel>", "<your channel number 2>": "<your DUT front end loss on this channel>"},
+ "ap_tx_power": {"<your channel number 1>": "<your access point transmit power on this channel>", "<your channel number 2>": "<your access point transmit power on this channel>"},
+ "golden_results_path": "<your full path to golden results used for pass fail check>"
+ }
+ }
+ ],
+ "rvr_test_params":{
+ "country_code": "<device country code to set during rvr tests>",
+ "iperf_duration": 30,
+ "iperf_ignored_interval": 2,
+ "UDP_rates": {"VHT20": "<throughput to transmit in this mode>", "VHT40": "<throughput to transmit in this mode>", "VHT80": "<throughput to transmit in this mode>"},
+ "rvr_atten_start": 20,
+ "rvr_atten_stop": 30,
+ "rvr_atten_step": 5,
+ "pct_tolerance": 5,
+ "abs_tolerance": 5,
+ "failure_count_tolerance": 1
+ },
+ "rssi_test_params":{
+ "country_code": "<device country code to set during rvr tests>",
+ "rssi_vs_atten_start": 20,
+ "rssi_vs_atten_stop": 80,
+ "rssi_vs_atten_step": 1,
+ "rssi_vs_atten_connected_measurements": 10,
+ "rssi_vs_atten_scan_measurements": 0,
+ "rssi_vs_atten_metrics": ["signal_poll_rssi", "scan_rssi", "chain_0_rssi", "chain_1_rssi"],
+ "rssi_stability_atten": [20, 55],
+ "rssi_stability_duration": 10,
+ "rssi_tracking_waveforms": [{"atten_levels": [40, 61, 40], "step_size": 1, "step_duration": 1, "repetitions":1}],
+ "polling_frequency": 0.25,
+ "abs_tolerance": 2.5,
+ "stdev_tolerance": 1
+ },
+ "throughput_stability_test_params":{
+ "country_code": "<device country code to set during rvr tests>",
+ "iperf_duration": 30,
+ "iperf_ignored_interval": 5,
+ "UDP_rates": {"VHT20": "200M", "VHT40": "400M", "VHT80": "700M"},
+ "low_rssi_backoff_from_range": 10,
+ "min_throughput_threshold": 75,
+ "std_deviation_threshold": 5
+
+ },
+ "ping_test_params":{
+ "country_code": "<device country code to set during rvr tests>",
+ "ping_size": 64,
+ "range_ping_duration": 1,
+ "range_ping_interval": 0.002,
+ "range_atten_start": 60,
+ "range_atten_step": 1,
+ "range_atten_stop": 70,
+ "range_ping_loss_threshold": 25,
+ "range_gap_threshold": 2,
+ "rtt_ping_duration": 30,
+ "rtt_ping_interval": {"fast": 0.002, "slow": 0.5},
+ "rtt_ignored_interval": 0.15,
+ "rtt_test_attenuation": [20, 50],
+ "rtt_test_percentile": 5,
+ "rtt_threshold": 0.2,
+ "rtt_std_deviation_threshold": 5
+ },
+ "logpath": "<path to logs>",
+ "testpaths": ["<path to ACTS root folder>/tools/test/connectivity/acts/tests/google/wifi"]
+}
diff --git a/acts/tests/google/wifi/rtt/README.md b/acts/tests/google/wifi/rtt/README.md
new file mode 100644
index 0000000..639c3d8
--- /dev/null
+++ b/acts/tests/google/wifi/rtt/README.md
@@ -0,0 +1,56 @@
+# Wi-Fi RTT (IEEE 802.11mc) Integrated (ACTS/sl4a) Test Suite
+
+This directory contains ACTS/sl4a test scripts to verify and characterize
+the Wi-Fi RTT (IEEE 802.11mc) implementation in Android.
+
+There are 2 groups of tests (in 2 sub-directories):
+
+* functional: Functional tests that each implementation must pass. These
+are pass/fail tests.
+* stress: Tests which run through a large number of iterations to stress
+test the implementation. Considering that some failures are expected,
+especially in an over-the-air situation, pass/fail criteria are either
+not provided or may not apply to all implementations or test environments.
+
+The tests can be executed using:
+
+`act.py -c <config> -tc {<test_class>|<test_class>:<test_name>}`
+
+Where a test class corresponds to any of the `.py` files in any of the test sub-directories.
+If only a test class is specified (no test name), then all tests within that class are executed.
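+
+For example, to run all the tests in the `RangeApSupporting11McTest` class (the relative
+config path below is illustrative - adjust it to the location of the config in your checkout):
+
+`act.py -c ./config/wifi_rtt.json -tc RangeApSupporting11McTest`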
+
+## Test Beds
+The Wi-Fi RTT tests support several different test scenarios which require different test bed
+configurations. The test beds and their corresponding test files are:
+
+* Device Under Test + AP which supports IEEE 802.11mc
+ * functional/RangeApSupporting11McTest.py
+ * functional/RttRequestManagementTest.py
+ * functional/RttDisableTest.py
+ * stress/StressRangeApTest.py
+* Device Under Test + AP which does **not** support IEEE 802.11mc
+ * functional/RangeApNonSupporting11McTest.py
+* 2 Devices Under Test
+ * functional/RangeAwareTest.py
+ * functional/AwareDiscoveryWithRangingTest.py
+ * functional/RangeSoftApTest.py
+ * stress/StressRangeAwareTest.py
+
+## Test Configurations
+The test configuration, the `<config>` in the commands above, is stored in
+the *config* sub-directory. The configuration simply uses all connected
+devices without listing specific serial numbers. Note that some tests use a
+single device while others use 2 devices.
+
+The only provided configuration is *wifi_rtt.json*.
+
+The configuration defines the following keys to configure the test:
+
+* **lci_reference**, **lcr_reference**: Arrays of bytes used to validate that the *correct* LCI and
+LCR were received from the AP. These are empty by default and should be configured to match the
+configuration of the AP used in the test.
+* **rtt_reference_distance_mm**: The reference distance, in mm, between the test device and the test
+AP or between the two test devices (for Aware ranging tests).
+* **stress_test_min_iteration_count**, **stress_test_target_run_time_sec**: Parameters used to
+control the length and duration of the stress tests. The stress test runs for the specified number
+of iterations or for the specified duration - whichever is longer.
diff --git a/acts/tests/google/wifi/rtt/config/wifi_rtt.json b/acts/tests/google/wifi/rtt/config/wifi_rtt.json
new file mode 100644
index 0000000..41f77dc
--- /dev/null
+++ b/acts/tests/google/wifi/rtt/config/wifi_rtt.json
@@ -0,0 +1,20 @@
+{
+ "_description": "This is a test configuration file for Wi-Fi RTT tests.",
+ "testbed":
+ [
+ {
+ "_description": "Wi-Fi RTT testbed: auto-detect all attached devices",
+ "name": "WifiRttAllAttached",
+ "AndroidDevice": "*"
+ }
+ ],
+ "logpath": "~/logs",
+ "testpaths": ["./tools/test/connectivity/acts/tests/google/wifi"],
+ "adb_logcat_param": "-b all",
+ "aware_default_power_mode": "INTERACTIVE",
+ "lci_reference": [],
+ "lcr_reference": [],
+ "rtt_reference_distance_mm": 100,
+ "stress_test_min_iteration_count": 100,
+ "stress_test_target_run_time_sec" : 30
+}
diff --git a/acts/tests/google/wifi/rtt/functional/AwareDiscoveryWithRangingTest.py b/acts/tests/google/wifi/rtt/functional/AwareDiscoveryWithRangingTest.py
new file mode 100644
index 0000000..f6d7c8d
--- /dev/null
+++ b/acts/tests/google/wifi/rtt/functional/AwareDiscoveryWithRangingTest.py
@@ -0,0 +1,1567 @@
+#!/usr/bin/python3.4
+#
+# Copyright 2017 - The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import sys
+import time
+
+from acts import asserts
+from acts.test_decorators import test_tracker_info
+from acts.test_utils.net import connectivity_const as cconsts
+from acts.test_utils.wifi.aware import aware_const as aconsts
+from acts.test_utils.wifi.aware import aware_test_utils as autils
+from acts.test_utils.wifi.aware.AwareBaseTest import AwareBaseTest
+from acts.test_utils.wifi.rtt import rtt_const as rconsts
+from acts.test_utils.wifi.rtt import rtt_test_utils as rutils
+from acts.test_utils.wifi.rtt.RttBaseTest import RttBaseTest
+
+
+class AwareDiscoveryWithRangingTest(AwareBaseTest, RttBaseTest):
+ """Set of tests for Wi-Fi Aware discovery configured with ranging (RTT)."""
+
+ SERVICE_NAME = "GoogleTestServiceRRRRR"
+
+ # Flag indicating whether the device has a limitation that does not allow it
+ # to execute Aware-based Ranging (whether direct or as part of discovery)
+ # whenever NDP is enabled.
+ RANGING_NDP_CONCURRENCY_LIMITATION = True
+
+ # Flag indicating whether the device has a limitation that does not allow it
+ # to execute Aware-based Ranging (whether direct or as part of discovery)
+ # for both Initiators and Responders. Only the first mode works.
+ RANGING_INITIATOR_RESPONDER_CONCURRENCY_LIMITATION = True
+
+ def __init__(self, controllers):
+ AwareBaseTest.__init__(self, controllers)
+ RttBaseTest.__init__(self, controllers)
+
+ def setup_test(self):
+ """Manual setup here due to multiple inheritance: explicitly execute the
+ setup method from both parents."""
+ AwareBaseTest.setup_test(self)
+ RttBaseTest.setup_test(self)
+
+ def teardown_test(self):
+ """Manual teardown here due to multiple inheritance: explicitly execute the
+ teardown method from both parents."""
+ AwareBaseTest.teardown_test(self)
+ RttBaseTest.teardown_test(self)
+
+ #########################################################################
+
+ def run_discovery(self, p_config, s_config, expect_discovery,
+ expect_range=False):
+ """Run discovery on the 2 input devices with the specified configurations.
+
+ Args:
+ p_config, s_config: Publisher and Subscriber discovery configuration.
+ expect_discovery: True or False indicating whether discovery is expected
+ with the specified configurations.
+ expect_range: True if we expect distance results (i.e. ranging to happen).
+ Only relevant if expect_discovery is True.
+ Returns:
+ p_dut, s_dut: Publisher/Subscriber DUTs
+ p_disc_id, s_disc_id: Publisher/Subscriber discovery session IDs
+ """
+ p_dut = self.android_devices[0]
+ p_dut.pretty_name = "Publisher"
+ s_dut = self.android_devices[1]
+ s_dut.pretty_name = "Subscriber"
+
+ # Publisher+Subscriber: attach and wait for confirmation
+ p_id = p_dut.droid.wifiAwareAttach(False)
+ autils.wait_for_event(p_dut, aconsts.EVENT_CB_ON_ATTACHED)
+ time.sleep(self.device_startup_offset)
+ s_id = s_dut.droid.wifiAwareAttach(False)
+ autils.wait_for_event(s_dut, aconsts.EVENT_CB_ON_ATTACHED)
+
+ # Publisher: start publish and wait for confirmation
+ p_disc_id = p_dut.droid.wifiAwarePublish(p_id, p_config)
+ autils.wait_for_event(p_dut, aconsts.SESSION_CB_ON_PUBLISH_STARTED)
+
+ # Subscriber: start subscribe and wait for confirmation
+ s_disc_id = s_dut.droid.wifiAwareSubscribe(s_id, s_config)
+ autils.wait_for_event(s_dut, aconsts.SESSION_CB_ON_SUBSCRIBE_STARTED)
+
+ # Subscriber: wait or fail on service discovery
+ if expect_discovery:
+ event = autils.wait_for_event(s_dut,
+ aconsts.SESSION_CB_ON_SERVICE_DISCOVERED)
+ if expect_range:
+ asserts.assert_true(aconsts.SESSION_CB_KEY_DISTANCE_MM in event["data"],
+ "Discovery with ranging expected!")
+ else:
+ asserts.assert_false(
+ aconsts.SESSION_CB_KEY_DISTANCE_MM in event["data"],
+ "Discovery with ranging NOT expected!")
+ else:
+ autils.fail_on_event(s_dut, aconsts.SESSION_CB_ON_SERVICE_DISCOVERED)
+
+ # (single) sleep for the timeout period and then verify that no further events occurred
+ time.sleep(autils.EVENT_TIMEOUT)
+ autils.verify_no_more_events(p_dut, timeout=0)
+ autils.verify_no_more_events(s_dut, timeout=0)
+
+ return p_dut, s_dut, p_disc_id, s_disc_id
+
+ def run_discovery_update(self, p_dut, s_dut, p_disc_id, s_disc_id, p_config,
+ s_config, expect_discovery, expect_range=False):
+ """Run discovery on the 2 input devices with the specified update
+ configurations. I.e. update the existing discovery sessions with the
+ configurations.
+
+ Args:
+ p_dut, s_dut: Publisher/Subscriber DUTs.
+ p_disc_id, s_disc_id: Publisher/Subscriber discovery session IDs.
+ p_config, s_config: Publisher and Subscriber discovery configuration.
+ expect_discovery: True or False indicating whether discovery is expected
+ with the specified configurations.
+ expect_range: True if we expect distance results (i.e. ranging to happen).
+ Only relevant if expect_discovery is True.
+ """
+
+ # try to perform both reconfigurations at the same time (and only afterwards
+ # wait for all confirmations)
+ if p_config is not None:
+ p_dut.droid.wifiAwareUpdatePublish(p_disc_id, p_config)
+ if s_config is not None:
+ s_dut.droid.wifiAwareUpdateSubscribe(s_disc_id, s_config)
+
+ if p_config is not None:
+ autils.wait_for_event(p_dut, aconsts.SESSION_CB_ON_SESSION_CONFIG_UPDATED)
+ if s_config is not None:
+ autils.wait_for_event(s_dut, aconsts.SESSION_CB_ON_SESSION_CONFIG_UPDATED)
+
+ # Subscriber: wait or fail on service discovery
+ if expect_discovery:
+ event = autils.wait_for_event(s_dut,
+ aconsts.SESSION_CB_ON_SERVICE_DISCOVERED)
+ if expect_range:
+ asserts.assert_true(aconsts.SESSION_CB_KEY_DISTANCE_MM in event["data"],
+ "Discovery with ranging expected!")
+ else:
+ asserts.assert_false(
+ aconsts.SESSION_CB_KEY_DISTANCE_MM in event["data"],
+ "Discovery with ranging NOT expected!")
+ else:
+ autils.fail_on_event(s_dut, aconsts.SESSION_CB_ON_SERVICE_DISCOVERED)
+
+ # (single) sleep for the timeout period and then verify that no further events occurred
+ time.sleep(autils.EVENT_TIMEOUT)
+ autils.verify_no_more_events(p_dut, timeout=0)
+ autils.verify_no_more_events(s_dut, timeout=0)
+
+ def run_discovery_prange_sminmax_outofrange(self, is_unsolicited_passive):
+ """Run discovery with ranging:
+ - Publisher enables ranging
+ - Subscriber enables ranging with min/max such that out of range (min=large,
+ max=large+1)
+
+ Expected: no discovery
+
+ This is a baseline test for the update-configuration tests.
+
+ Args:
+ is_unsolicited_passive: True for Unsolicited/Passive, False for
+ Solicited/Active.
+ Returns: the return values of run_discovery.
+ """
+ pub_type = (aconsts.PUBLISH_TYPE_UNSOLICITED if is_unsolicited_passive
+ else aconsts.PUBLISH_TYPE_SOLICITED)
+ sub_type = (aconsts.SUBSCRIBE_TYPE_PASSIVE if is_unsolicited_passive
+ else aconsts.SUBSCRIBE_TYPE_ACTIVE)
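+ # ssi=self.getname(2) resolves two frames up the call stack, i.e. to the test
+ # method that invoked this helper, so the publish and subscribe SSIs match
+ # and are unique per test.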
+ return self.run_discovery(
+ p_config=autils.add_ranging_to_pub(
+ autils.create_discovery_config(self.SERVICE_NAME, pub_type,
+ ssi=self.getname(2)),
+ enable_ranging=True),
+ s_config=autils.add_ranging_to_sub(
+ autils.create_discovery_config(self.SERVICE_NAME, sub_type,
+ ssi=self.getname(2)),
+ min_distance_mm=1000000,
+ max_distance_mm=1000001),
+ expect_discovery=False)
+
+ def getname(self, level=1):
+ """Python magic to return the name of the *calling* function.
+
+ Args:
+ level: How many levels up to go for the method name. Default = calling
+ method.
+ """
+ return sys._getframe(level).f_code.co_name
+
+ #########################################################################
+ # Run discovery with ranging configuration.
+ #
+ # Names: test_ranged_discovery_<ptype>_<stype>_<p_range>_<s_range>_<ref_distance>
+ #
+ # where:
+ # <ptype>_<stype>: unsolicited_passive or solicited_active
+ # <p_range>: prange or pnorange
+ # <s_range>: smin or smax or sminmax or snorange
+ # <ref_distance>: inrange or outofrange
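+ #
+ # Note on distances: all bounds are in mm. 1,000,000 mm (1 km) is used as an
+ # "always out of range" bound and [0, 1,000,000] mm as an "always in range"
+ # window, assuming the test-bed DUTs are far closer than 1 km apart.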
+ #########################################################################
+
+ @test_tracker_info(uuid="3a216e9a-7a57-4741-89c0-84456975e1ac")
+ def test_ranged_discovery_unsolicited_passive_prange_snorange(self):
+ """Verify discovery with ranging:
+ - Unsolicited Publish/Passive Subscribe
+ - Publisher enables ranging
+ - Subscriber disables ranging
+
+ Expect: normal discovery (as if no ranging performed) - no distance
+ """
+ self.run_discovery(
+ p_config=autils.add_ranging_to_pub(
+ autils.create_discovery_config(self.SERVICE_NAME,
+ aconsts.PUBLISH_TYPE_UNSOLICITED,
+ ssi=self.getname()),
+ enable_ranging=True),
+ s_config=autils.create_discovery_config(self.SERVICE_NAME,
+ aconsts.SUBSCRIBE_TYPE_PASSIVE,
+ ssi=self.getname()),
+ expect_discovery=True,
+ expect_range=False)
+
+ @test_tracker_info(uuid="859a321e-18e2-437b-aa7a-2a45a42ee737")
+ def test_ranged_discovery_solicited_active_prange_snorange(self):
+ """Verify discovery with ranging:
+ - Solicited Publish/Active Subscribe
+ - Publisher enables ranging
+ - Subscriber disables ranging
+
+ Expect: normal discovery (as if no ranging performed) - no distance
+ """
+ self.run_discovery(
+ p_config=autils.add_ranging_to_pub(
+ autils.create_discovery_config(self.SERVICE_NAME,
+ aconsts.PUBLISH_TYPE_SOLICITED,
+ ssi=self.getname()),
+ enable_ranging=True),
+ s_config=autils.create_discovery_config(self.SERVICE_NAME,
+ aconsts.SUBSCRIBE_TYPE_ACTIVE,
+ ssi=self.getname()),
+ expect_discovery=True,
+ expect_range=False)
+
+ @test_tracker_info(uuid="12a4f899-4f70-4641-8f3c-351004669b71")
+ def test_ranged_discovery_unsolicited_passive_pnorange_smax_inrange(self):
+ """Verify discovery with ranging:
+ - Unsolicited Publish/Passive Subscribe
+ - Publisher disables ranging
+ - Subscriber enables ranging with max such that always within range (large
+ max)
+
+ Expect: normal discovery (as if no ranging performed) - no distance
+ """
+ self.run_discovery(
+ p_config=autils.add_ranging_to_pub(
+ autils.create_discovery_config(self.SERVICE_NAME,
+ aconsts.PUBLISH_TYPE_UNSOLICITED,
+ ssi=self.getname()),
+ enable_ranging=False),
+ s_config=autils.add_ranging_to_sub(
+ autils.create_discovery_config(self.SERVICE_NAME,
+ aconsts.SUBSCRIBE_TYPE_PASSIVE,
+ ssi=self.getname()),
+ min_distance_mm=None,
+ max_distance_mm=1000000),
+ expect_discovery=True,
+ expect_range=False)
+
+ @test_tracker_info(uuid="b7f90793-113d-4355-be20-856d92ac939f")
+ def test_ranged_discovery_solicited_active_pnorange_smax_inrange(self):
+ """Verify discovery with ranging:
+ - Solicited Publish/Active Subscribe
+ - Publisher disables ranging
+ - Subscriber enables ranging with max such that always within range (large
+ max)
+
+ Expect: normal discovery (as if no ranging performed) - no distance
+ """
+ self.run_discovery(
+ p_config=autils.add_ranging_to_pub(
+ autils.create_discovery_config(self.SERVICE_NAME,
+ aconsts.PUBLISH_TYPE_SOLICITED,
+ ssi=self.getname()),
+ enable_ranging=False),
+ s_config=autils.add_ranging_to_sub(
+ autils.create_discovery_config(self.SERVICE_NAME,
+ aconsts.SUBSCRIBE_TYPE_ACTIVE,
+ ssi=self.getname()),
+ min_distance_mm=None,
+ max_distance_mm=1000000),
+ expect_discovery=True,
+ expect_range=False)
+
+ @test_tracker_info(uuid="da3ab6df-58f9-44ae-b7be-8200d9e1bb76")
+ def test_ranged_discovery_unsolicited_passive_pnorange_smin_outofrange(self):
+ """Verify discovery with ranging:
+ - Unsolicited Publish/Passive Subscribe
+ - Publisher disables ranging
+ - Subscriber enables ranging with min such that always out of range (large
+ min)
+
+ Expect: normal discovery (as if no ranging performed) - no distance
+ """
+ self.run_discovery(
+ p_config=autils.add_ranging_to_pub(
+ autils.create_discovery_config(self.SERVICE_NAME,
+ aconsts.PUBLISH_TYPE_UNSOLICITED,
+ ssi=self.getname()),
+ enable_ranging=False),
+ s_config=autils.add_ranging_to_sub(
+ autils.create_discovery_config(self.SERVICE_NAME,
+ aconsts.SUBSCRIBE_TYPE_PASSIVE,
+ ssi=self.getname()),
+ min_distance_mm=1000000,
+ max_distance_mm=None),
+ expect_discovery=True,
+ expect_range=False)
+
+ @test_tracker_info(uuid="275e0806-f266-4fa6-9ca0-1cfd7b65a6ca")
+ def test_ranged_discovery_solicited_active_pnorange_smin_outofrange(self):
+ """Verify discovery with ranging:
+ - Solicited Publish/Active Subscribe
+ - Publisher disables ranging
+ - Subscriber enables ranging with min such that always out of range (large
+ min)
+
+ Expect: normal discovery (as if no ranging performed) - no distance
+ """
+ self.run_discovery(
+ p_config=autils.add_ranging_to_pub(
+ autils.create_discovery_config(self.SERVICE_NAME,
+ aconsts.PUBLISH_TYPE_SOLICITED,
+ ssi=self.getname()),
+ enable_ranging=False),
+ s_config=autils.add_ranging_to_sub(
+ autils.create_discovery_config(self.SERVICE_NAME,
+ aconsts.SUBSCRIBE_TYPE_ACTIVE,
+ ssi=self.getname()),
+ min_distance_mm=1000000,
+ max_distance_mm=None),
+ expect_discovery=True,
+ expect_range=False)
+
+ @test_tracker_info(uuid="8cd0aa1e-6866-4a5d-a550-f25483eebea1")
+ def test_ranged_discovery_unsolicited_passive_prange_smin_inrange(self):
+ """Verify discovery with ranging:
+ - Unsolicited Publish/Passive Subscribe
+ - Publisher enables ranging
+ - Subscriber enables ranging with min such that in range (min=0)
+
+ Expect: discovery with distance
+ """
+ self.run_discovery(
+ p_config=autils.add_ranging_to_pub(
+ autils.create_discovery_config(self.SERVICE_NAME,
+ aconsts.PUBLISH_TYPE_UNSOLICITED,
+ ssi=self.getname()),
+ enable_ranging=True),
+ s_config=autils.add_ranging_to_sub(
+ autils.create_discovery_config(self.SERVICE_NAME,
+ aconsts.SUBSCRIBE_TYPE_PASSIVE,
+ ssi=self.getname()),
+ min_distance_mm=0,
+ max_distance_mm=None),
+ expect_discovery=True,
+ expect_range=True)
+
+ @test_tracker_info(uuid="97c22c54-669b-4f7a-bf51-2f484e5f3e74")
+ def test_ranged_discovery_unsolicited_passive_prange_smax_inrange(self):
+ """Verify discovery with ranging:
+ - Unsolicited Publish/Passive Subscribe
+ - Publisher enables ranging
+ - Subscriber enables ranging with max such that in range (max=large)
+
+ Expect: discovery with distance
+ """
+ self.run_discovery(
+ p_config=autils.add_ranging_to_pub(
+ autils.create_discovery_config(self.SERVICE_NAME,
+ aconsts.PUBLISH_TYPE_UNSOLICITED,
+ ssi=self.getname()),
+ enable_ranging=True),
+ s_config=autils.add_ranging_to_sub(
+ autils.create_discovery_config(self.SERVICE_NAME,
+ aconsts.SUBSCRIBE_TYPE_PASSIVE,
+ ssi=self.getname()),
+ min_distance_mm=None,
+ max_distance_mm=1000000),
+ expect_discovery=True,
+ expect_range=True)
+
+ @test_tracker_info(uuid="616673d7-9d0b-43de-a378-e5e949b51b32")
+ def test_ranged_discovery_unsolicited_passive_prange_sminmax_inrange(self):
+ """Verify discovery with ranging:
+ - Unsolicited Publish/Passive Subscribe
+ - Publisher enables ranging
+ - Subscriber enables ranging with min/max such that in range (min=0,
+ max=large)
+
+ Expect: discovery with distance
+ """
+ self.run_discovery(
+ p_config=autils.add_ranging_to_pub(
+ autils.create_discovery_config(self.SERVICE_NAME,
+ aconsts.PUBLISH_TYPE_UNSOLICITED,
+ ssi=self.getname()),
+ enable_ranging=True),
+ s_config=autils.add_ranging_to_sub(
+ autils.create_discovery_config(self.SERVICE_NAME,
+ aconsts.SUBSCRIBE_TYPE_PASSIVE,
+ ssi=self.getname()),
+ min_distance_mm=0,
+ max_distance_mm=1000000),
+ expect_discovery=True,
+ expect_range=True)
+
+ @test_tracker_info(uuid="2bf84912-dcad-4a8f-971f-e445a07f05ce")
+ def test_ranged_discovery_solicited_active_prange_smin_inrange(self):
+ """Verify discovery with ranging:
+ - Solicited Publish/Active Subscribe
+ - Publisher enables ranging
+ - Subscriber enables ranging with min such that in range (min=0)
+
+ Expect: discovery with distance
+ """
+ self.run_discovery(
+ p_config=autils.add_ranging_to_pub(
+ autils.create_discovery_config(self.SERVICE_NAME,
+ aconsts.PUBLISH_TYPE_SOLICITED,
+ ssi=self.getname()),
+ enable_ranging=True),
+ s_config=autils.add_ranging_to_sub(
+ autils.create_discovery_config(self.SERVICE_NAME,
+ aconsts.SUBSCRIBE_TYPE_ACTIVE,
+ ssi=self.getname()),
+ min_distance_mm=0,
+ max_distance_mm=None),
+ expect_discovery=True,
+ expect_range=True)
+
+ @test_tracker_info(uuid="5cfd7961-9665-4742-a1b5-2d1fc97f9795")
+ def test_ranged_discovery_solicited_active_prange_smax_inrange(self):
+ """Verify discovery with ranging:
+ - Solicited Publish/Active Subscribe
+ - Publisher enables ranging
+ - Subscriber enables ranging with max such that in range (max=large)
+
+ Expect: discovery with distance
+ """
+ self.run_discovery(
+ p_config=autils.add_ranging_to_pub(
+ autils.create_discovery_config(self.SERVICE_NAME,
+ aconsts.PUBLISH_TYPE_SOLICITED,
+ ssi=self.getname()),
+ enable_ranging=True),
+ s_config=autils.add_ranging_to_sub(
+ autils.create_discovery_config(self.SERVICE_NAME,
+ aconsts.SUBSCRIBE_TYPE_ACTIVE,
+ ssi=self.getname()),
+ min_distance_mm=None,
+ max_distance_mm=1000000),
+ expect_discovery=True,
+ expect_range=True)
+
+ @test_tracker_info(uuid="5cf650ad-0b42-4b7d-9e05-d5f45fe0554d")
+ def test_ranged_discovery_solicited_active_prange_sminmax_inrange(self):
+ """Verify discovery with ranging:
+ - Solicited Publish/Active Subscribe
+ - Publisher enables ranging
+ - Subscriber enables ranging with min/max such that in range (min=0,
+ max=large)
+
+ Expect: discovery with distance
+ """
+ self.run_discovery(
+ p_config=autils.add_ranging_to_pub(
+ autils.create_discovery_config(self.SERVICE_NAME,
+ aconsts.PUBLISH_TYPE_SOLICITED,
+ ssi=self.getname()),
+ enable_ranging=True),
+ s_config=autils.add_ranging_to_sub(
+ autils.create_discovery_config(self.SERVICE_NAME,
+ aconsts.SUBSCRIBE_TYPE_ACTIVE,
+ ssi=self.getname()),
+ min_distance_mm=0,
+ max_distance_mm=1000000),
+ expect_discovery=True,
+ expect_range=True)
+
+ @test_tracker_info(uuid="5277f418-ac35-43ce-9b30-3c895272898e")
+ def test_ranged_discovery_unsolicited_passive_prange_smin_outofrange(self):
+ """Verify discovery with ranging:
+ - Unsolicited Publish/Passive Subscribe
+ - Publisher enables ranging
+ - Subscriber enables ranging with min such that out of range (min=large)
+
+ Expect: no discovery
+ """
+ self.run_discovery(
+ p_config=autils.add_ranging_to_pub(
+ autils.create_discovery_config(self.SERVICE_NAME,
+ aconsts.PUBLISH_TYPE_UNSOLICITED,
+ ssi=self.getname()),
+ enable_ranging=True),
+ s_config=autils.add_ranging_to_sub(
+ autils.create_discovery_config(self.SERVICE_NAME,
+ aconsts.SUBSCRIBE_TYPE_PASSIVE,
+ ssi=self.getname()),
+ min_distance_mm=1000000,
+ max_distance_mm=None),
+ expect_discovery=False)
+
+ @test_tracker_info(uuid="8a7e6ab1-acf4-41a7-a5fb-8c164d593b5f")
+ def test_ranged_discovery_unsolicited_passive_prange_smax_outofrange(self):
+ """Verify discovery with ranging:
+ - Unsolicited Publish/Passive Subscribe
+ - Publisher enables ranging
+ - Subscriber enables ranging with max such that out of range (max=0)
+
+ Expect: no discovery
+ """
+ self.run_discovery(
+ p_config=autils.add_ranging_to_pub(
+ autils.create_discovery_config(self.SERVICE_NAME,
+ aconsts.PUBLISH_TYPE_UNSOLICITED,
+ ssi=self.getname()),
+ enable_ranging=True),
+ s_config=autils.add_ranging_to_sub(
+ autils.create_discovery_config(self.SERVICE_NAME,
+ aconsts.SUBSCRIBE_TYPE_PASSIVE,
+ ssi=self.getname()),
+ min_distance_mm=None,
+ max_distance_mm=0),
+ expect_discovery=False)
+
+ @test_tracker_info(uuid="b744f5f9-2641-4373-bf86-3752e2f9aace")
+ def test_ranged_discovery_unsolicited_passive_prange_sminmax_outofrange(self):
+ """Verify discovery with ranging:
+ - Unsolicited Publish/Passive Subscribe
+ - Publisher enables ranging
+ - Subscriber enables ranging with min/max such that out of range (min=large,
+ max=large+1)
+
+ Expect: no discovery
+ """
+ self.run_discovery(
+ p_config=autils.add_ranging_to_pub(
+ autils.create_discovery_config(self.SERVICE_NAME,
+ aconsts.PUBLISH_TYPE_UNSOLICITED,
+ ssi=self.getname()),
+ enable_ranging=True),
+ s_config=autils.add_ranging_to_sub(
+ autils.create_discovery_config(self.SERVICE_NAME,
+ aconsts.SUBSCRIBE_TYPE_PASSIVE,
+ ssi=self.getname()),
+ min_distance_mm=1000000,
+ max_distance_mm=1000001),
+ expect_discovery=False)
+
+ @test_tracker_info(uuid="d2e94199-b2e6-4fa5-a347-24594883c801")
+ def test_ranged_discovery_solicited_active_prange_smin_outofrange(self):
+ """Verify discovery with ranging:
+ - Solicited Publish/Active Subscribe
+ - Publisher enables ranging
+ - Subscriber enables ranging with min such that out of range (min=large)
+
+ Expect: no discovery
+ """
+ self.run_discovery(
+ p_config=autils.add_ranging_to_pub(
+ autils.create_discovery_config(self.SERVICE_NAME,
+ aconsts.PUBLISH_TYPE_SOLICITED,
+ ssi=self.getname()),
+ enable_ranging=True),
+ s_config=autils.add_ranging_to_sub(
+ autils.create_discovery_config(self.SERVICE_NAME,
+ aconsts.SUBSCRIBE_TYPE_ACTIVE,
+ ssi=self.getname()),
+ min_distance_mm=1000000,
+ max_distance_mm=None),
+ expect_discovery=False)
+
+ @test_tracker_info(uuid="a5619835-496a-4244-a428-f85cba3d4115")
+ def test_ranged_discovery_solicited_active_prange_smax_outofrange(self):
+ """Verify discovery with ranging:
+ - Solicited Publish/Active Subscribe
+ - Publisher enables ranging
+ - Subscriber enables ranging with max such that out of range (max=0)
+
+ Expect: no discovery
+ """
+ self.run_discovery(
+ p_config=autils.add_ranging_to_pub(
+ autils.create_discovery_config(self.SERVICE_NAME,
+ aconsts.PUBLISH_TYPE_SOLICITED,
+ ssi=self.getname()),
+ enable_ranging=True),
+ s_config=autils.add_ranging_to_sub(
+ autils.create_discovery_config(self.SERVICE_NAME,
+ aconsts.SUBSCRIBE_TYPE_ACTIVE,
+ ssi=self.getname()),
+ min_distance_mm=None,
+ max_distance_mm=0),
+ expect_discovery=False)
+
+ @test_tracker_info(uuid="12ebd91f-a973-410b-8ee1-0bd86024b921")
+ def test_ranged_discovery_solicited_active_prange_sminmax_outofrange(self):
+ """Verify discovery with ranging:
+ - Solicited Publish/Active Subscribe
+ - Publisher enables ranging
+ - Subscriber enables ranging with min/max such that out of range (min=large,
+ max=large+1)
+
+ Expect: no discovery
+ """
+ self.run_discovery(
+ p_config=autils.add_ranging_to_pub(
+ autils.create_discovery_config(self.SERVICE_NAME,
+ aconsts.PUBLISH_TYPE_SOLICITED,
+ ssi=self.getname()),
+ enable_ranging=True),
+ s_config=autils.add_ranging_to_sub(
+ autils.create_discovery_config(self.SERVICE_NAME,
+ aconsts.SUBSCRIBE_TYPE_ACTIVE,
+ ssi=self.getname()),
+ min_distance_mm=1000000,
+ max_distance_mm=1000001),
+ expect_discovery=False)
+
+ #########################################################################
+ # Run discovery with ranging configuration & update configurations after
+ # first run.
+ #
+ # Names: test_ranged_updated_discovery_<ptype>_<stype>_<scenario>
+ #
+ # where:
+ # <ptype>_<stype>: unsolicited_passive or solicited_active
+ # <scenario>: test scenario (details in name)
+ #########################################################################
+
+ @test_tracker_info(uuid="59442180-4a6c-428f-b926-86000e8339b4")
+ def test_ranged_updated_discovery_unsolicited_passive_oor_to_ir(self):
+ """Verify discovery with ranging operation with updated configuration:
+ - Unsolicited Publish/Passive Subscribe
+ - Publisher enables ranging
+ - Subscriber:
+ - Starts: Ranging enabled, min/max such that out of range (min=large,
+ max=large+1)
+ - Reconfigured to: Ranging enabled, min/max such that in range (min=0,
+ max=large)
+
+ Expect: discovery + ranging after update
+ """
+ (p_dut, s_dut, p_disc_id,
+ s_disc_id) = self.run_discovery_prange_sminmax_outofrange(True)
+ self.run_discovery_update(p_dut, s_dut, p_disc_id, s_disc_id,
+ p_config=None, # no updates
+ s_config=autils.add_ranging_to_sub(
+ autils.create_discovery_config(self.SERVICE_NAME,
+ aconsts.SUBSCRIBE_TYPE_PASSIVE,
+ ssi=self.getname()),
+ min_distance_mm=0,
+ max_distance_mm=1000000),
+ expect_discovery=True,
+ expect_range=True)
+
+ @test_tracker_info(uuid="60188508-104d-42d5-ac3a-3605093c45d7")
+ def test_ranged_updated_discovery_unsolicited_passive_pub_unrange(self):
+ """Verify discovery with ranging operation with updated configuration:
+ - Unsolicited Publish/Passive Subscribe
+ - Publisher enables ranging
+ - Subscriber: Ranging enabled, min/max such that out of range (min=large,
+ max=large+1)
+ - Reconfigured to: Publisher disables ranging
+
+ Expect: discovery w/o ranging after update
+ """
+ (p_dut, s_dut, p_disc_id,
+ s_disc_id) = self.run_discovery_prange_sminmax_outofrange(True)
+ self.run_discovery_update(p_dut, s_dut, p_disc_id, s_disc_id,
+ p_config=autils.create_discovery_config(self.SERVICE_NAME,
+ aconsts.PUBLISH_TYPE_UNSOLICITED,
+ ssi=self.getname()),
+ s_config=None, # no updates
+ expect_discovery=True,
+ expect_range=False)
+
+ @test_tracker_info(uuid="f96b434e-751d-4eb5-ae01-0c5c3a6fb4a2")
+ def test_ranged_updated_discovery_unsolicited_passive_sub_unrange(self):
+ """Verify discovery with ranging operation with updated configuration:
+ - Unsolicited Publish/Passive Subscribe
+ - Publisher enables ranging
+ - Subscriber:
+ - Starts: Ranging enabled, min/max such that out of range (min=large,
+ max=large+1)
+ - Reconfigured to: Ranging disabled
+
+ Expect: discovery w/o ranging after update
+ """
+ (p_dut, s_dut, p_disc_id,
+ s_disc_id) = self.run_discovery_prange_sminmax_outofrange(True)
+ self.run_discovery_update(p_dut, s_dut, p_disc_id, s_disc_id,
+ p_config=None, # no updates
+ s_config=autils.create_discovery_config(self.SERVICE_NAME,
+ aconsts.SUBSCRIBE_TYPE_PASSIVE,
+ ssi=self.getname()),
+ expect_discovery=True,
+ expect_range=False)
+
+ @test_tracker_info(uuid="78970de8-9362-4647-931a-3513bcf58e80")
+ def test_ranged_updated_discovery_unsolicited_passive_sub_oor(self):
+ """Verify discovery with ranging operation with updated configuration:
+ - Unsolicited Publish/Passive Subscribe
+ - Publisher enables ranging
+ - Subscriber:
+ - Starts: Ranging enabled, min/max such that out of range (min=large,
+ max=large+1)
+ - Reconfigured to: different out-of-range setting
+
+ Expect: no discovery after update
+ """
+ (p_dut, s_dut, p_disc_id,
+ s_disc_id) = self.run_discovery_prange_sminmax_outofrange(True)
+ self.run_discovery_update(p_dut, s_dut, p_disc_id, s_disc_id,
+ p_config=None, # no updates
+ s_config=autils.add_ranging_to_sub(
+ autils.create_discovery_config(self.SERVICE_NAME,
+ aconsts.SUBSCRIBE_TYPE_PASSIVE,
+ ssi=self.getname()),
+ min_distance_mm=100000,
+ max_distance_mm=100001),
+ expect_discovery=False)
+
+ @test_tracker_info(uuid="0841ad05-4899-4521-bd24-04a8e2e345ac")
+ def test_ranged_updated_discovery_unsolicited_passive_pub_same(self):
+ """Verify discovery with ranging operation with updated configuration:
+ - Unsolicited Publish/Passive Subscribe
+ - Publisher enables ranging
+ - Subscriber: Ranging enabled, min/max such that out of range (min=large,
+ max=large+1)
+ - Reconfigured to: Publisher with same settings (ranging enabled)
+
+ Expect: no discovery after update
+ """
+ (p_dut, s_dut, p_disc_id,
+ s_disc_id) = self.run_discovery_prange_sminmax_outofrange(True)
+ self.run_discovery_update(p_dut, s_dut, p_disc_id, s_disc_id,
+ p_config=autils.add_ranging_to_pub(
+ autils.create_discovery_config(self.SERVICE_NAME,
+ aconsts.PUBLISH_TYPE_UNSOLICITED,
+ ssi=self.getname()),
+ enable_ranging=True),
+ s_config=None, # no updates
+ expect_discovery=False)
+
+ @test_tracker_info(uuid="ec6ca57b-f115-4516-813a-4572b930c8d3")
+ def test_ranged_updated_discovery_unsolicited_passive_multi_step(self):
+ """Verify discovery with ranging operation with updated configuration:
+ - Unsolicited Publish/Passive Subscribe
+ - Publisher enables ranging
+ - Subscriber: Ranging enabled, min/max such that out of range (min=large,
+ max=large+1)
+ - Expect: no discovery
+ - Reconfigured to: Ranging enabled, min/max such that in-range (min=0)
+ - Expect: discovery with ranging
+ - Reconfigured to: Ranging enabled, min/max such that out-of-range
+ (min=large)
+ - Expect: no discovery
+ - Reconfigured to: Ranging disabled
+ - Expect: discovery without ranging
+ """
+ (p_dut, s_dut, p_disc_id,
+ s_disc_id) = self.run_discovery_prange_sminmax_outofrange(True)
+ self.run_discovery_update(p_dut, s_dut, p_disc_id, s_disc_id,
+ p_config=None, # no updates
+ s_config=autils.add_ranging_to_sub(
+ autils.create_discovery_config(self.SERVICE_NAME,
+ aconsts.SUBSCRIBE_TYPE_PASSIVE,
+ ssi=self.getname()),
+ min_distance_mm=0,
+ max_distance_mm=None),
+ expect_discovery=True,
+ expect_range=True)
+ self.run_discovery_update(p_dut, s_dut, p_disc_id, s_disc_id,
+ p_config=None, # no updates
+ s_config=autils.add_ranging_to_sub(
+ autils.create_discovery_config(self.SERVICE_NAME,
+ aconsts.SUBSCRIBE_TYPE_PASSIVE,
+ ssi=self.getname()),
+ min_distance_mm=1000000,
+ max_distance_mm=None),
+ expect_discovery=False)
+ self.run_discovery_update(p_dut, s_dut, p_disc_id, s_disc_id,
+ p_config=None, # no updates
+ s_config=autils.create_discovery_config(self.SERVICE_NAME,
+ aconsts.SUBSCRIBE_TYPE_PASSIVE,
+ ssi=self.getname()),
+ expect_discovery=True,
+ expect_range=False)
+
+ @test_tracker_info(uuid="bbaac63b-000c-415f-bf19-0906f04031cd")
+ def test_ranged_updated_discovery_solicited_active_oor_to_ir(self):
+ """Verify discovery with ranging operation with updated configuration:
+ - Solicited Publish/Active Subscribe
+ - Publisher enables ranging
+ - Subscriber:
+ - Starts: Ranging enabled, min/max such that out of range (min=large,
+ max=large+1)
+ - Reconfigured to: Ranging enabled, min/max such that in range (min=0,
+ max=large)
+
+ Expect: discovery + ranging after update
+ """
+ (p_dut, s_dut, p_disc_id,
+ s_disc_id) = self.run_discovery_prange_sminmax_outofrange(False)
+ self.run_discovery_update(p_dut, s_dut, p_disc_id, s_disc_id,
+ p_config=None, # no updates
+ s_config=autils.add_ranging_to_sub(
+ autils.create_discovery_config(self.SERVICE_NAME,
+ aconsts.SUBSCRIBE_TYPE_ACTIVE,
+ ssi=self.getname()),
+ min_distance_mm=0,
+ max_distance_mm=1000000),
+ expect_discovery=True,
+ expect_range=True)
+
+ @test_tracker_info(uuid="c385b361-7955-4f34-9109-8d8ca81cb4cc")
+ def test_ranged_updated_discovery_solicited_active_pub_unrange(self):
+ """Verify discovery with ranging operation with updated configuration:
+ - Solicited Publish/Active Subscribe
+ - Publisher enables ranging
+ - Subscriber: Ranging enabled, min/max such that out of range (min=large,
+ max=large+1)
+ - Reconfigured to: Publisher disables ranging
+
+ Expect: discovery w/o ranging after update
+ """
+ (p_dut, s_dut, p_disc_id,
+ s_disc_id) = self.run_discovery_prange_sminmax_outofrange(False)
+ self.run_discovery_update(p_dut, s_dut, p_disc_id, s_disc_id,
+ p_config=autils.create_discovery_config(self.SERVICE_NAME,
+ aconsts.PUBLISH_TYPE_SOLICITED,
+ ssi=self.getname()),
+ s_config=None, # no updates
+ expect_discovery=True,
+ expect_range=False)
+
+ @test_tracker_info(uuid="ec5120ea-77ec-48c6-8820-48b82ad3dfd4")
+ def test_ranged_updated_discovery_solicited_active_sub_unrange(self):
+ """Verify discovery with ranging operation with updated configuration:
+ - Solicited Publish/Active Subscribe
+ - Publisher enables ranging
+ - Subscriber:
+ - Starts: Ranging enabled, min/max such that out of range (min=large,
+ max=large+1)
+ - Reconfigured to: Ranging disabled
+
+ Expect: discovery w/o ranging after update
+ """
+ (p_dut, s_dut, p_disc_id,
+ s_disc_id) = self.run_discovery_prange_sminmax_outofrange(False)
+ self.run_discovery_update(p_dut, s_dut, p_disc_id, s_disc_id,
+ p_config=None, # no updates
+ s_config=autils.create_discovery_config(self.SERVICE_NAME,
+ aconsts.SUBSCRIBE_TYPE_ACTIVE,
+ ssi=self.getname()),
+ expect_discovery=True,
+ expect_range=False)
+
+ @test_tracker_info(uuid="6231cb42-91e4-48d3-b9db-b37efbe8537c")
+ def test_ranged_updated_discovery_solicited_active_sub_oor(self):
+ """Verify discovery with ranging operation with updated configuration:
+ - Solicited Publish/Active Subscribe
+ - Publisher enables ranging
+ - Subscriber:
+ - Starts: Ranging enabled, min/max such that out of range (min=large,
+ max=large+1)
+ - Reconfigured to: different out-of-range setting
+
+ Expect: no discovery after update
+ """
+ (p_dut, s_dut, p_disc_id,
+ s_disc_id) = self.run_discovery_prange_sminmax_outofrange(False)
+ self.run_discovery_update(p_dut, s_dut, p_disc_id, s_disc_id,
+ p_config=None, # no updates
+ s_config=autils.add_ranging_to_sub(
+ autils.create_discovery_config(self.SERVICE_NAME,
+ aconsts.SUBSCRIBE_TYPE_ACTIVE,
+ ssi=self.getname()),
+ min_distance_mm=100000,
+ max_distance_mm=100001),
+ expect_discovery=False)
+
+ @test_tracker_info(uuid="ec999420-6a50-455e-b624-f4c9b4cb7ea5")
+ def test_ranged_updated_discovery_solicited_active_pub_same(self):
+ """Verify discovery with ranging operation with updated configuration:
+ - Solicited Publish/Active Subscribe
+ - Publisher enables ranging
+ - Subscriber: Ranging enabled, min/max such that out of range (min=large,
+ max=large+1)
+ - Reconfigured to: Publisher with same settings (ranging enabled)
+
+ Expect: no discovery after update
+ """
+ (p_dut, s_dut, p_disc_id,
+ s_disc_id) = self.run_discovery_prange_sminmax_outofrange(False)
+ self.run_discovery_update(p_dut, s_dut, p_disc_id, s_disc_id,
+ p_config=autils.add_ranging_to_pub(
+ autils.create_discovery_config(self.SERVICE_NAME,
+ aconsts.PUBLISH_TYPE_SOLICITED,
+ ssi=self.getname()),
+ enable_ranging=True),
+ s_config=None, # no updates
+ expect_discovery=False)
+
+ @test_tracker_info(uuid="ec6ca57b-f115-4516-813a-4572b930c8d3")
+ def test_ranged_updated_discovery_solicited_active_multi_step(self):
+ """Verify discovery with ranging operation with updated configuration:
+ - Solicited Publish/Active Subscribe
+ - Publisher enables ranging
+ - Subscriber: Ranging enabled, min/max such that out of range (min=large,
+ max=large+1)
+ - Expect: no discovery
+ - Reconfigured to: Ranging enabled, min/max such that in-range (min=0)
+ - Expect: discovery with ranging
+ - Reconfigured to: Ranging enabled, min/max such that out-of-range
+ (min=large)
+ - Expect: no discovery
+ - Reconfigured to: Ranging disabled
+ - Expect: discovery without ranging
+ """
+ (p_dut, s_dut, p_disc_id,
+ s_disc_id) = self.run_discovery_prange_sminmax_outofrange(False)
+ self.run_discovery_update(p_dut, s_dut, p_disc_id, s_disc_id,
+ p_config=None, # no updates
+ s_config=autils.add_ranging_to_sub(
+ autils.create_discovery_config(self.SERVICE_NAME,
+ aconsts.SUBSCRIBE_TYPE_ACTIVE,
+ ssi=self.getname()),
+ min_distance_mm=0,
+ max_distance_mm=None),
+ expect_discovery=True,
+ expect_range=True)
+ self.run_discovery_update(p_dut, s_dut, p_disc_id, s_disc_id,
+ p_config=None, # no updates
+ s_config=autils.add_ranging_to_sub(
+ autils.create_discovery_config(self.SERVICE_NAME,
+ aconsts.SUBSCRIBE_TYPE_ACTIVE,
+ ssi=self.getname()),
+ min_distance_mm=1000000,
+ max_distance_mm=None),
+ expect_discovery=False)
+ self.run_discovery_update(p_dut, s_dut, p_disc_id, s_disc_id,
+ p_config=None, # no updates
+ s_config=autils.create_discovery_config(self.SERVICE_NAME,
+ aconsts.SUBSCRIBE_TYPE_ACTIVE,
+ ssi=self.getname()),
+ expect_discovery=True,
+ expect_range=False)
+
+ #########################################################################
+
+ @test_tracker_info(uuid="6edc47ab-7300-4bff-b7dd-5de83f58928a")
+ def test_ranged_discovery_multi_session(self):
+ """Verify behavior with multiple concurrent discovery session with different
+ configurations:
+
+ Device A (Publisher):
+ Publisher AA: ranging enabled
+ Publisher BB: ranging enabled
+ Publisher CC: ranging enabled
+ Publisher DD: ranging disabled
+ Device B (Subscriber):
+ Subscriber AA: ranging out-of-range -> no match
+ Subscriber BB: ranging in-range -> match w/range
+ Subscriber CC: ranging disabled -> match w/o range
+ Subscriber DD: ranging out-of-range -> match w/o range
+ """
+ p_dut = self.android_devices[0]
+ p_dut.pretty_name = "Publisher"
+ s_dut = self.android_devices[1]
+ s_dut.pretty_name = "Subscriber"
+
+ # Publisher+Subscriber: attach and wait for confirmation
+ p_id = p_dut.droid.wifiAwareAttach(False)
+ autils.wait_for_event(p_dut, aconsts.EVENT_CB_ON_ATTACHED)
+ time.sleep(self.device_startup_offset)
+ s_id = s_dut.droid.wifiAwareAttach(False)
+ autils.wait_for_event(s_dut, aconsts.EVENT_CB_ON_ATTACHED)
+
+ # Subscriber: start sessions
+ aa_s_disc_id = s_dut.droid.wifiAwareSubscribe(
+ s_id,
+ autils.add_ranging_to_sub(
+ autils.create_discovery_config("AA",
+ aconsts.SUBSCRIBE_TYPE_PASSIVE),
+ min_distance_mm=1000000, max_distance_mm=1000001),
+ True)
+ bb_s_disc_id = s_dut.droid.wifiAwareSubscribe(
+ s_id,
+ autils.add_ranging_to_sub(
+ autils.create_discovery_config("BB",
+ aconsts.SUBSCRIBE_TYPE_PASSIVE),
+ min_distance_mm=0, max_distance_mm=1000000),
+ True)
+ cc_s_disc_id = s_dut.droid.wifiAwareSubscribe(
+ s_id,
+ autils.create_discovery_config("CC", aconsts.SUBSCRIBE_TYPE_PASSIVE),
+ True)
+ dd_s_disc_id = s_dut.droid.wifiAwareSubscribe(
+ s_id,
+ autils.add_ranging_to_sub(
+ autils.create_discovery_config("DD",
+ aconsts.SUBSCRIBE_TYPE_PASSIVE),
+ min_distance_mm=1000000, max_distance_mm=1000001),
+ True)
+
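+ # The trailing True in the publish/subscribe calls appears to request that
+ # SL4A tag callback events with the discovery session id, which is why the
+ # waits below use autils.decorate_event().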
+ autils.wait_for_event(s_dut, autils.decorate_event(
+ aconsts.SESSION_CB_ON_SUBSCRIBE_STARTED, aa_s_disc_id))
+ autils.wait_for_event(s_dut, autils.decorate_event(
+ aconsts.SESSION_CB_ON_SUBSCRIBE_STARTED, bb_s_disc_id))
+ autils.wait_for_event(s_dut, autils.decorate_event(
+ aconsts.SESSION_CB_ON_SUBSCRIBE_STARTED, cc_s_disc_id))
+ autils.wait_for_event(s_dut, autils.decorate_event(
+ aconsts.SESSION_CB_ON_SUBSCRIBE_STARTED, dd_s_disc_id))
+
+ # Publisher: start sessions
+ aa_p_disc_id = p_dut.droid.wifiAwarePublish(
+ p_id,
+ autils.add_ranging_to_pub(
+ autils.create_discovery_config("AA",
+ aconsts.PUBLISH_TYPE_UNSOLICITED),
+ enable_ranging=True),
+ True)
+ bb_p_disc_id = p_dut.droid.wifiAwarePublish(
+ p_id,
+ autils.add_ranging_to_pub(
+ autils.create_discovery_config("BB",
+ aconsts.PUBLISH_TYPE_UNSOLICITED),
+ enable_ranging=True),
+ True)
+ cc_p_disc_id = p_dut.droid.wifiAwarePublish(
+ p_id,
+ autils.add_ranging_to_pub(
+ autils.create_discovery_config("CC",
+ aconsts.PUBLISH_TYPE_UNSOLICITED),
+ enable_ranging=True),
+ True)
+ dd_p_disc_id = p_dut.droid.wifiAwarePublish(
+ p_id,
+ autils.create_discovery_config("DD", aconsts.PUBLISH_TYPE_UNSOLICITED),
+ True)
+
+ autils.wait_for_event(p_dut, autils.decorate_event(
+ aconsts.SESSION_CB_ON_PUBLISH_STARTED, aa_p_disc_id))
+ autils.wait_for_event(p_dut, autils.decorate_event(
+ aconsts.SESSION_CB_ON_PUBLISH_STARTED, bb_p_disc_id))
+ autils.wait_for_event(p_dut, autils.decorate_event(
+ aconsts.SESSION_CB_ON_PUBLISH_STARTED, cc_p_disc_id))
+ autils.wait_for_event(p_dut, autils.decorate_event(
+ aconsts.SESSION_CB_ON_PUBLISH_STARTED, dd_p_disc_id))
+
+ # Expected and unexpected service discovery
+ event = autils.wait_for_event(s_dut, autils.decorate_event(
+ aconsts.SESSION_CB_ON_SERVICE_DISCOVERED, bb_s_disc_id))
+ asserts.assert_true(aconsts.SESSION_CB_KEY_DISTANCE_MM in event["data"],
+ "Discovery with ranging for BB expected!")
+ event = autils.wait_for_event(s_dut, autils.decorate_event(
+ aconsts.SESSION_CB_ON_SERVICE_DISCOVERED, cc_s_disc_id))
+ asserts.assert_false(
+ aconsts.SESSION_CB_KEY_DISTANCE_MM in event["data"],
+ "Discovery with ranging for CC NOT expected!")
+ event = autils.wait_for_event(s_dut, autils.decorate_event(
+ aconsts.SESSION_CB_ON_SERVICE_DISCOVERED, dd_s_disc_id))
+ asserts.assert_false(
+ aconsts.SESSION_CB_KEY_DISTANCE_MM in event["data"],
+ "Discovery with ranging for DD NOT expected!")
+ autils.fail_on_event(s_dut, autils.decorate_event(
+ aconsts.SESSION_CB_ON_SERVICE_DISCOVERED, aa_s_disc_id))
+
+ # (single) sleep for the timeout period and then verify that no further events occurred
+ time.sleep(autils.EVENT_TIMEOUT)
+ autils.verify_no_more_events(p_dut, timeout=0)
+ autils.verify_no_more_events(s_dut, timeout=0)
+
+ #########################################################################
+
+ @test_tracker_info(uuid="deede47f-a54c-46d9-88bb-f4482fbd8470")
+ def test_ndp_concurrency(self):
+ """Verify the behavior of Wi-Fi Aware Ranging whenever an NDP is created -
+ for those devices that have a concurrency limitation that does not allow
+ Aware Ranging, whether direct or as part of discovery.
+
+ Publisher: start 4 services
+ AA w/o ranging
+ BB w/ ranging
+ CC w/ ranging
+ DD w/ ranging
+ Subscriber: start 3 services
+ AA w/o ranging
+ BB w/ ranging out-of-range
+ (do not start CC!)
+ DD w/ ranging in-range
+ Expect AA discovery, DD discovery w/range, but no BB
+ Start NDP in context of AA
+ If RANGING_NDP_CONCURRENCY_LIMITATION:
+ Verify discovery on BB w/o range
+ Start EE w/ranging out-of-range
+ Start FF w/ranging in-range
+ If RANGING_NDP_CONCURRENCY_LIMITATION:
+ Verify discovery on EE w/o range
+ Verify discovery on FF w/o range
+ Else:
+ Verify discovery on FF w/ range
+ Tear down NDP
+ Subscriber
+ Start CC w/ ranging out-of-range
+ Wait to verify that do not get match
+ Update configuration to be in-range
+ Verify that get match with ranging information
+ """
+ p_dut = self.android_devices[0]
+ p_dut.pretty_name = "Publisher"
+ s_dut = self.android_devices[1]
+ s_dut.pretty_name = "Subscriber"
+
+ # Publisher+Subscriber: attach and wait for confirmation
+ p_id = p_dut.droid.wifiAwareAttach(False)
+ autils.wait_for_event(p_dut, aconsts.EVENT_CB_ON_ATTACHED)
+ time.sleep(self.device_startup_offset)
+ s_id = s_dut.droid.wifiAwareAttach(False)
+ autils.wait_for_event(s_dut, aconsts.EVENT_CB_ON_ATTACHED)
+
+ # Publisher: AA w/o ranging, BB w/ ranging, CC w/ ranging, DD w/ ranging
+ aa_p_id = p_dut.droid.wifiAwarePublish(p_id,
+ autils.create_discovery_config("AA", aconsts.PUBLISH_TYPE_SOLICITED),
+ True)
+ autils.wait_for_event(p_dut, autils.decorate_event(
+ aconsts.SESSION_CB_ON_PUBLISH_STARTED, aa_p_id))
+ bb_p_id = p_dut.droid.wifiAwarePublish(p_id, autils.add_ranging_to_pub(
+ autils.create_discovery_config("BB", aconsts.PUBLISH_TYPE_UNSOLICITED),
+ enable_ranging=True), True)
+ autils.wait_for_event(p_dut, autils.decorate_event(
+ aconsts.SESSION_CB_ON_PUBLISH_STARTED, bb_p_id))
+ cc_p_id = p_dut.droid.wifiAwarePublish(p_id, autils.add_ranging_to_pub(
+ autils.create_discovery_config("CC", aconsts.PUBLISH_TYPE_UNSOLICITED),
+ enable_ranging=True), True)
+ autils.wait_for_event(p_dut, autils.decorate_event(
+ aconsts.SESSION_CB_ON_PUBLISH_STARTED, cc_p_id))
+ dd_p_id = p_dut.droid.wifiAwarePublish(p_id, autils.add_ranging_to_pub(
+ autils.create_discovery_config("DD", aconsts.PUBLISH_TYPE_UNSOLICITED),
+ enable_ranging=True), True)
+ autils.wait_for_event(p_dut, autils.decorate_event(
+ aconsts.SESSION_CB_ON_PUBLISH_STARTED, dd_p_id))
+
+ # Subscriber: AA w/o ranging, BB w/ ranging out-of-range,
+ # DD w/ ranging in-range
+ aa_s_id = s_dut.droid.wifiAwareSubscribe(s_id,
+ autils.create_discovery_config("AA", aconsts.SUBSCRIBE_TYPE_ACTIVE),
+ True)
+ autils.wait_for_event(s_dut, autils.decorate_event(
+ aconsts.SESSION_CB_ON_SUBSCRIBE_STARTED, aa_s_id))
+ bb_s_id = s_dut.droid.wifiAwareSubscribe(s_id, autils.add_ranging_to_sub(
+ autils.create_discovery_config("BB", aconsts.SUBSCRIBE_TYPE_PASSIVE),
+ min_distance_mm=1000000, max_distance_mm=1000001), True)
+ autils.wait_for_event(s_dut, autils.decorate_event(
+ aconsts.SESSION_CB_ON_SUBSCRIBE_STARTED, bb_s_id))
+ dd_s_id = s_dut.droid.wifiAwareSubscribe(s_id, autils.add_ranging_to_sub(
+ autils.create_discovery_config("DD", aconsts.SUBSCRIBE_TYPE_PASSIVE),
+ min_distance_mm=None, max_distance_mm=1000000), True)
+ autils.wait_for_event(s_dut, autils.decorate_event(
+ aconsts.SESSION_CB_ON_SUBSCRIBE_STARTED, dd_s_id))
+
+ # verify: AA discovered, BB not discovered, DD discovery w/range
+ event = autils.wait_for_event(s_dut, autils.decorate_event(
+ aconsts.SESSION_CB_ON_SERVICE_DISCOVERED, aa_s_id))
+ asserts.assert_false(aconsts.SESSION_CB_KEY_DISTANCE_MM in event["data"],
+ "Discovery with ranging for AA NOT expected!")
+ aa_peer_id_on_sub = event['data'][aconsts.SESSION_CB_KEY_PEER_ID]
+ autils.fail_on_event(s_dut, autils.decorate_event(
+ aconsts.SESSION_CB_ON_SERVICE_DISCOVERED, bb_s_id))
+ event = autils.wait_for_event(s_dut, autils.decorate_event(
+ aconsts.SESSION_CB_ON_SERVICE_DISCOVERED, dd_s_id))
+ asserts.assert_true(aconsts.SESSION_CB_KEY_DISTANCE_MM in event["data"],
+ "Discovery with ranging for DD expected!")
+
+ # start NDP in context of AA:
+
+ # Publisher: request network (from ANY)
+ p_req_key = autils.request_network(p_dut,
+ p_dut.droid.wifiAwareCreateNetworkSpecifier(aa_p_id, None))
+
+ # Subscriber: request network
+ s_req_key = autils.request_network(s_dut,
+ s_dut.droid.wifiAwareCreateNetworkSpecifier(aa_s_id, aa_peer_id_on_sub))
+
+ # Publisher & Subscriber: wait for network formation
+ p_net_event = autils.wait_for_event_with_keys(p_dut,
+ cconsts.EVENT_NETWORK_CALLBACK,
+ autils.EVENT_TIMEOUT, (
+ cconsts.NETWORK_CB_KEY_EVENT,
+ cconsts.NETWORK_CB_LINK_PROPERTIES_CHANGED),
+ (cconsts.NETWORK_CB_KEY_ID,
+ p_req_key))
+ s_net_event = autils.wait_for_event_with_keys(s_dut,
+ cconsts.EVENT_NETWORK_CALLBACK,
+ autils.EVENT_TIMEOUT, (
+ cconsts.NETWORK_CB_KEY_EVENT,
+ cconsts.NETWORK_CB_LINK_PROPERTIES_CHANGED),
+ (cconsts.NETWORK_CB_KEY_ID,
+ s_req_key))
+
+ p_aware_if = p_net_event["data"][cconsts.NETWORK_CB_KEY_INTERFACE_NAME]
+ s_aware_if = s_net_event["data"][cconsts.NETWORK_CB_KEY_INTERFACE_NAME]
+
+ p_ipv6 = p_dut.droid.connectivityGetLinkLocalIpv6Address(p_aware_if).split(
+ "%")[0]
+ s_ipv6 = s_dut.droid.connectivityGetLinkLocalIpv6Address(s_aware_if).split(
+ "%")[0]
+
+ self.log.info("AA NDP Interface names: P=%s, S=%s", p_aware_if, s_aware_if)
+ self.log.info("AA NDP Interface addresses (IPv6): P=%s, S=%s", p_ipv6,
+ s_ipv6)
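+ # The NDP is not used for traffic here; it exists only to trigger the
+ # ranging/NDP concurrency limitation being verified.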
+
+ if self.RANGING_NDP_CONCURRENCY_LIMITATION:
+ # Expect BB to now discover w/o ranging
+ event = autils.wait_for_event(s_dut, autils.decorate_event(
+ aconsts.SESSION_CB_ON_SERVICE_DISCOVERED, bb_s_id))
+ asserts.assert_false(aconsts.SESSION_CB_KEY_DISTANCE_MM in event["data"],
+ "Discovery with ranging for BB NOT expected!")
+
+ # Publishers: EE, FF w/ ranging
+ ee_p_id = p_dut.droid.wifiAwarePublish(p_id, autils.add_ranging_to_pub(
+ autils.create_discovery_config("EE", aconsts.PUBLISH_TYPE_SOLICITED),
+ enable_ranging=True), True)
+ autils.wait_for_event(p_dut, autils.decorate_event(
+ aconsts.SESSION_CB_ON_PUBLISH_STARTED, ee_p_id))
+ ff_p_id = p_dut.droid.wifiAwarePublish(p_id, autils.add_ranging_to_pub(
+ autils.create_discovery_config("FF", aconsts.PUBLISH_TYPE_UNSOLICITED),
+ enable_ranging=True), True)
+ autils.wait_for_event(p_dut, autils.decorate_event(
+ aconsts.SESSION_CB_ON_PUBLISH_STARTED, ff_p_id))
+
+ # Subscribers: EE out-of-range, FF in-range
+ ee_s_id = s_dut.droid.wifiAwareSubscribe(s_id, autils.add_ranging_to_sub(
+ autils.create_discovery_config("EE", aconsts.SUBSCRIBE_TYPE_ACTIVE),
+ min_distance_mm=1000000, max_distance_mm=1000001), True)
+ autils.wait_for_event(s_dut, autils.decorate_event(
+ aconsts.SESSION_CB_ON_SUBSCRIBE_STARTED, ee_s_id))
+ ff_s_id = s_dut.droid.wifiAwareSubscribe(s_id, autils.add_ranging_to_sub(
+ autils.create_discovery_config("FF", aconsts.SUBSCRIBE_TYPE_PASSIVE),
+ min_distance_mm=None, max_distance_mm=1000000), True)
+ autils.wait_for_event(s_dut, autils.decorate_event(
+ aconsts.SESSION_CB_ON_SUBSCRIBE_STARTED, ff_s_id))
+
+ if self.RANGING_NDP_CONCURRENCY_LIMITATION:
+ # Expect EE & FF discovery w/o range
+ event = autils.wait_for_event(s_dut, autils.decorate_event(
+ aconsts.SESSION_CB_ON_SERVICE_DISCOVERED, ee_s_id))
+ asserts.assert_false(aconsts.SESSION_CB_KEY_DISTANCE_MM in event["data"],
+ "Discovery with ranging for EE NOT expected!")
+ event = autils.wait_for_event(s_dut, autils.decorate_event(
+ aconsts.SESSION_CB_ON_SERVICE_DISCOVERED, ff_s_id))
+ asserts.assert_false(aconsts.SESSION_CB_KEY_DISTANCE_MM in event["data"],
+ "Discovery with ranging for FF NOT expected!")
+ else:
+ event = autils.wait_for_event(s_dut, autils.decorate_event(
+ aconsts.SESSION_CB_ON_SERVICE_DISCOVERED, ff_s_id))
+ asserts.assert_true(aconsts.SESSION_CB_KEY_DISTANCE_MM in event["data"],
+ "Discovery with ranging for FF expected!")
+
+ # tear down NDP
+ p_dut.droid.connectivityUnregisterNetworkCallback(p_req_key)
+ s_dut.droid.connectivityUnregisterNetworkCallback(s_req_key)
+
+ time.sleep(5) # give time for NDP termination to finish
+
+ # Subscriber: start CC out-of-range - no discovery expected!
+ cc_s_id = s_dut.droid.wifiAwareSubscribe(s_id, autils.add_ranging_to_sub(
+ autils.create_discovery_config("CC", aconsts.SUBSCRIBE_TYPE_PASSIVE),
+ min_distance_mm=1000000, max_distance_mm=1000001), True)
+ autils.wait_for_event(s_dut, autils.decorate_event(
+ aconsts.SESSION_CB_ON_SUBSCRIBE_STARTED, cc_s_id))
+ autils.fail_on_event(s_dut, autils.decorate_event(
+ aconsts.SESSION_CB_ON_SERVICE_DISCOVERED, cc_s_id))
+
+ # Subscriber: modify CC to in-range - expect discovery w/ range
+ s_dut.droid.wifiAwareUpdateSubscribe(cc_s_id, autils.add_ranging_to_sub(
+ autils.create_discovery_config("CC", aconsts.SUBSCRIBE_TYPE_PASSIVE),
+ min_distance_mm=None, max_distance_mm=1000001))
+ autils.wait_for_event(s_dut, autils.decorate_event(
+ aconsts.SESSION_CB_ON_SESSION_CONFIG_UPDATED, cc_s_id))
+ event = autils.wait_for_event(s_dut, autils.decorate_event(
+ aconsts.SESSION_CB_ON_SERVICE_DISCOVERED, cc_s_id))
+ asserts.assert_true(aconsts.SESSION_CB_KEY_DISTANCE_MM in event["data"],
+ "Discovery with ranging for CC expected!")
+
+ @test_tracker_info(uuid="d94dac91-4090-4c03-a867-6dfac6558ba3")
+ def test_role_concurrency(self):
+ """Verify the behavior of Wi-Fi Aware Ranging (in the context of discovery)
+ when the device has concurrency limitations which do not permit concurrent
+ Initiator and Responder roles on the same device. In such a case it is
+ expected that normal discovery without ranging is executed AND that ranging
+ is restored whenever the concurrency constraints are removed.
+
+ Note: all Subscribers are in-range.
+
+ DUT1: start multiple services
+ Publish AA w/ ranging (unsolicited)
+ Subscribe BB w/ ranging (active)
+ Publish CC w/ ranging (unsolicited)
+ Publish DD w/o ranging (solicited)
+ Subscribe EE w/ ranging (passive)
+ Subscribe FF w/ ranging (active)
+ DUT2: start multiple services
+ Subscribe AA w/ ranging (passive)
+ Publish BB w/ ranging (solicited)
+ Subscribe DD w/o ranging (active)
+ Expect
+ DUT2: AA match w/ range information
+ DUT1: BB match w/o range information (concurrency disables ranging)
+ DUT2: DD match w/o range information
+ DUT1: Terminate AA
+ DUT2:
+ Terminate AA
+ Start Publish EE w/ ranging (unsolicited)
+ DUT1: expect EE w/o ranging
+ DUT1: Terminate CC
+ DUT2: Start Publish FF w/ ranging (solicited)
+ DUT1: expect FF w/ ranging information - should finally be back up
+ """
+ dut1 = self.android_devices[0]
+ dut1.pretty_name = "DUT1"
+ dut2 = self.android_devices[1]
+ dut2.pretty_name = "DUT2"
+
+ # Publisher+Subscriber: attach and wait for confirmation
+ dut1_id = dut1.droid.wifiAwareAttach(False)
+ autils.wait_for_event(dut1, aconsts.EVENT_CB_ON_ATTACHED)
+ time.sleep(self.device_startup_offset)
+ dut2_id = dut2.droid.wifiAwareAttach(False)
+ autils.wait_for_event(dut2, aconsts.EVENT_CB_ON_ATTACHED)
+
+ # DUT1: initial service bringup
+ aa_p_id = dut1.droid.wifiAwarePublish(dut1_id, autils.add_ranging_to_pub(
+ autils.create_discovery_config("AA", aconsts.PUBLISH_TYPE_UNSOLICITED),
+ enable_ranging=True), True)
+ autils.wait_for_event(dut1, autils.decorate_event(
+ aconsts.SESSION_CB_ON_PUBLISH_STARTED, aa_p_id))
+ bb_s_id = dut1.droid.wifiAwareSubscribe(dut1_id, autils.add_ranging_to_sub(
+ autils.create_discovery_config("BB", aconsts.SUBSCRIBE_TYPE_ACTIVE),
+ min_distance_mm=None, max_distance_mm=1000000), True)
+ autils.wait_for_event(dut1, autils.decorate_event(
+ aconsts.SESSION_CB_ON_SUBSCRIBE_STARTED, bb_s_id))
+ cc_p_id = dut1.droid.wifiAwarePublish(dut1_id, autils.add_ranging_to_pub(
+ autils.create_discovery_config("CC", aconsts.PUBLISH_TYPE_UNSOLICITED),
+ enable_ranging=True), True)
+ autils.wait_for_event(dut1, autils.decorate_event(
+ aconsts.SESSION_CB_ON_PUBLISH_STARTED, cc_p_id))
+ dd_p_id = dut1.droid.wifiAwarePublish(dut1_id,
+ autils.create_discovery_config("DD", aconsts.PUBLISH_TYPE_SOLICITED),
+ True)
+ autils.wait_for_event(dut1, autils.decorate_event(
+ aconsts.SESSION_CB_ON_PUBLISH_STARTED, dd_p_id))
+ ee_s_id = dut1.droid.wifiAwareSubscribe(dut1_id, autils.add_ranging_to_sub(
+ autils.create_discovery_config("EE", aconsts.SUBSCRIBE_TYPE_PASSIVE),
+ min_distance_mm=None, max_distance_mm=1000000), True)
+ autils.wait_for_event(dut1, autils.decorate_event(
+ aconsts.SESSION_CB_ON_SUBSCRIBE_STARTED, ee_s_id))
+ ff_s_id = dut1.droid.wifiAwareSubscribe(dut1_id, autils.add_ranging_to_sub(
+ autils.create_discovery_config("FF", aconsts.SUBSCRIBE_TYPE_ACTIVE),
+ min_distance_mm=None, max_distance_mm=1000000), True)
+ autils.wait_for_event(dut1, autils.decorate_event(
+ aconsts.SESSION_CB_ON_SUBSCRIBE_STARTED, ff_s_id))
+
+ # DUT2: initial service bringup
+ aa_s_id = dut2.droid.wifiAwareSubscribe(dut2_id, autils.add_ranging_to_sub(
+ autils.create_discovery_config("AA", aconsts.SUBSCRIBE_TYPE_PASSIVE),
+ min_distance_mm=None, max_distance_mm=1000000), True)
+ autils.wait_for_event(dut2, autils.decorate_event(
+ aconsts.SESSION_CB_ON_SUBSCRIBE_STARTED, aa_s_id))
+ bb_p_id = dut2.droid.wifiAwarePublish(dut2_id, autils.add_ranging_to_pub(
+ autils.create_discovery_config("BB", aconsts.PUBLISH_TYPE_SOLICITED),
+ enable_ranging=True), True)
+ autils.wait_for_event(dut2, autils.decorate_event(
+ aconsts.SESSION_CB_ON_PUBLISH_STARTED, bb_p_id))
+ dd_s_id = dut2.droid.wifiAwareSubscribe(dut2_id,
+ autils.create_discovery_config("AA", aconsts.SUBSCRIBE_TYPE_ACTIVE),
+ True)
+ autils.wait_for_event(dut2, autils.decorate_event(
+ aconsts.SESSION_CB_ON_SUBSCRIBE_STARTED, dd_s_id))
+
+ # Initial set of discovery events for AA, BB, and DD (which are up)
+ event = autils.wait_for_event(dut2, autils.decorate_event(
+ aconsts.SESSION_CB_ON_SERVICE_DISCOVERED, aa_s_id))
+ asserts.assert_true(aconsts.SESSION_CB_KEY_DISTANCE_MM in event["data"],
+ "Discovery with ranging for AA expected!")
+ event = autils.wait_for_event(dut1, autils.decorate_event(
+ aconsts.SESSION_CB_ON_SERVICE_DISCOVERED, bb_s_id))
+ if self.RANGING_INITIATOR_RESPONDER_CONCURRENCY_LIMITATION:
+ asserts.assert_false(aconsts.SESSION_CB_KEY_DISTANCE_MM in event["data"],
+ "Discovery with ranging for BB NOT expected!")
+ else:
+ asserts.assert_true(aconsts.SESSION_CB_KEY_DISTANCE_MM in event["data"],
+ "Discovery with ranging for BB expected!")
+ event = autils.wait_for_event(dut2, autils.decorate_event(
+ aconsts.SESSION_CB_ON_SERVICE_DISCOVERED, dd_s_id))
+ asserts.assert_false(aconsts.SESSION_CB_KEY_DISTANCE_MM in event["data"],
+ "Discovery with ranging for DD NOT expected!")
+
+ # DUT1/DUT2: terminate AA
+ dut1.droid.wifiAwareDestroyDiscoverySession(aa_p_id)
+ dut2.droid.wifiAwareDestroyDiscoverySession(aa_s_id)
+
+ time.sleep(5) # guarantee that the session terminated (and the host recovered?)
+
+ # DUT2: try EE service - ranging still disabled
+ ee_p_id = dut2.droid.wifiAwarePublish(dut2_id, autils.add_ranging_to_pub(
+ autils.create_discovery_config("EE", aconsts.PUBLISH_TYPE_UNSOLICITED),
+ enable_ranging=True), True)
+ autils.wait_for_event(dut2, autils.decorate_event(
+ aconsts.SESSION_CB_ON_PUBLISH_STARTED, ee_p_id))
+
+ event = autils.wait_for_event(dut1, autils.decorate_event(
+ aconsts.SESSION_CB_ON_SERVICE_DISCOVERED, ee_s_id))
+ if self.RANGING_INITIATOR_RESPONDER_CONCURRENCY_LIMITATION:
+ asserts.assert_false(aconsts.SESSION_CB_KEY_DISTANCE_MM in event["data"],
+ "Discovery with ranging for EE NOT expected!")
+ else:
+ asserts.assert_true(aconsts.SESSION_CB_KEY_DISTANCE_MM in event["data"],
+ "Discovery with ranging for EE expected!")
+
+ # DUT1: terminate CC - last publish w/ ranging on DUT!
+ dut1.droid.wifiAwareDestroyDiscoverySession(cc_p_id)
+
+ time.sleep(5) # guarantee that the session terminated (and the host recovered?)
+
+ # DUT2: try FF service - ranging should now function
+ ff_p_id = dut2.droid.wifiAwarePublish(dut2_id, autils.add_ranging_to_pub(
+ autils.create_discovery_config("FF", aconsts.PUBLISH_TYPE_SOLICITED),
+ enable_ranging=True), True)
+ autils.wait_for_event(dut2, autils.decorate_event(
+ aconsts.SESSION_CB_ON_PUBLISH_STARTED, ff_p_id))
+
+ event = autils.wait_for_event(dut1, autils.decorate_event(
+ aconsts.SESSION_CB_ON_SERVICE_DISCOVERED, ff_s_id))
+ asserts.assert_true(aconsts.SESSION_CB_KEY_DISTANCE_MM in event["data"],
+ "Discovery with ranging for FF expected!")
+
+
+ @test_tracker_info(uuid="6700eab8-a172-43cd-aed3-e6577ce8fd89")
+ def test_discovery_direct_concurrency(self):
+ """Verify the behavior of Wi-Fi Aware Ranging used as part of discovery and
+ as direct ranging to a peer device.
+
+ Process:
+ - Start YYY service with ranging in-range
+ - Start XXX service with ranging out-of-range
+ - Start performing direct Ranging
+ - While above going on update XXX to be in-range
+ - Keep performing direct Ranging in context of YYY
+ - Stop direct Ranging and look for XXX to discover
+ """
+ dut1 = self.android_devices[0]
+ dut1.pretty_name = "DUT1"
+ dut2 = self.android_devices[1]
+ dut2.pretty_name = "DUT2"
+
+ # DUTs: attach and wait for confirmation
+ dut1_id = dut1.droid.wifiAwareAttach(False)
+ autils.wait_for_event(dut1, aconsts.EVENT_CB_ON_ATTACHED)
+ time.sleep(self.device_startup_offset)
+ dut2_id = dut2.droid.wifiAwareAttach(True)
+ event = autils.wait_for_event(dut2, aconsts.EVENT_CB_ON_IDENTITY_CHANGED)
+ dut2_mac = event['data']['mac']
+
+ # DUT1: publishers bring-up
+ xxx_p_id = dut1.droid.wifiAwarePublish(dut1_id, autils.add_ranging_to_pub(
+ autils.create_discovery_config("XXX", aconsts.PUBLISH_TYPE_UNSOLICITED),
+ enable_ranging=True), True)
+ autils.wait_for_event(dut1, autils.decorate_event(
+ aconsts.SESSION_CB_ON_PUBLISH_STARTED, xxx_p_id))
+ yyy_p_id = dut1.droid.wifiAwarePublish(dut1_id, autils.add_ranging_to_pub(
+ autils.create_discovery_config("YYY", aconsts.PUBLISH_TYPE_UNSOLICITED),
+ enable_ranging=True), True)
+ autils.wait_for_event(dut1, autils.decorate_event(
+ aconsts.SESSION_CB_ON_PUBLISH_STARTED, yyy_p_id))
+
+ # DUT2: subscribers bring-up
+ xxx_s_id = dut2.droid.wifiAwareSubscribe(dut2_id, autils.add_ranging_to_sub(
+ autils.create_discovery_config("XXX", aconsts.SUBSCRIBE_TYPE_PASSIVE),
+ min_distance_mm=1000000, max_distance_mm=1000001), True)
+ autils.wait_for_event(dut2, autils.decorate_event(
+ aconsts.SESSION_CB_ON_SUBSCRIBE_STARTED, xxx_s_id))
+ yyy_s_id = dut2.droid.wifiAwareSubscribe(dut2_id, autils.add_ranging_to_sub(
+ autils.create_discovery_config("YYY", aconsts.SUBSCRIBE_TYPE_PASSIVE),
+ min_distance_mm=None, max_distance_mm=1000000), True)
+ autils.wait_for_event(dut2, autils.decorate_event(
+ aconsts.SESSION_CB_ON_SUBSCRIBE_STARTED, yyy_s_id))
+
+ # Service discovery: YYY (with range info), but no XXX
+ event = autils.wait_for_event(dut2, autils.decorate_event(
+ aconsts.SESSION_CB_ON_SERVICE_DISCOVERED, yyy_s_id))
+ asserts.assert_true(aconsts.SESSION_CB_KEY_DISTANCE_MM in event["data"],
+ "Discovery with ranging for YYY expected!")
+ yyy_peer_id_on_sub = event['data'][aconsts.SESSION_CB_KEY_PEER_ID]
+
+ autils.fail_on_event(dut2, autils.decorate_event(
+ aconsts.SESSION_CB_ON_SERVICE_DISCOVERED, xxx_s_id))
+
+ # Direct ranging
+ results21 = []
+ for _ in range(10):
+ ranging_id = dut2.droid.wifiRttStartRangingToAwarePeerId(yyy_peer_id_on_sub)
+ event = autils.wait_for_event(dut2, rutils.decorate_event(
+ rconsts.EVENT_CB_RANGING_ON_RESULT, ranging_id))
+ results21.append(event["data"][rconsts.EVENT_CB_RANGING_KEY_RESULTS][0])
+
+ time.sleep(5) # while switching roles
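+ # Reverse direction: DUT1 now ranges to DUT2 using the MAC address captured
+ # from DUT2's identity-changed event at attach time.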
+
+ results12 = []
+ for _ in range(10):
+ ranging_id = dut1.droid.wifiRttStartRangingToAwarePeerMac(dut2_mac)
+ event = autils.wait_for_event(dut1, rutils.decorate_event(
+ rconsts.EVENT_CB_RANGING_ON_RESULT, ranging_id))
+ results12.append(event["data"][rconsts.EVENT_CB_RANGING_KEY_RESULTS][0])
+
+ stats = [rutils.extract_stats(results12, 0, 0, 0),
+ rutils.extract_stats(results21, 0, 0, 0)]
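+ # Note: the zeroed arguments to extract_stats are assumed to be the reference
+ # distance/margin/RSSI thresholds; no accuracy assertions are made here - the
+ # stats are only attached to the test result at the end.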
+
+ # Update XXX to be within range
+ dut2.droid.wifiAwareUpdateSubscribe(xxx_s_id, autils.add_ranging_to_sub(
+ autils.create_discovery_config("XXX", aconsts.SUBSCRIBE_TYPE_PASSIVE),
+ min_distance_mm=None, max_distance_mm=1000000))
+ autils.wait_for_event(dut2, autils.decorate_event(
+ aconsts.SESSION_CB_ON_SESSION_CONFIG_UPDATED, xxx_s_id))
+
+ # Expect discovery on XXX - wait until discovery with ranging:
+ # - 0 or more: without ranging info (due to concurrency limitations)
+ # - 1 or more: with ranging (once concurrency limitation relieved)
+ num_events = 0
+ while True:
+ event = autils.wait_for_event(dut2, autils.decorate_event(
+ aconsts.SESSION_CB_ON_SERVICE_DISCOVERED, xxx_s_id))
+ if aconsts.SESSION_CB_KEY_DISTANCE_MM in event["data"]:
+ break
+ num_events = num_events + 1
+ asserts.assert_true(num_events < 10, # arbitrary safety valve
+ "Way too many discovery events without ranging!")
+
+ asserts.explicit_pass("Discovery/Direct RTT Concurrency Pass", extras=stats)
\ No newline at end of file
diff --git a/acts/tests/google/wifi/rtt/functional/RangeApMiscTest.py b/acts/tests/google/wifi/rtt/functional/RangeApMiscTest.py
new file mode 100644
index 0000000..dd5560d
--- /dev/null
+++ b/acts/tests/google/wifi/rtt/functional/RangeApMiscTest.py
@@ -0,0 +1,85 @@
+#!/usr/bin/python3.4
+#
+# Copyright 2017 - The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from acts import asserts
+from acts.test_utils.wifi import wifi_test_utils as wutils
+from acts.test_utils.wifi.rtt import rtt_const as rconsts
+from acts.test_utils.wifi.rtt import rtt_test_utils as rutils
+from acts.test_utils.wifi.rtt.RttBaseTest import RttBaseTest
+
+
+class RangeApMiscTest(RttBaseTest):
+ """Test class for RTT ranging to Access Points - miscellaneous tests which
+ do not fit into the strict IEEE 802.11mc supporting or non-supporting test
+ beds - e.g. a mixed test."""
+
+ # Number of RTT iterations
+ NUM_ITER = 10
+
+ # Time gap (in seconds) between iterations
+ TIME_BETWEEN_ITERATIONS = 0
+
+ def __init__(self, controllers):
+ RttBaseTest.__init__(self, controllers)
+
+ #############################################################################
+
+ def test_rtt_mixed_80211mc_supporting_aps_wo_privilege(self):
+    """Scan for APs and perform RTT on one AP which supports and one AP which
+    does not support IEEE 802.11mc, with the device not having privileged
+    access (expect failures for the non-supporting AP)."""
+ dut = self.android_devices[0]
+ rutils.config_privilege_override(dut, True)
+ rtt_aps = rutils.scan_with_rtt_support_constraint(dut, True)
+ non_rtt_aps = rutils.scan_with_rtt_support_constraint(dut, False)
+ mix_list = [rtt_aps[0], non_rtt_aps[0]]
+    dut.log.debug("Mixed (802.11mc and non-802.11mc) APs=%s", mix_list)
+ events = rutils.run_ranging(dut, mix_list, self.NUM_ITER,
+ self.TIME_BETWEEN_ITERATIONS)
+ stats = rutils.analyze_results(events, self.rtt_reference_distance_mm,
+ self.rtt_reference_distance_margin_mm,
+ self.rtt_min_expected_rssi_dbm,
+ self.lci_reference, self.lcr_reference)
+ dut.log.debug("Stats=%s", stats)
+
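+    # Verification: the 802.11mc-capable AP must still meet the normal
+    # two-sided RTT criteria, while every attempt to the legacy AP must fail
+    # with the "Responder does not support IEEE 802.11mc" status code.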
+ for bssid, stat in stats.items():
+ asserts.assert_true(stat['num_no_results'] == 0,
+ "Missing (timed-out) results", extras=stats)
+ if bssid == rtt_aps[0][wutils.WifiEnums.BSSID_KEY]:
+ asserts.assert_false(stat['any_lci_mismatch'],
+ "LCI mismatch", extras=stats)
+ asserts.assert_false(stat['any_lcr_mismatch'],
+ "LCR mismatch", extras=stats)
+ asserts.assert_equal(stat['num_invalid_rssi'], 0, "Invalid RSSI",
+ extras=stats)
+ asserts.assert_true(stat['num_failures'] <=
+ self.rtt_max_failure_rate_two_sided_rtt_percentage
+ * stat['num_results'] / 100,
+ "Failure rate is too high", extras=stats)
+ asserts.assert_true(stat['num_range_out_of_margin'] <=
+ self.rtt_max_margin_exceeded_rate_two_sided_rtt_percentage
+ * stat['num_success_results'] / 100,
+ "Results exceeding error margin rate is too high",
+ extras=stats)
+ else:
+ asserts.assert_true(stat['num_failures'] == self.NUM_ITER,
+ "All one-sided RTT requests must fail when executed without privilege",
+ extras=stats)
+ for code in stat['status_codes']:
+ asserts.assert_true(code ==
+ rconsts.EVENT_CB_RANGING_STATUS_RESPONDER_DOES_NOT_SUPPORT_IEEE80211MC,
+ "Expected non-support error code", extras=stats)
+ asserts.explicit_pass("RTT test done", extras=stats)
diff --git a/acts/tests/google/wifi/rtt/functional/RangeApNonSupporting11McTest.py b/acts/tests/google/wifi/rtt/functional/RangeApNonSupporting11McTest.py
new file mode 100644
index 0000000..65b67d2
--- /dev/null
+++ b/acts/tests/google/wifi/rtt/functional/RangeApNonSupporting11McTest.py
@@ -0,0 +1,137 @@
+#!/usr/bin/python3.4
+#
+# Copyright 2017 - The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from acts import asserts
+from acts.test_decorators import test_tracker_info
+from acts.test_utils.wifi.WifiBaseTest import WifiBaseTest
+from acts.test_utils.wifi.rtt import rtt_const as rconsts
+from acts.test_utils.wifi.rtt import rtt_test_utils as rutils
+from acts.test_utils.wifi.rtt.RttBaseTest import RttBaseTest
+
+
+class RangeApNonSupporting11McTest(WifiBaseTest, RttBaseTest):
+ """Test class for RTT ranging to Access Points which do not support IEEE
+ 802.11mc"""
+
+ # Number of RTT iterations
+ NUM_ITER = 10
+
+ # Time gap (in seconds) between iterations
+ TIME_BETWEEN_ITERATIONS = 0
+
+ def __init__(self, controllers):
+ WifiBaseTest.__init__(self, controllers)
+ RttBaseTest.__init__(self, controllers)
+ if "AccessPoint" in self.user_params:
+ self.legacy_configure_ap_and_start()
+
+ #############################################################################
+
+ @test_tracker_info(uuid="cde756e9-11f3-43da-b9ae-9edf85764f82")
+ def test_rtt_non_80211mc_supporting_aps(self):
+ """Scan for APs and perform RTT on non-IEEE 802.11mc supporting APs"""
+ dut = self.android_devices[0]
+ non_rtt_aps = rutils.select_best_scan_results(
+ rutils.scan_with_rtt_support_constraint(dut, False), select_count=1)
+ dut.log.debug("Visible non-IEEE 802.11mc APs=%s", non_rtt_aps)
+ asserts.assert_true(len(non_rtt_aps) > 0, "Need at least one AP!")
+ events = rutils.run_ranging(dut, non_rtt_aps, self.NUM_ITER,
+ self.TIME_BETWEEN_ITERATIONS)
+ stats = rutils.analyze_results(events, self.rtt_reference_distance_mm,
+ self.rtt_reference_distance_margin_mm,
+ self.rtt_min_expected_rssi_dbm,
+ self.lci_reference, self.lcr_reference)
+ dut.log.debug("Stats=%s", stats)
+
+ for bssid, stat in stats.items():
+ asserts.assert_true(stat['num_no_results'] == 0,
+ "Missing (timed-out) results", extras=stats)
+ asserts.assert_false(stat['any_lci_mismatch'],
+ "LCI mismatch", extras=stats)
+ asserts.assert_false(stat['any_lcr_mismatch'],
+ "LCR mismatch", extras=stats)
+ asserts.assert_equal(stat['num_invalid_rssi'], 0, "Invalid RSSI",
+ extras=stats)
+ asserts.assert_true(stat['num_failures'] <=
+ self.rtt_max_failure_rate_one_sided_rtt_percentage
+ * stat['num_results'] / 100,
+ "Failure rate is too high", extras=stats)
+ asserts.assert_true(stat['num_range_out_of_margin'] <=
+ self.rtt_max_margin_exceeded_rate_one_sided_rtt_percentage
+ * stat['num_success_results'] / 100,
+ "Results exceeding error margin rate is too high",
+ extras=stats)
+ asserts.explicit_pass("RTT test done", extras=stats)
+
+ @test_tracker_info(uuid="c9e22185-16d4-4fe6-894f-5823587b3288")
+ def test_rtt_non_80211mc_supporting_aps_wo_privilege(self):
+    """Scan for APs and perform RTT on non-IEEE 802.11mc supporting APs with the
+    device not having privileged access (expect failures)."""
+ dut = self.android_devices[0]
+ rutils.config_privilege_override(dut, True)
+ non_rtt_aps = rutils.select_best_scan_results(
+ rutils.scan_with_rtt_support_constraint(dut, False), select_count=1)
+ dut.log.debug("Visible non-IEEE 802.11mc APs=%s", non_rtt_aps)
+ asserts.assert_true(len(non_rtt_aps) > 0, "Need at least one AP!")
+ events = rutils.run_ranging(dut, non_rtt_aps, self.NUM_ITER,
+ self.TIME_BETWEEN_ITERATIONS)
+ stats = rutils.analyze_results(events, self.rtt_reference_distance_mm,
+ self.rtt_reference_distance_margin_mm,
+ self.rtt_min_expected_rssi_dbm,
+ self.lci_reference, self.lcr_reference)
+ dut.log.debug("Stats=%s", stats)
+
+ for bssid, stat in stats.items():
+ asserts.assert_true(stat['num_no_results'] == 0,
+ "Missing (timed-out) results", extras=stats)
+ asserts.assert_true(stat['num_failures'] == self.NUM_ITER,
+ "All one-sided RTT requests must fail when executed without privilege",
+ extras=stats)
+ for code in stat['status_codes']:
+ asserts.assert_true(code ==
+ rconsts.EVENT_CB_RANGING_STATUS_RESPONDER_DOES_NOT_SUPPORT_IEEE80211MC,
+ "Expected non-support error code", extras=stats)
+ asserts.explicit_pass("RTT test done", extras=stats)
+
+ @test_tracker_info(uuid="e117af56-bd3f-40ae-a2fd-4175f0daa7fa")
+ def test_rtt_non_80211mc_supporting_ap_faked_as_supporting(self):
+    """Scan for APs which do not support IEEE 802.11mc, maliciously modify the
+    Responder config to indicate support, and pass it through to the service.
+    Verify that an error result is returned.
+    """
+ dut = self.android_devices[0]
+ non_rtt_aps = rutils.select_best_scan_results(
+ rutils.scan_with_rtt_support_constraint(dut, False), select_count=1)
+ dut.log.debug("Visible non-IEEE 802.11mc APs=%s", non_rtt_aps)
+ asserts.assert_true(len(non_rtt_aps) > 0, "Need at least one AP!")
+ non_rtt_aps = non_rtt_aps[0:1] # pick first
+ non_rtt_aps[0][rconsts.SCAN_RESULT_KEY_RTT_RESPONDER] = True # falsify
+ dut.log.debug("Visible non-IEEE 802.11mc APs=%s", non_rtt_aps)
+ events = rutils.run_ranging(dut, non_rtt_aps, self.NUM_ITER,
+ self.TIME_BETWEEN_ITERATIONS)
+ stats = rutils.analyze_results(events, self.rtt_reference_distance_mm,
+ self.rtt_reference_distance_margin_mm,
+ self.rtt_min_expected_rssi_dbm,
+ self.lci_reference, self.lcr_reference)
+ dut.log.debug("Stats=%s", stats)
+
+ for bssid, stat in stats.items():
+ asserts.assert_true(stat['num_no_results'] == 0,
+ "Missing (timed-out) results", extras=stats)
+ asserts.assert_true(stat['num_failures'] == self.NUM_ITER,
+ "Failures expected for falsified responder config",
+ extras=stats)
+ asserts.explicit_pass("RTT test done", extras=stats)
diff --git a/acts/tests/google/wifi/rtt/functional/RangeApSupporting11McTest.py b/acts/tests/google/wifi/rtt/functional/RangeApSupporting11McTest.py
new file mode 100644
index 0000000..d889a22
--- /dev/null
+++ b/acts/tests/google/wifi/rtt/functional/RangeApSupporting11McTest.py
@@ -0,0 +1,187 @@
+#!/usr/bin/python3.4
+#
+# Copyright 2017 - The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import queue
+
+from acts import asserts
+from acts.test_decorators import test_tracker_info
+from acts.test_utils.wifi import wifi_test_utils as wutils
+from acts.test_utils.wifi.rtt import rtt_const as rconsts
+from acts.test_utils.wifi.rtt import rtt_test_utils as rutils
+from acts.test_utils.wifi.rtt.RttBaseTest import RttBaseTest
+
+
+class RangeApSupporting11McTest(RttBaseTest):
+ """Test class for RTT ranging to Access Points which support IEEE 802.11mc"""
+
+ # Number of RTT iterations
+ NUM_ITER = 10
+
+ # Time gap (in seconds) between iterations
+ TIME_BETWEEN_ITERATIONS = 0
+
+ def __init__(self, controllers):
+ RttBaseTest.__init__(self, controllers)
+
+ #############################################################################
+
+ @test_tracker_info(uuid="6705270f-924b-4bef-b50a-0f0a7eb9ce52")
+ def test_rtt_80211mc_supporting_aps(self):
+ """Scan for APs and perform RTT only to those which support 802.11mc"""
+ dut = self.android_devices[0]
+ rtt_supporting_aps = rutils.select_best_scan_results(
+ rutils.scan_with_rtt_support_constraint(dut, True, repeat=10),
+ select_count=2)
+ dut.log.debug("RTT Supporting APs=%s", rtt_supporting_aps)
+ events = rutils.run_ranging(dut, rtt_supporting_aps, self.NUM_ITER,
+ self.TIME_BETWEEN_ITERATIONS)
+ stats = rutils.analyze_results(events, self.rtt_reference_distance_mm,
+ self.rtt_reference_distance_margin_mm,
+ self.rtt_min_expected_rssi_dbm,
+ self.lci_reference, self.lcr_reference)
+ dut.log.debug("Stats=%s", stats)
+
+ for bssid, stat in stats.items():
+ asserts.assert_true(stat['num_no_results'] == 0,
+ "Missing (timed-out) results", extras=stats)
+ asserts.assert_false(stat['any_lci_mismatch'],
+ "LCI mismatch", extras=stats)
+ asserts.assert_false(stat['any_lcr_mismatch'],
+ "LCR mismatch", extras=stats)
+ asserts.assert_false(stat['invalid_num_attempted'],
+ "Invalid (0) number of attempts", extras=stats)
+ asserts.assert_false(stat['invalid_num_successful'],
+ "Invalid (0) number of successes", extras=stats)
+ asserts.assert_equal(stat['num_invalid_rssi'], 0, "Invalid RSSI",
+ extras=stats)
+ asserts.assert_true(stat['num_failures'] <=
+ self.rtt_max_failure_rate_two_sided_rtt_percentage
+ * stat['num_results'] / 100,
+ "Failure rate is too high", extras=stats)
+ asserts.assert_true(stat['num_range_out_of_margin'] <=
+ self.rtt_max_margin_exceeded_rate_two_sided_rtt_percentage
+ * stat['num_success_results'] / 100,
+ "Results exceeding error margin rate is too high", extras=stats)
+ asserts.explicit_pass("RTT test done", extras=stats)
+
+ #########################################################################
+ #
+ # LEGACY API test code
+ #
+ #########################################################################
+
+ @test_tracker_info(uuid="18be9737-2f03-4e35-9a23-f722dea7b82d")
+ def test_legacy_rtt_80211mc_supporting_aps(self):
+ """Scan for APs and perform RTT only to those which support 802.11mc - using
+ the LEGACY API!"""
+ dut = self.android_devices[0]
+ rtt_supporting_aps = rutils.select_best_scan_results(
+ rutils.scan_with_rtt_support_constraint(dut, True, repeat=10),
+ select_count=2)
+ dut.log.debug("RTT Supporting APs=%s", rtt_supporting_aps)
+
+ rtt_configs = []
+ for ap in rtt_supporting_aps:
+ rtt_configs.append(self.rtt_config_from_scan_result(ap))
+ dut.log.debug("RTT configs=%s", rtt_configs)
+
+ results = []
+ num_missing = 0
+ num_failed_aborted = 0
+ for i in range(self.NUM_ITER):
+ idx = dut.droid.wifiRttStartRanging(rtt_configs)
+ event = None
+ try:
+ events = dut.ed.pop_events("WifiRttRanging%d" % idx, 30)
+ dut.log.debug("Event=%s", events)
+ for event in events:
+ if rconsts.EVENT_CB_RANGING_KEY_RESULTS in event["data"]:
+ results.append(
+ event["data"][rconsts.EVENT_CB_RANGING_KEY_RESULTS])
+ else:
+ self.log.info("RTT failed/aborted - %s", event)
+ results.append([])
+ num_failed_aborted = num_failed_aborted + 1
+ except queue.Empty:
+ self.log.debug("Waiting for RTT event timed out.")
+ results.append([])
+ num_missing = num_missing + 1
+
+ # basic error checking:
+ # 1. no missing
+ # 2. no full failed/aborted (i.e. operation not even tried)
+ # 3. overall (all BSSIDs) success rate > threshold
+ asserts.assert_equal(num_missing, 0,
+ "Missing results (timeout waiting for event)",
+ extras={"data":results})
+ asserts.assert_equal(num_failed_aborted, 0,
+ "Failed or aborted operations (not tried)",
+ extras={"data":results})
+
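+    # Tally the total number of per-AP results and the number of per-result
+    # errors across all iterations.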
+ num_results = 0
+ num_errors = 0
+ for result_group in results:
+ num_results = num_results + len(result_group)
+ for result in result_group:
+ if result["status"] != 0:
+ num_errors = num_errors + 1
+
+ extras = [results, {"num_results": num_results, "num_errors": num_errors}]
+ asserts.assert_true(
+ num_errors <= self.rtt_max_failure_rate_two_sided_rtt_percentage
+ * num_results / 100,
+ "Failure rate is too high", extras={"data":extras})
+ asserts.explicit_pass("RTT test done", extras={"data": extras})
+
+ def rtt_config_from_scan_result(self, scan_result):
+    """Creates an RTT configuration based on the scan result of a network.
+ """
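+    # Sketch of the dict this helper builds for a hypothetical 5 GHz, 80 MHz,
+    # 802.11mc-capable scan result (keys are WifiEnums.RttParam values):
+    #   frequency=5180, BSSID=<AP BSSID>, preamble=PREAMBLE_VHT,
+    #   center_freq0=5210, channel_width=<ScanResult channelWidth>,
+    #   bandwidth=BW_80_SUPPORT, request_type=TYPE_TWO_SIDED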
+ WifiEnums = wutils.WifiEnums
+ ScanResult = WifiEnums.ScanResult
+ RttParam = WifiEnums.RttParam
+ RttBW = WifiEnums.RttBW
+ RttPreamble = WifiEnums.RttPreamble
+ RttType = WifiEnums.RttType
+
+ scan_result_channel_width_to_rtt = {
+ ScanResult.CHANNEL_WIDTH_20MHZ: RttBW.BW_20_SUPPORT,
+ ScanResult.CHANNEL_WIDTH_40MHZ: RttBW.BW_40_SUPPORT,
+ ScanResult.CHANNEL_WIDTH_80MHZ: RttBW.BW_80_SUPPORT,
+ ScanResult.CHANNEL_WIDTH_160MHZ: RttBW.BW_160_SUPPORT,
+ ScanResult.CHANNEL_WIDTH_80MHZ_PLUS_MHZ: RttBW.BW_160_SUPPORT
+ }
+ p = {}
+ freq = scan_result[RttParam.frequency]
+ p[RttParam.frequency] = freq
+ p[RttParam.BSSID] = scan_result[WifiEnums.BSSID_KEY]
+ if freq > 5000:
+ p[RttParam.preamble] = RttPreamble.PREAMBLE_VHT
+ else:
+ p[RttParam.preamble] = RttPreamble.PREAMBLE_HT
+ cf0 = scan_result[RttParam.center_freq0]
+ if cf0 > 0:
+ p[RttParam.center_freq0] = cf0
+ cf1 = scan_result[RttParam.center_freq1]
+ if cf1 > 0:
+ p[RttParam.center_freq1] = cf1
+ cw = scan_result["channelWidth"]
+ p[RttParam.channel_width] = cw
+ p[RttParam.bandwidth] = scan_result_channel_width_to_rtt[cw]
+ if scan_result["is80211McRTTResponder"]:
+ p[RttParam.request_type] = RttType.TYPE_TWO_SIDED
+ else:
+ p[RttParam.request_type] = RttType.TYPE_ONE_SIDED
+ return p
diff --git a/acts/tests/google/wifi/rtt/functional/RangeAwareTest.py b/acts/tests/google/wifi/rtt/functional/RangeAwareTest.py
new file mode 100644
index 0000000..d4b7d41
--- /dev/null
+++ b/acts/tests/google/wifi/rtt/functional/RangeAwareTest.py
@@ -0,0 +1,409 @@
+#!/usr/bin/python3.4
+#
+# Copyright 2017 - The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import queue
+import time
+
+from acts import asserts
+from acts.test_decorators import test_tracker_info
+from acts.test_utils.wifi.aware import aware_const as aconsts
+from acts.test_utils.wifi.aware import aware_test_utils as autils
+from acts.test_utils.wifi.aware.AwareBaseTest import AwareBaseTest
+from acts.test_utils.wifi.rtt import rtt_const as rconsts
+from acts.test_utils.wifi.rtt import rtt_test_utils as rutils
+from acts.test_utils.wifi.rtt.RttBaseTest import RttBaseTest
+
+
+class RangeAwareTest(AwareBaseTest, RttBaseTest):
+ """Test class for RTT ranging to Wi-Fi Aware peers"""
+ SERVICE_NAME = "GoogleTestServiceXY"
+
+ # Number of RTT iterations
+ NUM_ITER = 10
+
+ # Time gap (in seconds) between iterations
+ TIME_BETWEEN_ITERATIONS = 0
+
+ # Time gap (in seconds) when switching between Initiator and Responder
+ TIME_BETWEEN_ROLES = 4
+
+ def __init__(self, controllers):
+ AwareBaseTest.__init__(self, controllers)
+ RttBaseTest.__init__(self, controllers)
+
+ def setup_test(self):
+ """Manual setup here due to multiple inheritance: explicitly execute the
+ setup method from both parents."""
+ AwareBaseTest.setup_test(self)
+ RttBaseTest.setup_test(self)
+
+ def teardown_test(self):
+ """Manual teardown here due to multiple inheritance: explicitly execute the
+ teardown method from both parents."""
+ AwareBaseTest.teardown_test(self)
+ RttBaseTest.teardown_test(self)
+
+ #############################################################################
+
+ def run_rtt_discovery(self, init_dut, resp_mac=None, resp_peer_id=None):
+    """Perform a single RTT measurement, using Aware, from the Initiator DUT to
+    a Responder. The RTT Responder can be specified using its MAC address
+    (obtained using out-of-band discovery) or its Peer ID (using Aware
+    discovery).
+
+    Args:
+      init_dut: RTT Initiator device
+      resp_mac: MAC address of the RTT Responder device
+      resp_peer_id: Peer ID of the RTT Responder device
+
+    Returns: the first ranging result entry on success, or None if no result
+    event was received before the timeout.
+    """
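+    # Illustrative call patterns (sketch; values are hypothetical):
+    #   self.run_rtt_discovery(dut, resp_mac=peer_mac)       # OOB-discovered peer
+    #   self.run_rtt_discovery(dut, resp_peer_id=peer_id)    # Aware-discovered peer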
+ asserts.assert_true(resp_mac is not None or resp_peer_id is not None,
+ "One of the Responder specifications (MAC or Peer ID)"
+ " must be provided!")
+ if resp_mac is not None:
+ id = init_dut.droid.wifiRttStartRangingToAwarePeerMac(resp_mac)
+ else:
+ id = init_dut.droid.wifiRttStartRangingToAwarePeerId(resp_peer_id)
+ try:
+ event = init_dut.ed.pop_event(rutils.decorate_event(
+ rconsts.EVENT_CB_RANGING_ON_RESULT, id), rutils.EVENT_TIMEOUT)
+ result = event["data"][rconsts.EVENT_CB_RANGING_KEY_RESULTS][0]
+ if resp_mac is not None:
+ rutils.validate_aware_mac_result(result, resp_mac, "DUT")
+ else:
+ rutils.validate_aware_peer_id_result(result, resp_peer_id, "DUT")
+ return result
+ except queue.Empty:
+ return None
+
+ def run_rtt_ib_discovery_set(self, do_both_directions, iter_count,
+ time_between_iterations, time_between_roles):
+ """Perform a set of RTT measurements, using in-band (Aware) discovery.
+
+ Args:
+ do_both_directions: False - perform all measurements in one direction,
+        True - perform 2 measurements per iteration, one in each direction.
+ iter_count: Number of measurements to perform.
+ time_between_iterations: Number of seconds to wait between iterations.
+ time_between_roles: Number of seconds to wait when switching between
+ Initiator and Responder roles (only matters if
+ do_both_directions=True).
+
+ Returns: a list of the events containing the RTT results (or None for a
+ failed measurement). If both directions are tested then returns a list of
+ 2 elements: one set for each direction.
+ """
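+    # Usage sketch (mirrors the in-band discovery tests below):
+    #   results_ps, results_sp = self.run_rtt_ib_discovery_set(
+    #       do_both_directions=True, iter_count=self.NUM_ITER,
+    #       time_between_iterations=self.TIME_BETWEEN_ITERATIONS,
+    #       time_between_roles=self.TIME_BETWEEN_ROLES)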
+ p_dut = self.android_devices[0]
+ s_dut = self.android_devices[1]
+
+ (p_id, s_id, p_disc_id, s_disc_id,
+ peer_id_on_sub, peer_id_on_pub) = autils.create_discovery_pair(
+ p_dut,
+ s_dut,
+ p_config=autils.add_ranging_to_pub(autils.create_discovery_config(
+ self.SERVICE_NAME, aconsts.PUBLISH_TYPE_UNSOLICITED), True),
+ s_config=autils.add_ranging_to_pub(autils.create_discovery_config(
+ self.SERVICE_NAME, aconsts.SUBSCRIBE_TYPE_PASSIVE), True),
+ device_startup_offset=self.device_startup_offset,
+ msg_id=self.get_next_msg_id())
+
+ resultsPS = []
+ resultsSP = []
+ for i in range(iter_count):
+ if i != 0 and time_between_iterations != 0:
+ time.sleep(time_between_iterations)
+
+ # perform RTT from pub -> sub
+ resultsPS.append(
+ self.run_rtt_discovery(p_dut, resp_peer_id=peer_id_on_pub))
+
+ if do_both_directions:
+ if time_between_roles != 0:
+ time.sleep(time_between_roles)
+
+ # perform RTT from sub -> pub
+ resultsSP.append(
+ self.run_rtt_discovery(s_dut, resp_peer_id=peer_id_on_sub))
+
+ return resultsPS if not do_both_directions else [resultsPS, resultsSP]
+
+ def run_rtt_oob_discovery_set(self, do_both_directions, iter_count,
+ time_between_iterations, time_between_roles):
+ """Perform a set of RTT measurements, using out-of-band discovery.
+
+ Args:
+ do_both_directions: False - perform all measurements in one direction,
+        True - perform 2 measurements per iteration, one in each direction.
+ iter_count: Number of measurements to perform.
+ time_between_iterations: Number of seconds to wait between iterations.
+ time_between_roles: Number of seconds to wait when switching between
+ Initiator and Responder roles (only matters if
+ do_both_directions=True).
+
+ Returns: a list of the events containing the RTT results (or None for a
+ failed measurement). If both directions are tested then returns a list of
+ 2 elements: one set for each direction.
+ """
+ dut0 = self.android_devices[0]
+ dut1 = self.android_devices[1]
+
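+    # Attach to the Aware service on each device and record its discovery MAC
+    # address; the "out-of-band" exchange here is simply the test script
+    # passing the MACs between devices rather than using Aware discovery.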
+ id0, mac0 = autils.attach_with_identity(dut0)
+ id1, mac1 = autils.attach_with_identity(dut1)
+
+    # wait for the devices to synchronize with each other - there are no other
+    # mechanisms to make sure this happens for OOB discovery (except retrying
+    # the ranging request)
+ time.sleep(autils.WAIT_FOR_CLUSTER)
+
+ # start publisher(s) on the Responder(s) with ranging enabled
+ p_config = autils.add_ranging_to_pub(
+ autils.create_discovery_config(self.SERVICE_NAME,
+ aconsts.PUBLISH_TYPE_UNSOLICITED),
+ enable_ranging=True)
+ dut1.droid.wifiAwarePublish(id1, p_config)
+ autils.wait_for_event(dut1, aconsts.SESSION_CB_ON_PUBLISH_STARTED)
+ if do_both_directions:
+ dut0.droid.wifiAwarePublish(id0, p_config)
+ autils.wait_for_event(dut0, aconsts.SESSION_CB_ON_PUBLISH_STARTED)
+
+ results01 = []
+ results10 = []
+ for i in range(iter_count):
+ if i != 0 and time_between_iterations != 0:
+ time.sleep(time_between_iterations)
+
+ # perform RTT from dut0 -> dut1
+ results01.append(
+ self.run_rtt_discovery(dut0, resp_mac=mac1))
+
+ if do_both_directions:
+ if time_between_roles != 0:
+ time.sleep(time_between_roles)
+
+ # perform RTT from dut1 -> dut0
+ results10.append(
+ self.run_rtt_discovery(dut1, resp_mac=mac0))
+
+ return results01 if not do_both_directions else [results01, results10]
+
+ def verify_results(self, results, results_reverse_direction=None):
+ """Verifies the results of the RTT experiment.
+
+ Args:
+ results: List of RTT results.
+ results_reverse_direction: List of RTT results executed in the
+ reverse direction. Optional.
+ """
+ stats = rutils.extract_stats(results, self.rtt_reference_distance_mm,
+ self.rtt_reference_distance_margin_mm,
+ self.rtt_min_expected_rssi_dbm)
+ stats_reverse_direction = None
+ if results_reverse_direction is not None:
+ stats_reverse_direction = rutils.extract_stats(results_reverse_direction,
+ self.rtt_reference_distance_mm, self.rtt_reference_distance_margin_mm,
+ self.rtt_min_expected_rssi_dbm)
+ self.log.debug("Stats: %s", stats)
+ if stats_reverse_direction is not None:
+ self.log.debug("Stats in reverse direction: %s", stats_reverse_direction)
+
+ extras = stats if stats_reverse_direction is None else {
+ "forward": stats,
+ "reverse": stats_reverse_direction}
+
+ asserts.assert_true(stats['num_no_results'] == 0,
+ "Missing (timed-out) results", extras=extras)
+ asserts.assert_false(stats['any_lci_mismatch'],
+ "LCI mismatch", extras=extras)
+ asserts.assert_false(stats['any_lcr_mismatch'],
+ "LCR mismatch", extras=extras)
+    asserts.assert_false(stats['invalid_num_attempted'],
+                         "Invalid (0) number of attempts", extras=extras)
+    asserts.assert_false(stats['invalid_num_successful'],
+                         "Invalid (0) number of successes", extras=extras)
+ asserts.assert_equal(stats['num_invalid_rssi'], 0, "Invalid RSSI",
+ extras=extras)
+ asserts.assert_true(
+ stats['num_failures'] <=
+ self.rtt_max_failure_rate_two_sided_rtt_percentage
+ * stats['num_results'] / 100,
+ "Failure rate is too high", extras=extras)
+ asserts.assert_true(
+ stats['num_range_out_of_margin']
+ <= self.rtt_max_margin_exceeded_rate_two_sided_rtt_percentage
+ * stats['num_success_results'] / 100,
+ "Results exceeding error margin rate is too high", extras=extras)
+
+ if stats_reverse_direction is not None:
+ asserts.assert_true(stats_reverse_direction['num_no_results'] == 0,
+ "Missing (timed-out) results",
+ extras=extras)
+      asserts.assert_false(stats_reverse_direction['any_lci_mismatch'],
+                           "LCI mismatch", extras=extras)
+      asserts.assert_false(stats_reverse_direction['any_lcr_mismatch'],
+                           "LCR mismatch", extras=extras)
+      asserts.assert_equal(stats_reverse_direction['num_invalid_rssi'], 0,
+                           "Invalid RSSI", extras=extras)
+      asserts.assert_true(
+          stats_reverse_direction['num_failures']
+          <= self.rtt_max_failure_rate_two_sided_rtt_percentage
+          * stats_reverse_direction['num_results'] / 100,
+          "Failure rate is too high", extras=extras)
+      asserts.assert_true(
+          stats_reverse_direction['num_range_out_of_margin']
+          <= self.rtt_max_margin_exceeded_rate_two_sided_rtt_percentage
+          * stats_reverse_direction['num_success_results'] / 100,
+          "Results exceeding error margin rate is too high",
+          extras=extras)
+
+ asserts.explicit_pass("RTT Aware test done", extras=extras)
+
+ #############################################################################
+
+ @test_tracker_info(uuid="9e4e7ab4-2254-498c-9788-21e15ed9a370")
+ def test_rtt_oob_discovery_one_way(self):
+ """Perform RTT between 2 Wi-Fi Aware devices. Use out-of-band discovery
+ to communicate the MAC addresses to the peer. Test one-direction RTT only.
+ """
+ rtt_results = self.run_rtt_oob_discovery_set(do_both_directions=False,
+ iter_count=self.NUM_ITER,
+ time_between_iterations=self.TIME_BETWEEN_ITERATIONS,
+ time_between_roles=self.TIME_BETWEEN_ROLES)
+ self.verify_results(rtt_results)
+
+ @test_tracker_info(uuid="22edba77-eeb2-43ee-875a-84437550ad84")
+ def test_rtt_oob_discovery_both_ways(self):
+ """Perform RTT between 2 Wi-Fi Aware devices. Use out-of-band discovery
+ to communicate the MAC addresses to the peer. Test RTT both-ways:
+ switching rapidly between Initiator and Responder.
+ """
+ rtt_results1, rtt_results2 = self.run_rtt_oob_discovery_set(
+ do_both_directions=True, iter_count=self.NUM_ITER,
+ time_between_iterations=self.TIME_BETWEEN_ITERATIONS,
+ time_between_roles=self.TIME_BETWEEN_ROLES)
+ self.verify_results(rtt_results1, rtt_results2)
+
+ @test_tracker_info(uuid="18cef4be-95b4-4f7d-a140-5165874e7d1c")
+ def test_rtt_ib_discovery_one_way(self):
+ """Perform RTT between 2 Wi-Fi Aware devices. Use in-band (Aware) discovery
+ to communicate the MAC addresses to the peer. Test one-direction RTT only.
+ """
+ rtt_results = self.run_rtt_ib_discovery_set(do_both_directions=False,
+ iter_count=self.NUM_ITER,
+ time_between_iterations=self.TIME_BETWEEN_ITERATIONS,
+ time_between_roles=self.TIME_BETWEEN_ROLES)
+ self.verify_results(rtt_results)
+
+ @test_tracker_info(uuid="c67c8e70-c417-42d9-9bca-af3a89f1ddd9")
+ def test_rtt_ib_discovery_both_ways(self):
+ """Perform RTT between 2 Wi-Fi Aware devices. Use in-band (Aware) discovery
+ to communicate the MAC addresses to the peer. Test RTT both-ways:
+ switching rapidly between Initiator and Responder.
+ """
+ rtt_results1, rtt_results2 = self.run_rtt_ib_discovery_set(
+ do_both_directions=True, iter_count=self.NUM_ITER,
+ time_between_iterations=self.TIME_BETWEEN_ITERATIONS,
+ time_between_roles=self.TIME_BETWEEN_ROLES)
+ self.verify_results(rtt_results1, rtt_results2)
+
+ @test_tracker_info(uuid="54f9693d-45e5-4979-adbb-1b875d217c0c")
+ def test_rtt_without_initiator_aware(self):
+ """Try to perform RTT operation when there is no local Aware session (on the
+ Initiator). The Responder is configured normally: Aware on and a Publisher
+    with Ranging enabled. Should FAIL."""
+ init_dut = self.android_devices[0]
+ resp_dut = self.android_devices[1]
+
+ # Enable a Responder and start a Publisher
+ resp_id = resp_dut.droid.wifiAwareAttach(True)
+ autils.wait_for_event(resp_dut, aconsts.EVENT_CB_ON_ATTACHED)
+ resp_ident_event = autils.wait_for_event(resp_dut,
+ aconsts.EVENT_CB_ON_IDENTITY_CHANGED)
+ resp_mac = resp_ident_event['data']['mac']
+
+ resp_config = autils.add_ranging_to_pub(
+ autils.create_discovery_config(self.SERVICE_NAME,
+ aconsts.PUBLISH_TYPE_UNSOLICITED),
+ enable_ranging=True)
+ resp_dut.droid.wifiAwarePublish(resp_id, resp_config)
+ autils.wait_for_event(resp_dut, aconsts.SESSION_CB_ON_PUBLISH_STARTED)
+
+ # Initiate an RTT to Responder (no Aware started on Initiator!)
+ results = []
+ num_no_responses = 0
+ num_successes = 0
+ for i in range(self.NUM_ITER):
+ result = self.run_rtt_discovery(init_dut, resp_mac=resp_mac)
+ self.log.debug("result: %s", result)
+ results.append(result)
+ if result is None:
+ num_no_responses = num_no_responses + 1
+ elif (result[rconsts.EVENT_CB_RANGING_KEY_STATUS]
+ == rconsts.EVENT_CB_RANGING_STATUS_SUCCESS):
+ num_successes = num_successes + 1
+
+ asserts.assert_equal(num_no_responses, 0, "No RTT response?",
+ extras={"data":results})
+ asserts.assert_equal(num_successes, 0, "Aware RTT w/o Aware should FAIL!",
+ extras={"data":results})
+ asserts.explicit_pass("RTT Aware test done", extras={"data":results})
+
+ @test_tracker_info(uuid="87a69053-8261-4928-8ec1-c93aac7f3a8d")
+ def test_rtt_without_responder_aware(self):
+ """Try to perform RTT operation when there is no peer Aware session (on the
+ Responder). Should FAIL."""
+ init_dut = self.android_devices[0]
+ resp_dut = self.android_devices[1]
+
+ # Enable a Responder and start a Publisher
+ resp_id = resp_dut.droid.wifiAwareAttach(True)
+ autils.wait_for_event(resp_dut, aconsts.EVENT_CB_ON_ATTACHED)
+ resp_ident_event = autils.wait_for_event(resp_dut,
+ aconsts.EVENT_CB_ON_IDENTITY_CHANGED)
+ resp_mac = resp_ident_event['data']['mac']
+
+ resp_config = autils.add_ranging_to_pub(
+ autils.create_discovery_config(self.SERVICE_NAME,
+ aconsts.PUBLISH_TYPE_UNSOLICITED),
+ enable_ranging=True)
+ resp_dut.droid.wifiAwarePublish(resp_id, resp_config)
+ autils.wait_for_event(resp_dut, aconsts.SESSION_CB_ON_PUBLISH_STARTED)
+
+ # Disable Responder
+ resp_dut.droid.wifiAwareDestroy(resp_id)
+
+ # Enable the Initiator
+ init_id = init_dut.droid.wifiAwareAttach()
+ autils.wait_for_event(init_dut, aconsts.EVENT_CB_ON_ATTACHED)
+
+    # Initiate an RTT to the Responder (whose Aware session was destroyed)
+ results = []
+ num_no_responses = 0
+ num_successes = 0
+ for i in range(self.NUM_ITER):
+ result = self.run_rtt_discovery(init_dut, resp_mac=resp_mac)
+ self.log.debug("result: %s", result)
+ results.append(result)
+ if result is None:
+ num_no_responses = num_no_responses + 1
+ elif (result[rconsts.EVENT_CB_RANGING_KEY_STATUS]
+ == rconsts.EVENT_CB_RANGING_STATUS_SUCCESS):
+ num_successes = num_successes + 1
+
+ asserts.assert_equal(num_no_responses, 0, "No RTT response?",
+ extras={"data":results})
+ asserts.assert_equal(num_successes, 0, "Aware RTT w/o Aware should FAIL!",
+ extras={"data":results})
+ asserts.explicit_pass("RTT Aware test done", extras={"data":results})
diff --git a/acts/tests/google/wifi/rtt/functional/RangeSoftApTest.py b/acts/tests/google/wifi/rtt/functional/RangeSoftApTest.py
new file mode 100644
index 0000000..f0c4f4c
--- /dev/null
+++ b/acts/tests/google/wifi/rtt/functional/RangeSoftApTest.py
@@ -0,0 +1,95 @@
+#!/usr/bin/python3.4
+#
+# Copyright 2018 - The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from acts import asserts
+from acts.test_decorators import test_tracker_info
+from acts.test_utils.tel.tel_test_utils import WIFI_CONFIG_APBAND_5G
+from acts.test_utils.wifi import wifi_test_utils as wutils
+from acts.test_utils.wifi.rtt import rtt_const as rconsts
+from acts.test_utils.wifi.rtt import rtt_test_utils as rutils
+from acts.test_utils.wifi.rtt.RttBaseTest import RttBaseTest
+
+
+class RangeSoftApTest(RttBaseTest):
+ """Test class for RTT ranging to an Android Soft AP."""
+
+ # Soft AP SSID
+ SOFT_AP_SSID = "RTT_TEST_SSID"
+
+ # Soft AP Password (irrelevant)
+ SOFT_AP_PASSWORD = "ABCDEFGH"
+
+ # Number of RTT iterations
+ NUM_ITER = 10
+
+ def __init__(self, controllers):
+ RttBaseTest.__init__(self, controllers)
+
+ #########################################################################
+
+ @test_tracker_info(uuid="578f0725-31e3-4e60-ad62-0212d93cf5b8")
+ def test_rtt_to_soft_ap(self):
+    """Set up a Soft AP on one device and try performing RTT ranging to it
+ from another device. The attempt must fail - RTT on Soft AP must be
+ disabled."""
+ sap = self.android_devices[0]
+ sap.pretty_name = "SoftAP"
+ client = self.android_devices[1]
+ client.pretty_name = "Client"
+
+ # start Soft AP
+ wutils.start_wifi_tethering(sap, self.SOFT_AP_SSID, self.SOFT_AP_PASSWORD,
+ band=WIFI_CONFIG_APBAND_5G, hidden=False)
+
+ try:
+ # start scanning on the client
+ wutils.start_wifi_connection_scan_and_ensure_network_found(client,
+ self.SOFT_AP_SSID)
+ scans = client.droid.wifiGetScanResults()
+ scanned_softap = None
+ for scanned_ap in scans:
+ if scanned_ap[wutils.WifiEnums.SSID_KEY] == self.SOFT_AP_SSID:
+ scanned_softap = scanned_ap
+ break
+
+      asserts.assert_false(scanned_softap is None, "Soft AP not found in scan!",
+ extras=scans)
+
+ # validate that Soft AP does not advertise 802.11mc support
+ asserts.assert_false(
+ rconsts.SCAN_RESULT_KEY_RTT_RESPONDER in scanned_softap and
+ scanned_softap[rconsts.SCAN_RESULT_KEY_RTT_RESPONDER],
+ "Soft AP advertises itself as supporting 802.11mc!",
+ extras=scanned_softap)
+
+      # falsify the Soft AP's support for IEEE 802.11mc so we try a 2-sided RTT
+ scanned_softap[rconsts.SCAN_RESULT_KEY_RTT_RESPONDER] = True # falsify
+
+ # actually try ranging to the Soft AP
+ events = rutils.run_ranging(client, [scanned_softap], self.NUM_ITER, 0)
+ stats = rutils.analyze_results(events, self.rtt_reference_distance_mm,
+ self.rtt_reference_distance_margin_mm,
+ self.rtt_min_expected_rssi_dbm,
+ self.lci_reference, self.lcr_reference)
+
+ asserts.assert_equal(
+          stats[scanned_softap[wutils.WifiEnums.BSSID_KEY]]['num_failures'],
+ self.NUM_ITER, "Some RTT operations to Soft AP succeed!?",
+ extras=stats)
+
+ asserts.explicit_pass("SoftAP + RTT validation done", extras=events)
+ finally:
+ wutils.stop_wifi_tethering(sap)
diff --git a/acts/tests/google/wifi/rtt/functional/RttDisableTest.py b/acts/tests/google/wifi/rtt/functional/RttDisableTest.py
new file mode 100644
index 0000000..1816cd5
--- /dev/null
+++ b/acts/tests/google/wifi/rtt/functional/RttDisableTest.py
@@ -0,0 +1,109 @@
+#!/usr/bin/python3.4
+#
+# Copyright 2017 - The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from acts import asserts
+from acts import utils
+from acts.test_decorators import test_tracker_info
+from acts.test_utils.wifi.rtt import rtt_const as rconsts
+from acts.test_utils.wifi.rtt import rtt_test_utils as rutils
+from acts.test_utils.wifi.rtt.RttBaseTest import RttBaseTest
+from acts.test_utils.wifi.WifiBaseTest import WifiBaseTest
+
+
+class RttDisableTest(WifiBaseTest, RttBaseTest):
+ """Test class for RTT ranging enable/disable flows."""
+
+ MODE_DISABLE_WIFI = 0
+ MODE_ENABLE_DOZE = 1
+ MODE_DISABLE_LOCATIONING = 2
+
+ def __init__(self, controllers):
+ WifiBaseTest.__init__(self, controllers)
+ RttBaseTest.__init__(self, controllers)
+ if "AccessPoint" in self.user_params:
+ self.legacy_configure_ap_and_start()
+
+ def run_disable_rtt(self, disable_mode):
+    """Validate the RTT disabled flows: disabling Wi-Fi, entering doze mode,
+    or disabling locationing.
+
+ Args:
+ disable_mode: The particular mechanism in which RTT is disabled. One of
+ the MODE_* constants.
+ """
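+    # Flow (sketch): disable RTT via the selected mechanism, expect the
+    # "not available" broadcast and API state, verify that a range request
+    # fails with the RTT-not-available error code, then re-enable and expect
+    # the "available" broadcast and API state.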
+ dut = self.android_devices[0]
+
+ # validate start-up conditions
+ asserts.assert_true(dut.droid.wifiIsRttAvailable(), "RTT is not available")
+
+ # scan to get some APs to be used later
+ all_aps = rutils.select_best_scan_results(rutils.scan_networks(dut),
+ select_count=1)
+ asserts.assert_true(len(all_aps) > 0, "Need at least one visible AP!")
+
+ # disable RTT and validate broadcast & API
+ if disable_mode == self.MODE_DISABLE_WIFI:
+      # disabling Wi-Fi is not sufficient, since scan mode (and hence RTT) will
+      # remain enabled - we need to disable the Wi-Fi chip, i.e. enable
+      # Airplane Mode
+ asserts.assert_true(utils.force_airplane_mode(dut, True),
+ "Can not turn on airplane mode on: %s" % dut.serial)
+ elif disable_mode == self.MODE_ENABLE_DOZE:
+ asserts.assert_true(utils.enable_doze(dut), "Can't enable doze")
+ elif disable_mode == self.MODE_DISABLE_LOCATIONING:
+ utils.set_location_service(dut, False)
+
+ rutils.wait_for_event(dut, rconsts.BROADCAST_WIFI_RTT_NOT_AVAILABLE)
+ asserts.assert_false(dut.droid.wifiIsRttAvailable(), "RTT is available")
+
+ # request a range and validate error
+ id = dut.droid.wifiRttStartRangingToAccessPoints(all_aps[0:1])
+ event = rutils.wait_for_event(dut, rutils.decorate_event(
+ rconsts.EVENT_CB_RANGING_ON_FAIL, id))
+ asserts.assert_equal(event["data"][rconsts.EVENT_CB_RANGING_KEY_STATUS],
+ rconsts.RANGING_FAIL_CODE_RTT_NOT_AVAILABLE,
+ "Invalid error code")
+
+ # enable RTT and validate broadcast & API
+ if disable_mode == self.MODE_DISABLE_WIFI:
+ asserts.assert_true(utils.force_airplane_mode(dut, False),
+ "Can not turn off airplane mode on: %s" % dut.serial)
+ elif disable_mode == self.MODE_ENABLE_DOZE:
+ asserts.assert_true(utils.disable_doze(dut), "Can't disable doze")
+ elif disable_mode == self.MODE_DISABLE_LOCATIONING:
+ utils.set_location_service(dut, True)
+
+ rutils.wait_for_event(dut, rconsts.BROADCAST_WIFI_RTT_AVAILABLE)
+ asserts.assert_true(dut.droid.wifiIsRttAvailable(), "RTT is not available")
+
+ ############################################################################
+
+ @test_tracker_info(uuid="498c49ab-a188-4612-998d-c47b35ff285e")
+ def test_disable_wifi(self):
+    """Validate that the expected broadcast is received when Wi-Fi is disabled
+    and that any range requests are rejected."""
+ self.run_disable_rtt(self.MODE_DISABLE_WIFI)
+
+ @test_tracker_info(uuid="f71f731f-4aaf-402b-8595-db94b625b544")
+ def test_enable_doze(self):
+    """Validate that the expected broadcast is received when RTT is disabled
+    due to doze mode and that any range requests are rejected."""
+ self.run_disable_rtt(self.MODE_ENABLE_DOZE)
+
+ @test_tracker_info(uuid="6a1c83a8-9eaf-49db-b547-5131cba0eafe")
+ def test_disable_location(self):
+    """Validate that the expected broadcast is received when locationing is
+    disabled and that any range requests are rejected."""
+ self.run_disable_rtt(self.MODE_DISABLE_LOCATIONING)
diff --git a/acts/tests/google/wifi/rtt/functional/RttRequestManagementTest.py b/acts/tests/google/wifi/rtt/functional/RttRequestManagementTest.py
new file mode 100644
index 0000000..82c1058
--- /dev/null
+++ b/acts/tests/google/wifi/rtt/functional/RttRequestManagementTest.py
@@ -0,0 +1,140 @@
+#!/usr/bin/python3.4
+#
+# Copyright 2017 - The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import random
+import time
+
+from acts import asserts
+from acts.test_decorators import test_tracker_info
+from acts.test_utils.wifi.rtt import rtt_const as rconsts
+from acts.test_utils.wifi.rtt import rtt_test_utils as rutils
+from acts.test_utils.wifi.rtt.RttBaseTest import RttBaseTest
+
+
+class RttRequestManagementTest(RttBaseTest):
+ """Test class for RTT request management flows."""
+
+ SPAMMING_LIMIT = 20
+
+ def __init__(self, controllers):
+ RttBaseTest.__init__(self, controllers)
+
+ #############################################################################
+
+ @test_tracker_info(uuid="29ff4a02-2952-47df-bf56-64f30c963093")
+ def test_cancel_ranging(self):
+ """Request a 'large' number of range operations with various UIDs (using the
+ work-source API), then cancel some of them.
+
+    We can't guarantee a reaction time - it is possible that a cancelled test
+    was already finished and its results dispatched back. The test therefore
+    stacks the request queue. The sequence is:
+
+    - Request:
+      - SPAMMING_LIMIT tests @ UIDs = {uid1, uid2, uid3}
+      - 2 tests @ UIDs = {uid2, uid3}
+      - 1 test @ UIDs = {uid1, uid2, uid3}
+    - Cancel UIDs = {uid2, uid3}
+
+    Expect to receive results only for the SPAMMING_LIMIT + 1 requests which
+    were not cancelled.
+    """
+ dut = self.android_devices[0]
+ max_peers = dut.droid.wifiRttMaxPeersInRequest()
+
+ all_uids = [1000, 20, 30] # 1000 = System Server (makes requests foreground)
+ some_uids = [20, 30]
+
+ aps = rutils.select_best_scan_results(
+ rutils.scan_with_rtt_support_constraint(dut, True, repeat=10),
+ select_count=1)
+ dut.log.info("RTT Supporting APs=%s", aps)
+
+ asserts.assert_true(
+ len(aps) > 0,
+ "Need at least one AP which supports 802.11mc!")
+ if len(aps) > max_peers:
+ aps = aps[0:max_peers]
+
+ group1_ids = []
+ group2_ids = []
+ group3_ids = []
+
+ # step 1: request <spam_limit> ranging operations on [uid1, uid2, uid3]
+ for i in range(self.SPAMMING_LIMIT):
+ group1_ids.append(
+ dut.droid.wifiRttStartRangingToAccessPoints(aps, all_uids))
+
+ # step 2: request 2 ranging operations on [uid2, uid3]
+ for i in range(2):
+ group2_ids.append(
+ dut.droid.wifiRttStartRangingToAccessPoints(aps, some_uids))
+
+ # step 3: request 1 ranging operation on [uid1, uid2, uid3]
+ for i in range(1):
+ group3_ids.append(
+ dut.droid.wifiRttStartRangingToAccessPoints(aps, all_uids))
+
+ # step 4: cancel ranging requests on [uid2, uid3]
+ dut.droid.wifiRttCancelRanging(some_uids)
+
+ # collect results
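+    # Group 1 and group 3 requests were not cancelled, so each must deliver a
+    # result; group 2 requests were cancelled and must never deliver one.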
+ for i in range(len(group1_ids)):
+ rutils.wait_for_event(dut, rutils.decorate_event(
+ rconsts.EVENT_CB_RANGING_ON_RESULT, group1_ids[i]))
+ time.sleep(rutils.EVENT_TIMEOUT) # optimize time-outs below to single one
+ for i in range(len(group2_ids)):
+ rutils.fail_on_event(dut, rutils.decorate_event(
+ rconsts.EVENT_CB_RANGING_ON_RESULT, group2_ids[i]), 0)
+ for i in range(len(group3_ids)):
+ rutils.wait_for_event(dut, rutils.decorate_event(
+ rconsts.EVENT_CB_RANGING_ON_RESULT, group3_ids[i]))
+
+ @test_tracker_info(uuid="48297480-c026-4780-8c13-476e7bea440c")
+ def test_throttling(self):
+    """Request sequential range operations using a bogus UID (which will be
+    treated as a throttled background process) and, similarly, using ACTS/sl4a
+    as the source (a foreground/unthrottled process)."""
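+    # Expected behavior (matches the waits below): both foreground (ACTS/sl4a)
+    # requests and the first background request return results; the second
+    # background request is throttled and reports a ranging failure.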
+ dut = self.android_devices[0]
+ max_peers = dut.droid.wifiRttMaxPeersInRequest()
+
+    # Need to use a random number since the system keeps state, so a reused
+    # background uid would already be throttled on the next run of this script
+ fake_uid = [random.randint(10, 9999)]
+
+ aps = rutils.select_best_scan_results(
+ rutils.scan_with_rtt_support_constraint(dut, True, repeat=10),
+ select_count=1)
+ dut.log.info("RTT Supporting APs=%s", aps)
+
+ asserts.assert_true(
+ len(aps) > 0,
+ "Need at least one AP which supports 802.11mc!")
+ if len(aps) > max_peers:
+ aps = aps[0:max_peers]
+
+ id1 = dut.droid.wifiRttStartRangingToAccessPoints(aps) # as ACTS/sl4a
+ id2 = dut.droid.wifiRttStartRangingToAccessPoints(aps, fake_uid)
+ id3 = dut.droid.wifiRttStartRangingToAccessPoints(aps, fake_uid)
+ id4 = dut.droid.wifiRttStartRangingToAccessPoints(aps) # as ACTS/sl4a
+
+ rutils.wait_for_event(dut, rutils.decorate_event(
+ rconsts.EVENT_CB_RANGING_ON_RESULT, id1))
+ rutils.wait_for_event(dut, rutils.decorate_event(
+ rconsts.EVENT_CB_RANGING_ON_RESULT, id2))
+ rutils.wait_for_event(dut, rutils.decorate_event(
+ rconsts.EVENT_CB_RANGING_ON_FAIL, id3))
+ rutils.wait_for_event(dut, rutils.decorate_event(
+ rconsts.EVENT_CB_RANGING_ON_RESULT, id4))
diff --git a/acts/tests/google/wifi/rtt/stress/StressRangeApTest.py b/acts/tests/google/wifi/rtt/stress/StressRangeApTest.py
new file mode 100644
index 0000000..497c125
--- /dev/null
+++ b/acts/tests/google/wifi/rtt/stress/StressRangeApTest.py
@@ -0,0 +1,79 @@
+#!/usr/bin/python3.4
+#
+# Copyright 2017 - The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from acts import asserts
+from acts.base_test import BaseTestClass
+from acts.test_utils.wifi.rtt import rtt_test_utils as rutils
+from acts.test_utils.wifi.rtt.RttBaseTest import RttBaseTest
+
+
+class StressRangeApTest(RttBaseTest):
+ """Test class for stress testing of RTT ranging to Access Points"""
+
+ def __init__(self, controllers):
+ BaseTestClass.__init__(self, controllers)
+
+ #############################################################################
+
+ def test_rtt_supporting_ap_only(self):
+ """Scan for APs and perform RTT only to those which support 802.11mc.
+
+ Stress test: repeat ranging to the same AP. Verify rate of success and
+ stability of results.
+ """
+ dut = self.android_devices[0]
+ rtt_supporting_aps = rutils.scan_with_rtt_support_constraint(dut, True,
+ repeat=10)
+ dut.log.debug("RTT Supporting APs=%s", rtt_supporting_aps)
+
+ num_iter = self.stress_test_min_iteration_count
+
+ max_peers = dut.droid.wifiRttMaxPeersInRequest()
+ asserts.assert_true(
+ len(rtt_supporting_aps) > 0,
+ "Need at least one AP which supports 802.11mc!")
+ if len(rtt_supporting_aps) > max_peers:
+ rtt_supporting_aps = rtt_supporting_aps[0:max_peers]
+
+ events = rutils.run_ranging(dut, rtt_supporting_aps, num_iter, 0,
+ self.stress_test_target_run_time_sec)
+ stats = rutils.analyze_results(events, self.rtt_reference_distance_mm,
+ self.rtt_reference_distance_margin_mm,
+ self.rtt_min_expected_rssi_dbm,
+ self.lci_reference, self.lcr_reference,
+ summary_only=True)
+ dut.log.debug("Stats=%s", stats)
+
+ for bssid, stat in stats.items():
+ asserts.assert_true(stat['num_no_results'] == 0,
+ "Missing (timed-out) results", extras=stats)
+ asserts.assert_false(stat['any_lci_mismatch'],
+ "LCI mismatch", extras=stats)
+ asserts.assert_false(stat['any_lcr_mismatch'],
+ "LCR mismatch", extras=stats)
+ asserts.assert_equal(stat['num_invalid_rssi'], 0, "Invalid RSSI",
+ extras=stats)
+ asserts.assert_true(stat['num_failures'] <=
+ self.rtt_max_failure_rate_two_sided_rtt_percentage
+ * stat['num_results'] / 100,
+ "Failure rate is too high", extras=stats)
+ asserts.assert_true(stat['num_range_out_of_margin'] <=
+ self.rtt_max_margin_exceeded_rate_two_sided_rtt_percentage
+ * stat['num_success_results'] / 100,
+ "Results exceeding error margin rate is too high",
+ extras=stats)
+ asserts.explicit_pass("RTT test done", extras=stats)
+
diff --git a/acts/tests/google/wifi/rtt/stress/StressRangeAwareTest.py b/acts/tests/google/wifi/rtt/stress/StressRangeAwareTest.py
new file mode 100644
index 0000000..3073898
--- /dev/null
+++ b/acts/tests/google/wifi/rtt/stress/StressRangeAwareTest.py
@@ -0,0 +1,137 @@
+#!/usr/bin/python3.4
+#
+# Copyright 2018 - The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import queue
+import time
+
+from acts import asserts
+from acts.test_utils.wifi.aware import aware_const as aconsts
+from acts.test_utils.wifi.aware import aware_test_utils as autils
+from acts.test_utils.wifi.aware.AwareBaseTest import AwareBaseTest
+from acts.test_utils.wifi.rtt import rtt_const as rconsts
+from acts.test_utils.wifi.rtt import rtt_test_utils as rutils
+from acts.test_utils.wifi.rtt.RttBaseTest import RttBaseTest
+
+
+class StressRangeAwareTest(AwareBaseTest, RttBaseTest):
+ """Test class for stress testing of RTT ranging to Wi-Fi Aware peers."""
+ SERVICE_NAME = "GoogleTestServiceXY"
+
+ def __init__(self, controllers):
+ AwareBaseTest.__init__(self, controllers)
+ RttBaseTest.__init__(self, controllers)
+
+ def setup_test(self):
+ """Manual setup here due to multiple inheritance: explicitly execute the
+ setup method from both parents."""
+ AwareBaseTest.setup_test(self)
+ RttBaseTest.setup_test(self)
+
+ def teardown_test(self):
+ """Manual teardown here due to multiple inheritance: explicitly execute the
+ teardown method from both parents."""
+ AwareBaseTest.teardown_test(self)
+ RttBaseTest.teardown_test(self)
+
+ #############################################################################
+
+ def run_rtt_discovery(self, init_dut, resp_mac=None, resp_peer_id=None):
+    """Perform a single RTT measurement, using Aware, from the Initiator DUT to
+    a Responder. The RTT Responder can be specified using its MAC address
+    (obtained using out-of-band discovery) or its Peer ID (using Aware
+    discovery).
+
+    Args:
+      init_dut: RTT Initiator device
+      resp_mac: MAC address of the RTT Responder device
+      resp_peer_id: Peer ID of the RTT Responder device
+
+    Returns: the first ranging result entry on success, or None if no result
+    event was received before the timeout.
+    """
+ asserts.assert_true(resp_mac is not None or resp_peer_id is not None,
+ "One of the Responder specifications (MAC or Peer ID)"
+ " must be provided!")
+ if resp_mac is not None:
+ id = init_dut.droid.wifiRttStartRangingToAwarePeerMac(resp_mac)
+ else:
+ id = init_dut.droid.wifiRttStartRangingToAwarePeerId(resp_peer_id)
+ try:
+ event = init_dut.ed.pop_event(rutils.decorate_event(
+ rconsts.EVENT_CB_RANGING_ON_RESULT, id), rutils.EVENT_TIMEOUT)
+ result = event["data"][rconsts.EVENT_CB_RANGING_KEY_RESULTS][0]
+ if resp_mac is not None:
+ rutils.validate_aware_mac_result(result, resp_mac, "DUT")
+ else:
+ rutils.validate_aware_peer_id_result(result, resp_peer_id, "DUT")
+ return result
+ except queue.Empty:
+ return None
+
+ def test_stress_rtt_ib_discovery_set(self):
+ """Perform a set of RTT measurements, using in-band (Aware) discovery, and
+ switching Initiator and Responder roles repeatedly.
+
+ Stress test: repeat ranging operations. Verify rate of success and
+ stability of results.
+ """
+ p_dut = self.android_devices[0]
+ s_dut = self.android_devices[1]
+
+ (p_id, s_id, p_disc_id, s_disc_id,
+ peer_id_on_sub, peer_id_on_pub) = autils.create_discovery_pair(
+ p_dut,
+ s_dut,
+ p_config=autils.add_ranging_to_pub(autils.create_discovery_config(
+ self.SERVICE_NAME, aconsts.PUBLISH_TYPE_UNSOLICITED), True),
+ s_config=autils.add_ranging_to_pub(autils.create_discovery_config(
+ self.SERVICE_NAME, aconsts.SUBSCRIBE_TYPE_PASSIVE), True),
+ device_startup_offset=self.device_startup_offset,
+ msg_id=self.get_next_msg_id())
+
+ results = []
+ start_clock = time.time()
+ iterations_done = 0
+ run_time = 0
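+    # Run at least stress_test_min_iteration_count iterations and, if
+    # stress_test_target_run_time_sec is non-zero, keep iterating until that
+    # much wall-clock time has also elapsed.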
+ while iterations_done < self.stress_test_min_iteration_count or (
+ self.stress_test_target_run_time_sec != 0
+ and run_time < self.stress_test_target_run_time_sec):
+ results.append(self.run_rtt_discovery(p_dut, resp_peer_id=peer_id_on_pub))
+ results.append(self.run_rtt_discovery(s_dut, resp_peer_id=peer_id_on_sub))
+
+ iterations_done = iterations_done + 1
+ run_time = time.time() - start_clock
+
+ stats = rutils.extract_stats(results, self.rtt_reference_distance_mm,
+ self.rtt_reference_distance_margin_mm,
+ self.rtt_min_expected_rssi_dbm,
+ summary_only=True)
+ self.log.debug("Stats: %s", stats)
+ asserts.assert_true(stats['num_no_results'] == 0,
+ "Missing (timed-out) results", extras=stats)
+ asserts.assert_false(stats['any_lci_mismatch'],
+ "LCI mismatch", extras=stats)
+ asserts.assert_false(stats['any_lcr_mismatch'],
+ "LCR mismatch", extras=stats)
+ asserts.assert_equal(stats['num_invalid_rssi'], 0, "Invalid RSSI",
+ extras=stats)
+ asserts.assert_true(
+ stats['num_failures'] <=
+ self.rtt_max_failure_rate_two_sided_rtt_percentage
+ * stats['num_results'] / 100,
+ "Failure rate is too high", extras=stats)
+ asserts.assert_true(
+ stats['num_range_out_of_margin']
+ <= self.rtt_max_margin_exceeded_rate_two_sided_rtt_percentage
+ * stats['num_success_results'] / 100,
+ "Results exceeding error margin rate is too high", extras=stats)